Skip to content

Commit dfd528e

Browse files
authored
RI-6425: generate big data for tests
1 parent 77a2511 commit dfd528e

File tree

2 files changed

+331
-0
lines changed

2 files changed

+331
-0
lines changed

tests/e2e/helpers/keys.ts

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ import { BrowserPage } from '../pageObjects';
66
import { KeyData, AddKeyArguments } from '../pageObjects/browser-page';
77
import { COMMANDS_TO_CREATE_KEY, KeyTypesTexts } from './constants';
88
import { Common } from './common';
9+
import { populateBigKeys, populateDb } from './scripts/generate-big-data';
910

1011
const browserPage = new BrowserPage();
1112

@@ -298,6 +299,32 @@ export async function deleteAllKeysFromDB(host: string, port: string): Promise<v
298299
}
299300
}
300301

302+
export async function populateBigData(host: string, port: string): Promise<void> {
303+
const url = `redis://default@${host}:${port}`;
304+
const client = createClient({
305+
url,
306+
socket: {
307+
connectTimeout: 10000
308+
}
309+
});
310+
311+
client.on('error', (error: Error) => {
312+
console.error('Redis Client Error', error);
313+
});
314+
315+
try {
316+
await populateDb(client, {
317+
mainKeysLimit: 50_000, // 50_000 main keys, default 500_000
318+
secondaryKeysLimit: 12_500, // 12_500 secondary keys, default 125_000
319+
});
320+
await populateBigKeys(client);
321+
} catch (error) {
322+
console.error('Error populating database:', error);
323+
} finally {
324+
await client.disconnect();
325+
}
326+
}
327+
301328
/**
302329
* Verifying if the Keys are in the List of keys
303330
* @param keyNames The names of the keys
Lines changed: 304 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,304 @@
1+
import { isNull, isNumber } from 'lodash';
2+
import RedisClient from '@redis/client/dist/lib/client';
3+
4+
5+
const iterationsPrimary = 500_000;
6+
const iterationsSecondary = 125_000;
7+
const batchSizeDefault = 10_000;
8+
9+
type CommandType = [cmd: string, ...args: (string | number)[]];
10+
type CommandsType = CommandType[];
11+
12+
function prepareCommandArgs(args: CommandType) {
13+
const strArgs = args.map((arg) => (isNumber(arg) ? arg.toString() : arg)) as string[];
14+
if (!strArgs || !strArgs.length) {
15+
return [];
16+
}
17+
const cmdArg = strArgs.shift() || '';
18+
return [...cmdArg.split(' '), ...strArgs];
19+
}
20+
21+
function prepareCommandOptions(options: {
22+
replyEncoding?: string;
23+
}): any {
24+
let replyEncoding = null;
25+
26+
if (options?.replyEncoding === 'utf8') {
27+
replyEncoding = 'utf8';
28+
}
29+
30+
return {
31+
returnBuffers: isNull(replyEncoding),
32+
};
33+
}
34+
35+
async function sendCommand(client: RedisClient<any, any, any>, command: CommandType, options ?: any) {
36+
let commandArgs = prepareCommandArgs(command);
37+
return client.sendCommand(commandArgs, prepareCommandOptions(options));
38+
}
39+
40+
async function sendPipeline(
41+
client: RedisClient<any, any, any>,
42+
commands: CommandsType,
43+
options?: any,
44+
) {
45+
return Promise.all(
46+
commands.map(
47+
(cmd) => sendCommand(client, cmd, options)
48+
.then((res: any) => [null, res])
49+
.catch((e: any) => [e, null]),
50+
),
51+
);
52+
}
53+
54+
function* generateBigData(baseKey: string, separator: string, limit: number, batchSize = batchSizeDefault) {
55+
const keyTypes = [
56+
'string', 'json', 'hash', 'list', 'set', 'zset',
57+
];
58+
let sent = 0;
59+
while (sent < limit) {
60+
const commands: CommandsType = [];
61+
for (let i = 0; i < batchSize && sent < limit; i++) {
62+
sent += 1;
63+
for (const keyType of keyTypes) {
64+
const keyName = `${baseKey}${separator}${sent}${separator}${keyType}`;
65+
let command: CommandType;
66+
switch (keyType) {
67+
case 'json':
68+
command = ['json.set', keyName, '$', JSON.stringify({ id: sent })];
69+
break;
70+
case 'hash':
71+
command = ['hset', keyName, 'k0', sent];
72+
break;
73+
case 'list':
74+
command = ['lpush', keyName, sent];
75+
break;
76+
case 'set':
77+
command = ['sadd', keyName, sent];
78+
break;
79+
case 'zset':
80+
command = ['zadd', keyName, 0, sent];
81+
break;
82+
case 'string':
83+
default:
84+
command = ['set', keyName, `${sent}`];
85+
break;
86+
}
87+
commands.push(command);
88+
}
89+
}
90+
yield commands;
91+
}
92+
}
93+
94+
const SIZES = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
95+
const toBytes = (size: number, type: string): number => {
96+
const key = SIZES.indexOf(type.toUpperCase());
97+
98+
return Math.floor(size * 1024 ** key);
99+
};
100+
101+
function generateRepeatedString(char: string = 'a', size: number | string = '10KB'): string {
102+
let bytes: number = 0;
103+
if (typeof size === 'string') {
104+
const unit = size.slice(-2).toUpperCase();
105+
const value = parseInt(size.slice(0, -2), 10);
106+
if (!SIZES.includes(unit)) {
107+
console.warn(`Invalid unit ${unit}, expected one of ${SIZES}`);
108+
bytes = value;
109+
} else {
110+
bytes = toBytes(value, unit);
111+
}
112+
} else if (typeof size === 'number') {
113+
bytes = size;
114+
}
115+
116+
return char.repeat(bytes);
117+
}
118+
119+
async function seedBigKeys(
120+
client: RedisClient<any, any, any>,
121+
keyName: string,
122+
command = 'set',
123+
generateCommand: (i: number) => (string | number) | (string | number)[],
124+
limit = 1_000_001, batchSize = 100,
125+
) {
126+
const bigCommands: CommandsType = [];
127+
let batchCommands: (string | number)[] = [];
128+
for (let i = 1; i < limit; i++) {
129+
const items = generateCommand(i);
130+
if (Array.isArray(items)) {
131+
batchCommands.push(...items);
132+
} else {
133+
batchCommands.push(items);
134+
}
135+
if (i % batchSize === 0) {
136+
bigCommands.push([command, keyName, ...batchCommands]);
137+
batchCommands = [];
138+
}
139+
}
140+
await sendPipeline(client, bigCommands);
141+
}
142+
143+
/**
144+
* Populate big keys in Redis Database
145+
*
146+
* Generates a range of keys and values to demonstrate large data sets.
147+
*
148+
* @param client - The Redis client to use.
149+
* @param withBigStrings - If `true`, generates big string keys as well.
150+
* @returns A promise that resolves when the keys are populated.
151+
*/
152+
export const populateBigKeys = async (client: RedisClient<any, any, any>, withBigStrings = false) => {
153+
const bigStrings = [
154+
{
155+
char: 'a',
156+
size: '1MB',
157+
content: '1MB key',
158+
},
159+
{
160+
char: 'b',
161+
size: '2MB',
162+
content: '2MB key',
163+
},
164+
{
165+
char: 'c',
166+
size: '3MB',
167+
content: '3MB key',
168+
},
169+
{
170+
char: 'd',
171+
size: '4MB',
172+
content: '4MB key',
173+
},
174+
{
175+
char: 'e',
176+
size: '5MB',
177+
content: '5MB key',
178+
},
179+
];
180+
try {
181+
console.log('Starting big keys...');
182+
await client.connect();
183+
if (withBigStrings) {
184+
console.log('Generating big string keys...');
185+
const bigKeyStringCommands: CommandsType = [];
186+
for (const {
187+
char,
188+
size,
189+
content
190+
} of bigStrings) {
191+
const key = generateRepeatedString(char, size);
192+
bigKeyStringCommands.push(['set', key, content]);
193+
}
194+
await sendPipeline(client, bigKeyStringCommands);
195+
}
196+
// big string 5M
197+
console.log('Generating 5 MB string key...');
198+
const bigStringKey = 'big string 5MB';
199+
await sendCommand(client, ['set', bigStringKey, generateRepeatedString('e', '5MB')]);
200+
201+
// big hash 1M
202+
console.log('Generating 1_000_000 fields hash key...');
203+
await seedBigKeys(client, 'big hash 1M', 'hset', (i) => [`key${i}`, i], 1_000_001, 100);
204+
// big list 1M
205+
console.log('Generating 1_000_000 items list key...');
206+
await seedBigKeys(client, 'big list 1M', 'lpush', (i) => i, 1_000_001, 100);
207+
// big set 1M
208+
console.log('Generating 1_000_000 items set key...');
209+
await seedBigKeys(client, 'big set 1M', 'sadd', (i) => i, 1_000_001, 100);
210+
// big zset 1M
211+
console.log('Generating 1_000_000 items zset key...');
212+
await seedBigKeys(client, 'big zset 1M', 'zadd', (i) => [i, i], 1_000_001, 100);
213+
console.log('Done');
214+
} catch (e) {
215+
console.error(e);
216+
} finally {
217+
await client.disconnect();
218+
}
219+
};
220+
221+
type PopulateDbOptionsType = {
222+
mainKeysLimit?: number;
223+
secondaryKeysLimit?: number;
224+
separatorPrimary?: string;
225+
separatorSecondary?: string;
226+
baseKeys?: string[];
227+
secondaryKeys?: string[];
228+
}
229+
230+
/**
231+
* Populate Redis database with data.
232+
*
233+
* Populates Redis database with data in format of:
234+
* - Primary key: `device:eu-east-1:1`, `device:eu-east-1:2`, ... `device:eu-east-1:1000`
235+
* - Secondary key: `device_eu-east-1_1`, `device_eu-east-1_2`, ... `device_eu-east-1_1000`
236+
*
237+
* @param client - Redis client object
238+
* @param {PopulateDbOptionsType} options - Options object
239+
*/
240+
export const populateDb = async (
241+
client: RedisClient<any, any, any>,
242+
{
243+
mainKeysLimit = iterationsPrimary,
244+
secondaryKeysLimit = iterationsSecondary,
245+
baseKeys = [
246+
'device', 'mobile', 'user',
247+
],
248+
secondaryKeys = [
249+
'eu-east-1', 'eu-west-1', 'us-east-1', 'us-west-1',
250+
],
251+
separatorPrimary = ':',
252+
separatorSecondary = '_',
253+
}: PopulateDbOptionsType,
254+
): Promise<void> => {
255+
256+
try {
257+
console.log('Starting...');
258+
client.on('error', err => console.log('Redis Client Error', err));
259+
let executions = 0;
260+
await client.connect();
261+
for (let bk of baseKeys) {
262+
const generator = generateBigData(bk, separatorPrimary, mainKeysLimit);
263+
for (const commands of generator) {
264+
// process the commands
265+
await sendPipeline(client, commands);
266+
console.log(`${bk}: ${++executions}`);
267+
}
268+
269+
for (let sk of secondaryKeys) {
270+
const generator = generateBigData(`${bk}${separatorSecondary}${sk}`, separatorSecondary, secondaryKeysLimit);
271+
for (const commands of generator) {
272+
// process the commands
273+
await sendPipeline(client, commands);
274+
console.log(`${bk}${separatorSecondary}${sk}: ${++executions}`);
275+
}
276+
}
277+
}
278+
279+
console.log('Done');
280+
} catch (e) {
281+
console.error(e);
282+
} finally {
283+
await client.disconnect();
284+
}
285+
};
286+
// const host = '127.0.0.1';
287+
// const port = '6666';
288+
// const port = '8103';
289+
// const url = `redis://default@${host}:${port}`;
290+
291+
// const client: RedisClient<any, any, any> = createClient({url});
292+
//
293+
// const remoteClient = createClient({
294+
// username: 'default',
295+
// password: '<redacted>', // NOTE(review): a real-looking credential was committed here (even commented out); rotate it and never commit secrets
296+
// socket: {
297+
// host: 'redis-13690.crce8.us-east-1-mz.ec2.qa-cloud.redislabs.com',
298+
// port: 13690
299+
// }
300+
// });
301+
// populateBigKeys(client, true).then(() => {
302+
// console.log('Populating DB...');
303+
// return populateDb(client, { mainKeysLimit: iterationsPrimary, secondaryKeysLimit: iterationsSecondary });
304+
// });

0 commit comments

Comments
 (0)