Skip to content

Latest commit

 

History

History
173 lines (128 loc) · 3.15 KB

File metadata and controls

173 lines (128 loc) · 3.15 KB

Memory Management

Handle large datasets efficiently without memory issues.

Use Generators

batch() - Process in Chunks

// Walk the users table in fixed-size chunks of 100 rows each.
$chunks = $db->find()
    ->from('users')
    ->orderBy('id')
    ->batch(100);

foreach ($chunks as $chunk) {
    foreach ($chunk as $user) {
        processUser($user);
    }

    // Drop the chunk reference and reclaim cycles before fetching the next one.
    unset($chunk);
    gc_collect_cycles();
}

each() - One Record at a Time

// Iterate active users one row at a time.
// NOTE(review): the 50 presumably controls the internal fetch/page size — confirm against the library docs.
$activeUsers = $db->find()
    ->from('users')
    ->where('active', 1)
    ->each(50);

foreach ($activeUsers as $user) {
    // Only the current row is held by this loop at any point.
    processUser($user);
}

stream() - Streaming

// Stream unbuffered results: rows are yielded as they arrive
// instead of being materialized into an array first.
$query = $db->find()
    ->from('users')
    ->orderBy('id');

foreach ($query->stream() as $user) {
    processUser($user);
}

Avoid Loading Everything

❌ Bad: Load All Data

// Loads entire table into memory
// Anti-pattern: get() materializes every row into an array before the
// loop even starts, so peak memory scales with the table size.
$users = $db->find()->from('users')->get();

foreach ($users as $user) {
    processUser($user);  // 1M users = 1GB+ memory
}

✅ Good: Use Generators

// Pull rows lazily: only the current user is resident at a time.
$users = $db->find()->from('users')->stream();
foreach ($users as $user) {
    processUser($user);
}

Limit Query Results

Always Use LIMIT

// ✅ Good: cap the result set so memory use stays bounded
$users = $db->find()
    ->from('users')
    ->where('active', 1)
    ->orderBy('created_at', 'DESC')
    ->limit(100)
    ->get();

// ❌ Bad: an unbounded get() could pull millions of rows at once
$users = $db->find()->from('users')->get();

Process Large Datasets

Export to CSV

/**
 * Export every user to a CSV file, streaming rows so memory stays flat.
 *
 * @param string $filename Destination path for the CSV file.
 *
 * @throws RuntimeException When the destination file cannot be opened.
 */
function exportUsersToCsv(string $filename): void
{
    // BUG FIX: the original body referenced $db without importing it into
    // function scope, which is an undefined-variable error inside a PHP
    // function. Pull it in from the global scope explicitly.
    global $db;

    $file = fopen($filename, 'w');
    // fopen() returns false on failure; fail loudly instead of passing
    // false into fputcsv().
    if ($file === false) {
        throw new RuntimeException("Unable to open {$filename} for writing");
    }

    fputcsv($file, ['ID', 'Name', 'Email']);

    // stream() yields one row at a time, so the export never buffers
    // the whole table in memory.
    foreach ($db->find()
        ->from('users')
        ->orderBy('id')
        ->stream() as $user) {

        fputcsv($file, [$user['id'], $user['name'], $user['email']]);
    }

    fclose($file);
}

Batch Updates

// Deactivate stale users (no login for 90 days), 1000 rows per batch.
$staleUsers = $db->find()
    ->from('users')
    ->where('last_login', Db::now('-90 DAYS'), '<')
    ->orderBy('id')
    ->batch(1000);

foreach ($staleUsers as $batch) {
    foreach ($batch as $user) {
        $db->find()
            ->table('users')
            ->where('id', $user['id'])
            ->update(['status' => 'inactive']);
    }
}

Monitor Memory Usage

Check Memory Consumption

// Snapshot real memory usage before and after a streamed pass over the table.
$before = memory_get_usage(true);

foreach ($db->find()->from('users')->stream() as $user) {
    processUser($user);
}

$after = memory_get_usage(true);
$usedMemory = ($after - $before) / 1024 / 1024;
echo "Memory used: {$usedMemory} MB\n";

Clear Memory

Explicit Cleanup

// Process the users table in 1000-row batches, releasing memory as we go.
// BUG FIX: the original incremented $i without ever initializing it, which
// raises an undefined-variable warning in PHP 8.
$i = 0;

foreach ($db->find()->from('users')->batch(1000) as $batch) {
    processBatch($batch);

    // Drop the batch reference so it can be collected immediately.
    unset($batch);

    // Force garbage collection every 10 batches (including the first,
    // since the counter starts at 0).
    if (($i++ % 10) === 0) {
        gc_collect_cycles();
    }
}

Next Steps