-
-
Notifications
You must be signed in to change notification settings - Fork 509
Magcache Support #1877
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Magcache Support #1877
Changes from 2 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -619,17 +619,25 @@ def __init__(self): | |
| self._dynamic_caching.value_changed.connect(self.write) | ||
| self._layout.addWidget(self._dynamic_caching) | ||
|
|
||
| self._magcache_enabled = SwitchSetting( | ||
| Settings._magcache_enabled, | ||
| text=(_("Enabled"), _("Disabled")), | ||
| parent=self | ||
| ) | ||
| self._magcache_enabled.value_changed.connect(self.write) | ||
| self._layout.addWidget(self._magcache_enabled) | ||
|
|
||
| self._layout.addStretch() | ||
|
|
||
| def _change_performance_preset(self, index): | ||
| self.write() | ||
| is_custom = settings.performance_preset is PerformancePreset.custom | ||
| self._advanced.setEnabled(is_custom) | ||
| if ( | ||
| settings.performance_preset is PerformancePreset.auto | ||
| and root.connection.state is ConnectionState.connected | ||
| ): | ||
|
|
||
| if (settings.performance_preset is PerformancePreset.auto and | ||
| root.connection.state is ConnectionState.connected): | ||
| apply_performance_preset(settings, root.connection.client.device_info) | ||
|
|
||
| if not is_custom: | ||
| self.read() | ||
|
|
||
|
|
@@ -640,12 +648,30 @@ def update_client_info(self): | |
| _("Device") | ||
| + f": [{client.device_info.type.upper()}] {client.device_info.name} ({client.device_info.vram} GB)" | ||
| ) | ||
| self._dynamic_caching.enabled = client.features.wave_speed | ||
| self._dynamic_caching.setToolTip( | ||
| _("The {node_name} node is not installed.").format(node_name="Comfy-WaveSpeed") | ||
| if not client.features.wave_speed | ||
| else "" | ||
| ) | ||
|
|
||
| has_wave_speed = hasattr(client.features, 'wave_speed') and client.features.wave_speed | ||
|
||
| self._dynamic_caching.enabled = has_wave_speed | ||
| if not has_wave_speed: | ||
| self._dynamic_caching.setToolTip( | ||
| _("The {node_name} node is not installed.").format(node_name="Comfy-WaveSpeed") | ||
| ) | ||
| else: | ||
| self._dynamic_caching.setToolTip("") | ||
|
|
||
| has_magcache = hasattr(client.features, 'magcache') and client.features.magcache | ||
| self._magcache_enabled.enabled = has_magcache | ||
| if not has_magcache: | ||
| self._magcache_enabled.setToolTip( | ||
| _("The {node_name} node is not installed.").format(node_name="MagCache") | ||
| ) | ||
| else: | ||
| self._magcache_enabled.setToolTip(_("Accelerate Flux model inference using MagCache. Parameters are automatically configured based on model architecture.")) | ||
| else: | ||
| self._device_info.setText(_("Not connected")) | ||
| self._dynamic_caching.enabled = False | ||
| self._magcache_enabled.enabled = False | ||
| self._dynamic_caching.setToolTip(_("Not connected to server")) | ||
| self._magcache_enabled.setToolTip(_("Not connected to server")) | ||
|
|
||
| def _read(self): | ||
| self._history_size.value = settings.history_size | ||
|
|
@@ -660,6 +686,8 @@ def _read(self): | |
| self._max_pixel_count.value = settings.max_pixel_count | ||
| self._tiled_vae.value = settings.tiled_vae | ||
| self._dynamic_caching.value = settings.dynamic_caching | ||
| self._magcache_enabled.value = settings.magcache_enabled | ||
|
|
||
| self.update_client_info() | ||
|
|
||
| def _write(self): | ||
|
|
@@ -673,6 +701,7 @@ def _write(self): | |
| self._performance_preset.currentIndex() | ||
| ] | ||
| settings.dynamic_caching = self._dynamic_caching.value | ||
| settings.magcache_enabled = self._magcache_enabled.value | ||
|
|
||
|
|
||
| class AboutSettings(SettingsTab): | ||
|
|
@@ -943,4 +972,5 @@ def _open_settings_folder(self): | |
| QDesktopServices.openUrl(QUrl.fromLocalFile(str(util.user_data_dir))) | ||
|
|
||
| def _close(self): | ||
| settings.save() | ||
| _ = self.close() | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -87,6 +87,8 @@ def _sampler_params(sampling: SamplingInput, strength: float | None = None): | |
| return params | ||
|
|
||
|
|
||
|
|
||
|
|
||
| def load_checkpoint_with_lora(w: ComfyWorkflow, checkpoint: CheckpointInput, models: ClientModels): | ||
| arch = checkpoint.version | ||
| model_info = models.checkpoints.get(checkpoint.checkpoint) | ||
|
|
@@ -153,6 +155,29 @@ def load_checkpoint_with_lora(w: ComfyWorkflow, checkpoint: CheckpointInput, mod | |
| if arch.supports_attention_guidance and checkpoint.self_attention_guidance: | ||
| model = w.apply_self_attention_guidance(model) | ||
|
|
||
| # Apply MagCache as a model patch if enabled | ||
| if checkpoint.magcache_enabled: | ||
| if arch in [Arch.flux, Arch.flux_k]: | ||
| print(f"Applying MagCache patch for {arch}") | ||
|
||
|
|
||
| model_type = "flux_kontext" if arch is Arch.flux_k else "flux" | ||
|
|
||
| try: | ||
|
Owner
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. no need to wrap in try/except, this should not fail (or do so loudly if the code is broken) |
||
| model = w.apply_magcache( | ||
| model, | ||
| model_type=model_type, | ||
| magcache_thresh=checkpoint.magcache_thresh, | ||
| retention_ratio=checkpoint.magcache_retention_ratio, | ||
| magcache_K=checkpoint.magcache_K, | ||
| start_step=checkpoint.magcache_start_step, | ||
| end_step=checkpoint.magcache_end_step, | ||
| ) | ||
| print(f"MagCache patch applied successfully with {model_type} settings") | ||
| except Exception as e: | ||
| print(f"Failed to apply MagCache patch: {e}") | ||
| else: | ||
| print(f"MagCache not supported for architecture: {arch}") | ||
|
|
||
| return model, Clip(clip, arch), vae | ||
|
|
||
|
|
||
|
|
@@ -751,7 +776,10 @@ def generate( | |
| models: ModelDict, | ||
| ): | ||
| model, clip, vae = load_checkpoint_with_lora(w, checkpoint, models.all) | ||
|
|
||
|
|
||
| model = apply_ip_adapter(w, model, cond.control, models) | ||
|
|
||
| model_orig = copy(model) | ||
| model, regions = apply_attention_mask(w, model, cond, clip, extent.initial) | ||
| model = apply_regional_ip_adapter(w, model, cond.regions, extent.initial, models) | ||
|
|
@@ -864,6 +892,7 @@ def inpaint( | |
|
|
||
| model, clip, vae = load_checkpoint_with_lora(w, checkpoint, models.all) | ||
| model = w.differential_diffusion(model) | ||
|
|
||
| model_orig = copy(model) | ||
|
|
||
| upscale_extent = ScaledExtent( # after crop to the masked region | ||
|
|
@@ -992,7 +1021,10 @@ def refine( | |
| models: ModelDict, | ||
| ): | ||
| model, clip, vae = load_checkpoint_with_lora(w, checkpoint, models.all) | ||
|
|
||
|
|
||
| model = apply_ip_adapter(w, model, cond.control, models) | ||
|
|
||
| model, regions = apply_attention_mask(w, model, cond, clip, extent.initial) | ||
| model = apply_regional_ip_adapter(w, model, cond.regions, extent.initial, models) | ||
| in_image = w.load_image(image) | ||
|
|
@@ -1368,6 +1400,15 @@ def prepare( | |
| face_weight = median_or_zero(c.strength for c in all_control if c.mode is ControlMode.face) | ||
| if face_weight > 0: | ||
| i.models.loras.append(LoraInput(model_set.lora["face"], 0.65 * face_weight)) | ||
|
|
||
| if perf.magcache_enabled and arch in [Arch.flux, Arch.flux_k]: | ||
| i.models.magcache_enabled = True | ||
| i.models.magcache_thresh = perf.magcache_thresh | ||
| i.models.magcache_retention_ratio = perf.magcache_retention_ratio | ||
| i.models.magcache_K = perf.magcache_K | ||
| print(f"MagCache settings added to WorkflowInput for {arch}") | ||
| else: | ||
| i.models.magcache_enabled = False | ||
|
|
||
| if kind is WorkflowKind.generate: | ||
| assert isinstance(canvas, Extent) | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Please remove the parameters also from here and settings.py. They can be dug up from git if ever needed, but I'd rather not have them in the code without a good reason.
And if more configurability is desired at some point, a single "strength" value or some preset would be a more user friendly way to control it anyway.