
Commit 6ed729e

Author: Eric Hiss (author, committer)

Update pyproject.toml with complete metadata and add dev/ to gitignore

- Add author/maintainer info, keywords, classifiers
- Update version to 1.9.8
- Add PublisherId for ComfyUI registry
- Clean up dependencies
- Add dev/ folder to gitignore for test scripts
1 parent f5a2755 commit 6ed729e

1 file changed: +76 -27 lines changed

pyproject.toml

Lines changed: 76 additions & 27 deletions
@@ -1,37 +1,86 @@
 [project]
-name = "local_llm_prompt_enhancer"
+name = "comfyui-erics-prompt-enhancers"
+version = "1.9.8"
 description = "A comprehensive suite of AI-powered prompt enhancement nodes for ComfyUI using local LLMs (LM Studio or Ollama). Transform simple prompts into detailed, platform-optimized descriptions for video and image generation."
-version = "1.0.0"
-license = {file = "LICENSE"}
-# classifiers = [
-# # For OS-independent nodes (works on all operating systems)
-# "Operating System :: OS Independent",
-#
-# # OR for OS-specific nodes, specify the supported systems:
-# "Operating System :: Microsoft :: Windows", # Windows specific
-# "Operating System :: POSIX :: Linux", # Linux specific
-# "Operating System :: MacOS", # macOS specific
-#
-# # GPU Accelerator support. Pick the ones that are supported by your extension.
-# "Environment :: GPU :: NVIDIA CUDA", # NVIDIA CUDA support
-# "Environment :: GPU :: AMD ROCm", # AMD ROCm support
-# "Environment :: GPU :: Intel Arc", # Intel Arc support
-# "Environment :: NPU :: Huawei Ascend", # Huawei Ascend support
-# "Environment :: GPU :: Apple Metal", # Apple Metal support
-# ]
+readme = "README.md"
+license = { file = "LICENSE" }
+requires-python = ">=3.8"
+authors = [
+    { name = "Eric Hiss", email = "[email protected]" }
+]
+maintainers = [
+    { name = "Eric Hiss", email = "[email protected]" }
+]
+keywords = [
+    "comfyui",
+    "prompt",
+    "prompt-engineering",
+    "prompt-expansion",
+    "llm",
+    "local-llm",
+    "lm-studio",
+    "ollama",
+    "stable-diffusion",
+    "flux",
+    "sdxl",
+    "pony-diffusion",
+    "illustrious-xl",
+    "video-generation",
+    "image-generation",
+    "ai-art",
+    "text-to-image",
+    "text-to-video",
+    "image-to-video",
+    "wan21",
+    "hunyuan"
+]
+classifiers = [
+    "Development Status :: 4 - Beta",
+    "Intended Audience :: Developers",
+    "Intended Audience :: End Users/Desktop",
+    "Topic :: Multimedia :: Graphics",
+    "Topic :: Multimedia :: Video",
+    "Topic :: Scientific/Engineering :: Artificial Intelligence",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Operating System :: OS Independent",
+]
+dependencies = [
+    "requests>=2.25.0",
+]
 
-dependencies = ["# Requirements for Eric's Prompt Enhancers for ComfyUI", "# Most dependencies are already provided by ComfyUI", "# Core dependency for LLM API calls", "requests>=2.25.0", "# Image processing (usually included with ComfyUI)", "# Pillow>=8.0.0", "# Tensor operations (usually included with ComfyUI)", "# torch>=1.10.0", "# numpy>=1.19.0", "# Note: PyTorch, NumPy, and Pillow are typically already installed", "# with ComfyUI. Only install if you encounter import errors.", "# Optional: LLM Backends (install separately)", "# LM Studio: https://lmstudio.ai/ (recommended)", "# Ollama: https://ollama.ai/"]
+[project.optional-dependencies]
+dev = [
+    "pytest>=7.0.0",
+    "pytest-cov>=4.0.0",
+]
 
 [project.urls]
-Repository = "https://github.com/EricRollei/Local_LLM_Prompt_Enhancer"
-# Used by Comfy Registry https://registry.comfy.org
+Homepage = "https://github.com/EricRollei/Local_LLM_Prompt_Enhancer"
+Repository = "https://github.com/EricRollei/Local_LLM_Prompt_Enhancer.git"
 Documentation = "https://github.com/EricRollei/Local_LLM_Prompt_Enhancer/wiki"
-"Bug Tracker" = "https://github.com/EricRollei/Local_LLM_Prompt_Enhancer/issues"
+Issues = "https://github.com/EricRollei/Local_LLM_Prompt_Enhancer/issues"
+Changelog = "https://github.com/EricRollei/Local_LLM_Prompt_Enhancer/blob/main/CHANGELOG.md"
 
+[build-system]
+requires = ["setuptools>=61.0", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools]
+packages = ["video_prompter"]
+include-package-data = true
+
+[tool.setuptools.package-data]
+video_prompter = ["*.json", "*.md"]
+
+# ComfyUI Registry Configuration
+# https://docs.comfy.org/registry/overview
 [tool.comfy]
-PublisherId = ""
-DisplayName = "Local_LLM_Prompt_Enhancer"
+PublisherId = "ericrollei"
+DisplayName = "Eric's Prompt Enhancers"
 Icon = ""
-includes = []
-# "requires-comfyui" = ">=1.0.0" # ComfyUI version compatibility
 
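The cleaned-up `dependencies` list leaves `requests` as the only runtime requirement because the nodes call a locally running LLM server over HTTP rather than bundling a model. The sketch below is not the repository's actual node code; it only illustrates that kind of call, assuming LM Studio's default OpenAI-compatible endpoint on port 1234 (Ollama's default server listens on port 11434 instead), and the model name and message contents are illustrative.

```python
# Hypothetical sketch of a prompt-enhancement request via "requests";
# not taken from the repository. Assumes LM Studio's OpenAI-compatible
# server at http://localhost:1234/v1 with some model already loaded.
import requests

def enhance_prompt(prompt: str, base_url: str = "http://localhost:1234/v1") -> str:
    """Ask a local LLM to expand a short prompt into a detailed one."""
    response = requests.post(
        f"{base_url}/chat/completions",
        json={
            "model": "local-model",  # LM Studio serves whichever model is loaded
            "messages": [
                {"role": "system",
                 "content": "Expand the user's prompt into a detailed image-generation prompt."},
                {"role": "user", "content": prompt},
            ],
            "temperature": 0.7,
        },
        timeout=120,
    )
    response.raise_for_status()
    return response.json()["choices"][0]["message"]["content"]

if __name__ == "__main__":
    print(enhance_prompt("a cat in a spacesuit"))
```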

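The new `[project.optional-dependencies]` dev group and the `[tool.setuptools]` tables work together: pytest and pytest-cov are pulled in only for development (e.g. via `pip install -e ".[dev]"`), while the `video_prompter` package and its `*.json`/`*.md` data files are what ships. A minimal sketch of the kind of test the dev extras would run; the file name and assertions are illustrative, not from the repository.

```python
# Hypothetical tests/test_package.py, assuming the "video_prompter" package
# name declared under [tool.setuptools]. Run with `pytest` after installing
# the dev extras.
import importlib

def test_package_imports():
    # The installed package (with its bundled *.json / *.md data) should import cleanly.
    module = importlib.import_module("video_prompter")
    assert module is not None
```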