Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
72 commits
Select commit Hold shift + click to select a range
bb40e7e
Removing 2.7 support and switching everything to new practices
xiancg May 7, 2023
270e3cf
New package setup
xiancg Aug 3, 2024
f9dc598
Update logger
xiancg Aug 3, 2024
29228ad
Logger update and removed six calls
xiancg Aug 3, 2024
3944c62
Updating format and removing eval statements
xiancg Aug 4, 2024
0e74614
Adding pathlib and making sure TokenNumber and Token are taken in loa…
xiancg Aug 4, 2024
721f128
Working on typing hints to improve readability
xiancg Aug 4, 2024
1c143f6
Typing hints
xiancg Aug 4, 2024
78eced5
Parameterize explicit solve test
xiancg Aug 4, 2024
72316fc
Parameterize defaults tests
xiancg Aug 4, 2024
02aebeb
Parameterize implicit solving test
xiancg Aug 4, 2024
4ef373d
Parameterize parsing tests
xiancg Aug 4, 2024
ab7ad6d
Parameterize repeated tokens
xiancg Aug 4, 2024
c828ed0
Parameterize repeating tokens
xiancg Aug 4, 2024
2a38c90
Parameterization of anchor tests
xiancg Aug 4, 2024
ba8372d
More parameterization on naming tests
xiancg Aug 4, 2024
db14be8
Remove comment
xiancg Aug 4, 2024
f535ba3
WIP rules tests parameterization
xiancg Aug 4, 2024
aa898fc
Validate basic rule creation parameters
xiancg Aug 5, 2024
638c895
Rules tests parametrization
xiancg Aug 5, 2024
584c456
Wip token tests parametrization
xiancg Aug 6, 2024
3ded37b
Tokens tests parametrization finished
xiancg Aug 6, 2024
870da79
Update docs
xiancg Aug 6, 2024
7cf4ddf
Fixing outdated tox
xiancg Aug 6, 2024
df05641
Merge branch 'master' into refactor
xiancg Aug 7, 2024
ef92972
Remove old travis
xiancg Aug 7, 2024
34f456d
Merge branch 'master' into refactor
xiancg Aug 7, 2024
cb57fb5
Setup shell before invoking ruff
xiancg Aug 7, 2024
4906430
Using pipenv run
xiancg Aug 7, 2024
ebdcf82
Badges
xiancg Aug 7, 2024
4bdded2
Fixing docs buils
xiancg Aug 7, 2024
0794381
docs requirements
xiancg Aug 7, 2024
4930a59
Update yml with requirements
xiancg Aug 7, 2024
8e01a76
Merge branch 'master' into refactor
xiancg Aug 7, 2024
93611b4
Update badges in docs
xiancg Aug 7, 2024
96dfd41
Merge branch 'master' into refactor
xiancg Aug 7, 2024
879181d
Update coverage
xiancg Aug 10, 2024
a06c8a4
Adds coverage badge
xiancg Aug 10, 2024
8fce812
Testing environment
xiancg Aug 10, 2024
583e488
Add lcov report
xiancg Aug 10, 2024
057036f
wrong token placement
xiancg Aug 10, 2024
9122ca7
Merge branch 'master' into refactor
xiancg Aug 10, 2024
50c3fbe
Update get_repo logic and adds validation functions
xiancg Aug 11, 2024
60dc4ec
Adds tokens and rules validations. Refactor save_session
xiancg Aug 11, 2024
be5a88c
Update load function conf naming
xiancg Aug 11, 2024
f0c10d6
Not templates, rules
xiancg Aug 11, 2024
bb8d379
Limit to pull requests
xiancg Aug 11, 2024
e4aa558
Adding referenced rules
xiancg Aug 11, 2024
55ad519
Working on new referenced functionality an tests
xiancg Aug 11, 2024
a243b9d
Adds validation function and test
xiancg Dec 23, 2024
1b31d15
All tests passing
xiancg Dec 23, 2024
aba19d4
Validations for token numbers
xiancg Dec 23, 2024
4fd1bd2
Padding validation and more docs for it
xiancg Dec 23, 2024
5f3b565
Version up
xiancg Dec 23, 2024
023de7d
Merge branch 'master' into refactor
xiancg Dec 23, 2024
52126a3
Version up
xiancg Dec 23, 2024
36305df
Ignore flake8 for now
xiancg Dec 23, 2024
7d54804
Fix .conf file naming in logs
xiancg Dec 23, 2024
61ceac2
Merge branch 'master' into refactor
xiancg Dec 23, 2024
6c60b27
Missing log info in raise statement
xiancg Jan 6, 2025
b027a75
Merge branch 'master' into refactor
xiancg Jan 6, 2025
98f45dc
Let the user know about casing mismatches
xiancg Jan 6, 2025
24c8be4
Merge branch 'master' into refactor
xiancg Jan 6, 2025
dfb9dda
Improve error info if casing is the problem
xiancg Jan 7, 2025
c0049f4
Implements validation against data passed by the user
xiancg Jan 7, 2025
e4156af
Version up
xiancg Jan 7, 2025
7026449
Merge branch 'master' into refactor
xiancg Jan 7, 2025
64a0c65
Implements fallback value for required tokens
xiancg Jan 8, 2025
74c110a
Make sure we only enforce this on Token and not TokenNumber
xiancg Jan 8, 2025
6af78cb
Update docs to include new fallback attribute of Token
xiancg Jan 8, 2025
9c4c69f
Version up
xiancg Jan 8, 2025
8e9613d
Merge branch 'master' into refactor
xiancg Jan 8, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions docs/source/changelog.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
Changelog
================================

1.4.5-beta
---------------------------------------

**Improvements:**
- Adds user values to validate function so names can be validated against expected data.
- Adds fallback to required Tokens, useful for when a Token is required but a default value is needed in case the user doesn't provide one.

1.3.1-beta
---------------------------------------

Expand Down
13 changes: 9 additions & 4 deletions docs/source/tokens.rst
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,25 @@ Tokens are the meaningful parts of a template. A token can be required, meaning

If options are present, then one of them is the default one. Each option follows a {full_name:abbreviation} schema, so that names can be short but meaning can be recovered easily. The default option might be passed explicitly by the user by passing a *default* argument (it must match one of the options in the Token). If no default option is explicitly passed, the Token will sort options alphabetically and pick the first one. Please notice if you pass the *default* option explicitly, you can use the abbreviation or the full option name.

If fallback is defined, it will be used on required tokens if nothing is passed by the user.

.. code-block:: python
:linenos:

n.add_token('whatAffects')
n.add_token('whatLights')
n.add_token('shadowType', fallback='soft')
n.add_token_number('digits')
n.add_token('category', natural='nat',
practical='pra', dramatic='dra',
volumetric='vol', default='nat')

In line 1 we're creating a **Required Token**. This means that for solving the user must provide a value. This is a explicit solve.
In line 1 we're creating a **Required Token**. This means that in order to solve, the user must provide a value; otherwise an error will be raised. This is an explicit solve.

In line 2 we're creating a **Required Token** with a fallback. This means that if the user doesn't provide a value, the Token will solve to the fallback value. This is an implicit solve.

In line 2 we're creating a **Number Token**. This is a special Token really useful for working with version like or counting parts of a name. It's always required.
In line 3 we're creating a **Number Token**. This is a special Token really useful for working with version like or counting parts of a name. It's always required.

In line 3 we're creating an **Optional Token**, which means that for solving the user can pass one of the options in the Token or simply ignore passing a value and the Token will solve to it's default option. This is an implicit solve, which helps to greatly reduce the amount of info that needs to be passed to solve for certain cases.
In line 4 we're creating an **Optional Token**, which means that for solving the user can pass one of the options in the Token or simply ignore passing a value and the Token will solve to its default option. This is an implicit solve, which helps to greatly reduce the amount of info that needs to be passed to solve for certain cases.

For more information on implicit and explicit solving please check :doc:`usage/solving`

Expand Down
9 changes: 6 additions & 3 deletions docs/source/usage/repositories.rst
Original file line number Diff line number Diff line change
Expand Up @@ -57,18 +57,21 @@ When saving the session, all Tokens and Rules in memory will be saved to the rep
:linenos:

n.add_token('whatAffects')
n.add_token('shadowType', fallback='soft')
n.add_token_number('digits')
n.add_token(
'category',
natural='nat', practical='pra', dramatic='dra',
volumetric='vol', default='nat'
)

In line 1 we're creating a **Required Token**. This means that for solving the user has to provide a value. This is a explicit solve.
In line 1 we're creating a **Required Token**. This means that in order to solve, the user must provide a value; otherwise an error will be raised. This is an explicit solve.

In line 2 we're creating a **Number Token**. This is a special Token really useful for working with version like or counting parts of a name. It's always required.
In line 2 we're creating a **Required Token** with a fallback. This means that if the user doesn't provide a value, the Token will solve to the fallback value. This is an implicit solve.

In line 3 we're creating an **Optional Token**, which means that for solving the user can pass one of the options in the Token or simply ignore passing a value and the Token will solve to it's default option. This is an implicit solve, which helps to greatly reduce the amount of info that needs to be passed to solve for certain cases.
In line 3 we're creating a **Number Token**. This is a special Token really useful for working with version like or counting parts of a name. It's always required.

In line 4 we're creating an **Optional Token**, which means that for solving the user can pass one of the options in the Token or simply ignore passing a value and the Token will solve to its default option. This is an implicit solve, which helps to greatly reduce the amount of info that needs to be passed to solve for certain cases.

For more information on implicit and explicit solving please check :doc:`solving`

Expand Down
9 changes: 6 additions & 3 deletions docs/source/usage/solving.rst
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ Let's set these Tokens and Rules.

# CREATE TOKENS
n.add_token('whatAffects')
n.add_token('shadowType', fallback='soft')
n.add_token_number('digits')
n.add_token(
'category',
Expand All @@ -33,7 +34,7 @@ Let's set these Tokens and Rules.
# CREATE RULES
n.add_rule(
'lights',
'{category}_{function}_{whatAffects}_{digits}_{type}'
'{category}_{function}_{whatAffects}_{shadowType}_{digits}_{type}'
)

n.set_active_rule("lights")
Expand All @@ -45,6 +46,8 @@ It would not make any sense to make the user pass each and every Token all the t

That's why vfxnaming.solve() accepts both args and kwargs. Not only that, but if a given Token is optional and you want to use its default value, you don't need to pass it at all.

Even if you make a Token required, you can still define a fallback value for it.

.. code-block:: python

n.solve(
Expand All @@ -57,9 +60,9 @@ That's why vfxnaming.solve() accepts both args and kwargs. Not only that, but if
Each of these calls to vfxnaming.solve() will produce the exact same result:

.. note::
natural_custom_chars_001_LGT
natural_custom_chars_soft_001_LGT

If you don't pass a required Token (either as an argument or keyword argument), such as 'whatAffects' in this example, you'll get a **TokenError**. You'll also get a **TokenError** if you try to parse a value that doesn't match any of the options in the Token.
If you don't pass a required Token (either as an argument or keyword argument), such as 'whatAffects' in this example, you'll get a **TokenError**, unless it has a fallback value defined. You'll also get a **TokenError** if you try to parse a value that doesn't match any of the options in the Token.

Solving rules with repeated tokens
-----------------------------------------
Expand Down
5 changes: 4 additions & 1 deletion docs/source/usage/validating.rst
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ Let's set these Tokens and Rule.

# CREATE TOKENS
n.reset_tokens()
n.add_token("whatAffects")
n.add_token("whatAffects", fallback="nothing")
n.add_token_number("digits")
n.add_token(
"category",
Expand Down Expand Up @@ -58,6 +58,9 @@ And then let's validate these names:
n.validate("dramatic_bounce_chars_001_LGT")
# Result: True

n.validate("dramatic_bounce_nothing_001_LGT")
# Result: True

n.validate("dramatic_bounce_chars_001")
# Result: False. Last token is missing.

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "vfxnaming"
version = "1.4.5-beta"
version = "1.5.0-beta"
authors = [
{ name="Chris Granados", email="info@chrisgranados.com" },
]
Expand Down
14 changes: 13 additions & 1 deletion src/vfxnaming/naming.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,14 @@ def solve(*args, **kwargs) -> AnyStr:
fields_inc += 1
continue
elif token.required and kwargs.get(f) is None and len(args) == 0:
raise SolvingError(f"Token {token.name} is required but was not passed.")
if len(token.fallback):
values[f] = token.fallback
fields_inc += 1
continue
else:
raise SolvingError(
f"Token {token.name} is required but was not passed."
)
# Not required and not passed as keyword argument
elif not token.required and kwargs.get(f) is None:
values[f] = token.solve()
Expand Down Expand Up @@ -191,6 +198,11 @@ def validate(name: AnyStr, **kwargs) -> bool:
values[f] = token.solve(kwargs.get(rule.fields[fields_inc]))
fields_inc += 1
continue
elif token.required and isinstance(token, tokens.Token):
if len(token.fallback):
values[f] = token.fallback
fields_inc += 1
continue
fields_inc += 1
logger.debug(f"Validating rule '{rule.name}' with values {values}")
return rule.validate(name, **values)
Expand Down
2 changes: 2 additions & 0 deletions src/vfxnaming/rules.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,8 @@ def validate(self, name: AnyStr, **validate_values) -> bool: # noqa: C901
repeated_fields[each] = 1
if repeated_fields:
logger.debug(f"Repeated tokens: {', '.join(repeated_fields.keys())}")

# Validate values passed by the user
if len(validate_values):
for key, value in name_parts:
# Strip number that was added to make group name unique
Expand Down
33 changes: 29 additions & 4 deletions src/vfxnaming/tokens.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ def __init__(self, name: AnyStr):
self._name: AnyStr = name
self._default = None
self._options: Dict = {}
self._fallback = ""

def add_option(self, fullname: AnyStr, abbreviation: AnyStr) -> bool:
"""Add an option pair to this Token.
Expand Down Expand Up @@ -138,7 +139,9 @@ def solve(self, name: Union[AnyStr, None] = None) -> AnyStr:
"""
if self.required and name:
return name
elif self.required and name is None:
elif self.required and len(self._fallback):
return self._fallback
elif self.required and not name:
raise TokenError(
f"Token {self.name} is required. name parameter must be passed."
)
Expand Down Expand Up @@ -227,6 +230,19 @@ def options(self) -> Dict:
"""
return copy.deepcopy(self._options)

@property
def fallback(self) -> AnyStr:
    """Fallback value consulted when this required Token is solved without a
    user-provided value.

    Returns:
        AnyStr: The fallback value; an empty string means no fallback is set.
    """
    return self._fallback

@fallback.setter
def fallback(self, f: AnyStr):
    """Set the fallback value for a required Token.

    The assignment only takes effect when the Token is required. For
    non-required Tokens (those with options) the value is ignored and a
    warning is logged, since they already resolve through their default
    option.

    Args:
        f (AnyStr): Value the Token should solve to when the user passes
            nothing.
    """
    if self.required:
        self._fallback = f
    else:
        # A fallback would never be consulted on an optional Token, so
        # point the user at the 'default' mechanism instead.
        logger.warning(
            f"Token '{self.name}' has options, use {self.name}.default instead."
        )


class TokenNumber(Serializable):
def __init__(self, name: AnyStr):
Expand Down Expand Up @@ -288,7 +304,7 @@ def parse(self, value: AnyStr) -> int:
suffix_index += 1

if prefix_index != -1 and self.prefix != "":
if value[prefix_index : len(self.prefix)] != self.prefix:
if value[prefix_index : len(self.prefix)] != self.prefix: # noqa: E203
logger.warning(f"Prefix '{self.prefix}' not found in '{value}'")
if suffix_index != -1 and self.suffix != "":
if value[-suffix_index:] != self.suffix:
Expand Down Expand Up @@ -358,15 +374,18 @@ def options(self) -> Dict:
return copy.deepcopy(self._options)


def add_token(name: AnyStr, **kwargs) -> Token:
def add_token(name: AnyStr, fallback: AnyStr = "", **kwargs) -> Token:
"""Add token to current naming session. If 'default' keyword argument is found,
set it as default for the token instance.

Args:
name (str): Name that best describes the token, this will be used as a way
to invoke the Token object.

kwargs: Each argument following the name is treated as an option for the
fallback (str, optional): Fallback value to use if the token is required. Defaults to ""
which will raise an error when no value is passed, making the token mandatory.

kwargs: Each argument following fallback is treated as an option for the
new Token.

Raises:
Expand All @@ -392,6 +411,12 @@ def add_token(name: AnyStr, **kwargs) -> Token:
break
else:
raise TokenError("Default value must match one of the options passed.")
if len(fallback):
if isinstance(fallback, str):
token.fallback = fallback
else:
raise TokenError(f"Fallback must be a string. Got {type(fallback)}")

_tokens[name] = token
return token

Expand Down
73 changes: 69 additions & 4 deletions tests/tokens_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,12 @@ def setup(self):
tokens.reset_tokens()

@pytest.mark.parametrize(
"name,kwargs",
"name,fallback,kwargs",
[
("test", {}),
("test", "", {}),
(
"category",
"",
{
"natural": "natural",
"practical": "practical",
Expand All @@ -26,10 +27,11 @@ def setup(self):
"default": "natural",
},
),
("fallbacktest", "imfallback", {}),
],
)
def test_add(self, name: str, kwargs):
result = tokens.add_token(name, **kwargs)
def test_add(self, name: str, fallback: str, kwargs):
result = tokens.add_token(name, fallback, **kwargs)
assert isinstance(result, tokens.Token) is True

def test_reset_tokens(self):
Expand Down Expand Up @@ -118,6 +120,69 @@ def test_has_option_abbreviation(self, abbreviation: str, expected: bool):
assert result is expected


class Test_TokenFallback:
    """Tests for the Token 'fallback' feature: a required Token that defines
    a fallback solves to that value when the user doesn't provide one."""

    @pytest.fixture(autouse=True)
    def setup(self):
        # Start every test from a clean session, then register the tokens
        # and the 'lights' rule used by the solving tests below.
        # 'whatAffects' is required but carries fallback='nothing'.
        rules.reset_rules()
        tokens.reset_tokens()
        tokens.add_token("whatAffects", fallback="nothing")
        tokens.add_token_number("number")
        tokens.add_token(
            "category",
            natural="natural",
            practical="practical",
            dramatic="dramatic",
            volumetric="volumetric",
            default="natural",
        )
        tokens.add_token(
            "function",
            key="key",
            fill="fill",
            ambient="ambient",
            bounce="bounce",
            rim="rim",
            kick="kick",
            custom="custom",
            default="custom",
        )
        tokens.add_token("type", lighting="LGT", default="LGT")
        rules.add_rule("lights", "{category}_{function}_{whatAffects}_{number}_{type}")

    def test_token_has_fallback(self):
        # The fallback passed to add_token must be stored on the Token.
        assert tokens.get_token("whatAffects").fallback == "nothing"

    @pytest.mark.parametrize(
        "name,data,expected",
        [
            # Explicit 'whatAffects' value provided: it must win over the fallback.
            (
                "natural_ambient_chars_024_LGT",
                {
                    "category": "natural",
                    "function": "ambient",
                    "whatAffects": "chars",
                    "number": 24,
                    "type": "lighting",
                },
                True,
            ),
            # 'whatAffects' omitted: the solve should use the fallback 'nothing'.
            (
                "natural_ambient_nothing_003_LGT",
                {
                    "category": "natural",
                    "function": "ambient",
                    "number": 3,
                    "type": "lighting",
                },
                True,
            ),
        ],
    )
    def test_fallback_solve(self, name: str, data: dict, expected: bool):
        # Solve with the session's rule and compare against the expected name.
        solved = n.solve(**data)
        assert (name == solved) is expected


class Test_TokenNumber:
@pytest.fixture(autouse=True)
def setup(self):
Expand Down
Loading