-
Notifications
You must be signed in to change notification settings - Fork 65
feat: Expose additional data handlers as an argument in train #409
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 2 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -15,7 +15,10 @@ | |
| """Unit Tests for SFT Trainer. | ||
| """ | ||
|
|
||
| # pylint: disable=too-many-lines | ||
|
|
||
| # Standard | ||
| from dataclasses import asdict | ||
| import copy | ||
| import json | ||
| import os | ||
|
|
@@ -46,6 +49,13 @@ | |
| from tuning import sft_trainer | ||
| from tuning.config import configs, peft_config | ||
| from tuning.config.tracker_configs import FileLoggingTrackerConfig | ||
| from tuning.data.data_config import ( | ||
| DataConfig, | ||
| DataHandlerConfig, | ||
| DataPreProcessorConfig, | ||
| DataSetConfig, | ||
| ) | ||
| from tuning.data.data_handlers import apply_dataset_formatting | ||
|
|
||
| MODEL_ARGS = configs.ModelArguments( | ||
| model_name_or_path=MODEL_NAME, use_flash_attn=False, torch_dtype="float32" | ||
|
|
@@ -1124,3 +1134,98 @@ def test_pretokenized_dataset_wrong_format(): | |
| # is essentially swallowing a KeyError here. | ||
| with pytest.raises(ValueError): | ||
| sft_trainer.train(MODEL_ARGS, data_args, train_args, PEFT_PT_ARGS) | ||
|
|
||
|
|
||
| ########################################################################### | ||
| ### Tests for checking different cases for the argument additional_handlers | ||
| ### The argument `additional_handlers` in train::sft_trainer.py is used to pass | ||
| ### extra data handlers which should be a Dict[str,callable] | ||
|
|
||
| ### Test for checking if bad additional_handlers argument | ||
| ### (which is not Dict[str,callable]) throws an error | ||
| @pytest.mark.parametrize( | ||
| "additional_handlers", | ||
| [ | ||
| "thisisnotokay", | ||
| [], | ||
| {lambda x: {"x": x}: "notokayeither"}, | ||
| {"thisisfine": "thisisnot"}, | ||
| ], | ||
| ) | ||
| def test_run_with_bad_additional_data_handlers(additional_handlers): | ||
| with tempfile.TemporaryDirectory() as tempdir: | ||
| train_args = copy.deepcopy(TRAIN_ARGS) | ||
| train_args.output_dir = tempdir | ||
|
|
||
| with pytest.raises( | ||
| ValueError, match="Handlers should be of type Dict, str to callable" | ||
| ): | ||
| sft_trainer.train( | ||
| MODEL_ARGS, | ||
| DATA_ARGS, | ||
| train_args, | ||
| PEFT_PT_ARGS, | ||
| additional_data_handlers=additional_handlers, | ||
| ) | ||
|
|
||
|
|
||
| ### Test for checking if additional_handlers=None should work | ||
| def test_run_with_additional_data_handlers_as_none(): | ||
| with tempfile.TemporaryDirectory() as tempdir: | ||
| train_args = copy.deepcopy(TRAIN_ARGS) | ||
| train_args.output_dir = tempdir | ||
|
|
||
| sft_trainer.train( | ||
| MODEL_ARGS, | ||
| DATA_ARGS, | ||
| train_args, | ||
| PEFT_PT_ARGS, | ||
| additional_data_handlers=None, | ||
| ) | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. We can just add this line after this:
|
||
|
|
||
|
|
||
| ### Test for checking if a good additional_handlers argument | ||
| ### can take a data handler and can successfully run a e2e training. | ||
| def test_run_by_passing_additional_data_handlers(): | ||
|
|
||
| # This is my test handler | ||
| TEST_HANDLER = "my_test_handler" | ||
|
|
||
| def test_handler(element, tokenizer, **kwargs): | ||
| return apply_dataset_formatting(element, tokenizer, "custom_formatted_field") | ||
|
|
||
| # This data config calls for data handler to be applied to dataset | ||
| preprocessor_config = DataPreProcessorConfig() | ||
| handler_config = DataHandlerConfig(name="my_test_handler", arguments=None) | ||
| dataset_config = DataSetConfig( | ||
| name="test_dataset", | ||
| data_paths=TWITTER_COMPLAINTS_DATA_JSON, | ||
| data_handlers=[handler_config], | ||
| ) | ||
| data_config = DataConfig( | ||
| dataprocessor=preprocessor_config, datasets=[dataset_config] | ||
| ) | ||
|
|
||
| # dump the data config to a file, also test if json data config works | ||
| with tempfile.NamedTemporaryFile( | ||
| "w", delete=False, suffix=".json" | ||
| ) as temp_data_file: | ||
| data_config_raw = json.dumps(asdict(data_config)) | ||
| temp_data_file.write(data_config_raw) | ||
| data_config_path = temp_data_file.name | ||
|
|
||
| # now launch sft trainer after registering data handler | ||
| with tempfile.TemporaryDirectory() as tempdir: | ||
| train_args = copy.deepcopy(TRAIN_ARGS) | ||
| train_args.output_dir = tempdir | ||
| data_args = copy.deepcopy(DATA_ARGS) | ||
| data_args.data_config_path = data_config_path | ||
| data_args.dataset_text_field = "custom_formatted_field" | ||
|
|
||
| sft_trainer.train( | ||
| MODEL_ARGS, | ||
| data_args, | ||
| train_args, | ||
| PEFT_PT_ARGS, | ||
| additional_data_handlers={TEST_HANDLER: test_handler}, | ||
| ) | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Here also, we can just add this line after this:
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -13,7 +13,7 @@ | |
| # limitations under the License. | ||
|
|
||
| # Standard | ||
| from typing import Dict, List, Union | ||
| from typing import Callable, Dict, List, Union | ||
| import logging | ||
| import os | ||
|
|
||
|
|
@@ -35,7 +35,7 @@ class DataPreProcessor: | |
| tokenizer = None | ||
| data_config: DataConfig = None | ||
| processor_config: DataPreProcessorConfig = None | ||
| registered_handlers: Dict[str, callable] = None | ||
| registered_handlers: Dict[str, Callable] = None | ||
|
|
||
| def __init__( | ||
| self, processor_config: DataPreProcessorConfig, tokenizer: AutoTokenizer | ||
|
|
@@ -46,8 +46,25 @@ def __init__( | |
| # Initialize other objects | ||
| self.registered_handlers = {} | ||
|
|
||
| def register_data_handler(self, name: str, func: callable): | ||
| # Auto register available data handlers | ||
| for k, v in AVAILABLE_DATA_HANDLERS.items(): | ||
| self.registered_handlers[k] = v | ||
|
|
||
| def register_data_handler(self, name: str, func: Callable): | ||
| if not isinstance(name, str) or not callable(func): | ||
| raise ValueError("Handlers should be of type Dict, str to callable") | ||
| if name in self.registered_handlers: | ||
| logging.warning("Handler name %s existed is being overwritten", name) | ||
|
||
| self.registered_handlers[name] = func | ||
dushyantbehl marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| logging.info("Registered new handler %s", name) | ||
|
|
||
| def register_data_handlers(self, handlers: Dict[str, Callable]): | ||
| if handlers is None: | ||
| return | ||
| if not isinstance(handlers, Dict): | ||
| raise ValueError("Handlers should be of type Dict, str to callable") | ||
| for k, v in handlers.items(): | ||
| self.register_data_handler(name=k, func=v) | ||
|
|
||
| def load_dataset( | ||
| self, | ||
|
|
@@ -238,19 +255,14 @@ def process_dataset_configs( | |
| return train_dataset | ||
|
|
||
|
|
||
| def autoregister_available_handlers(processor: DataPreProcessor): | ||
| if processor is None: | ||
| return | ||
| for name, func in AVAILABLE_DATA_HANDLERS.items(): | ||
| processor.register_data_handler(name=name, func=func) | ||
|
|
||
|
|
||
| def get_datapreprocessor( | ||
| processor_config: DataPreProcessorConfig, tokenizer: AutoTokenizer | ||
| processor_config: DataPreProcessorConfig, | ||
| tokenizer: AutoTokenizer, | ||
| additional_data_handlers: Dict[str, Callable] = None, | ||
| ) -> DataPreProcessor: | ||
| processor = DataPreProcessor( | ||
| processor_config=processor_config, | ||
| tokenizer=tokenizer, | ||
| ) | ||
| autoregister_available_handlers(processor) | ||
| processor.register_data_handlers(additional_data_handlers) | ||
| return processor | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
nit: docstrings for all added test cases
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Added comments. Thanks @willmj