🎨 Format
Signed-off-by: Evaline Ju <[email protected]>
evaline-ju committed Nov 2, 2023
1 parent 04dd2a1 commit 8cab2c3
Showing 3 changed files with 23 additions and 13 deletions.
8 changes: 6 additions & 2 deletions caikit_nlp/modules/text_generation/peft_prompt_tuning.py
@@ -345,7 +345,9 @@ def train(
             PeftPromptTuning
                 Instance of this class with tuned prompt vectors.
         """
-        error.value_check("<NLP46653367E>", len(train_stream) > 0, "train_stream cannot be empty")
+        error.value_check(
+            "<NLP46653367E>", len(train_stream) > 0, "train_stream cannot be empty"
+        )
 
         # Configure random seed
         transformers.set_seed(seed)
@@ -389,7 +391,9 @@ def train(
 
         train_stream = train_stream.map(convert_to_generation_record)
         if val_stream:
-            error.value_check("<NLP63201425E>", len(val_stream) > 0, "val_stream cannot be empty")
+            error.value_check(
+                "<NLP63201425E>", len(val_stream) > 0, "val_stream cannot be empty"
+            )
 
             val_stream = val_stream.map(convert_to_generation_record)
 
4 changes: 3 additions & 1 deletion caikit_nlp/modules/text_generation/text_generation_local.py
@@ -224,7 +224,9 @@ def train(
             TextGeneration
                 Instance of this class with fine-tuned models.
         """
-        error.value_check("<NLP96406893E>", len(train_stream) > 0, "train_stream cannot be empty")
+        error.value_check(
+            "<NLP96406893E>", len(train_stream) > 0, "train_stream cannot be empty"
+        )
 
         torch_dtype = get_torch_dtype(torch_dtype)
 
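For context (not part of the commit): both modules guard against empty training data with `error.value_check`. A rough, self-contained sketch of that pattern, using a stand-in `value_check` helper and a made-up log code rather than caikit's real error handler, might look like:

```python
# Hypothetical stand-in for caikit's error.value_check: raise when the
# condition is false, tagging the error with a log code for traceability.
def value_check(log_code: str, condition: bool, message: str) -> None:
    if not condition:
        raise ValueError(f"{log_code}: {message}")


# Mirrors the checks above: reject an empty train_stream before training starts.
train_stream = [("@foo what a cute dog!", "no complaint")]
value_check("<NLP00000000E>", len(train_stream) > 0, "train_stream cannot be empty")
```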
24 changes: 14 additions & 10 deletions tests/fixtures/__init__.py
@@ -115,11 +115,13 @@ def causal_lm_train_kwargs():
         "base_model": HFAutoCausalLM.bootstrap(
             model_name=CAUSAL_LM_MODEL, tokenizer_name=CAUSAL_LM_MODEL
         ),
-        "train_stream": caikit.core.data_model.DataStream.from_iterable([
-            caikit_nlp.data_model.GenerationTrainRecord(
-                input="@foo what a cute dog!", output="no complaint"
-            ),
-        ]),
+        "train_stream": caikit.core.data_model.DataStream.from_iterable(
+            [
+                caikit_nlp.data_model.GenerationTrainRecord(
+                    input="@foo what a cute dog!", output="no complaint"
+                ),
+            ]
+        ),
         "num_epochs": 0,
         "tuning_config": caikit_nlp.data_model.TuningConfig(
             num_virtual_tokens=8, prompt_tuning_init_text="hello world"
@@ -153,11 +155,13 @@ def seq2seq_lm_train_kwargs():
         "base_model": HFAutoSeq2SeqLM.bootstrap(
             model_name=SEQ2SEQ_LM_MODEL, tokenizer_name=SEQ2SEQ_LM_MODEL
         ),
-        "train_stream": caikit.core.data_model.DataStream.from_iterable([
-            caikit_nlp.data_model.GenerationTrainRecord(
-                input="@foo what a cute dog!", output="no complaint"
-            ),
-        ]),
+        "train_stream": caikit.core.data_model.DataStream.from_iterable(
+            [
+                caikit_nlp.data_model.GenerationTrainRecord(
+                    input="@foo what a cute dog!", output="no complaint"
+                ),
+            ]
+        ),
         "num_epochs": 0,
         "tuning_config": caikit_nlp.data_model.TuningConfig(
             num_virtual_tokens=16, prompt_tuning_init_text="hello world"
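For context (not part of the commit): the fixtures above build a one-record training stream. A minimal sketch of the same construction, assuming `caikit` and `caikit_nlp` are installed, might look like:

```python
import caikit
import caikit_nlp

# Build a DataStream with a single GenerationTrainRecord, as the fixtures do.
train_stream = caikit.core.data_model.DataStream.from_iterable(
    [
        caikit_nlp.data_model.GenerationTrainRecord(
            input="@foo what a cute dog!", output="no complaint"
        ),
    ]
)

# The value_check calls in the modules above require this stream to be non-empty.
assert len(train_stream) > 0
```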
