Skip to content

Commit 4bbca96

Browse files
Santi Adavani (santiatpml)
authored and committed
Added indent for info display
1 parent 421af8f commit 4bbca96

File tree

1 file changed

+13
-6
lines changed

1 file changed

+13
-6
lines changed

pgml-extension/src/bindings/transformers/transformers.py

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1017,7 +1017,7 @@ def on_log(self, args, state, control, logs=None, **kwargs):
10171017
logs["step"] = state.global_step
10181018
logs["max_steps"] = state.max_steps
10191019
logs["timestamp"] = str(datetime.now())
1020-
print_info(json.dumps(logs))
1020+
print_info(json.dumps(logs, indent=4))
10211021
insert_logs(self.project_id, self.model_id, json.dumps(logs))
10221022

10231023

@@ -1275,7 +1275,6 @@ def evaluate(self):
12751275

12761276
if "eval_accuracy" in metrics.keys():
12771277
metrics["accuracy"] = metrics.pop("eval_accuracy")
1278-
12791278

12801279
# Drop all the keys that are not floats or ints to be compatible for pgml-extension metrics typechecks
12811280
metrics = {
@@ -1286,6 +1285,7 @@ def evaluate(self):
12861285

12871286
return metrics
12881287

1288+
12891289
class FineTuningTextPairClassification(FineTuningTextClassification):
12901290
def __init__(
12911291
self,
@@ -1313,7 +1313,7 @@ def __init__(
13131313
super().__init__(
13141314
project_id, model_id, train_dataset, test_dataset, path, hyperparameters
13151315
)
1316-
1316+
13171317
def tokenize_function(self, example):
13181318
"""
13191319
Tokenizes the input text using the tokenizer specified in the class.
@@ -1326,13 +1326,20 @@ def tokenize_function(self, example):
13261326
13271327
"""
13281328
if self.tokenizer_args:
1329-
tokenized_example = self.tokenizer(example["text1"], example["text2"], **self.tokenizer_args)
1329+
tokenized_example = self.tokenizer(
1330+
example["text1"], example["text2"], **self.tokenizer_args
1331+
)
13301332
else:
13311333
tokenized_example = self.tokenizer(
1332-
example["text1"], example["text2"], padding=True, truncation=True, return_tensors="pt"
1334+
example["text1"],
1335+
example["text2"],
1336+
padding=True,
1337+
truncation=True,
1338+
return_tensors="pt",
13331339
)
13341340
return tokenized_example
13351341

1342+
13361343
class FineTuningConversation(FineTuningBase):
13371344
def __init__(
13381345
self,
@@ -1459,7 +1466,7 @@ def formatting_prompts_func(example):
14591466
callbacks=[PGMLCallback(self.project_id, self.model_id)],
14601467
)
14611468
print_info("Creating Supervised Fine Tuning trainer done. Training ... ")
1462-
1469+
14631470
# Train
14641471
self.trainer.train()
14651472

0 commit comments

Comments (0)
pFad - Phonifier reborn

pFad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy