pgml-extension/src/bindings/transformers.py (17 changes: 7 additions & 10 deletions)
@@ -67,8 +67,6 @@ def convert_dtype(kwargs):
 def convert_eos_token(tokenizer, args):
     if "eos_token" in args:
         args["eos_token_id"] = tokenizer.convert_tokens_to_ids(args.pop("eos_token"))
-    else:
-        args["eos_token_id"] = tokenizer.eos_token_id
 
 
 def ensure_device(kwargs):
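
A standalone sketch of how convert_eos_token behaves after this hunk (not part of the PR; FakeTokenizer is a hypothetical stand-in): eos_token_id is now injected only when the caller explicitly passes eos_token, instead of always falling back to the tokenizer's default.

    def convert_eos_token(tokenizer, args):
        # After this change: set eos_token_id only when the caller asked for a
        # specific eos_token; otherwise leave the generation defaults untouched.
        if "eos_token" in args:
            args["eos_token_id"] = tokenizer.convert_tokens_to_ids(args.pop("eos_token"))


    class FakeTokenizer:
        # Hypothetical stand-in for a Hugging Face tokenizer, for illustration only.
        eos_token_id = 2

        def convert_tokens_to_ids(self, token):
            return {"</s>": 2}.get(token, 1)


    args = {"eos_token": "</s>", "max_new_tokens": 32}
    convert_eos_token(FakeTokenizer(), args)
    print(args)  # {'max_new_tokens': 32, 'eos_token_id': 2}

    args = {"max_new_tokens": 32}
    convert_eos_token(FakeTokenizer(), args)
    print(args)  # {'max_new_tokens': 32} -- no implicit eos_token_id fallback anymore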
@@ -94,15 +92,14 @@ def transform(task, args, inputs):
     inputs = json.loads(inputs)
 
     key = ",".join([f"{key}:{val}" for (key, val) in sorted(task.items())])
-    ensure_device(task)
-    convert_dtype(task)
-
-    model = task.get("model", None)
-    if model and "tokenizer" not in task:
-        task["tokenizer"] = AutoTokenizer.from_pretrained(model)
-
     if key not in __cache_transform_pipeline_by_task:
-        __cache_transform_pipeline_by_task[key] = transformers.pipeline(**task)
+        ensure_device(task)
+        convert_dtype(task)
+        pipe = transformers.pipeline(**task)
+        if pipe.tokenizer is None:
+            pipe.tokenizer = AutoTokenizer.from_pretrained(pipe.model.name_or_path)
+        __cache_transform_pipeline_by_task[key] = pipe
+
     pipe = __cache_transform_pipeline_by_task[key]
 
     if pipe.task == "question-answering":
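
To make the new control flow in transform() easier to follow, here is a simplified, self-contained sketch of the caching path (assumes the transformers package is installed; the cache name, helper name, and the tiny demo model sshleifer/tiny-gpt2 are illustrative choices, not the PR's exact code). Pipeline construction, device/dtype normalization, and the tokenizer fallback now happen only on a cache miss, so repeated calls with the same task reuse the cached pipeline and its tokenizer.

    import transformers
    from transformers import AutoTokenizer

    _pipeline_cache = {}  # plays the role of __cache_transform_pipeline_by_task


    def get_pipeline(task):
        # The cache key is built from the caller's task dict, as in the PR.
        key = ",".join(f"{k}:{v}" for (k, v) in sorted(task.items()))
        if key not in _pipeline_cache:
            # Only on a cache miss: build the pipeline (the PR also applies
            # ensure_device/convert_dtype to the task here, before this call).
            pipe = transformers.pipeline(**task)
            if pipe.tokenizer is None:
                # Some pipelines come back without a tokenizer; fall back to
                # loading one from the model's own repo id / path.
                pipe.tokenizer = AutoTokenizer.from_pretrained(pipe.model.name_or_path)
            _pipeline_cache[key] = pipe
        return _pipeline_cache[key]


    pipe = get_pipeline({"task": "text-generation", "model": "sshleifer/tiny-gpt2"})
    print(pipe("Hello", max_new_tokens=5))
    # A second call with the same task dict hits the cache and reuses the pipeline.
    assert get_pipeline({"task": "text-generation", "model": "sshleifer/tiny-gpt2"}) is pipe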