diff --git a/pgml-dashboard/content/blog/announcing-gptq-and-ggml-quantized-llm-support-for-huggingface-transformers.md b/pgml-dashboard/content/blog/announcing-gptq-and-ggml-quantized-llm-support-for-huggingface-transformers.md
index 23a2d241e..a5f2e5cae 100644
--- a/pgml-dashboard/content/blog/announcing-gptq-and-ggml-quantized-llm-support-for-huggingface-transformers.md
+++ b/pgml-dashboard/content/blog/announcing-gptq-and-ggml-quantized-llm-support-for-huggingface-transformers.md
@@ -104,7 +104,10 @@ PostgresML will automatically use GPTQ or GGML when a HuggingFace model has one
 SELECT pgml.transform(
   task => '{
     "task": "text-generation",
-    "model": "mlabonne/gpt2-GPTQ-4bit"
+    "model": "mlabonne/gpt2-GPTQ-4bit",
+    "model_basename": "gptq_model-4bit-128g",
+    "use_triton": true,
+    "use_safetensors": true
   }'::JSONB,
   inputs => ARRAY[
     'Once upon a time,'
diff --git a/pgml-extension/examples/transformers.sql b/pgml-extension/examples/transformers.sql
index cd336c397..0ff74c35a 100644
--- a/pgml-extension/examples/transformers.sql
+++ b/pgml-extension/examples/transformers.sql
@@ -90,14 +90,15 @@ SELECT pgml.transform(
         'Dominic Cobb is the foremost practitioner of the artistic science of extraction, inserting oneself into a subject''s dreams to obtain hidden information without the subject knowing, a concept taught to him by his professor father-in-law, Dr. Stephen Miles. Dom''s associates are Miles'' former students, who Dom requires as he has given up being the dream architect for reasons he won''t disclose. Dom''s primary associate, Arthur, believes it has something to do with Dom''s deceased wife, Mal, who often figures prominently and violently in those dreams, or Dom''s want to "go home" (get back to his own reality, which includes two young children). Dom''s work is generally in corporate espionage. As the subjects don''t want the information to get into the wrong hands, the clients have zero tolerance for failure. Dom is also a wanted man, as many of his past subjects have learned what Dom has done to them. One of those subjects, Mr. Saito, offers Dom a job he can''t refuse: to take the concept one step further into inception, namely planting thoughts into the subject''s dreams without them knowing. Inception can fundamentally alter that person as a being. Saito''s target is Robert Michael Fischer, the heir to an energy business empire, which has the potential to rule the world if continued on the current trajectory. Beyond the complex logistics of the dream architecture of the case and some unknowns concerning Fischer, the biggest obstacles in success for the team become worrying about one aspect of inception which Cobb fails to disclose to the other team members prior to the job, and Cobb''s newest associate Ariadne''s belief that Cobb''s own subconscious, especially as it relates to Mal, may be taking over what happens in the dreams.'
     ]
 );
+
 SELECT pgml.transform(
     task   => '{"task": "text-classification",
                 "model": "finiteautomata/bertweet-base-sentiment-analysis"
                }'::JSONB,
     inputs => ARRAY[
-        'I love how amazingly simple ML has become!', 
+        'I love how amazingly simple ML has become!',
         'I hate doing mundane and thankless tasks. ☹️'
-    ],
+    ]
 ) AS positivity;
 
 SELECT pgml.transform(
diff --git a/pgml-extension/requirements-xformers.txt b/pgml-extension/requirements-xformers.txt
index 95e303b61..9a7c49f72 100644
--- a/pgml-extension/requirements-xformers.txt
+++ b/pgml-extension/requirements-xformers.txt
@@ -1 +1 @@
-xformers==0.0.20
+xformers==0.0.21
diff --git a/pgml-extension/requirements.txt b/pgml-extension/requirements.txt
index db0c5d242..1f5446beb 100644
--- a/pgml-extension/requirements.txt
+++ b/pgml-extension/requirements.txt
@@ -1,16 +1,16 @@
-accelerate==0.21.0
-auto-gptq==0.3.0
-bitsandbytes==0.40.2
+accelerate==0.22.0
+auto-gptq==0.4.2
+bitsandbytes==0.41.1
 catboost==1.2
-ctransformers==0.2.13
-datasets==2.13.1
-deepspeed==0.10.0
-huggingface-hub==0.16.4
+ctransformers==0.2.27
+datasets==2.14.5
+deepspeed==0.10.3
+huggingface-hub==0.17.1
 InstructorEmbedding==1.0.1
-lightgbm==4.0.0
-orjson==3.9.2
-pandas==2.0.3
-rich==13.4.2
+lightgbm==4.1.0
+orjson==3.9.7
+pandas==2.1.0
+rich==13.5.2
 rouge==1.0.1
 sacrebleu==2.3.1
 sacremoses==0.0.53
@@ -21,9 +21,9 @@ tokenizers==0.13.3
 torch==2.0.1
 torchaudio==2.0.2
 torchvision==0.15.2
-tqdm==4.65.0
-transformers==4.31.0
-xgboost==1.7.6
-langchain==0.0.237
+tqdm==4.66.1
+transformers==4.33.1
+xgboost==2.0.0
+langchain==0.0.287
 einops==0.6.1
 pynvml==11.5.0
diff --git a/pgml-extension/src/bindings/transformers/transformers.py b/pgml-extension/src/bindings/transformers/transformers.py
index af948e9ef..f220be89d 100644
--- a/pgml-extension/src/bindings/transformers/transformers.py
+++ b/pgml-extension/src/bindings/transformers/transformers.py
@@ -89,11 +89,12 @@ def ensure_device(kwargs):
 
 class GPTQPipeline(object):
     def __init__(self, model_name, **task):
-        import auto_gptq
+        from auto_gptq import AutoGPTQForCausalLM, BaseQuantizeConfig
         from huggingface_hub import snapshot_download
 
         model_path = snapshot_download(model_name)
-        self.model = auto_gptq.AutoGPTQForCausalLM.from_quantized(model_path, **task)
+        quantized_config = BaseQuantizeConfig.from_pretrained(model_path)
+        self.model = AutoGPTQForCausalLM.from_quantized(model_path, quantized_config=quantized_config, **task)
         if "use_fast_tokenizer" in task:
             self.tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=task.pop("use_fast_tokenizer"))
         else:
@@ -192,9 +193,13 @@ def create_pipeline(task):
     ensure_device(task)
     convert_dtype(task)
     model_name = task.get("model", None)
-    if model_name and "-ggml" in model_name:
+    if model_name:
+        lower = model_name.lower()
+    else:
+        lower = None
+    if lower and "-ggml" in lower:
         pipe = GGMLPipeline(model_name, **task)
-    elif model_name and "-gptq" in model_name:
+    elif lower and "-gptq" in lower:
         pipe = GPTQPipeline(model_name, **task)
     else:
         try:
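
Note on the transformers.py change: the following is a minimal standalone sketch (not part of the PR) of the new GPTQ loading path, assuming auto-gptq 0.4.x as pinned in requirements.txt and reusing the model and basename from the blog example above. It spells the keyword as quantize_config, following the auto-gptq API, whereas the hunk above forwards it as quantized_config; the blog example additionally sets use_triton, which requires the Triton kernels to be installed.

    # Sketch only: load a GPTQ-quantized checkpoint the way the updated
    # GPTQPipeline constructor does, assuming auto-gptq 0.4.x.
    from auto_gptq import AutoGPTQForCausalLM, BaseQuantizeConfig
    from huggingface_hub import snapshot_download
    from transformers import AutoTokenizer

    model_path = snapshot_download("mlabonne/gpt2-GPTQ-4bit")

    # Reads the quantize_config.json shipped alongside the quantized weights.
    quantize_config = BaseQuantizeConfig.from_pretrained(model_path)

    model = AutoGPTQForCausalLM.from_quantized(
        model_path,
        quantize_config=quantize_config,
        model_basename="gptq_model-4bit-128g",
        use_safetensors=True,
    )
    tokenizer = AutoTokenizer.from_pretrained(model_path)

    # Generate a short continuation, mirroring the blog's "Once upon a time," prompt.
    inputs = tokenizer("Once upon a time,", return_tensors="pt").to(model.device)
    print(tokenizer.decode(model.generate(**inputs, max_new_tokens=32)[0]))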
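And a small self-contained sketch of the case-insensitive dispatch added to create_pipeline, using hypothetical stand-in classes; the real pipelines live in pgml-extension/src/bindings/transformers/transformers.py, and the model name in the final check is purely illustrative.

    # Hypothetical stand-ins, used only to exercise the dispatch logic below.
    class GGMLPipeline:
        def __init__(self, model_name, **task):
            self.model_name = model_name

    class GPTQPipeline:
        def __init__(self, model_name, **task):
            self.model_name = model_name

    def create_pipeline(task):
        # Mirrors the PR: lower-case the model name once, then match the
        # "-ggml" / "-gptq" suffixes case-insensitively.
        model_name = task.get("model", None)
        lower = model_name.lower() if model_name else None
        if lower and "-ggml" in lower:
            return GGMLPipeline(model_name, **task)
        if lower and "-gptq" in lower:
            return GPTQPipeline(model_name, **task)
        return None  # would fall through to a standard transformers pipeline

    # Upper-case suffixes such as "-GGML" now route correctly.
    assert isinstance(create_pipeline({"model": "example/some-model-GGML"}), GGMLPipeline)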