Skip to content

Silas add ranking #1498

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Jun 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 20 additions & 11 deletions pgml-extension/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions pgml-extension/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "pgml"
version = "2.8.5"
version = "2.9.0"
edition = "2021"

[lib]
Expand Down Expand Up @@ -41,7 +41,7 @@ ndarray-stats = "0.5.1"
parking_lot = "0.12"
pgrx = "=0.11.3"
pgrx-pg-sys = "=0.11.3"
pyo3 = { version = "0.20.0", features = ["auto-initialize"], optional = true }
pyo3 = { version = "0.20.0", features = ["anyhow", "auto-initialize"], optional = true }
rand = "0.8"
rmp-serde = { version = "1.1" }
signal-hook = "0.3"
Expand Down
15 changes: 15 additions & 0 deletions pgml-extension/sql/pgml--2.8.5--2.9.0.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
-- src/api.rs:613
-- pgml::api::rank
-- Generated by pgrx for the 2.8.5 -> 2.9.0 upgrade: exposes the Rust
-- `rank` function (C symbol `rank_wrapper`) as a set-returning SQL
-- function. One output row per ranked document.
CREATE FUNCTION pgml."rank"(
	"transformer" TEXT, /* &str */
	"query" TEXT, /* &str */
	"documents" TEXT[], /* alloc::vec::Vec<&str> */
	"kwargs" jsonb DEFAULT '{}' /* pgrx::datum::json::JsonB */
) RETURNS TABLE (
	"corpus_id" bigint,  /* i64 */
	"score" double precision,  /* f64 */
	"text" TEXT  /* core::option::Option<alloc::string::String> */
)
IMMUTABLE STRICT PARALLEL SAFE
LANGUAGE c /* Rust */
AS 'MODULE_PATHNAME', 'rank_wrapper';
16 changes: 15 additions & 1 deletion pgml-extension/src/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -603,7 +603,21 @@ pub fn embed_batch(
kwargs: default!(JsonB, "'{}'"),
) -> SetOfIterator<'static, Vec<f32>> {
match crate::bindings::transformers::embed(transformer, inputs, &kwargs.0) {
Ok(output) => SetOfIterator::new(output.into_iter()),
Ok(output) => SetOfIterator::new(output),
Err(e) => error!("{e}"),
}
}

#[cfg(all(feature = "python", not(feature = "use_as_lib")))]
#[pg_extern(immutable, parallel_safe, name = "rank")]
pub fn rank(
    transformer: &str,
    query: &str,
    documents: Vec<&str>,
    kwargs: default!(JsonB, "'{}'"),
) -> TableIterator<'static, (name!(corpus_id, i64), name!(score, f64), name!(text, Option<String>))> {
    // Delegate to the Python cross-encoder binding; `error!` diverges,
    // aborting the query with the formatted error on failure.
    let results = match crate::bindings::transformers::rank(transformer, query, documents, &kwargs.0) {
        Ok(results) => results,
        Err(e) => error!("{e}"),
    };
    // One SQL row per ranked document: (corpus_id, score, text).
    TableIterator::new(results.into_iter().map(|r| (r.corpus_id, r.score, r.text)))
}
Expand Down
96 changes: 95 additions & 1 deletion pgml-extension/src/bindings/transformers/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@ use std::{collections::HashMap, path::Path};
use anyhow::{anyhow, bail, Context, Result};
use pgrx::*;
use pyo3::prelude::*;
use pyo3::types::PyTuple;
use pyo3::types::{PyBool, PyDict, PyFloat, PyInt, PyList, PyString, PyTuple};
use serde::Deserialize;
use serde_json::Value;

use crate::create_pymodule;
Expand All @@ -21,6 +22,59 @@ pub use transform::*;

create_pymodule!("/src/bindings/transformers/transformers.py");

// Need a wrapper so we can implement traits for it
// (the orphan rule forbids implementing pyo3's `FromPyObject`
// directly on the foreign type `serde_json::Value`).
struct Json(Value);

// Unwrap the newtype back into the underlying `serde_json::Value`.
impl From<Json> for Value {
    fn from(value: Json) -> Self {
        value.0
    }
}

impl FromPyObject<'_> for Json {
fn extract(ob: &PyAny) -> PyResult<Self> {
if ob.is_instance_of::<PyDict>() {
let dict: &PyDict = ob.downcast()?;
let mut json = serde_json::Map::new();
for (key, value) in dict.iter() {
let value = Json::extract(value)?;
json.insert(String::extract(key)?, value.0);
}
Ok(Self(serde_json::Value::Object(json)))
} else if ob.is_instance_of::<PyBool>() {
let value = bool::extract(ob)?;
Ok(Self(serde_json::Value::Bool(value)))
} else if ob.is_instance_of::<PyInt>() {
let value = i64::extract(ob)?;
Ok(Self(serde_json::Value::Number(value.into())))
} else if ob.is_instance_of::<PyFloat>() {
let value = f64::extract(ob)?;
let value =
serde_json::value::Number::from_f64(value).context("Could not convert f64 to serde_json::Number")?;
Ok(Self(serde_json::Value::Number(value)))
} else if ob.is_instance_of::<PyString>() {
let value = String::extract(ob)?;
Ok(Self(serde_json::Value::String(value)))
} else if ob.is_instance_of::<PyList>() {
let value = ob.downcast::<PyList>()?;
let mut json_values = Vec::new();
for v in value {
let v = v.extract::<Json>()?;
json_values.push(v.0);
}
Ok(Self(serde_json::Value::Array(json_values)))
} else {
if ob.is_none() {
return Ok(Self(serde_json::Value::Null));
}
Err(anyhow::anyhow!(
"Unsupported type for JSON conversion: {:?}",
ob.get_type()
))?
}
}
}

pub fn get_model_from(task: &Value) -> Result<String> {
Python::with_gil(|py| -> Result<String> {
let get_model_from = get_module!(PY_MODULE)
Expand Down Expand Up @@ -55,6 +109,46 @@ pub fn embed(transformer: &str, inputs: Vec<&str>, kwargs: &serde_json::Value) -
})
}

/// One ranked document, deserialized from the JSON dict produced by the
/// Python `rank` function in transformers.py.
#[derive(Deserialize)]
pub struct RankResult {
    // Presumably the index of the document in the input array — TODO confirm
    // against sentence-transformers' CrossEncoder.rank output.
    pub corpus_id: i64,
    // Cross-encoder relevance score (converted from numpy float32 in Python).
    pub score: f64,
    // Document text; may be absent depending on rank() kwargs.
    pub text: Option<String>,
}

/// Rank `documents` against `query` with the cross-encoder named by
/// `transformer`, via the Python `rank` function in transformers.py.
///
/// `kwargs` is serialized to a JSON string and forwarded to Python.
/// Returns one `RankResult` per ranked document, or an error with the
/// Python traceback attached.
pub fn rank(
    transformer: &str,
    query: &str,
    documents: Vec<&str>,
    kwargs: &serde_json::Value,
) -> Result<Vec<RankResult>> {
    let kwargs = serde_json::to_string(kwargs)?;
    Python::with_gil(|py| -> Result<Vec<RankResult>> {
        // Was misleadingly bound to a local named `embed` (copy-paste from
        // the embed binding); renamed for clarity.
        let rank_fn: Py<PyAny> = get_module!(PY_MODULE).getattr(py, "rank").format_traceback(py)?;
        let output = rank_fn
            .call1(
                py,
                PyTuple::new(
                    py,
                    &[
                        // `&str` implements `IntoPy` directly — no intermediate
                        // `String` allocation needed (consistent with `query`).
                        transformer.into_py(py),
                        query.into_py(py),
                        documents.into_py(py),
                        kwargs.into_py(py),
                    ],
                ),
            )
            .format_traceback(py)?;
        // Python returns a list of dicts; convert each through the Json
        // wrapper, then deserialize into the typed RankResult.
        let out: Vec<Json> = output.extract(py).format_traceback(py)?;
        out.into_iter()
            .map(|x| {
                let x: RankResult = serde_json::from_value(x.0)?;
                Ok(x)
            })
            .collect()
    })
}

pub fn finetune_text_classification(
task: &Task,
dataset: TextClassificationDataset,
Expand Down
29 changes: 28 additions & 1 deletion pgml-extension/src/bindings/transformers/transformers.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
import orjson
from rouge import Rouge
from sacrebleu.metrics import BLEU
from sentence_transformers import SentenceTransformer
from sentence_transformers import SentenceTransformer, CrossEncoder
from sklearn.metrics import (
mean_squared_error,
r2_score,
Expand Down Expand Up @@ -500,6 +500,33 @@ def transform(task, args, inputs, stream=False):
return orjson.dumps(pipe(inputs, **args), default=orjson_default).decode()


def create_cross_encoder(transformer):
    """Load a sentence-transformers CrossEncoder for the given model name."""
    return CrossEncoder(transformer)


def rank_using(model, query, documents, kwargs):
    """Rank `documents` against `query` with an already-loaded cross-encoder.

    `kwargs` may arrive as a JSON string (from the Rust binding) or as a
    dict; it is forwarded to `model.rank`.
    """
    if isinstance(kwargs, str):
        kwargs = orjson.loads(kwargs)

    results = []
    for result in model.rank(query, documents, **kwargs):
        # The score is a numpy float32 before we convert it
        score = result.pop("score").item()
        results.append({"score": score, **result})
    return results


def rank(transformer, query, documents, kwargs):
    """Load (and cache) a cross-encoder, then rank `documents` for `query`.

    `kwargs` is the JSON string passed through from the Rust binding.
    """
    kwargs = orjson.loads(kwargs)

    # NOTE(review): cross-encoders share the SentenceTransformer cache dict;
    # presumably no model name is used for both embedding and ranking —
    # TODO confirm, a collision would return the wrong model type.
    model = __cache_sentence_transformer_by_name.get(transformer)
    if model is None:
        model = create_cross_encoder(transformer)
        __cache_sentence_transformer_by_name[transformer] = model

    return rank_using(model, query, documents, kwargs)


def create_embedding(transformer):
    """Load a SentenceTransformer embedding model for the given model name."""
    return SentenceTransformer(transformer)

Expand Down
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy