
Commit 9fa0e42

Updated to use parking_lot and cleaned up tests (#1221)
1 parent 1323a84 commit 9fa0e42

7 files changed: +17 -18 lines

pgml-sdks/pgml/Cargo.lock

Lines changed: 1 addition & 0 deletions
Some generated files are not rendered by default.

pgml-sdks/pgml/Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -43,6 +43,7 @@ is-terminal = "0.4"
 colored = "2"
 ctrlc = "3"
 inquire = "0.6"
+parking_lot = "0.12.1"
 
 [features]
 default = []

pgml-sdks/pgml/javascript/tests/jest.config.js

Lines changed: 1 addition & 1 deletion
@@ -5,5 +5,5 @@ export default {
   transform: {
     '^.+\\.tsx?$': 'ts-jest'
   },
-  testTimeout: 30000,
+  testTimeout: 300000,
 }

pgml-sdks/pgml/javascript/tests/typescript-tests/test.ts

Lines changed: 4 additions & 4 deletions
@@ -309,7 +309,7 @@ it("can transformer pipeline stream", async () => {
 it("can open source ai create", () => {
   const client = pgml.newOpenSourceAI();
   const results = client.chat_completions_create(
-    "mistralai/Mistral-7B-v0.1",
+    "HuggingFaceH4/zephyr-7b-beta",
     [
       {
         role: "system",
@@ -328,7 +328,7 @@ it("can open source ai create", () => {
 it("can open source ai create async", async () => {
   const client = pgml.newOpenSourceAI();
   const results = await client.chat_completions_create_async(
-    "mistralai/Mistral-7B-v0.1",
+    "HuggingFaceH4/zephyr-7b-beta",
     [
       {
         role: "system",
@@ -347,7 +347,7 @@ it("can open source ai create async", async () => {
 it("can open source ai create stream", () => {
   const client = pgml.newOpenSourceAI();
   const it = client.chat_completions_create_stream(
-    "mistralai/Mistral-7B-v0.1",
+    "HuggingFaceH4/zephyr-7b-beta",
     [
       {
         role: "system",
@@ -369,7 +369,7 @@ it("can open source ai create stream", () => {
 it("can open source ai create stream async", async () => {
   const client = pgml.newOpenSourceAI();
   const it = await client.chat_completions_create_stream_async(
-    "mistralai/Mistral-7B-v0.1",
+    "HuggingFaceH4/zephyr-7b-beta",
     [
       {
         role: "system",

pgml-sdks/pgml/python/tests/test.py

Lines changed: 4 additions & 4 deletions
@@ -328,7 +328,7 @@ async def test_transformer_pipeline_stream():
 def test_open_source_ai_create():
     client = pgml.OpenSourceAI()
     results = client.chat_completions_create(
-        "mistralai/Mistral-7B-v0.1",
+        "HuggingFaceH4/zephyr-7b-beta",
         [
             {
                 "role": "system",
@@ -348,7 +348,7 @@ def test_open_source_ai_create():
 async def test_open_source_ai_create_async():
     client = pgml.OpenSourceAI()
     results = await client.chat_completions_create_async(
-        "mistralai/Mistral-7B-v0.1",
+        "HuggingFaceH4/zephyr-7b-beta",
         [
             {
                 "role": "system",
@@ -367,7 +367,7 @@ async def test_open_source_ai_create_async():
 def test_open_source_ai_create_stream():
     client = pgml.OpenSourceAI()
     results = client.chat_completions_create_stream(
-        "mistralai/Mistral-7B-v0.1",
+        "HuggingFaceH4/zephyr-7b-beta",
         [
             {
                 "role": "system",
@@ -389,7 +389,7 @@ def test_open_source_ai_create_stream():
 async def test_open_source_ai_create_stream_async():
     client = pgml.OpenSourceAI()
     results = await client.chat_completions_create_stream_async(
-        "mistralai/Mistral-7B-v0.1",
+        "HuggingFaceH4/zephyr-7b-beta",
         [
             {
                 "role": "system",

pgml-sdks/pgml/src/lib.rs

Lines changed: 2 additions & 5 deletions
@@ -4,10 +4,10 @@
 //!
 //! With this SDK, you can seamlessly manage various database tables related to documents, text chunks, text splitters, LLM (Language Model) models, and embeddings. By leveraging the SDK's capabilities, you can efficiently index LLM embeddings using PgVector for fast and accurate queries.
 
+use parking_lot::RwLock;
 use sqlx::{postgres::PgPoolOptions, PgPool};
 use std::collections::HashMap;
 use std::env;
-use std::sync::RwLock;
 use tokio::runtime::Runtime;
 use tracing::Level;
 use tracing_subscriber::FmtSubscriber;
@@ -34,7 +34,6 @@ mod utils;
 
 // Re-export
 pub use builtins::Builtins;
-pub use cli::cli;
 pub use collection::Collection;
 pub use model::Model;
 pub use open_source_ai::OpenSourceAI;
@@ -52,9 +51,7 @@ static DATABASE_POOLS: RwLock<Option<HashMap<String, PgPool>>> = RwLock::new(Non
 // Even though this function does not use async anywhere, for whatever reason it must be async or
 // sqlx's connect_lazy will throw an error
 async fn get_or_initialize_pool(database_url: &Option<String>) -> anyhow::Result<PgPool> {
-    let mut pools = DATABASE_POOLS
-        .write()
-        .expect("Error getting DATABASE_POOLS for writing");
+    let mut pools = DATABASE_POOLS.write();
     let pools = pools.get_or_insert_with(HashMap::new);
     let environment_url = std::env::var("DATABASE_URL");
     let environment_url = environment_url.as_deref();
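
The substantive change in lib.rs is swapping std::sync::RwLock for parking_lot::RwLock around the global pool map. parking_lot's locks are not poisoned when a panicking thread holds them, so write() returns the guard directly rather than a Result, which is why the .expect(...) call disappears. Below is a minimal standalone sketch of that pattern, assuming parking_lot 0.12; the POOLS static and String values are illustrative stand-ins, not the SDK's actual types.

use std::collections::HashMap;

use parking_lot::RwLock;

// Illustrative global cache guarded by a parking_lot RwLock. The SDK stores
// sqlx PgPool values here; a String stands in to keep the sketch self-contained.
static POOLS: RwLock<Option<HashMap<String, String>>> = RwLock::new(None);

fn get_or_insert(key: &str) -> String {
    // parking_lot::RwLock::write() hands back the guard directly; there is no
    // poisoning Result to unwrap, unlike std::sync::RwLock.
    let mut pools = POOLS.write();
    let pools = pools.get_or_insert_with(HashMap::new);
    pools
        .entry(key.to_string())
        .or_insert_with(|| format!("connection for {key}"))
        .clone()
}

fn main() {
    assert_eq!(get_or_insert("a"), "connection for a");
    // The second call returns the cached entry without re-inserting.
    assert_eq!(get_or_insert("a"), "connection for a");
}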

pgml-sdks/pgml/src/open_source_ai.rs

Lines changed: 4 additions & 4 deletions
@@ -401,7 +401,7 @@ mod tests {
     #[test]
     fn can_open_source_ai_create() -> anyhow::Result<()> {
         let client = OpenSourceAI::new(None);
-        let results = client.chat_completions_create(Json::from_serializable("mistralai/Mistral-7B-v0.1"), vec![
+        let results = client.chat_completions_create(Json::from_serializable("HuggingFaceH4/zephyr-7b-beta"), vec![
            serde_json::json!({"role": "system", "content": "You are a friendly chatbot who always responds in the style of a pirate"}).into(),
            serde_json::json!({"role": "user", "content": "How many helicopters can a human eat in one sitting?"}).into(),
         ], Some(10), None, Some(3), None)?;
@@ -412,7 +412,7 @@ mod tests {
     #[sqlx::test]
     fn can_open_source_ai_create_async() -> anyhow::Result<()> {
         let client = OpenSourceAI::new(None);
-        let results = client.chat_completions_create_async(Json::from_serializable("mistralai/Mistral-7B-v0.1"), vec![
+        let results = client.chat_completions_create_async(Json::from_serializable("HuggingFaceH4/zephyr-7b-beta"), vec![
            serde_json::json!({"role": "system", "content": "You are a friendly chatbot who always responds in the style of a pirate"}).into(),
            serde_json::json!({"role": "user", "content": "How many helicopters can a human eat in one sitting?"}).into(),
         ], Some(10), None, Some(3), None).await?;
@@ -423,7 +423,7 @@ mod tests {
     #[sqlx::test]
     fn can_open_source_ai_create_stream_async() -> anyhow::Result<()> {
         let client = OpenSourceAI::new(None);
-        let mut stream = client.chat_completions_create_stream_async(Json::from_serializable("mistralai/Mistral-7B-v0.1"), vec![
+        let mut stream = client.chat_completions_create_stream_async(Json::from_serializable("HuggingFaceH4/zephyr-7b-beta"), vec![
            serde_json::json!({"role": "system", "content": "You are a friendly chatbot who always responds in the style of a pirate"}).into(),
            serde_json::json!({"role": "user", "content": "How many helicopters can a human eat in one sitting?"}).into(),
         ], Some(10), None, Some(3), None).await?;
@@ -436,7 +436,7 @@ mod tests {
     #[test]
     fn can_open_source_ai_create_stream() -> anyhow::Result<()> {
         let client = OpenSourceAI::new(None);
-        let iterator = client.chat_completions_create_stream(Json::from_serializable("mistralai/Mistral-7B-v0.1"), vec![
+        let iterator = client.chat_completions_create_stream(Json::from_serializable("HuggingFaceH4/zephyr-7b-beta"), vec![
            serde_json::json!({"role": "system", "content": "You are a friendly chatbot who always responds in the style of a pirate"}).into(),
            serde_json::json!({"role": "user", "content": "How many helicopters can a human eat in one sitting?"}).into(),
         ], Some(10), None, Some(3), None)?;
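
Across the TypeScript, Python, and Rust tests, the only change is the model identifier handed to the chat-completions helpers; the call shape itself is untouched. The sketch below is condensed from the updated Rust test above into a free-standing function. It assumes the pgml crate re-exports Json alongside OpenSourceAI and that serde_json and anyhow are available; the trailing Some(10), None, Some(3), None arguments are copied verbatim from the test rather than documented here.

// Condensed from can_open_source_ai_create above; assumes pgml re-exports
// both OpenSourceAI and Json at the crate root.
use pgml::{Json, OpenSourceAI};

fn chat_with_zephyr() -> anyhow::Result<()> {
    let client = OpenSourceAI::new(None);
    // Same positional arguments as the updated test: model, messages, then
    // Some(10), None, Some(3), None copied as-is from the diff.
    let _results = client.chat_completions_create(
        Json::from_serializable("HuggingFaceH4/zephyr-7b-beta"),
        vec![
            serde_json::json!({"role": "system", "content": "You are a friendly chatbot who always responds in the style of a pirate"}).into(),
            serde_json::json!({"role": "user", "content": "How many helicopters can a human eat in one sitting?"}).into(),
        ],
        Some(10),
        None,
        Some(3),
        None,
    )?;
    Ok(())
}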
