From 6e8e4682ee6287ca393bbc0927c431008e4e1972 Mon Sep 17 00:00:00 2001 From: SilasMarvin <19626586+SilasMarvin@users.noreply.github.com> Date: Wed, 6 Dec 2023 14:46:32 -0800 Subject: [PATCH] Updated to use parking_lot and cleaned up tests --- pgml-sdks/pgml/Cargo.lock | 1 + pgml-sdks/pgml/Cargo.toml | 1 + pgml-sdks/pgml/javascript/tests/jest.config.js | 2 +- pgml-sdks/pgml/javascript/tests/typescript-tests/test.ts | 8 ++++---- pgml-sdks/pgml/python/tests/test.py | 8 ++++---- pgml-sdks/pgml/src/lib.rs | 7 ++----- pgml-sdks/pgml/src/open_source_ai.rs | 8 ++++---- 7 files changed, 17 insertions(+), 18 deletions(-) diff --git a/pgml-sdks/pgml/Cargo.lock b/pgml-sdks/pgml/Cargo.lock index b377c4167..131380b9d 100644 --- a/pgml-sdks/pgml/Cargo.lock +++ b/pgml-sdks/pgml/Cargo.lock @@ -1455,6 +1455,7 @@ dependencies = [ "lopdf", "md5", "neon", + "parking_lot 0.12.1", "pyo3", "pyo3-asyncio", "regex", diff --git a/pgml-sdks/pgml/Cargo.toml b/pgml-sdks/pgml/Cargo.toml index ac7dc23ed..7b85a47ed 100644 --- a/pgml-sdks/pgml/Cargo.toml +++ b/pgml-sdks/pgml/Cargo.toml @@ -43,6 +43,7 @@ is-terminal = "0.4" colored = "2" ctrlc = "3" inquire = "0.6" +parking_lot = "0.12.1" [features] default = [] diff --git a/pgml-sdks/pgml/javascript/tests/jest.config.js b/pgml-sdks/pgml/javascript/tests/jest.config.js index 7cf8a2c1e..66337065d 100644 --- a/pgml-sdks/pgml/javascript/tests/jest.config.js +++ b/pgml-sdks/pgml/javascript/tests/jest.config.js @@ -5,5 +5,5 @@ export default { transform: { '^.+\\.tsx?$': 'ts-jest' }, - testTimeout: 30000, + testTimeout: 300000, } diff --git a/pgml-sdks/pgml/javascript/tests/typescript-tests/test.ts b/pgml-sdks/pgml/javascript/tests/typescript-tests/test.ts index acc766bd8..ad0c9cd78 100644 --- a/pgml-sdks/pgml/javascript/tests/typescript-tests/test.ts +++ b/pgml-sdks/pgml/javascript/tests/typescript-tests/test.ts @@ -309,7 +309,7 @@ it("can transformer pipeline stream", async () => { it("can open source ai create", () => { const client = 
pgml.newOpenSourceAI(); const results = client.chat_completions_create( - "mistralai/Mistral-7B-v0.1", + "HuggingFaceH4/zephyr-7b-beta", [ { role: "system", @@ -328,7 +328,7 @@ it("can open source ai create", () => { it("can open source ai create async", async () => { const client = pgml.newOpenSourceAI(); const results = await client.chat_completions_create_async( - "mistralai/Mistral-7B-v0.1", + "HuggingFaceH4/zephyr-7b-beta", [ { role: "system", @@ -347,7 +347,7 @@ it("can open source ai create async", async () => { it("can open source ai create stream", () => { const client = pgml.newOpenSourceAI(); const it = client.chat_completions_create_stream( - "mistralai/Mistral-7B-v0.1", + "HuggingFaceH4/zephyr-7b-beta", [ { role: "system", @@ -369,7 +369,7 @@ it("can open source ai create stream", () => { it("can open source ai create stream async", async () => { const client = pgml.newOpenSourceAI(); const it = await client.chat_completions_create_stream_async( - "mistralai/Mistral-7B-v0.1", + "HuggingFaceH4/zephyr-7b-beta", [ { role: "system", diff --git a/pgml-sdks/pgml/python/tests/test.py b/pgml-sdks/pgml/python/tests/test.py index 5c3a4df33..748367867 100644 --- a/pgml-sdks/pgml/python/tests/test.py +++ b/pgml-sdks/pgml/python/tests/test.py @@ -328,7 +328,7 @@ async def test_transformer_pipeline_stream(): def test_open_source_ai_create(): client = pgml.OpenSourceAI() results = client.chat_completions_create( - "mistralai/Mistral-7B-v0.1", + "HuggingFaceH4/zephyr-7b-beta", [ { "role": "system", @@ -348,7 +348,7 @@ def test_open_source_ai_create(): async def test_open_source_ai_create_async(): client = pgml.OpenSourceAI() results = await client.chat_completions_create_async( - "mistralai/Mistral-7B-v0.1", + "HuggingFaceH4/zephyr-7b-beta", [ { "role": "system", @@ -367,7 +367,7 @@ async def test_open_source_ai_create_async(): def test_open_source_ai_create_stream(): client = pgml.OpenSourceAI() results = client.chat_completions_create_stream( - 
"mistralai/Mistral-7B-v0.1", + "HuggingFaceH4/zephyr-7b-beta", [ { "role": "system", @@ -389,7 +389,7 @@ def test_open_source_ai_create_stream(): async def test_open_source_ai_create_stream_async(): client = pgml.OpenSourceAI() results = await client.chat_completions_create_stream_async( - "mistralai/Mistral-7B-v0.1", + "HuggingFaceH4/zephyr-7b-beta", [ { "role": "system", diff --git a/pgml-sdks/pgml/src/lib.rs b/pgml-sdks/pgml/src/lib.rs index 4fb0b7caf..74a5c529b 100644 --- a/pgml-sdks/pgml/src/lib.rs +++ b/pgml-sdks/pgml/src/lib.rs @@ -4,10 +4,10 @@ //! //! With this SDK, you can seamlessly manage various database tables related to documents, text chunks, text splitters, LLM (Language Model) models, and embeddings. By leveraging the SDK's capabilities, you can efficiently index LLM embeddings using PgVector for fast and accurate queries. +use parking_lot::RwLock; use sqlx::{postgres::PgPoolOptions, PgPool}; use std::collections::HashMap; use std::env; -use std::sync::RwLock; use tokio::runtime::Runtime; use tracing::Level; use tracing_subscriber::FmtSubscriber; @@ -34,7 +34,6 @@ mod utils; // Re-export pub use builtins::Builtins; -pub use cli::cli; pub use collection::Collection; pub use model::Model; pub use open_source_ai::OpenSourceAI; @@ -52,9 +51,7 @@ static DATABASE_POOLS: RwLock>> = RwLock::new(Non // Even though this function does not use async anywhere, for whatever reason it must be async or // sqlx's connect_lazy will throw an error async fn get_or_initialize_pool(database_url: &Option) -> anyhow::Result { - let mut pools = DATABASE_POOLS - .write() - .expect("Error getting DATABASE_POOLS for writing"); + let mut pools = DATABASE_POOLS.write(); let pools = pools.get_or_insert_with(HashMap::new); let environment_url = std::env::var("DATABASE_URL"); let environment_url = environment_url.as_deref(); diff --git a/pgml-sdks/pgml/src/open_source_ai.rs b/pgml-sdks/pgml/src/open_source_ai.rs index f2204c275..d4c02215e 100644 --- 
a/pgml-sdks/pgml/src/open_source_ai.rs +++ b/pgml-sdks/pgml/src/open_source_ai.rs @@ -401,7 +401,7 @@ mod tests { #[test] fn can_open_source_ai_create() -> anyhow::Result<()> { let client = OpenSourceAI::new(None); - let results = client.chat_completions_create(Json::from_serializable("mistralai/Mistral-7B-v0.1"), vec![ + let results = client.chat_completions_create(Json::from_serializable("HuggingFaceH4/zephyr-7b-beta"), vec![ serde_json::json!({"role": "system", "content": "You are a friendly chatbot who always responds in the style of a pirate"}).into(), serde_json::json!({"role": "user", "content": "How many helicopters can a human eat in one sitting?"}).into(), ], Some(10), None, Some(3), None)?; @@ -412,7 +412,7 @@ mod tests { #[sqlx::test] fn can_open_source_ai_create_async() -> anyhow::Result<()> { let client = OpenSourceAI::new(None); - let results = client.chat_completions_create_async(Json::from_serializable("mistralai/Mistral-7B-v0.1"), vec![ + let results = client.chat_completions_create_async(Json::from_serializable("HuggingFaceH4/zephyr-7b-beta"), vec![ serde_json::json!({"role": "system", "content": "You are a friendly chatbot who always responds in the style of a pirate"}).into(), serde_json::json!({"role": "user", "content": "How many helicopters can a human eat in one sitting?"}).into(), ], Some(10), None, Some(3), None).await?; @@ -423,7 +423,7 @@ mod tests { #[sqlx::test] fn can_open_source_ai_create_stream_async() -> anyhow::Result<()> { let client = OpenSourceAI::new(None); - let mut stream = client.chat_completions_create_stream_async(Json::from_serializable("mistralai/Mistral-7B-v0.1"), vec![ + let mut stream = client.chat_completions_create_stream_async(Json::from_serializable("HuggingFaceH4/zephyr-7b-beta"), vec![ serde_json::json!({"role": "system", "content": "You are a friendly chatbot who always responds in the style of a pirate"}).into(), serde_json::json!({"role": "user", "content": "How many helicopters can a human eat in one 
sitting?"}).into(), ], Some(10), None, Some(3), None).await?; @@ -436,7 +436,7 @@ mod tests { #[test] fn can_open_source_ai_create_stream() -> anyhow::Result<()> { let client = OpenSourceAI::new(None); - let iterator = client.chat_completions_create_stream(Json::from_serializable("mistralai/Mistral-7B-v0.1"), vec![ + let iterator = client.chat_completions_create_stream(Json::from_serializable("HuggingFaceH4/zephyr-7b-beta"), vec![ serde_json::json!({"role": "system", "content": "You are a friendly chatbot who always responds in the style of a pirate"}).into(), serde_json::json!({"role": "user", "content": "How many helicopters can a human eat in one sitting?"}).into(), ], Some(10), None, Some(3), None)?;