@aaronvg · Created August 22, 2024 19:40
In `engine/baml-runtime/tests/test_runtime.rs`:
```rust
// #[cfg(feature = "internal")]
mod internal_tests {
    use std::collections::HashMap;

    use baml_runtime::{
        internal::llm_client::LLMResponse, BamlRuntime, DiagnosticsError, IRHelper,
        InternalRuntimeInterface, RenderedPrompt,
    };
    use baml_types::BamlValue;
    use wasm_bindgen_test::*;
    use wasm_logger;

    #[tokio::test]
    // #[wasm_bindgen_test]
    async fn test_call_function() -> Result<(), Box<dyn std::error::Error>> {
        // wasm_logger::init(wasm_logger::Config::new(log::Level::Info));
        log::info!("Running test_call_function");

        // Build the BAML sources in memory instead of loading .baml files
        // from disk via PathBufs.
        let mut files = HashMap::new();
        files.insert(
            "main.baml",
            r##"
            generator lang_python {
            }

            class Email {
                subject string
                body string
                from_address string
            }

            enum OrderStatus {
                ORDERED
                SHIPPED
                DELIVERED
                CANCELLED
            }

            class OrderInfo {
                order_status OrderStatus
                tracking_number string?
                estimated_arrival_date string?
            }

            client<llm> GPT4Turbo {
                provider baml-openai-chat
                options {
                    model gpt-4-1106-preview
                    api_key env.OPENAI_API_KEY
                }
            }

            function GetOrderInfo(input: string) -> OrderInfo {
                client GPT4Turbo
                prompt #"
                    Extract this info from the email in JSON format:

                    Before you output the JSON, please explain your
                    reasoning step-by-step. Here is an example of how to do this:
                    'If we think step by step we can see that ...
                    therefore the output JSON is:
                    {
                        ... the json schema ...
                    }'
                "#
            }
            "##,
        );
        log::info!("Files: {:?}", files);

        // Construct the runtime from the in-memory sources, stubbing the
        // environment variables referenced by the client block.
        let runtime = BamlRuntime::from_file_content(
            "baml_src",
            &files,
            [("OPENAI_API_KEY", "OPENAI_API_KEY")].into(),
        )?;
        log::info!("Runtime built");

        let params = [(
            "input".into(),
            baml_types::BamlValue::String("Attention Is All You Need. Mark. Hello.".into()),
        )]
        .into_iter()
        .collect();

        // Every call goes through a context manager.
        let ctx = runtime.create_ctx_manager(BamlValue::String("test".to_string()), None);
        let (res, _) = runtime
            .call_function("GetOrderInfo".to_string(), &params, &ctx, None, None)
            .await;

        // Other internal entry points that could be exercised here:
        // runtime.get_test_params(function_name, test_name, ctx);
        // runtime.internal().render_prompt(function_name, ctx, params, node_index)

        assert!(res.is_ok(), "Result: {:#?}", res.err());
        Ok(())
    }

    // #[wasm_bindgen_test]
    // async fn test_run_test() {
    //     let client = OpenAIClient::new();
    //     let response = client.call_llm("test".to_string()).await;
    //     // Add further assertions
    // }
}
```
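
Note that the `log::info!` calls in this test are silent as written: the `wasm_logger` init is commented out and would only apply on the wasm target anyway. For native `cargo test` runs, a minimal sketch (assuming `env_logger` is added as a dev-dependency; this line is not part of the gist) would be to install a logger at the top of `test_call_function`:

```rust
// Hypothetical addition, assuming env_logger is a dev-dependency.
// `is_test(true)` cooperates with the test harness's output capture;
// `try_init().ok()` avoids a panic if another test already set a logger.
env_logger::builder().is_test(true).try_init().ok();
```

With that in place, running with `RUST_LOG=info` and `-- --nocapture` shows the `Files:` and runtime log lines.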
To run the test:

```
cargo test test_call_function --no-default-features --features "internal"
```
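
Note that the env map hands the runtime the literal string `"OPENAI_API_KEY"` as the key's value, so the call will only pass against the real API if a genuine key is supplied. One option, sketched here under the assumption that you want to forward the key from the host environment, is:

```rust
// Sketch: forward the real key from the host environment instead of the
// placeholder literal. std::env::var returns Err if the variable is unset,
// which the test's `?` surfaces as a failure.
let api_key = std::env::var("OPENAI_API_KEY")?;
let runtime = BamlRuntime::from_file_content(
    "baml_src",
    &files,
    [("OPENAI_API_KEY", api_key.as_str())].into(),
)?;
```

Then export the key before running: `OPENAI_API_KEY=... cargo test test_call_function --no-default-features --features "internal"`.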