From 640decb15a87deac3567897b00a67fde2974b94e Mon Sep 17 00:00:00 2001
From: Santi Adavani
Date: Wed, 6 Dec 2023 11:28:18 -0800
Subject: [PATCH] using max new tokens instead of max_length

---
 pgml-sdks/pgml/src/open_source_ai.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pgml-sdks/pgml/src/open_source_ai.rs b/pgml-sdks/pgml/src/open_source_ai.rs
index 18adde288..f2204c275 100644
--- a/pgml-sdks/pgml/src/open_source_ai.rs
+++ b/pgml-sdks/pgml/src/open_source_ai.rs
@@ -236,7 +236,7 @@ mistralai/Mistral-7B-v0.1
         let md5_digest = md5::compute(to_hash.as_bytes());
         let fingerprint = uuid::Uuid::from_slice(&md5_digest.0)?;
 
-        let mut args = serde_json::json!({ "max_length": max_tokens, "temperature": temperature, "do_sample": true, "num_return_sequences": n });
+        let mut args = serde_json::json!({ "max_new_tokens": max_tokens, "temperature": temperature, "do_sample": true, "num_return_sequences": n });
         if let Some(t) = chat_template
             .or_else(|| try_get_model_chat_template(&model_name).map(|s| s.to_string()))
         {
@@ -322,7 +322,7 @@ mistralai/Mistral-7B-v0.1
         let md5_digest = md5::compute(to_hash.as_bytes());
         let fingerprint = uuid::Uuid::from_slice(&md5_digest.0)?;
 
-        let mut args = serde_json::json!({ "max_length": max_tokens, "temperature": temperature, "do_sample": true, "num_return_sequences": n });
+        let mut args = serde_json::json!({ "max_new_tokens": max_tokens, "temperature": temperature, "do_sample": true, "num_return_sequences": n });
         if let Some(t) = chat_template
             .or_else(|| try_get_model_chat_template(&model_name).map(|s| s.to_string()))
         {
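
Editor's note (not part of the patch): the rationale for this change is the difference in Hugging Face transformers generation semantics. `max_length` bounds the prompt plus the completion, so a long prompt can leave few or no tokens for generation, while `max_new_tokens` bounds only the generated completion, which matches the OpenAI-style `max_tokens` setting this SDK exposes. A minimal sketch of the arguments built after this patch, with hypothetical values standing in for the surrounding variables (max_tokens, temperature, n):

    // Hypothetical values; in the SDK these come from the caller's request.
    let max_tokens = 512;
    let temperature = 0.7;
    let n = 1;
    // `max_new_tokens` limits only the generated tokens, not prompt + output.
    let args = serde_json::json!({
        "max_new_tokens": max_tokens,
        "temperature": temperature,
        "do_sample": true,
        "num_return_sequences": n,
    });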