Skip to content

Commit

Permalink
Merge pull request #35 from arcmindai/dev
Browse files Browse the repository at this point in the history
feat(gpt): upgrade controller, brain to gpt-4o and increase num results …
  • Loading branch information
kinwo committed May 26, 2024
2 parents 27b99b0 + b4e23c1 commit 18f9cca
Show file tree
Hide file tree
Showing 9 changed files with 88 additions and 29 deletions.
2 changes: 1 addition & 1 deletion scripts/deploy_brain.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ fi

# To deploy locally, update IC_NETWORK to local. To deploy to ic, update IC_NETWORK to ic.
IC_NETWORK=${IC_NETWORK:-local}
GPT_MODEL=gpt-4
GPT_MODEL=gpt-4o

# Deploy brain canister
CONTROLLER_PRINCIPAL=$(dfx canister --network $IC_NETWORK id arcmindai_controller)
Expand Down
2 changes: 1 addition & 1 deletion scripts/deploy_controller.sh
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ BRAIN_PRINCIPAL=$(dfx canister --network $IC_NETWORK id arcmindai_brain)
TOOLS_PRINCIPAL=$(dfx canister --network $IC_NETWORK id arcmindai_tools)
BATTERY_PRINCIPAL=$(dfx canister --network $IC_NETWORK id cycles_battery)

BROWSE_WEBSITE_GPT_MODEL=gpt-3.5-turbo-1106
BROWSE_WEBSITE_GPT_MODEL=gpt-4o

# Deploy controller canister
echo Deploying controller canister BATTERY_PRINCIPAL=$BATTERY_PRINCIPAL, BATTERY_API_KEY=$BATTERY_API_KEY on $IC_NETWORK
Expand Down
2 changes: 2 additions & 0 deletions src/arcmindai_brain/arcmindai_brain.did
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ service : (opt principal, text, text, opt text, opt principal) -> {
check_cycles_and_topup : () -> ();
generate_embeddings : (text, int8, opt text) -> (Result);
get_battery_canister : () -> (opt principal) query;
get_gpt_model : () -> (text) query;
get_owner : () -> (opt principal) query;
update_gpt_model : (text) -> ();
update_owner : (principal) -> ();
}
26 changes: 26 additions & 0 deletions src/arcmindai_brain/src/datatype.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,32 @@ pub struct OpenAIResultMessage {
pub content: String,
}

/// Structured body of the assistant message content once deserialized from
/// JSON: a `thoughts` section plus a single `command` to execute.
/// NOTE(review): assumes the model replies with exactly this JSON shape —
/// deserialization fails otherwise; confirm against the prompt template.
#[derive(serde::Serialize, Deserialize)]
pub struct OpenAIResultMessageContent {
    // The model's reasoning block (text/reasoning/plan/criticism/speak).
    pub thoughts: OpenAIResultMessageContentThoughts,
    // The action the model wants performed next.
    pub command: OpenAIResultMessageContentCommand,
}

/// The `thoughts` object inside the model's JSON reply. All fields are
/// free-form strings produced by the model.
#[derive(serde::Serialize, Deserialize)]
pub struct OpenAIResultMessageContentThoughts {
    // Main thought text.
    pub text: String,
    // Why the model chose this step.
    pub reasoning: String,
    // Plan for upcoming steps.
    pub plan: String,
    // Self-critique of the plan.
    pub criticism: String,
    // Short user-facing summary.
    pub speak: String,
}

/// The `command` object inside the model's JSON reply.
#[derive(serde::Serialize, Deserialize)]
pub struct OpenAIResultMessageContentCommand {
    // Command identifier chosen by the model.
    pub name: String,
    // Arguments kept as untyped JSON since their shape varies per command.
    pub args: serde_json::Value,
}

/// Typed view of command args that carry a single `query` string.
/// NOTE(review): not referenced by `OpenAIResultMessageContentCommand`
/// (which keeps `args` as `serde_json::Value`) — presumably decoded
/// on demand for query-style commands; verify against the call sites.
#[derive(serde::Serialize, Deserialize)]
pub struct OpenAIResultMessageContentCommandArgs {
    pub query: String,
}

#[derive(serde::Serialize, Deserialize)]
pub struct OpenAIResultUsage {
pub prompt_tokens: u32,
Expand Down
54 changes: 33 additions & 21 deletions src/arcmindai_brain/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ use async_recursion::async_recursion;
use guards::assert_owner;

mod tokenutil;
use tokenutil::{truncate_question, MAX_16K_TOKENS};
use tokenutil::{truncate_question, MAX_128K_TOKENS};

mod httputil;
use httputil::{
Expand All @@ -47,7 +47,6 @@ thread_local! {
}

// ---------------------- ArcMind AI Agent ----------------------
const MAX_DEFAULT_TOKENS: usize = 8000;
const MAX_NUM_RETIRES: i8 = 2;
const GPT_TEMPERATURE: f32 = 0.5;

Expand Down Expand Up @@ -77,10 +76,7 @@ async fn ask(
};

// Truncate question if reaching the max token limit of the model
let max_token_limit = match gpt_model.as_str() {
"gpt-3.5-turbo-16k" => MAX_16K_TOKENS,
_ => MAX_DEFAULT_TOKENS,
};
let max_token_limit = MAX_128K_TOKENS;

// log gpt_model and max_token_limit
ic_cdk::println!(
Expand Down Expand Up @@ -162,17 +158,27 @@ fn transform_openai_chat_completion(args: TransformArgs) -> HttpResponse {
.expect("Transformed response is not UTF-8 encoded.");
let json_str = res_str.replace("\n", "");

let openai_result = serde_json::from_str(json_str.as_str());
let openai_result: Result<OpenAIResult, serde_json::Error> =
serde_json::from_str(json_str.as_str());
if openai_result.is_err() {
// log invalid json str
ic_cdk::println!("Invalid JSON str = {:?}", json_str);

res.body = format!("Invalid JSON str = {:?}", json_str)
.as_bytes()
.to_vec();
return res;
}

let openai_body: OpenAIResult = openai_result.unwrap();
let content = &openai_body.choices[0].message.content;
res.body = content.as_bytes().to_vec();
let msg_content = &openai_body.choices[0].message.content;

// replace ``` with empty string
let msg_content = msg_content.replace("```", "");
// replace extra json title with empty string
let msg_content = msg_content.replace("json", "");

res.body = msg_content.as_bytes().to_vec();
return res;
}

Expand Down Expand Up @@ -297,21 +303,24 @@ pub fn get_owner() -> Option<Principal> {
#[candid_method(update)]
pub fn update_owner(new_owner: Principal) {
STATE.with(|state| {
let open_api_key = state.borrow().openai_api_key.clone();
let gpt_model = state.borrow().gpt_model.clone();
let battery_api_key = state.borrow().battery_api_key.clone();
let battery_canister = state.borrow().battery_canister.clone();
state.borrow_mut().owner = Some(new_owner);
});
}

*state.borrow_mut() = State {
owner: Some(new_owner),
openai_api_key: open_api_key,
gpt_model: gpt_model,
battery_api_key: battery_api_key,
battery_canister: battery_canister,
};
/// Replaces the GPT model name held in canister state.
/// Guarded: only the owner principal may call this update method.
#[update(guard = "assert_owner")]
#[candid_method(update)]
pub fn update_gpt_model(new_gpt_model: String) {
    STATE.with(|s| s.borrow_mut().gpt_model = new_gpt_model);
}

/// Returns a copy of the GPT model name currently stored in canister state.
#[query]
#[candid_method(query)]
pub fn get_gpt_model() -> String {
    STATE.with(|s| s.borrow().gpt_model.clone())
}

#[update]
fn start_cycles_check_timer(secs: u64) {
let secs = Duration::from_secs(secs);
Expand Down Expand Up @@ -403,7 +412,7 @@ fn pre_upgrade() {
fn post_upgrade(
_owner: Option<Principal>,
_openai_api_key: String,
_gpt_model: String,
gpt_model: String,
battery_api_key: Option<String>,
battery_canister: Option<Principal>,
) {
Expand All @@ -418,6 +427,9 @@ fn post_upgrade(
s.borrow_mut().battery_api_key = battery_api_key.clone();
});

// Update gpt_model
update_gpt_model(gpt_model);

start_cycles_check_timer(CYCLES_BALANCE_CHECK_MIN_INTERVAL_SECS);
}

Expand Down
4 changes: 2 additions & 2 deletions src/arcmindai_brain/src/tokenutil.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use tiktoken_rs::cl100k_base;

pub const MAX_16K_TOKENS: usize = 15000;
pub const MAX_128K_TOKENS: usize = 127 * 1000;

pub fn truncate_question(question: String, max_token_limit: usize) -> String {
// check no. of tokens again
Expand All @@ -16,7 +16,7 @@ pub fn truncate_question(question: String, max_token_limit: usize) -> String {
.collect::<String>();
ic_cdk::println!(
"tokens_len reached limit {}!! Question is truncated to: \n{}",
MAX_16K_TOKENS,
MAX_128K_TOKENS,
safe_question
);

Expand Down
2 changes: 2 additions & 0 deletions src/arcmindai_controller/arcmindai_controller.did
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ service : (
get_battery_canister : () -> (opt principal) query;
get_beamfi_canister : () -> (opt principal) query;
get_brain_canister : () -> (opt principal) query;
get_browse_website_gpt_model : () -> (opt text) query;
get_chathistory : () -> (vec ChatHistory) query;
get_goal : (nat64) -> (opt Goal) query;
get_max_num_thoughts_allowed : () -> (nat64) query;
Expand All @@ -43,5 +44,6 @@ service : (
is_paused : () -> (bool) query;
start_new_goal : (text) -> ();
toggle_pause_cof : () -> ();
update_browse_website_gpt_model : (opt text) -> ();
update_owner : (principal) -> ();
}
23 changes: 20 additions & 3 deletions src/arcmindai_controller/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,9 +63,9 @@ const CYCLES_TOPUP_AMT: u64 = 4 * CYCLES_ONE_TC;

const CYCLES_TOPUP_GROUP: &str = "arcmindai_controller";

const RECENT_CHAT_HISTORY: usize = 40;
const RECENT_CHAT_HISTORY: usize = 80;
const DATE_TIME_FORMAT: &str = "[year]-[month]-[day] [hour]:[minute]:[second]";
const MAX_NUM_COF_PER_GOAL: u16 = 60;
const MAX_NUM_COF_PER_GOAL: u16 = 100;
const DEFAULT_MAX_NUM_THOUGHTS_ALLOWED: u16 = 500;

#[derive(Serialize, Deserialize)]
Expand Down Expand Up @@ -958,6 +958,20 @@ pub fn update_owner(new_owner: Principal) {
});
}

/// Stores the (optional) GPT model name used for the browse-website tool.
/// Guarded: only the owner principal may call this update method.
#[update(guard = "assert_owner")]
#[candid_method(update)]
pub fn update_browse_website_gpt_model(new_model: Option<String>) {
    STATE.with(|s| s.borrow_mut().browse_website_gpt_model = new_model);
}

/// Returns a copy of the browse-website GPT model name, if one is configured.
#[query]
#[candid_method(query)]
pub fn get_browse_website_gpt_model() -> Option<String> {
    STATE.with(|s| s.borrow().browse_website_gpt_model.clone())
}

#[update(guard = "assert_owner")]
#[candid_method(update)]
pub fn toggle_pause_cof() {
Expand Down Expand Up @@ -1165,7 +1179,7 @@ fn post_upgrade(
_vector_canister: Option<Principal>,
_beamfi_canister: Option<Principal>,
battery_canister: Option<Principal>,
_browse_website_gpt_model: Option<String>,
browse_website_gpt_model: Option<String>,
_billing_key: Option<String>,
battery_api_key: Option<String>,
) {
Expand All @@ -1191,6 +1205,9 @@ fn post_upgrade(
s.borrow_mut().battery_api_key = battery_api_key.clone();
});

// Update browse_website_gpt_model
update_browse_website_gpt_model(browse_website_gpt_model);

// log update of battery_canister
ic_cdk::println!(
"Controller canisters: post_upgrade: battery_canister: {:?}",
Expand Down
2 changes: 1 addition & 1 deletion src/arcmindai_tools/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ mod util;
use util::generate_request_id;

const BROWSE_WEBSITE_PROXY_URL: &str = "https://browsewebsite-4gbndkvjta-uc.a.run.app";
const MAX_NUM_GOOGLE_SEARCH_RESULTS: i32 = 3;
const MAX_NUM_GOOGLE_SEARCH_RESULTS: i32 = 6;

// 3 days
const CYCLES_BALANCE_CHECK_MIN_INTERVAL_SECS: u64 = 60 * 60 * 24 * 3;
Expand Down

0 comments on commit 18f9cca

Please sign in to comment.