Compare commits

...

4 Commits

Author SHA1 Message Date
6037643a28 chore: update gitignore 2025-11-05 08:16:27 -03:00
de9dd7a49a feat: group repports in a week 2025-11-05 08:15:49 -03:00
47901d4a9f chore: update gitignore 2025-11-05 08:15:15 -03:00
14bf25e83a style: apply cargo fmt 2025-11-03 10:56:00 -03:00
5 changed files with 708 additions and 319 deletions

3
.gitignore vendored
View File

@@ -3,8 +3,9 @@ src/.env
.env .env
log/ log/
.zip .zip
.vscode/ .vscode
evaluations evaluations
groupped
# Added by cargo # Added by cargo
# #
# already existing elements were commented out # already existing elements were commented out

View File

@@ -1,17 +1,16 @@
use std::fmt::Debug; use std::fmt::Debug;
use chrono::Datelike;
use itertools::Itertools; use itertools::Itertools;
use polars::prelude::buffer::validate_utf8;
use polars::prelude::*; use polars::prelude::*;
use reqwest; use reqwest;
use serde::Deserialize;
use walkdir;
use std::time::Duration;
use std::env; use std::env;
use std::time::Duration;
use csv; use csv;
pub mod send_mail_util;
pub mod zip_directory_util;
#[derive(Debug, serde::Deserialize)] #[derive(Debug, serde::Deserialize)]
struct CsvHeader { struct CsvHeader {
CATEGORIA: String, CATEGORIA: String,
@@ -29,17 +28,9 @@ struct CsvEvaluation {
DISPONIBILIDADE: u8, DISPONIBILIDADE: u8,
CONHECIMENTO_TÉCNICO: u8, CONHECIMENTO_TÉCNICO: u8,
DIDATISMO: u8, DIDATISMO: u8,
ID_TALK: String ID_TALK: String,
} }
// impl TryFrom::<csv::DeserializeRecordsIter<&[u8], CsvHeader>> for CsvEvaluation {
// type Error = &'static str;
// fn try_from(value: csv::DeserializeRecordsIter<&[u8], CsvHeader>) -> Result<Self, Self::Error> {
// todo!()
// }
// }
fn main() { fn main() {
match dotenv::dotenv().ok() { match dotenv::dotenv().ok() {
Some(_) => println!("Environment variables loaded from .env file"), Some(_) => println!("Environment variables loaded from .env file"),
@@ -52,7 +43,11 @@ fn main() {
.unwrap_or("11432".to_string()) .unwrap_or("11432".to_string())
.parse::<u16>() .parse::<u16>()
.unwrap_or(11432); .unwrap_or(11432);
let OLLAMA_AI_MODEL_DATA_SANITIZATION = env::var("OLLAMA_AI_MODEL_DATA_SANITIZATION").expect("Missing environment variable OLLAMA_AI_MODEL_DATA_SANITIZATION"); let OLLAMA_AI_MODEL_DATA_SANITIZATION = env::var("OLLAMA_AI_MODEL_DATA_SANITIZATION")
.expect("Missing environment variable OLLAMA_AI_MODEL_DATA_SANITIZATION");
let BOT_EMAIL = env::var("BOT_EMAIL").expect("BOT_EMAIL has not been set!");
let BOT_EMAIL_PASSWORD =
env::var("BOT_EMAIL_PASSWORD").expect("BOT_EMAIL_PASSWORD has not been set!");
let ip_address = ipaddress::IPAddress::parse(OLLAMA_URL.to_string()); let ip_address = ipaddress::IPAddress::parse(OLLAMA_URL.to_string());
let OLLAMA_SANITIZED_IP = match ip_address { let OLLAMA_SANITIZED_IP = match ip_address {
@@ -70,182 +65,316 @@ fn main() {
let current_date = chrono::Local::now(); let current_date = chrono::Local::now();
let formatted_date = current_date.format("%Y-%m-%d").to_string(); let formatted_date = current_date.format("%Y-%m-%d").to_string();
let previous_week_folder_names = std::fs::read_dir(std::path::Path::new("./evaluations")).expect("Failed to read directory ./evaluations") let current_date = chrono::Local::now();
.filter_map_ok(|entry| { let first_day_of_current_week = current_date
if entry.metadata().unwrap().is_dir(){ .date_naive()
Some(entry.file_name()) .week(chrono::Weekday::Sun)
} .first_day();
else { let current_date_minus_one_week = first_day_of_current_week
None .checked_sub_days(chrono::Days::new(1))
} .expect("Failed to subtract one day");
}) let first_day_of_last_week = current_date_minus_one_week
.filter_map_ok(|entry_string_name| { .week(chrono::Weekday::Sun)
let regex_match_date = regex::Regex::new(r"(\d{4}-\d{2}-\d{2})").expect("Failed to build regex"); .first_day();
let last_day_of_last_week = current_date_minus_one_week
let filename = entry_string_name.to_str().unwrap(); .week(chrono::Weekday::Sun)
let matches_find = regex_match_date.find(filename); .last_day();
match matches_find { let previous_week_folder_names = std::fs::read_dir(std::path::Path::new("./evaluations"))
Some(found) => { .expect("Failed to read directory ./evaluations")
let date = chrono::NaiveDate::parse_from_str(found.as_str(), "%Y-%m-%d"); .filter_map_ok(|entry| {
return Some((date.unwrap().week(chrono::Weekday::Sun), entry_string_name)); if entry.metadata().unwrap().is_dir() {
}, Some(entry.file_name())
None => {return None;} } else {
}; None
}) }
.filter_map_ok(|(week, directory_string)| { })
let current_date = chrono::Local::now(); .filter_map_ok(|entry_string_name| {
let first_day_of_current_week = current_date.date_naive().week(chrono::Weekday::Sun).first_day(); let regex_match_date =
let current_date_minus_one_week = first_day_of_current_week.checked_sub_days(chrono::Days::new(1)).expect("Failed to subtract one day"); regex::Regex::new(r"(\d{4}-\d{2}-\d{2})").expect("Failed to build regex");
let first_day_of_last_week = current_date_minus_one_week.week(chrono::Weekday::Sun).first_day();
let first_day_of_week_in_folder_name = week.first_day();
if first_day_of_last_week == first_day_of_week_in_folder_name { let filename = entry_string_name.to_str().unwrap();
return Some(directory_string); let matches_find = regex_match_date.find(filename);
}
return None; match matches_find {
}) Some(found) => {
.filter_map(|value| { let date = chrono::NaiveDate::parse_from_str(found.as_str(), "%Y-%m-%d");
if value.is_ok() {return Some(value.unwrap());} return Some((date.unwrap().week(chrono::Weekday::Sun), entry_string_name));
else {return None;} }
}) None => {
.sorted() return None;
.collect_vec(); }
};
})
.filter_map_ok(|(week, directory_string)| {
let first_day_of_week_in_folder_name = week.first_day();
if first_day_of_last_week == first_day_of_week_in_folder_name {
return Some(directory_string);
}
return None;
})
.filter_map(|value| {
if value.is_ok() {
return Some(value.unwrap());
} else {
return None;
}
})
.sorted()
.collect_vec();
println!("{:?}", previous_week_folder_names); println!("{:?}", previous_week_folder_names);
let prompt_data_sanitization = std::fs::read_to_string("./PROMPT_DATA_SANITIZATION.txt").expect("Failed to read PROMPT_DATA_SANITIZATION.txt"); let prompt_data_sanitization = std::fs::read_to_string("./PROMPT_DATA_SANITIZATION.txt")
.expect("Failed to read PROMPT_DATA_SANITIZATION.txt");
let client = reqwest::blocking::Client::new(); let client = reqwest::blocking::Client::new();
let a = previous_week_folder_names.iter().map(|folder_name| { let groupped_values = previous_week_folder_names
.iter()
.map(|folder_name| {
let folder_base_path = std::path::Path::new("./evaluations"); let folder_base_path = std::path::Path::new("./evaluations");
let folder_date_path = folder_base_path.join(folder_name); let folder_date_path = folder_base_path.join(folder_name);
std::fs::read_dir(folder_date_path) std::fs::read_dir(folder_date_path)
}) })
.filter_map_ok(|files_inside_folder_on_date | .filter_map_ok(|files_inside_folder_on_date| {
{ let groupped_by_user_on_day = files_inside_folder_on_date
files_inside_folder_on_date.filter_ok(|entry| { .filter_ok(|entry| {
let entry_file_name_as_str = entry.file_name().into_string().expect("Failed to get filename as a String"); let entry_file_name_as_str = entry
.file_name()
.into_string()
.expect("Failed to get filename as a String");
entry_file_name_as_str.ends_with(".csv") && entry_file_name_as_str.ends_with(".csv")
!entry_file_name_as_str.contains("response_time.csv") && !entry_file_name_as_str.contains("response_time.csv")
})
.filter_map(|value|{
if value.is_ok() {return Some(value.unwrap());}
None
})
.take(1)
.map(|file_name_csv| {
println!("{:?}", file_name_csv.path());
let file_contents = std::fs::read_to_string(file_name_csv.path()).expect("Failed to read CSV file");
let ollama_api_request = client.post(format!("http://{OLLAMA_SANITIZED_IP}:{OLLAMA_PORT}/api/generate"))
.body(
serde_json::json!({
"model": OLLAMA_AI_MODEL_DATA_SANITIZATION,
"prompt": format!("{prompt_data_sanitization} \n{file_contents}"),
"temperature": 0.0, // Get predictable and reproducible output
"stream": false,
}).to_string()
);
let result = ollama_api_request.timeout(Duration::from_secs(3600)).send();
match result {
Ok(response) => {println!("Response: {:?}", response);
let response_json = response.json::<serde_json::Value>().expect("Failed to deserialize response to JSON");
let ai_response = response_json["response"]
.as_str()
.expect("Failed to get AI response as string");
let ai_response = ai_response.to_string();
let ai_response = if let Some(resp) = ai_response.strip_prefix(" ").unwrap_or(&ai_response).strip_prefix("```csv\n") { resp.to_string() } else { ai_response };
let ai_response = if let Some(resp) = ai_response.strip_suffix(" ").unwrap_or(&ai_response).strip_suffix("```") { resp.to_string() } else { ai_response };
return Ok((ai_response, file_name_csv));
},
Err(error) => {println!("Error {error}"); return Err(error);}
};
})
.filter_map_ok(|(ai_repsonse, file_path_csv)| {
let mut reader = csv::ReaderBuilder::new()
.has_headers(true)
.delimiter(b';')
.from_reader(ai_repsonse.as_bytes());
let mut deserialized_iter = reader.deserialize::<CsvHeader>();
let mut columns = deserialized_iter.filter_ok(|value| {
value.PONTOS.is_some()
}) })
.map_ok(|value| { .filter_map(|value| {
let col = Column::new(value.CATEGORIA.into(), [value.PONTOS.unwrap() as u32]); if value.is_ok() {
col return Some(value.unwrap());
}) }
.filter_map(|value|{
if value.is_ok() {return Some(value.unwrap());}
None None
}) })
.collect_vec(); .map(|file_name_csv| {
println!("{:?}", file_name_csv.path());
if columns.len() != 9 { return None;} let file_contents = std::fs::read_to_string(file_name_csv.path())
.expect("Failed to read CSV file");
// Parse id talk from file_path let ollama_api_request = client
// filename example is: CC - Erraoander Quintana - 515578 - 20251020515578.csv .post(format!(
// id talk is the last information, so in the example is: 20251020515578 "http://{OLLAMA_SANITIZED_IP}:{OLLAMA_PORT}/api/generate"
let regex_filename = regex::Regex::new(r"(CC - )((\w+\s*)+) - (\d+) - (\d+).csv").unwrap(); ))
.body(
serde_json::json!({
"model": OLLAMA_AI_MODEL_DATA_SANITIZATION,
"prompt": format!("{prompt_data_sanitization} \n{file_contents}"),
"temperature": 0.0, // Get predictable and reproducible output
"stream": false,
})
.to_string(),
);
let filename = file_path_csv let result = ollama_api_request.timeout(Duration::from_secs(3600)).send();
.file_name()
.into_string()
.expect("Failed to convert file name as Rust &str");
let found_regex_groups_in_filename = regex_filename.captures(
filename.as_str()
).expect("Failed to do regex capture");
let user_name = found_regex_groups_in_filename.get(2).expect("Failed to get the id from regex maches"); match result {
let talk_id = found_regex_groups_in_filename.get(5).expect("Failed to get the id from regex maches"); Ok(response) => {
println!("Response: {:?}", response);
let response_json = response
.json::<serde_json::Value>()
.expect("Failed to deserialize response to JSON");
let ai_response = response_json["response"]
.as_str()
.expect("Failed to get AI response as string");
columns.push(Column::new("ID_TALK".into(), [talk_id.clone().as_str()])); let ai_response = ai_response.to_string();
let df = polars::frame::DataFrame::new(columns).expect("Failed to concatenate into a dataframe"); let ai_response = if let Some(resp) = ai_response
.strip_prefix(" ")
println!("{:?}", df); .unwrap_or(&ai_response)
// Create a dataframe with the evaluation columns plus the talk id .strip_prefix("```csv\n")
{
// return a tuple with the dataframe and the user name, so it can be correctly merged after resp.to_string()
return Some((user_name.as_str().to_owned(), df)); } else {
ai_response
};
let ai_response = if let Some(resp) = ai_response
.strip_suffix(" ")
.unwrap_or(&ai_response)
.strip_suffix("```")
{
resp.to_string()
} else {
ai_response
};
}) return Ok((ai_response, file_name_csv));
.for_each(|username| {}); }
// println!("{:?}", files_inside_folder_on_date); Err(error) => {
println!("Error {error}");
return Err(error);
}
};
})
.filter_map_ok(|(ai_repsonse, file_path_csv)| {
let mut reader = csv::ReaderBuilder::new()
.has_headers(true)
.delimiter(b';')
.from_reader(ai_repsonse.as_bytes());
return Some(()); let mut deserialized_iter = reader.deserialize::<CsvHeader>();
let mut columns = deserialized_iter
.filter_ok(|value| value.PONTOS.is_some())
.map_ok(|value| {
let col =
Column::new(value.CATEGORIA.into(), [value.PONTOS.unwrap() as u32]);
col
})
.filter_map(|value| {
if value.is_ok() {
return Some(value.unwrap());
}
None
})
.collect_vec();
if columns.len() != 9 {
return None;
}
// Parse id talk from file_path
// filename example is: CC - Erraoander Quintana - 515578 - 20251020515578.csv
// id talk is the last information, so in the example is: 20251020515578
let regex_filename =
regex::Regex::new(r"(CC - )((\w+\s*)+) - (\d+) - (\d+).csv").unwrap();
let filename = file_path_csv
.file_name()
.into_string()
.expect("Failed to convert file name as Rust &str");
let found_regex_groups_in_filename = regex_filename
.captures(filename.as_str())
.expect("Failed to do regex capture");
let user_name = found_regex_groups_in_filename
.get(2)
.expect("Failed to get the id from regex maches");
let talk_id = found_regex_groups_in_filename
.get(5)
.expect("Failed to get the id from regex maches");
columns.push(Column::new("ID_TALK".into(), [talk_id.clone().as_str()]));
let df = polars::frame::DataFrame::new(columns)
.expect("Failed to concatenate into a dataframe");
// return a tuple with the dataframe and the user name, so it can be correctly merged after
return Some((user_name.as_str().to_owned(), df));
})
.filter_map(|res| {
if res.is_ok() {
return Some(res.unwrap());
}
return None;
})
.into_group_map()
.into_iter()
.map(|(name, eval_dataframe_vec)| {
let groupped_df = eval_dataframe_vec
.iter()
.cloned()
.reduce(|acc, e| acc.vstack(&e).unwrap())
.expect("Failed to concatenate dataframes");
(name, groupped_df)
})
.into_group_map();
dbg!(&groupped_by_user_on_day);
return Some(groupped_by_user_on_day);
}) })
.collect_vec(); .filter_map(|res| {
if res.is_ok() {
return Some(res.unwrap());
}
return None;
})
.reduce(|mut acc, mut e| {
e.iter_mut().for_each(|(key, val)| {
if acc.contains_key(key) {
acc.get_mut(key)
.expect("Failed to obtain key that should already be present")
.append(val);
} else {
acc.insert(key.to_owned(), val.to_owned());
}
});
acc
})
.and_then(|groupped_hashmap_df| {
let result = groupped_hashmap_df
.iter()
.map(|(key, val)| {
let dfs = val
.iter()
.cloned()
.reduce(|acc, e| acc.vstack(&e).unwrap())
.expect("Failed to concatenate dataframes");
(key.clone(), dfs)
})
.collect_vec();
return Some(result);
});
// println!("{:?}", a); // Setup groupped folder
if !std::fs::exists(format!("./groupped/")).unwrap() {
std::fs::create_dir(format!("./groupped")).expect("Failed to create directory")
}
// Setup previous week folder
if !std::fs::exists(format!(
"./groupped/{first_day_of_last_week} - {last_day_of_last_week}"
))
.unwrap()
{
std::fs::create_dir(format!(
"./groupped/{first_day_of_last_week} - {last_day_of_last_week}"
))
.expect("Failed to create directory")
}
// Read CSV files inside folder match groupped_values {
Some(mut val) => {
val.iter_mut().for_each(|(agent, groupped_evaluations)| {
let mut save_file_csv = std::fs::File::create(format!(
"./groupped/{first_day_of_last_week} - {last_day_of_last_week}/{agent}"
))
.expect("Could not create csv file for saving");
CsvWriter::new(&mut save_file_csv)
.include_header(true)
.with_separator(b';')
.finish(groupped_evaluations)
.expect("Failed to save Groupped DataFrame to CSV File");
});
}
None => {}
}
// Use AI to sanitize the data zip_directory_util::zip_directory_util::zip_source_dir_to_dst_file(
std::path::Path::new(&format!(
"./groupped/{first_day_of_last_week} - {last_day_of_last_week}"
)),
std::path::Path::new(&format!(
"./groupped/{first_day_of_last_week} - {last_day_of_last_week}.zip"
)),
);
// Save into a hashmap, with the user name as key, the date, evaluation let recipients = "Wilson da Conceição Oliveira <wilson.oliveira@nova.net.br>, Isadora G. Moura de Moura <isadora.moura@nova.net.br>";
send_mail_util::send_mail_util::send_email(
// Final file should look like &format!(
/* "Relatório agrupado dos atendimentos semana {first_day_of_last_week} - {last_day_of_last_week}"
Header: Att1, att2, att3, ... ),
categoria1 &BOT_EMAIL,
categoria2 &BOT_EMAIL_PASSWORD,
categoria3 recipients,
... &format!("./groupped/{first_day_of_last_week} - {last_day_of_last_week}.zip"),
);
*/ }
}

View File

@@ -166,20 +166,34 @@ fn main() -> anyhow::Result<()> {
// Create a folder named with the day_before // Create a folder named with the day_before
if !std::fs::exists(format!("./evaluations/{formatted_day_before}")).unwrap() { if !std::fs::exists(format!("./evaluations/{formatted_day_before}")).unwrap() {
std::fs::create_dir(format!("./evaluations/{formatted_day_before}")).expect("Failed to create directory") std::fs::create_dir(format!("./evaluations/{formatted_day_before}"))
.expect("Failed to create directory")
} }
// Create the response time folder // Create the response time folder
if !std::fs::exists(format!("./evaluations/{formatted_day_before}/response_time.csv")).unwrap() { if !std::fs::exists(format!(
let mut response_time_file = std::fs::File::create_new(format!("./evaluations/{formatted_day_before}/response_time.csv")).expect("Failed to response_time.csv"); "./evaluations/{formatted_day_before}/response_time.csv"
))
.unwrap()
{
let mut response_time_file = std::fs::File::create_new(format!(
"./evaluations/{formatted_day_before}/response_time.csv"
))
.expect("Failed to response_time.csv");
} }
// Read system prompt // Read system prompt
let prompt = std::fs::read_to_string("PROMPT.txt").unwrap(); let prompt = std::fs::read_to_string("PROMPT.txt").unwrap();
let filter_file_contents = std::fs::read_to_string("FILTER.txt").unwrap_or(String::new()); let filter_file_contents = std::fs::read_to_string("FILTER.txt").unwrap_or(String::new());
let filter_keywords = filter_file_contents.split("\n").collect::<Vec<&str>>(); let filter_keywords = filter_file_contents.split("\n").collect::<Vec<&str>>();
let talks_array = get_piperun_chats_on_date(&PIPERUN_API_URL, &client, &access_token, formatted_day_before_at_midnight, formatted_day_before_at_23_59_59); let talks_array = get_piperun_chats_on_date(
&PIPERUN_API_URL,
&client,
&access_token,
formatted_day_before_at_midnight,
formatted_day_before_at_23_59_59,
);
println!("Number of consolidated talks: {}", talks_array.len()); println!("Number of consolidated talks: {}", talks_array.len());
@@ -215,41 +229,82 @@ fn main() -> anyhow::Result<()> {
let talk_id_get_result = talk_id_get_request.send(); let talk_id_get_result = talk_id_get_request.send();
return talk_id_get_result; return talk_id_get_result;
}).filter_map_ok(|result| { })
let json = result.json::<serde_json::Value>().expect("Failed to deserialize response to JSON").to_owned(); .filter_map_ok(|result| {
let json = result
.json::<serde_json::Value>()
.expect("Failed to deserialize response to JSON")
.to_owned();
let talk_histories = &json["talk_histories"]; let talk_histories = &json["talk_histories"];
let data = &talk_histories["data"]; let data = &talk_histories["data"];
// Filter chats that have very few messages // Filter chats that have very few messages
let talk_lenght = talk_histories.as_array().expect("Wrong message type received from talk histories").len(); let talk_lenght = talk_histories
if talk_lenght < MINIMUM_NUMBER_OF_MESSAGES_TO_EVALUATE {return None;} .as_array()
.expect("Wrong message type received from talk histories")
.len();
if talk_lenght < MINIMUM_NUMBER_OF_MESSAGES_TO_EVALUATE {
return None;
}
// Filter chats that have less that specified ammount of talks with support agent form the last queue transfer // Filter chats that have less that specified ammount of talks with support agent form the last queue transfer
let found = talk_histories.as_array().expect("Wrong message type received from talk histories").into_iter().enumerate().find(|(pos, message_object)|{ let found = talk_histories
let message = message_object["message"].as_str().expect("Failed to decode message as string"); .as_array()
let found = message.find("Atendimento transferido para a fila [NovaNet -> Atendimento -> Suporte]"); .expect("Wrong message type received from talk histories")
found.is_some() .into_iter()
}); .enumerate()
.find(|(pos, message_object)| {
let message = message_object["message"]
.as_str()
.expect("Failed to decode message as string");
let found = message.find(
"Atendimento transferido para a fila [NovaNet -> Atendimento -> Suporte]",
);
found.is_some()
});
match found { match found {
None => {return None;}, None => {
return None;
}
Some(pos) => { Some(pos) => {
let pos_found = pos.0; let pos_found = pos.0;
if pos_found < MINIMUM_NUMBER_OF_MESSAGES_WITH_AGENT_TO_EVALUATE {return None;} if pos_found < MINIMUM_NUMBER_OF_MESSAGES_WITH_AGENT_TO_EVALUATE {
return None;
}
} }
}; };
// Filter Bot finished chats // Filter Bot finished chats
if json["agent"]["user"]["name"].as_str().unwrap_or("unknown_user") == "PipeBot" {return None;} if json["agent"]["user"]["name"]
.as_str()
.unwrap_or("unknown_user")
== "PipeBot"
{
return None;
}
// Apply keyword based filtering // Apply keyword based filtering
let filter_keywords_found = talk_histories.as_array().expect("Wrong message type received from talk histories").into_iter().any(|message_object|{ let filter_keywords_found = talk_histories
let message = message_object["message"].as_str().expect("Failed to decode message as string"); .as_array()
let found = filter_keywords.iter().any(|keyword|{message.to_uppercase().find(&keyword.to_uppercase()).is_some()}); .expect("Wrong message type received from talk histories")
found .into_iter()
}); .any(|message_object| {
let message = message_object["message"]
.as_str()
.expect("Failed to decode message as string");
let found = filter_keywords.iter().any(|keyword| {
message
.to_uppercase()
.find(&keyword.to_uppercase())
.is_some()
});
found
});
if filter_keywords_found {return None;} if filter_keywords_found {
return None;
}
return Some(json); return Some(json);
}); });
@@ -264,80 +319,122 @@ fn main() -> anyhow::Result<()> {
// dbg!(&talk_histories); // dbg!(&talk_histories);
// talk_histories.as_array().unwrap().into_iter().enumerate().for_each(|(pos, message_obj)|{println!("{}: {}", pos, message_obj["message"])}); // talk_histories.as_array().unwrap().into_iter().enumerate().for_each(|(pos, message_obj)|{println!("{}: {}", pos, message_obj["message"])});
// find the bot transfer message // find the bot transfer message
let bot_transfer_message = talk_histories let bot_transfer_message = talk_histories
.as_array().expect("Wrong message type received from talk histories").into_iter() .as_array()
.enumerate() .expect("Wrong message type received from talk histories")
.filter(|(pos, message_object)| { .into_iter()
let user_name = message_object["user"]["name"].as_str().expect("Failed to decode message as string"); .enumerate()
user_name == "PipeBot".to_string() .filter(|(pos, message_object)| {
}).find(|(pos, message_object)|{ let user_name = message_object["user"]["name"]
let message = message_object["message"].as_str().expect("Failed to decode message as string"); .as_str()
// let found = message.find("Atendimento transferido para a fila [NovaNet -> Atendimento -> Suporte]"); .expect("Failed to decode message as string");
let found = message.find("Atendimento entregue da fila de espera para o agente"); user_name == "PipeBot".to_string()
found.is_some() })
}); .find(|(pos, message_object)| {
let message = message_object["message"]
.as_str()
.expect("Failed to decode message as string");
// let found = message.find("Atendimento transferido para a fila [NovaNet -> Atendimento -> Suporte]");
let found =
message.find("Atendimento entregue da fila de espera para o agente");
found.is_some()
});
// Find first agent message sent after the last bot message // Find first agent message sent after the last bot message
let (pos, transfer_message) = bot_transfer_message.expect("Failed to get the transfer bot message position"); let (pos, transfer_message) =
bot_transfer_message.expect("Failed to get the transfer bot message position");
let msg = talk_histories.as_array().expect("Wrong message type received from talk histories").into_iter()
.take(pos) let msg = talk_histories
.rev() .as_array()
.filter(|message| { .expect("Wrong message type received from talk histories")
message["type"] == "out".to_string() && message["user"]["name"] != "PipeBot".to_string() .into_iter()
}) .take(pos)
.take(1).collect_vec(); .rev()
.filter(|message| {
message["type"] == "out".to_string()
&& message["user"]["name"] != "PipeBot".to_string()
})
.take(1)
.collect_vec();
let agent_first_message = msg[0]; let agent_first_message = msg[0];
// Calculate time difference between bot message and agent message // Calculate time difference between bot message and agent message
let date_user_message_sent = agent_first_message["sent_at"].as_str().unwrap(); let date_user_message_sent = agent_first_message["sent_at"].as_str().unwrap();
let format = "%Y-%m-%d %H:%M:%S"; let format = "%Y-%m-%d %H:%M:%S";
let date_user_message_sent_parsed = match chrono::NaiveDateTime::parse_from_str(date_user_message_sent, format) { let date_user_message_sent_parsed =
Ok(dt) => dt, match chrono::NaiveDateTime::parse_from_str(date_user_message_sent, format) {
Err(e) => {println!("Error parsing DateTime: {}", e); panic!("Failed parsing date")}, Ok(dt) => dt,
Err(e) => {
println!("Error parsing DateTime: {}", e);
panic!("Failed parsing date")
}
};
let date_transfer_message_sent_parsed = match chrono::NaiveDateTime::parse_from_str(
transfer_message["sent_at"].as_str().unwrap(),
format,
) {
Ok(dt) => dt,
Err(e) => {
println!("Error parsing DateTime: {}", e);
panic!("Failed parsing date")
}
}; };
let date_transfer_message_sent_parsed = match chrono::NaiveDateTime::parse_from_str(transfer_message["sent_at"].as_str().unwrap(), format) { let response_time = (date_user_message_sent_parsed - date_transfer_message_sent_parsed)
Ok(dt) => dt, .as_seconds_f32();
Err(e) => {println!("Error parsing DateTime: {}", e); panic!("Failed parsing date")}, let name = agent_first_message["user"]["name"]
}; .as_str()
.unwrap()
let response_time = (date_user_message_sent_parsed - date_transfer_message_sent_parsed).as_seconds_f32(); .to_owned();
let name = agent_first_message["user"]["name"].as_str().unwrap().to_owned();
let id = json["tracking_number"].as_str().unwrap_or("").to_owned(); let id = json["tracking_number"].as_str().unwrap_or("").to_owned();
let bot_transfer_date = date_transfer_message_sent_parsed.to_owned(); let bot_transfer_date = date_transfer_message_sent_parsed.to_owned();
let user_response_date = date_user_message_sent.to_owned(); let user_response_date = date_user_message_sent.to_owned();
println!("response_time: {}s", (date_user_message_sent_parsed - date_transfer_message_sent_parsed).as_seconds_f32()); println!(
"response_time: {}s",
(date_user_message_sent_parsed - date_transfer_message_sent_parsed)
.as_seconds_f32()
);
format!("{};{};{};{};{}", name, id, response_time, bot_transfer_date, user_response_date) format!(
}).reduce(|acc, e|{format!("{}\n{}",acc,e)}) "{};{};{};{};{}",
name, id, response_time, bot_transfer_date, user_response_date
)
})
.reduce(|acc, e| format!("{}\n{}", acc, e))
.unwrap_or("".to_string()); .unwrap_or("".to_string());
// return Ok(()); // return Ok(());
// Open file and write to it // Open file and write to it
let header = "NOME;ID_TALK;TEMPO DE RESPOSTA;TRANFERENCIA PELO BOT;PRIMEIRA RESPOSTA DO AGENTE"; let header = "NOME;ID_TALK;TEMPO DE RESPOSTA;TRANFERENCIA PELO BOT;PRIMEIRA RESPOSTA DO AGENTE";
let mut response_time_file = std::fs::OpenOptions::new().write(true).open(format!("./evaluations/{formatted_day_before}/response_time.csv")).expect("Failed to open response time file for write"); let mut response_time_file = std::fs::OpenOptions::new()
response_time_file.write_all(format!("{header}\n{response_time}").as_bytes()).expect("Failed to write header to file"); .write(true)
.open(format!(
"./evaluations/{formatted_day_before}/response_time.csv"
))
.expect("Failed to open response time file for write");
response_time_file
.write_all(format!("{header}\n{response_time}").as_bytes())
.expect("Failed to write header to file");
filtered_chats filtered_chats.clone().skip(0).take(10).for_each(|result| {
.clone() let json = result.unwrap();
.skip(0) let talk_histories = &json["talk_histories"];
.take(10) let data = &talk_histories["data"];
.for_each(|result| {
let json = result.unwrap();
let talk_histories = &json["talk_histories"];
let data = &talk_histories["data"];
let talk = talk_histories.as_array().expect("Wrong message type received from talk histories").iter().rev().map(|message_object|
{
let new_json_filtered = format!( let talk = talk_histories
"{{ .as_array()
.expect("Wrong message type received from talk histories")
.iter()
.rev()
.map(|message_object| {
let new_json_filtered = format!(
"{{
message: {}, message: {},
sent_at: {}, sent_at: {},
type: {}, type: {},
@@ -350,44 +447,71 @@ fn main() -> anyhow::Result<()> {
); );
// println!("{}", new_json_filtered); // println!("{}", new_json_filtered);
new_json_filtered new_json_filtered
}).reduce(|acc, e| {format!("{acc}\n{e}")}).expect("Error extracting talk"); })
.reduce(|acc, e| format!("{acc}\n{e}"))
.expect("Error extracting talk");
println!("{prompt}\n {talk}"); println!("{prompt}\n {talk}");
let ollama_api_request = client.post(format!("http://{OLLAMA_SANITIZED_IP}:{OLLAMA_PORT}/api/generate")) let ollama_api_request = client
.body( .post(format!(
serde_json::json!({ "http://{OLLAMA_SANITIZED_IP}:{OLLAMA_PORT}/api/generate"
"model": OLLAMA_AI_MODEL, ))
"prompt": format!("{prompt} \n{talk}"), .body(
// "options": serde_json::json!({"temperature": 0.1}), serde_json::json!({
"stream": false, "model": OLLAMA_AI_MODEL,
}).to_string() "prompt": format!("{prompt} \n{talk}"),
); // "options": serde_json::json!({"temperature": 0.1}),
"stream": false,
})
.to_string(),
);
let result = ollama_api_request.timeout(Duration::from_secs(3600)).send(); let result = ollama_api_request.timeout(Duration::from_secs(3600)).send();
match result { match result {
Ok(response) => {println!("Response: {:?}", response); Ok(response) => {
let response_json = response.json::<serde_json::Value>().expect("Failed to deserialize response to JSON"); println!("Response: {:?}", response);
println!("{}", response_json); let response_json = response
let ai_response = response_json["response"] .json::<serde_json::Value>()
.as_str() .expect("Failed to deserialize response to JSON");
.expect("Failed to get AI response as string"); println!("{}", response_json);
println!("AI Response: {}", ai_response); let ai_response = response_json["response"]
.as_str()
.expect("Failed to get AI response as string");
println!("AI Response: {}", ai_response);
let csv_response = ai_response.replace("```csv\n", "").replace("```", ""); let csv_response = ai_response.replace("```csv\n", "").replace("```", "");
// Save the CSV response to a file // Save the CSV response to a file
let user_name = &json["agent"]["user"]["name"].as_str().unwrap_or("unknown_user"); let user_name = &json["agent"]["user"]["name"]
let talk_id = &json["id"].as_u64().unwrap_or(0); .as_str()
let tracking_number = &json["tracking_number"].as_str().unwrap_or(""); .unwrap_or("unknown_user");
std::fs::write(format!("./evaluations/{}/{} - {} - {}.csv", formatted_day_before, user_name, talk_id, tracking_number), csv_response).expect("Unable to write file"); let talk_id = &json["id"].as_u64().unwrap_or(0);
std::fs::write(format!("./evaluations/{}/{} - {} - {} - prompt.txt", formatted_day_before, user_name, talk_id, tracking_number), format!("{prompt} \n{talk}")).expect("Unable to write file"); let tracking_number = &json["tracking_number"].as_str().unwrap_or("");
}, std::fs::write(
Err(error) => {println!("Error {error}");} format!(
}; "./evaluations/{}/{} - {} - {}.csv",
}); formatted_day_before, user_name, talk_id, tracking_number
),
csv_response,
)
.expect("Unable to write file");
std::fs::write(
format!(
"./evaluations/{}/{} - {} - {} - prompt.txt",
formatted_day_before, user_name, talk_id, tracking_number
),
format!("{prompt} \n{talk}"),
)
.expect("Unable to write file");
}
Err(error) => {
println!("Error {error}");
}
};
});
// Compress folder into zip // Compress folder into zip
let source_dir_str = format!("./evaluations/{formatted_day_before}"); let source_dir_str = format!("./evaluations/{formatted_day_before}");
@@ -410,7 +534,13 @@ fn main() -> anyhow::Result<()> {
return Ok(()); return Ok(());
} }
fn get_piperun_chats_on_date(PIPERUN_API_URL: &String, client: &reqwest::blocking::Client, access_token: &String, formatted_day_before_at_midnight: String, formatted_day_before_at_23_59_59: String) -> Vec<serde_json::Value> { fn get_piperun_chats_on_date(
PIPERUN_API_URL: &String,
client: &reqwest::blocking::Client,
access_token: &String,
formatted_day_before_at_midnight: String,
formatted_day_before_at_23_59_59: String,
) -> Vec<serde_json::Value> {
let start_of_talk_code: String = "talk_start".to_string(); let start_of_talk_code: String = "talk_start".to_string();
let support_queue_id: String = "13".to_string(); let support_queue_id: String = "13".to_string();
@@ -455,57 +585,70 @@ fn get_piperun_chats_on_date(PIPERUN_API_URL: &String, client: &reqwest::blockin
} }
}; };
let mut aggregated_talks = json_response["data"].as_array().expect("Failed to parse messages as array").to_owned(); let mut aggregated_talks = json_response["data"]
.as_array()
.expect("Failed to parse messages as array")
.to_owned();
let current_page = json_response["current_page"].as_i64().expect("Failed to obtain current page number"); let current_page = json_response["current_page"]
let last_page = json_response["last_page"].as_i64().expect("Failed to obtain current page number"); .as_i64()
.expect("Failed to obtain current page number");
let last_page = json_response["last_page"]
.as_i64()
.expect("Failed to obtain current page number");
let mut all_other_messages = (current_page..last_page).into_iter() let mut all_other_messages = (current_page..last_page)
.map(|page| { .into_iter()
let page_to_request = page + 1; .map(|page| {
let talks_request = client let page_to_request = page + 1;
.get(format!("https://{}/api/v2/reports/talks", PIPERUN_API_URL)) let talks_request = client
.bearer_auth(access_token) .get(format!("https://{}/api/v2/reports/talks", PIPERUN_API_URL))
.header("Content-Type", "application/json") .bearer_auth(access_token)
.header("Accept", "application/json") .header("Content-Type", "application/json")
.query(&[ .header("Accept", "application/json")
("page", page_to_request.to_string()), .query(&[
("perPage", per_page.clone()), ("page", page_to_request.to_string()),
("report_type", report_type.clone()), ("perPage", per_page.clone()),
("start_date", formatted_day_before_at_midnight.clone()), ("report_type", report_type.clone()),
("end_date", formatted_day_before_at_23_59_59.clone()), ("start_date", formatted_day_before_at_midnight.clone()),
("date_range_type", start_of_talk_code.clone()), ("end_date", formatted_day_before_at_23_59_59.clone()),
("queue_id[]", support_queue_id.clone()), ("date_range_type", start_of_talk_code.clone()),
]); ("queue_id[]", support_queue_id.clone()),
]);
println!("Sending request for consolidated talks... {talks_request:?}"); println!("Sending request for consolidated talks... {talks_request:?}");
let talks_response = talks_request.send(); let talks_response = talks_request.send();
let json_response = match talks_response { let json_response = match talks_response {
Ok(resp) => { Ok(resp) => {
if resp.status().is_success() { if resp.status().is_success() {
let json: serde_json::Value = resp.json().unwrap(); let json: serde_json::Value = resp.json().unwrap();
json json
} else { } else {
eprintln!("Failed to get consolidated talks: {}", resp.status()); eprintln!("Failed to get consolidated talks: {}", resp.status());
let json: serde_json::Value = resp.json().unwrap(); let json: serde_json::Value = resp.json().unwrap();
eprintln!("Response body: {:?}", json); eprintln!("Response body: {:?}", json);
panic!("Failed to retrieve consolidated talks from Piperun API"); panic!("Failed to retrieve consolidated talks from Piperun API");
} }
} }
Err(e) => { Err(e) => {
eprintln!("Error: {e}"); eprintln!("Error: {e}");
panic!("Failed to send the request for talks to PipeRUN API"); panic!("Failed to send the request for talks to PipeRUN API");
} }
}; };
let aggregated_talks = json_response["data"].as_array().expect("Failed to parse messages as array").to_owned(); let aggregated_talks = json_response["data"]
.as_array()
return aggregated_talks; .expect("Failed to parse messages as array")
}) .to_owned();
.reduce(|mut this, mut acc| {acc.append(&mut this); acc})
.expect("Failed to concatenate all vectors of messages");
return aggregated_talks;
})
.reduce(|mut this, mut acc| {
acc.append(&mut this);
acc
})
.expect("Failed to concatenate all vectors of messages");
aggregated_talks.append(&mut all_other_messages); aggregated_talks.append(&mut all_other_messages);
aggregated_talks aggregated_talks
@@ -596,7 +739,7 @@ fn send_email(
let filebody = std::fs::read(zip_file_name).unwrap(); let filebody = std::fs::read(zip_file_name).unwrap();
let content_type = ContentType::parse("application/zip").unwrap(); let content_type = ContentType::parse("application/zip").unwrap();
let attachment = Attachment::new(zip_file_name.to_string()).body(filebody, content_type); let attachment = Attachment::new(zip_file_name.to_string()).body(filebody, content_type);
let mailboxes : Mailboxes = to.parse().unwrap(); let mailboxes: Mailboxes = to.parse().unwrap();
let to_header: message::header::To = mailboxes.into(); let to_header: message::header::To = mailboxes.into();
let email = Message::builder() let email = Message::builder()

46
src/send_mail_util.rs Normal file
View File

@@ -0,0 +1,46 @@
pub mod send_mail_util {
    use lettre::{
        Message, SmtpTransport, Transport,
        message::{self, Attachment, Mailboxes, MultiPart, SinglePart, header::ContentType},
    };

    /// Sends an email with a ZIP attachment through the `mail.nova.net.br`
    /// SMTPS relay, authenticating as `bot_email`.
    ///
    /// * `subject_of_email` — subject line of the message.
    /// * `bot_email` / `bot_email_password` — credentials of the sending account.
    /// * `to` — recipient mailbox list (parsed as a `Mailboxes` string, so a
    ///   comma-separated list is accepted).
    /// * `zip_file_name` — path of the ZIP file to attach; the same string is
    ///   used as the attachment's display name.
    ///
    /// # Panics
    /// Panics if the attachment cannot be read from disk, any address fails to
    /// parse, the message cannot be built, or the SMTP relay rejects the
    /// connection or the message.
    pub fn send_email(
        subject_of_email: &str,
        bot_email: &str,
        bot_email_password: &str,
        to: &str,
        zip_file_name: &str,
    ) {
        let filebody =
            std::fs::read(zip_file_name).expect("Failed to read the ZIP attachment from disk");
        let content_type = ContentType::parse("application/zip").unwrap();
        let attachment = Attachment::new(zip_file_name.to_string()).body(filebody, content_type);

        let mailboxes: Mailboxes = to.parse().expect("Invalid recipient mailbox list");
        let to_header: message::header::To = mailboxes.into();

        let email = Message::builder()
            .from(format!("PipeRUN bot <{bot_email}>").parse().unwrap())
            .reply_to(format!("PipeRUN bot <{bot_email}>").parse().unwrap())
            .mailbox(to_header)
            // `subject` accepts any `Into<String>`; re-formatting the &str is redundant.
            .subject(subject_of_email)
            .multipart(
                MultiPart::mixed()
                    .singlepart(
                        SinglePart::builder()
                            .header(ContentType::TEXT_PLAIN)
                            .body(String::from("Avaliacao dos atendimentos")),
                    )
                    .singlepart(attachment),
            )
            .expect("Failed to build the email message");

        // Create the SMTPS transport pointed at the relay host.
        let sender = SmtpTransport::from_url(&format!(
            "smtps://{bot_email}:{bot_email_password}@mail.nova.net.br"
        ))
        .expect("Invalid SMTP relay URL")
        .build();

        // Send the email via the remote relay.
        sender.send(&email).expect("Failed to send the email through the relay");
    }
}

70
src/zip_directory_util.rs Normal file
View File

@@ -0,0 +1,70 @@
pub mod zip_directory_util {
    use std::fs::File;
    use std::io::prelude::*;
    use std::path::Path;

    use walkdir::{DirEntry, WalkDir};
    use zip::write::SimpleFileOptions;

    /// Writes every entry yielded by `it` into a ZIP archive on `writer`.
    ///
    /// Entry names are made relative to `prefix` so the archive does not embed
    /// absolute paths. Directories are added explicitly because some unzip
    /// tools do not create intermediate directories on their own.
    ///
    /// # Panics
    /// Panics on any I/O failure, on an entry that is not under `prefix`, or
    /// on a relative path that is not valid UTF-8.
    fn zip_dir<T>(
        it: &mut dyn Iterator<Item = DirEntry>,
        prefix: &Path,
        writer: T,
        method: zip::CompressionMethod,
    ) where
        T: Write + Seek,
    {
        let mut zip = zip::ZipWriter::new(writer);
        let options = SimpleFileOptions::default()
            .compression_method(method)
            .unix_permissions(0o755);

        // Reused across files to avoid one allocation per entry.
        let mut buffer = Vec::new();
        for entry in it {
            let path = entry.path();
            // `prefix` is already a `&Path`; no `Path::new` re-wrap needed.
            let name = path
                .strip_prefix(prefix)
                .expect("Walked entry is not under the source directory");
            let path_as_string = name
                .to_str()
                .map(str::to_owned)
                .expect("Failed to parse path");

            // Write file or directory explicitly:
            // some unzip tools handle bare file paths correctly, some do not.
            if path.is_file() {
                println!("adding file {path:?} as {name:?} ...");
                zip.start_file(path_as_string, options)
                    .expect("Failed to add file");
                let mut f = File::open(path).expect("Failed to open file for archiving");
                f.read_to_end(&mut buffer).expect("Failed to read file");
                zip.write_all(&buffer).expect("Failed to write file");
                buffer.clear();
            } else if !name.as_os_str().is_empty() {
                // Skip the archive root itself: adding "" triggers a path spec
                // warning and a mapname conversion error on unzip.
                println!("adding dir {path_as_string:?} as {name:?} ...");
                zip.add_directory(path_as_string, options)
                    .expect("Failed to add directory");
            }
        }
        zip.finish().expect("Failed to ZIP");
    }

    /// Recursively archives `src_dir` into the ZIP file `dst_file`, using the
    /// `Stored` method (no compression).
    ///
    /// # Panics
    /// Panics if `src_dir` is not a directory, `dst_file` cannot be created,
    /// or any entry fails to be archived (see [`zip_dir`]).
    pub fn zip_source_dir_to_dst_file(src_dir: &Path, dst_file: &Path) {
        // `src_dir`/`dst_file` are already `&Path`; re-wrapping was redundant.
        if !src_dir.is_dir() {
            panic!("src_dir must be a directory");
        }

        let file = File::create(dst_file).expect("Failed to create destination ZIP file");
        let entries = WalkDir::new(src_dir).into_iter();
        // Entries that cannot be read (permissions, races) are silently skipped.
        zip_dir(
            &mut entries.filter_map(|e| e.ok()),
            src_dir,
            file,
            zip::CompressionMethod::Stored,
        );
    }
}