// fin_piperun-bot/src/groupped_repport_monthly.rs
use std::fmt::Debug;
use chrono::{Datelike, NaiveDate};
use itertools::Itertools;
use polars::prelude::*;
use reqwest;
use std::env;
use std::time::Duration;
use csv;
pub mod send_mail_util;
pub mod zip_directory_util;
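// One row of a sanitized evaluation CSV: a category name and its optional score.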
#[derive(Debug, serde::Deserialize)]
struct CsvHeader {
CATEGORIA: String,
PONTOS: Option<u8>,
}
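// Per-criterion layout of an evaluation, keyed by talk id (kept as documentation of the
// expected columns; not deserialized in this module).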
#[derive(Debug, serde::Deserialize)]
struct CsvEvaluation {
APRESENTAÇÃO: u8,
CONFIRMAÇÃO_DE_EMAIL: u8,
CONFIRMAÇÃO_DE_TELEFONE: u8,
PROTOCOLO: u8,
USO_DO_PORTUGUÊS: u8,
PACIÊNCIA_E_EDUCAÇÃO: u8,
DISPONIBILIDADE: u8,
ESCLARECIMENTO: u8,
ID_TALK: String,
}
// --- ADDITION: GROUPING OF response_time.csv ---
#[derive(Debug, serde::Deserialize)]
struct ResponseTimeRecord {
NOME: String,
ID_TALK: String,
#[serde(rename = "TEMPO DE RESPOSTA")]
TEMPO_DE_RESPOSTA: u32,
#[serde(rename = "TRANFERENCIA PELO BOT")]
TRANFERENCIA_PELO_BOT: String,
#[serde(rename = "PRIMEIRA RESPOSTA DO AGENTE")]
PRIMEIRA_RESPOSTA_DO_AGENTE: String,
}
// --- END OF ADDITION ---
fn main() {
match dotenv::dotenv() {
Ok(_) => println!("Environment variables loaded from .env file"),
Err(_) => eprintln!("Failed to load .env file, using defaults"),
}
// Read environment variables
let OLLAMA_URL = env::var("OLLAMA_URL").unwrap_or("localhost".to_string());
let OLLAMA_PORT = env::var("OLLAMA_PORT")
.unwrap_or("11432".to_string())
.parse::<u16>()
.unwrap_or(11432);
let OLLAMA_AI_MODEL_DATA_SANITIZATION = env::var("OLLAMA_AI_MODEL_DATA_SANITIZATION")
.expect("Missing environment variable OLLAMA_AI_MODEL_DATA_SANITIZATION");
let BOT_EMAIL = env::var("BOT_EMAIL").expect("BOT_EMAIL has not been set!");
let BOT_EMAIL_PASSWORD =
env::var("BOT_EMAIL_PASSWORD").expect("BOT_EMAIL_PASSWORD has not been set!");
let ip_address = ipaddress::IPAddress::parse(OLLAMA_URL.to_string());
let OLLAMA_SANITIZED_IP = match ip_address {
Ok(ip) => {
if ip.is_ipv4() {
OLLAMA_URL.clone()
} else {
format!("[{}]", OLLAMA_URL.clone())
}
}
Err(_) => OLLAMA_URL.clone(),
};
// Get the current day in the format YYYY-MM-DD
let current_date = chrono::Local::now();
let formatted_date = current_date.format("%Y-%m-%d").to_string();
let current_day_of_last_month = current_date
.checked_sub_months(chrono::Months::new(1))
.expect("Failed to subtract one month");
let first_day_of_last_month = NaiveDate::from_ymd_opt(
current_day_of_last_month.year(),
current_day_of_last_month.month(),
1,
)
.expect("Failed to obtain date");
let last_day_of_last_month = NaiveDate::from_ymd_opt(
current_day_of_last_month.year(),
current_day_of_last_month.month(),
current_day_of_last_month.num_days_in_month() as u32,
)
.expect("Failed to obtain date");
let previous_month_folder_names = std::fs::read_dir(std::path::Path::new("./evaluations"))
.expect("Failed to read directory ./evaluations")
.filter_map_ok(|entry| {
if entry.metadata().unwrap().is_dir() {
Some(entry.file_name())
} else {
None
}
})
.filter_map_ok(|entry_string_name| {
let regex_match_date =
regex::Regex::new(r"(\d{4}-\d{2}-\d{2})").expect("Failed to build regex");
let filename = entry_string_name.to_str().unwrap();
let matches_find = regex_match_date.find(filename);
match matches_find {
Some(found) => {
let date = chrono::NaiveDate::parse_from_str(found.as_str(), "%Y-%m-%d");
return Some((date.unwrap(), entry_string_name));
}
None => {
return None;
}
};
})
.filter_map_ok(|(folder_name_date, directory_string)| {
if folder_name_date.year() == first_day_of_last_month.year()
&& folder_name_date.month() == first_day_of_last_month.month()
{
Some(directory_string)
} else {
None
}
})
.filter_map(Result::ok)
.sorted()
.collect_vec();
println!("{:?}", previous_month_folder_names);
let prompt_data_sanitization = std::fs::read_to_string("./PROMPT_DATA_SANITIZATION.txt")
.expect("Failed to read PROMPT_DATA_SANITIZATION.txt");
let client = reqwest::blocking::Client::new();
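// Main pipeline: for every daily folder of the previous month, sanitize each evaluation CSV
// through the Ollama model, parse the scores into one single-row DataFrame per talk, and
// group the resulting DataFrames by agent.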
let groupped_values = previous_month_folder_names
.iter()
.map(|folder_name| {
let folder_base_path = std::path::Path::new("./evaluations");
let folder_date_path = folder_base_path.join(folder_name);
std::fs::read_dir(folder_date_path)
})
.filter_map_ok(|files_inside_folder_on_date| {
let groupped_by_user_on_day = files_inside_folder_on_date
.filter_ok(|entry| {
let entry_file_name_as_str = entry
.file_name()
.into_string()
.expect("Failed to get filename as a String");
entry_file_name_as_str.ends_with(".csv")
&& !entry_file_name_as_str.contains("response_time.csv")
})
.filter_map(Result::ok)
.map(|file_name_csv| {
println!("{:?}", file_name_csv.path());
let file_contents = std::fs::read_to_string(file_name_csv.path())
.expect("Failed to read CSV file");
let ollama_api_request = client
.post(format!(
"http://{OLLAMA_SANITIZED_IP}:{OLLAMA_PORT}/api/generate"
))
.body(
serde_json::json!({
"model": OLLAMA_AI_MODEL_DATA_SANITIZATION,
"prompt": format!("{prompt_data_sanitization} \n{file_contents}"),
"temperature": 0.0, // Get predictable and reproducible output
"stream": false,
})
.to_string(),
);
let result = ollama_api_request.timeout(Duration::from_secs(3600)).send();
match result {
Ok(response) => {
println!("Response: {:?}", response);
let response_json = response
.json::<serde_json::Value>()
.expect("Failed to deserialize response to JSON");
let ai_response = response_json["response"]
.as_str()
.expect("Failed to get AI response as string");
let ai_response = ai_response.to_string();
// Strip the Markdown code fence the model sometimes wraps around the CSV output.
let ai_response = ai_response.trim();
let ai_response = ai_response.strip_prefix("```csv\n").unwrap_or(ai_response);
let ai_response = ai_response
.strip_suffix("```")
.unwrap_or(ai_response)
.trim_end()
.to_string();
return Ok((ai_response, file_name_csv));
}
Err(error) => {
println!("Error {error}");
return Err(error);
}
};
})
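// Parse the sanitized CSV, keep only rows that carry a score, and turn each talk into a
// single-row DataFrame tagged with its excellence percentage and talk id.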
.filter_map_ok(|(ai_response, file_path_csv)| {
let mut reader = csv::ReaderBuilder::new()
.has_headers(true)
.delimiter(b';')
.from_reader(ai_response.as_bytes());
let deserialized_iter = reader.deserialize::<CsvHeader>();
let mut columns = deserialized_iter
.filter_ok(|value| value.PONTOS.is_some())
.map_ok(|value| {
let col =
Column::new(value.CATEGORIA.into(), [value.PONTOS.unwrap() as u32]);
col
})
.filter_map(Result::ok)
.collect_vec();
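// A valid evaluation must contain exactly the 8 scored criteria; skip anything else.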
if columns.len() != 8 {
return None;
}
// Parse the user name and talk id from the file name.
// Example: "FIN - Erraoander Quintana - 515578 - 20251020515578.csv";
// the talk id is the last number, 20251020515578 in this example.
let regex_filename =
regex::Regex::new(r"FIN - (.+?) - (\d+) - (\d+)\.csv").unwrap();
let filename = file_path_csv
.file_name()
.into_string()
.expect("Failed to convert file name as Rust &str");
let found_regex_groups_in_filename = regex_filename
.captures(filename.as_str())
.expect("Failed to do regex capture");
let user_name = found_regex_groups_in_filename
.get(1)
.expect("Failed to get the user name from regex matches");
let talk_id = found_regex_groups_in_filename
.get(3)
.expect("Failed to get the talk id from regex matches");
let excelence_percentual = columns
.iter()
.map(|col| col.as_materialized_series().u32().unwrap().sum().unwrap())
.sum::<u32>() as f32
/ columns.len() as f32
* 100.0;
columns.push(Column::new(
"PERCENTUAL DE EXELENCIA".into(),
[format!("{excelence_percentual:.2}")],
));
columns.push(Column::new("ID_TALK".into(), [talk_id.clone().as_str()]));
let df = polars::frame::DataFrame::new(columns)
.expect("Failed to concatenate into a dataframe");
// return a tuple with the dataframe and the user name, so it can be correctly merged after
return Some((user_name.as_str().to_owned(), df));
})
.filter_map(Result::ok)
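// Group the (agent, DataFrame) pairs produced for this day's folder by agent name.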
.into_group_map()
.into_iter()
.map(|(name, eval_dataframe_vec)| {
let groupped_df = eval_dataframe_vec
.iter()
.cloned()
.reduce(|acc, e| acc.vstack(&e).unwrap())
.expect("Failed to concatenate dataframes");
(name, groupped_df)
})
.into_group_map();
dbg!(&groupped_by_user_on_day);
return Some(groupped_by_user_on_day);
})
.filter_map(Result::ok)
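// Merge the per-day maps into a single agent -> Vec<DataFrame> map covering the whole month.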
.reduce(|mut acc, mut e| {
e.iter_mut().for_each(|(key, val)| {
if acc.contains_key(key) {
acc.get_mut(key)
.expect("Failed to obtain key that should already be present")
.append(val);
} else {
acc.insert(key.to_owned(), val.to_owned());
}
});
acc
})
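// Vertically stack each agent's DataFrames into one monthly DataFrame per agent.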
.and_then(|groupped_hashmap_df| {
let result = groupped_hashmap_df
.iter()
.map(|(key, val)| {
let dfs = val
.iter()
.cloned()
.reduce(|acc, e| acc.vstack(&e).unwrap())
.expect("Failed to concatenate dataframes");
(key.clone(), dfs)
})
.collect_vec();
return Some(result);
});
// Set up the groupped output folder
if !std::fs::exists("./groupped/").unwrap() {
std::fs::create_dir("./groupped").expect("Failed to create directory")
}
// Set up the previous month folder
if !std::fs::exists(format!("./groupped/{first_day_of_last_month}")).unwrap() {
std::fs::create_dir(format!("./groupped/{first_day_of_last_month}"))
.expect("Failed to create directory")
}
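// Write one semicolon-separated CSV per agent into the monthly output folder.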
match groupped_values {
Some(mut val) => {
val.iter_mut().for_each(|(agent, groupped_evaluations)| {
let mut save_file_csv = std::fs::File::create(format!(
"./groupped/{first_day_of_last_month}/{agent}.csv"
))
.expect("Could not create csv file for saving");
CsvWriter::new(&mut save_file_csv)
.include_header(true)
.with_separator(b';')
.finish(groupped_evaluations)
.expect("Failed to save Groupped DataFrame to CSV File");
});
}
None => {}
}
// --- ADDITION: MONTHLY PROCESSING OF response_time.csv ---
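// Collect every row of every daily response_time.csv of the month into a single Vec.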
let response_times_data = previous_month_folder_names
.iter()
.map(|folder_name| {
let folder_base_path = std::path::Path::new("./evaluations");
let folder_date_path = folder_base_path.join(folder_name);
std::fs::read_dir(folder_date_path)
})
.filter_map_ok(|files_inside_folder_on_date| {
let response_time_files = files_inside_folder_on_date
.filter_ok(|entry| {
let entry_file_name_as_str = entry
.file_name()
.into_string()
.expect("Failed to get filename as a String");
entry_file_name_as_str.ends_with("response_time.csv")
})
.filter_map(Result::ok)
.map(|file_path| {
println!("Processing response time file: {:?}", file_path.path());
let mut rdr = csv::ReaderBuilder::new()
.delimiter(b';')
.has_headers(true)
.from_reader(std::fs::File::open(file_path.path()).unwrap());
let records: Vec<ResponseTimeRecord> = rdr
.deserialize()
.filter_map(Result::ok)
.collect();
records
})
.flatten()
.collect_vec();
Some(response_time_files)
})
.filter_map(Result::ok)
.flatten()
.collect_vec();
// Save the month's consolidated response times
if !response_times_data.is_empty() {
let response_time_file_path = format!(
"./groupped/{first_day_of_last_month}/response_times_consolidated_{first_day_of_last_month}.csv"
);
let mut wtr = csv::WriterBuilder::new()
.delimiter(b';')
.from_path(&response_time_file_path)
.expect("Failed to create response times CSV");
// Write the header row
wtr.write_record(&["NOME", "ID_TALK", "TEMPO DE RESPOSTA", "TRANFERENCIA PELO BOT", "PRIMEIRA RESPOSTA DO AGENTE"])
.expect("Failed to write header");
for record in &response_times_data {
wtr.write_record(&[
&record.NOME,
&record.ID_TALK,
&record.TEMPO_DE_RESPOSTA.to_string(),
&record.TRANFERENCIA_PELO_BOT,
&record.PRIMEIRA_RESPOSTA_DO_AGENTE,
]).expect("Failed to write record");
}
wtr.flush().expect("Failed to flush writer");
// Compute monthly response time statistics
let total_records = response_times_data.len();
let avg_response_time = response_times_data.iter()
.map(|r| r.TEMPO_DE_RESPOSTA)
.sum::<u32>() as f32 / total_records as f32;
let min_response_time = response_times_data.iter()
.map(|r| r.TEMPO_DE_RESPOSTA)
.min()
.unwrap_or(0);
let max_response_time = response_times_data.iter()
.map(|r| r.TEMPO_DE_RESPOSTA)
.max()
.unwrap_or(0);
println!("Response times consolidated successfully for month {}!", first_day_of_last_month);
println!("Total records: {}", total_records);
println!("Average response time: {:.2} seconds", avg_response_time);
println!("Min response time: {} seconds", min_response_time);
println!("Max response time: {} seconds", max_response_time);
} else {
println!("No response time data found for the month {}.", first_day_of_last_month);
}
// --- END OF ADDITION ---
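// Zip the monthly output folder and e-mail it to the recipients below.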
zip_directory_util::zip_directory_util::zip_source_dir_to_dst_file(
std::path::Path::new(&format!("./groupped/{first_day_of_last_month}")),
std::path::Path::new(&format!("./groupped/{first_day_of_last_month}.zip")),
);
let recipients = "Wilson da Conceição Oliveira <wilson.oliveira@nova.net.br>, nicolas.borges@nova.net.br";
println!("Trying to send mail... {recipients}");
send_mail_util::send_mail_util::send_email(
&format!("Relatório agrupado dos atendimentos da fila do Financeiro N2 do mês {first_day_of_last_month}"),
&BOT_EMAIL,
&BOT_EMAIL_PASSWORD,
recipients,
&format!("./groupped/{first_day_of_last_month}.zip"),
);
}