use crate::{
    combined_result::CombinedResult,
    executor::{ExecResult, Waiter},
    misc::OneOrVec,
    models::{Agent, AssignedJob, AssignedJobById, FatJobMeta, JobType, Payload, ThinJobMeta},
    proc_output::ProcOutput,
    ufs,
};
use std::collections::HashMap;
use std::process::exit;
use tokio::process::Command;

pub struct UnnamedJobsBatch {
    waiter: Waiter,
    is_running: bool,
}

impl UnnamedJobsBatch {
    pub fn from_meta_with_id(jobs: impl OneOrVec<(ThinJobMeta, AssignedJobById)>) -> Self {
        let jobs = jobs.into_vec();
        let mut waiter = Waiter::new();
        for (meta, job) in jobs {
            waiter.push(run_assigned_job(meta, job));
        }
        Self {
            waiter,
            is_running: false,
        }
    }

    pub fn from_meta(metas: impl OneOrVec<ThinJobMeta>) -> Self {
        let jobs: Vec<_> = metas
            .into_vec()
            .into_iter()
            .map(|meta| {
                let job_id = meta.id;
                (
                    meta,
                    AssignedJobById {
                        job_id,
                        ..Default::default()
                    },
                )
            })
            .collect();
        UnnamedJobsBatch::from_meta_with_id(jobs)
    }

    /// Spawn jobs
    pub async fn spawn(mut self) -> Self {
        self.waiter = self.waiter.spawn().await;
        self.is_running = true;
        self
    }

    /// Spawn jobs and wait for their results
    pub async fn wait(self) -> Vec<ExecResult> {
        let waiter = if !self.is_running {
            self.spawn().await.waiter
        } else {
            self.waiter
        };
        waiter.wait().await
    }

    /// Spawn one job and wait for its result
    pub async fn wait_one(self) -> ExecResult {
        self.wait().await.pop().unwrap()
    }
}

pub async fn run_assigned_job(meta: ThinJobMeta, ids: AssignedJobById) -> ExecResult {
    let mut job = AssignedJob::from((&meta, ids));
    match meta.exec_type {
        JobType::Shell => {
            let (argv, _prepared_payload) = {
                if let Some(ref payload) = meta.payload {
                    // Materialize the payload as an executable and substitute its path
                    // for the "{}" placeholder in argv.
                    let (prep_exec, prep_exec_path) = ufs::prepare_executable(payload)?;
                    let argv_with_exec = meta.argv.replace("{}", &prep_exec_path);
                    // Keep the prepared executable handle alive while the command runs.
                    (argv_with_exec, Some(prep_exec))
                } else {
                    (meta.argv.clone(), None)
                }
            };
            let mut split_cmd = shlex::split(&argv).unwrap().into_iter();
            let cmd = split_cmd.nth(0).unwrap();
            let args = split_cmd.collect::<Vec<String>>();
            let cmd_result = Command::new(cmd).args(args).output().await;
            let (data, retcode) = match cmd_result {
                Ok(output) => (
                    ProcOutput::from_output(&output).into_vec(),
                    output.status.code(),
                ),
                Err(e) => (
                    ProcOutput::new()
                        .set_stderr(e.to_string().into_bytes())
                        .into_vec(),
                    None,
                ),
            };
            job.result = Some(data);
            job.retcode = retcode;
        }
        JobType::Init => {
            job.set_result(&Agent::run().await);
            job.retcode = Some(0);
        }
        JobType::Service => todo!(),
        JobType::Update => todo!(),
        JobType::Terminate => exit(0),
    };
    Ok(job)
}

pub fn fat_meta_to_thin(meta: FatJobMeta<true>) -> Result<ThinJobMeta, ufs::Error> {
    let payload_ident = if let Some(mut payload) = meta.payload {
        let job_name = match &meta.alias {
            Some(a) => a.to_string(),
            None => meta.id.simple().to_string(),
        };
        payload.write_self_into(&job_name)?;
        Some(job_name)
    } else {
        None
    };
    Ok(ThinJobMeta {
        alias: meta.alias,
        argv: meta.argv,
        id: meta.id,
        exec_type: meta.exec_type,
        platform: meta.platform,
        payload: payload_ident,
        schedule: meta.schedule,
    })
}

pub fn thin_meta_to_fat(meta: ThinJobMeta) -> Result<FatJobMeta<true>, ufs::Error> {
    let payload = if let Some(payload) = meta.payload {
        let mut fat_payload = Payload::Ident(payload);
        fat_payload.read_into_self()?;
        Some(fat_payload)
    } else {
        None
    };
    Ok(FatJobMeta {
        alias: meta.alias,
        argv: meta.argv,
        id: meta.id,
        exec_type: meta.exec_type,
        platform: meta.platform,
        payload,
        schedule: meta.schedule,
    })
}
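// A minimal usage sketch for `UnnamedJobsBatch` (illustrative only: "whoami" is an
// arbitrary command, error handling is collapsed into `unwrap`, and the calls mirror
// those exercised in the tests below):
//
//     let meta = fat_meta_to_thin(FatJobMeta::from_shell("whoami").unwrap()).unwrap();
//     let result = UnnamedJobsBatch::from_meta(meta).wait_one().await.unwrap();
//     println!("{}", result.to_str_result());
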
/// Store jobs and get results by name
pub struct NamedJobsBatch<const FINISHED: bool = false> {
    runner: Option<UnnamedJobsBatch>,
    job_names: Vec<String>,
    results: HashMap<String, ExecResult>,
}

impl NamedJobsBatch {
    pub fn from_shell(
        named_jobs: impl OneOrVec<(&'static str, &'static str)>,
    ) -> CombinedResult<Self> {
        let mut result = CombinedResult::new();
        let jobs: Vec<_> = named_jobs
            .into_vec()
            .into_iter()
            .filter_map(|(alias, cmd)| {
                match FatJobMeta::builder()
                    .with_shell(cmd)
                    .with_alias(alias)
                    .build()
                {
                    Ok(fat_meta) => match fat_meta_to_thin(fat_meta) {
                        Ok(thin_meta) => Some(thin_meta),
                        Err(e) => {
                            result.err(e);
                            None
                        }
                    },
                    Err(e) => {
                        result.err(e);
                        None
                    }
                }
            })
            .collect();
        result.ok(Self::from_meta(jobs));
        result
    }

    pub fn from_meta(named_jobs: impl OneOrVec<ThinJobMeta>) -> Self {
        let (job_names, job_metas): (Vec<_>, Vec<_>) = named_jobs
            .into_vec()
            .into_iter()
            .map(|meta| (meta.alias.clone().unwrap(), meta))
            .unzip();
        Self {
            runner: Some(UnnamedJobsBatch::from_meta(job_metas)),
            job_names,
            results: HashMap::new(),
        }
    }

    pub async fn wait(mut self) -> NamedJobsBatch<true> {
        let results = self.runner.take().unwrap().wait().await;
        for (name, result) in self.job_names.into_iter().zip(results.into_iter()) {
            self.results.insert(name, result);
        }
        NamedJobsBatch::<true> {
            runner: None,
            job_names: vec![],
            results: self.results,
        }
    }
}

impl NamedJobsBatch<true> {
    pub fn pop_opt(&mut self, name: &'static str) -> Option<ExecResult> {
        self.results.remove(name)
    }

    pub fn pop(&mut self, name: &'static str) -> ExecResult {
        self.pop_opt(name).unwrap()
    }
}
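// A usage sketch for `NamedJobsBatch` (illustrative: the "who" alias is made up here;
// the same flow is exercised by `test_different_job_types` below and must run inside
// an async context):
//
//     let mut batch = NamedJobsBatch::from_meta(
//         fat_meta_to_thin(
//             FatJobMeta::builder()
//                 .with_shell("whoami")
//                 .with_alias("who")
//                 .build()
//                 .unwrap(),
//         )
//         .unwrap(),
//     )
//     .wait()
//     .await;
//     let who = batch.pop("who").unwrap();
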
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        jobs::{NamedJobsBatch, UnnamedJobsBatch},
        models::{misc::JobType, FatJobMeta},
        unwrap_enum, UError,
    };
    use rstest::rstest;
    use std::time::SystemTime;

    type TestResult = Result<(), Box<dyn std::error::Error>>;

    #[tokio::test]
    async fn test_is_really_async() {
        const SLEEP_SECS: u64 = 1;
        let job = FatJobMeta::from_shell(format!("sleep {SLEEP_SECS}")).unwrap();
        let sleep_jobs = (0..50)
            .map(|_| fat_meta_to_thin(job.clone()).unwrap())
            .collect::<Vec<_>>();

        let now = SystemTime::now();
        UnnamedJobsBatch::from_meta(sleep_jobs).wait().await;
        assert!(now.elapsed().unwrap().as_secs() < SLEEP_SECS + 2)
    }

    #[rstest]
    #[case::sh_payload(
        "/bin/sh {}",
        Some(b"echo test01 > /tmp/asd; cat /tmp/asd".as_slice()),
        "test01"
    )]
    #[case::python_cmd(r#"/usr/bin/python3 -c 'print("test02")'"#, None, "test02")]
    #[case::sh_multiline_payload(
        "/{}",
        Some(
            br#"#!/bin/sh
TMPPATH=/tmp/lol
mkdir -p $TMPPATH
echo test03 > $TMPPATH/t
cat $TMPPATH/t
rm -rf $TMPPATH"#
                .as_slice()
        ),
        "test03"
    )]
    #[case::standalone_binary_with_args(
        "/{} 'some msg as arg'",
        Some(include_bytes!("../tests/fixtures/echoer").as_slice()),
        "some msg as arg"
    )]
    #[tokio::test]
    async fn test_shell_job(
        #[case] cmd: &str,
        #[case] payload: Option<&[u8]>,
        #[case] expected_result: &str,
    ) -> TestResult {
        let mut job = FatJobMeta::builder().with_shell(cmd);
        if let Some(p) = payload {
            job = job.with_payload(p);
        }
        let job = fat_meta_to_thin(job.build().unwrap()).unwrap();
        let result = UnnamedJobsBatch::from_meta(job).wait_one().await.unwrap();
        let result = result.to_str_result();
        assert_eq!(result.trim(), expected_result);
        Ok(())
    }

    #[tokio::test]
    async fn test_complex_load() -> TestResult {
        const SLEEP_SECS: u64 = 1;
        let now = SystemTime::now();
        let longest_job = FatJobMeta::from_shell(format!("sleep {}", SLEEP_SECS)).unwrap();
        let longest_job = UnnamedJobsBatch::from_meta(fat_meta_to_thin(longest_job).unwrap())
            .spawn()
            .await;
        let ls = UnnamedJobsBatch::from_meta(
            fat_meta_to_thin(FatJobMeta::from_shell("ls").unwrap()).unwrap(),
        )
        .wait_one()
        .await
        .unwrap();
        assert_eq!(ls.retcode.unwrap(), 0);

        let folders = ls.to_str_result();
        let subfolders_jobs = folders
            .lines()
            .map(|f| fat_meta_to_thin(FatJobMeta::from_shell(format!("ls {f}")).unwrap()).unwrap())
            .collect::<Vec<_>>();

        let ls_subfolders = UnnamedJobsBatch::from_meta(subfolders_jobs).wait().await;
        for result in ls_subfolders {
            assert_eq!(result.unwrap().retcode.unwrap(), 0);
        }

        longest_job.wait().await;
        assert_eq!(now.elapsed().unwrap().as_secs(), SLEEP_SECS);
        Ok(())
    }

    /*
    #[tokio::test]
    async fn test_exec_multiple_jobs_nowait() -> UResult<()> {
        const REPEATS: usize = 10;
        let job = JobMeta::from_shell("whoami");
        let sleep_jobs: Vec<JobMeta> = (0..=REPEATS).map(|_| job.clone()).collect();
        build_jobs(sleep_jobs).spawn().await;
        let mut completed = 0;
        while completed < REPEATS {
            let c = pop_completed().await.len();
            if c > 0 {
                completed += c;
                println!("{}", c);
            }
        }
        Ok(())
    }
    */

    #[tokio::test]
    async fn test_failing_shell_job() -> TestResult {
        let job = fat_meta_to_thin(FatJobMeta::from_shell("lol_kek_puk").unwrap()).unwrap();
        let job_result = UnnamedJobsBatch::from_meta(job).wait_one().await.unwrap();
        let output = job_result.to_str_result();
        assert!(output.contains("No such file"));
        assert!(job_result.retcode.is_none());
        Ok(())
    }

    #[rstest]
    #[case::no_binary("/bin/bash {}", None, "contains executable")]
    #[case::no_path_to_binary("/bin/bash", Some(b"whoami".as_slice()), "contains no executable")]
    #[tokio::test]
    async fn test_job_building_failed(
        #[case] cmd: &str,
        #[case] payload: Option<&[u8]>,
        #[case] err_str: &str,
    ) -> TestResult {
        let mut job = FatJobMeta::builder().with_shell(cmd);
        if let Some(p) = payload {
            job = job.with_payload(p);
        }
        let err = job.build().unwrap_err();
        let err_msg = unwrap_enum!(err, UError::JobBuildError);
        assert!(err_msg.contains(err_str));
        Ok(())
    }

    #[tokio::test]
    async fn test_different_job_types() -> TestResult {
        let mut jobs = NamedJobsBatch::from_meta(
            [
                FatJobMeta::builder()
                    .with_shell("sleep 3")
                    .with_alias("sleeper")
                    .build()
                    .unwrap(),
                FatJobMeta::builder()
                    .with_type(JobType::Init)
                    .with_alias("gatherer")
                    .build()
                    .unwrap(),
            ]
            .into_iter()
            .map(|meta| fat_meta_to_thin(meta).unwrap())
            .collect::<Vec<_>>(),
        )
        .wait()
        .await;

        let gathered = jobs.pop("gatherer").unwrap();
        assert_eq!(gathered.alias.unwrap(), "gatherer");
        Ok(())
    }
}