split jobs into submodules, reworked db interface

Closes #11

See merge request root/unki!24-update-check

commit 06b731b221

19 changed files with 830 additions and 925 deletions
@@ -0,0 +1,3 @@
[build]
target = "x86_64-unknown-linux-gnu" # -musl"
@@ -1,16 +0,0 @@
use thiserror::Error;
use diesel::result::Error as DslError;

pub type USrvResult<T> = Result<T, USrvError>;

#[derive(Error, Debug)]
pub enum USrvError {
    #[error("{0} is not found")]
    NotFound(String),

    #[error("Error processing {0}")]
    ProcessingError(String),

    #[error(transparent)]
    DBError(#[from] DslError)
}
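For context on what this hunk removes (a sketch, not part of the diff): thiserror's `#[error(transparent)]` plus `#[from]` let any `diesel::result::Error` bubble up into `USrvError::DBError` through `?`. The helper below is hypothetical and only assumes the `jobs` table from the crate's schema:

```rust
use diesel::prelude::*;

// Hypothetical query helper: the `?` operator converts diesel::result::Error
// into USrvError::DBError automatically via the #[from] attribute above.
fn first_payload(conn: &diesel::PgConnection) -> USrvResult<Vec<u8>> {
    use crate::models::schema::jobs::dsl::*;
    let raw: Option<Vec<u8>> = jobs.select(payload).first(conn)?;
    Ok(raw.unwrap_or_default())
}
```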
@@ -1,570 +0,0 @@
use std::{
    time::{SystemTime, Duration},
    thread,
    sync::{RwLock, RwLockReadGuard},
    cmp::PartialEq,
    fmt,
    string::ToString,
    path::PathBuf,
    fs,
    process::Output,
    collections::HashMap,
    ops::Deref,
};
use serde::{
    Serialize,
    Deserialize
};
use uuid::Uuid;
use guess_host_triple::guess_host_triple;
use tokio::{
    process::Command
};
use crate::{
    utils::systime_to_string,
    models::schema::*,
    Agent,
    UError,
    UResult,
    UID,
    Waiter,
    OneOrMany,
    DynFut,
};
use diesel_derive_enum::DbEnum;
use diesel::{
    Queryable,
    Identifiable,
    Insertable,
};
use strum::Display;

type Cache = HashMap<Uuid, JobMeta>;

lazy_static! {
    static ref JOB_CACHE: RwLock<Cache> = RwLock::new(HashMap::new());
}

pub struct JobCache;

impl JobCache {
    pub fn insert(job_meta: JobMeta) {
        JOB_CACHE.write().unwrap().insert(job_meta.id, job_meta);
    }

    pub fn contains(uid: &Uuid) -> bool {
        JOB_CACHE.read().unwrap().contains_key(uid)
    }

    pub fn get(uid: &Uuid) -> Option<JobCacheHolder> {
        if !Self::contains(uid) {
            return None
        }
        let lock = JOB_CACHE.read().unwrap();
        Some(JobCacheHolder(lock, uid))
    }

    pub fn remove(uid: &Uuid) {
        JOB_CACHE.write().unwrap().remove(uid);
    }
}

pub struct JobCacheHolder<'jm>(pub RwLockReadGuard<'jm, Cache>, pub &'jm Uuid);

impl<'jm> Deref for JobCacheHolder<'jm> {
    type Target = JobMeta;

    fn deref(&self) -> &Self::Target {
        self.0.get(self.1).unwrap()
    }
}

#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum ManageAction {
    Ping,
    UpdateAvailable,
    JobsResultsRequest,
    Terminate
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub enum JobSchedule {
    Once,
    Permanent,
    //Scheduled
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, DbEnum, Display)]
#[PgType = "JobState"]
#[DieselType = "Jobstate"]
pub enum JobState {
    Queued,   // server created a job, but client didn't get it yet
    //Pending, // client got a job, but not running yet
    Running,  // client is currently running a job
    Finished,
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, DbEnum, Display)]
#[PgType = "JobType"]
#[DieselType = "Jobtype"]
pub enum JobType {
    Manage,
    Shell,
    Python,
    Binary,
    Dummy
}

#[derive(Clone, Debug)]
pub struct JobOutput {
    pub stdout: Vec<u8>,
    pub stderr: Vec<u8>,
}

impl JobOutput {
    const STREAM_BORDER: &'static str = "***";
    const STDOUT: &'static str = "STDOUT";
    const STDERR: &'static str = "STDERR";

    #[inline]
    fn create_delim(header: &'static str) -> String {
        format!("{border} {head} {border}\n",
            border = JobOutput::STREAM_BORDER,
            head = header
        )
    }

    pub fn new() -> Self {
        Self {
            stdout: vec![],
            stderr: vec![],
        }
    }

    pub fn stdout(mut self, data: Vec<u8>) -> Self {
        self.stdout = data;
        self
    }

    pub fn stderr(mut self, data: Vec<u8>) -> Self {
        self.stderr = data;
        self
    }

    pub fn multiline(&self) -> Vec<u8> {
        let mut result: Vec<u8> = vec![];
        if self.stdout.len() > 0 {
            result.extend(JobOutput::create_delim(JobOutput::STDOUT).into_bytes());
            result.extend(&self.stdout);
            result.push(b'\n');
        }

        if self.stderr.len() > 0 {
            result.extend(JobOutput::create_delim(JobOutput::STDERR).into_bytes());
            result.extend(&self.stderr);
            result.push(b'\n');
        }
        result
    }

    pub fn from_raw(raw: &[u8]) -> Option<Self> {
        let raw = String::from_utf8_lossy(raw);
        let err_header = JobOutput::create_delim(JobOutput::STDERR);
        raw.strip_prefix(&JobOutput::create_delim(JobOutput::STDOUT))
            .map(|s: &str| {
                let mut parts = s.split(&err_header)
                    .map(|d| d.trim().as_bytes().to_vec())
                    .collect::<Vec<Vec<u8>>>()
                    .into_iter();
                JobOutput::new()
                    .stdout(parts.next().unwrap())
                    .stderr(parts.next().unwrap_or(vec![]))
            })
    }

    pub fn into_appropriate(self) -> Vec<u8> {
        if self.stdout.len() > 0 {
            self.stdout
        } else if self.stderr.len() > 0 {
            self.stderr
        } else {
            b"No data".to_vec()
        }
    }
}

#[derive(
    Serialize,
    Deserialize,
    Clone,
    Debug,
    Queryable,
    Identifiable,
    Insertable
)]
#[table_name = "jobs"]
pub struct JobMeta {
    pub alias: String,
    pub id: Uuid,
    pub exec_type: JobType,
    //pub schedule: JobSchedule,
    pub platform: String,
    pub payload: Option<Vec<u8>>,
}

impl fmt::Display for JobMeta {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut out = format!("Job {}", self.id);
        out += &format!(" ({})", self.alias);
        out += &format!("\nExecutable type: {}", self.exec_type);
        out += &format!("\nPlatform: {}", self.platform);
        if self.exec_type == JobType::Shell && self.payload.is_some() {
            out += &format!("\nPayload: {}", String::from_utf8_lossy(self.payload.as_ref().unwrap()));
        }
        write!(f, "{}", out)
    }
}

impl JobMeta {
    pub fn from_shell<S: Into<String>>(shell_cmd: S) -> Self {
        let shell_cmd = shell_cmd.into();
        let job_name = shell_cmd.split(" ").nth(0).unwrap();
        Self {
            alias: job_name.to_string(),
            payload: Some(shell_cmd.into_bytes()),
            ..Default::default()
        }
    }
    /*
    pub fn from_file(path: PathBuf) -> UResult<Self> {
        let data = fs::read(path)
            .map_err(|e| UError::FilesystemError(
                path.to_string_lossy().to_string(),
                e.to_string()
            ))?;
        let filename = path.file_name().unwrap().to_str().unwrap();

    }*/
}

impl Default for JobMeta {
    fn default() -> Self {
        Self {
            id: Uuid::new_v4(),
            alias: String::new(),
            exec_type: JobType::Shell,
            platform: guess_host_triple().unwrap_or("unknown").to_string(),
            payload: None
        }
    }
}

#[derive(
    Serialize,
    Deserialize,
    Clone,
    Debug,
    Queryable,
    Identifiable,
    Insertable,
    AsChangeset,
)]
#[table_name = "results"]
pub struct ExactJob {
    pub agent_id: Uuid,
    pub created: SystemTime,
    pub id: Uuid,
    pub job_id: Uuid,
    pub result: Option<Vec<u8>>,
    pub state: JobState,
    pub retcode: Option<i32>,
    pub updated: SystemTime,
}

impl fmt::Display for ExactJob {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut out = format!("Result {}", self.id);
        out += &format!("\nAgent {}", self.agent_id);
        out += &format!("\nJob: {}", self.job_id);
        out += &format!("\nUpdated: {}", systime_to_string(&self.updated));
        out += &format!("\nState: {}", self.state);
        if self.state == JobState::Finished {
            if self.retcode.is_some() {
                out += &format!("\nReturn code: {}", self.retcode.unwrap());
            }
            if self.result.is_some() {
                out += &format!("\nResult: {}", String::from_utf8_lossy(self.result.as_ref().unwrap()));
            }
        }
        write!(f, "{}", out)
    }
}

impl ExactJob {
    pub fn from_meta(job_id: Uuid, result_id: Option<Uuid>) -> Self {
        Self {
            id: result_id.unwrap_or(Uuid::new_v4()),
            agent_id: *UID,
            job_id,
            ..Default::default()
        }
    }

    //pub fn as_job_output(&self) -> JobOutput {}
}

impl Default for ExactJob {
    fn default() -> Self {
        Self {
            agent_id: Uuid::nil(),
            created: SystemTime::now(),
            id: Uuid::new_v4(),
            job_id: Uuid::nil(),
            result: None,
            state: JobState::Queued,
            retcode: None,
            updated: SystemTime::now()
        }
    }
}

pub struct Job {
    exec_type: JobType,
    payload: Option<Vec<u8>>,
    result: ExactJob
}

impl Job {
    fn build(job_meta: &JobMeta, result_id: Uuid) -> UResult<Self> {
        match job_meta.exec_type {
            JobType::Shell => {
                let curr_platform = guess_host_triple().unwrap_or("unknown").to_string();
                if job_meta.platform != curr_platform {
                    return Err(UError::InsuitablePlatform(
                        job_meta.platform.clone(), curr_platform
                    ))
                }
                let job_meta = job_meta.clone();
                Ok(Self {
                    exec_type: job_meta.exec_type,
                    payload: job_meta.payload,
                    result: ExactJob::from_meta(job_meta.id.clone(), Some(result_id))
                })
            },
            _ => todo!()
        }
    }

    async fn run(mut self) -> UResult<ExactJob> {
        match self.exec_type {
            JobType::Shell => {
                let str_payload = match &self.payload {
                    Some(box_payload) => {
                        String::from_utf8_lossy(box_payload).into_owned()
                    }
                    None => unimplemented!()
                };
                let mut cmd_parts = str_payload
                    .split(" ")
                    .map(String::from)
                    .collect::<Vec<String>>()
                    .into_iter();
                let cmd = cmd_parts.nth(0).unwrap();
                let args = cmd_parts.collect::<Vec<_>>();
                let cmd_result = Command::new(cmd)
                    .args(args)
                    .output()
                    .await;
                let (data, retcode) = match cmd_result {
                    Ok(Output {status, stdout, stderr}) => {
                        (
                            Some(JobOutput::new()
                                .stdout(stdout)
                                .stderr(stderr)
                                .multiline()
                            ),
                            status.code()
                        )
                    }
                    Err(e) => {
                        (
                            Some(UError::JobError(e.to_string()).to_string().into_bytes()),
                            None
                        )
                    }
                };
                self.result.result = data;
                self.result.retcode = retcode;
                self.result.updated = SystemTime::now();
                self.result.state = JobState::Finished;
            },
            _ => todo!()
        }
        Ok(self.result)
    }
}

pub fn build_jobs_with_result<J: OneOrMany<ExactJob>>(job_requests: J) -> Waiter {
    let prepared_jobs = job_requests.into_vec()
        .into_iter()
        .filter_map(|jr| -> Option<DynFut> {
            let job = {
                let job_meta = JobCache::get(&jr.job_id);
                if job_meta.is_none() {
                    Err(UError::NoJob(jr.job_id))
                } else {
                    Job::build(&*job_meta.unwrap(), jr.id)
                }
            };
            match job {
                Ok(j) => Some(Box::pin(j.run())),
                Err(e) => {
                    warn!("Job building error: {}", e);
                    None
                }
            }
        }).collect::<Vec<DynFut>>();
    Waiter::new(prepared_jobs)
}

pub fn build_jobs<J: OneOrMany<JobMeta>>(job_metas: J) -> Waiter {
    let job_requests = job_metas.into_vec().into_iter().map(|jm| {
        let j_uid = jm.id;
        JobCache::insert(jm);
        ExactJob::from_meta(j_uid, None)
    }).collect::<Vec<ExactJob>>();
    build_jobs_with_result(job_requests)
}


#[cfg(test)]
mod tests {
    use super::*;
    use crate::{build_jobs, utils::vec_to_string, pop_completed};

    #[tokio::test]
    async fn test_is_really_async() {
        const SLEEP_SECS: u64 = 1;
        let job = JobMeta::from_shell(format!("sleep {}", SLEEP_SECS));
        let sleep_jobs: Vec<JobMeta> = (0..50).map(|_| job.clone()).collect();
        let now = SystemTime::now();
        build_jobs(sleep_jobs).run_until_complete().await;
        assert!(now.elapsed().unwrap().as_secs() < SLEEP_SECS + 2)
    }

    #[tokio::test]
    async fn test_shell_job() -> UResult<()> {
        let job = JobMeta::from_shell("whoami");
        let job_result = build_jobs(job)
            .run_one_until_complete()
            .await;
        let stdout = JobOutput::from_raw(
            &job_result.unwrap().result.unwrap()
        ).unwrap().stdout;
        assert_eq!(
            vec_to_string(&stdout).trim(),
            "plazmoid"
        );
        Ok(())
    }

    #[tokio::test]
    async fn test_complex_load() -> UResult<()> {
        const SLEEP_SECS: u64 = 1;
        let now = SystemTime::now();
        let longest_job = JobMeta::from_shell(format!("sleep {}", SLEEP_SECS));
        let longest_job = build_jobs(longest_job).spawn().await;
        let ls = build_jobs(JobMeta::from_shell("ls"))
            .run_one_until_complete()
            .await
            .unwrap();
        assert_eq!(ls.retcode.unwrap(), 0);
        let result = JobOutput::from_raw(&ls.result.unwrap()).unwrap();
        let folders = String::from_utf8_lossy(
            &result.stdout
        );
        let subfolders_jobs: Vec<JobMeta> = folders
            .lines()
            .map(|f| JobMeta::from_shell(format!("ls {}", f)))
            .collect();
        let ls_subfolders = build_jobs(
            subfolders_jobs
        ).run_until_complete().await;
        for result in ls_subfolders {
            assert_eq!(result.unwrap().retcode.unwrap(), 0);
        }
        longest_job.wait().await;
        assert_eq!(now.elapsed().unwrap().as_secs(), SLEEP_SECS);
        Ok(())
    }
    /*
    #[tokio::test]
    async fn test_exec_multiple_jobs_nowait() -> UResult<()> {
        const REPEATS: usize = 10;
        let job = JobMeta::from_shell("whoami");
        let sleep_jobs: Vec<JobMeta> = (0..=REPEATS).map(|_| job.clone()).collect();
        build_jobs(sleep_jobs).spawn().await;
        let mut completed = 0;
        while completed < REPEATS {
            let c = pop_completed().await.len();
            if c > 0 {
                completed += c;
                println!("{}", c);
            }
        }
        Ok(())
    }
    */
    #[tokio::test]
    async fn test_failing_shell_job() -> UResult<()> {
        let job = JobMeta::from_shell("lol_kek_puk");
        let job_result = build_jobs(job).run_one_until_complete().await.unwrap();
        let output = JobOutput::from_raw(&job_result.result.unwrap());
        assert!(output.is_none());
        assert!(job_result.retcode.is_none());
        Ok(())
    }

    #[test]
    fn test_to_multiline() {
        let mut output = JobOutput::new();
        output.stdout = b"lol".to_vec();
        output.stderr = b"kek".to_vec();
        assert_eq!(
            vec_to_string(&output.multiline()),
            String::from(
                "*** STDOUT ***\n\
                 lol\n\
                 *** STDERR ***\n\
                 kek\n"
            )
        )
    }

    #[test]
    fn test_to_multiline_stderr_only() {
        let mut output = JobOutput::new();
        output.stderr = b"kek".to_vec();
        assert_eq!(
            vec_to_string(&output.multiline()),
            String::from(
                "*** STDERR ***\n\
                 kek\n"
            )
        )
    }

    #[test]
    fn test_from_multiline() {
        let txt = "*** STDOUT ***\n\
                   puk\n".as_bytes();
        let output = JobOutput::from_raw(txt).unwrap();
        assert_eq!(
            vec_to_string(&output.stdout),
            "puk".to_string()
        );
        assert_eq!(output.stderr.len(), 0);
    }
}
@@ -0,0 +1,116 @@
use super::{ExactJob, JobCache, JobMeta, JobOutput, JobState, JobType};
use crate::{
    executor::{DynFut, Waiter},
    utils::OneOrMany,
    UError, UResult,
};
use guess_host_triple::guess_host_triple;
use std::{process::Output, time::SystemTime};
use tokio::process::Command;
use uuid::Uuid;

pub struct Job {
    exec_type: JobType,
    payload: Option<Vec<u8>>,
    result: ExactJob,
}

impl Job {
    fn build(job_meta: &JobMeta, result_id: Uuid) -> UResult<Self> {
        match job_meta.exec_type {
            JobType::Shell => {
                let curr_platform = guess_host_triple().unwrap_or("unknown").to_string();
                if job_meta.platform != curr_platform {
                    return Err(UError::InsuitablePlatform(
                        job_meta.platform.clone(),
                        curr_platform,
                    ));
                }
                let job_meta = job_meta.clone();
                Ok(Self {
                    exec_type: job_meta.exec_type,
                    payload: job_meta.payload,
                    result: ExactJob::from_meta(job_meta.id.clone(), Some(result_id)),
                })
            }
            _ => todo!(),
        }
    }

    async fn run(mut self) -> UResult<ExactJob> {
        match self.exec_type {
            JobType::Shell => {
                let str_payload = match &self.payload {
                    Some(box_payload) => String::from_utf8_lossy(box_payload).into_owned(),
                    None => unimplemented!(),
                };
                let mut cmd_parts = str_payload
                    .split(" ")
                    .map(String::from)
                    .collect::<Vec<String>>()
                    .into_iter();
                let cmd = cmd_parts.nth(0).unwrap();
                let args = cmd_parts.collect::<Vec<_>>();
                let cmd_result = Command::new(cmd).args(args).output().await;
                let (data, retcode) = match cmd_result {
                    Ok(Output {
                        status,
                        stdout,
                        stderr,
                    }) => (
                        JobOutput::new().stdout(stdout).stderr(stderr).multiline(),
                        status.code(),
                    ),
                    Err(e) => (
                        UError::JobError(e.to_string()).to_string().into_bytes(),
                        None,
                    ),
                };
                self.result.result = Some(data);
                self.result.retcode = retcode;
                self.result.updated = SystemTime::now();
                self.result.state = JobState::Finished;
            }
            _ => todo!(),
        }
        Ok(self.result)
    }
}

pub fn build_jobs_with_result<J: OneOrMany<ExactJob>>(job_requests: J) -> Waiter {
    let prepared_jobs = job_requests
        .into_vec()
        .into_iter()
        .filter_map(|jr| -> Option<DynFut> {
            let job = {
                let job_meta = JobCache::get(&jr.job_id);
                if job_meta.is_none() {
                    Err(UError::NoJob(jr.job_id))
                } else {
                    Job::build(&*job_meta.unwrap(), jr.id)
                }
            };
            match job {
                Ok(j) => Some(Box::pin(j.run())),
                Err(e) => {
                    warn!("Job building error: {}", e);
                    None
                }
            }
        })
        .collect::<Vec<DynFut>>();
    Waiter::new(prepared_jobs)
}

pub fn build_jobs<J: OneOrMany<JobMeta>>(job_metas: J) -> Waiter {
    let job_requests = job_metas
        .into_vec()
        .into_iter()
        .map(|jm| {
            let j_uid = jm.id;
            JobCache::insert(jm);
            ExactJob::from_meta(j_uid, None)
        })
        .collect::<Vec<ExactJob>>();
    build_jobs_with_result(job_requests)
}
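The builders are driven through the `Waiter` returned by `build_jobs`; a minimal sketch of running a single shell job to completion, mirroring the tests elsewhere in this MR (the `run_one_until_complete` call and the shape of its return value are taken from those tests):

```rust
// Sketch only, not part of the diff: build one shell job, wait for it, decode its output.
async fn run_single_demo() {
    let meta = JobMeta::from_shell("uname -a");
    let done: ExactJob = build_jobs(meta)
        .run_one_until_complete()
        .await
        .unwrap();
    if let Some(raw) = &done.result {
        // `result` holds the "*** STDOUT ***" / "*** STDERR ***" multiline encoding.
        if let Some(parsed) = JobOutput::from_raw(raw) {
            println!(
                "exit code {:?}: {}",
                done.retcode,
                String::from_utf8_lossy(&parsed.stdout)
            );
        }
    }
}
```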
@@ -0,0 +1,47 @@
use crate::models::JobMeta;
use std::{
    collections::HashMap,
    ops::Deref,
    sync::{RwLock, RwLockReadGuard},
};
use uuid::Uuid;

type Cache = HashMap<Uuid, JobMeta>;

lazy_static! {
    static ref JOB_CACHE: RwLock<Cache> = RwLock::new(HashMap::new());
}

pub struct JobCache;

impl JobCache {
    pub fn insert(job_meta: JobMeta) {
        JOB_CACHE.write().unwrap().insert(job_meta.id, job_meta);
    }

    pub fn contains(uid: &Uuid) -> bool {
        JOB_CACHE.read().unwrap().contains_key(uid)
    }

    pub fn get(uid: &Uuid) -> Option<JobCacheHolder> {
        if !Self::contains(uid) {
            return None;
        }
        let lock = JOB_CACHE.read().unwrap();
        Some(JobCacheHolder(lock, uid))
    }

    pub fn remove(uid: &Uuid) {
        JOB_CACHE.write().unwrap().remove(uid);
    }
}

pub struct JobCacheHolder<'jm>(pub RwLockReadGuard<'jm, Cache>, pub &'jm Uuid);

impl<'jm> Deref for JobCacheHolder<'jm> {
    type Target = JobMeta;

    fn deref(&self) -> &Self::Target {
        self.0.get(self.1).unwrap()
    }
}
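A quick illustration (not part of the diff) of how this cache is used: `get` hands back a `JobCacheHolder` that keeps the `RwLock` read guard alive and derefs to the cached `JobMeta`, so the entry cannot be mutated or dropped while it is being read:

```rust
// Sketch only: insert a job, read it back through the guard, then remove it.
fn cache_roundtrip() {
    let meta = JobMeta::from_shell("whoami");
    let id = meta.id;
    JobCache::insert(meta);
    assert!(JobCache::contains(&id));
    if let Some(cached) = JobCache::get(&id) {
        // `cached` derefs to &JobMeta while the read guard is held.
        println!("{}", cached.alias);
    } // guard dropped here, releasing the read lock
    JobCache::remove(&id); // safe now; removing while a guard is held would block
}
```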
@@ -0,0 +1,68 @@
use super::JobType;
use crate::models::schema::*;
use diesel::{Identifiable, Insertable, Queryable};
use guess_host_triple::guess_host_triple;
use serde::{Deserialize, Serialize};
use std::fmt;
use uuid::Uuid;

#[derive(Serialize, Deserialize, Clone, Debug, Queryable, Identifiable, Insertable)]
#[table_name = "jobs"]
pub struct JobMeta {
    pub alias: String,
    pub id: Uuid,
    pub exec_type: JobType,
    //pub schedule: JobSchedule,
    pub platform: String,
    pub payload: Option<Vec<u8>>,
}

impl fmt::Display for JobMeta {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut out = format!("Job {}", self.id);
        out += &format!(" ({})", self.alias);
        out += &format!("\nExecutable type: {}", self.exec_type);
        out += &format!("\nPlatform: {}", self.platform);
        if self.exec_type == JobType::Shell && self.payload.is_some() {
            out += &format!(
                "\nPayload: {}",
                String::from_utf8_lossy(self.payload.as_ref().unwrap())
            );
        }
        write!(f, "{}", out)
    }
}

impl JobMeta {
    pub fn from_shell<S: Into<String>>(shell_cmd: S) -> Self {
        let shell_cmd = shell_cmd.into();
        let job_name = shell_cmd.split(" ").nth(0).unwrap();
        Self {
            alias: job_name.to_string(),
            payload: Some(shell_cmd.into_bytes()),
            ..Default::default()
        }
    }
    /*
    pub fn from_file(path: PathBuf) -> UResult<Self> {
        let data = fs::read(path)
            .map_err(|e| UError::FilesystemError(
                path.to_string_lossy().to_string(),
                e.to_string()
            ))?;
        let filename = path.file_name().unwrap().to_str().unwrap();

    }*/
}

impl Default for JobMeta {
    fn default() -> Self {
        Self {
            id: Uuid::new_v4(),
            alias: String::new(),
            exec_type: JobType::Shell,
            platform: guess_host_triple().unwrap_or("unknown").to_string(),
            payload: None,
        }
    }
}
@@ -0,0 +1,164 @@
use diesel_derive_enum::DbEnum;
use serde::{Deserialize, Serialize};
use strum::Display;

#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum ManageAction {
    Ping,
    UpdateAvailable,
    JobsResultsRequest,
    Terminate,
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub enum JobSchedule {
    Once,
    Permanent,
    //Scheduled
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, DbEnum, Display)]
#[PgType = "JobState"]
#[DieselType = "Jobstate"]
pub enum JobState {
    Queued,   // server created a job, but client didn't get it yet
    //Pending, // client got a job, but not running yet
    Running,  // client is currently running a job
    Finished,
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, DbEnum, Display)]
#[PgType = "JobType"]
#[DieselType = "Jobtype"]
pub enum JobType {
    Manage,
    Shell,
    Python,
    Binary,
    Dummy,
}

#[derive(Clone, Debug)]
pub struct JobOutput {
    pub stdout: Vec<u8>,
    pub stderr: Vec<u8>,
}

impl JobOutput {
    const STREAM_BORDER: &'static str = "***";
    const STDOUT: &'static str = "STDOUT";
    const STDERR: &'static str = "STDERR";

    #[inline]
    fn create_delim(header: &'static str) -> String {
        format!(
            "{border} {head} {border}\n",
            border = JobOutput::STREAM_BORDER,
            head = header
        )
    }

    pub fn new() -> Self {
        Self {
            stdout: vec![],
            stderr: vec![],
        }
    }

    pub fn stdout(mut self, data: Vec<u8>) -> Self {
        self.stdout = data;
        self
    }

    pub fn stderr(mut self, data: Vec<u8>) -> Self {
        self.stderr = data;
        self
    }

    pub fn multiline(&self) -> Vec<u8> {
        let mut result: Vec<u8> = vec![];
        if self.stdout.len() > 0 {
            result.extend(JobOutput::create_delim(JobOutput::STDOUT).into_bytes());
            result.extend(&self.stdout);
            result.push(b'\n');
        }

        if self.stderr.len() > 0 {
            result.extend(JobOutput::create_delim(JobOutput::STDERR).into_bytes());
            result.extend(&self.stderr);
            result.push(b'\n');
        }
        result
    }

    pub fn from_raw(raw: &[u8]) -> Option<Self> {
        let raw = String::from_utf8_lossy(raw);
        let err_header = JobOutput::create_delim(JobOutput::STDERR);
        raw.strip_prefix(&JobOutput::create_delim(JobOutput::STDOUT))
            .map(|s: &str| {
                let mut parts = s
                    .split(&err_header)
                    .map(|d| d.trim().as_bytes().to_vec())
                    .collect::<Vec<Vec<u8>>>()
                    .into_iter();
                JobOutput::new()
                    .stdout(parts.next().unwrap())
                    .stderr(parts.next().unwrap_or(vec![]))
            })
    }

    pub fn into_appropriate(self) -> Vec<u8> {
        if self.stdout.len() > 0 {
            self.stdout
        } else if self.stderr.len() > 0 {
            self.stderr
        } else {
            b"No data".to_vec()
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::utils::vec_to_string;

    #[test]
    fn test_to_multiline() {
        let mut output = JobOutput::new();
        output.stdout = b"lol".to_vec();
        output.stderr = b"kek".to_vec();
        assert_eq!(
            vec_to_string(&output.multiline()),
            String::from(
                "*** STDOUT ***\n\
                 lol\n\
                 *** STDERR ***\n\
                 kek\n"
            )
        )
    }

    #[test]
    fn test_to_multiline_stderr_only() {
        let mut output = JobOutput::new();
        output.stderr = b"kek".to_vec();
        assert_eq!(
            vec_to_string(&output.multiline()),
            String::from(
                "*** STDERR ***\n\
                 kek\n"
            )
        )
    }

    #[test]
    fn test_from_multiline() {
        let txt = "*** STDOUT ***\n\
                   puk\n"
            .as_bytes();
        let output = JobOutput::from_raw(txt).unwrap();
        assert_eq!(vec_to_string(&output.stdout), "puk".to_string());
        assert_eq!(output.stderr.len(), 0);
    }
}
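A small round-trip illustration (not part of the diff): `multiline` serializes both streams with the `*** STDOUT ***` / `*** STDERR ***` delimiters, and `from_raw` parses that format back as long as the data starts with the STDOUT delimiter:

```rust
// Sketch only: encode both streams, then decode them again.
fn output_roundtrip() {
    let encoded = JobOutput::new()
        .stdout(b"files listed".to_vec())
        .stderr(b"permission denied".to_vec())
        .multiline();
    // encoded == b"*** STDOUT ***\nfiles listed\n*** STDERR ***\npermission denied\n"
    let decoded = JobOutput::from_raw(&encoded).expect("starts with the STDOUT delimiter");
    assert_eq!(decoded.stdout, b"files listed");
    assert_eq!(decoded.stderr, b"permission denied");
    // into_appropriate() prefers stdout, then stderr, then the "No data" placeholder.
    assert_eq!(decoded.into_appropriate(), b"files listed".to_vec());
}
```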
@@ -0,0 +1,7 @@
pub mod builder;
pub mod cache;
pub mod meta;
pub mod misc;
pub mod result;

pub use {builder::*, cache::*, meta::*, misc::*, result::*};
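Because everything is re-exported from the module root, callers can keep their flat imports instead of naming the new submodules; a hedged sketch (the `crate::models::jobs` path is taken from the test imports later in this diff):

```rust
// Both forms resolve to the same items thanks to the `pub use` re-exports above.
use crate::models::jobs::{build_jobs, JobMeta};                 // flat path, as before the split
use crate::models::jobs::builder::build_jobs_with_result;       // explicit submodule path
```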
@@ -0,0 +1,155 @@
use super::JobState;
use crate::{models::schema::*, utils::systime_to_string, UID};
use diesel::{Identifiable, Insertable, Queryable};
use serde::{Deserialize, Serialize};
use std::{fmt, time::SystemTime};
use uuid::Uuid;

#[derive(
    Serialize, Deserialize, Clone, Debug, Queryable, Identifiable, Insertable, AsChangeset,
)]
#[table_name = "results"]
pub struct ExactJob {
    pub agent_id: Uuid,
    pub created: SystemTime,
    pub id: Uuid,
    pub job_id: Uuid,
    pub result: Option<Vec<u8>>,
    pub state: JobState,
    pub retcode: Option<i32>,
    pub updated: SystemTime,
}

impl Default for ExactJob {
    fn default() -> Self {
        Self {
            agent_id: Uuid::nil(),
            created: SystemTime::now(),
            id: Uuid::new_v4(),
            job_id: Uuid::nil(),
            result: None,
            state: JobState::Queued,
            retcode: None,
            updated: SystemTime::now(),
        }
    }
}

impl fmt::Display for ExactJob {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut out = format!("Result {}", self.id);
        out += &format!("\nAgent {}", self.agent_id);
        out += &format!("\nJob: {}", self.job_id);
        out += &format!("\nUpdated: {}", systime_to_string(&self.updated));
        out += &format!("\nState: {}", self.state);
        if self.state == JobState::Finished {
            if self.retcode.is_some() {
                out += &format!("\nReturn code: {}", self.retcode.unwrap());
            }
            if self.result.is_some() {
                out += &format!(
                    "\nResult: {}",
                    String::from_utf8_lossy(self.result.as_ref().unwrap())
                );
            }
        }
        write!(f, "{}", out)
    }
}

impl ExactJob {
    pub fn from_meta(job_id: Uuid, result_id: Option<Uuid>) -> Self {
        Self {
            id: result_id.unwrap_or(Uuid::new_v4()),
            agent_id: *UID,
            job_id,
            ..Default::default()
        }
    }

    //pub fn as_job_output(&self) -> JobOutput {}
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        models::jobs::{build_jobs, JobMeta, JobOutput},
        utils::vec_to_string,
        UResult,
    };

    #[tokio::test]
    async fn test_is_really_async() {
        const SLEEP_SECS: u64 = 1;
        let job = JobMeta::from_shell(format!("sleep {}", SLEEP_SECS));
        let sleep_jobs: Vec<JobMeta> = (0..50).map(|_| job.clone()).collect();
        let now = SystemTime::now();
        build_jobs(sleep_jobs).run_until_complete().await;
        assert!(now.elapsed().unwrap().as_secs() < SLEEP_SECS + 2)
    }

    #[tokio::test]
    async fn test_shell_job() -> UResult<()> {
        let job = JobMeta::from_shell("whoami");
        let job_result = build_jobs(job).run_one_until_complete().await;
        let stdout = JobOutput::from_raw(&job_result.unwrap().result.unwrap())
            .unwrap()
            .stdout;
        assert_eq!(vec_to_string(&stdout).trim(), "plazmoid");
        Ok(())
    }

    #[tokio::test]
    async fn test_complex_load() -> UResult<()> {
        const SLEEP_SECS: u64 = 1;
        let now = SystemTime::now();
        let longest_job = JobMeta::from_shell(format!("sleep {}", SLEEP_SECS));
        let longest_job = build_jobs(longest_job).spawn().await;
        let ls = build_jobs(JobMeta::from_shell("ls"))
            .run_one_until_complete()
            .await
            .unwrap();
        assert_eq!(ls.retcode.unwrap(), 0);
        let result = JobOutput::from_raw(&ls.result.unwrap()).unwrap();
        let folders = String::from_utf8_lossy(&result.stdout);
        let subfolders_jobs: Vec<JobMeta> = folders
            .lines()
            .map(|f| JobMeta::from_shell(format!("ls {}", f)))
            .collect();
        let ls_subfolders = build_jobs(subfolders_jobs).run_until_complete().await;
        for result in ls_subfolders {
            assert_eq!(result.unwrap().retcode.unwrap(), 0);
        }
        longest_job.wait().await;
        assert_eq!(now.elapsed().unwrap().as_secs(), SLEEP_SECS);
        Ok(())
    }
    /*
    #[tokio::test]
    async fn test_exec_multiple_jobs_nowait() -> UResult<()> {
        const REPEATS: usize = 10;
        let job = JobMeta::from_shell("whoami");
        let sleep_jobs: Vec<JobMeta> = (0..=REPEATS).map(|_| job.clone()).collect();
        build_jobs(sleep_jobs).spawn().await;
        let mut completed = 0;
        while completed < REPEATS {
            let c = pop_completed().await.len();
            if c > 0 {
                completed += c;
                println!("{}", c);
            }
        }
        Ok(())
    }
    */
    #[tokio::test]
    async fn test_failing_shell_job() -> UResult<()> {
        let job = JobMeta::from_shell("lol_kek_puk");
        let job_result = build_jobs(job).run_one_until_complete().await.unwrap();
        let output = JobOutput::from_raw(&job_result.result.unwrap());
        assert!(output.is_none());
        assert!(job_result.retcode.is_none());
        Ok(())
    }
}