Branch: pull/1/head
Author: plazmoid (3 years ago)
Parent: 56bdb3bac7
Commit: 348bf4a90a
Changed files (lines changed per file):
  1. bin/u_agent/src/lib.rs (61)
  2. bin/u_server/src/filters.rs (6)
  3. bin/u_server/src/handlers.rs (7)
  4. bin/u_server/src/u_server.rs (21)
  5. integration/tests/helpers/panel.rs (13)
  6. lib/u_api_proc_macro/src/lib.rs (8)
  7. lib/u_lib/Cargo.toml (3)
  8. lib/u_lib/src/builder.rs (17)
  9. lib/u_lib/src/cache.rs (1)
  10. lib/u_lib/src/config.rs (1)
  11. lib/u_lib/src/datatypes.rs (4)
  12. lib/u_lib/src/errors/chan.rs (28)
  13. lib/u_lib/src/errors/mod.rs (5)
  14. lib/u_lib/src/errors/variants.rs (46)
  15. lib/u_lib/src/executor.rs (8)
  16. lib/u_lib/src/lib.rs (5)
  17. lib/u_lib/src/models/error.rs (0)
  18. lib/u_lib/src/models/jobs/assigned.rs (25)
  19. lib/u_lib/src/models/jobs/meta.rs (29)
  20. lib/u_lib/src/models/mod.rs (4)
  21. lib/u_lib/src/utils/combined_result.rs (4)
  22. lib/u_lib/src/utils/fmt/hexlify.rs (12)
  23. lib/u_lib/src/utils/fmt/mod.rs (5)
  24. lib/u_lib/src/utils/fmt/stripped.rs (80)
  25. lib/u_lib/src/utils/misc.rs (2)
  26. lib/u_lib/src/utils/mod.rs (4)
  27. lib/u_lib/src/utils/tempfile.rs (12)
  28. scripts/cargo_musl.sh (1)

bin/u_agent/src/lib.rs

@@ -8,14 +8,18 @@ extern crate log;
 extern crate env_logger;
 use std::env;
+use std::panic;
+use std::sync::Arc;
 use tokio::time::{sleep, Duration};
 use u_lib::{
     api::ClientHandler,
     builder::JobBuilder,
     cache::JobCache,
+    errors::ErrChan,
     executor::pop_completed,
     messaging::Reportable,
     models::AssignedJob,
+    UError,
     UID,
     //daemonize
 };
@@ -26,12 +30,12 @@ pub async fn process_request(job_requests: Vec<AssignedJob>, client: &ClientHandler) {
     if job_requests.len() > 0 {
         for jr in &job_requests {
             if !JobCache::contains(&jr.job_id) {
-                info!("Fetching job: {}", &jr.job_id);
+                debug!("Fetching job: {}", &jr.job_id);
                 let fetched_job = loop {
                     match client.get_jobs(Some(jr.job_id)).await {
                         Ok(mut result) => break result.pop().unwrap(),
                         Err(err) => {
-                            error!("{:?} \nretrying...", err);
+                            debug!("{:?} \nretrying...", err);
                             sleep(Duration::from_secs(ITERATION_LATENCY)).await;
                         }
                     }
@@ -39,7 +43,7 @@ pub async fn process_request(job_requests: Vec<AssignedJob>, client: &ClientHandler) {
                 JobCache::insert(fetched_job);
             }
         }
-        info!(
+        debug!(
             "Scheduling jobs: {}",
             job_requests
                 .iter()
@@ -50,41 +54,56 @@ pub async fn process_request(job_requests: Vec<AssignedJob>, client: &ClientHandler) {
         let mut builder = JobBuilder::from_request(job_requests);
         let errors = builder.pop_errors();
         if errors.len() > 0 {
-            error!(
-                "Some errors encountered: \n{}",
-                errors
-                    .iter()
-                    .map(|j| j.to_string())
-                    .collect::<Vec<String>>()
-                    .join("\n")
-            );
+            errors.into_iter().for_each(ErrChan::send)
         }
         builder.unwrap_one().spawn().await;
     }
 }
 
-pub async fn run_forever() {
-    //daemonize();
-    env_logger::init();
-    let arg_ip = env::args().nth(1);
-    let client = ClientHandler::new(arg_ip.as_deref());
-    info!("Connecting to the server");
+async fn error_reporting(client: Arc<ClientHandler>) {
+    loop {
+        let err = ErrChan::recv();
+        debug!("Error encountered: {:?}", err);
+        'retry: for _ in 0..3 {
+            match client.report(&[Reportable::Error(err.to_string())]).await {
+                Ok(_) => break 'retry,
+                Err(e) => {
+                    debug!("Reporting error: {:?}", e);
+                    sleep(Duration::from_secs(10)).await;
+                }
+            }
+        }
+    }
+}
+
+async fn do_stuff(client: Arc<ClientHandler>) -> ! {
     loop {
         match client.get_personal_jobs(Some(*UID)).await {
             Ok(resp) => {
                 let job_requests = resp.into_builtin_vec();
                 process_request(job_requests, &client).await;
             }
-            Err(err) => {
-                error!("{:?}", err);
-            }
+            Err(err) => ErrChan::send(err),
         }
         let result: Vec<Reportable> = pop_completed().await.into_iter().collect();
         if result.len() > 0 {
             if let Err(err) = client.report(&result).await {
-                error!("{:?}", err);
+                ErrChan::send(err)
             }
         }
         sleep(Duration::from_secs(ITERATION_LATENCY)).await;
     }
 }
+
+pub async fn run_forever() {
+    //daemonize();
+    env_logger::init();
+    let arg_ip = env::args().nth(1);
+    let client = Arc::new(ClientHandler::new(arg_ip.as_deref()));
+    let _cli = client.clone();
+    panic::set_hook(Box::new(|panic_info| {
+        ErrChan::send(UError::Panic(panic_info.to_string()))
+    }));
+    tokio::spawn(error_reporting(_cli));
+    do_stuff(client).await;
+}
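The agent binary's entry point is not part of this commit; assuming the binary crate simply drives the library's run_forever(), the wiring would look roughly like the sketch below (the crate name and the tokio setup are assumptions, not taken from the diff).

    // Hypothetical bin/u_agent/src/main.rs, shown only to illustrate how the
    // library API above is meant to be driven; not part of this commit.
    #[tokio::main]
    async fn main() {
        // run_forever() installs the panic hook, spawns error_reporting()
        // in the background and then loops in do_stuff() until killed.
        u_agent::run_forever().await;
    }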

bin/u_server/src/filters.rs

@@ -70,7 +70,11 @@ pub fn make_filters() -> impl Filter<Extract = (impl Reply,), Error = Rejection>
         .and(warp::path("report"))
         .and(get_content::<Vec<Reportable>>().and_then(Endpoints::report));
 
-    let auth_token = format!("Bearer {}", env::var("ADMIN_AUTH_TOKEN").unwrap()).into_boxed_str();
+    let auth_token = format!(
+        "Bearer {}",
+        env::var("ADMIN_AUTH_TOKEN").expect("No auth token provided")
+    )
+    .into_boxed_str();
     let auth_header = warp::header::exact("authorization", Box::leak(auth_token));
     let auth_zone = (get_agents

bin/u_server/src/handlers.rs

@@ -5,7 +5,7 @@ use serde::Serialize;
 use u_lib::{
     messaging::{AsMsg, BaseMessage, Reportable},
     models::*,
-    utils::OneOrVec,
+    utils::{OneOrVec, Stripped},
     ULocalError,
 };
 use uuid::Uuid;
@@ -162,6 +162,11 @@ impl Endpoints {
             }
             Reportable::Error(e) => {
                 let err = AgentError::from_msg(e, id);
+                warn!(
+                    "{} reported an error: {}",
+                    err.agent_id,
+                    Stripped(&err.msg.as_str())
+                );
                 UDB::lock_db().report_error(&err).unwrap();
             }
             Reportable::Dummy => (),

bin/u_server/src/u_server.rs

@@ -1,22 +1,24 @@
 #[macro_use]
 extern crate log;
-#[macro_use]
+#[cfg_attr(test, macro_use)]
 extern crate mockall;
-#[macro_use]
+#[cfg_attr(test, macro_use)]
 extern crate mockall_double;
-// because of linking errors
+// due to linking errors
 extern crate openssl;
-#[macro_use]
+// don't touch anything
 extern crate diesel;
-//
+// in this block
 
 mod db;
 mod filters;
 mod handlers;
 
 use db::UDB;
 use filters::make_filters;
+use std::path::PathBuf;
 use u_lib::{config::MASTER_PORT, models::*, utils::init_env};
 use warp::Filter;
@@ -28,7 +30,7 @@ fn prefill_jobs() {
         .with_alias("agent_hello")
         .build()
         .unwrap();
-    UDB::lock_db().insert_jobs(&[agent_hello]).ok();
+    UDB::lock_db().insert_jobs(&[agent_hello]).unwrap();
 }
 
 fn init_logger() {
@@ -60,11 +62,12 @@ fn init_all() {
 pub async fn serve() {
     init_all();
     let routes = make_filters();
+    let certs_dir = PathBuf::from("certs");
     warp::serve(routes.with(warp::log("warp")))
         .tls()
-        .cert_path("./certs/server.crt")
-        .key_path("./certs/server.key")
-        .client_auth_required_path("./certs/ca.crt")
+        .cert_path(certs_dir.join("server.crt"))
+        .key_path(certs_dir.join("server.key"))
+        .client_auth_required_path(certs_dir.join("ca.crt"))
         .run(([0, 0, 0, 0], MASTER_PORT))
         .await;
 }

integration/tests/helpers/panel.rs

@@ -2,7 +2,7 @@ use serde::de::DeserializeOwned;
 use serde_json::from_slice;
 use shlex::split;
 use std::process::{Command, Output};
-use u_lib::{datatypes::DataResult, messaging::AsMsg};
+use u_lib::{datatypes::DataResult, messaging::AsMsg, utils::VecDisplay};
 
 const PANEL_BINARY: &str = "/u_panel";
@@ -21,8 +21,13 @@ impl Panel {
     pub fn output_argv<T: AsMsg + DeserializeOwned>(args: &[&str]) -> PanelResult<T> {
         let result = Self::run(args);
-        assert!(result.status.success());
-        from_slice(&result.stdout).map_err(|e| e.to_string())
+        from_slice(&result.stdout).map_err(|e| {
+            eprintln!(
+                "Failed to decode panel response: '{}'",
+                VecDisplay(result.stdout)
+            );
+            e.to_string()
+        })
     }
 
     pub fn output<T: AsMsg + DeserializeOwned>(args: impl Into<String>) -> PanelResult<T> {
@@ -39,7 +44,7 @@ impl Panel {
     fn status_is_ok<T: AsMsg + DeserializeOwned>(data: PanelResult<T>) -> T {
         match data.unwrap() {
             DataResult::Ok(r) => r,
-            DataResult::Err(e) => panic!("Panel failed with erroneous status: {}", e),
+            DataResult::Err(e) => panic!("Panel failed: {}", e),
         }
     }

lib/u_api_proc_macro/src/lib.rs

@@ -69,7 +69,7 @@ pub fn api_route(args: TokenStream, item: TokenStream) -> TokenStream {
                 Ok(_) => Ok(()),
                 Err(e) => Err(UError::from(e))
             };
-            match is_success {
+            let result = match is_success {
                 Ok(_) => response.json::<BaseMessage<#return_ty>>()
                     .await
                     .map(|msg| msg.into_inner())
@@ -82,14 +82,14 @@ pub fn api_route(args: TokenStream, item: TokenStream) -> TokenStream {
                 Err(UError::NetError(err_src, _)) => Err(
                     UError::NetError(
                         err_src,
-                        response.text().await.unwrap()
+                        response.text().await?
                     )
                 ),
                 _ => unreachable!()
-            }
+            };
+            Ok(result?)
         }
     };
-    //eprintln!("#!#! RESULT:\n{}", q);
     q.into()
 }
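The generated body now binds the match to result and ends with Ok(result?), which also lets response.text().await? rely on From conversions into the function's error type instead of unwrapping. A self-contained sketch of the same shape, using a stand-in error type rather than UError/UErrorBt:

    // Sketch of the "bind, then Ok(result?)" pattern the macro now emits.
    #[derive(Debug)]
    struct OuterError(String);

    impl From<std::num::ParseIntError> for OuterError {
        fn from(e: std::num::ParseIntError) -> Self {
            OuterError(e.to_string())
        }
    }

    fn bind_then_question(raw: &str) -> Result<i32, OuterError> {
        let parsed: Result<i32, std::num::ParseIntError> = raw.parse();
        let result = match parsed {
            Ok(n) => Ok(n * 2),
            Err(e) => Err(e),
        };
        // `?` converts ParseIntError into OuterError via the From impl,
        // just like the generated Ok(result?) converts into the U* error types.
        Ok(result?)
    }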

lib/u_lib/Cargo.toml

@@ -28,6 +28,8 @@ strum = { version = "0.20", features = ["derive"] }
 once_cell = "1.7.2"
 shlex = "1.0.0"
 u_api_proc_macro = { version = "*", path = "../u_api_proc_macro" }
+crossbeam = "0.8.1"
+backtrace = "0.3.61"
 
 [dependencies.diesel]
 version = "1.4.5"
@@ -35,3 +37,4 @@ features = ["postgres", "uuid"]
 
 [dev-dependencies]
 test-case = "1.1.0"
+rstest = "0.11"

lib/u_lib/src/builder.rs

@@ -1,9 +1,4 @@
-use crate::{
-    UError, UResult, cache::JobCache, executor::{Waiter, DynFut},
-    models::{Agent, AssignedJob, JobMeta, JobType},
-    messaging::Reportable,
-    utils::{CombinedResult, OneOrVec}
-};
+use crate::{UError, UErrorBt, UResult, cache::JobCache, executor::{Waiter, DynFut}, messaging::Reportable, models::{Agent, AssignedJob, JobMeta, JobType}, utils::{CombinedResult, OneOrVec}};
 use guess_host_triple::guess_host_triple;
 use std::collections::HashMap;
@@ -15,11 +10,11 @@ impl JobBuilder {
     pub fn from_request(job_requests: impl OneOrVec<AssignedJob>) -> CombinedResult<Self> {
         let job_requests = job_requests.into_vec();
         let mut prepared: Vec<DynFut> = vec![];
-        let mut result = CombinedResult::new();
+        let mut result = CombinedResult::<JobBuilder, UErrorBt>::new();
         for req in job_requests {
             let job_meta = JobCache::get(&req.job_id);
             if job_meta.is_none() {
-                result.err(UError::NoJob(req.job_id));
+                result.err(UError::NoJob(req.job_id).into_bt());
                 continue;
             }
             let job_meta = job_meta.unwrap();
@@ -29,12 +24,12 @@ impl JobBuilder {
                 JobType::Shell => {
                     let meta = JobCache::get(&req.job_id).ok_or(UError::NoJob(req.job_id))?;
                     let curr_platform = guess_host_triple().unwrap_or("unknown").to_string();
-                    //extend platform checking (partial check)
+                    //TODO: extend platform checking (partial check)
                     if meta.platform != curr_platform {
                         return Err(UError::InsuitablePlatform(
                             meta.platform.clone(),
                             curr_platform,
-                        ));
+                        ).into());
                     }
                     let job = AssignedJob::new(req.job_id, Some(&req));
                     prepared.push(Box::pin(job.run()))
@@ -298,7 +293,7 @@ mod tests {
             job = job.with_payload(p);
         }
         let err = job.build().unwrap_err();
-        let err_msg = unwrap_enum!(err, UError::JobArgsError);
+        let err_msg = unwrap_enum!(err.err, UError::JobArgsError);
         assert!(err_msg.contains(err_str));
         Ok(())
     }

lib/u_lib/src/cache.rs

@@ -1,4 +1,5 @@
 use crate::models::JobMeta;
+use lazy_static::lazy_static;
 use std::{
     collections::HashMap,
     ops::Deref,

lib/u_lib/src/config.rs

@@ -1,3 +1,4 @@
+use lazy_static::lazy_static;
 use uuid::Uuid;
 
 pub const MASTER_SERVER: &str = "127.0.0.1"; //Ipv4Addr::new(3,9,16,40)

lib/u_lib/src/datatypes.rs

@@ -1,4 +1,4 @@
-use crate::UError;
+use crate::UErrorBt;
 use serde::{Deserialize, Serialize};
 
 #[derive(Serialize, Deserialize)]
@@ -6,5 +6,5 @@ use serde::{Deserialize, Serialize};
 #[serde(tag = "status", content = "data")]
 pub enum DataResult<M> {
     Ok(M),
-    Err(UError),
+    Err(UErrorBt),
 }
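For context, #[serde(tag = "status", content = "data")] makes DataResult adjacently tagged: the variant name travels under "status" and the payload under "data". A minimal, self-contained sketch with a stand-in enum (DemoResult is not from the codebase):

    // Illustrates the wire format produced by the adjacently tagged attribute.
    use serde::Serialize;

    #[derive(Serialize)]
    #[serde(tag = "status", content = "data")]
    enum DemoResult<M> {
        Ok(M),
        Err(String),
    }

    fn main() {
        let ok = serde_json::to_string(&DemoResult::Ok(1u32)).unwrap();
        assert_eq!(ok, r#"{"status":"Ok","data":1}"#);

        let err = serde_json::to_string(&DemoResult::<u32>::Err("boom".into())).unwrap();
        assert_eq!(err, r#"{"status":"Err","data":"boom"}"#);
    }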

lib/u_lib/src/errors/chan.rs (new file)

@@ -0,0 +1,28 @@
+use crate::UErrorBt;
+use crossbeam::channel::{self, Receiver, Sender};
+use once_cell::sync::OnceCell;
+
+type ChanError = UErrorBt;
+
+static ERR_CHAN: OnceCell<ErrChan> = OnceCell::new();
+
+pub struct ErrChan {
+    tx: Sender<ChanError>,
+    rx: Receiver<ChanError>,
+}
+
+impl ErrChan {
+    fn get() -> &'static Self {
+        ERR_CHAN.get_or_init(|| {
+            let (tx, rx) = channel::bounded(20);
+            Self { tx, rx }
+        })
+    }
+
+    pub fn send(msg: impl Into<UErrorBt>) {
+        Self::get().tx.send(msg.into()).unwrap()
+    }
+
+    pub fn recv() -> ChanError {
+        Self::get().rx.recv().unwrap()
+    }
+}
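Usage-wise, ErrChan is a lazily initialized, process-wide bounded channel: any thread or task can push an error, and the agent's error_reporting loop blocks on recv until one arrives. A small sketch (import paths assume the errors module layout introduced in this commit):

    // Sketch: one producer thread reporting a failure, the current thread
    // draining it. send() accepts anything convertible into UErrorBt.
    use std::thread;
    use u_lib::{errors::ErrChan, UError};

    fn demo() {
        thread::spawn(|| {
            ErrChan::send(UError::Runtime("something broke".into()));
        });

        // Blocks until an error is available (crossbeam bounded channel).
        let err = ErrChan::recv();
        println!("received:\n{}", err);
    }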

lib/u_lib/src/errors/mod.rs (new file)

@@ -0,0 +1,5 @@
+mod chan;
+mod variants;
+
+pub use chan::*;
+pub use variants::*;

lib/u_lib/src/errors/variants.rs

@@ -1,16 +1,39 @@
+use backtrace::Backtrace as CrateBacktrace;
 use diesel::result::Error as DslError;
 use reqwest::Error as ReqError;
 use serde::{Deserialize, Serialize};
+use std::fmt;
 use thiserror::Error;
 use uuid::Uuid;
 
-pub type UResult<T> = std::result::Result<T, UError>;
+pub type UResult<T> = std::result::Result<T, UErrorBt>;
 pub type ULocalResult<T> = std::result::Result<T, ULocalError>;
 
+#[derive(Error, Debug, Serialize, Deserialize, Clone)]
+pub struct UErrorBt {
+    pub err: UError,
+    pub backtrace: String,
+}
+
+impl fmt::Display for UErrorBt {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{}\nBACKTRACE: \n{:?}", self.err, self.backtrace)
+    }
+}
+
+impl From<UError> for UErrorBt {
+    fn from(err: UError) -> UErrorBt {
+        UErrorBt {
+            err,
+            backtrace: format!("{:?}", CrateBacktrace::new()),
+        }
+    }
+}
+
 #[derive(Error, Debug, Serialize, Deserialize, Clone)]
 pub enum UError {
-    #[error("Error: {0}")]
-    Raw(String),
+    #[error("Runtime error: {0}")]
+    Runtime(String),
 
     #[error("Connection error: {0}. Body: {1}")]
     NetError(String, String),
@@ -33,11 +56,26 @@ pub enum UError {
     #[error("Job {0} doesn't exist")]
     NoJob(Uuid),
 
-    #[error("Error opening {0}: {1}")]
+    #[error("Error while processing {0}: {1}")]
     FSError(String, String),
 
     #[error("Wrong auth token")]
     WrongToken,
+
+    #[error("Panicked: {0}")]
+    Panic(String),
+}
+
+impl UError {
+    pub fn into_bt(self) -> UErrorBt {
+        UErrorBt::from(self)
+    }
+}
+
+impl From<ReqError> for UErrorBt {
+    fn from(e: ReqError) -> Self {
+        UError::from(e).into_bt()
+    }
 }
 
 impl From<ReqError> for UError {
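Since UResult<T> is now Result<T, UErrorBt>, call sites build errors from a UError and get the backtrace captured at conversion time, either explicitly with into_bt() or implicitly through ? and the From impls above. A short sketch of both (function names are illustrative):

    // Sketch: constructing and propagating the backtraced error type.
    use u_lib::{UError, UResult};

    fn parse_flag(raw: &str) -> UResult<bool> {
        match raw {
            "on" => Ok(true),
            "off" => Ok(false),
            other => Err(UError::Runtime(format!("bad flag: {}", other)).into_bt()),
        }
    }

    fn caller() -> UResult<()> {
        // The UErrorBt from parse_flag flows through `?` unchanged; a bare
        // UError would also convert here via From<UError> for UErrorBt.
        let _flag = parse_flag("maybe")?;
        Ok(())
    }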

lib/u_lib/src/executor.rs

@@ -55,12 +55,12 @@ impl Waiter {
                 tx.send(fid).await.unwrap();
                 result
             };
-            let handle = JoinInfo {
+            let handler = JoinInfo {
                 handle: spawn(task_wrapper),
                 completed: false,
                 collectable,
             };
-            FUT_RESULTS.lock().await.insert(fid, handle);
+            FUT_RESULTS.lock().await.insert(fid, handler);
         }
         self
     }
@@ -147,9 +147,9 @@ mod tests {
             })
         };
         assert_eq!(0, *val.lock().await);
-        spawn(async {}).await.ok();
+        spawn(async {}).await.unwrap();
         assert_eq!(5, *val.lock().await);
-        t.await.ok();
+        t.await.unwrap();
         assert_eq!(5, *val.lock().await);
     }
 }

lib/u_lib/src/lib.rs

@@ -11,16 +11,13 @@ pub mod models;
 pub mod utils;
 
 pub use config::UID;
-pub use errors::{UError, ULocalError, ULocalResult, UResult};
+pub use errors::{UError, UErrorBt, ULocalError, ULocalResult, UResult};
 
 pub mod schema_exports {
     pub use crate::models::{Agentstate, Jobstate, Jobtype};
     pub use diesel::sql_types::*;
 }
 
-#[macro_use]
-extern crate lazy_static;
-
 #[macro_use]
 extern crate diesel;

lib/u_lib/src/models/jobs/assigned.rs

@@ -1,6 +1,7 @@
 use super::JobState;
 use crate::{
     cache::JobCache,
+    errors::UError,
     messaging::Reportable,
     models::{schema::*, JobOutput},
     utils::{systime_to_string, TempFile},
@@ -78,16 +79,22 @@ impl AssignedJob {
     pub async fn run(mut self) -> Reportable {
         let (argv, _payload) = {
             let meta = JobCache::get(&self.job_id).unwrap();
-            let extracted_payload = meta
-                .payload
-                .as_ref()
-                .and_then(|p| TempFile::write_exec(p).ok());
-            let argv = if let Some(ref p) = &extracted_payload {
-                meta.argv.replace("{}", &p.get_path())
+            if let Some(ref payload) = meta.payload {
+                let extracted_payload = match TempFile::write_exec(payload) {
+                    Ok(p) => p,
+                    Err(e) => {
+                        return Reportable::Error(
+                            UError::Runtime(e.to_string()).into_bt().to_string(),
+                        )
+                    }
+                };
+                (
+                    meta.argv.replace("{}", &extracted_payload.get_path()),
+                    Some(extracted_payload),
+                )
             } else {
-                meta.argv.clone()
-            };
-            (argv, extracted_payload)
+                (meta.argv.clone(), None)
+            }
         };
         let mut split_cmd = shlex::split(&argv).unwrap().into_iter();
         let cmd = split_cmd.nth(0).unwrap();

lib/u_lib/src/models/jobs/meta.rs

@@ -1,9 +1,10 @@
 use super::JobType;
-use crate::{models::schema::*, UError, UResult};
+use crate::{models::schema::*, utils::Stripped, UError, UResult};
 use diesel::{Identifiable, Insertable, Queryable};
 use guess_host_triple::guess_host_triple;
 use serde::{Deserialize, Serialize};
 use std::fmt;
+use std::str::from_utf8;
 use uuid::Uuid;
 
 #[derive(Serialize, Deserialize, Clone, Debug, Queryable, Identifiable, Insertable)]
@@ -41,20 +42,12 @@ impl fmt::Display for JobMeta {
         out += &format!("\nPlatform: {}", self.platform);
         if let Some(ref payload) = self.payload {
             if self.exec_type == JobType::Shell {
-                let (pld_len, large) = {
-                    let pl = payload.len();
-                    if pl > 20 {
-                        (20, true)
-                    } else {
-                        (pl, false)
-                    }
+                let payload = if let Ok(str_payload) = from_utf8(payload) {
+                    Stripped(&str_payload).to_string()
+                } else {
+                    Stripped(&payload).to_string()
                 };
-                let pld_beginning = &payload[..pld_len];
-                out += &format!(
-                    "\nPayload: {}{}",
-                    String::from_utf8_lossy(pld_beginning),
-                    if large { "" } else { " <...>" }
-                );
+                out += &format!("\nPayload: {}", payload);
             }
         }
         write!(f, "{}", out)
@@ -118,14 +111,15 @@ impl JobMetaBuilder {
             shlex::split(&inner.argv).ok_or(UError::JobArgsError("Shlex failed".into()))?;
         let empty_err = UError::JobArgsError("Empty argv".into());
         if argv_parts.get(0).ok_or(empty_err.clone())?.len() == 0 {
-            return Err(empty_err);
+            return Err(empty_err.into());
         }
         match inner.payload.as_ref() {
             Some(_) => {
                 if !inner.argv.contains("{}") {
                     return Err(UError::JobArgsError(
                         "Argv contains no executable placeholder".into(),
-                    ));
+                    )
+                    .into());
                 } else {
                     ()
                 }
@@ -135,7 +129,8 @@ impl JobMetaBuilder {
                     return Err(UError::JobArgsError(
                         "No payload provided, but argv contains executable placeholder"
                             .into(),
-                    ));
+                    )
+                    .into());
                 } else {
                     ()
                 }

lib/u_lib/src/models/mod.rs

@@ -1,6 +1,6 @@
 mod agent;
-mod errors;
+mod error;
 mod jobs;
 pub mod schema;
 
-pub use crate::models::{agent::*, errors::*, jobs::*};
+pub use crate::models::{agent::*, error::*, jobs::*};

lib/u_lib/src/utils/combined_result.rs

@@ -1,7 +1,7 @@
 use crate::utils::OneOrVec;
-use crate::UError;
+use crate::UErrorBt;
 
-pub struct CombinedResult<T, E = UError> {
+pub struct CombinedResult<T, E = UErrorBt> {
     ok: Vec<T>,
     err: Vec<E>,
 }

lib/u_lib/src/utils/fmt/hexlify.rs

@@ -10,3 +10,15 @@ impl<'a> fmt::LowerHex for Hexlify<'a> {
         Ok(())
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_hexlify() {
+        let data = b"\x5a\x6b\x23\x4f\xa3\x7f\x9e";
+        let result = "5a6b234fa37f9e";
+        assert_eq!(format!("{:x}", Hexlify(data)), result);
+    }
+}

lib/u_lib/src/utils/fmt/mod.rs (new file)

@@ -0,0 +1,5 @@
+mod hexlify;
+mod stripped;
+
+pub use hexlify::*;
+pub use stripped::*;

lib/u_lib/src/utils/fmt/stripped.rs (new file)

@@ -0,0 +1,80 @@
+use std::fmt;
+use std::iter::Iterator;
+use std::slice::Iter as SliceIter;
+use std::str::Chars;
+
+const MAX_DATA_LEN: usize = 200;
+
+pub trait Strippable {
+    //TODO: waiting for stabilizing GATs
+    type Item: fmt::Display;
+    type TypeIter: Iterator<Item = Self::Item>;
+
+    fn length(&self) -> usize;
+    fn iterator(&self) -> Self::TypeIter;
+}
+
+impl<'a> Strippable for &'a str {
+    type Item = char;
+    type TypeIter = Chars<'a>;
+
+    fn length(&self) -> usize {
+        self.len()
+    }
+
+    fn iterator(&self) -> Self::TypeIter {
+        self.chars()
+    }
+}
+
+impl<'a> Strippable for &'a Vec<u8> {
+    type Item = &'a u8;
+    type TypeIter = SliceIter<'a, u8>;
+
+    fn length(&self) -> usize {
+        self.len()
+    }
+
+    fn iterator(&self) -> Self::TypeIter {
+        self.iter()
+    }
+}
+
+pub struct Stripped<'inner, Inner: Strippable + 'inner>(pub &'inner Inner);
+
+impl<'inner, Inner: Strippable + 'inner> Stripped<'inner, Inner> {
+    fn iter(&self) -> Inner::TypeIter {
+        self.0.iterator()
+    }
+
+    fn placeholder(&self) -> &'static str {
+        if self.0.length() >= MAX_DATA_LEN {
+            " <...>"
+        } else {
+            ""
+        }
+    }
+}
+
+impl<'inner, Inner: Strippable + 'inner> fmt::Display for Stripped<'inner, Inner> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let placeholder = self.placeholder();
+        for c in self.iter().take(MAX_DATA_LEN - placeholder.len()) {
+            write!(f, "{}", c)?;
+        }
+        write!(f, "{}", placeholder)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use rstest::*;
+
+    #[rstest]
+    #[case("abc", 3)]
+    #[case("abcde".repeat(50), MAX_DATA_LEN)]
+    fn test_strip(#[case] input: impl Into<String>, #[case] result_len: usize) {
+        let s = input.into();
+        assert_eq!(Stripped(&s.as_str()).to_string().len(), result_len);
+    }
+}
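In practice Stripped is a Display wrapper that prints at most MAX_DATA_LEN items and appends " <...>" when the input is that long or longer; it is what the server handler and JobMeta::Display elsewhere in this commit use for payload logging. A small usage sketch:

    // Sketch: Stripped over the two Strippable impls (&str and &Vec<u8>).
    use u_lib::utils::Stripped;

    fn demo() {
        let cmd = "ls -la";
        println!("{}", Stripped(&cmd)); // short input, printed as-is

        let long = "A".repeat(500);
        println!("{}", Stripped(&long.as_str())); // truncated, ends with " <...>"

        let payload: Vec<u8> = vec![0x41; 500];
        let payload_ref = &payload;
        println!("{}", Stripped(&payload_ref)); // bytes print as numbers, same cap
    }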

lib/u_lib/src/utils/misc.rs

@@ -22,7 +22,7 @@ impl<T> OneOrVec<T> for Vec<T> {
 #[macro_export]
 macro_rules! unwrap_enum {
-    ($src:ident, $t:path) => {
+    ($src:expr, $t:path) => {
         if let $t(result) = $src {
             result
         } else {
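Switching the matcher from ident to expr is what allows call sites like unwrap_enum!(err.err, UError::JobArgsError) in the builder tests above: a field access is an expression, not an identifier. A standalone sketch of the same idea with a stand-in macro and types (the panicking else arm is assumed, since the original macro's else branch is not shown in this hunk):

    // Stand-in macro with the same matcher shape as unwrap_enum! after this change.
    macro_rules! unwrap_enum {
        ($src:expr, $t:path) => {
            if let $t(result) = $src {
                result
            } else {
                panic!("wrong variant")
            }
        };
    }

    enum Wrapper {
        Text(String),
    }

    struct Holder {
        inner: Wrapper,
    }

    fn main() {
        let holder = Holder {
            inner: Wrapper::Text("ok".into()),
        };
        // With $src:ident this call would not match the macro rule; with $src:expr it does.
        let text = unwrap_enum!(holder.inner, Wrapper::Text);
        assert_eq!(text, "ok");
    }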

lib/u_lib/src/utils/mod.rs

@@ -1,6 +1,6 @@
 mod combined_result;
 mod conv;
-mod hexlify;
+mod fmt;
 mod misc;
 mod storage;
 mod tempfile;
@@ -8,7 +8,7 @@ mod vec_display;
 pub use combined_result::*;
 pub use conv::*;
-pub use hexlify::*;
+pub use fmt::*;
 pub use misc::*;
 pub use storage::*;
 pub use tempfile::*;

lib/u_lib/src/utils/tempfile.rs

@@ -1,3 +1,4 @@
+use crate::{UError, UResult};
 use std::{env::temp_dir, fs, ops::Drop, os::unix::fs::PermissionsExt, path::PathBuf};
 use uuid::Uuid;
@@ -17,16 +18,17 @@ impl TempFile {
         Self { path }
     }
 
-    pub fn write_all(&self, data: &[u8]) -> Result<(), String> {
-        fs::write(&self.path, data).map_err(|e| e.to_string())
+    pub fn write_all(&self, data: &[u8]) -> UResult<()> {
+        fs::write(&self.path, data).map_err(|e| UError::FSError(self.get_path(), e.to_string()))?;
+        Ok(())
     }
 
-    pub fn write_exec(data: &[u8]) -> Result<Self, (String, String)> {
+    pub fn write_exec(data: &[u8]) -> UResult<Self> {
         let this = Self::new();
         let path = this.get_path();
-        this.write_all(data).map_err(|e| (path.clone(), e))?;
+        this.write_all(data)?;
         let perms = fs::Permissions::from_mode(0o555);
-        fs::set_permissions(&path, perms).map_err(|e| (path, e.to_string()))?;
+        fs::set_permissions(&path, perms).map_err(|e| UError::FSError(path, e.to_string()))?;
         Ok(this)
     }
 }
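With both helpers returning UResult, callers can now propagate filesystem failures with ? and receive a UError::FSError (path plus OS message) wrapped into a UErrorBt. A short sketch of the intended call pattern (function name is illustrative):

    // Sketch: write an executable payload to a temp file, propagating failures.
    use u_lib::{utils::TempFile, UResult};

    fn materialize_payload(payload: &[u8]) -> UResult<TempFile> {
        // On failure this yields UError::FSError(path, os_error) as a UErrorBt.
        let file = TempFile::write_exec(payload)?;
        println!("payload written to {}", file.get_path());
        Ok(file)
    }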

scripts/cargo_musl.sh

@@ -4,6 +4,7 @@ source $(dirname $0)/rootdir.sh #set ROOTDIR
 ARGS=$@
 docker run \
     --env-file $ROOTDIR/.env \
+    --env-file $ROOTDIR/.env.private \
     -v $ROOTDIR:/volume \
     -v cargo-cache:/root/.cargo/registry \
     -w /volume \
