revert docker image caching, improve env gathering

pull/1/head
plazmoid 2 years ago
parent 5d04aa61d6
commit c60890fd67
  1. Makefile.toml (11 lines changed)
  2. bin/u_agent/src/lib.rs (9 lines changed)
  3. bin/u_panel/Cargo.toml (1 line changed)
  4. bin/u_panel/src/argparse.rs (35 lines changed)
  5. bin/u_panel/src/main.rs (27 lines changed)
  6. bin/u_server/Cargo.toml (23 lines changed)
  7. bin/u_server/src/db.rs (9 lines changed)
  8. bin/u_server/src/handlers.rs (1 line changed)
  9. bin/u_server/src/main.rs (4 lines changed)
  10. bin/u_server/src/u_server.rs (40 lines changed)
  11. images/integration-tests/tests_base.Dockerfile (11 lines changed)
  12. images/integration-tests/tests_runner.Dockerfile (12 lines changed)
  13. integration/Cargo.toml (3 lines changed)
  14. integration/docker-compose.yml (1 line changed)
  15. integration/docker.py (13 lines changed)
  16. integration/integration_tests.sh (4 lines changed)
  17. integration/tests/fixtures/agent.rs (5 lines changed)
  18. integration/tests/helpers/mod.rs (5 lines changed)
  19. integration/tests/lib.rs (5 lines changed)
  20. lib/u_lib/Cargo.toml (4 lines changed)
  21. lib/u_lib/src/builder.rs (94 lines changed)
  22. lib/u_lib/src/lib.rs (3 lines changed)
  23. lib/u_lib/src/utils/env.rs (25 lines changed)
  24. lib/u_lib/src/utils/mod.rs (22 lines changed)
  25. lib/u_lib/src/utils/proc_output.rs (22 lines changed)

@@ -30,11 +30,14 @@ args = ["build", "--target", "${TARGET}", "${@}"]
 [tasks.release_tasks]
 script = '''
-if [[ "${@}" =~ "release" ]]; then
-    echo "Stripping binaries..."
-    strip $(ls ./target/${TARGET}/release/u_* -1 | grep -v ".d")
+if [[ "${@}" =~ "--release" ]]; then
     echo "Creating symlink to release dir..."
     ln -s ./target/${TARGET}/release ./release || true
+    BINS=$(ls ./release/u_* -1 | grep -v ".d")
+    echo "Stripping..."
+    strip $BINS
+    echo "Packing..."
+    upx -9 $BINS
 fi
 '''
@@ -56,7 +59,7 @@ args = ["test", "--target", "${TARGET}", "--lib", "--", "${@}"]
 [tasks.integration]
 script = '''
 cd ./integration
-bash integration_tests.sh
+bash integration_tests.sh ${@}
 '''

 [tasks.gen_schema]

@@ -12,7 +12,8 @@ use std::sync::Arc;
 use tokio::time::{sleep, Duration};
 use u_lib::{
     api::ClientHandler, builder::JobBuilder, cache::JobCache, errors::ErrChan,
-    executor::pop_completed, messaging::Reportable, models::AssignedJob, utils::Env, UError, UID,
+    executor::pop_completed, messaging::Reportable, models::AssignedJob, utils::load_env_default,
+    UError, UID,
 };

 const ITERATION_LATENCY: u64 = 5;
@@ -86,14 +87,14 @@ async fn do_stuff(client: Arc<ClientHandler>) -> ! {
     }
 }

-pub async fn run_forever() {
+pub async fn run_forever() -> ! {
     //daemonize();
     env_logger::init();
-    let env = Env::init_default().unwrap();
+    let env = load_env_default().unwrap();
     let client = Arc::new(ClientHandler::new(&env.u_server));
     panic::set_hook(Box::new(|panic_info| {
         ErrChan::send(UError::Panic(panic_info.to_string()))
     }));
     tokio::spawn(error_reporting(client.clone()));
-    do_stuff(client).await;
+    do_stuff(client).await
 }

@@ -21,7 +21,6 @@ tui = { version = "0.16", default-features = false, features = ['crossterm'] }
 crossterm = "0.22.1"
 anyhow = "1.0.44"
 strum = { version = "0.22.0", features = ["derive"] }
-async-trait = "0.1.51"
 once_cell = "1.8.0"
 crossbeam = "0.8.1"
 async-channel = "1.6.1"

@@ -1,7 +1,8 @@
-use crate::CLIENT;
 use std::fmt;
 use structopt::StructOpt;
-use u_lib::{datatypes::DataResult, messaging::AsMsg, models::JobMeta, UError, UResult};
+use u_lib::{
+    api::ClientHandler, datatypes::DataResult, messaging::AsMsg, models::JobMeta, UError, UResult,
+};
 use uuid::Uuid;

 #[derive(StructOpt, Debug)]
@@ -82,7 +83,7 @@ fn parse_uuid(src: &str) -> Result<Uuid, String> {
     Uuid::parse_str(src).map_err(|e| e.to_string())
 }

-pub async fn process_cmd(args: Args) -> UResult<()> {
+pub async fn process_cmd(client: ClientHandler, args: Args) -> UResult<()> {
     struct Printer {
         json: bool,
     }
@@ -107,8 +108,8 @@ pub async fn process_cmd(args: Args) -> UResult<()> {
     let printer = Printer { json: args.json };
     match args.cmd {
         Cmd::Agents(action) => match action {
-            LD::List { uid } => printer.print(CLIENT.get().unwrap().get_agents(uid).await),
-            LD::Delete { uid } => printer.print(CLIENT.get().unwrap().del(Some(uid)).await),
+            LD::List { uid } => printer.print(client.get_agents(uid).await),
+            LD::Delete { uid } => printer.print(client.del(Some(uid)).await),
         },
         Cmd::Jobs(action) => match action {
             JobALD::Add {
@@ -120,30 +121,18 @@ pub async fn process_cmd(args: Args) -> UResult<()> {
                     .with_shell(cmd.join(" "))
                     .with_alias(alias)
                     .build()?;
-                printer.print(CLIENT.get().unwrap().upload_jobs(&[job]).await);
+                printer.print(client.upload_jobs(&[job]).await);
             }
-            JobALD::LD(LD::List { uid }) => {
-                printer.print(CLIENT.get().unwrap().get_jobs(uid).await)
-            }
-            JobALD::LD(LD::Delete { uid }) => {
-                printer.print(CLIENT.get().unwrap().del(Some(uid)).await)
-            }
+            JobALD::LD(LD::List { uid }) => printer.print(client.get_jobs(uid).await),
+            JobALD::LD(LD::Delete { uid }) => printer.print(client.del(Some(uid)).await),
         },
         Cmd::Map(action) => match action {
             JobMapALD::Add {
                 agent_uid,
                 job_idents,
-            } => printer.print(
-                CLIENT
-                    .get()
-                    .unwrap()
-                    .set_jobs(Some(agent_uid), &job_idents)
-                    .await,
-            ),
-            JobMapALD::List { uid } => {
-                printer.print(CLIENT.get().unwrap().get_agent_jobs(uid).await)
-            }
-            JobMapALD::Delete { uid } => printer.print(CLIENT.get().unwrap().del(Some(uid)).await),
+            } => printer.print(client.set_jobs(Some(agent_uid), &job_idents).await),
+            JobMapALD::List { uid } => printer.print(client.get_agent_jobs(uid).await),
+            JobMapALD::Delete { uid } => printer.print(client.del(Some(uid)).await),
         },
         /*Cmd::TUI(args) => crate::tui::init_tui(&args)
             .await

@@ -2,36 +2,29 @@ mod argparse;
 mod server;
 //mod tui;

-#[macro_use]
-extern crate async_trait;
 #[macro_use]
 extern crate tracing;

+use anyhow::Result as AnyResult;
 use argparse::{process_cmd, Args};
-use once_cell::sync::OnceCell;
 use serde::Deserialize;
-use std::process;
 use structopt::StructOpt;
 use u_lib::api::ClientHandler;
-use u_lib::utils::Env;
+use u_lib::utils::{env::default_host, load_env};

-pub static CLIENT: OnceCell<ClientHandler> = OnceCell::new();
-
 #[derive(Deserialize)]
 struct AccessEnv {
     admin_auth_token: String,
+    #[serde(default = "default_host")]
+    u_server: String,
 }

 #[tokio::main]
-async fn main() {
-    let env = Env::<AccessEnv>::init().unwrap();
-
-    CLIENT.get_or_init(|| ClientHandler::new(&env.u_server).password(env.inner.admin_auth_token));
-
-    let args: Args = Args::from_args();
-    if let Err(e) = process_cmd(args).await {
-        eprintln!("Error: {}", e);
-        process::exit(1)
-    }
+async fn main() -> AnyResult<()> {
+    let env = load_env::<AccessEnv>()?;
+    let client = ClientHandler::new(&env.u_server).password(env.admin_auth_token);
+    let args = Args::from_args();
+
+    process_cmd(client, args).await?;
+    Ok(())
 }

@@ -12,28 +12,15 @@ warp = { version = "0.3.1", features = ["tls"] }
 uuid = { version = "0.6.5", features = ["serde", "v4"] }
 once_cell = "1.7.2"
 hyper = "0.14"
-mockall = "0.9.1"
-mockall_double = "0.2"
 openssl = "*"
+diesel = { version = "1.4.5", features = ["postgres", "uuid"] }
+serde = { version = "1.0", features = ["derive"] }
+tokio = { version = "1.9", features = ["macros"] }
+u_lib = { path = "../../lib/u_lib", version = "*" }

-[dependencies.diesel]
-features = ["postgres", "uuid"]
-version = "1.4.5"
-
-[dependencies.serde]
-features = ["derive"]
-version = "1.0.114"
-
-[dependencies.tokio]
-features = ["macros"]
-version = "1.9"
-
-[dependencies.u_lib]
-path = "../../lib/u_lib"
-version = "*"
-
 [dev-dependencies]
-test-case = "1.1.0"
+rstest = "0.12"

 [lib]
 name = "u_server_lib"

@@ -4,7 +4,7 @@ use serde::Deserialize;
 use std::sync::{Arc, Mutex, MutexGuard};
 use u_lib::{
     models::{schema, Agent, AgentError, AssignedJob, JobMeta, JobState},
-    utils::Env,
+    utils::load_env,
     ULocalError, ULocalResult,
 };
 use uuid::Uuid;
@@ -23,14 +23,13 @@ struct DBEnv {
     db_password: String,
 }

-#[cfg_attr(test, automock)]
 impl UDB {
-    pub fn lock_db() -> MutexGuard<'static, UDB> {
+    pub fn lock_db() -> MutexGuard<'static, Self> {
         DB.get_or_init(|| {
-            let env = Env::<DBEnv>::init().unwrap();
+            let env = load_env::<DBEnv>().unwrap();
             let db_url = format!(
                 "postgres://{}:{}@{}/{}",
-                env.inner.db_user, env.inner.db_password, env.inner.db_host, env.inner.db_name
+                env.db_user, env.db_password, env.db_host, env.db_name
             );
             let conn = PgConnection::establish(&db_url).unwrap();
             let instance = UDB { conn };

@@ -32,7 +32,6 @@ pub fn build_message<M: AsMsg + Serialize>(m: M) -> Response<Body> {
 pub struct Endpoints;

-#[cfg_attr(test, automock)]
 impl Endpoints {
     pub async fn add_agent(msg: Agent) -> Result<Response<Body>, Rejection> {
         info!("hnd: add_agent");

@@ -1,6 +1,6 @@
 use u_server_lib::serve;

 #[tokio::main]
-async fn main() {
-    serve().await;
+async fn main() -> Result<(), String> {
+    serve().await
 }

@@ -1,10 +1,9 @@
 #[macro_use]
 extern crate log;
-#[cfg_attr(test, macro_use)]
-extern crate mockall;
-#[cfg_attr(test, macro_use)]
-extern crate mockall_double;
+#[cfg(test)]
+#[macro_use]
+extern crate rstest;

 // due to linking errors
 extern crate openssl;
@@ -19,7 +18,7 @@ mod init;
 use init::*;
 use serde::Deserialize;
 use std::path::PathBuf;
-use u_lib::{config::MASTER_PORT, utils::Env};
+use u_lib::{config::MASTER_PORT, utils::load_env};
 use warp::Filter;

 #[derive(Deserialize)]
@@ -32,8 +31,8 @@ pub async fn serve() -> Result<(), String> {
     init_logger();
     prefill_jobs();

-    let env = Env::<ServEnv>::init().map_err(|e| e.to_string())?;
-    let routes = init_filters(&env.inner.admin_auth_token);
+    let env = load_env::<ServEnv>().map_err(|e| e.to_string())?;
+    let routes = init_filters(&env.admin_auth_token);
     let certs_dir = PathBuf::from("certs");
     warp::serve(routes.with(warp::log("warp")))
         .tls()
@@ -45,25 +44,26 @@ pub async fn serve() -> Result<(), String> {
     Ok(())
 }

+/*
 #[cfg(test)]
 mod tests {
     use super::*;
-    #[double]
     use crate::handlers::Endpoints;
     use handlers::build_ok;
-    use mockall::predicate::*;
-    use test_case::test_case;
     use u_lib::messaging::{AsMsg, BaseMessage, Reportable};
     use uuid::Uuid;
-    use warp::test::request;
+    use warp::test;

-    #[test_case(Some(Uuid::new_v4()))]
-    #[test_case(None => panics)]
+    #[rstest]
+    #[case(Some(Uuid::new_v4()))]
+    #[should_panic]
+    #[case(None)]
     #[tokio::test]
-    async fn test_get_agent_jobs_unauthorized(uid: Option<Uuid>) {
-        let mock = Endpoints::get_agent_jobs_context();
-        mock.expect().with(eq(uid)).returning(|_| Ok(build_ok("")));
-        request()
+    async fn test_get_agent_jobs_unauthorized(#[case] uid: Option<Uuid>) {
+        let mock = Endpoints::faux();
+        when!(mock.get_agent_jobs).then_return(Ok(build_ok("")));
+        //mock.expect().with(eq(uid)).returning(|_| Ok(build_ok("")));
+        test::request()
             .path(&format!(
                 "/get_agent_jobs/{}",
                 uid.map(|u| u.simple().to_string()).unwrap_or(String::new())
@@ -72,16 +72,15 @@ mod tests {
             .filter(&init_filters(""))
             .await
             .unwrap();
-        mock.checkpoint();
     }

     #[tokio::test]
     async fn test_report_unauth_successful() {
-        let mock = Endpoints::report_context();
+        let mock = Endpoints::report();
         mock.expect()
            .withf(|msg: &BaseMessage<'_, Vec<Reportable>>| msg.inner_ref()[0] == Reportable::Dummy)
            .returning(|_| Ok(build_ok("")));
-        request()
+        test::request()
            .path("/report/")
            .method("POST")
            .json(&vec![Reportable::Dummy].as_message())
@@ -91,3 +90,4 @@ mod tests {
         mock.checkpoint();
     }
 }
+*/

@@ -1,11 +0,0 @@
-# build from lib/
-FROM rust:1.60 as chef
-RUN rustup target add x86_64-unknown-linux-musl
-RUN cargo install cargo-chef
-COPY u_lib /lib/u_lib
-COPY u_api_proc_macro /lib/u_api_proc_macro
-COPY certs /certs
-WORKDIR /app

@@ -1,12 +1,4 @@
-# build from integration/
-FROM unki/tests_base as chef
-FROM chef as planner
-COPY . /app
-RUN cargo chef prepare --recipe-path recipe.json
-FROM chef as builder
-COPY --from=planner /app/recipe.json recipe.json
-RUN cargo chef cook --release --recipe-path recipe.json
-RUN rustup target add x86_64-unknown-linux-musl
+FROM rust:1.60
 CMD ["sleep", "3600"]

@@ -16,7 +16,8 @@ serde_json = "1.0"
 serde = { version = "1.0.114", features = ["derive"] }
 futures = "0.3.5"
 shlex = "1.0.0"
-rstest = "0.11"
+rstest = "0.12"
+once_cell = "1.10.0"

 [dependencies.u_lib]
 path = "../lib/u_lib"

@@ -67,6 +67,7 @@ services:
         condition: service_healthy
   tests_runner:
+    user: "${DOCKER_UID}:${DOCKER_GID}"
     image: unki/tests_runner
     networks:
       - u_net

@@ -1,9 +1,9 @@
 import subprocess
 from utils import *

 BASE_IMAGE_DIR = '../images/integration-tests'

+# do not reorder
 DOCKERFILES = [
     {
         'name': 'u_agent',
@@ -17,13 +17,9 @@ DOCKERFILES = [
         'name': 'u_db',
         'ctx': BASE_IMAGE_DIR,
     },
-    {
-        'name': 'tests_base',
-        'ctx': '../lib',
-    },
     {
         'name': 'tests_runner',
-        'ctx': '../integration',
+        'ctx': BASE_IMAGE_DIR,
     },
 ]
@@ -75,11 +71,10 @@ def rebuild_images_if_needed(force_rebuild=False):
         log(f'Building docker image {img_name}')
         cmd = [
             'build',
-            '-t',
-            img_name,
+            '-t', img_name,
+            '-f', f'{BASE_IMAGE_DIR}/{name}.{df_suffix}',
             ctx,
         ]
-        cmd += ['-f', f'{BASE_IMAGE_DIR}/{name}.{df_suffix}']
         if force_rebuild:
             cmd += ['--no-cache']
         docker(cmd)

@@ -1,5 +1,5 @@
 #!/bin/bash
 set -e
-cp -r ../certs ../lib/certs
+export DOCKER_UID=$(id -u)
+export DOCKER_GID=$(id -g)
 python integration_tests.py $@
-rm -rf ../lib/certs

@@ -1,3 +1,4 @@
+use crate::helpers::ENV;
 use u_lib::{api::ClientHandler, messaging::Reportable, models::*};
 use uuid::Uuid;

@@ -7,14 +8,14 @@ pub struct RegisteredAgent {
 impl RegisteredAgent {
     pub async fn unregister(self) {
-        let cli = ClientHandler::new(None);
+        let cli = ClientHandler::new(&ENV.u_server);
         cli.del(Some(self.uid)).await.unwrap();
     }
 }

 #[fixture]
 pub async fn register_agent() -> RegisteredAgent {
-    let cli = ClientHandler::new(None);
+    let cli = ClientHandler::new(&ENV.u_server);
     let agent_uid = Uuid::new_v4();
     let resp = cli
         .get_personal_jobs(Some(agent_uid))

@@ -1,3 +1,8 @@
 pub mod panel;
 pub use panel::Panel;
+
+use once_cell::sync::Lazy;
+use u_lib::utils::{env::DefaultEnv, load_env_default};
+
+pub static ENV: Lazy<DefaultEnv> = Lazy::new(|| load_env_default().unwrap());

@@ -2,21 +2,20 @@ mod fixtures;
 mod helpers;
 mod integration;

+use crate::helpers::ENV;
 use u_lib::config::MASTER_PORT;
-use u_lib::utils::Env;

 #[macro_use]
 extern crate rstest;

 #[tokio::test]
 async fn test_non_auth_connection_dropped() {
-    let env = Env::init_default().unwrap();
     let client = reqwest::ClientBuilder::new()
         .danger_accept_invalid_certs(true)
         .build()
         .unwrap();
     match client
-        .get(format!("https://{}:{}", &env.u_server, MASTER_PORT))
+        .get(format!("https://{}:{}", &ENV.u_server, MASTER_PORT))
         .send()
         .await
     {

@@ -16,7 +16,6 @@ lazy_static = "1.4.0"
 futures = "0.3.5"
 thiserror = "*"
 log = "*"
-mockall = "0.9.1"
 env_logger = "0.8.3"
 diesel-derive-enum = { version = "1", features = ["postgres"] }
 chrono = "0.4.19"
@@ -36,5 +35,4 @@ guess_host_triple = "0.1.2"
 openssl = "*"

 [dev-dependencies]
-test-case = "1.1.0"
-rstest = "0.11"
+rstest = "0.12"

@@ -1,12 +1,10 @@
 use crate::{
-    UError,
-    UErrorBt,
-    UResult,
     cache::JobCache,
-    executor::{Waiter, DynFut},
+    executor::{DynFut, Waiter},
     messaging::Reportable,
     models::{Agent, AssignedJob, JobMeta, JobType},
-    utils::{CombinedResult, OneOrVec}
+    utils::{CombinedResult, OneOrVec},
+    UError, UErrorBt, UResult,
 };
 use guess_host_triple::guess_host_triple;
 use std::collections::HashMap;
@@ -38,7 +36,8 @@ impl JobBuilder {
                 return Err(UError::InsuitablePlatform(
                     meta.platform.clone(),
                     curr_platform,
-                ).into());
+                )
+                .into());
             }
             let job = AssignedJob::new(req.job_id, Some(&req));
             prepared.push(Box::pin(job.run()))
@@ -152,19 +151,13 @@ impl NamedJobBuilder {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use test_case::test_case;
-    use std::{time::SystemTime};
     use crate::{
-        errors::UError,
-        models::{
-            JobMeta,
-            misc::JobType
-        },
         builder::{JobBuilder, NamedJobBuilder},
+        models::{misc::JobType, JobMeta},
         unwrap_enum,
     };
+    use std::time::SystemTime;

     type TestResult<R = ()> = Result<R, Box<dyn std::error::Error>>;
@@ -178,19 +171,14 @@ mod tests {
         assert!(now.elapsed().unwrap().as_secs() < SLEEP_SECS + 2)
     }

-    #[test_case(
+    #[rstest]
+    #[case::sh_payload(
         "/bin/sh {}",
-        Some(b"echo test01 > /tmp/asd; cat /tmp/asd"),
+        Some(b"echo test01 > /tmp/asd; cat /tmp/asd".as_slice()),
         "test01"
-        ;"sh payload"
     )]
-    #[test_case(
-        r#"/usr/bin/python3 -c 'print("test02")'"#,
-        None,
-        "test02"
-        ;"python cmd"
-    )]
-    #[test_case(
+    #[case::python_cmd(r#"/usr/bin/python3 -c 'print("test02")'"#, None, "test02")]
+    #[case::sh_multiline_payload(
         "/{}",
         Some(
             br#"#!/bin/sh
@@ -198,19 +186,21 @@ mod tests {
 mkdir -p $TMPPATH
 echo test03 > $TMPPATH/t
 cat $TMPPATH/t
-rm -rf $TMPPATH"#
+rm -rf $TMPPATH"#.as_slice()
         ),
         "test03"
-        ;"sh multiline payload"
     )]
-    #[test_case(
+    #[case::standalone_binary_with_args(
         "/{} 'some msg as arg'",
-        Some(include_bytes!("../tests/fixtures/echoer")),
+        Some(include_bytes!("../tests/fixtures/echoer").as_slice()),
         "some msg as arg"
-        ;"standalone binary with args"
     )]
     #[tokio::test]
-    async fn test_shell_job(cmd: &str, payload: Option<&[u8]>, expected_result: &str) -> TestResult {
+    async fn test_shell_job(
+        #[case] cmd: &str,
+        #[case] payload: Option<&[u8]>,
+        #[case] expected_result: &str,
+    ) -> TestResult {
         let mut job = JobMeta::builder().with_shell(cmd);
         if let Some(p) = payload {
             job = job.with_payload(p);
@@ -228,8 +218,12 @@ mod tests {
         const SLEEP_SECS: u64 = 1;
         let now = SystemTime::now();
         let longest_job = JobMeta::from_shell(format!("sleep {}", SLEEP_SECS)).unwrap();
-        let longest_job = JobBuilder::from_meta(longest_job).unwrap_one().spawn().await;
-        let ls = JobBuilder::from_meta(JobMeta::from_shell("ls")?).unwrap_one()
+        let longest_job = JobBuilder::from_meta(longest_job)
+            .unwrap_one()
+            .spawn()
+            .await;
+        let ls = JobBuilder::from_meta(JobMeta::from_shell("ls")?)
+            .unwrap_one()
             .wait_one()
             .await;
         let ls = unwrap_enum!(ls, Reportable::Assigned);
@@ -272,10 +266,7 @@ mod tests {
     #[tokio::test]
     async fn test_failing_shell_job() -> TestResult {
         let job = JobMeta::from_shell("lol_kek_puk")?;
-        let job_result = JobBuilder::from_meta(job)
-            .unwrap_one()
-            .wait_one()
-            .await;
+        let job_result = JobBuilder::from_meta(job).unwrap_one().wait_one().await;
         let job_result = unwrap_enum!(job_result, Reportable::Assigned);
         let output = job_result.to_string_result();
         assert!(output.contains("No such file"));
@@ -283,20 +274,15 @@ mod tests {
         Ok(())
     }

-    #[test_case(
-        "/bin/bash {}",
-        None,
-        "contains executable"
-        ; "no binary"
-    )]
-    #[test_case(
-        "/bin/bash",
-        Some(b"whoami"),
-        "contains no executable"
-        ; "no path to binary"
-    )]
+    #[rstest]
+    #[case::no_binary("/bin/bash {}", None, "contains executable")]
+    #[case::no_path_to_binary("/bin/bash", Some(b"whoami".as_slice()), "contains no executable")]
     #[tokio::test]
-    async fn test_job_building_failed(cmd: &str, payload: Option<&[u8]>, err_str: &str) -> TestResult {
+    async fn test_job_building_failed(
+        #[case] cmd: &str,
+        #[case] payload: Option<&[u8]>,
+        #[case] err_str: &str,
+    ) -> TestResult {
         let mut job = JobMeta::builder().with_shell(cmd);
         if let Some(p) = payload {
             job = job.with_payload(p);
@@ -311,11 +297,15 @@ mod tests {
     async fn test_different_job_types() -> TestResult {
         let mut jobs = NamedJobBuilder::from_meta(vec![
             ("sleeper", JobMeta::from_shell("sleep 3")?),
-            ("gatherer", JobMeta::builder().with_type(JobType::Manage).build()?)
-        ]).wait().await;
+            (
+                "gatherer",
+                JobMeta::builder().with_type(JobType::Manage).build()?,
+            ),
+        ])
+        .wait()
+        .await;
         let gathered = jobs.pop("gatherer");
         assert_eq!(unwrap_enum!(gathered, Reportable::Agent).alias, None);
         Ok(())
     }
 }

@@ -40,5 +40,6 @@ extern crate diesel;
 extern crate log;
 extern crate env_logger;

+#[cfg(test)]
 #[macro_use]
-extern crate mockall;
+extern crate rstest;

@@ -2,35 +2,28 @@ use envy::{from_env, Result as EnvResult};
 use serde::{de::DeserializeOwned, Deserialize};

 #[derive(Deserialize)]
-pub struct NoneEnv;
-
-#[derive(Deserialize)]
-pub struct Env<E = NoneEnv> {
+pub struct DefaultEnv {
     #[serde(default = "default_host")]
     pub u_server: String,
-    pub inner: E,
 }

-impl Env {
-    pub fn init_default() -> EnvResult<Self> {
-        let envs = [".env", ".env.private"];
-        for envfile in &envs {
-            dotenv::from_filename(envfile).ok();
-        }
-        from_env()
-    }
+pub fn load_env<E: DeserializeOwned>() -> EnvResult<E> {
+    dot();
+    from_env()
+}
+
+pub fn load_env_default() -> EnvResult<DefaultEnv> {
+    dot();
+    from_env()
 }

-impl<E: DeserializeOwned> Env<E> {
-    pub fn init() -> EnvResult<Self> {
-        let envs = [".env", ".env.private"];
-        for envfile in &envs {
-            dotenv::from_filename(envfile).ok();
-        }
-        from_env()
-    }
+fn dot() {
+    let envs = [".env", ".env.private"];
+    for envfile in &envs {
+        dotenv::from_filename(envfile).ok();
+    }
 }

-fn default_host() -> String {
+pub fn default_host() -> String {
     "ortem.xyz".to_string()
 }
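
For context, the reworked helpers are consumed as in the sketch below. This is a minimal, illustrative example only: the `MyEnv` struct and its `db_port` field are hypothetical stand-ins (not part of this commit), while `load_env`, `load_env_default`, and `DefaultEnv::u_server` come straight from the hunk above.

    use serde::Deserialize;
    use u_lib::utils::{load_env, load_env_default};

    // Hypothetical caller-defined env struct; any DeserializeOwned type works,
    // fields are filled by envy from env vars / .env / .env.private.
    #[derive(Deserialize)]
    struct MyEnv {
        db_port: u16, // read from DB_PORT
    }

    fn main() {
        // Typed env: fields are accessed directly, no env.inner.* indirection anymore.
        let custom: MyEnv = load_env::<MyEnv>().unwrap();
        println!("db port: {}", custom.db_port);

        // DefaultEnv only carries u_server, falling back to default_host().
        let default = load_env_default().unwrap();
        println!("server: {}", default.u_server);
    }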

@@ -1,19 +1,19 @@
-mod combined_result;
-mod conv;
-mod env;
-mod fmt;
-mod misc;
-mod proc_output;
-mod storage;
+pub mod combined_result;
+pub mod conv;
+pub mod env;
+pub mod fmt;
+pub mod misc;
+pub mod proc_output;
+pub mod storage;
 #[cfg(not(target_arch = "wasm32"))]
-mod tempfile;
+pub mod tempfile;
 #[cfg(unix)]
-mod unix;
-mod vec_display;
+pub mod unix;
+pub mod vec_display;

 pub use combined_result::*;
 pub use conv::*;
-pub use env::Env;
+pub use env::{load_env, load_env_default};
 pub use fmt::*;
 pub use misc::*;
 pub use proc_output::*;

@@ -117,46 +117,42 @@ impl ProcOutput {
 #[cfg(test)]
 mod tests {
     use crate::utils::{bytes_to_string, ProcOutput};
-    use test_case::test_case;

     const STDOUT: &str = "<***STDOUT***>";
     const STDERR: &str = "<***STDERR***>";

-    #[test_case(
+    #[rstest]
+    #[case::stdout_stderr(
         "lol",
         "kek",
         &format!("{}lol{}kek", STDOUT, STDERR)
-        ;"stdout stderr"
     )]
-    #[test_case(
+    #[case::stderr(
         "",
         "kek",
         &format!("{}kek", STDERR)
-        ;"stderr"
     )]
-    fn test_to_combined(stdout: &str, stderr: &str, result: &str) {
+    fn test_to_combined(#[case] stdout: &str, #[case] stderr: &str, #[case] result: &str) {
         let output = ProcOutput::new()
             .stdout(stdout.as_bytes().to_vec())
             .stderr(stderr.as_bytes().to_vec());
         assert_eq!(&bytes_to_string(&output.into_combined()), result)
     }

-    #[test_case(
+    #[rstest]
+    #[case::stdout_stderr(
         &format!("{}lal{}kik", STDOUT, STDERR),
         "lal\nkik"
-        ;"stdout stderr"
     )]
-    #[test_case(
+    #[case::stdout(
         &format!("{}qeq", STDOUT),
         "qeq"
-        ;"stdout"
     )]
-    #[test_case(
+    #[case::stderr(
         &format!("{}vev", STDERR),
         "vev"
-        ;"stderr"
     )]
-    fn test_from_combined(src: &str, result: &str) {
+    fn test_from_combined(#[case] src: &str, #[case] result: &str) {
         let output = ProcOutput::from_combined(src.as_bytes()).unwrap();
         assert_eq!(bytes_to_string(&output.to_appropriate()).trim(), result);
     }
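
The tests above exercise the ProcOutput round trip. The sketch below strings the same calls together outside the test module, using only the methods visible in this diff (new, stdout, stderr, into_combined, from_combined, to_appropriate, bytes_to_string); exact return types are assumed from how the tests use them, not from the full source.

    use u_lib::utils::{bytes_to_string, ProcOutput};

    fn main() {
        // Combine captured stdout/stderr into one tagged byte buffer...
        let combined = ProcOutput::new()
            .stdout(b"hello".to_vec())
            .stderr(b"oops".to_vec())
            .into_combined();

        // ...then split it back; to_appropriate() yields the printable form.
        let parsed = ProcOutput::from_combined(&combined).unwrap();
        println!("{}", bytes_to_string(&parsed.to_appropriate()));
    }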
