improve env parsing & optimize integration tests

pull/1/head
plazmoid 2 years ago
parent 88f17eab02
commit 5d04aa61d6
  1. bin/u_agent/src/lib.rs (17)
  2. bin/u_panel/src/argparse.rs (36)
  3. bin/u_panel/src/main.rs (25)
  4. bin/u_panel/src/server/mod.rs (10)
  5. bin/u_server/src/db.rs (23)
  6. bin/u_server/src/init.rs (42)
  7. bin/u_server/src/u_server.rs (56)
  8. images/integration-tests/tests_base.Dockerfile (11)
  9. images/integration-tests/tests_runner.Dockerfile (12)
  10. integration/Cargo.toml (2)
  11. integration/docker-compose.yml (1)
  12. integration/docker.py (38)
  13. integration/integration_tests.sh (2)
  14. integration/tests/helpers/panel.rs (2)
  15. integration/tests/integration/behaviour.rs (2)
  16. integration/tests/integration/mod.rs (1)
  17. integration/tests/lib.rs (8)
  18. lib/u_lib/Cargo.toml (1)
  19. lib/u_lib/src/api.rs (13)
  20. lib/u_lib/src/config.rs (1)
  21. lib/u_lib/src/utils/env.rs (36)
  22. lib/u_lib/src/utils/misc.rs (7)
  23. lib/u_lib/src/utils/mod.rs (2)

bin/u_agent/src/lib.rs

@@ -7,21 +7,12 @@
 extern crate log;
 extern crate env_logger;
 
-use std::env;
 use std::panic;
 use std::sync::Arc;
 use tokio::time::{sleep, Duration};
 use u_lib::{
-    api::ClientHandler,
-    builder::JobBuilder,
-    cache::JobCache,
-    errors::ErrChan,
-    executor::pop_completed,
-    messaging::Reportable,
-    models::AssignedJob,
-    UError,
-    UID,
-    //daemonize
+    api::ClientHandler, builder::JobBuilder, cache::JobCache, errors::ErrChan,
+    executor::pop_completed, messaging::Reportable, models::AssignedJob, utils::Env, UError, UID,
 };
 
 const ITERATION_LATENCY: u64 = 5;
@@ -98,8 +89,8 @@ async fn do_stuff(client: Arc<ClientHandler>) -> ! {
 pub async fn run_forever() {
     //daemonize();
     env_logger::init();
-    let arg_ip = env::args().nth(1);
-    let client = Arc::new(ClientHandler::new(arg_ip.as_deref()));
+    let env = Env::init_default().unwrap();
+    let client = Arc::new(ClientHandler::new(&env.u_server));
     panic::set_hook(Box::new(|panic_info| {
         ErrChan::send(UError::Panic(panic_info.to_string()))
     }));

bin/u_panel/src/argparse.rs

@@ -17,7 +17,7 @@ enum Cmd {
     Agents(LD),
     Jobs(JobALD),
     Map(JobMapALD),
-    TUI(TUIArgs),
+    //TUI(TUIArgs),
     Serve,
 }
@@ -107,8 +107,8 @@ pub async fn process_cmd(args: Args) -> UResult<()> {
     let printer = Printer { json: args.json };
     match args.cmd {
         Cmd::Agents(action) => match action {
-            LD::List { uid } => printer.print(CLIENT.get_agents(uid).await),
-            LD::Delete { uid } => printer.print(CLIENT.del(Some(uid)).await),
+            LD::List { uid } => printer.print(CLIENT.get().unwrap().get_agents(uid).await),
+            LD::Delete { uid } => printer.print(CLIENT.get().unwrap().del(Some(uid)).await),
         },
         Cmd::Jobs(action) => match action {
             JobALD::Add {
@@ -120,22 +120,34 @@ pub async fn process_cmd(args: Args) -> UResult<()> {
                 .with_shell(cmd.join(" "))
                 .with_alias(alias)
                 .build()?;
-                printer.print(CLIENT.upload_jobs(&[job]).await);
+                printer.print(CLIENT.get().unwrap().upload_jobs(&[job]).await);
             }
-            JobALD::LD(LD::List { uid }) => printer.print(CLIENT.get_jobs(uid).await),
-            JobALD::LD(LD::Delete { uid }) => printer.print(CLIENT.del(Some(uid)).await),
+            JobALD::LD(LD::List { uid }) => {
+                printer.print(CLIENT.get().unwrap().get_jobs(uid).await)
+            }
+            JobALD::LD(LD::Delete { uid }) => {
+                printer.print(CLIENT.get().unwrap().del(Some(uid)).await)
+            }
         },
         Cmd::Map(action) => match action {
             JobMapALD::Add {
                 agent_uid,
                 job_idents,
-            } => printer.print(CLIENT.set_jobs(Some(agent_uid), &job_idents).await),
-            JobMapALD::List { uid } => printer.print(CLIENT.get_agent_jobs(uid).await),
-            JobMapALD::Delete { uid } => printer.print(CLIENT.del(Some(uid)).await),
+            } => printer.print(
+                CLIENT
+                    .get()
+                    .unwrap()
+                    .set_jobs(Some(agent_uid), &job_idents)
+                    .await,
+            ),
+            JobMapALD::List { uid } => {
+                printer.print(CLIENT.get().unwrap().get_agent_jobs(uid).await)
+            }
+            JobMapALD::Delete { uid } => printer.print(CLIENT.get().unwrap().del(Some(uid)).await),
         },
-        Cmd::TUI(args) => crate::tui::init_tui(&args)
-            .await
-            .map_err(|e| UError::PanelError(e.to_string()))?,
+        /*Cmd::TUI(args) => crate::tui::init_tui(&args)
+            .await
+            .map_err(|e| UError::PanelError(e.to_string()))?,*/
         Cmd::Serve => crate::server::serve().map_err(|e| UError::PanelError(e.to_string()))?,
     }
     Ok(())

bin/u_panel/src/main.rs

@@ -1,6 +1,6 @@
 mod argparse;
-mod tui;
 mod server;
+//mod tui;
 
 #[macro_use]
 extern crate async_trait;
@@ -9,21 +9,26 @@ extern crate async_trait;
 extern crate tracing;
 
 use argparse::{process_cmd, Args};
-use once_cell::sync::Lazy;
-use std::env;
+use once_cell::sync::OnceCell;
+use serde::Deserialize;
 use std::process;
 use structopt::StructOpt;
 use u_lib::api::ClientHandler;
-use u_lib::utils::init_env;
+use u_lib::utils::Env;
 
-pub static CLIENT: Lazy<ClientHandler> = Lazy::new(|| {
-    let token = env::var("ADMIN_AUTH_TOKEN").expect("access token is not set");
-    ClientHandler::new(None).password(token.clone())
-});
+pub static CLIENT: OnceCell<ClientHandler> = OnceCell::new();
 
-#[tokio::main(flavor = "multi_thread")]
+#[derive(Deserialize)]
+struct AccessEnv {
+    admin_auth_token: String,
+}
+
+#[tokio::main]
 async fn main() {
-    init_env();
+    let env = Env::<AccessEnv>::init().unwrap();
+    CLIENT.get_or_init(|| ClientHandler::new(&env.u_server).password(env.inner.admin_auth_token));
     let args: Args = Args::from_args();
     if let Err(e) = process_cmd(args).await {
         eprintln!("Error: {}", e);

bin/u_panel/src/server/mod.rs

@@ -33,11 +33,11 @@ async fn main_page() -> impl Responder {
     HttpResponse::Ok().body(index)
 }
 
-#[get("/{file}")]
-async fn static_files_adapter(file: web::Path<(String,)>) -> impl Responder {
-    let file = file.into_inner().0;
-    let mimetype = mime_guess::from_path(&file).first_or_octet_stream();
-    match Files::get_static(file) {
+#[get("/{path}")]
+async fn static_files_adapter(path: web::Path<(String,)>) -> impl Responder {
+    let path = path.into_inner().0;
+    let mimetype = mime_guess::from_path(&path).first_or_octet_stream();
+    match Files::get_static(path) {
         Some(data) => HttpResponse::Ok()
             .content_type(mimetype.to_string())
             .body(data),

bin/u_server/src/db.rs

@@ -1,11 +1,10 @@
 use diesel::{pg::PgConnection, prelude::*, result::Error as DslError};
 use once_cell::sync::OnceCell;
-use std::{
-    env,
-    sync::{Arc, Mutex, MutexGuard},
-};
+use serde::Deserialize;
+use std::sync::{Arc, Mutex, MutexGuard};
 use u_lib::{
     models::{schema, Agent, AgentError, AssignedJob, JobMeta, JobState},
+    utils::Env,
     ULocalError, ULocalResult,
 };
 use uuid::Uuid;
@@ -16,18 +15,22 @@ pub struct UDB {
 
 static DB: OnceCell<Arc<Mutex<UDB>>> = OnceCell::new();
 
+#[derive(Deserialize)]
+struct DBEnv {
+    db_host: String,
+    db_name: String,
+    db_user: String,
+    db_password: String,
+}
+
 #[cfg_attr(test, automock)]
 impl UDB {
     pub fn lock_db() -> MutexGuard<'static, UDB> {
         DB.get_or_init(|| {
-            let _getenv = |v| env::var(v).unwrap();
-            let db_host = _getenv("DB_HOST");
-            let db_name = _getenv("DB_NAME");
-            let db_user = _getenv("DB_USER");
-            let db_password = _getenv("DB_PASSWORD");
+            let env = Env::<DBEnv>::init().unwrap();
             let db_url = format!(
                 "postgres://{}:{}@{}/{}",
-                db_user, db_password, db_host, db_name
+                env.inner.db_user, env.inner.db_password, env.inner.db_host, env.inner.db_name
             );
             let conn = PgConnection::establish(&db_url).unwrap();
             let instance = UDB { conn };

bin/u_server/src/init.rs

@@ -1,6 +1,7 @@
+use crate::db::UDB;
 use crate::handlers::Endpoints;
 use serde::de::DeserializeOwned;
-use std::env;
+use std::path::PathBuf;
 use u_lib::{
     messaging::{AsMsg, BaseMessage, Reportable},
     models::*,
@@ -15,7 +16,9 @@ where
     body::content_length_limit(1024 * 64).and(body::json::<BaseMessage<M>>())
 }
 
-pub fn make_filters() -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {
+pub fn init_filters(
+    auth_token: &str,
+) -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {
     let infallible_none = |_| async { Ok::<(Option<Uuid>,), std::convert::Infallible>((None,)) };
 
     let get_agents = warp::get()
@@ -70,11 +73,7 @@ pub fn make_filters() -> impl Filter<Extract = (impl Reply,), Error = Rejection>
         .and(warp::path("report"))
         .and(get_content::<Vec<Reportable>>().and_then(Endpoints::report));
 
-    let auth_token = format!(
-        "Bearer {}",
-        env::var("ADMIN_AUTH_TOKEN").expect("No auth token provided")
-    )
-    .into_boxed_str();
+    let auth_token = format!("Bearer {auth_token}",).into_boxed_str();
     let auth_header = warp::header::exact("authorization", Box::leak(auth_token));
 
     let auth_zone = (get_agents
@@ -89,3 +88,32 @@ pub fn make_filters() -> impl Filter<Extract = (impl Reply,), Error = Rejection>
     auth_zone.or(agent_zone)
 }
+
+pub fn prefill_jobs() {
+    let agent_hello = JobMeta::builder()
+        .with_type(misc::JobType::Manage)
+        .with_alias("agent_hello")
+        .build()
+        .unwrap();
+    UDB::lock_db().insert_jobs(&[agent_hello]).unwrap();
+}
+
+pub fn init_logger() {
+    use simplelog::*;
+    use std::fs::OpenOptions;
+
+    let log_cfg = ConfigBuilder::new()
+        .set_time_format_str("%x %X")
+        .set_time_to_local(true)
+        .build();
+
+    let logfile = OpenOptions::new()
+        .append(true)
+        .create(true)
+        .open(PathBuf::from("logs").join("u_server.log"))
+        .unwrap();
+
+    let level = LevelFilter::Info;
+    let loggers = vec![
+        WriteLogger::new(level, log_cfg.clone(), logfile) as Box<dyn SharedLogger>,
+        TermLogger::new(level, log_cfg, TerminalMode::Stderr, ColorChoice::Auto),
+    ];
+
+    CombinedLogger::init(loggers).unwrap();
+}

bin/u_server/src/u_server.rs

@@ -13,56 +13,27 @@ extern crate diesel;
 // in this block
 mod db;
-mod filters;
 mod handlers;
+mod init;
 
-use db::UDB;
-use filters::make_filters;
+use init::*;
+use serde::Deserialize;
 use std::path::PathBuf;
-use u_lib::{config::MASTER_PORT, models::*, utils::init_env};
+use u_lib::{config::MASTER_PORT, utils::Env};
 use warp::Filter;
 
-const LOGFILE: &str = "u_server.log";
-
-fn prefill_jobs() {
-    let agent_hello = JobMeta::builder()
-        .with_type(misc::JobType::Manage)
-        .with_alias("agent_hello")
-        .build()
-        .unwrap();
-    UDB::lock_db().insert_jobs(&[agent_hello]).unwrap();
-}
-
-fn init_logger() {
-    use simplelog::*;
-    use std::fs::OpenOptions;
-
-    let log_cfg = ConfigBuilder::new()
-        .set_time_format_str("%x %X")
-        .set_time_to_local(true)
-        .build();
-
-    let logfile = OpenOptions::new()
-        .append(true)
-        .create(true)
-        .open(PathBuf::from("logs").join(LOGFILE))
-        .unwrap();
-
-    let level = LevelFilter::Info;
-    let loggers = vec![
-        WriteLogger::new(level, log_cfg.clone(), logfile) as Box<dyn SharedLogger>,
-        TermLogger::new(level, log_cfg, TerminalMode::Stderr, ColorChoice::Auto),
-    ];
-
-    CombinedLogger::init(loggers).unwrap();
-}
-
-fn init_all() {
-    init_logger();
-    init_env();
-    prefill_jobs();
-}
-
-//TODO: tracing-subscriber
-pub async fn serve() {
-    init_all();
-    let routes = make_filters();
+#[derive(Deserialize)]
+struct ServEnv {
+    admin_auth_token: String,
+}
+
+//TODO: tracing-subscriber
+pub async fn serve() -> Result<(), String> {
+    init_logger();
+    prefill_jobs();
+
+    let env = Env::<ServEnv>::init().map_err(|e| e.to_string())?;
+    let routes = init_filters(&env.inner.admin_auth_token);
     let certs_dir = PathBuf::from("certs");
     warp::serve(routes.with(warp::log("warp")))
         .tls()
@@ -71,6 +42,7 @@ pub async fn serve() {
         .client_auth_required_path(certs_dir.join("ca.crt"))
         .run(([0, 0, 0, 0], MASTER_PORT))
         .await;
+    Ok(())
 }
 
 #[cfg(test)]
@@ -97,7 +69,7 @@ mod tests {
             uid.map(|u| u.simple().to_string()).unwrap_or(String::new())
         ))
         .method("GET")
-        .filter(&make_filters())
+        .filter(&init_filters(""))
         .await
         .unwrap();
         mock.checkpoint();
@@ -113,7 +85,7 @@
         .path("/report/")
         .method("POST")
         .json(&vec![Reportable::Dummy].as_message())
-        .filter(&make_filters())
+        .filter(&init_filters(""))
         .await
         .unwrap();
         mock.checkpoint();

images/integration-tests/tests_base.Dockerfile

@@ -0,0 +1,11 @@
+# build from lib/
+FROM rust:1.60 as chef
+RUN rustup target add x86_64-unknown-linux-musl
+RUN cargo install cargo-chef
+
+COPY u_lib /lib/u_lib
+COPY u_api_proc_macro /lib/u_api_proc_macro
+COPY certs /certs
+
+WORKDIR /app

@ -1,4 +1,12 @@
FROM rust:1.55 # build from integration/
FROM unki/tests_base as chef
FROM chef as planner
COPY . /app
RUN cargo chef prepare --recipe-path recipe.json
FROM chef as builder
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --release --recipe-path recipe.json
RUN rustup target add x86_64-unknown-linux-musl
CMD ["sleep", "3600"] CMD ["sleep", "3600"]

integration/Cargo.toml

@@ -25,4 +25,4 @@ version = "*"
 
 [[test]]
 name = "integration"
-path = "tests/tests.rs"
+path = "tests/lib.rs"

integration/docker-compose.yml

@@ -71,7 +71,6 @@ services:
     networks:
       - u_net
     volumes:
-      - ~/.cargo/registry:/root/.cargo/registry
       - ./:/tests/
       - ../certs:/certs
       - ../release/u_panel:/u_panel

integration/docker.py

@@ -3,24 +3,29 @@ from utils import *
 BASE_IMAGE_DIR = '../images/integration-tests'
 
-DOCKERFILES = {
-    'u_agent': {
+# do not reorder
+DOCKERFILES = [
+    {
+        'name': 'u_agent',
         'ctx': BASE_IMAGE_DIR,
-        'dockerfile_prefix': 'u_agent'
     },
-    'u_server': {
+    {
+        'name': 'u_server',
         'ctx': BASE_IMAGE_DIR,
-        'dockerfile_prefix': 'u_server'
     },
-    'u_db': {
+    {
+        'name': 'u_db',
         'ctx': BASE_IMAGE_DIR,
-        'dockerfile_prefix': 'u_db'
     },
-    'tests_runner': {
-        'ctx': BASE_IMAGE_DIR,
-        'dockerfile_prefix': 'tests_runner'
+    {
+        'name': 'tests_base',
+        'ctx': '../lib',
+    },
+    {
+        'name': 'tests_runner',
+        'ctx': '../integration',
     },
-}
+]
 
 
 def docker(args):
@@ -62,11 +67,11 @@ def check_state(containers):
 
 def rebuild_images_if_needed(force_rebuild=False):
-    for img_name, data in DOCKERFILES.items():
-        ctx = data['ctx']
-        df_prefix = data.get('dockerfile_prefix')
+    for img in DOCKERFILES:
+        ctx = img['ctx']
+        name = img.get('name')
         df_suffix = 'Dockerfile'
-        img_name = f'unki/{img_name}'
+        img_name = f'unki/{name}'
         log(f'Building docker image {img_name}')
         cmd = [
             'build',
@@ -74,8 +79,7 @@ def rebuild_images_if_needed(force_rebuild=False):
             img_name,
             ctx,
         ]
-        if df_prefix:
-            cmd += ['-f', f'{ctx}/{df_prefix}.{df_suffix}']
+        cmd += ['-f', f'{BASE_IMAGE_DIR}/{name}.{df_suffix}']
         if force_rebuild:
             cmd += ['--no-cache']
         docker(cmd)

integration/integration_tests.sh

@@ -1,3 +1,5 @@
 #!/bin/bash
 set -e
+cp -r ../certs ../lib/certs
 python integration_tests.py $@
+rm -rf ../lib/certs

integration/tests/helpers/panel.rs

@@ -36,7 +36,7 @@ impl Panel {
     }
 
     pub fn output<T: DeserializeOwned>(args: impl Into<String> + Display) -> PanelResult<T> {
-        println!("Executing 'u_panel {}'", &args);
+        println!("Executing '{PANEL_BINARY} {}'", &args);
         let splitted = split(args.into().as_ref()).unwrap();
         Self::output_argv(
             splitted

integration/tests/integration/behaviour.rs

@@ -27,7 +27,7 @@ async fn test_setup_tasks() -> TestResult {
     let agents: Vec<Agent> = Panel::check_output("agents list");
     let agent_uid = agents[0].id;
     let job_alias = "passwd_contents";
-    let cmd = format!("jobs add --alias {} 'cat /etc/passwd'", job_alias);
+    let cmd = format!("jobs add --alias {job_alias} 'cat /etc/passwd'");
     Panel::check_status(cmd);
     let cmd = format!("map add {} {}", agent_uid, job_alias);
     let assigned_uids: Vec<Uuid> = Panel::check_output(cmd);

integration/tests/lib.rs

@@ -1,22 +1,22 @@
-mod behaviour;
 mod fixtures;
 mod helpers;
+mod integration;
 
-use std::env;
 use u_lib::config::MASTER_PORT;
+use u_lib::utils::Env;
 
 #[macro_use]
 extern crate rstest;
 
 #[tokio::test]
 async fn test_non_auth_connection_dropped() {
-    let env_server = env::var("U_SERVER").unwrap();
+    let env = Env::init_default().unwrap();
     let client = reqwest::ClientBuilder::new()
         .danger_accept_invalid_certs(true)
         .build()
         .unwrap();
     match client
-        .get(format!("https://{}:{}", env_server, MASTER_PORT))
+        .get(format!("https://{}:{}", &env.u_server, MASTER_PORT))
         .send()
         .await
     {

lib/u_lib/Cargo.toml

@@ -27,6 +27,7 @@ u_api_proc_macro = { version = "*", path = "../u_api_proc_macro" }
 crossbeam = "0.8.1"
 backtrace = "0.3.61"
 diesel = { version = "1.4.5", features = ["postgres", "uuid"] }
+envy = "0.4.2"
 
 [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
 reqwest = { version = "0.11", features = ["json", "native-tls"] }

lib/u_lib/src/api.rs

@@ -1,14 +1,11 @@
-use crate::messaging;
-//#[allow(non_upper_case_globals)]
 use crate::{
-    config::{MASTER_PORT, MASTER_SERVER},
-    messaging::{AsMsg, BaseMessage},
+    config::MASTER_PORT,
+    messaging::{self, AsMsg, BaseMessage},
     models,
     utils::{opt_to_string, VecDisplay},
     UError, UResult,
 };
 use reqwest::{Certificate, Client, Identity, RequestBuilder, Url};
-use std::env;
 use u_api_proc_macro::api_route;
 use uuid::Uuid;
@@ -22,9 +19,7 @@ pub struct ClientHandler {
 }
 
 impl ClientHandler {
-    pub fn new(server: Option<&str>) -> Self {
-        let env_server = env::var("U_SERVER").unwrap_or(String::from(MASTER_SERVER));
-        let master_server = server.unwrap_or(env_server.as_str());
+    pub fn new(server: &str) -> Self {
         let identity = Identity::from_pkcs12_der(AGENT_IDENTITY, "").unwrap();
         let client = Client::builder()
             .identity(identity)
@@ -33,7 +28,7 @@ impl ClientHandler {
             .unwrap();
         Self {
             client,
-            base_url: Url::parse(&format!("https://{}:{}", master_server, MASTER_PORT)).unwrap(),
+            base_url: Url::parse(&format!("https://{}:{}", server, MASTER_PORT)).unwrap(),
             password: None,
         }
     }

lib/u_lib/src/config.rs

@@ -1,7 +1,6 @@
 use lazy_static::lazy_static;
 use uuid::Uuid;
 
-pub const MASTER_SERVER: &str = "ortem.xyz"; //Ipv4Addr::new(3,9,16,40)
 pub const MASTER_PORT: u16 = 63714;
 
 lazy_static! {

lib/u_lib/src/utils/env.rs

@@ -0,0 +1,36 @@
+use envy::{from_env, Result as EnvResult};
+use serde::{de::DeserializeOwned, Deserialize};
+
+#[derive(Deserialize)]
+pub struct NoneEnv;
+
+#[derive(Deserialize)]
+pub struct Env<E = NoneEnv> {
+    #[serde(default = "default_host")]
+    pub u_server: String,
+    pub inner: E,
+}
+
+impl Env {
+    pub fn init_default() -> EnvResult<Self> {
+        let envs = [".env", ".env.private"];
+        for envfile in &envs {
+            dotenv::from_filename(envfile).ok();
+        }
+        from_env()
+    }
+}
+
+impl<E: DeserializeOwned> Env<E> {
+    pub fn init() -> EnvResult<Self> {
+        let envs = [".env", ".env.private"];
+        for envfile in &envs {
+            dotenv::from_filename(envfile).ok();
+        }
+        from_env()
+    }
+}
+
+fn default_host() -> String {
+    "ortem.xyz".to_string()
+}

lib/u_lib/src/utils/misc.rs

@@ -24,10 +24,3 @@ macro_rules! unwrap_enum {
         }
     };
 }
-
-pub fn init_env() {
-    let envs = [".env", ".env.private"];
-    for envfile in &envs {
-        dotenv::from_filename(envfile).ok();
-    }
-}

lib/u_lib/src/utils/mod.rs

@@ -1,5 +1,6 @@
 mod combined_result;
 mod conv;
+mod env;
 mod fmt;
 mod misc;
 mod proc_output;
@@ -12,6 +13,7 @@ mod vec_display;
 pub use combined_result::*;
 pub use conv::*;
+pub use env::Env;
 pub use fmt::*;
 pub use misc::*;
 pub use proc_output::*;
