big improvements (as usual)

* removed api client macro
* fixed passing --release mode in cargo make
* optimized integration tests
* added logging module (tracing)
* allow u_panel to alter entries
* reworked u_panel args (CRUD)
* improved db hooks
* started implementing web-interface
** encapsulated all frontend in binary
** set up workflow
** make u_panel accept commands from the interface
pull/1/head
plazmoid 2 years ago
parent 83252b6f95
commit c70cdbd262
47 changed files (changed line counts in parentheses):
  1. Cargo.toml (1)
  2. Makefile.toml (35)
  3. bin/u_agent/Cargo.toml (4)
  4. bin/u_agent/src/lib.rs (34)
  5. bin/u_panel/Cargo.toml (7)
  6. bin/u_panel/src/argparse.rs (120)
  7. bin/u_panel/src/main.rs (2)
  8. bin/u_panel/src/server/errors.rs (17)
  9. bin/u_panel/src/server/fe/.gitignore (48)
  10. bin/u_panel/src/server/fe/angular.json (2)
  11. bin/u_panel/src/server/fe/package.json (4)
  12. bin/u_panel/src/server/fe/src/app/app.component.html (50)
  13. bin/u_panel/src/server/fe/src/app/app.component.ts (78)
  14. bin/u_panel/src/server/fe/src/app/app.module.ts (12)
  15. bin/u_panel/src/server/fe/src/index.html (5)
  16. bin/u_panel/src/server/fe/src/styles.less (3)
  17. bin/u_panel/src/server/mod.rs (40)
  18. bin/u_panel/src/tui/mod.rs (2)
  19. bin/u_server/Cargo.toml (3)
  20. bin/u_server/src/db.rs (5)
  21. bin/u_server/src/handlers.rs (85)
  22. bin/u_server/src/init.rs (136)
  23. bin/u_server/src/u_server.rs (151)
  24. images/integration-tests/tests_runner.Dockerfile (2)
  25. integration/Cargo.toml (3)
  26. integration/docker-compose.yml (7)
  27. integration/integration_tests.sh (1)
  28. integration/tests/fixtures/agent.rs (6)
  29. integration/tests/helpers/panel.rs (2)
  30. lib/u_api_proc_macro/Cargo.toml (16)
  31. lib/u_api_proc_macro/src/lib.rs (181)
  32. lib/u_api_proc_macro/tests/tests.rs (15)
  33. lib/u_lib/Cargo.toml (9)
  34. lib/u_lib/src/api.rs (160)
  35. lib/u_lib/src/builder.rs (22)
  36. lib/u_lib/src/config.rs (6)
  37. lib/u_lib/src/errors/variants.rs (11)
  38. lib/u_lib/src/lib.rs (3)
  39. lib/u_lib/src/logging.rs (16)
  40. lib/u_lib/src/messaging/base.rs (7)
  41. lib/u_lib/src/models/agent.rs (7)
  42. lib/u_lib/src/models/jobs/assigned.rs (4)
  43. lib/u_lib/src/models/jobs/meta.rs (87)
  44. lib/u_lib/src/models/jobs/misc.rs (3)
  45. lib/u_lib/src/utils/combined_result.rs (8)
  46. scripts/deploy.sh (2)
  47. scripts/gen_certs.sh (1)

@ -5,7 +5,6 @@ members = [
"bin/u_run", "bin/u_run",
"bin/u_server", "bin/u_server",
"lib/u_lib", "lib/u_lib",
"lib/u_api_proc_macro",
"integration" "integration"
] ]

@ -1,3 +1,21 @@
# i need to preserve --release in args, not to pass cargo make -p release
# due to cargo failing to parse "" argument
env_scripts = ['''
#!@duckscript
args = array ${1} ${2} ${3} ${4} ${5} ${6} ${7}
set_env PROFILE_OVERRIDE debug
for arg in ${args}
e = eq ${arg} "--release"
if ${e}
set_env PROFILE_OVERRIDE release
end
end
profile = get_env PROFILE_OVERRIDE
echo PROFILE_OVERRIDE=${profile}
''']
[config] [config]
default_to_workspace = false default_to_workspace = false
@ -11,6 +29,7 @@ PG_CONFIG_X86_64_UNKNOWN_LINUX_GNU = "${STATIC_PREFIX}/bin/pg_config"
OPENSSL_STATIC = "true" OPENSSL_STATIC = "true"
OPENSSL_DIR = "${STATIC_PREFIX}" OPENSSL_DIR = "${STATIC_PREFIX}"
[tasks.build_static_libs] [tasks.build_static_libs]
script = "./scripts/build_musl_libs.sh" script = "./scripts/build_musl_libs.sh"
@ -30,28 +49,21 @@ command = "${CARGO}"
args = ["build", "--target", "${TARGET}", "${@}"] args = ["build", "--target", "${TARGET}", "${@}"]
[tasks.release_tasks] [tasks.release_tasks]
condition = { env = { "PROFILE_OVERRIDE" = "release"} }
script = ''' script = '''
if [[ "${@}" =~ "--release" ]]; then BINS=$(ls ./target/${TARGET}/${PROFILE_OVERRIDE}/u_* -1 | grep -v ".d")
echo "Creating symlink to release dir..."
ln -s ${ROOTDIR}/target/${TARGET}/release ${ROOTDIR}/release || true
BINS=$(ls ./release/u_* -1 | grep -v ".d")
echo "Stripping..." echo "Stripping..."
strip $BINS strip $BINS
echo "Packing..." echo "Packing..."
upx -9 $BINS upx -9 $BINS
fi
''' '''
[tasks.build] [tasks.build]
dependencies = ["cargo_build", "release_tasks"] dependencies = ["cargo_build", "release_tasks"]
command = "true" clear = true
args = []
[tasks.run] [tasks.run]
script = ''' disabled = true
echo "wtf are you running? run binaries dud!"
exit 1
'''
[tasks.unit] [tasks.unit]
command = "${CARGO}" command = "${CARGO}"
@ -59,6 +71,7 @@ args = ["test", "--target", "${TARGET}", "--lib", "--", "${@}"]
[tasks.integration] [tasks.integration]
script = ''' script = '''
echo "!!! This task doesn't perform project rebuild, trigger it manually if need"
cd ./integration cd ./integration
bash integration_tests.sh ${@} bash integration_tests.sh ${@}
''' '''

@ -2,7 +2,7 @@
name = "u_agent" name = "u_agent"
version = "0.1.0" version = "0.1.0"
authors = ["plazmoid <kronos44@mail.ru>"] authors = ["plazmoid <kronos44@mail.ru>"]
edition = "2018" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -10,11 +10,11 @@ edition = "2018"
tokio = { version = "1.2.0", features = ["macros", "rt-multi-thread", "process", "time"] } tokio = { version = "1.2.0", features = ["macros", "rt-multi-thread", "process", "time"] }
sysinfo = "0.10.5" sysinfo = "0.10.5"
log = "^0.4" log = "^0.4"
env_logger = "0.8.3"
uuid = "0.6.5" uuid = "0.6.5"
reqwest = { version = "0.11", features = ["json"] } reqwest = { version = "0.11", features = ["json"] }
openssl = "*" openssl = "*"
u_lib = { version = "*", path = "../../lib/u_lib" } u_lib = { version = "*", path = "../../lib/u_lib" }
daemonize = "0.4.1"
[build-dependencies] [build-dependencies]
openssl = "*" openssl = "*"

@ -5,15 +5,15 @@
#[macro_use] #[macro_use]
extern crate log; extern crate log;
extern crate env_logger;
use daemonize::Daemonize;
use std::panic; use std::panic;
use std::sync::Arc; use std::sync::Arc;
use tokio::time::{sleep, Duration}; use tokio::time::{sleep, Duration};
use u_lib::{ use u_lib::{
api::ClientHandler, builder::JobBuilder, cache::JobCache, errors::ErrChan, api::ClientHandler, builder::JobBuilder, cache::JobCache, config::get_self_uid,
executor::pop_completed, messaging::Reportable, models::AssignedJob, utils::load_env_default, errors::ErrChan, executor::pop_completed, logging::init_logger, messaging::Reportable,
UError, UID, models::AssignedJob, utils::load_env_default, UError,
}; };
const ITERATION_LATENCY: u64 = 5; const ITERATION_LATENCY: u64 = 5;
@ -70,10 +70,9 @@ async fn error_reporting(client: Arc<ClientHandler>) -> ! {
async fn do_stuff(client: Arc<ClientHandler>) -> ! { async fn do_stuff(client: Arc<ClientHandler>) -> ! {
loop { loop {
match client.get_personal_jobs(Some(*UID)).await { match client.get_personal_jobs(Some(get_self_uid())).await {
Ok(resp) => { Ok(resp) => {
let job_requests = resp.into_builtin_vec(); process_request(resp, &client).await;
process_request(job_requests, &client).await;
} }
Err(err) => ErrChan::send(err), Err(err) => ErrChan::send(err),
} }
@ -88,13 +87,26 @@ async fn do_stuff(client: Arc<ClientHandler>) -> ! {
} }
pub async fn run_forever() -> ! { pub async fn run_forever() -> ! {
//daemonize();
env_logger::init();
let env = load_env_default().unwrap();
let client = Arc::new(ClientHandler::new(&env.u_server));
panic::set_hook(Box::new(|panic_info| { panic::set_hook(Box::new(|panic_info| {
ErrChan::send(UError::Panic(panic_info.to_string())) ErrChan::send(UError::Panic(panic_info.to_string()))
})); }));
if cfg!(debug_assertions) {
init_logger(format!(
"u_agent-{}",
get_self_uid()
.hyphenated()
.to_string()
.split("-")
.next()
.unwrap()
));
} else {
if let Err(e) = Daemonize::new().start() {
ErrChan::send(UError::Runtime(e.to_string()))
}
}
let env = load_env_default().unwrap();
let client = Arc::new(ClientHandler::new(&env.u_server, None));
tokio::spawn(error_reporting(client.clone())); tokio::spawn(error_reporting(client.clone()));
do_stuff(client).await do_stuff(client).await
} }
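The tracing-based logging module referenced here (lib/u_lib/src/logging.rs, 16 lines in the diffstat) is not included in this hunk. Below is only a minimal sketch of what an init_logger with this call shape might look like on top of the tracing-subscriber and tracing-appender crates this commit adds to u_lib; the file layout, rotation policy and exact signature are assumptions, not the committed code.

```rust
// Hedged sketch; the real lib/u_lib/src/logging.rs is not shown in this diff.
use std::env;
use tracing_appender::rolling;
use tracing_subscriber::EnvFilter;

pub fn init_logger(name: impl AsRef<str>) {
    // Keep the old default of "info" when RUST_LOG is unset.
    if env::var("RUST_LOG").is_err() {
        env::set_var("RUST_LOG", "info");
    }
    // One daily-rotated file per component under ./logs, e.g. logs/u_server.<date>.
    let file_appender = rolling::daily("logs", name.as_ref());
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .with_writer(file_appender)
        .with_ansi(false) // no colour escapes in log files
        .init();
}
```

In debug builds u_agent calls it with a per-agent name derived from the UID prefix (as above), while release builds daemonize instead of logging to the console.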

@ -2,7 +2,7 @@
name = "u_panel" name = "u_panel"
version = "0.1.0" version = "0.1.0"
authors = ["plazmoid <kronos44@mail.ru>"] authors = ["plazmoid <kronos44@mail.ru>"]
edition = "2018" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -11,12 +11,11 @@ actix-web = "3.3.2"
backtrace = "0.3.61" backtrace = "0.3.61"
structopt = "0.3.21" structopt = "0.3.21"
log = "^0.4" log = "^0.4"
env_logger = "0.7.1"
uuid = "0.6.5" uuid = "0.6.5"
serde_json = "1.0.4" serde_json = "1.0.4"
serde = { version = "1.0.114", features = ["derive"] } serde = { version = "1.0.114", features = ["derive"] }
tokio = { version = "1.11.0", features = ["rt", "rt-multi-thread"] } tokio = { version = "1.11.0", features = ["rt", "rt-multi-thread"] }
u_lib = { version = "*", path = "../../lib/u_lib" } u_lib = { version = "*", path = "../../lib/u_lib", features = ["panel"] }
tui = { version = "0.16", default-features = false, features = ['crossterm'] } tui = { version = "0.16", default-features = false, features = ['crossterm'] }
crossterm = "0.22.1" crossterm = "0.22.1"
anyhow = "1.0.44" anyhow = "1.0.44"
@ -30,3 +29,5 @@ signal-hook = "0.3.12"
tracing-appender = "0.2.0" tracing-appender = "0.2.0"
rust-embed = { version = "6.3.0", features = ["debug-embed", "compression"] } rust-embed = { version = "6.3.0", features = ["debug-embed", "compression"] }
mime_guess = "2.0.4" mime_guess = "2.0.4"
shlex = "1.1.0"
thiserror = "1.0.31"

@ -1,7 +1,10 @@
use std::fmt;
use structopt::StructOpt; use structopt::StructOpt;
use u_lib::{ use u_lib::{
api::ClientHandler, datatypes::PanelResult, messaging::AsMsg, models::JobMeta, UError, UResult, api::ClientHandler,
datatypes::PanelResult,
messaging::AsMsg,
models::{Agent, AssignedJob, JobMeta, RawJobMeta},
UError, UResult,
}; };
use uuid::Uuid; use uuid::Uuid;
@ -9,15 +12,13 @@ use uuid::Uuid;
pub struct Args { pub struct Args {
#[structopt(subcommand)] #[structopt(subcommand)]
cmd: Cmd, cmd: Cmd,
#[structopt(long)]
json: bool,
} }
#[derive(StructOpt, Debug)] #[derive(StructOpt, Debug)]
enum Cmd { enum Cmd {
Agents(LD), Agents(RUD),
Jobs(JobALD), Jobs(JobCRUD),
Map(JobMapALD), Map(JobMapCRUD),
//TUI(TUIArgs), //TUI(TUIArgs),
Serve, Serve,
} }
@ -29,18 +30,12 @@ pub struct TUIArgs {
} }
#[derive(StructOpt, Debug)] #[derive(StructOpt, Debug)]
enum JobALD { enum JobCRUD {
Add { Create {
//#[structopt(long, parse(try_from_str = parse_uuid))] job: String,
//agent: Option<Uuid>,
#[structopt(long)]
alias: String,
#[structopt(subcommand)]
cmd: JobCmd,
}, },
#[structopt(flatten)] #[structopt(flatten)]
LD(LD), RUD(RUD),
} }
#[derive(StructOpt, Debug)] #[derive(StructOpt, Debug)]
@ -50,29 +45,26 @@ enum JobCmd {
} }
#[derive(StructOpt, Debug)] #[derive(StructOpt, Debug)]
enum JobMapALD { enum JobMapCRUD {
Add { Create {
#[structopt(parse(try_from_str = parse_uuid))] #[structopt(parse(try_from_str = parse_uuid))]
agent_uid: Uuid, agent_uid: Uuid,
job_idents: Vec<String>, job_idents: Vec<String>,
}, },
List { #[structopt(flatten)]
#[structopt(parse(try_from_str = parse_uuid))] RUD(RUD),
uid: Option<Uuid>,
},
Delete {
#[structopt(parse(try_from_str = parse_uuid))]
uid: Uuid,
},
} }
#[derive(StructOpt, Debug)] #[derive(StructOpt, Debug)]
enum LD { enum RUD {
List { Read {
#[structopt(parse(try_from_str = parse_uuid))] #[structopt(parse(try_from_str = parse_uuid))]
uid: Option<Uuid>, uid: Option<Uuid>,
}, },
Update {
item: String,
},
Delete { Delete {
#[structopt(parse(try_from_str = parse_uuid))] #[structopt(parse(try_from_str = parse_uuid))]
uid: Uuid, uid: Uuid,
@ -83,61 +75,55 @@ fn parse_uuid(src: &str) -> Result<Uuid, String> {
Uuid::parse_str(src).map_err(|e| e.to_string()) Uuid::parse_str(src).map_err(|e| e.to_string())
} }
pub async fn process_cmd(client: ClientHandler, args: Args) -> UResult<()> { pub async fn process_cmd(client: ClientHandler, args: Args) -> UResult<String> {
struct Printer { fn to_json<Msg: AsMsg>(data: UResult<Msg>) -> String {
json: bool,
}
impl Printer {
pub fn print<Msg: AsMsg + fmt::Display>(&self, data: UResult<Msg>) {
if self.json {
let data = match data { let data = match data {
Ok(r) => PanelResult::Ok(r), Ok(r) => PanelResult::Ok(r),
Err(e) => PanelResult::Err(e), Err(e) => PanelResult::Err(e),
}; };
println!("{}", serde_json::to_string_pretty(&data).unwrap()); serde_json::to_string(&data).unwrap()
} else {
match data {
Ok(r) => println!("{}", r),
Err(e) => eprintln!("Error: {}", e),
}
}
}
} }
let printer = Printer { json: args.json }; Ok(match args.cmd {
match args.cmd {
Cmd::Agents(action) => match action { Cmd::Agents(action) => match action {
LD::List { uid } => printer.print(client.get_agents(uid).await), RUD::Read { uid } => to_json(client.get_agents(uid).await),
LD::Delete { uid } => printer.print(client.del(Some(uid)).await), RUD::Update { item } => {
let agent = serde_json::from_str::<Agent>(&item)?;
to_json(client.update_item(agent).await)
}
RUD::Delete { uid } => to_json(client.del(uid).await),
}, },
Cmd::Jobs(action) => match action { Cmd::Jobs(action) => match action {
JobALD::Add { JobCRUD::Create { job } => {
cmd: JobCmd::Cmd(cmd), let raw_job = serde_json::from_str::<RawJobMeta>(&job)?;
alias, let job = raw_job.into_builder().build()?;
.. to_json(client.upload_jobs(&[job]).await)
} => {
let job = JobMeta::builder()
.with_shell(cmd.join(" "))
.with_alias(alias)
.build()?;
printer.print(client.upload_jobs(&[job]).await);
} }
JobALD::LD(LD::List { uid }) => printer.print(client.get_jobs(uid).await), JobCRUD::RUD(RUD::Read { uid }) => to_json(client.get_jobs(uid).await),
JobALD::LD(LD::Delete { uid }) => printer.print(client.del(Some(uid)).await), JobCRUD::RUD(RUD::Update { item }) => {
let job = serde_json::from_str::<JobMeta>(&item)?;
to_json(client.update_item(job).await)
}
JobCRUD::RUD(RUD::Delete { uid }) => to_json(client.del(uid).await),
}, },
Cmd::Map(action) => match action { Cmd::Map(action) => match action {
JobMapALD::Add { JobMapCRUD::Create {
agent_uid, agent_uid,
job_idents, job_idents,
} => printer.print(client.set_jobs(Some(agent_uid), &job_idents).await), } => to_json(client.set_jobs(agent_uid, &job_idents).await),
JobMapALD::List { uid } => printer.print(client.get_agent_jobs(uid).await), JobMapCRUD::RUD(RUD::Read { uid }) => to_json(client.get_agent_jobs(uid).await),
JobMapALD::Delete { uid } => printer.print(client.del(Some(uid)).await), JobMapCRUD::RUD(RUD::Update { item }) => {
let assigned = serde_json::from_str::<AssignedJob>(&item)?;
to_json(client.update_item(assigned).await)
}
JobMapCRUD::RUD(RUD::Delete { uid }) => to_json(client.del(uid).await),
}, },
/*Cmd::TUI(args) => crate::tui::init_tui(&args) /*Cmd::TUI(args) => crate::tui::init_tui(&args)
.await .await
.map_err(|e| UError::PanelError(e.to_string()))?,*/ .map_err(|e| UError::PanelError(e.to_string()))?,*/
Cmd::Serve => crate::server::serve().map_err(|e| UError::PanelError(e.to_string()))?, Cmd::Serve => {
crate::server::serve(client).map_err(|e| UError::PanelError(e.to_string()))?;
String::new()
} }
Ok(()) })
} }
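Since process_cmd now returns the serialized PanelResult string instead of printing it, the same CRUD surface can be driven from anywhere that can build an Args value. A hedged usage sketch, mirroring what the /cmd/ handler later in this diff does with shlex; the leading "u_panel" token only stands in for argv[0] and error handling is simplified:

```rust
// Usage sketch; Args and process_cmd are the items defined in this module.
use structopt::StructOpt;
use u_lib::{api::ClientHandler, UResult};

async fn run_one(client: ClientHandler, cmd: &str) -> UResult<String> {
    // e.g. cmd = "agents read" or "jobs delete <uuid>"
    let argv = shlex::split(&format!("u_panel {cmd}")).expect("unbalanced quotes");
    let args = Args::from_iter_safe(argv).expect("bad arguments");
    process_cmd(client, args).await // JSON-encoded PanelResult
}
```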

@ -22,7 +22,7 @@ struct AccessEnv {
#[tokio::main] #[tokio::main]
async fn main() -> AnyResult<()> { async fn main() -> AnyResult<()> {
let env = load_env::<AccessEnv>()?; let env = load_env::<AccessEnv>()?;
let client = ClientHandler::new(&env.u_server).password(env.admin_auth_token); let client = ClientHandler::new(&env.u_server, Some(env.admin_auth_token));
let args = Args::from_args(); let args = Args::from_args();
process_cmd(client, args).await?; process_cmd(client, args).await?;

@ -0,0 +1,17 @@
use actix_web::http::StatusCode;
use actix_web::ResponseError;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Arg parse error: {0}")]
ArgparseError(#[from] structopt::clap::Error),
#[error("Just an error: {0}")]
JustError(String),
}
impl ResponseError for Error {
fn status_code(&self) -> actix_web::http::StatusCode {
StatusCode::BAD_REQUEST
}
}
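With ResponseError implemented like this, actix-web's default error_response() answers with the status_code above and the Display text from the #[error(...)] attribute as the body. A small test sketch of that mapping (illustrative, not part of the commit):

```rust
#[cfg(test)]
mod tests {
    use super::Error;
    use actix_web::ResponseError;

    #[test]
    fn every_error_maps_to_bad_request() {
        let err = Error::JustError("shlex failed".to_string());
        // status_code() is what the default error_response() uses.
        assert_eq!(err.status_code().as_u16(), 400);
        assert_eq!(err.to_string(), "Just an error: shlex failed");
    }
}
```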

@ -0,0 +1,48 @@
# See http://help.github.com/ignore-files/ for more about ignoring files.
# compiled output
/dist
/tmp
/out-tsc
# Only exists if Bazel was run
/bazel-out
# dependencies
/node_modules
# profiling files
chrome-profiler-events*.json
# IDEs and editors
/.idea
.project
.classpath
.c9/
*.launch
.settings/
*.sublime-workspace
# IDE - VSCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
.history/*
# misc
/.angular/cache
/.sass-cache
/connect.lock
/coverage
/libpeerconnection.log
npm-debug.log
yarn-error.log
testem.log
/typings
# System Files
.DS_Store
Thumbs.db
package-lock.json

@ -31,6 +31,7 @@
"src/assets" "src/assets"
], ],
"styles": [ "styles": [
"./node_modules/@angular/material/prebuilt-themes/deeppurple-amber.css",
"src/styles.less" "src/styles.less"
], ],
"scripts": [] "scripts": []
@ -99,6 +100,7 @@
"src/assets" "src/assets"
], ],
"styles": [ "styles": [
"./node_modules/@angular/material/prebuilt-themes/deeppurple-amber.css",
"src/styles.less" "src/styles.less"
], ],
"scripts": [] "scripts": []

@ -11,15 +11,19 @@
"private": true, "private": true,
"dependencies": { "dependencies": {
"@angular/animations": "~13.1.0", "@angular/animations": "~13.1.0",
"@angular/cdk": "^13.3.9",
"@angular/common": "~13.1.0", "@angular/common": "~13.1.0",
"@angular/compiler": "~13.1.0", "@angular/compiler": "~13.1.0",
"@angular/core": "~13.1.0", "@angular/core": "~13.1.0",
"@angular/forms": "~13.1.0", "@angular/forms": "~13.1.0",
"@angular/material": "^13.3.9",
"@angular/platform-browser": "~13.1.0", "@angular/platform-browser": "~13.1.0",
"@angular/platform-browser-dynamic": "~13.1.0", "@angular/platform-browser-dynamic": "~13.1.0",
"@angular/router": "~13.1.0", "@angular/router": "~13.1.0",
"@types/uuid": "^8.3.4",
"rxjs": "~7.4.0", "rxjs": "~7.4.0",
"tslib": "^2.3.0", "tslib": "^2.3.0",
"uuid": "^8.3.2",
"zone.js": "~0.11.4" "zone.js": "~0.11.4"
}, },
"devDependencies": { "devDependencies": {

@ -1 +1,49 @@
<span>{{ title }}</span> <mat-tab-group animationDuration="0ms" mat-align-tabs="center">
<mat-tab label="Agents">
<div class="example-container mat-elevation-z8">
<div class="example-loading-shade" *ngIf="isLoadingResults">
<mat-spinner *ngIf="isLoadingResults"></mat-spinner>
</div>
<div class="example-table-container">
<table mat-table [dataSource]="table_data" class="example-table" matSort matSortActive="id" matSortDisableClear
matSortDirection="desc">
<ng-container matColumnDef="id">
<th mat-header-cell *matHeaderCellDef>id</th>
<td mat-cell *matCellDef="let row">{{row.id}}</td>
</ng-container>
<ng-container matColumnDef="alias">
<th mat-header-cell *matHeaderCellDef>Alias</th>
<td mat-cell *matCellDef="let row">{{row.alias}}</td>
</ng-container>
<ng-container matColumnDef="username">
<th mat-header-cell *matHeaderCellDef>user@hostname</th>
<td mat-cell *matCellDef="let row">{{row.username}}@{{row.hostname}}</td>
</ng-container>
<ng-container matColumnDef="last_active">
<th mat-header-cell *matHeaderCellDef mat-sort-header disableClear>
Last active
</th>
<td mat-cell *matCellDef="let row">{{row.last_active}}</td>
</ng-container>
<tr mat-header-row *matHeaderRowDef="displayedColumns"></tr>
<tr mat-row *matRowDef="let row; columns: displayedColumns;"></tr>
</table>
<button mat-raised-button (click)="fetch_agents()">Refresh</button>
</div>
<!-- <mat-paginator [length]="resultsLength" [pageSize]="30" aria-label="Select page of GitHub search results">
</mat-paginator> -->
</div>
</mat-tab>
<mat-tab label="Jobs"></mat-tab>
<mat-tab label="Results"></mat-tab>
</mat-tab-group>

@ -1,10 +1,82 @@
import { Component } from '@angular/core'; import { HttpClient } from '@angular/common/http';
import { Component, ViewChild, AfterViewInit } from '@angular/core';
import { timer, Observable, of as observableOf } from 'rxjs';
import { catchError, map, startWith, switchMap } from 'rxjs/operators';
interface Agent {
alias: string | null,
hostname: string,
id: string,
is_root: boolean,
is_root_allowed: boolean,
last_active: Date,
platform: string,
regtime: Date,
state: "new" | "active" | "banned",
token: string | null,
username: string,
}
@Component({ @Component({
selector: 'app-root', selector: 'app-root',
templateUrl: './app.component.html', templateUrl: './app.component.html',
styleUrls: ['./app.component.less'] styleUrls: ['./app.component.less']
}) })
export class AppComponent { export class AppComponent implements AfterViewInit {
title = 'ты лох'; displayedColumns: string[] = ['id', 'alias', 'username', 'last_active'];
exampleDatabase!: ExampleHttpDatabase | null;
table_data: Agent[] = [];
isLoadingResults = true;
constructor(private _httpClient: HttpClient) { }
ngAfterViewInit() {
this.exampleDatabase = new ExampleHttpDatabase(this._httpClient);
this.fetch_agents();
// If the user changes the sort order, reset back to the first page.
//this.sort.sortChange.subscribe(() => (this.paginator.pageIndex = 0));
}
fetch_agents() {
timer(0)
.pipe(
startWith({}),
switchMap(() => {
this.isLoadingResults = true;
return this.exampleDatabase!.getAgents().pipe(catchError(() => observableOf(null)));
}),
map(data => {
// Flip flag to show that loading has finished.
this.isLoadingResults = false;
if (data === null) {
return [];
}
// Only refresh the result length if there is new data. In case of rate
// limit errors, we do not want to reset the paginator to zero, as that
// would prevent users from re-triggering requests.
return data.data;
}),
)
.subscribe(data => { if (typeof data !== 'string') { this.table_data = data } else { alert(`Error: ${data}`) } });
}
}
interface ServerResponse<T> {
status: "ok" | "err",
data: T | string
}
class ExampleHttpDatabase {
constructor(private _httpClient: HttpClient) { }
getAgents(): Observable<ServerResponse<Agent[]>> {
const requestUrl = "/cmd/";
const cmd = "agents list";
return this._httpClient.post<ServerResponse<Agent[]>>(requestUrl, cmd);
}
} }

@ -3,6 +3,11 @@ import { BrowserModule } from '@angular/platform-browser';
import { AppRoutingModule } from './app-routing.module'; import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component'; import { AppComponent } from './app.component';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { MatTabsModule } from '@angular/material/tabs';
import { MatTableModule } from '@angular/material/table';
import { MatProgressSpinnerModule } from '@angular/material/progress-spinner';
import { HttpClientModule } from '@angular/common/http';
@NgModule({ @NgModule({
declarations: [ declarations: [
@ -10,7 +15,12 @@ import { AppComponent } from './app.component';
], ],
imports: [ imports: [
BrowserModule, BrowserModule,
AppRoutingModule HttpClientModule,
AppRoutingModule,
MatTabsModule,
MatTableModule,
MatProgressSpinnerModule,
BrowserAnimationsModule
], ],
providers: [], providers: [],
bootstrap: [AppComponent] bootstrap: [AppComponent]

@ -6,8 +6,11 @@
<base href="/"> <base href="/">
<meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="icon" type="image/x-icon" href="favicon.ico"> <link rel="icon" type="image/x-icon" href="favicon.ico">
<link rel="preconnect" href="https://fonts.gstatic.com">
<link href="https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;500&display=swap" rel="stylesheet">
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
</head> </head>
<body> <body class="mat-typography">
<app-root></app-root> <app-root></app-root>
</body> </body>
</html> </html>

@ -1 +1,4 @@
/* You can add global styles to this file, and also import other style files */ /* You can add global styles to this file, and also import other style files */
html, body { height: 100%; }
body { margin: 0; font-family: Roboto, "Helvetica Neue", sans-serif; }

@ -11,10 +11,15 @@ almost all fields are editable, rows are deletable
*/ */
use actix_web::{get, web, App, HttpResponse, HttpServer, Responder}; mod errors;
use crate::{process_cmd, Args};
use actix_web::{get, middleware::Logger, post, web, App, HttpResponse, HttpServer, Responder};
use errors::Error;
use rust_embed::RustEmbed; use rust_embed::RustEmbed;
use std::borrow::Cow; use std::borrow::Cow;
use u_lib::unwrap_enum; use structopt::StructOpt;
use u_lib::{api::ClientHandler, logging::init_logger, unwrap_enum};
#[derive(RustEmbed)] #[derive(RustEmbed)]
#[folder = "./src/server/fe/dist/fe/"] #[folder = "./src/server/fe/dist/fe/"]
@ -45,11 +50,36 @@ async fn static_files_adapter(path: web::Path<(String,)>) -> impl Responder {
} }
} }
#[post("/cmd/")]
async fn send_cmd(
cmd: web::Json<String>,
client: web::Data<ClientHandler>,
) -> Result<impl Responder, Error> {
let parsed_cmd = Args::from_iter_safe(
shlex::split(&cmd.into_inner()).ok_or(Error::JustError("shlex failed".to_string()))?,
)?;
Ok(
match process_cmd(client.as_ref().clone(), parsed_cmd).await {
Ok(r) => HttpResponse::Ok().body(r),
Err(e) => HttpResponse::BadRequest().body(e.to_string()),
},
)
}
#[actix_web::main] #[actix_web::main]
pub async fn serve() -> std::io::Result<()> { pub async fn serve(client: ClientHandler) -> std::io::Result<()> {
init_logger("u_panel");
let addr = "127.0.0.1:8080"; let addr = "127.0.0.1:8080";
println!("Serving at http://{}", addr); info!("Serving at http://{}", addr);
HttpServer::new(|| App::new().service(main_page).service(static_files_adapter)) HttpServer::new(move || {
App::new()
.wrap(Logger::default())
.app_data(web::Data::new(client.clone()))
.service(main_page)
.service(send_cmd)
.service(static_files_adapter)
})
.bind(addr)? .bind(addr)?
.run() .run()
.await .await
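The new /cmd/ route is what the Angular frontend posts its command strings to, but nothing about it is browser-specific. A hedged sketch of hitting it from Rust with reqwest, assuming a panel instance serving locally on the address printed above; the command string and port come from this diff, the rest is illustrative:

```rust
// Standalone sketch; not part of the commit.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let body = reqwest::Client::new()
        .post("http://127.0.0.1:8080/cmd/")
        .json(&"agents read") // web::Json<String> expects a JSON-encoded string
        .send()
        .await?
        .text()
        .await?;
    // On success this is the JSON-encoded PanelResult produced by process_cmd.
    println!("{body}");
    Ok(())
}
```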

@ -148,7 +148,7 @@ fn init_signal_handlers(gui: bool) {
fn init_logger() { fn init_logger() {
use tracing_appender::rolling::{RollingFileAppender, Rotation}; use tracing_appender::rolling::{RollingFileAppender, Rotation};
use tracing_subscriber::EnvFilter; use tracing_subscriber::EnvFilter;
if env::var("RUST_LOG").is_err() { if env::var("RUST_LOG").is_err() {
env::set_var("RUST_LOG", "info") env::set_var("RUST_LOG", "info")

@ -1,12 +1,11 @@
[package] [package]
authors = ["plazmoid <kronos44@mail.ru>"] authors = ["plazmoid <kronos44@mail.ru>"]
edition = "2018" edition = "2021"
name = "u_server" name = "u_server"
version = "0.1.0" version = "0.1.0"
[dependencies] [dependencies]
log = "0.4.11" log = "0.4.11"
simplelog = "0.10"
thiserror = "*" thiserror = "*"
warp = { version = "0.3.1", features = ["tls"] } warp = { version = "0.3.1", features = ["tls"] }
uuid = { version = "0.6.5", features = ["serde", "v4"] } uuid = { version = "0.6.5", features = ["serde", "v4"] }

@ -31,8 +31,9 @@ impl UDB {
"postgres://{}:{}@{}/{}", "postgres://{}:{}@{}/{}",
env.db_user, env.db_password, env.db_host, env.db_name env.db_user, env.db_password, env.db_host, env.db_name
); );
let conn = PgConnection::establish(&db_url).unwrap(); let instance = UDB {
let instance = UDB { conn }; conn: PgConnection::establish(&db_url).unwrap(),
};
Arc::new(Mutex::new(instance)) Arc::new(Mutex::new(instance))
}) })
.lock() .lock()
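For context, lock_db (the function this hunk sits in, not fully shown) hands out a guard over a lazily constructed global connection. A minimal sketch of that pattern under assumed names, using once_cell and a String stand-in for the diesel connection; the real field types and the lazy-init crate u_server uses may differ:

```rust
// Illustrative only; not the committed bin/u_server/src/db.rs.
use once_cell::sync::Lazy;
use std::sync::{Arc, Mutex, MutexGuard};

pub struct UDB {
    pub conn: String, // stand-in for PgConnection in this sketch
}

static DB: Lazy<Arc<Mutex<UDB>>> = Lazy::new(|| {
    // Built exactly once, on first access.
    let db_url = std::env::var("DATABASE_URL").expect("DATABASE_URL unset");
    Arc::new(Mutex::new(UDB { conn: db_url }))
});

impl UDB {
    pub fn lock_db() -> MutexGuard<'static, UDB> {
        DB.lock().expect("db mutex poisoned")
    }
}
```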

@ -7,50 +7,32 @@ use u_lib::{
utils::{OneOrVec, Stripped}, utils::{OneOrVec, Stripped},
}; };
use uuid::Uuid; use uuid::Uuid;
use warp::{ use warp::Rejection;
http::{Response, StatusCode},
Rejection, Reply,
};
/*
pub fn build_response(code: StatusCode, body: impl Into<Body>) -> Response<Body> {
Response::builder().status(code).body(body.into()).unwrap()
}
pub fn build_ok(body: impl Into<Body>) -> Response<Body> {
build_response(StatusCode::OK, body)
}
pub fn build_err(body: impl ToString) -> Response<Body> { type EndpResult<T> = Result<T, Rejection>;
build_response(StatusCode::BAD_REQUEST, body.to_string())
}
pub fn build_message<M: AsMsg + Serialize>(m: M) -> Response<Body> {
warp::reply::json(&m.as_message()).into_response()
}
*/
pub struct Endpoints; pub struct Endpoints;
impl Endpoints { impl Endpoints {
pub async fn add_agent(msg: Agent) -> Result<(), Rejection> { pub async fn add_agent(msg: Agent) -> EndpResult<()> {
UDB::lock_db().insert_agent(&msg).map_err(From::from) UDB::lock_db().insert_agent(&msg).map_err(From::from)
} }
pub async fn get_agents(uid: Option<Uuid>) -> Result<Vec<Agent>, Rejection> { pub async fn get_agents(uid: Option<Uuid>) -> EndpResult<Vec<Agent>> {
UDB::lock_db().get_agents(uid).map_err(From::from) UDB::lock_db().get_agents(uid).map_err(From::from)
} }
pub async fn get_jobs(uid: Option<Uuid>) -> Result<Vec<JobMeta>, Rejection> { pub async fn get_jobs(uid: Option<Uuid>) -> EndpResult<Vec<JobMeta>> {
UDB::lock_db().get_jobs(uid).map_err(From::from) UDB::lock_db().get_jobs(uid).map_err(From::from)
} }
pub async fn get_agent_jobs(uid: Option<Uuid>) -> Result<Vec<AssignedJob>, Rejection> { pub async fn get_agent_jobs(uid: Option<Uuid>) -> EndpResult<Vec<AssignedJob>> {
UDB::lock_db() UDB::lock_db()
.get_exact_jobs(uid, false) .get_exact_jobs(uid, false)
.map_err(From::from) .map_err(From::from)
} }
pub async fn get_personal_jobs(uid: Option<Uuid>) -> Result<Vec<AssignedJob>, Rejection> { pub async fn get_personal_jobs(uid: Option<Uuid>) -> EndpResult<Vec<AssignedJob>> {
let agents = UDB::lock_db().get_agents(uid)?; let agents = UDB::lock_db().get_agents(uid)?;
if agents.is_empty() { if agents.is_empty() {
let db = UDB::lock_db(); let db = UDB::lock_db();
@ -58,30 +40,26 @@ impl Endpoints {
let job = db.find_job_by_alias("agent_hello")?; let job = db.find_job_by_alias("agent_hello")?;
db.set_jobs_for_agent(&uid.unwrap(), &[job.id])?; db.set_jobs_for_agent(&uid.unwrap(), &[job.id])?;
} }
let result = UDB::lock_db().get_exact_jobs(uid, true); let result = UDB::lock_db().get_exact_jobs(uid, true)?;
match result {
Ok(r) => {
let db = UDB::lock_db(); let db = UDB::lock_db();
for j in r.iter() { for j in result.iter() {
db.update_job_status(j.id, JobState::Running)?; db.update_job_status(j.id, JobState::Running)?;
} }
Ok(r) Ok(result)
}
Err(e) => Err(e.into()),
}
} }
pub async fn upload_jobs(msg: BaseMessage<'static, Vec<JobMeta>>) -> Result<(), Rejection> { pub async fn upload_jobs(msg: BaseMessage<'static, Vec<JobMeta>>) -> EndpResult<()> {
UDB::lock_db() UDB::lock_db()
.insert_jobs(&msg.into_inner()) .insert_jobs(&msg.into_inner())
.map_err(From::from) .map_err(From::from)
} }
pub async fn del(uid: Uuid) -> Result<usize, Rejection> { pub async fn del(uid: Uuid) -> EndpResult<usize> {
let db = UDB::lock_db(); let db = UDB::lock_db();
let del_fns = &[UDB::del_agents, UDB::del_jobs, UDB::del_results]; let del_fns = &[UDB::del_agents, UDB::del_jobs, UDB::del_results];
for del_fn in del_fns { for del_fn in del_fns {
let affected = del_fn(&db, &[uid]).unwrap(); let affected = del_fn(&db, &[uid])?;
if affected > 0 { if affected > 0 {
return Ok(affected); return Ok(affected);
} }
@ -92,7 +70,7 @@ impl Endpoints {
pub async fn set_jobs( pub async fn set_jobs(
agent_uid: Uuid, agent_uid: Uuid,
msg: BaseMessage<'static, Vec<String>>, msg: BaseMessage<'static, Vec<String>>,
) -> Result<Vec<Uuid>, Rejection> { ) -> EndpResult<Vec<Uuid>> {
msg.into_inner() msg.into_inner()
.into_iter() .into_iter()
.map(|ident| { .map(|ident| {
@ -106,7 +84,7 @@ impl Endpoints {
pub async fn report<Data: OneOrVec<Reportable> + AsMsg + 'static>( pub async fn report<Data: OneOrVec<Reportable> + AsMsg + 'static>(
msg: BaseMessage<'static, Data>, msg: BaseMessage<'static, Data>,
) -> Result<(), Rejection> { ) -> EndpResult<()> {
let id = msg.id; let id = msg.id;
let mut failed = vec![]; let mut failed = vec![];
for entry in msg.into_inner().into_vec() { for entry in msg.into_inner().into_vec() {
@ -134,7 +112,7 @@ impl Endpoints {
err.agent_id, err.agent_id,
Stripped(&err.msg.as_str()) Stripped(&err.msg.as_str())
); );
UDB::lock_db().report_error(&err).unwrap(); UDB::lock_db().report_error(&err)?;
} }
Reportable::Dummy => (), Reportable::Dummy => (),
} }
@ -144,4 +122,33 @@ impl Endpoints {
} }
Ok(()) Ok(())
} }
pub async fn update_agent(agent: BaseMessage<'static, Agent>) -> EndpResult<()> {
agent
.into_inner()
.save_changes::<Agent>(&UDB::lock_db().conn)
.map_err(Error::from)?;
Ok(())
}
pub async fn update_job(job: BaseMessage<'static, JobMeta>) -> EndpResult<()> {
job.into_inner()
.save_changes::<JobMeta>(&UDB::lock_db().conn)
.map_err(Error::from)?;
Ok(())
}
pub async fn update_assigned_job(
assigned: BaseMessage<'static, AssignedJob>,
) -> EndpResult<()> {
assigned
.into_inner()
.save_changes::<AssignedJob>(&UDB::lock_db().conn)
.map_err(Error::from)?;
Ok(())
}
pub async fn download(_file_uid: String) -> EndpResult<Vec<u8>> {
todo!()
}
} }
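The EndpResult alias and the bare ? operators above depend on the server's error type converting into warp::Rejection; that glue lives in u_server's errors module, which is not shown in this commit. A hedged sketch of the usual shape of such a conversion, with placeholder names rather than the real u_server types:

```rust
// Illustrative only; u_server's actual errors module is not part of this diff.
use warp::reject::{custom, Reject, Rejection};

#[derive(Debug)]
pub struct ServerError(pub String);

impl Reject for ServerError {}

impl From<ServerError> for Rejection {
    fn from(e: ServerError) -> Self {
        // Wrapping as a custom rejection is what lets handlers use `?`.
        custom(e)
    }
}
```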

@ -1,136 +0,0 @@
use crate::handlers::Endpoints;
use crate::{db::UDB, errors::SResult};
use serde::de::DeserializeOwned;
use std::path::PathBuf;
use u_lib::{
messaging::{AsMsg, BaseMessage, Reportable},
models::*,
};
use uuid::Uuid;
use warp::{
body,
reply::{json, reply, Json},
Filter, Rejection, Reply,
};
fn get_content<M>() -> impl Filter<Extract = (BaseMessage<'static, M>,), Error = Rejection> + Clone
where
M: AsMsg + Sync + Send + DeserializeOwned + 'static,
{
body::content_length_limit(1024 * 64).and(body::json::<BaseMessage<M>>())
}
fn into_message<M: AsMsg>(msg: M) -> Json {
json(&msg.as_message())
}
pub fn init_filters(
auth_token: &str,
) -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {
let infallible_none = |_| async { Ok::<(Option<Uuid>,), std::convert::Infallible>((None,)) };
let get_agents = warp::get()
.and(warp::path("get_agents"))
.and(
warp::path::param::<Uuid>()
.map(Some)
.or_else(infallible_none),
)
.and_then(Endpoints::get_agents)
.map(into_message);
let upload_jobs = warp::post()
.and(warp::path("upload_jobs"))
.and(get_content::<Vec<JobMeta>>())
.and_then(Endpoints::upload_jobs)
.map(|_| reply());
let get_jobs = warp::get()
.and(warp::path("get_jobs"))
.and(
warp::path::param::<Uuid>()
.map(Some)
.or_else(infallible_none),
)
.and_then(Endpoints::get_jobs)
.map(into_message);
let get_agent_jobs = warp::get()
.and(warp::path("get_agent_jobs"))
.and(
warp::path::param::<Uuid>()
.map(Some)
.or_else(infallible_none),
)
.and_then(Endpoints::get_agent_jobs)
.map(into_message);
let get_personal_jobs = warp::get()
.and(warp::path("get_personal_jobs"))
.and(warp::path::param::<Uuid>().map(Some))
.and_then(Endpoints::get_personal_jobs)
.map(into_message);
let del = warp::get()
.and(warp::path("del"))
.and(warp::path::param::<Uuid>())
.and_then(Endpoints::del)
.map(|_| reply());
let set_jobs = warp::post()
.and(warp::path("set_jobs"))
.and(warp::path::param::<Uuid>())
.and(get_content::<Vec<String>>())
.and_then(Endpoints::set_jobs)
.map(into_message);
let report = warp::post()
.and(warp::path("report"))
.and(get_content::<Vec<Reportable>>())
.and_then(Endpoints::report)
.map(|_| reply());
let auth_token = format!("Bearer {auth_token}",).into_boxed_str();
let auth_header = warp::header::exact("authorization", Box::leak(auth_token));
let auth_zone = (get_agents
.or(get_jobs)
.or(upload_jobs)
.or(del)
.or(set_jobs)
.or(get_agent_jobs))
.and(auth_header);
let agent_zone = get_jobs.clone().or(get_personal_jobs).or(report);
auth_zone.or(agent_zone)
}
pub fn prefill_jobs() -> SResult<()> {
let agent_hello = JobMeta::builder()
.with_type(misc::JobType::Manage)
.with_alias("agent_hello")
.build()
.unwrap();
UDB::lock_db().insert_jobs(&[agent_hello])
}
pub fn init_logger() {
use simplelog::*;
use std::fs::OpenOptions;
let log_cfg = ConfigBuilder::new()
.set_time_format_str("%x %X")
.set_time_to_local(true)
.build();
let logfile = OpenOptions::new()
.append(true)
.create(true)
.open(PathBuf::from("logs").join("u_server.log"))
.unwrap();
let level = LevelFilter::Info;
let loggers = vec![
WriteLogger::new(level, log_cfg.clone(), logfile) as Box<dyn SharedLogger>,
TermLogger::new(level, log_cfg, TerminalMode::Stderr, ColorChoice::Auto),
];
CombinedLogger::init(loggers).unwrap();
}

@ -14,28 +14,159 @@ extern crate diesel;
mod db; mod db;
mod errors; mod errors;
mod handlers; mod handlers;
mod init;
use errors::{Error, SResult}; use errors::{Error, SResult};
use init::*; use serde::{de::DeserializeOwned, Deserialize};
use serde::Deserialize;
use std::path::PathBuf; use std::path::PathBuf;
use u_lib::{config::MASTER_PORT, utils::load_env}; use u_lib::{
use warp::Filter; config::MASTER_PORT,
logging::init_logger,
messaging::{AsMsg, BaseMessage, Reportable},
models::*,
utils::load_env,
};
use uuid::Uuid;
use warp::{
body,
reply::{json, reply, Json},
Filter, Rejection, Reply,
};
use crate::db::UDB;
use crate::handlers::Endpoints;
#[derive(Deserialize)] #[derive(Deserialize)]
struct ServEnv { struct ServEnv {
admin_auth_token: String, admin_auth_token: String,
} }
//TODO: tracing-subscriber fn get_content<M>() -> impl Filter<Extract = (BaseMessage<'static, M>,), Error = Rejection> + Clone
where
M: AsMsg + Sync + Send + DeserializeOwned + 'static,
{
body::content_length_limit(1024 * 64).and(body::json::<BaseMessage<M>>())
}
fn into_message<M: AsMsg>(msg: M) -> Json {
json(&msg.as_message())
}
pub fn init_endpoints(
auth_token: &str,
) -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {
let path = |p: &'static str| warp::post().and(warp::path(p));
let infallible_none = |_| async { Ok::<(Option<Uuid>,), std::convert::Infallible>((None,)) };
let get_agents = path("get_agents")
.and(
warp::path::param::<Uuid>()
.map(Some)
.or_else(infallible_none),
)
.and_then(Endpoints::get_agents)
.map(into_message);
let upload_jobs = path("upload_jobs")
.and(get_content::<Vec<JobMeta>>())
.and_then(Endpoints::upload_jobs)
.map(ok);
let get_jobs = path("get_jobs")
.and(
warp::path::param::<Uuid>()
.map(Some)
.or_else(infallible_none),
)
.and_then(Endpoints::get_jobs)
.map(into_message);
let get_agent_jobs = path("get_agent_jobs")
.and(
warp::path::param::<Uuid>()
.map(Some)
.or_else(infallible_none),
)
.and_then(Endpoints::get_agent_jobs)
.map(into_message);
let get_personal_jobs = path("get_personal_jobs")
.and(warp::path::param::<Uuid>().map(Some))
.and_then(Endpoints::get_personal_jobs)
.map(into_message);
let del = path("del")
.and(warp::path::param::<Uuid>())
.and_then(Endpoints::del)
.map(ok);
let set_jobs = path("set_jobs")
.and(warp::path::param::<Uuid>())
.and(get_content::<Vec<String>>())
.and_then(Endpoints::set_jobs)
.map(into_message);
let report = path("report")
.and(get_content::<Vec<Reportable>>())
.and_then(Endpoints::report)
.map(ok);
let update_agent = path("update_item")
.and(get_content::<Agent>())
.and_then(Endpoints::update_agent)
.map(ok);
let update_job = path("update_item")
.and(get_content::<JobMeta>())
.and_then(Endpoints::update_job)
.map(ok);
let update_assigned_job = path("update_item")
.and(get_content::<AssignedJob>())
.and_then(Endpoints::update_assigned_job)
.map(ok);
let download = path("download")
.and(warp::path::param::<String>())
.and_then(Endpoints::download)
.map(ok);
let auth_token = format!("Bearer {auth_token}",).into_boxed_str();
let auth_header = warp::header::exact("authorization", Box::leak(auth_token));
let auth_zone = (get_agents
.or(get_jobs)
.or(upload_jobs)
.or(del)
.or(set_jobs)
.or(get_agent_jobs)
.or(update_agent)
.or(update_job)
.or(update_assigned_job)
.or(download))
.and(auth_header);
let agent_zone = get_jobs.or(get_personal_jobs).or(report).or(download);
auth_zone.or(agent_zone)
}
pub fn prefill_jobs() -> SResult<()> {
let agent_hello = RawJobMeta::builder()
.with_type(misc::JobType::Manage)
.with_alias("agent_hello")
.build()
.unwrap();
UDB::lock_db().insert_jobs(&[agent_hello])
}
pub async fn serve() -> SResult<()> { pub async fn serve() -> SResult<()> {
init_logger(); init_logger("u_server");
prefill_jobs()?; prefill_jobs()?;
let env = load_env::<ServEnv>().map_err(|e| Error::Other(e.to_string()))?; let env = load_env::<ServEnv>().map_err(|e| Error::Other(e.to_string()))?;
let routes = init_filters(&env.admin_auth_token); let routes = init_endpoints(&env.admin_auth_token);
let certs_dir = PathBuf::from("certs"); let certs_dir = PathBuf::from("certs");
warp::serve(routes.with(warp::log("warp"))) warp::serve(routes.with(warp::log("warp")))
.tls() .tls()
.cert_path(certs_dir.join("server.crt")) .cert_path(certs_dir.join("server.crt"))
@ -46,6 +177,10 @@ pub async fn serve() -> SResult<()> {
Ok(()) Ok(())
} }
fn ok<T>(_: T) -> impl Reply {
reply()
}
/* /*
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {

@ -1,4 +1,4 @@
FROM rust:1.60 FROM rust:1.62
RUN rustup target add x86_64-unknown-linux-musl RUN rustup target add x86_64-unknown-linux-musl
CMD ["sleep", "3600"] CMD ["sleep", "3600"]

@ -2,14 +2,13 @@
name = "integration" name = "integration"
version = "0.1.0" version = "0.1.0"
authors = ["plazmoid <kronos44@mail.ru>"] authors = ["plazmoid <kronos44@mail.ru>"]
edition = "2018" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
tokio = { version = "1.2.0", features = ["macros", "rt-multi-thread", "process", "time"] } tokio = { version = "1.2.0", features = ["macros", "rt-multi-thread", "process", "time"] }
log = "^0.4" log = "^0.4"
env_logger = "0.8.3"
uuid = { version = "0.6.5", features = ["serde", "v4"] } uuid = { version = "0.6.5", features = ["serde", "v4"] }
reqwest = { version = "0.11", features = ["json"] } reqwest = { version = "0.11", features = ["json"] }
serde_json = "1.0" serde_json = "1.0"

@ -11,7 +11,7 @@ services:
networks: networks:
- u_net - u_net
volumes: volumes:
- ../release/u_server:/unki/u_server - ../target/x86_64-unknown-linux-musl/${PROFILE:-debug}/u_server:/unki/u_server
- ../certs:/unki/certs - ../certs:/unki/certs
- ../logs:/unki/logs:rw - ../logs:/unki/logs:rw
working_dir: /unki working_dir: /unki
@ -58,7 +58,7 @@ services:
networks: networks:
- u_net - u_net
volumes: volumes:
- ../release/u_agent:/u_agent - ../target/x86_64-unknown-linux-musl/${PROFILE:-debug}/u_agent:/u_agent
command: /u_agent u_server command: /u_agent u_server
env_file: env_file:
- ../.env - ../.env
@ -76,9 +76,8 @@ services:
volumes: volumes:
- ./:/tests/ - ./:/tests/
- ../certs:/certs - ../certs:/certs
- ../release/u_panel:/u_panel - ../target/x86_64-unknown-linux-musl/${PROFILE:-debug}/u_panel:/u_panel
- ../lib/u_lib:/lib/u_lib - ../lib/u_lib:/lib/u_lib
- ../lib/u_api_proc_macro:/lib/u_api_proc_macro
working_dir: working_dir:
/tests/ /tests/
depends_on: depends_on:

@ -2,4 +2,5 @@
set -e set -e
export DOCKER_UID=$(id -u) export DOCKER_UID=$(id -u)
export DOCKER_GID=$(id -g) export DOCKER_GID=$(id -g)
[[ "$@" =~ "--release" ]] && export PROFILE=release || export PROFILE=debug
python integration_tests.py $@ python integration_tests.py $@

@ -8,14 +8,14 @@ pub struct RegisteredAgent {
impl RegisteredAgent { impl RegisteredAgent {
pub async fn unregister(self) { pub async fn unregister(self) {
let cli = ClientHandler::new(&ENV.u_server); let cli = ClientHandler::new(&ENV.u_server, None);
cli.del(Some(self.uid)).await.unwrap(); cli.del(self.uid).await.unwrap();
} }
} }
#[fixture] #[fixture]
pub async fn register_agent() -> RegisteredAgent { pub async fn register_agent() -> RegisteredAgent {
let cli = ClientHandler::new(&ENV.u_server); let cli = ClientHandler::new(&ENV.u_server, None);
let agent_uid = Uuid::new_v4(); let agent_uid = Uuid::new_v4();
let resp = cli let resp = cli
.get_personal_jobs(Some(agent_uid)) .get_personal_jobs(Some(agent_uid))

@ -37,7 +37,7 @@ impl Panel {
pub fn output<T: DeserializeOwned + Debug>( pub fn output<T: DeserializeOwned + Debug>(
args: impl Into<String> + Display, args: impl Into<String> + Display,
) -> PanelResult<T> { ) -> PanelResult<T> {
eprintln!("EXEC >>> {PANEL_BINARY} {}", &args); eprintln!(">>> {PANEL_BINARY} {}", &args);
let splitted = shlex::split(args.into().as_ref()).unwrap(); let splitted = shlex::split(args.into().as_ref()).unwrap();
let result = Self::output_argv( let result = Self::output_argv(
splitted splitted

@ -1,16 +0,0 @@
[package]
name = "u_api_proc_macro"
version = "0.1.0"
authors = ["plazmoid <kronos44@mail.ru>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
proc-macro = true
[dependencies]
syn = { version = "1.0", features = ["full", "extra-traits"] }
quote = "1.0"
strum = { version = "0.20", features = ["derive"] }
proc-macro2 = "1.0"

@ -1,181 +0,0 @@
use proc_macro::TokenStream;
use proc_macro2::{Ident, TokenStream as TokenStream2};
use quote::quote;
use std::{collections::HashMap, str::FromStr};
use strum::EnumString;
use syn::{
parse_macro_input, punctuated::Punctuated, AttributeArgs, FnArg, ItemFn, Lit, NestedMeta,
ReturnType, Signature, Token, Type,
};
#[derive(EnumString, Debug)]
enum ReqMethod {
GET,
POST,
}
#[derive(Debug)]
struct Endpoint {
method: ReqMethod,
}
#[derive(Debug)]
struct FnArgs {
url_param: Option<Type>,
payload: Option<Type>,
}
#[proc_macro_attribute]
pub fn api_route(args: TokenStream, item: TokenStream) -> TokenStream {
let args: AttributeArgs = parse_macro_input!(args);
let input: ItemFn = parse_macro_input!(item);
let Signature {
ident,
inputs,
generics,
output,
..
} = input.sig;
let (impl_generics, _, _) = generics.split_for_impl();
let FnArgs { url_param, payload } = parse_fn_args(inputs);
let Endpoint { method } = parse_attr_args(args);
let url_path = build_url_path(&ident, &url_param);
let return_ty = match output {
ReturnType::Type(_, ty) => quote!(#ty),
ReturnType::Default => quote!(()),
};
let request = match method {
ReqMethod::GET => build_get(url_path),
ReqMethod::POST => build_post(url_path, &payload),
};
let url_param = match url_param {
Some(p) => quote!(, param: #p),
None => TokenStream2::new(),
};
let payload = match payload {
Some(p) => quote!(, payload: #p),
None => TokenStream2::new(),
};
let q = quote! {
pub async fn #ident #impl_generics(
&self #url_param #payload
) -> UResult<#return_ty> {
let request = {
#request
};
let response = request.send().await?;
let content_len = response.content_length();
let is_success = match response.error_for_status_ref() {
Ok(_) => Ok(()),
Err(e) => Err(UError::from(e))
};
let resp = response.text().await?;
let result = match is_success {
Ok(_) => {
serde_json::from_str::<BaseMessage<#return_ty>>(&resp)
.map(|msg| msg.into_inner())
.or_else(|e| {
match content_len {
Some(0) => Ok(Default::default()),
_ => Err(UError::NetError(e.to_string(), resp.clone()))
}
})
},
Err(UError::NetError(err_src, _)) => Err(
UError::NetError(
err_src,
resp
)
),
_ => unreachable!()
};
Ok(result?)
}
};
q.into()
}
fn parse_fn_args(raw: Punctuated<FnArg, Token![,]>) -> FnArgs {
let mut arg: HashMap<String, Type> = raw
.into_iter()
.filter_map(|arg| {
if let FnArg::Typed(argt) = arg {
let mut arg_name = String::new();
// did you think I won't overplay you? won't destroy?
|arg_ident| -> TokenStream {
let q: TokenStream = quote!(#arg_ident).into();
arg_name = parse_macro_input!(q as Ident).to_string();
TokenStream::new()
}(argt.pat);
if &arg_name != "url_param" && &arg_name != "payload" {
panic!("Wrong arg name: {}", &arg_name)
}
let arg_type = *argt.ty;
Some((arg_name, arg_type))
} else {
None
}
})
.collect();
FnArgs {
url_param: arg.remove("url_param"),
payload: arg.remove("payload"),
}
}
fn build_get(url: TokenStream2) -> TokenStream2 {
quote! {
let request = self.build_get(#url);
request
}
}
fn build_post(url: TokenStream2, payload: &Option<Type>) -> TokenStream2 {
let pld = match payload {
Some(_) => quote! {
.json(&payload.as_message())
},
None => TokenStream2::new(),
};
quote! {
let request = self.build_post(#url);
request #pld
}
}
fn build_url_path(path: &Ident, url_param: &Option<Type>) -> TokenStream2 {
let url_param = match url_param {
Some(_) => quote! {
+ &opt_to_string(param)
},
None => TokenStream2::new(),
};
quote! {
&format!(
"{}/{}",
stringify!(#path),
String::new() #url_param
)
}
}
fn parse_attr_args(args: AttributeArgs) -> Endpoint {
let mut args = args.into_iter();
let method = match args.next() {
Some(method) => match method {
NestedMeta::Lit(l) => {
if let Lit::Str(s) = l {
match ReqMethod::from_str(&s.value()) {
Ok(v) => v,
Err(_) => panic!("Unknown method"),
}
} else {
panic!("Method must be a str")
}
}
_ => panic!("Method must be on the first place"),
},
None => panic!("Method required"),
};
Endpoint { method }
}

@ -1,15 +0,0 @@
/*
use std::fmt::Display;
use u_api_proc_macro::api_route;
type UResult<T, E> = Result<T, E>;
struct ClientHandler;
struct Paths;
#[test]
fn test1() {
#[api_route("GET", Uuid)]
fn list<T: Display>(url_param: T) {}
}
*/

@ -2,7 +2,7 @@
name = "u_lib" name = "u_lib"
version = "0.1.0" version = "0.1.0"
authors = ["plazmoid <kronos44@mail.ru>"] authors = ["plazmoid <kronos44@mail.ru>"]
edition = "2018" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -16,17 +16,20 @@ lazy_static = "1.4.0"
futures = "0.3.5" futures = "0.3.5"
thiserror = "*" thiserror = "*"
log = "*" log = "*"
env_logger = "0.8.3"
diesel-derive-enum = { version = "1", features = ["postgres"] } diesel-derive-enum = { version = "1", features = ["postgres"] }
chrono = "0.4.19" chrono = "0.4.19"
strum = { version = "0.20", features = ["derive"] } strum = { version = "0.20", features = ["derive"] }
once_cell = "1.7.2" once_cell = "1.7.2"
shlex = "1.0.0" shlex = "1.0.0"
u_api_proc_macro = { version = "*", path = "../u_api_proc_macro" }
crossbeam = "0.8.1" crossbeam = "0.8.1"
diesel = { version = "1.4.5", features = ["postgres", "uuid"] } diesel = { version = "1.4.5", features = ["postgres", "uuid"] }
envy = "0.4.2" envy = "0.4.2"
serde_json = "1.0.81" serde_json = "1.0.81"
tracing-subscriber = { version = "0.3.14", features = ["env-filter"] }
tracing-appender = "0.2.2"
[features]
panel = []
[target.'cfg(not(target_arch = "wasm32"))'.dependencies] [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
reqwest = { version = "0.11", features = ["json", "native-tls"] } reqwest = { version = "0.11", features = ["json", "native-tls"] }

@ -1,98 +1,134 @@
use std::collections::HashMap;
use crate::{ use crate::{
config::MASTER_PORT, config::MASTER_PORT,
messaging::{self, AsMsg, BaseMessage}, messaging::{self, AsMsg, BaseMessage, Empty},
models, models::{self, Agent},
utils::{opt_to_string, VecDisplay}, utils::opt_to_string,
UError, UResult, UError, UResult,
}; };
use reqwest::{Certificate, Client, Identity, RequestBuilder, Url}; use reqwest::{header::HeaderMap, Certificate, Client, Identity, Url};
use u_api_proc_macro::api_route; use serde::de::DeserializeOwned;
use uuid::Uuid; use uuid::Uuid;
const AGENT_IDENTITY: &[u8] = include_bytes!("../../../certs/alice.p12"); const AGENT_IDENTITY: &[u8] = include_bytes!("../../../certs/alice.p12");
const ROOT_CA_CERT: &[u8] = include_bytes!("../../../certs/ca.crt"); const ROOT_CA_CERT: &[u8] = include_bytes!("../../../certs/ca.crt");
#[derive(Clone)]
pub struct ClientHandler { pub struct ClientHandler {
base_url: Url, base_url: Url,
client: Client, client: Client,
password: Option<String>,
} }
impl ClientHandler { impl ClientHandler {
pub fn new(server: &str) -> Self { pub fn new(server: &str, password: Option<String>) -> Self {
let identity = Identity::from_pkcs12_der(AGENT_IDENTITY, "").unwrap(); let identity = Identity::from_pkcs12_der(AGENT_IDENTITY, "").unwrap();
let client = Client::builder() let mut client = Client::builder().identity(identity);
.identity(identity) if let Some(pwd) = password {
client = client.default_headers(
HeaderMap::try_from(&HashMap::from([(
"Authorization".to_string(),
format!("Bearer {pwd}"),
)]))
.unwrap(),
)
}
let client = client
.add_root_certificate(Certificate::from_pem(ROOT_CA_CERT).unwrap()) .add_root_certificate(Certificate::from_pem(ROOT_CA_CERT).unwrap())
.build() .build()
.unwrap(); .unwrap();
Self { Self {
client, client,
base_url: Url::parse(&format!("https://{}:{}", server, MASTER_PORT)).unwrap(), base_url: Url::parse(&format!("https://{}:{}", server, MASTER_PORT)).unwrap(),
password: None,
} }
} }
pub fn password(mut self, password: String) -> ClientHandler { async fn _req<P: AsMsg, M: AsMsg + DeserializeOwned>(
self.password = Some(password); &self,
self url: impl AsRef<str>,
} payload: P,
) -> UResult<M> {
let request = self
.client
.post(self.base_url.join(url.as_ref()).unwrap())
.json(&payload.as_message());
fn set_pwd(&self, rb: RequestBuilder) -> RequestBuilder { let response = request.send().await?;
match &self.password { let is_success = match response.error_for_status_ref() {
Some(p) => rb.bearer_auth(p), Ok(_) => Ok(()),
None => rb, Err(e) => Err(UError::from(e)),
};
let resp = response.text().await?;
match is_success {
Ok(_) => serde_json::from_str::<BaseMessage<M>>(&resp)
.map(|msg| msg.into_inner())
.or_else(|e| Err(UError::NetError(e.to_string(), resp.clone()))),
Err(UError::NetError(err, _)) => Err(UError::NetError(err, resp)),
_ => unreachable!(),
} }
} }
fn build_get(&self, url: &str) -> RequestBuilder { // get jobs for client
let rb = self.client.get(self.base_url.join(url).unwrap()); pub async fn get_personal_jobs(
self.set_pwd(rb) &self,
url_param: Option<Uuid>,
) -> UResult<Vec<models::AssignedJob>> {
self._req(
format!("get_personal_jobs/{}", opt_to_string(url_param)),
Empty,
)
.await
} }
fn build_post(&self, url: &str) -> RequestBuilder {
let rb = self.client.post(self.base_url.join(url).unwrap());
self.set_pwd(rb)
}
//
// get jobs for client
#[api_route("GET")]
async fn get_personal_jobs(&self, url_param: Option<Uuid>) -> VecDisplay<models::AssignedJob> {}
//
// send something to server // send something to server
#[api_route("POST")] pub async fn report(&self, payload: &[messaging::Reportable]) -> UResult<Empty> {
async fn report(&self, payload: &[messaging::Reportable]) -> messaging::Empty {} self._req("report", payload).await
// }
// download file // download file
#[api_route("GET")] pub async fn dl(&self, file: String) -> UResult<Vec<u8>> {
async fn dl(&self, url_param: Option<Uuid>) -> Vec<u8> {} self._req(format!("dl/{file}"), Empty).await
// }
// request download }
#[api_route("POST")]
async fn dlr(&self, url_param: Option<String>) -> messaging::DownloadInfo {}
//##########// Admin area //##########// //##########// Admin area //##########//
/// client listing #[cfg(feature = "panel")]
#[api_route("GET")] impl ClientHandler {
async fn get_agents(&self, url_param: Option<Uuid>) -> VecDisplay<models::Agent> {} /// agent listing
// pub async fn get_agents(&self, agent: Option<Uuid>) -> UResult<Vec<models::Agent>> {
// get all available jobs self._req(format!("get_agents/{}", opt_to_string(agent)), Empty)
#[api_route("GET")] .await
async fn get_jobs(&self, url_param: Option<Uuid>) -> VecDisplay<models::JobMeta> {} }
//
// create and upload job /// update something
#[api_route("POST")] pub async fn update_item(&self, item: impl AsMsg) -> UResult<Empty> {
async fn upload_jobs(&self, payload: &[models::JobMeta]) -> messaging::Empty {} self._req("update_item", item).await
// }
// delete something
#[api_route("GET")] /// get all available jobs
async fn del(&self, url_param: Option<Uuid>) -> i32 {} pub async fn get_jobs(&self, job: Option<Uuid>) -> UResult<Vec<models::JobMeta>> {
// self._req(format!("get_jobs/{}", opt_to_string(job)), Empty)
// set jobs for any client .await
#[api_route("POST")] }
async fn set_jobs(&self, url_param: Option<Uuid>, payload: &[String]) -> VecDisplay<Uuid> {}
// /// create and upload job
// get jobs for any client pub async fn upload_jobs(&self, payload: &[models::JobMeta]) -> UResult<Empty> {
#[api_route("GET")] self._req("upload_jobs", payload).await
async fn get_agent_jobs(&self, url_param: Option<Uuid>) -> VecDisplay<models::AssignedJob> {} }
/// delete something
pub async fn del(&self, item: Uuid) -> UResult<i32> {
self._req(format!("del/{item}"), Empty).await
}
/// set jobs for any agent
pub async fn set_jobs(&self, agent: Uuid, job_idents: &[String]) -> UResult<Vec<Uuid>> {
self._req(format!("set_jobs/{agent}"), job_idents).await
}
/// get jobs for any agent
pub async fn get_agent_jobs(&self, agent: Option<Uuid>) -> UResult<Vec<models::AssignedJob>> {
self._req(format!("set_jobs/{}", opt_to_string(agent)), Empty)
.await
}
} }
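A short usage sketch of the reworked client, which replaces the removed u_api_proc_macro: admin-only calls now sit behind the "panel" cargo feature and the bearer token is supplied at construction time. Host name and token below are illustrative; both calls go through the same _req helper shown above:

```rust
// Usage sketch only.
use u_lib::{api::ClientHandler, config::get_self_uid};

async fn demo() -> u_lib::UResult<()> {
    // Agent side: mTLS identity baked in, no bearer token.
    let agent_client = ClientHandler::new("u_server", None);
    let _my_jobs = agent_client.get_personal_jobs(Some(get_self_uid())).await?;

    // Panel side: needs u_lib built with the new "panel" feature plus the admin token.
    let admin = ClientHandler::new("u_server", Some("admin_auth_token".into()));
    let _all_agents = admin.get_agents(None).await?;
    Ok(())
}
```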

@ -2,7 +2,7 @@ use crate::{
cache::JobCache, cache::JobCache,
executor::{DynFut, Waiter}, executor::{DynFut, Waiter},
messaging::Reportable, messaging::Reportable,
models::{Agent, AssignedJob, JobMeta, JobType}, models::{Agent, AssignedJob, JobMeta, JobType, RawJobMeta},
utils::{CombinedResult, OneOrVec}, utils::{CombinedResult, OneOrVec},
UError, UResult, UError, UResult,
}; };
@ -102,7 +102,7 @@ impl NamedJobBuilder {
.into_vec() .into_vec()
.into_iter() .into_iter()
.filter_map( .filter_map(
|(alias, cmd)| match JobMeta::builder().with_shell(cmd).build() { |(alias, cmd)| match RawJobMeta::builder().with_shell(cmd).build() {
Ok(meta) => Some((alias, meta)), Ok(meta) => Some((alias, meta)),
Err(e) => { Err(e) => {
result.err(e); result.err(e);
@ -164,7 +164,7 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn test_is_really_async() { async fn test_is_really_async() {
const SLEEP_SECS: u64 = 1; const SLEEP_SECS: u64 = 1;
let job = JobMeta::from_shell(format!("sleep {}", SLEEP_SECS)).unwrap(); let job = RawJobMeta::from_shell(format!("sleep {}", SLEEP_SECS)).unwrap();
let sleep_jobs: Vec<JobMeta> = (0..50).map(|_| job.clone()).collect(); let sleep_jobs: Vec<JobMeta> = (0..50).map(|_| job.clone()).collect();
let now = SystemTime::now(); let now = SystemTime::now();
JobBuilder::from_meta(sleep_jobs).unwrap_one().wait().await; JobBuilder::from_meta(sleep_jobs).unwrap_one().wait().await;
@ -201,7 +201,7 @@ mod tests {
#[case] payload: Option<&[u8]>, #[case] payload: Option<&[u8]>,
#[case] expected_result: &str, #[case] expected_result: &str,
) -> TestResult { ) -> TestResult {
let mut job = JobMeta::builder().with_shell(cmd); let mut job = RawJobMeta::builder().with_shell(cmd);
if let Some(p) = payload { if let Some(p) = payload {
job = job.with_payload(p); job = job.with_payload(p);
} }
@ -217,12 +217,12 @@ mod tests {
async fn test_complex_load() -> TestResult { async fn test_complex_load() -> TestResult {
const SLEEP_SECS: u64 = 1; const SLEEP_SECS: u64 = 1;
let now = SystemTime::now(); let now = SystemTime::now();
let longest_job = JobMeta::from_shell(format!("sleep {}", SLEEP_SECS)).unwrap(); let longest_job = RawJobMeta::from_shell(format!("sleep {}", SLEEP_SECS)).unwrap();
let longest_job = JobBuilder::from_meta(longest_job) let longest_job = JobBuilder::from_meta(longest_job)
.unwrap_one() .unwrap_one()
.spawn() .spawn()
.await; .await;
let ls = JobBuilder::from_meta(JobMeta::from_shell("ls")?) let ls = JobBuilder::from_meta(RawJobMeta::from_shell("ls")?)
.unwrap_one() .unwrap_one()
.wait_one() .wait_one()
.await; .await;
@ -231,7 +231,7 @@ mod tests {
let folders = ls.to_string_result(); let folders = ls.to_string_result();
let subfolders_jobs: Vec<JobMeta> = folders let subfolders_jobs: Vec<JobMeta> = folders
.lines() .lines()
.map(|f| JobMeta::from_shell(format!("ls {}", f)).unwrap()) .map(|f| RawJobMeta::from_shell(format!("ls {}", f)).unwrap())
.collect(); .collect();
let ls_subfolders = JobBuilder::from_meta(subfolders_jobs) let ls_subfolders = JobBuilder::from_meta(subfolders_jobs)
.unwrap_one() .unwrap_one()
@ -265,7 +265,7 @@ mod tests {
*/ */
#[tokio::test] #[tokio::test]
async fn test_failing_shell_job() -> TestResult { async fn test_failing_shell_job() -> TestResult {
let job = JobMeta::from_shell("lol_kek_puk")?; let job = RawJobMeta::from_shell("lol_kek_puk")?;
let job_result = JobBuilder::from_meta(job).unwrap_one().wait_one().await; let job_result = JobBuilder::from_meta(job).unwrap_one().wait_one().await;
let job_result = unwrap_enum!(job_result, Reportable::Assigned); let job_result = unwrap_enum!(job_result, Reportable::Assigned);
let output = job_result.to_string_result(); let output = job_result.to_string_result();
@ -283,7 +283,7 @@ mod tests {
#[case] payload: Option<&[u8]>, #[case] payload: Option<&[u8]>,
#[case] err_str: &str, #[case] err_str: &str,
) -> TestResult { ) -> TestResult {
let mut job = JobMeta::builder().with_shell(cmd); let mut job = RawJobMeta::builder().with_shell(cmd);
if let Some(p) = payload { if let Some(p) = payload {
job = job.with_payload(p); job = job.with_payload(p);
} }
@ -296,10 +296,10 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn test_different_job_types() -> TestResult { async fn test_different_job_types() -> TestResult {
let mut jobs = NamedJobBuilder::from_meta(vec![ let mut jobs = NamedJobBuilder::from_meta(vec![
("sleeper", JobMeta::from_shell("sleep 3")?), ("sleeper", RawJobMeta::from_shell("sleep 3")?),
( (
"gatherer", "gatherer",
JobMeta::builder().with_type(JobType::Manage).build()?, RawJobMeta::builder().with_type(JobType::Manage).build()?,
), ),
]) ])
.wait() .wait()

@ -4,5 +4,9 @@ use uuid::Uuid;
pub const MASTER_PORT: u16 = 63714; pub const MASTER_PORT: u16 = 63714;
lazy_static! { lazy_static! {
pub static ref UID: Uuid = Uuid::new_v4(); static ref UID: Uuid = Uuid::new_v4();
}
pub fn get_self_uid() -> Uuid {
*UID
} }
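The UID static is no longer public; callers go through get_self_uid() instead. A minimal sketch, assuming the config module stays reachable from dependent crates (only the accessor itself appears in this hunk):

use uuid::Uuid;

fn current_agent_id() -> Uuid {
    // Same lazily generated v4 uuid on every call within one process.
    u_lib::config::get_self_uid()
}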

@ -32,7 +32,7 @@ pub enum UError {
#[error("Job {0} doesn't exist")] #[error("Job {0} doesn't exist")]
NoJob(Uuid), NoJob(Uuid),
#[error("Error while processing {0}: {1}")] #[error("FS error while processing {0}: {1}")]
FSError(String, String), FSError(String, String),
#[error("Wrong auth token")] #[error("Wrong auth token")]
@ -43,6 +43,9 @@ pub enum UError {
#[error("Panel error: {0}")] #[error("Panel error: {0}")]
PanelError(String), PanelError(String),
#[error("Deserialize from json error: {0}")]
DeserializeError(String),
} }
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
@ -51,3 +54,9 @@ impl From<ReqError> for UError {
UError::NetError(e.to_string(), String::new()) UError::NetError(e.to_string(), String::new())
} }
} }
impl From<serde_json::Error> for UError {
fn from(e: serde_json::Error) -> Self {
UError::DeserializeError(e.to_string())
}
}
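The new From<serde_json::Error> impl means JSON parsing can use ? inside UResult-returning code. A small sketch; the generic helper is illustrative, only the error conversion comes from this hunk:

use serde::de::DeserializeOwned;
use u_lib::UResult;

// serde_json::Error is converted into UError::DeserializeError by `?`.
fn parse_json<T: DeserializeOwned>(raw: &str) -> UResult<T> {
    Ok(serde_json::from_str(raw)?)
}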

@ -9,6 +9,7 @@ pub mod exports {
pub mod datatypes; pub mod datatypes;
pub mod errors; pub mod errors;
pub mod executor; pub mod executor;
pub mod logging;
pub mod messaging; pub mod messaging;
pub mod models; pub mod models;
pub mod utils; pub mod utils;
@ -24,7 +25,6 @@ pub mod exports {
pub mod utils; pub mod utils;
} }
pub use config::UID;
pub use errors::{UError, UResult}; pub use errors::{UError, UResult};
pub use exports::*; pub use exports::*;
@ -38,7 +38,6 @@ extern crate diesel;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
extern crate env_logger;
#[cfg(test)] #[cfg(test)]
#[macro_use] #[macro_use]

@ -0,0 +1,16 @@
use std::env;
use std::path::Path;
use tracing_appender::rolling;
use tracing_subscriber::EnvFilter;
pub fn init_logger(logfile: impl AsRef<Path> + Send + Sync + 'static) {
if env::var("RUST_LOG").is_err() {
env::set_var("RUST_LOG", "info")
}
tracing_subscriber::fmt::fmt()
.with_env_filter(EnvFilter::from_default_env())
.with_writer(move || rolling::never(".", logfile.as_ref().with_extension("log")))
.init();
}
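The logging module routes output through tracing with a rolling file writer. A minimal sketch of initializing it from a binary, assuming tracing is available there; the "u_agent" file stem is illustrative, and init_logger itself appends the .log extension and writes to the current directory as shown above:

fn main() {
    // RUST_LOG defaults to "info" when unset; output goes to ./u_agent.log
    u_lib::logging::init_logger("u_agent");
    tracing::info!("logger initialized");
}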

@ -1,5 +1,5 @@
use crate::config::get_self_uid;
use crate::utils::VecDisplay; use crate::utils::VecDisplay;
use crate::UID;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::borrow::Cow; use std::borrow::Cow;
use std::fmt::Display; use std::fmt::Display;
@ -43,7 +43,10 @@ impl<'cow, I: AsMsg> BaseMessage<'cow, I> {
C: Into<Moo<'cow, I>>, C: Into<Moo<'cow, I>>,
{ {
let Moo(inner) = inner.into(); let Moo(inner) = inner.into();
Self { id: *UID, inner } Self {
id: get_self_uid(),
inner,
}
} }
pub fn into_inner(self) -> I { pub fn into_inner(self) -> I {

@ -6,7 +6,10 @@ use strum::Display;
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
use crate::builder::NamedJobBuilder; use crate::builder::NamedJobBuilder;
use crate::{messaging::Reportable, models::schema::*, unwrap_enum, utils::systime_to_string, UID}; use crate::{
config::get_self_uid, messaging::Reportable, models::schema::*, unwrap_enum,
utils::systime_to_string,
};
use uuid::Uuid; use uuid::Uuid;
@ -107,7 +110,7 @@ impl Default for Agent {
fn default() -> Self { fn default() -> Self {
Self { Self {
alias: None, alias: None,
id: *UID, id: get_self_uid(),
hostname: String::new(), hostname: String::new(),
is_root: false, is_root: false,
is_root_allowed: false, is_root_allowed: false,

@ -2,11 +2,11 @@ use super::JobState;
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
use crate::{cache::JobCache, utils::TempFile}; use crate::{cache::JobCache, utils::TempFile};
use crate::{ use crate::{
config::get_self_uid,
errors::UError, errors::UError,
messaging::Reportable, messaging::Reportable,
models::schema::*, models::schema::*,
utils::{systime_to_string, ProcOutput}, utils::{systime_to_string, ProcOutput},
UID,
}; };
use diesel::{Identifiable, Insertable, Queryable}; use diesel::{Identifiable, Insertable, Queryable};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -119,7 +119,7 @@ impl AssignedJob {
pub fn new(job_id: Uuid, other: Option<&Self>) -> Self { pub fn new(job_id: Uuid, other: Option<&Self>) -> Self {
Self { Self {
agent_id: *UID, agent_id: get_self_uid(),
job_id, job_id,
..other.unwrap_or(&Default::default()).clone() ..other.unwrap_or(&Default::default()).clone()
} }

@ -2,11 +2,14 @@ use super::JobType;
use crate::{models::schema::*, utils::Stripped, UError, UResult}; use crate::{models::schema::*, utils::Stripped, UError, UResult};
use diesel::{Identifiable, Insertable, Queryable}; use diesel::{Identifiable, Insertable, Queryable};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt; use std::path::PathBuf;
use std::str::from_utf8; use std::str::from_utf8;
use std::{fmt, fs};
use uuid::Uuid; use uuid::Uuid;
#[derive(Serialize, Deserialize, Clone, Debug, Queryable, Identifiable, Insertable)] #[derive(
Serialize, Deserialize, Clone, Debug, Queryable, Identifiable, Insertable, AsChangeset,
)]
#[table_name = "jobs"] #[table_name = "jobs"]
pub struct JobMeta { pub struct JobMeta {
pub alias: Option<String>, pub alias: Option<String>,
@ -20,12 +23,28 @@ pub struct JobMeta {
pub payload: Option<Vec<u8>>, pub payload: Option<Vec<u8>>,
} }
impl JobMeta { #[derive(Deserialize)]
pub struct RawJobMeta {
pub alias: Option<String>,
pub argv: String,
pub id: Uuid,
pub exec_type: JobType,
//pub schedule: JobSchedule,
pub platform: String,
pub payload: Option<Vec<u8>>,
pub payload_path: Option<PathBuf>,
}
impl RawJobMeta {
pub fn builder() -> JobMetaBuilder { pub fn builder() -> JobMetaBuilder {
JobMetaBuilder::default() JobMetaBuilder::default()
} }
pub fn from_shell(cmd: impl Into<String>) -> UResult<Self> { pub fn into_builder(self) -> JobMetaBuilder {
JobMetaBuilder { inner: self }
}
pub fn from_shell(cmd: impl Into<String>) -> UResult<JobMeta> {
Self::builder().with_shell(cmd).build() Self::builder().with_shell(cmd).build()
} }
} }
@ -60,29 +79,35 @@ impl Default for JobMeta {
alias: None, alias: None,
argv: String::new(), argv: String::new(),
exec_type: JobType::Shell, exec_type: JobType::Shell,
#[cfg(not(target_arch = "wasm32"))]
platform: guess_host_triple::guess_host_triple() platform: guess_host_triple::guess_host_triple()
.unwrap_or("unknown") .unwrap_or("unknown")
.to_string(), .to_string(),
#[cfg(target_arch = "wasm32")]
platform: "unknown".to_string(),
payload: None, payload: None,
} }
} }
} }
pub struct JobMetaBuilder { impl Default for RawJobMeta {
inner: JobMeta,
}
impl Default for JobMetaBuilder {
fn default() -> Self { fn default() -> Self {
Self { Self {
inner: JobMeta::default(), id: Uuid::new_v4(),
alias: None,
argv: String::new(),
exec_type: JobType::Shell,
platform: guess_host_triple::guess_host_triple()
.unwrap_or("unknown")
.to_string(),
payload: None,
payload_path: None,
} }
} }
} }
#[derive(Default)]
pub struct JobMetaBuilder {
inner: RawJobMeta,
}
impl JobMetaBuilder { impl JobMetaBuilder {
pub fn with_shell(mut self, shell_cmd: impl Into<String>) -> Self { pub fn with_shell(mut self, shell_cmd: impl Into<String>) -> Self {
self.inner.argv = shell_cmd.into(); self.inner.argv = shell_cmd.into();
@ -94,6 +119,11 @@ impl JobMetaBuilder {
self self
} }
pub fn with_payload_src(mut self, path: impl Into<PathBuf>) -> Self {
self.inner.payload_path = Some(path.into());
self
}
pub fn with_alias(mut self, alias: impl Into<String>) -> Self { pub fn with_alias(mut self, alias: impl Into<String>) -> Self {
self.inner.alias = Some(alias.into()); self.inner.alias = Some(alias.into());
self self
@ -117,6 +147,12 @@ impl JobMetaBuilder {
if argv_parts.get(0).ok_or(empty_err.clone())?.is_empty() { if argv_parts.get(0).ok_or(empty_err.clone())?.is_empty() {
return Err(empty_err.into()); return Err(empty_err.into());
} }
if let Some(path) = &inner.payload_path {
let data = fs::read(path.clone()).map_err(|e| {
UError::FSError(path.to_string_lossy().to_string(), e.to_string())
})?;
inner.payload = Some(data)
}
match inner.payload.as_ref() { match inner.payload.as_ref() {
Some(_) => { Some(_) => {
if !inner.argv.contains("{}") { if !inner.argv.contains("{}") {
@ -136,20 +172,23 @@ impl JobMetaBuilder {
} }
} }
}; };
Ok(inner) Ok(inner.into())
} }
JobType::Manage => Ok(inner), JobType::Manage => Ok(inner.into()),
_ => todo!(), _ => todo!(),
} }
} }
/* }
pub fn from_file(path: PathBuf) -> UResult<Self> {
let data = fs::read(path)
.map_err(|e| UError::FilesystemError(
path.to_string_lossy().to_string(),
e.to_string()
))?;
let filename = path.file_name().unwrap().to_str().unwrap();
}*/ impl From<RawJobMeta> for JobMeta {
fn from(rjm: RawJobMeta) -> Self {
JobMeta {
alias: rjm.alias,
argv: rjm.argv,
id: rjm.id,
exec_type: rjm.exec_type,
platform: rjm.platform,
payload: rjm.payload,
}
}
} }
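RawJobMeta now carries an optional payload_path that build() resolves with fs::read, so a payload can be loaded straight from disk. A sketch under the assumption that the types are re-exported as below; the command and file name are placeholders:

use u_lib::{models::{JobMeta, RawJobMeta}, UResult};

fn job_with_file_payload() -> UResult<JobMeta> {
    RawJobMeta::builder()
        .with_shell("/bin/sh {}")        // build() expects "{}" in argv once a payload is set
        .with_payload_src("payload.bin") // read with fs::read inside build(); FSError on failure
        .build()
}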

@ -27,11 +27,12 @@ pub enum JobState {
Finished, Finished,
} }
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, DbEnum, Display)] #[derive(Default, Serialize, Deserialize, Clone, Debug, PartialEq, DbEnum, Display)]
#[PgType = "JobType"] #[PgType = "JobType"]
#[DieselType = "Jobtype"] #[DieselType = "Jobtype"]
pub enum JobType { pub enum JobType {
Manage, Manage,
#[default]
Shell, Shell,
Python, Python,
} }
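JobType now derives Default with Shell marked #[default], which requires Rust 1.62 or newer. A one-line check, assuming the enum is re-exported from u_lib::models:

use u_lib::models::JobType;

#[test]
fn default_job_type_is_shell() {
    // The derived Default picks the #[default] variant.
    assert_eq!(JobType::default(), JobType::Shell);
}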

@ -1,12 +1,14 @@
use std::fmt::Debug;
use crate::utils::OneOrVec; use crate::utils::OneOrVec;
use crate::UError; use crate::UError;
pub struct CombinedResult<T, E = UError> { pub struct CombinedResult<T, E: Debug = UError> {
ok: Vec<T>, ok: Vec<T>,
err: Vec<E>, err: Vec<E>,
} }
impl<T, E> CombinedResult<T, E> { impl<T, E: Debug> CombinedResult<T, E> {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
ok: vec![], ok: vec![],
@ -25,7 +27,7 @@ impl<T, E> CombinedResult<T, E> {
pub fn unwrap(self) -> Vec<T> { pub fn unwrap(self) -> Vec<T> {
let err_len = self.err.len(); let err_len = self.err.len();
if err_len > 0 { if err_len > 0 {
panic!("CombinedResult has {} errors", err_len); panic!("CombinedResult has errors: {:?}", self.err);
} }
self.ok self.ok
} }
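With the Debug bound on E, unwrap() now panics with the collected errors themselves instead of only a count. A small sketch, assuming CombinedResult and its err() method are exported as below:

use u_lib::{utils::CombinedResult, UError};

fn collect_and_fail() {
    let mut res: CombinedResult<u32> = CombinedResult::new();
    res.err(UError::PanelError("boom".into()));
    // panics with: CombinedResult has errors: [PanelError("boom")]
    let _ = res.unwrap();
}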

@ -8,7 +8,7 @@ REMOTE_PATH=$SERVER:$REMOTE_DIR
RSYNC="rsync -arzh --progress" RSYNC="rsync -arzh --progress"
ssh $SERVER mkdir -p $REMOTE_DIR/{release,deploy} ssh $SERVER mkdir -p $REMOTE_DIR/{release,deploy}
$RSYNC $ROOTDIR/release/u_server $REMOTE_PATH/release/u_server $RSYNC $ROOTDIR/target/x86_64-unknown-linux-musl/release/u_server $REMOTE_PATH/release/u_server
$RSYNC $ROOTDIR/certs/server.{crt,key} $REMOTE_PATH/certs $RSYNC $ROOTDIR/certs/server.{crt,key} $REMOTE_PATH/certs
$RSYNC $ROOTDIR/certs/ca.crt $REMOTE_PATH/certs $RSYNC $ROOTDIR/certs/ca.crt $REMOTE_PATH/certs
$RSYNC $ROOTDIR/migrations/ $REMOTE_PATH/migrations $RSYNC $ROOTDIR/migrations/ $REMOTE_PATH/migrations

@ -13,6 +13,7 @@ subjectAltName = @alt_names
[alt_names] [alt_names]
DNS.1 = ortem.xyz DNS.1 = ortem.xyz
DNS.2 = u_server DNS.2 = u_server
DNS.3 = localhost
EOF EOF
openssl req -x509 -newkey rsa:4096 -keyout $DIR/ca.key -out $DIR/ca.crt -nodes -days 365 -subj "/CN=root" openssl req -x509 -newkey rsa:4096 -keyout $DIR/ca.key -out $DIR/ca.crt -nodes -days 365 -subj "/CN=root"
