- fix envs naming and handling
- refactor db interface to use deadpool
- bump deps
- add migrator
- simplify schema generation
- make daemonize work
- workspaced more deps
- split integration-tests and deploy ymls
- cleanup dead code
- fix images naming for podmanmaster
parent 4bac5ac6e9
commit 699896f335
53 changed files with 988 additions and 589 deletions
@@ -1,5 +1,6 @@
-DB_HOST=u_db
-DB_NAME=u_db
-DB_USER=postgres
+POSTGRES_HOST=u_db
+POSTGRES_DATABASE=u_db
+POSTGRES_USER=u_ser
+POSTGRES_PORT=5432
 RUST_BACKTRACE=1
 U_SERVER=u_server
@@ -0,0 +1,13 @@
[package]
name = "migrator"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
diesel = { workspace = true, features = ["postgres", "serde_json"] }
diesel_migrations = { version = "2.0.0", features = ["postgres"] }
openssl = { workspace = true }
u_lib = { path = "../../lib/u_lib" }
url = "2.3.1"
@@ -0,0 +1,140 @@
use super::query_helper;
use diesel::dsl::sql;
use diesel::sql_types::Bool;
use diesel::*;

use std::env;
use std::error::Error;

type DatabaseResult<T> = Result<T, Box<dyn Error>>;

pub enum Backend {
    Pg,
}

impl Backend {
    pub fn for_url(database_url: &str) -> Self {
        match database_url {
            _ if database_url.starts_with("postgres://")
                || database_url.starts_with("postgresql://") =>
            {
                Backend::Pg
            }
            _ => panic!(
                "At least one backend must be specified for use with this crate. \
                You may omit the unneeded dependencies in the following command. \n\n \
                ex. `cargo install diesel_cli --no-default-features --features mysql postgres sqlite` \n"
            ),
        }
    }
}

pub enum InferConnection {
    Pg(PgConnection),
}

impl InferConnection {
    pub fn establish(database_url: &str) -> DatabaseResult<Self> {
        match Backend::for_url(database_url) {
            Backend::Pg => PgConnection::establish(database_url).map(InferConnection::Pg),
        }
        .map_err(Into::into)
    }
}

pub fn reset_database() -> DatabaseResult<()> {
    drop_database(&database_url())?;
    setup_database()
}

pub fn setup_database() -> DatabaseResult<()> {
    let database_url = database_url();

    create_database_if_needed(&database_url)?;
    Ok(())
}

pub fn drop_database_command() -> DatabaseResult<()> {
    drop_database(&database_url())
}

/// Creates the database specified in the connection url. It returns an error
/// if it was unable to create the database.
fn create_database_if_needed(database_url: &str) -> DatabaseResult<()> {
    match Backend::for_url(database_url) {
        Backend::Pg => {
            if PgConnection::establish(database_url).is_err() {
                let (database, postgres_url) = change_database_of_url(database_url, "postgres");
                println!("Creating database: {}", database);
                let mut conn = PgConnection::establish(&postgres_url)?;
                query_helper::create_database(&database).execute(&mut conn)?;
            }
        }
    }

    Ok(())
}

/// Drops the database specified in the connection url. It returns an error
/// if it was unable to drop the database.
fn drop_database(database_url: &str) -> DatabaseResult<()> {
    match Backend::for_url(database_url) {
        Backend::Pg => {
            let (database, postgres_url) = change_database_of_url(database_url, "postgres");
            let mut conn = PgConnection::establish(&postgres_url)?;
            if pg_database_exists(&mut conn, &database)? {
                println!("Dropping database: {}", database);
                query_helper::drop_database(&database)
                    .if_exists()
                    .execute(&mut conn)?;
            }
        }
    }
    Ok(())
}

table! {
    pg_database (datname) {
        datname -> Text,
        datistemplate -> Bool,
    }
}

fn pg_database_exists(conn: &mut PgConnection, database_name: &str) -> QueryResult<bool> {
    use self::pg_database::dsl::*;

    pg_database
        .select(datname)
        .filter(datname.eq(database_name))
        .filter(datistemplate.eq(false))
        .get_result::<String>(conn)
        .optional()
        .map(|x| x.is_some())
}

/// Returns true if the `__diesel_schema_migrations` table exists in the
/// database we connect to, returns false if it does not.
pub fn schema_table_exists(database_url: &str) -> DatabaseResult<bool> {
    match InferConnection::establish(database_url).unwrap() {
        InferConnection::Pg(mut conn) => select(sql::<Bool>(
            "EXISTS \
             (SELECT 1 \
             FROM information_schema.tables \
             WHERE table_name = '__diesel_schema_migrations')",
        ))
        .get_result(&mut conn),
    }
    .map_err(Into::into)
}

pub fn database_url() -> String {
    env::var("DATABASE_URL").unwrap()
}

fn change_database_of_url(database_url: &str, default_database: &str) -> (String, String) {
    let base = ::url::Url::parse(database_url).unwrap();
    let database = base.path_segments().unwrap().last().unwrap().to_owned();
    let mut new_url = base.join(default_database).unwrap();
    new_url.set_query(base.query());
    (database, new_url.into())
}
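Note: change_database_of_url splits the database name off the connection URL and re-points the URL at a maintenance database, which is what lets create_database_if_needed and drop_database connect as admin before the target database exists. A minimal sketch of the expected behavior (the URL and credentials here are invented for illustration):

    // Hypothetical values, for illustration only.
    let (db, admin_url) =
        change_database_of_url("postgres://u_ser:secret@u_db:5432/u_db", "postgres");
    assert_eq!(db, "u_db");
    assert_eq!(admin_url, "postgres://u_ser:secret@u_db:5432/postgres");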
@@ -0,0 +1,88 @@
// due to linking errors
extern crate openssl;
// don't touch anything
extern crate diesel;
// in this block

pub mod database;
pub mod query_helper;

use diesel::migration::Migration;
use diesel::{migration, pg::PgConnection, Connection};
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
use std::error::Error;
use u_lib::config;
use u_lib::db::generate_postgres_url;

const MIGRATIONS: EmbeddedMigrations = embed_migrations!();

fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
    let action = action::parse_command_line()?;
    let dbconfig = config::db::load()?;

    database::setup_database().unwrap();

    let conn = PgConnection::establish(&generate_postgres_url(&dbconfig))?;
    run(action, conn)
}

fn run(action: action::Action, mut conn: PgConnection) -> migration::Result<()> {
    use action::Action::*;
    match action {
        ListPending => {
            let list = conn.pending_migrations(MIGRATIONS)?;
            if list.is_empty() {
                println!("No pending migrations.");
            }
            for mig in list {
                println!("Pending migration: {}", mig.name());
            }
        }
        MigrateUp => {
            let list = conn.run_pending_migrations(MIGRATIONS)?;
            if list.is_empty() {
                println!("No pending migrations.");
            }
            for mig in list {
                println!("Applied migration: {}", mig);
            }
        }
        MigrateDown => {
            let mig = conn.revert_last_migration(MIGRATIONS)?;
            println!("Reverted migration: {}", mig);
        }
    }
    Ok(())
}

mod action {
    pub enum Action {
        ListPending,
        MigrateUp,
        MigrateDown,
    }

    impl TryFrom<&str> for Action {
        type Error = ();

        fn try_from(value: &str) -> Result<Self, Self::Error> {
            match value {
                "" | "list" => Ok(Action::ListPending),
                "up" => Ok(Action::MigrateUp),
                "down" => Ok(Action::MigrateDown),
                _ => Err(()),
            }
        }
    }

    pub fn parse_command_line() -> Result<Action, String> {
        let action_str = std::env::args().nth(1).unwrap_or_default();
        let action = action_str.as_str().try_into().map_err(|_| {
            format!(
                "unrecognized command line argument: {} (expected 'up', 'down', 'list')",
                action_str
            )
        })?;
        Ok(action)
    }
}
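Note: the migrator is driven by a single positional argument parsed in parse_command_line: no argument or "list" prints pending migrations, "up" applies them, and "down" reverts the last one; anything else produces the usage error above. This matches the "/migrator up" invocation in the new u_db entrypoint further down in this commit.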
@@ -0,0 +1,82 @@
use diesel::backend::Backend;
use diesel::query_builder::*;
use diesel::result::QueryResult;
use diesel::RunQueryDsl;

#[derive(Debug, Clone)]
pub struct DropDatabaseStatement {
    db_name: String,
    if_exists: bool,
}

impl DropDatabaseStatement {
    pub fn new(db_name: &str) -> Self {
        DropDatabaseStatement {
            db_name: db_name.to_owned(),
            if_exists: false,
        }
    }

    pub fn if_exists(self) -> Self {
        DropDatabaseStatement {
            if_exists: true,
            ..self
        }
    }
}

impl<DB: Backend> QueryFragment<DB> for DropDatabaseStatement {
    fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, DB>) -> QueryResult<()> {
        out.push_sql("DROP DATABASE ");
        if self.if_exists {
            out.push_sql("IF EXISTS ");
        }
        out.push_identifier(&self.db_name)?;
        Ok(())
    }
}

impl<Conn> RunQueryDsl<Conn> for DropDatabaseStatement {}

impl QueryId for DropDatabaseStatement {
    type QueryId = ();

    const HAS_STATIC_QUERY_ID: bool = false;
}

#[derive(Debug, Clone)]
pub struct CreateDatabaseStatement {
    db_name: String,
}

impl CreateDatabaseStatement {
    pub fn new(db_name: &str) -> Self {
        CreateDatabaseStatement {
            db_name: db_name.to_owned(),
        }
    }
}

impl<DB: Backend> QueryFragment<DB> for CreateDatabaseStatement {
    fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, DB>) -> QueryResult<()> {
        out.push_sql("CREATE DATABASE ");
        out.push_identifier(&self.db_name)?;
        Ok(())
    }
}

impl<Conn> RunQueryDsl<Conn> for CreateDatabaseStatement {}

impl QueryId for CreateDatabaseStatement {
    type QueryId = ();

    const HAS_STATIC_QUERY_ID: bool = false;
}

pub fn drop_database(db_name: &str) -> DropDatabaseStatement {
    DropDatabaseStatement::new(db_name)
}

pub fn create_database(db_name: &str) -> CreateDatabaseStatement {
    CreateDatabaseStatement::new(db_name)
}
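Note: because both statements implement QueryFragment and RunQueryDsl, they execute directly on a connection like any other diesel query. A minimal sketch (the admin connection string and database name below are placeholders):

    // Hypothetical usage; `conn` must point at the maintenance "postgres" DB.
    let mut conn = PgConnection::establish("postgres://u_ser:secret@u_db:5432/postgres")?;
    // Renders as: DROP DATABASE IF EXISTS "u_db"
    query_helper::drop_database("u_db").if_exists().execute(&mut conn)?;
    // Renders as: CREATE DATABASE "u_db"
    query_helper::create_database("u_db").execute(&mut conn)?;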
@@ -1,6 +1,3 @@
-use u_agent::run_forever;
-
-#[tokio::main]
-async fn main() {
-    run_forever().await;
+fn main() {
+    u_agent::run_forever();
 }
@@ -1,11 +1,17 @@
-use u_server_lib::serve;
+// due to linking errors
+extern crate openssl;
+// don't touch anything
+extern crate diesel;
+// in this block
 
 #[macro_use]
 extern crate tracing;
 
 #[tokio::main]
 async fn main() {
-    if let Err(e) = serve().await {
+    u_lib::logging::init_logger(Some("u_server"));
+
+    if let Err(e) = u_server_lib::serve().await {
         error!("U_SERVER error: {}", e);
     }
 }
@@ -0,0 +1,53 @@
version: "3.4"

networks:
  u_net:

services:

  u_server:
    image: localhost/unki/u_server
    networks:
      - u_net
    volumes:
      - ./u_server:/unki/u_server
      - ./certs:/unki/certs
      - ./logs:/unki/logs:rw
    working_dir: /unki
    command: /unki/u_server
    depends_on:
      u_db:
        condition: service_healthy
    ports:
      - 63714:63714
    env_file:
      - ./.env
      - ./.env.private
    environment:
      RUST_LOG: warp=info,u_server_lib=debug
    healthcheck:
      test: ss -tlpn | grep 63714
      interval: 5s
      timeout: 2s
      retries: 2

  u_db:
    image: localhost/unki/u_db
    networks:
      - u_net
    env_file:
      - ./.env
      - ./.env.private
    volumes:
      - ./migrator:/migrator
      - ./data:/var/lib/postgresql/data
      - type: bind
        source: ./u_db_entrypoint.sh
        target: /u_db_entrypoint.sh
    command: /u_db_entrypoint.sh
    healthcheck:
      # test if db's port is open and db is created
      test: ss -tlpn | grep 5432 && psql -lqt -U $${POSTGRES_USER} | grep -qw $${POSTGRES_DATABASE}
      interval: 5s
      timeout: 5s
      retries: 3
@@ -0,0 +1,9 @@
#!/bin/bash

export DOCKER_UID=$(id -u)
export DOCKER_GID=$(id -g)

docker build -t localhost/unki/u_db -f u_db.Dockerfile .
docker build -t localhost/unki/u_server -f u_server.Dockerfile .
podman-compose down -v
podman-compose up -d
@@ -1,17 +0,0 @@
FROM postgres:14.5

ENV DEBIAN_FRONTEND=noninteractive

RUN apt update && apt upgrade -y
RUN apt install -y curl build-essential libpq-dev iproute2
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable --profile minimal
ENV PATH /root/.cargo/bin:$PATH
RUN rustup target add x86_64-unknown-linux-musl
RUN cargo install diesel_cli --no-default-features --features postgres

RUN mkdir -p /unki
ENV LC_ALL en_US.UTF-8
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US.UTF-8
RUN apt install -y locales locales-all
COPY u_db_entrypoint.sh /unki/
@@ -1,7 +0,0 @@
set -m

export DATABASE_URL=postgres://${DB_USER}:${DB_PASSWORD}@127.0.0.1/${DB_NAME}
touch /unki/Cargo.toml
/usr/local/bin/docker-entrypoint.sh postgres &
sleep 10 && diesel setup && diesel migration run
[[ $1 == "svc" ]] && fg %1
@@ -0,0 +1,8 @@
FROM postgres:14.5

RUN apt update && apt upgrade -y

ENV LC_ALL en_US.UTF-8
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US.UTF-8
RUN apt install -y locales locales-all iproute2
@@ -1,13 +0,0 @@
use lazy_static::lazy_static;
use uuid::Uuid;

pub const MASTER_PORT: u16 = 63714;

lazy_static! {
    static ref UID: Uuid = Uuid::new_v4();
}

#[inline]
pub fn get_self_uid() -> Uuid {
    *UID
}
@@ -0,0 +1,75 @@
use envy::{from_env, prefixed, Result as EnvResult};
use lazy_static::lazy_static;
use serde::Deserialize;
use uuid::Uuid;

pub const MASTER_PORT: u16 = 63714;

lazy_static! {
    static ref UID: Uuid = Uuid::new_v4();
}

#[inline]
pub fn get_self_uid() -> Uuid {
    *UID
}

pub mod endpoints {
    use super::*;

    #[derive(Deserialize)]
    pub struct EndpointsEnv {
        #[serde(default = "default_host")]
        pub u_server: String,
    }

    pub fn load() -> EnvResult<EndpointsEnv> {
        dot();
        from_env()
    }
}

pub mod db {
    use super::*;

    #[derive(Deserialize)]
    pub struct DBEnv {
        pub database: String,
        pub host: String,
        pub user: String,
        pub password: String,
        pub port: u16,
    }

    pub fn load() -> EnvResult<DBEnv> {
        dot();
        prefixed("POSTGRES_").from_env()
    }
}

pub mod admin {
    use super::*;

    #[derive(Deserialize)]
    pub struct AccessEnv {
        pub admin_auth_token: String,
        #[serde(default = "default_host")]
        pub u_server: String,
    }

    pub fn load() -> EnvResult<AccessEnv> {
        dot();
        from_env()
    }
}

fn dot() {
    let envs = [".env", ".env.private"];
    for envfile in &envs {
        dotenv::from_filename(envfile).ok();
    }
}

pub fn default_host() -> String {
    "ortem.xyz".to_string()
}
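Note: with prefixed("POSTGRES_"), envy strips the prefix and matches the remainder against DBEnv's field names, so the .env entries above (POSTGRES_HOST, POSTGRES_DATABASE, POSTGRES_USER, POSTGRES_PORT) plus POSTGRES_PASSWORD from .env.private fill the struct directly. A minimal sketch, assuming the .env values shown earlier in this commit:

    let db = u_lib::config::db::load().expect("POSTGRES_* vars must be set");
    assert_eq!(db.host, "u_db");
    assert_eq!(db.port, 5432);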
@@ -0,0 +1,26 @@
use deadpool_diesel::{Manager as DManager, Pool as DPool, Runtime};
use diesel::pg::PgConnection;
use std::time::Duration;

use crate::config::db::DBEnv;

pub type PgAsyncPool = DPool<DManager<PgConnection>>;

pub fn generate_postgres_url(config: &DBEnv) -> String {
    format!(
        "postgres://{}:{}@{}:{}/{}",
        config.user, config.password, config.host, config.port, config.database
    )
}

pub fn async_pool(config: &DBEnv) -> PgAsyncPool {
    let db_url = generate_postgres_url(config);

    let manager = DManager::new(db_url, Runtime::Tokio1);
    DPool::builder(manager)
        .max_size(8)
        .wait_timeout(Some(Duration::from_secs(5 * 60)))
        .runtime(Runtime::Tokio1)
        .build()
        .unwrap()
}
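Note: deadpool-diesel manages ordinary blocking PgConnections; async callers check one out with get() and run diesel calls through interact(), which moves the closure onto a blocking thread. A minimal sketch, assuming deadpool-diesel's interact API (the query itself is a placeholder):

    // Hypothetical health check against the pool built above.
    async fn ping(pool: &PgAsyncPool) -> Result<(), Box<dyn std::error::Error>> {
        let conn = pool.get().await?;          // wait up to wait_timeout for a slot
        conn.interact(|conn| {
            use diesel::prelude::*;
            diesel::sql_query("SELECT 1").execute(conn)
        })
        .await??;                              // interact error, then query error
        Ok(())
    }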
@@ -1,5 +1,73 @@
mod chan;
mod variants;

pub use chan::*;
pub use variants::*;

use reqwest::Error as ReqError;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use uuid::Uuid;

pub type UResult<T> = std::result::Result<T, UError>;

#[derive(PartialEq, Error, Debug, Serialize, Deserialize, Clone)]
pub enum UError {
    #[error("Runtime error: {0}")]
    Runtime(String),

    #[error("Connection error: {0}. Body: {1}")]
    NetError(String, String),

    #[error("Parse error")]
    ParseError,

    #[error("Job error: {0}")]
    JobError(String),

    #[error("Argument parsing failed: {0}")]
    JobArgsError(String),

    #[error("Job is uncompleted yet")]
    JobUncompleted,

    #[error("Job cannot be ran on this platform. Expected: {0}, got: {1}")]
    InsuitablePlatform(String, String),

    #[error("Job {0} doesn't exist")]
    NoJob(Uuid),

    #[error("FS error while processing {0}: {1}")]
    FSError(String, String),

    #[error("Wrong auth token")]
    WrongToken,

    #[error("Panicked: {0}")]
    Panic(String),

    #[error("Panel error: {0}")]
    PanelError(String),

    #[error("Deserialize from json error: {0}")]
    DeserializeError(String),
}

impl From<ReqError> for UError {
    fn from(e: ReqError) -> Self {
        UError::NetError(e.to_string(), String::new())
    }
}

impl From<serde_json::Error> for UError {
    fn from(e: serde_json::Error) -> Self {
        UError::DeserializeError(e.to_string())
    }
}

impl From<anyhow::Error> for UError {
    fn from(e: anyhow::Error) -> Self {
        match e.downcast::<UError>() {
            Ok(err) => err,
            Err(err) => UError::Runtime(err.to_string()),
        }
    }
}
@@ -1,71 +0,0 @@
#[cfg(not(target_arch = "wasm32"))]
use reqwest::Error as ReqError;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use uuid::Uuid;

pub type UResult<T> = std::result::Result<T, UError>;

#[derive(PartialEq, Eq, Error, Debug, Serialize, Deserialize, Clone)]
pub enum UError {
    #[error("Runtime error: {0}")]
    Runtime(String),

    #[error("Connection error: {0}. Body: {1}")]
    NetError(String, String),

    #[error("Parse error")]
    ParseError,

    #[error("Job error: {0}")]
    JobError(String),

    #[error("Argument parsing failed: {0}")]
    JobArgsError(String),

    #[error("Job is uncompleted yet")]
    JobUncompleted,

    #[error("Job cannot be ran on this platform. Expected: {0}, got: {1}")]
    InsuitablePlatform(String, String),

    #[error("Job {0} doesn't exist")]
    NoJob(Uuid),

    #[error("FS error while processing {0}: {1}")]
    FSError(String, String),

    #[error("Wrong auth token")]
    WrongToken,

    #[error("Panicked: {0}")]
    Panic(String),

    #[error("Panel error: {0}")]
    PanelError(String),

    #[error("Deserialize from json error: {0}")]
    DeserializeError(String),
}

#[cfg(not(target_arch = "wasm32"))]
impl From<ReqError> for UError {
    fn from(e: ReqError) -> Self {
        UError::NetError(e.to_string(), String::new())
    }
}

impl From<serde_json::Error> for UError {
    fn from(e: serde_json::Error) -> Self {
        UError::DeserializeError(e.to_string())
    }
}

impl From<anyhow::Error> for UError {
    fn from(e: anyhow::Error) -> Self {
        match e.downcast::<UError>() {
            Ok(err) => err,
            Err(err) => UError::Runtime(err.to_string()),
        }
    }
}
@@ -1,29 +0,0 @@
use envy::{from_env, Result as EnvResult};
use serde::{de::DeserializeOwned, Deserialize};

#[derive(Deserialize)]
pub struct DefaultEnv {
    #[serde(default = "default_host")]
    pub u_server: String,
}

pub fn load_env<E: DeserializeOwned>() -> EnvResult<E> {
    dot();
    from_env()
}

pub fn load_env_default() -> EnvResult<DefaultEnv> {
    dot();
    from_env()
}

fn dot() {
    let envs = [".env", ".env.private"];
    for envfile in &envs {
        dotenv::from_filename(envfile).ok();
    }
}

pub fn default_host() -> String {
    "ortem.xyz".to_string()
}
@@ -1,24 +0,0 @@
use std::fmt;

pub struct Hexlify<'b>(pub &'b [u8]);

impl<'a> fmt::LowerHex for Hexlify<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for byte in self.0.iter() {
            write!(f, "{:02x}", byte)?;
        }
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_hexlify() {
        let data = b"\x5a\x6b\x23\x4f\xa3\x7f\x9e";
        let result = "5a6b234fa37f9e";
        assert_eq!(format!("{:x}", Hexlify(data)), result);
    }
}
@@ -1,5 +0,0 @@
mod hexlify;
mod stripped;

pub use hexlify::*;
pub use stripped::*;
@@ -1,80 +0,0 @@
use std::fmt;
use std::iter::Iterator;
use std::slice::Iter as SliceIter;
use std::str::Chars;

const MAX_DATA_LEN: usize = 2000;

pub trait Strippable {
    type Item: fmt::Display;
    type TypeIter: Iterator<Item = Self::Item>;

    fn length(&self) -> usize;
    fn iterator(&self) -> Self::TypeIter;
}

impl<'a> Strippable for &'a str {
    type Item = char;
    type TypeIter = Chars<'a>;

    fn length(&self) -> usize {
        self.len()
    }

    fn iterator(&self) -> Self::TypeIter {
        self.chars()
    }
}

impl<'a> Strippable for &'a Vec<u8> {
    type Item = &'a u8;
    type TypeIter = SliceIter<'a, u8>;

    fn length(&self) -> usize {
        self.len()
    }

    fn iterator(&self) -> Self::TypeIter {
        self.iter()
    }
}

pub struct Stripped<'i, Inner: Strippable + 'i>(pub &'i Inner);

impl<'i, Inner: Strippable + 'i> Stripped<'i, Inner> {
    fn iter(&self) -> Inner::TypeIter {
        self.0.iterator()
    }

    fn placeholder(&self) -> &'static str {
        if self.0.length() >= MAX_DATA_LEN {
            " <...>"
        } else {
            ""
        }
    }
}

impl<'i, Inner: Strippable + 'i> fmt::Display for Stripped<'i, Inner> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let placeholder = self.placeholder();
        for c in self.iter().take(MAX_DATA_LEN - placeholder.len()) {
            write!(f, "{}", c)?;
        }
        write!(f, "{}", placeholder)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use rstest::*;

    #[rstest]
    #[case("abc", 3)]
    #[case("abcde".repeat(50), MAX_DATA_LEN)]
    fn test_strip(#[case] input: impl Into<String>, #[case] result_len: usize) {
        let s = input.into();
        assert_eq!(Stripped(&s.as_str()).to_string().len(), result_len);
    }
}
@@ -1,4 +1,3 @@
 # remove '.sample' to activate
 ADMIN_AUTH_TOKEN=
-DB_PASSWORD=
-POSTGRES_PASSWORD=${DB_PASSWORD}
+POSTGRES_PASSWORD=
@@ -0,0 +1,20 @@
#!/bin/bash
set -e

. ./.env
. ./.env.private

export DATABASE_URL=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@127.0.0.1/${POSTGRES_DATABASE}
IMG_NAME=pg-schema

docker run --rm -d \
    --env-file=$PWD/.env \
    --env-file=$PWD/.env.private \
    --name $IMG_NAME \
    -p 5432:5432 \
    postgres:14.5

timeout 10s grep -q 'system is ready to accept connections' <(docker logs --follow $IMG_NAME)
sleep 0.5
diesel setup && diesel migration run
docker stop $IMG_NAME
@@ -1,5 +0,0 @@
#!/bin/bash
docker build -t unki/u_db -f u_db.Dockerfile .
docker build -t unki/u_server -f u_server.Dockerfile .
docker-compose down
docker-compose up -d u_server
@@ -0,0 +1,6 @@
set -m

export DATABASE_URL=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@127.0.0.1/${POSTGRES_DATABASE}
/usr/local/bin/docker-entrypoint.sh postgres &
sleep 8 && /migrator up
fg %1