parent aaffa13d82
commit 8d02f9d66a
7 server/.env.example Normal file
@@ -0,0 +1,7 @@
PORT=
BIND_ADDR=

DB_PASSWORD=
DB_USER=
DB_NAME=
DATABASE_URL=
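For local development these variables would typically be filled in to match compose-dev.yaml below; the values here are illustrative placeholders only (the committed file leaves them blank), with DATABASE_URL assumed to follow the usual postgres://user:password@host:port/dbname form that Diesel expects. PORT and BIND_ADDR mirror the defaults in main.rs:

    PORT=8000
    BIND_ADDR=127.0.0.1
    DB_USER=tracker
    DB_PASSWORD=changeme
    DB_NAME=tracker
    DATABASE_URL=postgres://tracker:changeme@127.0.0.1:5432/tracker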
1655 server/Cargo.lock (generated) Normal file
File diff suppressed because it is too large.
16 server/Cargo.toml Normal file
@@ -0,0 +1,16 @@
[package]
name = "personal-tracker-server"
version = "0.1.0"
edition = "2021"
license = "GPL-3.0-only"

[dependencies]
actix-web = "4.5.1"
chrono = { version = "0.4.38", features = ["serde"] }
colored = "2.1.0"
diesel = { version = "2.1.6", features = ["postgres", "uuid", "chrono", "serde_json"] }
diesel_migrations = { version = "2.1.0", features = ["postgres"] }
dotenvy = "0.15.7"
serde = { version = "1.0.200", features = ["derive"] }
serde_json = "1.0.116"
uuid = { version = "1.8.0", features = ["v7", "serde"] }
5 server/README.md Normal file
@@ -0,0 +1,5 @@
# Personal Tracker Server

The server!

## Developer Setup
14 server/compose-dev.yaml Normal file
@@ -0,0 +1,14 @@
# Docker Compose for Development Machines

services:
  postgres:
    container_name: personal-tracker-dev-db
    image: postgres:16
    ports:
      - 127.0.0.1:5432:5432
    volumes:
      - ./dev-db/postgres:/var/lib/postgresql/data
    environment:
      POSTGRES_USER: "${DB_USER}"
      POSTGRES_PASSWORD: "${DB_PASSWORD}"
      POSTGRES_DB: "${DB_NAME}"
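This compose file only provisions the database. A development database would typically be brought up from the server/ directory with something like the command below; Docker Compose reads the ${DB_*} substitutions from the environment or from the .env file in the working directory (illustrative, not part of the commit):

    docker compose -f compose-dev.yaml up -d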
9 server/diesel.toml Normal file
@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId"]

[migrations_directory]
dir = "migrations"
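With this configuration the Diesel CLI keeps src/schema.rs in sync with the migrations under migrations/. Typical commands (not part of the commit; they require DATABASE_URL to be set) would be:

    diesel setup                             # create the database and the migrations directory
    diesel migration generate create_tasks   # scaffold a timestamped up.sql/down.sql pair
    diesel migration run                     # apply pending migrations and regenerate src/schema.rs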
0 server/migrations/.keep Normal file

6 server/migrations/00000000000000_diesel_initial_setup/down.sql Normal file
@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();
36 server/migrations/00000000000000_diesel_initial_setup/up.sql Normal file
@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.




-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
    IF (
        NEW IS DISTINCT FROM OLD AND
        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
    ) THEN
        NEW.updated_at := current_timestamp;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
2 server/migrations/2024-05-03-015925_create_tasks/down.sql Normal file
@@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`
DROP TABLE tasks;
8 server/migrations/2024-05-03-015925_create_tasks/up.sql Normal file
@@ -0,0 +1,8 @@
-- Your SQL goes here
CREATE TABLE IF NOT EXISTS tasks (
    id UUID PRIMARY KEY,
    title TEXT NOT NULL,
    description TEXT NOT NULL,
    complete BOOLEAN NOT NULL DEFAULT FALSE,
    due_date TIMESTAMP NOT NULL
);
16 server/src/api/mod.rs Normal file
@@ -0,0 +1,16 @@
// SPDX-License-Identifier: GPL-3.0-Only
// Copyright (C) 2024 Luke Harding

use actix_web::web;

pub mod v1;

pub struct AppState {
    pub database_url: String,
}

pub fn configure(cfg: &mut web::ServiceConfig) {
    let scope = web::scope("/api").configure(v1::configure);

    cfg.service(scope);
}
101 server/src/api/v1.rs Normal file
@@ -0,0 +1,101 @@
use actix_web::{delete, get, HttpResponse, patch, post, Responder, web};
use chrono::NaiveDateTime;
use serde::Deserialize;
use uuid::Uuid;

use crate::api::AppState;
use crate::db;

pub fn configure(cfg: &mut web::ServiceConfig) {
    let scope = web::scope("/v1")
        .service(get_task)
        .service(get_tasks)
        .service(create_task)
        .service(delete_task)
        .service(update_task);

    cfg.service(scope);
}

#[get("/get_tasks")]
pub async fn get_tasks(data: web::Data<AppState>) -> impl Responder {
    let tasks = match db::get_tasks(&data.database_url).await {
        Ok(tasks) => tasks,
        Err(e) => return HttpResponse::InternalServerError().body(e.to_string()),
    };

    HttpResponse::Ok().json(tasks)
}

#[get("/get_task/{uuid}")]
pub async fn get_task(data: web::Data<AppState>, path: web::Path<Uuid>) -> impl Responder {
    let uuid = path.into_inner();

    let task = match db::get_task(&data.database_url, uuid).await {
        Ok(task) => task,
        Err(e) => return HttpResponse::InternalServerError().body(e.to_string()),
    };

    HttpResponse::Ok().json(task)
}

#[derive(Deserialize)]
pub struct JsonNewTask {
    pub title: String,
    pub description: String,
    pub due_date: NaiveDateTime,
}

#[post("/create_task")]
pub async fn create_task(
    data: web::Data<AppState>,
    task: web::Json<JsonNewTask>,
) -> impl Responder {
    let task = task.into_inner();

    let new_task = match db::create_task(&data.database_url, task).await {
        Ok(task) => task,
        Err(e) => return HttpResponse::InternalServerError().body(e.to_string()),
    };

    HttpResponse::Ok().json(new_task)
}

#[delete("/delete_task/{uuid}")]
pub async fn delete_task(data: web::Data<AppState>, path: web::Path<Uuid>) -> impl Responder {
    let uuid = path.into_inner();

    let result = match db::delete_task(&data.database_url, uuid).await {
        Ok(result) => result,
        Err(e) => return HttpResponse::InternalServerError().body(e.to_string()),
    };

    if result > 0 {
        HttpResponse::Ok().body(uuid.to_string())
    } else {
        HttpResponse::NotFound().body("Unable to delete. Not found.")
    }
}

#[derive(Deserialize)]
pub struct JsonPatchTask {
    pub title: Option<String>,
    pub description: Option<String>,
    pub complete: Option<bool>,
    pub due_date: Option<NaiveDateTime>,
}

#[patch("/update_task/{uuid}")]
pub async fn update_task(
    data: web::Data<AppState>,
    path: web::Path<Uuid>,
    new_values: web::Json<JsonPatchTask>,
) -> impl Responder {
    let new_values = new_values.into_inner();
    let uuid = path.into_inner();

    match db::update_task(&data.database_url, uuid, new_values).await {
        Ok(task) => HttpResponse::Ok().json(task),
        Err(e) => HttpResponse::InternalServerError().body(e.to_string()),
    }
}
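Combined with the /api scope in api/mod.rs, these handlers are served under /api/v1. Illustrative requests against a local instance (host and port match the defaults in main.rs; <uuid> is a placeholder for a task id returned by create_task):

    curl http://127.0.0.1:8000/api/v1/get_tasks
    curl http://127.0.0.1:8000/api/v1/get_task/<uuid>
    curl -X POST http://127.0.0.1:8000/api/v1/create_task \
         -H 'Content-Type: application/json' \
         -d '{"title": "Write README", "description": "Fill in Developer Setup", "due_date": "2024-06-01T12:00:00"}'
    curl -X PATCH http://127.0.0.1:8000/api/v1/update_task/<uuid> \
         -H 'Content-Type: application/json' \
         -d '{"complete": true}'
    curl -X DELETE http://127.0.0.1:8000/api/v1/delete_task/<uuid>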
114 server/src/db/mod.rs Normal file
@@ -0,0 +1,114 @@
// SPDX-License-Identifier: GPL-3.0-Only
// Copyright (C) 2024 Luke Harding

use std::error;

use diesel::{
    Connection, ExpressionMethods, PgConnection, QueryDsl, RunQueryDsl, SelectableHelper,
};
use diesel::pg::Pg;
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
use uuid::Uuid;

use models::Task;

use crate::api::v1::{JsonNewTask, JsonPatchTask};
use crate::db::models::NewTask;
use crate::schema;

pub mod models;

pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("./migrations");

pub fn establish_connection(database_url: &str) -> Result<PgConnection, Box<dyn error::Error>> {
    Ok(PgConnection::establish(database_url)?)
}

pub fn run_migrations(
    connection: &mut impl MigrationHarness<Pg>,
) -> Result<(), Box<dyn error::Error + Send + Sync + 'static>> {
    connection.run_pending_migrations(MIGRATIONS)?;

    Ok(())
}

pub async fn get_tasks(database_url: &str) -> Result<Vec<Task>, Box<dyn error::Error>> {
    use schema::tasks::dsl::*;

    let conn = &mut establish_connection(database_url)?;

    Ok(tasks.select(Task::as_select()).load(conn)?)
}

pub async fn get_task(database_url: &str, uuid: Uuid) -> Result<Task, Box<dyn error::Error>> {
    use schema::tasks::dsl::*;

    let conn = &mut establish_connection(database_url)?;

    Ok(tasks.find(uuid).select(Task::as_select()).first(conn)?)
}

pub async fn create_task(
    database_url: &str,
    task: JsonNewTask,
) -> Result<Task, Box<dyn error::Error>> {
    let conn = &mut establish_connection(database_url)?;

    let new_task = NewTask {
        id: Uuid::now_v7(),
        title: &task.title,
        description: &task.description,
        due_date: task.due_date,
    };

    Ok(diesel::insert_into(schema::tasks::table)
        .values(&new_task)
        .returning(Task::as_returning())
        .get_result(conn)?)
}

pub async fn delete_task(database_url: &str, uuid: Uuid) -> Result<usize, Box<dyn error::Error>> {
    use schema::tasks::dsl::*;

    let conn = &mut establish_connection(database_url)?;

    Ok(diesel::delete(tasks.filter(id.eq(uuid))).execute(conn)?)
}

pub async fn update_task(
    database_url: &str,
    uuid: Uuid,
    new_values: JsonPatchTask,
) -> Result<Task, Box<dyn error::Error>> {
    use schema::tasks::dsl::*;

    let conn = &mut establish_connection(database_url)?;

    let mut current_task = get_task(database_url, uuid).await?;

    if let Some(new_title) = new_values.title {
        current_task.title = new_title;
    }

    if let Some(new_description) = new_values.description {
        current_task.description = new_description;
    }

    if let Some(new_complete) = new_values.complete {
        current_task.complete = new_complete;
    }

    if let Some(new_due_date) = new_values.due_date {
        current_task.due_date = new_due_date;
    }

    Ok(diesel::update(tasks.find(uuid))
        .set((
            title.eq(current_task.title),
            description.eq(current_task.description),
            complete.eq(current_task.complete),
            due_date.eq(current_task.due_date),
        ))
        .returning(Task::as_returning())
        .get_result(conn)?)
}
29 server/src/db/models.rs Normal file
@@ -0,0 +1,29 @@
// SPDX-License-Identifier: GPL-3.0-Only
// Copyright (C) 2024 Luke Harding

use chrono::NaiveDateTime;
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

use crate::schema::tasks;

#[derive(Queryable, Selectable, Serialize, Deserialize)]
#[diesel(table_name = crate::schema::tasks)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Task {
    pub id: Uuid,
    pub title: String,
    pub description: String,
    pub complete: bool,
    pub due_date: NaiveDateTime,
}

#[derive(Insertable, Deserialize, Serialize)]
#[diesel(table_name = tasks)]
pub struct NewTask<'a> {
    pub id: Uuid,
    pub title: &'a str,
    pub description: &'a str,
    pub due_date: NaiveDateTime,
}
84 server/src/main.rs Normal file
@@ -0,0 +1,84 @@
// SPDX-License-Identifier: GPL-3.0-Only
// Copyright (C) 2024 Luke Harding

use std::{env, io};

use actix_web::{App, HttpServer, web};

use crate::api::AppState;

mod api;
pub mod db;
pub mod schema;
pub mod util;

#[actix_web::main]
async fn main() -> io::Result<()> {
    util::print_copyright_notice();

    // Default Server Values
    let mut port = 8000;
    let mut bind_addr = String::from("127.0.0.1");
    let mut database_url = String::new();

    util::notice_println("Loading Environment Variables");

    // Load .env
    if let Err(e) = dotenvy::dotenv() {
        util::warn_println(format!("Failed to load dotenv: {}", e))
    };

    // If env variable for port or bind_addr is present use that instead of default.
    if let Ok(env_port) = env::var("PORT") {
        if let Ok(env_port) = env_port.parse::<u16>() {
            port = env_port;
        } else if !env_port.is_empty() {
            util::warn_println("Failed to parse PORT env variable; using default value.");
        }
    }

    if let Ok(env_addr) = env::var("BIND_ADDR") {
        if !env_addr.is_empty() {
            bind_addr = env_addr;
        }
    }

    if let Ok(env_database) = env::var("DATABASE_URL") {
        if !env_database.is_empty() {
            database_url = env_database;
        } else {
            util::err_println("No DATABASE_URL found. Aborting,");
            return Ok(());
        }
    }

    util::notice_println("Connecting to DB.");
    let mut conn = match db::establish_connection(&database_url) {
        Ok(conn) => conn,
        Err(e) => {
            util::err_println("Failed to connect to DB. Aborting.");
            eprintln!("{}", e);
            return Ok(());
        }
    };

    util::notice_println("Running Migrations");
    if let Err(e) = db::run_migrations(&mut conn) {
        util::err_println("Failed to run migrations. Aborting.");
        eprintln!("{}", e);
        return Ok(());
    }

    util::notice_println(format!("Starting server on {}:{}", bind_addr, port));

    HttpServer::new(move || {
        App::new()
            .app_data(web::Data::new(AppState {
                database_url: database_url.clone(),
            }))
            .configure(api::configure)
    })
    .bind((bind_addr, port))?
    .run()
    .await
}
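With the Postgres container running and .env populated, the server would typically be started from the server/ directory with a plain Cargo invocation; migrations are applied automatically at startup by run_migrations before the HTTP server binds:

    cd server && cargo run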
11 server/src/schema.rs Normal file
@@ -0,0 +1,11 @@
// @generated automatically by Diesel CLI.

diesel::table! {
    tasks (id) {
        id -> Uuid,
        title -> Text,
        description -> Text,
        complete -> Bool,
        due_date -> Timestamp,
    }
}
20 server/src/util.rs Normal file
@@ -0,0 +1,20 @@
// SPDX-License-Identifier: GPL-3.0-Only
// Copyright (C) 2024 Luke Harding

use colored::Colorize;

pub fn print_copyright_notice() {
    println!("{}", "Personal Tracker Server Copyright (C) 2024 Luke Harding <luke@lukeh990.io>\nThis program comes with ABSOLUTELY NO WARRANTY\nThis is free software, and you are welcome to redistribute it under certain conditions\n".italic());
}

pub fn notice_println(msg: impl Into<String>) {
    println!("{}", msg.into().green().bold());
}

pub fn warn_println(msg: impl Into<String>) {
    eprintln!("{}", msg.into().yellow().bold());
}

pub fn err_println(msg: impl Into<String>) {
    eprintln!("{}", msg.into().red().bold());
}