rework
parent 1f2e3e6eab
commit 5c3a9a1888
11 changed files with 950 additions and 253 deletions

@@ -1,14 +1,29 @@
use async_once::AsyncOnce;
use dotenvy::dotenv;
use lazy_static::lazy_static;
use migrations::cmd_migrate;
use once_cell::sync::Lazy;
use redis::Commands;
use sqlx::query;
use std::env::args;
use std::ops::Sub;
use std::time::{Duration, SystemTime};

mod migrations;

pub static REDIS_CLIENT: Lazy<redis::Client> = Lazy::new(|| {
    redis::Client::open(std::env::var("REDIS_ADDR").unwrap_or("redis://localhost:6379".to_string()))
        .unwrap()
});

lazy_static! {
    pub static ref PG: AsyncOnce<sqlx::PgPool> = AsyncOnce::new(async {
        let db_url = std::env::var("DATABASE_URL")
            .unwrap_or("postgres://saerrouser:saerro321@localhost:5432/data".to_string());
        sqlx::PgPool::connect(&db_url).await.unwrap()
    });
}
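
// `REDIS_CLIENT` only parses the connection string here; actual connections are
// opened later via `get_connection()`. `PG` goes through `lazy_static!` + `AsyncOnce`
// because the pool is built with an async `connect`; callers obtain it with
// `PG.get().await` (see migrations.rs below).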

fn cmd_prune() {
    println!("Pruning old data...");
    let mut con = REDIS_CLIENT.get_connection().unwrap();
@@ -46,14 +61,19 @@ fn cmd_help() {
    println!("Commands:");
    println!(" help - Show this help message");
    println!(" prune - Remove stale data from Redis");
    println!(" migrate - Reset and create database tables");
}

-fn main() {
+#[tokio::main]
+async fn main() {
    dotenv().ok();

    let command = args().nth(1).unwrap_or("help".to_string());

    match command.as_str() {
        "help" => cmd_help(),
        "prune" => cmd_prune(),
        "migrate" => cmd_migrate().await,
        _ => {
            println!("Unknown command: {}", command);
            cmd_help();

services/tasks/src/migrations.rs  (new file, 137 lines)

@@ -0,0 +1,137 @@
use crate::PG;
use sqlx::query;

pub async fn cmd_migrate() {
    println!("Migrating database...");

    tokio::join!(migrate_players(), migrate_classes(), migrate_vehicles(),);
}
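
// Each migrate_* function below follows the same pattern: drop the table,
// recreate it, convert it into a hypertable, and attach a retention policy.
// tokio::join! above runs the three migrations concurrently on the shared pool.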

async fn migrate_players() {
    let pool = PG.get().await;

    println!("-> Migrating players");

    println!("PLAYERS => DROP TABLE IF EXISTS players");
    query("DROP TABLE IF EXISTS players")
        .execute(pool)
        .await
        .unwrap();

    println!("PLAYERS => CREATE TABLE players");
    query(
        "CREATE TABLE players (
            time TIMESTAMPTZ NOT NULL,
            character_id TEXT NOT NULL,
            world_id INT NOT NULL,
            faction_id INT NOT NULL,
            zone_id INT NOT NULL);",
    )
    .execute(pool)
    .await
    .unwrap();
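
    // create_hypertable and add_retention_policy are TimescaleDB functions: the
    // table is partitioned into 1-minute chunks on `time`, and a background job
    // drops chunks once they are older than 15 minutes.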

    println!("PLAYERS => create_hypertable");
    query(
        "SELECT create_hypertable('players', 'time',
            chunk_time_interval => INTERVAL '1 minute', if_not_exists => TRUE);",
    )
    .execute(pool)
    .await
    .unwrap();

    println!("PLAYERS => add_retention_policy");
    query("SELECT add_retention_policy('players', INTERVAL '15 minutes');")
        .execute(pool)
        .await
        .unwrap();

    println!("PLAYERS => done!");
}

async fn migrate_classes() {
    let pool = PG.get().await;

    println!("-> Migrating classes");

    println!("CLASSES => DROP TABLE IF EXISTS classes");
    query("DROP TABLE IF EXISTS classes")
        .execute(pool)
        .await
        .unwrap();

    println!("CLASSES => CREATE TABLE classes");
    query(
        "CREATE TABLE classes (
            time TIMESTAMPTZ NOT NULL,
            character_id TEXT NOT NULL,
            world_id INT NOT NULL,
            faction_id INT NOT NULL,
            zone_id INT NOT NULL,
            class_id TEXT NOT NULL);",
    )
    .execute(pool)
    .await
    .unwrap();

    println!("CLASSES => create_hypertable");
    query(
        "SELECT create_hypertable('classes', 'time',
            chunk_time_interval => INTERVAL '1 minute', if_not_exists => TRUE);",
    )
    .execute(pool)
    .await
    .unwrap();

    println!("CLASSES => add_retention_policy");
    query("SELECT add_retention_policy('classes', INTERVAL '15 minutes');")
        .execute(pool)
        .await
        .unwrap();

    println!("CLASSES => done!");
}

async fn migrate_vehicles() {
    let pool = PG.get().await;

    println!("-> Migrating vehicles");

    println!("VEHICLES => DROP TABLE IF EXISTS vehicles");
    query("DROP TABLE IF EXISTS vehicles")
        .execute(pool)
        .await
        .unwrap();

    println!("VEHICLES => CREATE TABLE vehicles");
    query(
        "CREATE TABLE vehicles (
            time TIMESTAMPTZ NOT NULL,
            character_id TEXT NOT NULL,
            world_id INT NOT NULL,
            faction_id INT NOT NULL,
            zone_id INT NOT NULL,
            vehicle_id TEXT NOT NULL);",
    )
    .execute(pool)
    .await
    .unwrap();

    println!("VEHICLES => create_hypertable");
    query(
        "SELECT create_hypertable('vehicles', 'time',
            chunk_time_interval => INTERVAL '1 minute', if_not_exists => TRUE);",
    )
    .execute(pool)
    .await
    .unwrap();

    println!("VEHICLES => add_retention_policy");
    query("SELECT add_retention_policy('vehicles', INTERVAL '15 minutes');")
        .execute(pool)
        .await
        .unwrap();

    println!("VEHICLES => done!");
}
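
The three migrate_* functions differ only in the table name and the trailing
column, so they could be collapsed into one parameterised helper. A minimal
sketch of that consolidation, not part of this commit (the `migrate_table`
name and its arguments are hypothetical):

// Hypothetical consolidation of migrate_players / migrate_classes / migrate_vehicles.
async fn migrate_table(table: &str, extra_column: &str) {
    let pool = PG.get().await;

    println!("{} => DROP TABLE IF EXISTS {}", table.to_uppercase(), table);
    query(&format!("DROP TABLE IF EXISTS {table}"))
        .execute(pool)
        .await
        .unwrap();

    println!("{} => CREATE TABLE {}", table.to_uppercase(), table);
    query(&format!(
        "CREATE TABLE {table} (
            time TIMESTAMPTZ NOT NULL,
            character_id TEXT NOT NULL,
            world_id INT NOT NULL,
            faction_id INT NOT NULL,
            zone_id INT NOT NULL{extra_column});"
    ))
    .execute(pool)
    .await
    .unwrap();

    println!("{} => create_hypertable", table.to_uppercase());
    query(&format!(
        "SELECT create_hypertable('{table}', 'time',
            chunk_time_interval => INTERVAL '1 minute', if_not_exists => TRUE);"
    ))
    .execute(pool)
    .await
    .unwrap();

    println!("{} => add_retention_policy", table.to_uppercase());
    query(&format!(
        "SELECT add_retention_policy('{table}', INTERVAL '15 minutes');"
    ))
    .execute(pool)
    .await
    .unwrap();

    println!("{} => done!", table.to_uppercase());
}

// Usage: migrate_table("players", "").await;
//        migrate_table("classes", ", class_id TEXT NOT NULL").await;
//        migrate_table("vehicles", ", vehicle_id TEXT NOT NULL").await;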