First commit of group-ironmen-master directory.

This commit is contained in:
2025-10-27 08:25:16 +08:00
commit a8467389ef
26390 changed files with 35396 additions and 0 deletions

View File

@@ -0,0 +1,11 @@
**/node_modules
**/.husky
**/.eslint*
**/prettier*
README.md
.gitignore
config.toml
Dockerfile
secret
target
.dockerignore

View File

@@ -0,0 +1,5 @@
config.toml
target
secret
node_modules
flamegraph*.svg

3154
group-ironmen-master/server/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,42 @@
[package]
name = "server"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# Postgres driver with JSON + chrono column support, pooled via deadpool.
tokio-postgres = { version = "0.7.11", features = ["with-serde_json-1", "with-chrono-0_4"] }
actix-web = "4.11.0"
tokio = { version = "1.47.1", features = ["time"] }
serde = { version = "1.0.219", features = ["derive"] }
deadpool-postgres = { version = "0.14.1", features = ["serde"] }
config = "0.15.15"
derive_more = "2.0.1"
env_logger = { version = "0.11.8", default-features = false, features = ["humantime"] }
log = "0.4.28"
serde_json = "1.0.143"
futures = "0.3.31"
# v4 feature: random group auth tokens (see models::default_token).
uuid = { version = "1.18.1", features = ["v4"] }
actix-service = "2.0.3"
chrono = { version = "0.4.42", features = ["serde"] }
actix-cors = "0.7.1"
# Blake2s token hashing + lowercase-hex encoding (see crypto.rs).
blake2 = "0.10.6"
data-encoding = { version = "2.9.0", features = ["alloc"] }
lazy_static = "1.5.0"
regex = "1.11.2"
# Lock-free atomic swap of the cached GE-price JSON (see unauthed.rs).
arc-swap = "1.7.1"
reqwest = { version = "0.12.23", features = ["json"] }
# Replacement global allocator (registered in main.rs via #[global_allocator]).
mimalloc = "0.1.48"
[profile.dev]
opt-level = 0
# Enable high optimizations for dependencies but not for our code:
[profile.dev.package."*"]
opt-level = 3
[profile.release]
# Fat LTO + one codegen unit: slower builds, smaller/faster binary.
lto = true
codegen-units = 1
# NOTE: panics abort the process — no unwinding, no catch_unwind.
panic = "abort"

View File

@@ -0,0 +1,22 @@
###############################################
# Backend Image
###############################################
# NOTE(review): rust:1.73 may predate the MSRV of some pinned dependencies
# (e.g. tokio 1.47 / reqwest 0.12) — bump this tag if the build fails.
FROM rust:1.73 AS builder
WORKDIR /app
COPY src ./src
COPY Cargo.toml .
COPY Cargo.lock .
COPY collection_log_info.json .
RUN cargo build --release

# Minimal runtime image: only the binary, its data file, and TLS deps.
FROM debian:bookworm-slim
WORKDIR /app
# Install and clean up in ONE layer — a separate `RUN rm -rf` does not
# shrink the image because the lists remain in the earlier layer.
RUN apt-get update \
 && apt-get install -y --no-install-recommends openssl ca-certificates \
 && rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/server ./
COPY --from=builder /app/collection_log_info.json ./
COPY ./docker-entrypoint.sh ./
# Entrypoint writes config.toml + secret from env vars, then execs CMD.
ENTRYPOINT ["/app/docker-entrypoint.sh"]
CMD ["/app/server"]

View File

@@ -0,0 +1 @@
# Build an optimized release binary tuned to the local CPU.
# NOTE(review): target-cpu=native binaries are not portable to older CPUs —
# use for local/benchmark builds only, not distributed artifacts.
RUSTFLAGS="-C target-cpu=native" cargo build --release

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,31 @@
#!/bin/bash
# Container entrypoint: materializes config.toml and the `secret` file from
# environment variables, then execs the container CMD (the server binary).
CONFIG_FILE=config.toml
echo "[entrypoint] Creating $CONFIG_FILE"
# Always start from a clean config so stale values never survive a restart.
if [ -e $CONFIG_FILE ]
then
echo "[entrypoint] $CONFIG_FILE already exists, deleting and starting fresh"
rm $CONFIG_FILE
fi
# [pg] section consumed by config.rs -> deadpool_postgres::Config.
echo "[pg]" >> $CONFIG_FILE
echo "user = \"$PG_USER\"" >> $CONFIG_FILE
echo "password = \"$PG_PASSWORD\"" >> $CONFIG_FILE
echo "host = \"$PG_HOST\"" >> $CONFIG_FILE
echo "port = $PG_PORT" >> $CONFIG_FILE
echo "dbname = \"$PG_DB\"" >> $CONFIG_FILE
echo "pool.max_size = 16" >> $CONFIG_FILE
SECRET_FILE=secret
echo "[entrypoint] Creating $SECRET_FILE"
if [ -e $SECRET_FILE ]
then
echo "[entrypoint] $SECRET_FILE already exists, deleting and starting fresh"
rm $SECRET_FILE
fi
# NOTE(review): echo appends a trailing newline and the server hashes the
# file content verbatim (crypto.rs) — changing this would invalidate all
# existing token hashes. Keep as-is.
echo "$BACKEND_SECRET" >> $SECRET_FILE
echo "[entrypoint] Running run"
exec "$@"

View File

@@ -0,0 +1,143 @@
use crate::db;
use actix_web::{
body::BoxBody,
dev::{Service, ServiceRequest, ServiceResponse, Transform},
web, Error, FromRequest, HttpMessage, HttpRequest,
};
use deadpool_postgres::Pool;
use futures::{
future::{ready, LocalBoxFuture, Ready},
FutureExt,
};
use std::rc::Rc;
/// Factory that wraps services with [`AuthenticateMiddleware`].
///
/// Unit struct; `Default` is derived so it satisfies clippy's
/// `new_without_default` and can be built either way.
#[derive(Default)]
pub struct AuthenticateMiddlewareFactory;
impl AuthenticateMiddlewareFactory {
    /// Creates a new factory; equivalent to `Default::default()`.
    pub fn new() -> Self {
        AuthenticateMiddlewareFactory {}
    }
}
/// Wires [`AuthenticateMiddleware`] into actix-web's middleware machinery.
impl<S, B> Transform<S, ServiceRequest> for AuthenticateMiddlewareFactory
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
    B: actix_web::body::MessageBody + 'static,
{
    // Boxed body so early error responses and the wrapped service's
    // responses share one concrete response type.
    type Response = ServiceResponse<BoxBody>;
    type Error = Error;
    type InitError = ();
    type Transform = AuthenticateMiddleware<S>;
    type Future = Ready<Result<Self::Transform, Self::InitError>>;
    fn new_transform(&self, service: S) -> Self::Future {
        // Rc because the middleware clones the service handle into the
        // per-request future (single-threaded actix workers).
        ready(Ok(AuthenticateMiddleware {
            service: Rc::new(service),
        }))
    }
}
/// Result of a successful authentication: the id of the group the
/// request's name/token pair resolved to.
pub struct AuthenticationResult {
    pub group_id: i64,
}
// Stored in the request extensions by the middleware; Rc so the extractor
// can clone a handle out without copying the payload.
type AuthenticationInfo = Rc<AuthenticationResult>;
/// Extractor used by authenticated route handlers to access the group id.
pub struct Authenticated(AuthenticationInfo);
impl std::ops::Deref for Authenticated {
    type Target = AuthenticationInfo;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl FromRequest for Authenticated {
type Error = Error;
type Future = Ready<Result<Self, Self::Error>>;
fn from_request(req: &HttpRequest, _payload: &mut actix_web::dev::Payload) -> Self::Future {
let value = req.extensions().get::<AuthenticationInfo>().cloned();
let result = match value {
Some(v) => Ok(Authenticated(v)),
None => Err(actix_web::error::ErrorUnauthorized("")),
};
ready(result)
}
}
/// The middleware instance wrapping a single inner service.
pub struct AuthenticateMiddleware<S> {
    // Rc-shared so each request future can hold its own handle.
    service: Rc<S>,
}
impl<S, B> Service<ServiceRequest> for AuthenticateMiddleware<S>
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
B: actix_web::body::MessageBody + 'static,
{
type Response = ServiceResponse<BoxBody>;
type Error = Error;
type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;
actix_service::forward_ready!(service);
fn call(&self, req: ServiceRequest) -> Self::Future {
let srv = Rc::clone(&self.service);
async move {
let group_name = match req.match_info().get("group_name") {
Some(group_name) => group_name,
None => {
return Ok(req.error_response(actix_web::error::ErrorBadRequest(
"Missing group name from request",
)));
}
};
if group_name != "_" {
let auth_header = match req.headers().get("Authorization") {
Some(auth_header) => auth_header,
None => {
return Ok(req.error_response(actix_web::error::ErrorBadRequest(
"Authorization header missing from request",
)));
}
};
let token = match auth_header.to_str() {
Ok(token) => token,
Err(_) => {
return Ok(req.error_response(actix_web::error::ErrorBadRequest(
"Unable to parse Authorization header",
)));
}
};
let db_pool = match req.app_data::<web::Data<Pool>>() {
Some(db_pool) => db_pool,
None => {
return Ok(
req.error_response(actix_web::error::ErrorInternalServerError(""))
);
}
};
let client = match db_pool.get().await {
Ok(client) => client,
Err(_) => {
// log::error!("{}", err);
return Ok(
req.error_response(actix_web::error::ErrorInternalServerError(""))
);
}
};
let group_id = match db::get_group(&client, group_name, token).await {
Ok(group) => group,
Err(_) => {
// log::error!("{}", err);
return Ok(req.error_response(actix_web::error::ErrorUnauthorized("")));
}
};
let authentication_result = AuthenticationResult { group_id };
req.extensions_mut()
.insert::<AuthenticationInfo>(Rc::new(authentication_result));
}
let res = srv.call(req).await?;
Ok(res.map_into_boxed_body())
}
.boxed_local()
}
}

View File

@@ -0,0 +1,197 @@
use crate::auth_middleware::Authenticated;
use crate::collection_log::{CollectionLog, CollectionLogInfo};
use crate::db;
use crate::error::ApiError;
use crate::models::{
AmIInGroupRequest, GroupMember, GroupSkillData, RenameGroupMember, SHARED_MEMBER,
};
use crate::validators::{valid_name, validate_collection_log, validate_member_prop_length};
use actix_web::{delete, get, post, put, web, Error, HttpResponse};
use chrono::{DateTime, Utc};
use deadpool_postgres::{Client, Pool};
use serde::Deserialize;
use std::collections::HashMap;
/// `POST /add-group-member` — adds a named player to the authenticated group.
#[post("/add-group-member")]
pub async fn add_group_member(
    auth: Authenticated,
    group_member: web::Json<GroupMember>,
    db_pool: web::Data<Pool>,
) -> Result<HttpResponse, Error> {
    let member = group_member.into_inner();
    // The shared-storage pseudo-member is reserved and can never be created.
    if member.name == SHARED_MEMBER {
        return Ok(HttpResponse::BadRequest()
            .body(format!("Member name {} not allowed", SHARED_MEMBER)));
    }
    if !valid_name(&member.name) {
        return Ok(
            HttpResponse::BadRequest().body(format!("Member name {} is not valid", member.name))
        );
    }
    let client: Client = db_pool.get().await.map_err(ApiError::PoolError)?;
    db::add_group_member(&client, auth.group_id, &member.name).await?;
    Ok(HttpResponse::Created().finish())
}
/// `DELETE /delete-group-member` — removes a player from the authenticated group.
#[delete("/delete-group-member")]
pub async fn delete_group_member(
    auth: Authenticated,
    group_member: web::Json<GroupMember>,
    db_pool: web::Data<Pool>,
) -> Result<HttpResponse, Error> {
    let member = group_member.into_inner();
    // The shared-storage pseudo-member is reserved and can never be deleted.
    if member.name == SHARED_MEMBER {
        return Ok(HttpResponse::BadRequest()
            .body(format!("Member name {} not allowed", SHARED_MEMBER)));
    }
    let mut client: Client = db_pool.get().await.map_err(ApiError::PoolError)?;
    db::delete_group_member(&mut client, auth.group_id, &member.name).await?;
    Ok(HttpResponse::Ok().finish())
}
/// `PUT /rename-group-member` — renames a player within the authenticated group.
#[put("/rename-group-member")]
pub async fn rename_group_member(
    auth: Authenticated,
    rename_member: web::Json<RenameGroupMember>,
    db_pool: web::Data<Pool>,
) -> Result<HttpResponse, Error> {
    let rename = rename_member.into_inner();
    // Neither end of the rename may be the shared-storage pseudo-member.
    let touches_shared =
        rename.original_name == SHARED_MEMBER || rename.new_name == SHARED_MEMBER;
    if touches_shared {
        return Ok(HttpResponse::BadRequest()
            .body(format!("Member name {} not allowed", SHARED_MEMBER)));
    }
    if !valid_name(&rename.new_name) {
        return Ok(HttpResponse::BadRequest()
            .body(format!("Member name {} is not valid", rename.new_name)));
    }
    let client: Client = db_pool.get().await.map_err(ApiError::PoolError)?;
    db::rename_group_member(
        &client,
        auth.group_id,
        &rename.original_name,
        &rename.new_name,
    )
    .await?;
    Ok(HttpResponse::Ok().finish())
}
/// `POST /update-group-member` — bulk state update submitted for one member
/// of the authenticated group. Rejects members not in the group, then
/// length-validates every vector field before writing to the database.
#[post("/update-group-member")]
pub async fn update_group_member(
    auth: Authenticated,
    group_member: web::Json<GroupMember>,
    db_pool: web::Data<Pool>,
    collection_log_info: web::Data<CollectionLogInfo>,
) -> Result<HttpResponse, Error> {
    let client: Client = db_pool.get().await.map_err(ApiError::PoolError)?;
    let in_group: bool = db::is_member_in_group(&client, auth.group_id, &group_member.name).await?;
    if !in_group {
        return Ok(HttpResponse::Unauthorized().body("Player is not a member of this group"));
    }
    let mut group_member_inner: GroupMember = group_member.into_inner();
    // (min, max) element-count bounds per field. The bounds presumably
    // mirror in-game container sizes (e.g. 28 equipment slots) — confirm
    // against the submitting plugin before changing any of them.
    validate_member_prop_length("stats", &group_member_inner.stats, 7, 7)?;
    validate_member_prop_length("coordinates", &group_member_inner.coordinates, 3, 3)?;
    validate_member_prop_length("skills", &group_member_inner.skills, 23, 24)?;
    validate_member_prop_length("quests", &group_member_inner.quests, 0, 220)?;
    validate_member_prop_length("inventory", &group_member_inner.inventory, 56, 56)?;
    validate_member_prop_length("equipment", &group_member_inner.equipment, 28, 28)?;
    validate_member_prop_length("bank", &group_member_inner.bank, 0, 3000)?;
    validate_member_prop_length("shared_bank", &group_member_inner.shared_bank, 0, 1000)?;
    validate_member_prop_length("rune_pouch", &group_member_inner.rune_pouch, 6, 8)?;
    validate_member_prop_length("seed_vault", &group_member_inner.seed_vault, 0, 500)?;
    validate_member_prop_length("deposited", &group_member_inner.deposited, 0, 200)?;
    validate_member_prop_length("diary_vars", &group_member_inner.diary_vars, 0, 62)?;
    // Mutates collection_log in place (takes &mut) before persisting.
    validate_collection_log(&collection_log_info, &mut group_member_inner.collection_log)?;
    db::update_group_member(
        &client,
        auth.group_id,
        group_member_inner,
        collection_log_info,
    )
    .await?;
    Ok(HttpResponse::Ok().finish())
}
/// Query for `GET /get-group-data`: clients poll with the timestamp of
/// their last update; only newer member state is returned (cutoff
/// semantics live in `db::get_group_data`).
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
pub struct GetGroupDataQuery {
    pub from_time: DateTime<Utc>,
}
/// `GET /get-group-data` — member state changed since `from_time`.
#[get("/get-group-data")]
pub async fn get_group_data(
    auth: Authenticated,
    db_pool: web::Data<Pool>,
    query: web::Query<GetGroupDataQuery>,
) -> Result<web::Json<Vec<GroupMember>>, Error> {
    let client: Client = db_pool.get().await.map_err(ApiError::PoolError)?;
    let members = db::get_group_data(&client, auth.group_id, &query.from_time).await?;
    Ok(web::Json(members))
}
/// Time window selector for `GET /get-skill-data`.
#[derive(Deserialize)]
pub enum SkillDataPeriod {
    Day,
    Week,
    Month,
    Year,
}
/// Query for `GET /get-skill-data`.
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
pub struct GetSkillDataQuery {
    pub period: SkillDataPeriod,
}
/// `GET /get-skill-data` — aggregated skill XP data points for every member
/// of the authenticated group over the requested period.
#[get("/get-skill-data")]
pub async fn get_skill_data(
    auth: Authenticated,
    db_pool: web::Data<Pool>,
    query: web::Query<GetSkillDataQuery>,
) -> Result<web::Json<GroupSkillData>, Error> {
    let client: Client = db_pool.get().await.map_err(ApiError::PoolError)?;
    let aggregate_period = match query.period {
        SkillDataPeriod::Day => db::AggregatePeriod::Day,
        // NOTE(review): `Week` maps to `Month` — looks like a copy/paste
        // slip. Confirm whether `db::AggregatePeriod` has a `Week` variant
        // that should be used here, or whether weekly requests are served
        // at month granularity on purpose.
        SkillDataPeriod::Week => db::AggregatePeriod::Month,
        SkillDataPeriod::Month => db::AggregatePeriod::Month,
        SkillDataPeriod::Year => db::AggregatePeriod::Year,
    };
    let group_skill_data =
        db::get_skills_for_period(&client, auth.group_id, aggregate_period).await?;
    Ok(web::Json(group_skill_data))
}
/// `GET /collection-log` — collection log pages for the whole group,
/// keyed by member name.
#[get("/collection-log")]
pub async fn get_collection_log(
    auth: Authenticated,
    db_pool: web::Data<Pool>,
) -> Result<web::Json<HashMap<String, Vec<CollectionLog>>>, Error> {
    let client: Client = db_pool.get().await.map_err(ApiError::PoolError)?;
    let logs = db::get_collection_log_for_group(&client, auth.group_id).await?;
    Ok(web::Json(logs))
}
/// `GET /am-i-logged-in` — succeeds iff the auth middleware accepted the
/// request; the `Authenticated` extractor alone performs the check.
#[get("/am-i-logged-in")]
pub async fn am_i_logged_in(_auth: Authenticated) -> Result<HttpResponse, Error> {
    Ok(HttpResponse::Ok().finish())
}
/// `GET /am-i-in-group` — 200 when the queried player belongs to the
/// authenticated group, 401 otherwise.
#[get("/am-i-in-group")]
pub async fn am_i_in_group(
    auth: Authenticated,
    db_pool: web::Data<Pool>,
    q: web::Query<AmIInGroupRequest>,
) -> Result<HttpResponse, Error> {
    let client: Client = db_pool.get().await.map_err(ApiError::PoolError)?;
    if db::is_member_in_group(&client, auth.group_id, &q.member_name).await? {
        Ok(HttpResponse::Ok().finish())
    } else {
        Ok(HttpResponse::Unauthorized().body("Player is not a member of this group"))
    }
}

View File

@@ -0,0 +1,174 @@
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
/// One collection log page of one member, as stored and transferred.
#[derive(Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
pub struct CollectionLog {
    pub tab: i16,
    pub page_name: String,
    pub completion_counts: Vec<i32>,
    // Flat item data for the page — exact layout defined by the plugin
    // payload and validators::validate_collection_log (not visible here).
    pub items: Vec<i32>,
    // Never read from clients (skip_deserializing); populated server-side.
    // Presumably items newly obtained since the last update — confirm in db.rs.
    #[serde(skip_deserializing)]
    pub new_items: Vec<i32>,
}
/// Lookup tables built once at startup from the database page rows and the
/// bundled collection_log_info.json; all fields are internal-only.
#[derive(Serialize, Clone)]
pub struct CollectionLogInfo {
    #[serde(skip_serializing)]
    page_name_to_id_lookup: HashMap<String, i16>,
    #[serde(skip_serializing)]
    page_id_item_set_lookup: HashMap<i16, HashSet<i32>>,
    #[serde(skip_serializing)]
    item_name_to_id_lookup: HashMap<String, i32>,
    #[serde(skip_serializing)]
    item_id_to_page_id_lookup: HashMap<i32, HashSet<i16>>,
}
/// One item entry in collection_log_info.json.
#[derive(Deserialize)]
pub struct CollectionLogItemInfo {
    pub id: i32,
    pub name: String,
}
/// One page entry in collection_log_info.json.
#[derive(Deserialize)]
pub struct CollectionLogPageInfo {
    pub name: String,
    pub items: Vec<CollectionLogItemInfo>,
}
// non_snake_case: field name must match the JSON key `tabId` exactly.
#[allow(non_snake_case)]
#[derive(Deserialize)]
pub struct CollectionLogTabInfo {
    pub tabId: i16,
    pub pages: Vec<CollectionLogPageInfo>,
}
impl CollectionLogInfo {
    /// Builds the lookup tables from the database page rows
    /// (`(tab_id, page_id, page_name)` tuples) combined with the static
    /// JSON data in [`COLLECTION_LOG_INFO`].
    ///
    /// Panics if the JSON references a page name absent from `pages_db`,
    /// since the server cannot resolve page ids without it.
    pub fn new(pages_db: Vec<(i16, i16, String)>) -> Self {
        let mut page_name_to_id_lookup = HashMap::new();
        for (_tab_id, page_id, page_name) in &pages_db {
            page_name_to_id_lookup.insert(page_name.clone(), *page_id);
        }
        let mut item_id_to_page_id_lookup: HashMap<i32, HashSet<i16>> = HashMap::new();
        let mut item_name_to_id_lookup = HashMap::new();
        let mut page_id_item_set_lookup: HashMap<i16, HashSet<i32>> = HashMap::new();
        for tab in COLLECTION_LOG_INFO.iter() {
            for page in tab.pages.iter() {
                let page_id = *page_name_to_id_lookup
                    .get(&page.name)
                    .expect("collection log page from JSON missing in database rows");
                // entry().or_default() replaces the previous
                // contains_key + insert + get_mut double-lookup dance.
                let page_items = page_id_item_set_lookup.entry(page_id).or_default();
                for item in page.items.iter() {
                    item_name_to_id_lookup.insert(item.name.clone(), item.id);
                    page_items.insert(item.id);
                    item_id_to_page_id_lookup
                        .entry(item.id)
                        .or_default()
                        .insert(page_id);
                }
            }
        }
        Self {
            page_name_to_id_lookup,
            page_id_item_set_lookup,
            item_name_to_id_lookup,
            item_id_to_page_id_lookup,
        }
    }
    /// Resolves a page name to its id, falling back to the known
    /// plugin-renamed aliases in [`COLLECTION_PAGE_REMAP`].
    pub fn page_name_to_id(&self, page_name: &String) -> Option<&i16> {
        self.page_name_to_id_lookup.get(page_name).or_else(|| {
            COLLECTION_PAGE_REMAP
                .get(page_name)
                .and_then(|canonical| self.page_name_to_id_lookup.get(canonical))
        })
    }
    /// Whether `item_id` appears on page `page_id`.
    pub fn has_item(&self, page_id: i16, item_id: i32) -> bool {
        self.page_id_item_set_lookup
            .get(&page_id)
            .map_or(false, |items| items.contains(&item_id))
    }
    /// Maps duplicate/variant item ids to the canonical collection log id;
    /// ids without a remap entry pass through unchanged.
    pub fn remap_item_id(&self, item_id: i32) -> i32 {
        COLLECTION_ITEM_ID_REMAP
            .get(&item_id)
            .copied()
            .unwrap_or(item_id)
    }
    /// Resolves an item name to its id, falling back to the known
    /// plugin-renamed aliases in [`COLLECTION_ITEM_REMAP`].
    pub fn item_name_to_id(&self, item_name: &String) -> Option<&i32> {
        self.item_name_to_id_lookup.get(item_name).or_else(|| {
            COLLECTION_ITEM_REMAP
                .get(item_name)
                .and_then(|canonical| self.item_name_to_id_lookup.get(canonical))
        })
    }
    /// All pages an item appears on, or `None` for unknown items.
    pub fn page_ids_for_item(&self, item_id: i32) -> Option<&HashSet<i16>> {
        self.item_id_to_page_id_lookup.get(&item_id)
    }
    /// Number of distinct items on a page; 0 for unknown pages.
    pub fn number_of_items_in_page(&self, page_id: i16) -> usize {
        self.page_id_item_set_lookup
            .get(&page_id)
            .map_or(0, HashSet::len)
    }
}
lazy_static! {
    // Seems runelite plugins can rename the value we pass for the page. This remaps
    // known plugin boss renaming. Is there a better way to handle this?
    pub static ref COLLECTION_PAGE_REMAP: HashMap<String, String> = HashMap::from([
        ("The Grumbler".to_string(), "Phantom Muspah".to_string())
    ]);
    // Item-name aliases produced by plugins, mapped back to canonical names.
    pub static ref COLLECTION_ITEM_REMAP: HashMap<String, String> = HashMap::from([
        ("Pharaoh's sceptre".to_string(), "Pharaoh's sceptre (uncharged)".to_string())
    ]);
    // Variant/duplicate item ids mapped to the canonical collection log id.
    pub static ref COLLECTION_ITEM_ID_REMAP: HashMap<i32, i32> = HashMap::from([
        (25627, 12019), // coal bag
        (25628, 12020), // gem bag
        (25629, 24882), // plank sack
        (25617, 10859), // tea flask
        (25618, 10877), // plain satchel
        (25619, 10878), // green satchel
        (25620, 10879), // red satchel
        (25621, 10880), // black satchel
        (25622, 10881), // gold satchel
        (25623, 10882), // rune satchel
        (25624, 13273), // unsired pet
        (25630, 12854), // Flamtaer bag
        (29992, 29990), // Alchemist's amulet
        (30805, 30803), // Dossier
    ]);
    // Raw JSON describing tabs/pages/items, read once on first use.
    // Panics if the bundled file is missing — the server cannot run without it.
    pub static ref COLLECTION_LOG_DATA: String = {
        let path = concat!(env!("CARGO_MANIFEST_DIR"), "/collection_log_info.json");
        std::fs::read_to_string(path).expect(&format!("Could not read collection log info file at {}", path))
    };
    // Parsed form of COLLECTION_LOG_DATA.
    pub static ref COLLECTION_LOG_INFO: Vec<CollectionLogTabInfo> = {
        serde_json::from_str(&COLLECTION_LOG_DATA).unwrap()
    };
}

View File

@@ -0,0 +1,57 @@
use config::{ConfigError, File};
use serde::{Deserialize, Serialize};
/// Log verbosity accepted in the `[logger]` section of config.toml.
#[derive(Deserialize, Clone)]
pub enum LogLevel {
    Info,
    Warn,
    Error,
}
impl LogLevel {
    // NOTE(review): an inherent `to_string` shadows the Display-based
    // `ToString`; `as_str` would be the idiomatic name, but this one is
    // kept because main.rs calls it by name.
    /// Returns the env_logger filter string for this level.
    pub fn to_string(&self) -> &'static str {
        match self {
            LogLevel::Info => "info",
            LogLevel::Warn => "warn",
            LogLevel::Error => "error",
        }
    }
}
/// `[logger]` section of config.toml.
#[derive(Deserialize, Clone)]
pub struct LoggerConfig {
    pub level: LogLevel,
}
/// `[hcaptcha]` section; `secret` is never serialized back out.
#[derive(Serialize, Deserialize, Clone)]
pub struct CaptchaConfig {
    pub enabled: bool,
    pub sitekey: String,
    #[serde(skip_serializing)]
    pub secret: String,
}
/// Top-level application configuration loaded from config.toml.
#[derive(Deserialize, Clone)]
pub struct Config {
    pub pg: deadpool_postgres::Config,
    // Optional sections: absent sections fall back to the defaults below
    // (info-level logging, captcha disabled).
    #[serde(default = "default_logger_config")]
    pub logger: LoggerConfig,
    #[serde(default = "default_captcha_config")]
    pub hcaptcha: CaptchaConfig,
}
/// Serde default: `info` logging.
fn default_logger_config() -> LoggerConfig {
    LoggerConfig {
        level: LogLevel::Info,
    }
}
/// Serde default: captcha disabled with empty keys.
fn default_captcha_config() -> CaptchaConfig {
    CaptchaConfig {
        enabled: false,
        sitekey: "".to_string(),
        secret: "".to_string(),
    }
}
impl Config {
    /// Loads configuration from a `config.*` file (any format the `config`
    /// crate supports) in the working directory.
    pub fn from_env() -> Result<Self, ConfigError> {
        let builder = ::config::Config::builder().add_source(File::with_name("config"));
        builder.build()?.try_deserialize()
    }
}

View File

@@ -0,0 +1,27 @@
use blake2::{Blake2s256, Digest};
use data_encoding::HEXLOWER;
use lazy_static::lazy_static;
use std::fs;
lazy_static! {
    // Server-wide secret mixed into every hash; read once from the `secret`
    // file next to Cargo.toml (written by docker-entrypoint.sh). The file
    // content is hashed verbatim, trailing newline included.
    static ref SECRET: String = {
        let path = concat!(env!("CARGO_MANIFEST_DIR"), "/secret");
        // unwrap_or_else avoids building the panic message on the happy
        // path (clippy::expect_fun_call).
        fs::read_to_string(path)
            .unwrap_or_else(|_| panic!("Could not find secret file at {}", path))
    };
}
/// Blake2s-256 digest of `value` fed `iterations` times, followed by the
/// salt and the server-wide secret. Returns the raw 32-byte digest.
pub fn hash(value: &str, salt: &str, iterations: u32) -> std::vec::Vec<u8> {
    let mut digest = Blake2s256::new();
    let value_bytes = value.as_bytes();
    (0..iterations).for_each(|_| digest.update(value_bytes));
    digest.update(salt);
    digest.update(&SECRET.as_str());
    digest.finalize().to_vec()
}
/// Hashes a group auth token (two digest rounds) and returns it as a
/// 64-character lowercase-hex string, matching the db's CHAR(64) column.
pub fn token_hash(token: &str, salt: &str) -> String {
    HEXLOWER.encode(&hash(token, salt, 2))
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,85 @@
use actix_web::{HttpResponse, ResponseError};
use deadpool_postgres::PoolError;
use derive_more::{Display, From};
/// All API error cases. `From` conversions are derived for the generic
/// wrappers; the per-operation PG variants opt out (`#[from(ignore)]`) so
/// each db call site tags its errors with an operation name for logging.
#[derive(Debug, Display, From)]
pub enum ApiError {
    PoolError(PoolError),
    PGError(tokio_postgres::error::Error),
    SerdeJsonError(serde_json::Error),
    #[from(ignore)]
    GroupCreationError(tokio_postgres::error::Error),
    #[from(ignore)]
    UpdateGroupMemberError(tokio_postgres::error::Error),
    #[from(ignore)]
    GetGroupError(tokio_postgres::error::Error),
    #[from(ignore)]
    AddMemberError(tokio_postgres::error::Error),
    #[from(ignore)]
    GetGroupDataError(tokio_postgres::error::Error),
    #[from(ignore)]
    DeleteGroupMemberError(tokio_postgres::error::Error),
    #[from(ignore)]
    RenameGroupMemberError(tokio_postgres::error::Error),
    #[from(ignore)]
    IsMemberInGroupError(tokio_postgres::error::Error),
    #[from(ignore)]
    GetSkillsDataError(tokio_postgres::error::Error),
    #[from(ignore)]
    GetCollectionLogError(tokio_postgres::error::Error),
    GroupFullError,
    ReqwestError(reqwest::Error),
    GroupMemberValidationError(String),
}
impl std::error::Error for ApiError {}
fn handle_pg_error(err: &tokio_postgres::error::Error, name: &str) -> HttpResponse {
match err.as_db_error() {
Some(db_error) => log::error!("{}: {}", name, db_error.message()),
None => log::error!("{}: {}", name, err),
};
HttpResponse::InternalServerError().finish()
}
/// Maps every [`ApiError`] to an HTTP response. PG-backed variants log and
/// return an opaque 500; validation-style variants return a 400 with the
/// reason in the body.
impl ResponseError for ApiError {
    fn error_response(&self) -> HttpResponse {
        match *self {
            ApiError::PoolError(ref err) => {
                log::error!("PoolError: {}", err);
                HttpResponse::InternalServerError().body(format!("PoolError: {}", err))
            }
            ApiError::GroupCreationError(ref err) => handle_pg_error(err, "GroupCreationError"),
            ApiError::UpdateGroupMemberError(ref err) => {
                handle_pg_error(err, "UpdateGroupMemberError")
            }
            ApiError::PGError(ref err) => handle_pg_error(err, "PGError"),
            ApiError::GetGroupError(ref err) => handle_pg_error(err, "GetGroupError"),
            ApiError::AddMemberError(ref err) => handle_pg_error(err, "AddMemberError"),
            ApiError::GetGroupDataError(ref err) => handle_pg_error(err, "GetGroupDataError"),
            ApiError::IsMemberInGroupError(ref err) => handle_pg_error(err, "IsMemberInGroupError"),
            ApiError::GetSkillsDataError(ref err) => handle_pg_error(err, "GetSkillsDataError"),
            ApiError::GetCollectionLogError(ref err) => {
                handle_pg_error(err, "GetCollectionLogError")
            }
            ApiError::DeleteGroupMemberError(ref err) => {
                handle_pg_error(err, "DeleteGroupMemberError")
            }
            ApiError::RenameGroupMemberError(ref err) => {
                handle_pg_error(err, "RenameGroupMemberError")
            }
            ApiError::SerdeJsonError(ref err) => {
                log::error!("SerdeJsonError: {}", err);
                HttpResponse::InternalServerError().body(format!("SerdeJsonError: {}", err))
            }
            ApiError::GroupFullError => HttpResponse::BadRequest()
                .body("Group has already reached the maximum amount of players"),
            ApiError::ReqwestError(ref err) => {
                log::error!("ReqwestError: {}", err);
                HttpResponse::InternalServerError().body(format!("ReqwestError: {}", err))
            }
            ApiError::GroupMemberValidationError(ref reason) => {
                log::error!("Validation error: {}", reason);
                HttpResponse::BadRequest().body(reason.clone())
            }
        }
    }
}

View File

@@ -0,0 +1,86 @@
mod auth_middleware;
mod authed;
mod collection_log;
mod config;
mod crypto;
mod db;
mod error;
mod models;
mod unauthed;
mod validators;
use crate::auth_middleware::AuthenticateMiddlewareFactory;
use crate::collection_log::CollectionLogInfo;
use crate::config::Config;
use actix_cors::Cors;
use actix_web::{http::header, middleware, web, App, HttpServer};
use tokio_postgres::NoTls;
use mimalloc::MiMalloc;
// Use mimalloc as the process-wide allocator in place of the system default.
#[global_allocator]
static GLOBAL: MiMalloc = MiMalloc;
/// Entry point: loads config, prepares the database, spawns the background
/// tasks, and serves the HTTP API on 0.0.0.0:8080.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // unwraps at startup are deliberate: the server cannot run without a
    // valid config and a reachable database.
    let config = Config::from_env().unwrap();
    let pool = config.pg.create_pool(None, NoTls).unwrap();
    env_logger::init_from_env(
        env_logger::Env::new().default_filter_or(config.logger.level.to_string()),
    );
    // Apply schema migrations and load the collection log lookup tables
    // before accepting any traffic.
    let mut client = pool.get().await.unwrap();
    db::update_schema(&mut client).await.unwrap();
    let collection_log_info: CollectionLogInfo =
        db::get_collection_log_info(&client).await.unwrap();
    // Periodic background jobs: GE price refresh and skill aggregation.
    unauthed::start_ge_updater();
    unauthed::start_skills_aggregator(pool.clone());
    HttpServer::new(move || {
        // /api/* is public; /api/group/{group_name}/* goes through the
        // authentication middleware.
        let unauthed_scope = web::scope("/api")
            .service(unauthed::create_group)
            .service(unauthed::get_ge_prices)
            .service(unauthed::captcha_enabled)
            .service(unauthed::collection_log_info);
        let authed_scope = web::scope("/api/group/{group_name}")
            .wrap(AuthenticateMiddlewareFactory::new())
            .service(authed::update_group_member)
            .service(authed::get_group_data)
            .service(authed::add_group_member)
            .service(authed::delete_group_member)
            .service(authed::rename_group_member)
            .service(authed::am_i_logged_in)
            .service(authed::am_i_in_group)
            .service(authed::get_skill_data)
            .service(authed::get_collection_log);
        // 100 KB cap on both raw payloads and JSON bodies.
        let json_config = web::JsonConfig::default().limit(100000);
        let cors = Cors::default()
            .allow_any_origin()
            .send_wildcard()
            .allowed_methods(vec!["GET", "POST", "DELETE", "PUT", "OPTIONS"])
            .allowed_headers(vec![
                header::AUTHORIZATION,
                header::ACCEPT,
                header::CONTENT_TYPE,
                header::CONTENT_LENGTH,
            ])
            .max_age(3600);
        App::new()
            .wrap(middleware::Logger::new(
                "\"%r\" %s %b \"%{User-Agent}i\" %D",
            ))
            .wrap(middleware::Compress::default())
            .wrap(cors)
            .app_data(web::PayloadConfig::new(100000))
            .app_data(json_config)
            .app_data(web::Data::new(pool.clone()))
            .app_data(web::Data::new(config.clone()))
            .app_data(web::Data::new(collection_log_info.clone()))
            .service(authed_scope)
            .service(unauthed_scope)
    })
    .bind(("0.0.0.0", 8080))?
    .run()
    .await
}

View File

@@ -0,0 +1,118 @@
use crate::collection_log::CollectionLog;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// Reserved pseudo-member name backing group-shared storage; real members
/// may never use this name (enforced in the authed handlers).
pub const SHARED_MEMBER: &str = "@SHARED";
/// An in-game map position.
#[derive(Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Coordinates {
    x: i32,
    y: i32,
    plane: i32,
}
/// The entity a member is currently interacting with.
#[derive(Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Interacting {
    name: String,
    // scale/ratio presumably encode the target's health-bar state — TODO
    // confirm against the plugin that produces this payload.
    scale: i32,
    ratio: i32,
    location: Coordinates,
    #[serde(default = "default_last_updated")]
    last_updated: DateTime<Utc>,
}
/// Serde default: stamp deserialized interactions with the current time.
fn default_last_updated() -> DateTime<Utc> {
    Utc::now()
}
/// Payload for `PUT /rename-group-member`.
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
pub struct RenameGroupMember {
    pub original_name: String,
    pub new_name: String,
}
/// One member's tracked state. Every field except `name` is optional so the
/// plugin can send partial updates, and responses omit absent fields
/// (`skip_serializing_if`). Length bounds for the vector fields are
/// enforced in authed::update_group_member.
#[derive(Deserialize, Serialize)]
pub struct GroupMember {
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stats: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub coordinates: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub skills: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub quests: Option<Vec<u8>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub inventory: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub equipment: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bank: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub shared_bank: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rune_pouch: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub interacting: Option<Interacting>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub seed_vault: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub deposited: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub diary_vars: Option<Vec<i32>>,
    // Accepted from clients but never echoed back (skip_serializing);
    // collection log data is served by its own endpoint.
    #[serde(skip_serializing)]
    pub collection_log: Option<Vec<CollectionLog>>,
    #[serde(skip_serializing)]
    pub collection_log_new: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub last_updated: Option<DateTime<Utc>>,
}
/// One aggregated skill-XP sample at a point in time.
#[derive(Serialize)]
pub struct AggregateSkillData {
    pub time: DateTime<Utc>,
    pub data: Vec<i32>,
}
/// One member's series of aggregated skill samples.
#[derive(Serialize)]
pub struct MemberSkillData {
    pub name: String,
    pub skill_data: Vec<AggregateSkillData>,
}
/// Response body of `GET /get-skill-data`.
pub type GroupSkillData = Vec<MemberSkillData>;
/// Payload for `POST /create-group`. The server generates `token`
/// (`skip_deserializing` — never read from the client) and echoes it back
/// in the creation response; `captcha_response` is consumed server-side
/// and never serialized out.
#[derive(Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
pub struct CreateGroup {
    pub name: String,
    pub member_names: Vec<String>,
    #[serde(default, skip_serializing)]
    pub captcha_response: String,
    #[serde(default = "default_token")]
    #[serde(skip_deserializing)]
    pub token: String,
}
/// Serde default: a fresh UUIDv4 used as the group's auth token.
fn default_token() -> String {
    uuid::Uuid::new_v4().hyphenated().to_string()
}
/// Query for `GET /am-i-in-group`.
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
pub struct AmIInGroupRequest {
    pub member_name: String,
}
/// One item's prices from the wiki GE price API; either side may be absent.
#[derive(Deserialize)]
pub struct WikiGEPrice {
    pub high: Option<i64>,
    pub low: Option<i64>,
}
/// Response shape of prices.runescape.wiki/api/v1/osrs/latest.
#[derive(Deserialize)]
pub struct WikiGEPrices {
    pub data: std::collections::HashMap<i32, WikiGEPrice>,
}
/// item id -> averaged price, as served by `GET /ge-prices`.
pub type GEPrices = std::collections::HashMap<i32, i64>;
/// Subset of the hCaptcha siteverify response this server cares about.
#[derive(Deserialize)]
pub struct CaptchaVerifyResponse {
    pub success: bool,
    // NOTE: unused
    // #[serde(rename = "error-codes", default)]
    // pub error_codes: std::vec::Vec<String>,
}

View File

@@ -0,0 +1,8 @@
-- Base schema; presumably further migrations are applied at startup by
-- db::update_schema (see main.rs).
CREATE SCHEMA IF NOT EXISTS groupironman;
-- Groups authenticate by the (name, token-hash) pair; group_id is the
-- generated surrogate key used internally.
CREATE TABLE IF NOT EXISTS groupironman.groups(
group_id BIGSERIAL UNIQUE,
-- 64 hex chars: lowercase hex of a Blake2s-256 digest (crypto::token_hash).
group_name TEXT NOT NULL,
group_token_hash CHAR(64) NOT NULL,
PRIMARY KEY (group_name, group_token_hash)
);

View File

@@ -0,0 +1,197 @@
use crate::collection_log::COLLECTION_LOG_DATA;
use crate::config::Config;
use crate::db;
use crate::error::ApiError;
use crate::models::{CaptchaVerifyResponse, CreateGroup, GEPrices, WikiGEPrices};
use crate::validators::valid_name;
use actix_web::{get, http::header::ContentType, post, web, Error, HttpResponse};
use arc_swap::{ArcSwap, ArcSwapAny};
use deadpool_postgres::{Client, Pool};
use lazy_static::lazy_static;
use std::sync::Arc;
use std::time::Duration;
use tokio::{task, time};
lazy_static! {
    // Latest GE prices pre-serialized to JSON; swapped atomically by the
    // background updater and read lock-free by the /ge-prices handler.
    static ref GE_PRICES: ArcSwapAny<Arc<String>> = ArcSwap::from(Arc::new(String::default()));
    // Shared client so connections are reused across outbound requests.
    static ref HTTP_CLIENT: reqwest::Client = reqwest::Client::new();
}
/// Fetches the latest item prices from the OSRS wiki price API.
pub async fn fetch_latest_prices() -> Result<WikiGEPrices, ApiError> {
    // The wiki API asks callers to identify themselves via User-Agent.
    let request = HTTP_CLIENT
        .get("https://prices.runescape.wiki/api/v1/osrs/latest")
        .header("User-Agent", "Group Ironmen - Dprk#8740");
    let response = request.send().await.map_err(ApiError::ReqwestError)?;
    response
        .json::<WikiGEPrices>()
        .await
        .map_err(ApiError::ReqwestError)
}
/// Rebuilds the cached GE price JSON from the latest wiki data and swaps
/// it into [`GE_PRICES`].
pub async fn update_ge_prices() -> Result<(), ApiError> {
    let wiki_ge_prices = fetch_latest_prices().await?;
    let mut ge_prices: GEPrices = std::collections::HashMap::new();
    for (item_id, wiki_ge_price) in wiki_ge_prices.data {
        // Average high/low when both exist and high is positive; otherwise
        // fall back to whichever side is usable, defaulting to 0.
        let avg_ge_price: i64 = match (wiki_ge_price.high, wiki_ge_price.low) {
            (Some(high), Some(low)) if high > 0 => (high + low) / 2,
            (_, Some(low)) => low,
            (Some(high), None) => high,
            (None, None) => 0,
        };
        ge_prices.insert(item_id, avg_ge_price);
    }
    GE_PRICES.store(Arc::new(serde_json::to_string(&ge_prices)?));
    Ok(())
}
/// Spawns the detached background task that refreshes GE prices every
/// 4 hours (the first interval tick fires immediately).
pub fn start_ge_updater() {
    task::spawn(async {
        let mut interval = time::interval(Duration::from_secs(14400));
        loop {
            interval.tick().await;
            log::info!("Fetching latest ge prices");
            if let Err(err) = update_ge_prices().await {
                log::error!("Failed to fetch latest ge prices: {}", err);
            }
        }
    });
}
/// Spawns the detached background task that aggregates skill history and
/// applies retention every 30 minutes.
pub fn start_skills_aggregator(db_pool: Pool) {
    task::spawn(async move {
        let mut interval = time::interval(Duration::from_secs(1800));
        loop {
            interval.tick().await;
            log::info!("Running skill aggregator");
            // Both steps are best-effort: failures are logged and retried
            // on the next tick.
            let mut client = match db_pool.get().await {
                Ok(client) => client,
                Err(err) => {
                    log::error!("Failed to get db client: {}", err);
                    continue;
                }
            };
            if let Err(err) = db::aggregate_skills(&mut client).await {
                log::error!("Failed to aggregate skills: {}", err);
            }
            if let Err(err) = db::apply_skills_retention(&mut client).await {
                log::error!("Failed to apply skills retention: {}", err);
            }
        }
    });
}
/// `GET /ge-prices` — serves the JSON blob cached by the updater task.
#[get("/ge-prices")]
pub async fn get_ge_prices() -> Result<HttpResponse, Error> {
    let cached = GE_PRICES.load();
    let body = cached.as_str().to_owned();
    Ok(HttpResponse::Ok()
        .append_header(("Cache-Control", "public, max-age=86400"))
        .content_type("application/json")
        .body(body))
}
/// Verifies an hCaptcha token against the hCaptcha `siteverify` endpoint.
///
/// * `response` — the captcha token submitted by the client.
/// * `secret` — the server-side hCaptcha secret from the config.
///
/// Parameters take `&str` instead of `&String` (clippy `ptr_arg`); existing
/// `&String` call sites keep working through deref coercion.
pub async fn verify_captcha(
    response: &str,
    secret: &str,
) -> Result<CaptchaVerifyResponse, ApiError> {
    // siteverify expects an application/x-www-form-urlencoded body.
    let body = [("response", response), ("secret", secret)];
    let res = HTTP_CLIENT
        .post("https://hcaptcha.com/siteverify")
        .form(&body)
        .send()
        .await
        .map_err(ApiError::ReqwestError)?;
    let captcha_verify_response = res
        .json::<CaptchaVerifyResponse>()
        .await
        .map_err(ApiError::ReqwestError)?;
    Ok(captcha_verify_response)
}
/// Creates a new group.
///
/// Validation pipeline (each failure returns 400):
/// 1. captcha check, only when enabled in config;
/// 2. at most 5 member names;
/// 3. trimmed group name must pass `valid_name`;
/// 4. blank member names are dropped, remaining ones must pass `valid_name`.
/// On success the group is persisted and echoed back with 201 Created.
#[post("/create-group")]
pub async fn create_group(
    create_group: web::Json<CreateGroup>,
    db_pool: web::Data<Pool>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Error> {
    let mut create_group_inner = create_group.into_inner();
    if config.hcaptcha.enabled {
        let captcha_verify_response = verify_captcha(
            &create_group_inner.captcha_response,
            &config.hcaptcha.secret,
        )
        .await?;
        if !captcha_verify_response.success {
            return Ok(HttpResponse::BadRequest().body("Captcha response verification failed"));
        }
    }
    if create_group_inner.member_names.len() > 5 {
        return Ok(HttpResponse::BadRequest().body("Too many member names provided"));
    }
    create_group_inner.name = create_group_inner.name.trim().to_string();
    if !valid_name(&create_group_inner.name) {
        return Ok(HttpResponse::BadRequest().body("Provided group name is not valid"));
    }
    // Drop blank entries before validating the remaining member names.
    create_group_inner
        .member_names
        .retain(|member_name| !member_name.trim().is_empty());
    for member_name in &create_group_inner.member_names {
        if !valid_name(member_name) {
            return Ok(HttpResponse::BadRequest()
                .body(format!("Member name {} is not valid", member_name)));
        }
    }
    let mut client: Client = db_pool.get().await.map_err(ApiError::PoolError)?;
    db::create_group(&mut client, &create_group_inner).await?;
    Ok(HttpResponse::Created().json(&create_group_inner))
}
/// Tells the frontend whether it needs to render the captcha widget.
#[get("captcha-enabled")]
pub async fn captcha_enabled(config: web::Data<Config>) -> Result<HttpResponse, Error> {
    // NOTE(review): this serializes the entire hcaptcha config section, which
    // also carries the server secret — confirm `secret` is excluded from
    // serialization so it never reaches clients.
    let response = HttpResponse::Ok().json(&config.hcaptcha);
    Ok(response)
}
/// Serves the statically loaded collection log metadata blob as JSON.
#[get("collection-log-info")]
pub async fn collection_log_info() -> HttpResponse {
    let mut builder = HttpResponse::Ok();
    builder.content_type(ContentType::json());
    // COLLECTION_LOG_DATA is already a serialized JSON document; send it as-is.
    builder.body(&**COLLECTION_LOG_DATA)
}

// --- diff boundary: the lines below belong to the next file in this commit (+151 lines) ---
use crate::collection_log::{CollectionLog, CollectionLogInfo};
use crate::error::ApiError;
use lazy_static::lazy_static;
use regex::Regex;
#[cfg(test)]
mod valid_name_tests {
    use super::*;

    // Accepted: 1-16 ASCII letters/digits/spaces/dashes/underscores that are
    // not entirely whitespace.
    #[test]
    fn valid_names() {
        let accepted = [
            "test",
            "with space",
            "with 1234",
            "123",
            "with-dash",
            "dash-and space",
            "CAPITAL LETTERS",
            "MiXeD case-123",
            "0123456789",
            "underscore_name",
            " space",
            "space ",
        ];
        for candidate in accepted.iter() {
            assert!(
                valid_name(candidate),
                "{} should have been a valid name",
                candidate
            );
        }
    }

    // Rejected: punctuation/special characters, empty, and whitespace-only.
    #[test]
    fn invalid_names() {
        let rejected = [
            "@SHARED",
            "invalid!",
            "@",
            "-=+[];'./,<>?\"\\|`~",
            "=",
            "+",
            "[",
            "]",
            ";",
            "'",
            ".",
            "/",
            ",",
            "<",
            ">",
            "?",
            "\"",
            "\\",
            "|",
            "`",
            "~",
            "",
            " ",
            "  ",
        ];
        for candidate in rejected.iter() {
            assert!(
                !valid_name(candidate),
                "{} should have been an invalid name",
                candidate
            );
        }
    }
}
/// Returns true when `name` is a valid group/member name: 1-16 bytes of
/// ASCII letters, digits, spaces, dashes, or underscores, and not blank
/// after trimming whitespace.
pub fn valid_name(name: &str) -> bool {
    // Allowed alphabet mirrors the previous reject-regex `[^A-Za-z 0-9-_]`;
    // a direct char check needs no regex (and no separate `is_ascii()` pass,
    // since every allowed char is ASCII by construction).
    let is_allowed = |c: char| c.is_ascii_alphanumeric() || c == ' ' || c == '-' || c == '_';
    (1..=16).contains(&name.len()) && !name.trim().is_empty() && name.chars().all(is_allowed)
}
/// Checks that an optional list property holds between `min` and `max`
/// entries (inclusive). An absent (`None`) value is always accepted; only a
/// present vector is length-checked. Returns a
/// `GroupMemberValidationError` naming `prop_name` on violation.
pub fn validate_member_prop_length<T>(
    prop_name: &str,
    value: &Option<Vec<T>>,
    min: usize,
    max: usize,
) -> Result<(), ApiError> {
    let Some(items) = value else {
        return Ok(());
    };
    let actual = items.len();
    if !(min..=max).contains(&actual) {
        return Err(ApiError::GroupMemberValidationError(format!(
            "{} length violated range constraint {}..={} actual={}",
            prop_name, min, max, actual
        )));
    }
    Ok(())
}
/// Validates (and normalizes in place) each submitted collection log page:
/// the page name must be known, the number of items must not exceed the
/// page's capacity, and every item id — after being remapped to its
/// canonical id — must belong to that page. `None` is accepted as-is.
///
/// `items` appears to be a flat `[item_id, quantity, item_id, quantity, …]`
/// list (inferred from the step-by-2 access pattern — confirm against the
/// client plugin), so the item count is half its length.
pub fn validate_collection_log(
    collection_log_info: &actix_web::web::Data<CollectionLogInfo>,
    collection_logs: &mut Option<Vec<CollectionLog>>,
) -> Result<(), ApiError> {
    let Some(logs) = collection_logs else {
        return Ok(());
    };
    for collection_log in logs.iter_mut() {
        // Resolve the page first; everything else depends on its id.
        let page_id = match collection_log_info.page_name_to_id(&collection_log.page_name) {
            Some(id) => *id,
            None => {
                return Err(ApiError::GroupMemberValidationError(format!(
                    "invalid collection log page {}",
                    collection_log.page_name
                )))
            }
        };
        let number_of_items: usize = collection_log.items.len() / 2;
        if number_of_items > collection_log_info.number_of_items_in_page(page_id) {
            return Err(ApiError::GroupMemberValidationError(format!(
                "{} is too many items for collection log {}",
                number_of_items, collection_log.page_name
            )));
        }
        for i in (0..collection_log.items.len()).step_by(2) {
            // Remap to the canonical id before the membership check, and
            // persist the remapped id back into the submitted data.
            let item_id = collection_log_info.remap_item_id(collection_log.items[i]);
            collection_log.items[i] = item_id;
            if !collection_log_info.has_item(page_id, item_id) {
                return Err(ApiError::GroupMemberValidationError(format!(
                    "collection log {} does not have item id {}",
                    collection_log.page_name, item_id
                )));
            }
        }
    }
    Ok(())
}