Skip to content

Commit f99b909

Browse files
authored
Impr/add name change route (#35)
* feat: add name change route * fix: actually works now * fix: add cache, simplify deduplication, remove redundant type * impr: split query to avoid aggregation * fix: update type, simplify mapping, clippy nitpick
1 parent edcdfde commit f99b909

File tree

4 files changed

+96
-2
lines changed

4 files changed

+96
-2
lines changed

src/db/mod.rs

Lines changed: 60 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
mod migrations;
22
pub mod schema;
33
pub mod writer;
4+
use std::collections::HashSet;
45

56
pub use migrations::run as setup_db;
67
use serde::Deserialize;
@@ -12,14 +13,15 @@ use crate::{
1213
schema::LogRangeParams,
1314
stream::{FlushBufferResponse, LogsStream},
1415
},
15-
web::schema::{AvailableLogDate, ChannelLogsStats, LogsParams, UserLogsStats},
16+
web::schema::{AvailableLogDate, ChannelLogsStats, LogsParams, PreviousName, UserLogsStats},
1617
Result,
1718
};
1819
use chrono::{DateTime, Datelike, Duration, Utc};
1920
use clickhouse::{query::RowCursor, Client, Row};
2021
use rand::{seq::IteratorRandom, thread_rng};
2122
use schema::StructuredMessage;
2223
use tracing::debug;
24+
use futures::future::try_join_all;
2325

2426
const CHANNEL_MULTI_QUERY_SIZE_DAYS: i64 = 14;
2527

@@ -381,6 +383,63 @@ pub async fn get_user_stats(
381383
})
382384
}
383385

386+
pub async fn get_user_name_history(
387+
db: &Client,
388+
user_id: &str,
389+
) -> Result<Vec<PreviousName>> {
390+
#[derive(Deserialize, Row)]
391+
struct SingleNameHistory {
392+
pub last_timestamp: i32,
393+
pub first_timestamp: i32,
394+
}
395+
396+
let name_query = "SELECT DISTINCT user_login FROM message_structured WHERE user_id = ? SETTINGS use_query_cache = 1, query_cache_ttl = 600".to_owned();
397+
let name_query = db.query(&name_query).bind(user_id);
398+
let distinct_logins = name_query.fetch_all::<String>().await?;
399+
if distinct_logins.is_empty() {
400+
return Ok(vec![]);
401+
}
402+
403+
let sanitized_user_logins = distinct_logins
404+
.iter()
405+
.map(|login| login.trim_start_matches(':').to_owned());
406+
407+
let history_query = "SELECT toDateTime(MAX(timestamp)) AS last_timestamp, toDateTime(MIN(timestamp)) AS first_timestamp FROM message_structured WHERE (user_id = ?) AND (user_login = ?) SETTINGS use_query_cache = 1, query_cache_ttl = 600".to_owned();
408+
409+
let name_history_rows = try_join_all(sanitized_user_logins.into_iter().map(|login| {
410+
let query = history_query.clone();
411+
async move {
412+
let query = db.query(&query).bind(user_id).bind(&login);
413+
query
414+
.fetch_one::<SingleNameHistory>()
415+
.await
416+
.map(|history| (login, history))
417+
}
418+
}))
419+
.await?;
420+
421+
let mut seen_logins = HashSet::new();
422+
423+
let names = name_history_rows
424+
.into_iter()
425+
.filter_map(|(login, history)| {
426+
if seen_logins.insert(login.clone()) {
427+
Some(PreviousName {
428+
user_login: login,
429+
last_timestamp: DateTime::from_timestamp(history.last_timestamp.into(), 0)
430+
.expect("Invalid DateTime"),
431+
first_timestamp: DateTime::from_timestamp(history.first_timestamp.into(), 0)
432+
.expect("Invalid DateTime"),
433+
})
434+
} else {
435+
None
436+
}
437+
})
438+
.collect();
439+
440+
Ok(names)
441+
}
442+
384443
fn apply_limit_offset(query: &mut String, buffer_response: &FlushBufferResponse) {
385444
if let Some(limit) = buffer_response.normalized_limit() {
386445
*query = format!("{query} LIMIT {limit}");

src/web/handlers.rs

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ use super::{
33
schema::{
44
AvailableLogs, AvailableLogsParams, Channel, ChannelIdType, ChannelLogsByDatePath,
55
ChannelLogsStats, ChannelParam, ChannelsList, LogsParams, LogsPathChannel, SearchParams,
6-
UserLogPathParams, UserLogsPath, UserLogsStats, UserParam,
6+
UserLogPathParams, UserLogsPath, UserLogsStats, UserParam, UserNameHistoryParam
77
},
88
};
99
use crate::{
@@ -507,6 +507,20 @@ async fn search_user_logs(
507507
Ok(logs)
508508
}
509509

510+
511+
pub async fn get_user_name_history(
512+
app: State<App>,
513+
Path(UserNameHistoryParam {
514+
user_id,
515+
}): Path<UserNameHistoryParam>,
516+
) -> Result<impl IntoApiResponse> {
517+
app.check_opted_out(&user_id, None)?;
518+
519+
let names = db::get_user_name_history(&app.db,&user_id).await?;
520+
521+
Ok(Json(names))
522+
}
523+
510524
pub async fn optout(app: State<App>) -> Json<String> {
511525
let mut rng = thread_rng();
512526
let optout_code: String = (0..5).map(|_| rng.sample(Alphanumeric) as char).collect();

src/web/mod.rs

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -167,6 +167,12 @@ pub async fn run(app: App, mut shutdown_rx: ShutdownRx, bot_tx: Sender<BotMessag
167167
op.description("Get user stats")
168168
}),
169169
)
170+
.api_route(
171+
"/namehistory/:user_id",
172+
get_with(handlers::get_user_name_history, |op| {
173+
op.description("Get user name history by provided user id")
174+
}),
175+
)
170176
.api_route("/optout", post(handlers::optout))
171177
.api_route("/capabilities", get(capabilities))
172178
.route("/docs", Redoc::new("/openapi.json").axum_route())

src/web/schema.rs

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
use super::responders::logs::{JsonResponseType, LogsResponseType};
2+
use chrono::{DateTime, Utc};
23
use schemars::JsonSchema;
34
use serde::{Deserialize, Deserializer, Serialize};
45
use std::fmt::Display;
@@ -176,3 +177,17 @@ pub struct UserLogsStats {
176177
pub user_id: String,
177178
pub message_count: u64,
178179
}
180+
181+
/// Path parameters for the `/namehistory/:user_id` route.
#[derive(Deserialize, JsonSchema)]
pub struct UserNameHistoryParam {
    // Id of the user whose name history is requested.
    pub user_id: String,
}
185+
186+
#[derive(Serialize, JsonSchema)]
187+
pub struct PreviousName {
188+
pub user_login: String,
189+
#[schemars(with = "String")]
190+
pub last_timestamp: DateTime<Utc>,
191+
#[schemars(with = "String")]
192+
pub first_timestamp: DateTime<Utc>,
193+
}

0 commit comments

Comments
 (0)