fix(db): fix bugs based on sqlite date format difference from chrono (#1951)

* fix(db): fix repository refresh job

* Provide fix for sqlite date format

* Update implementation for repository provider

* Fix formatting

* Add compiletime test case for operation types

* Fix tests

* Make end bound exclusive

* Fix formatting
boxbeam authored Apr 24, 2024
1 parent c3d25eb commit 1b98db4
Showing 6 changed files with 103 additions and 19 deletions.
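The mismatch behind this fix, as a minimal standalone sketch (it assumes only the chrono crate; the exact string the pre-fix code sent to SQLite may differ slightly, but the shape of the problem is the same): chrono's textual renderings use a 'T' separator, fractional seconds, and a timezone offset, while SQLite's DATETIME('now') emits plain "YYYY-MM-DD HH:MM:SS". The %F %X format adopted in this commit matches the SQLite layout.

use chrono::Utc;

fn main() {
    let now = Utc::now();

    // chrono's RFC 3339 rendering, e.g. "2024-04-24T12:34:56.789012345+00:00"
    println!("chrono : {}", now.to_rfc3339());

    // The layout SQLite's DATETIME('now') produces, e.g. "2024-04-24 12:34:56".
    // %F is YYYY-MM-DD and %X is HH:MM:SS -- the same format string the new
    // as_sqlite_datetime() helper uses.
    println!("sqlite : {}", now.format("%F %X"));
}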
ee/tabby-db/src/github_repository_provider.rs (8 additions & 3 deletions)
@@ -3,7 +3,7 @@ use chrono::{DateTime, Utc};
use sqlx::{prelude::FromRow, query, query_as};
use tabby_db_macros::query_paged_as;

use crate::{DbConn, SQLXResultExt};
use crate::{AsSQLiteDateTime, DbConn, SQLXResultExt};

#[derive(FromRow)]
pub struct GithubRepositoryProviderDAO {
@@ -156,8 +156,12 @@ impl DbConn {
) -> Result<i64> {
let res = query!(
"INSERT INTO github_provided_repositories (github_repository_provider_id, vendor_id, name, git_url) VALUES ($1, $2, $3, $4)
ON CONFLICT(github_repository_provider_id, vendor_id) DO UPDATE SET github_repository_provider_id = $1, name = $2, git_url = $3",
github_provider_id, vendor_id, name, git_url).execute(&self.pool).await?;
ON CONFLICT(github_repository_provider_id, vendor_id) DO UPDATE SET name = $3, git_url = $4, updated_at = DATETIME('now')",
github_provider_id,
vendor_id,
name,
git_url
).execute(&self.pool).await?;
Ok(res.last_insert_rowid())
}

@@ -177,6 +181,7 @@ impl DbConn {
github_provider_id: i64,
cutoff_timestamp: DateTime<Utc>,
) -> Result<()> {
let cutoff_timestamp = cutoff_timestamp.as_sqlite_datetime();
query!(
"DELETE FROM github_provided_repositories WHERE github_repository_provider_id = ? AND updated_at < ?;",
github_provider_id,
ee/tabby-db/src/lib.rs (80 additions & 3 deletions)
@@ -1,9 +1,14 @@
use std::{ops::Deref, path::Path, sync::Arc};
use std::{
fmt::Display,
ops::{Add, Deref, Sub},
path::Path,
sync::Arc,
};

use anyhow::anyhow;
use cache::Cache;
use cached::TimedSizedCache;
use chrono::{DateTime, NaiveDateTime, Utc};
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
pub use email_setting::EmailSettingDAO;
pub use github_repository_provider::{GithubProvidedRepositoryDAO, GithubRepositoryProviderDAO};
pub use invitations::InvitationDAO;
@@ -256,9 +261,53 @@
}
}

pub trait AsSQLiteDateTime {
fn as_sqlite_datetime(&self) -> String;
}

impl AsSQLiteDateTime for DateTime<Utc> {
fn as_sqlite_datetime(&self) -> String {
self.format("%F %X").to_string()
}
}

#[derive(Default, Clone)]
pub struct DateTimeUtc(DateTime<Utc>);

impl Display for DateTimeUtc {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.as_sqlite_datetime())
}
}

impl std::fmt::Debug for DateTimeUtc {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.as_sqlite_datetime())
}
}

impl DateTimeUtc {
pub fn now() -> Self {
Utc::now().into()
}
}

impl Add<Duration> for DateTimeUtc {
type Output = Self;

fn add(self, rhs: Duration) -> Self::Output {
((self.0) + rhs).into()
}
}

impl Sub<Duration> for DateTimeUtc {
type Output = Self;

fn sub(self, rhs: Duration) -> Self::Output {
((self.0) - rhs).into()
}
}

impl From<DateTime<Utc>> for DateTimeUtc {
fn from(value: DateTime<Utc>) -> Self {
Self(value)
@@ -291,7 +340,7 @@ impl<'a> Encode<'a, Sqlite> for DateTimeUtc {
&self,
buf: &mut <Sqlite as sqlx::database::HasArguments<'a>>::ArgumentBuffer,
) -> sqlx::encode::IsNull {
self.0.encode_by_ref(buf)
<String as Encode<Sqlite>>::encode(self.0.as_sqlite_datetime(), buf)
}
}

@@ -352,6 +401,34 @@ mod tests {
assert_eq!(new_token.len(), 36);
assert_ne!(old_token, new_token);
}

#[tokio::test]
async fn test_timestamp_format() {
let db = DbConn::new_in_memory().await.unwrap();

let time = DateTimeUtc::now();

let time_str = time.as_sqlite_datetime();
let sql_time: String = sqlx::query_scalar::<_, String>("SELECT ?;")
.bind(time)
.fetch_one(&db.pool)
.await
.unwrap();

assert_eq!(time_str, sql_time);

let sql_time: String = sqlx::query_scalar::<_, String>("SELECT DATETIME('now');")
.fetch_one(&db.pool)
.await
.unwrap();
assert_eq!(sql_time, DateTimeUtc::now().as_sqlite_datetime());

// No assertions, these will fail at compiletime if adding/subtracting from these types
// yields DateTime<Utc>, which could be dangerous
let time = DateTimeUtc::now();
let _added_time: DateTimeUtc = time.clone() + Duration::milliseconds(1);
let _subbed_time: DateTimeUtc = time - Duration::milliseconds(1);
}
}

#[cfg(any(test, feature = "testutils"))]
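Why the matching layout matters for the text comparisons in the queries above (the DELETE cutoff and the analytics date filters): once both sides use the "%F %X" layout, lexicographic string order coincides with chronological order, which is what those comparisons rely on. A small illustrative sketch with hypothetical column values, not code from the repository:

fn main() {
    // Both sides in SQLite's DATETIME layout: string order == chronological order.
    let stored = "2024-04-24 08:00:00"; // hypothetical updated_at column value
    let cutoff = "2024-04-24 12:00:00"; // what as_sqlite_datetime() would produce
    assert!(stored < cutoff); // the row is correctly seen as older than the cutoff

    // Mixed layouts break this: the RFC 3339 'T' separator sorts after ' ', so an
    // RFC 3339 timestamp compares greater than any same-day DATETIME-style string,
    // no matter the actual time of day.
    let rfc3339_stored = "2024-04-24T08:00:00+00:00";
    assert!(rfc3339_stored > cutoff); // looks "newer" even though it is older
}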
ee/tabby-db/src/user_completions.rs (7 additions & 5 deletions)
@@ -5,7 +5,7 @@ use cached::CachedAsync;
use chrono::{DateTime, Utc};
use sqlx::{prelude::FromRow, query};

use crate::{DateTimeUtc, DbConn};
use crate::{AsSQLiteDateTime, DateTimeUtc, DbConn};

#[derive(FromRow)]
pub struct UserCompletionDAO {
@@ -41,7 +41,9 @@ impl DbConn {
let duration = Duration::from_millis(ts as u64);
let created_at =
DateTime::from_timestamp(duration.as_secs() as i64, duration.subsec_nanos())
.context("Invalid created_at timestamp")?;
.context("Invalid created_at timestamp")?
.as_sqlite_datetime();

let res = query!(
"INSERT INTO user_completions (user_id, completion_id, language, created_at) VALUES (?, ?, ?, ?);",
user_id,
@@ -109,7 +111,7 @@ impl DbConn {
SUM(selects) as selects,
SUM(views) as views
FROM user_completions JOIN users ON user_id = users.id AND users.active
WHERE user_completions.created_at >= DATE('now', '-1 year')
WHERE user_completions.created_at >= DATETIME('now', '-1 year')
AND ({users_empty} OR user_id IN ({users}))
GROUP BY 1, 2
ORDER BY 1, 2 ASC
@@ -171,8 +173,8 @@ impl DbConn {
no_selected_users = users.is_empty(),
no_selected_languages = languages.is_empty(),
))
.bind(start)
.bind(end)
.bind(start.as_sqlite_datetime())
.bind(end.as_sqlite_datetime())
.fetch_all(&self.pool)
.await?;
Ok(res)
ee/tabby-db/src/user_events.rs (3 additions & 3 deletions)
@@ -5,7 +5,7 @@ use chrono::{DateTime, Utc};
use sqlx::{prelude::FromRow, query};
use tabby_db_macros::query_paged_as;

use crate::DbConn;
use crate::{AsSQLiteDateTime, DbConn};

#[derive(FromRow)]
pub struct UserEventDAO {
@@ -50,8 +50,8 @@ impl DbConn {
) -> Result<Vec<UserEventDAO>> {
let condition = Some(format!(
"created_at >= '{}' AND created_at < '{}'",
start.to_rfc3339(),
end.to_rfc3339()
start.as_sqlite_datetime(),
end.as_sqlite_datetime()
));
let events = query_paged_as!(
UserEventDAO,
ee/tabby-webserver/src/integrations/github.rs (1 addition & 1 deletion)
@@ -62,7 +62,7 @@ pub fn routes(
};
Router::new()
.route("/connect/:id", routing::get(connect))
// Routes defined past this point require authentication
// Routes defined above this point require authentication
.layer(from_fn_with_state(auth, require_login_middleware))
.route("/callback", routing::get(callback))
.with_state(state)
ee/tabby-webserver/src/service/analytic.rs (4 additions & 4 deletions)
@@ -90,7 +90,7 @@ pub fn new_analytic_service(db: DbConn) -> Arc<dyn AnalyticService> {

#[cfg(test)]
mod tests {
use chrono::Days;
use chrono::{Days, Duration};

use super::*;
use crate::service::AsID;
@@ -200,7 +200,7 @@ mod tests {
.unwrap();

let svc = new_analytic_service(db);
let end = Utc::now();
let end = Utc::now() + Duration::days(1);
let start = end.checked_sub_days(Days::new(100)).unwrap();

// Test that there is a single completion stat (1 day of history) with 1 completion and 1 select
Expand Down Expand Up @@ -243,7 +243,7 @@ mod tests {
.unwrap();

let service = new_analytic_service(db);
let end = Utc::now();
let end = Utc::now() + Duration::days(1);
let start = end.checked_sub_days(Days::new(100)).unwrap();

let stats = service
Expand Down Expand Up @@ -288,7 +288,7 @@ mod tests {
.unwrap()
.is_empty());

let end = Utc::now();
let end = Utc::now() + Duration::days(1);
let start = end.checked_sub_days(Days::new(100)).unwrap();

assert!(service
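On the test changes just above: end moves one day past Utc::now() because the upper bound of the range is now exclusive (the "Make end bound exclusive" item in the commit message). With timestamps truncated to whole seconds by the DATETIME layout, an end of exactly "now" can equal the created_at of a row inserted in the same second, and an exclusive created_at < end would then drop it. A sketch of that edge case; in_range is a hypothetical helper, not repository code:

// Mirrors the SQL shape `created_at >= start AND created_at < end`, with all three
// values in the "YYYY-MM-DD HH:MM:SS" layout. Hypothetical helper for illustration.
fn in_range(created_at: &str, start: &str, end: &str) -> bool {
    created_at >= start && created_at < end
}

fn main() {
    let created_at = "2024-04-24 12:34:56"; // row written as the test runs

    // end == "now", truncated to the same second: the exclusive bound drops the row.
    assert!(!in_range(created_at, "2024-01-01 00:00:00", "2024-04-24 12:34:56"));

    // Padding end past "now" (the tests add a day) keeps same-second rows in range.
    assert!(in_range(created_at, "2024-01-01 00:00:00", "2024-04-25 12:34:56"));
}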
