Add local market calendar storage
Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>

src/database/calendar.rs (new file, 38 lines)
@@ -0,0 +1,38 @@
+use crate::{optimize, types::Calendar};
+use clickhouse::error::Error;
+use tokio::try_join;
+
+optimize!("calendar");
+
+pub async fn upsert_batch_and_delete<'a, T>(
+    client: &clickhouse::Client,
+    records: T,
+) -> Result<(), Error>
+where
+    T: IntoIterator<Item = &'a Calendar> + Send + Sync + Clone,
+    T::IntoIter: Send,
+{
+    let upsert_future = async {
+        let mut insert = client.insert("calendar")?;
+        for record in records.clone() {
+            insert.write(record).await?;
+        }
+        insert.end().await
+    };
+
+    let delete_future = async {
+        let dates = records
+            .clone()
+            .into_iter()
+            .map(|r| r.date)
+            .collect::<Vec<_>>();
+
+        client
+            .query("DELETE FROM calendar WHERE date NOT IN ?")
+            .bind(dates)
+            .execute()
+            .await
+    };
+
+    try_join!(upsert_future, delete_future).map(|_| ())
+}
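
For orientation, a hedged sketch of a call site (as if written inside this crate; the setup is assumed and not part of this commit). Any clonable iterator over &Calendar satisfies the bounds, e.g. a borrowed slice or Vec:

// Hypothetical helper: push a freshly fetched calendar snapshot into ClickHouse.
// upsert_batch_and_delete inserts every row and, concurrently, deletes any
// stored date that is missing from the snapshot.
use crate::{database, types::Calendar};

async fn sync_calendar(
    client: &clickhouse::Client,
    snapshot: &[Calendar],
) -> Result<(), clickhouse::error::Error> {
    database::calendar::upsert_batch_and_delete(client, snapshot).await
}
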
|
@@ -2,6 +2,7 @@ pub mod assets;
 pub mod backfills_bars;
 pub mod backfills_news;
 pub mod bars;
+pub mod calendar;
 pub mod news;
 pub mod orders;
 
@@ -144,7 +145,8 @@ pub async fn optimize_all(clickhouse_client: &Client) -> Result<(), Error> {
         news::optimize(clickhouse_client),
         backfills_bars::optimize(clickhouse_client),
         backfills_news::optimize(clickhouse_client),
-        orders::optimize(clickhouse_client)
+        orders::optimize(clickhouse_client),
+        calendar::optimize(clickhouse_client)
     )
     .map(|_| ())
 }
@@ -1,12 +1,13 @@
 use crate::{
     config::Config,
-    types::alpaca,
+    database,
+    types::{alpaca, Calendar},
     utils::{backoff, duration_until},
 };
 use log::info;
 use std::sync::Arc;
 use time::OffsetDateTime;
-use tokio::{sync::mpsc, time::sleep};
+use tokio::{join, sync::mpsc, time::sleep};
 
 pub enum Status {
     Open,
@@ -36,13 +37,31 @@ impl From<alpaca::api::incoming::clock::Clock> for Message {
 
 pub async fn run(config: Arc<Config>, sender: mpsc::Sender<Message>) {
     loop {
-        let clock = alpaca::api::incoming::clock::get(
-            &config.alpaca_client,
-            &config.alpaca_rate_limiter,
-            Some(backoff::infinite()),
-        )
-        .await
-        .unwrap();
+        let clock_future = async {
+            alpaca::api::incoming::clock::get(
+                &config.alpaca_client,
+                &config.alpaca_rate_limiter,
+                Some(backoff::infinite()),
+            )
+            .await
+            .unwrap()
+        };
+
+        let calendar_future = async {
+            alpaca::api::incoming::calendar::get(
+                &config.alpaca_client,
+                &config.alpaca_rate_limiter,
+                &alpaca::api::outgoing::calendar::Calendar::default(),
+                Some(backoff::infinite()),
+            )
+            .await
+            .unwrap()
+            .into_iter()
+            .map(Calendar::from)
+            .collect::<Vec<_>>()
+        };
+
+        let (clock, calendar) = join!(clock_future, calendar_future);
 
         let sleep_until = duration_until(if clock.is_open {
             info!("Market is open, will close at {}.", clock.next_close);
@@ -52,7 +71,15 @@ pub async fn run(config: Arc<Config>, sender: mpsc::Sender<Message>) {
             clock.next_open
         });
 
-        sleep(sleep_until).await;
+        let sleep_future = sleep(sleep_until);
+
+        let calendar_future = async {
+            database::calendar::upsert_batch_and_delete(&config.clickhouse_client, &calendar)
+                .await
+                .unwrap();
+        };
+
+        join!(sleep_future, calendar_future);
         sender.send(clock.into()).await.unwrap();
     }
 }
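
The loop now overlaps the idle wait with the database write. A minimal standalone illustration of that tokio::join! pattern (durations and the dummy write are placeholders, not the real calls; assumes tokio with the "full" feature set):

use std::time::Duration;
use tokio::{join, time::sleep};

#[tokio::main]
async fn main() {
    // Both futures run concurrently on the same task; join! completes once the
    // slower of the two finishes, so the write costs no extra wall time as long
    // as it is shorter than the sleep.
    let sleep_future = sleep(Duration::from_millis(200));
    let write_future = async {
        sleep(Duration::from_millis(50)).await; // stand-in for the ClickHouse upsert
    };
    join!(sleep_future, write_future);
    println!("woke up with the calendar already persisted");
}
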
@@ -1,11 +1,15 @@
-use crate::{config::ALPACA_API_URL, types::alpaca::api::outgoing, utils::de};
+use crate::{
+    config::ALPACA_API_URL,
+    types::{self, alpaca::api::outgoing},
+    utils::de,
+};
 use backoff::{future::retry_notify, ExponentialBackoff};
 use governor::DefaultDirectRateLimiter;
 use log::warn;
 use reqwest::{Client, Error};
 use serde::Deserialize;
 use std::time::Duration;
-use time::{Date, Time};
+use time::{Date, OffsetDateTime, Time};
 
 #[derive(Deserialize)]
 pub struct Calendar {
@@ -17,6 +21,16 @@ pub struct Calendar {
     pub settlement_date: Date,
 }
 
+impl From<Calendar> for types::Calendar {
+    fn from(calendar: Calendar) -> Self {
+        Self {
+            date: calendar.date,
+            open: OffsetDateTime::new_utc(calendar.date, calendar.open),
+            close: OffsetDateTime::new_utc(calendar.date, calendar.close),
+        }
+    }
+}
+
 pub async fn get(
     alpaca_client: &Client,
     alpaca_rate_limiter: &DefaultDirectRateLimiter,
@@ -1,3 +1,4 @@
+use crate::utils::time::MAX_TIMESTAMP;
 use serde::Serialize;
 use time::OffsetDateTime;
 
@@ -18,3 +19,13 @@ pub struct Calendar {
     #[serde(rename = "date")]
     pub date_type: DateType,
 }
+
+impl Default for Calendar {
+    fn default() -> Self {
+        Self {
+            start: OffsetDateTime::UNIX_EPOCH,
+            end: OffsetDateTime::from_unix_timestamp(MAX_TIMESTAMP).unwrap(),
+            date_type: DateType::Trading,
+        }
+    }
+}

src/types/calendar.rs (new file, 13 lines)
@@ -0,0 +1,13 @@
+use clickhouse::Row;
+use serde::{Deserialize, Serialize};
+use time::{Date, OffsetDateTime};
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Row)]
+pub struct Calendar {
+    #[serde(with = "clickhouse::serde::time::date")]
+    pub date: Date,
+    #[serde(with = "clickhouse::serde::time::datetime")]
+    pub open: OffsetDateTime,
+    #[serde(with = "clickhouse::serde::time::datetime")]
+    pub close: OffsetDateTime,
+}
@@ -2,11 +2,13 @@ pub mod alpaca;
 pub mod asset;
 pub mod backfill;
 pub mod bar;
+pub mod calendar;
 pub mod news;
 pub mod order;
 
 pub use asset::{Asset, Class, Exchange};
 pub use backfill::Backfill;
 pub use bar::Bar;
+pub use calendar::Calendar;
 pub use news::News;
 pub use order::Order;
@@ -4,6 +4,7 @@ use time::OffsetDateTime;
 pub const ONE_SECOND: Duration = Duration::from_secs(1);
 pub const ONE_MINUTE: Duration = Duration::from_secs(60);
 pub const FIFTEEN_MINUTES: Duration = Duration::from_secs(60 * 15);
+pub const MAX_TIMESTAMP: i64 = 253_402_300_799;
 
 pub fn last_minute() -> OffsetDateTime {
     let now_timestamp = OffsetDateTime::now_utc().unix_timestamp();
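
A quick standalone check (not part of the commit) that the new constant is the last representable second before year 10000:

use time::OffsetDateTime;

fn main() {
    // 253_402_300_799 seconds after the Unix epoch is 9999-12-31T23:59:59Z.
    let max = OffsetDateTime::from_unix_timestamp(253_402_300_799).unwrap();
    assert_eq!(max.year(), 9999);
    assert_eq!((max.month() as u8, max.day()), (12, 31));
    assert_eq!((max.hour(), max.minute(), max.second()), (23, 59, 59));
}
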
@@ -120,3 +120,11 @@ CREATE TABLE IF NOT EXISTS qrust.orders (
 ENGINE = ReplacingMergeTree()
 PARTITION BY toYYYYMM(time_submitted)
 PRIMARY KEY id;
+
+CREATE TABLE IF NOT EXISTS qrust.calendar (
+    date Date,
+    open DateTime,
+    close DateTime
+)
+ENGINE = ReplacingMergeTree()
+PRIMARY KEY date;
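
A hedged sketch of reading the table back with the Row type from src/types/calendar.rs (as if written inside this crate; the client setup is assumed). Because ReplacingMergeTree deduplicates rows sharing a primary key only at merge time, the query uses FINAL to collapse duplicate dates at read time:

use crate::types::Calendar;

async fn load_calendar(
    client: &clickhouse::Client,
) -> Result<Vec<Calendar>, clickhouse::error::Error> {
    // ?fields expands to the row's columns (date, open, close); FINAL makes
    // ReplacingMergeTree return one row per date.
    client
        .query("SELECT ?fields FROM calendar FINAL ORDER BY date")
        .fetch_all::<Calendar>()
        .await
}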