Remove asset_status thread
Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>
@@ -1,26 +1,29 @@
-use super::{Guard, ThreadType};
+use super::ThreadType;
 use crate::{
-    config::{Config, ALPACA_CRYPTO_DATA_URL, ALPACA_NEWS_DATA_URL, ALPACA_STOCK_DATA_URL},
+    config::{Config, ALPACA_CRYPTO_DATA_URL, ALPACA_STOCK_DATA_URL},
     database,
     types::{
         alpaca::{
+            self,
             api::{self, outgoing::Sort},
             Source,
         },
         news::Prediction,
-        Asset, Bar, Class, News, Subset,
+        Bar, Class, News,
     },
-    utils::{duration_until, last_minute, remove_slash_from_pair, FIFTEEN_MINUTES, ONE_MINUTE},
+    utils::{
+        duration_until, last_minute, remove_slash_from_pair, FIFTEEN_MINUTES, ONE_MINUTE,
+        ONE_SECOND,
+    },
 };
 use async_trait::async_trait;
-use backoff::{future::retry, ExponentialBackoff};
 use futures_util::future::join_all;
-use log::{error, info, warn};
+use log::{info, warn};
 use std::{collections::HashMap, sync::Arc};
 use time::OffsetDateTime;
 use tokio::{
     join, spawn,
-    sync::{mpsc, oneshot, Mutex, RwLock},
+    sync::{mpsc, oneshot, Mutex},
     task::{block_in_place, JoinHandle},
     time::sleep,
 };
@@ -30,19 +33,28 @@ pub enum Action {
     Purge,
 }
 
+impl From<super::Action> for Action {
+    fn from(action: super::Action) -> Self {
+        match action {
+            super::Action::Add => Self::Backfill,
+            super::Action::Remove => Self::Purge,
+        }
+    }
+}
+
 pub struct Message {
     pub action: Action,
-    pub assets: Subset<Asset>,
+    pub symbols: Vec<String>,
     pub response: oneshot::Sender<()>,
 }
 
 impl Message {
-    pub fn new(action: Action, assets: Subset<Asset>) -> (Self, oneshot::Receiver<()>) {
+    pub fn new(action: Action, symbols: Vec<String>) -> (Self, oneshot::Receiver<()>) {
         let (sender, receiver) = oneshot::channel::<()>();
         (
             Self {
                 action,
-                assets,
+                symbols,
                 response: sender,
             },
             receiver,
@@ -60,58 +72,31 @@ pub trait Handler: Send + Sync {
     fn log_string(&self) -> &'static str;
 }
 
-pub async fn run(
-    handler: Arc<Box<dyn Handler>>,
-    guard: Arc<RwLock<Guard>>,
-    mut receiver: mpsc::Receiver<Message>,
-) {
+pub async fn run(handler: Arc<Box<dyn Handler>>, mut receiver: mpsc::Receiver<Message>) {
     let backfill_jobs = Arc::new(Mutex::new(HashMap::new()));
 
     loop {
         let message = receiver.recv().await.unwrap();
 
         spawn(handle_backfill_message(
             handler.clone(),
-            guard.clone(),
             backfill_jobs.clone(),
             message,
         ));
     }
 }
 
-#[allow(clippy::significant_drop_tightening)]
-#[allow(clippy::too_many_lines)]
 async fn handle_backfill_message(
     handler: Arc<Box<dyn Handler>>,
-    guard: Arc<RwLock<Guard>>,
     backfill_jobs: Arc<Mutex<HashMap<String, JoinHandle<()>>>>,
     message: Message,
 ) {
-    let guard = guard.read().await;
     let mut backfill_jobs = backfill_jobs.lock().await;
-
-    let symbols = match message.assets {
-        Subset::All => guard
-            .assets
-            .clone()
-            .into_iter()
-            .map(|(_, symbol)| symbol)
-            .collect(),
-        Subset::Some(assets) => assets
-            .into_iter()
-            .map(|asset| asset.symbol)
-            .filter(|symbol| match message.action {
-                Action::Backfill => guard.assets.contains_right(symbol),
-                Action::Purge => !guard.assets.contains_right(symbol),
-            })
-            .collect::<Vec<_>>(),
-    };
-
     match message.action {
         Action::Backfill => {
             let log_string = handler.log_string();
 
-            for symbol in symbols {
+            for symbol in message.symbols {
                 if let Some(job) = backfill_jobs.get(&symbol) {
                     if !job.is_finished() {
                         warn!(
@@ -131,7 +116,7 @@ async fn handle_backfill_message(
                     .await
                     .as_ref()
                     .map_or(OffsetDateTime::UNIX_EPOCH, |backfill| {
-                        backfill.time + ONE_MINUTE
+                        backfill.time + ONE_SECOND
                     });
 
                 let fetch_to = last_minute();
@@ -148,7 +133,7 @@ async fn handle_backfill_message(
             }
         }
         Action::Purge => {
-            for symbol in &symbols {
+            for symbol in &message.symbols {
                 if let Some(job) = backfill_jobs.remove(symbol) {
                     if !job.is_finished() {
                         job.abort();
@@ -158,8 +143,8 @@ async fn handle_backfill_message(
             }
 
             join!(
-                handler.delete_backfills(&symbols),
-                handler.delete_data(&symbols)
+                handler.delete_backfills(&message.symbols),
+                handler.delete_data(&message.symbols)
             );
         }
     }
@@ -167,25 +152,6 @@ async fn handle_backfill_message(
     message.response.send(()).unwrap();
 }
 
-pub fn create_backfill_handler(
-    thread_type: ThreadType,
-    app_config: Arc<Config>,
-) -> Box<dyn Handler> {
-    match thread_type {
-        ThreadType::Bars(Class::UsEquity) => Box::new(BarHandler {
-            app_config,
-            data_url: ALPACA_STOCK_DATA_URL,
-            api_query_constructor: us_equity_query_constructor,
-        }),
-        ThreadType::Bars(Class::Crypto) => Box::new(BarHandler {
-            app_config,
-            data_url: ALPACA_CRYPTO_DATA_URL,
-            api_query_constructor: crypto_query_constructor,
-        }),
-        ThreadType::News => Box::new(NewsHandler { app_config }),
-    }
-}
-
 struct BarHandler {
     app_config: Arc<Config>,
     data_url: &'static str,
@@ -277,35 +243,19 @@ impl Handler for BarHandler {
         let mut next_page_token = None;
 
         loop {
-            let message = retry(ExponentialBackoff::default(), || async {
-                self.app_config.alpaca_rate_limit.until_ready().await;
-                self.app_config
-                    .alpaca_client
-                    .get(self.data_url)
-                    .query(&(self.api_query_constructor)(
-                        &self.app_config,
-                        symbol.clone(),
-                        fetch_from,
-                        fetch_to,
-                        next_page_token.clone(),
-                    ))
-                    .send()
-                    .await?
-                    .error_for_status()?
-                    .json::<api::incoming::bar::Message>()
-                    .await
-                    .map_err(backoff::Error::Permanent)
-            })
+            let message = alpaca::api::incoming::bar::get_historical(
+                &self.app_config,
+                self.data_url,
+                &(self.api_query_constructor)(
+                    &self.app_config,
+                    symbol.clone(),
+                    fetch_from,
+                    fetch_to,
+                    next_page_token.clone(),
+                ),
+            )
             .await;
 
-            let message = match message {
-                Ok(message) => message,
-                Err(e) => {
-                    error!("Failed to backfill bars for {}: {}.", symbol, e);
-                    return;
-                }
-            };
-
             message.bars.into_iter().for_each(|(symbol, bar_vec)| {
                 for bar in bar_vec {
                     bars.push(Bar::from((bar, symbol.clone())));
@@ -381,38 +331,21 @@ impl Handler for NewsHandler {
         let mut next_page_token = None;
 
         loop {
-            let message = retry(ExponentialBackoff::default(), || async {
-                self.app_config.alpaca_rate_limit.until_ready().await;
-                self.app_config
-                    .alpaca_client
-                    .get(ALPACA_NEWS_DATA_URL)
-                    .query(&api::outgoing::news::News {
-                        symbols: vec![remove_slash_from_pair(&symbol)],
-                        start: Some(fetch_from),
-                        end: Some(fetch_to),
-                        limit: Some(50),
-                        include_content: Some(true),
-                        exclude_contentless: Some(false),
-                        page_token: next_page_token.clone(),
-                        sort: Some(Sort::Asc),
-                    })
-                    .send()
-                    .await?
-                    .error_for_status()?
-                    .json::<api::incoming::news::Message>()
-                    .await
-                    .map_err(backoff::Error::Permanent)
-            })
+            let message = alpaca::api::incoming::news::get_historical(
+                &self.app_config,
+                &api::outgoing::news::News {
+                    symbols: vec![remove_slash_from_pair(&symbol)],
+                    start: Some(fetch_from),
+                    end: Some(fetch_to),
+                    limit: Some(50),
+                    include_content: Some(true),
+                    exclude_contentless: Some(false),
+                    page_token: next_page_token.clone(),
+                    sort: Some(Sort::Asc),
+                },
+            )
             .await;
 
-            let message = match message {
-                Ok(message) => message,
-                Err(e) => {
-                    error!("Failed to backfill news for {}: {}.", symbol, e);
-                    return;
-                }
-            };
-
             message.news.into_iter().for_each(|news_item| {
                 news.push(News::from(news_item));
             });
@@ -480,3 +413,19 @@ impl Handler for NewsHandler {
         "news"
     }
 }
+
+pub fn create_handler(thread_type: ThreadType, app_config: Arc<Config>) -> Box<dyn Handler> {
+    match thread_type {
+        ThreadType::Bars(Class::UsEquity) => Box::new(BarHandler {
+            app_config,
+            data_url: ALPACA_STOCK_DATA_URL,
+            api_query_constructor: us_equity_query_constructor,
+        }),
+        ThreadType::Bars(Class::Crypto) => Box::new(BarHandler {
+            app_config,
+            data_url: ALPACA_CRYPTO_DATA_URL,
+            api_query_constructor: crypto_query_constructor,
+        }),
+        ThreadType::News => Box::new(NewsHandler { app_config }),
+    }
+}