Clean up error propagation
Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>

@@ -9,7 +9,7 @@ use crate::{
             Source,
         },
         news::Prediction,
-        Bar, Class, News,
+        Backfill, Bar, Class, News,
     },
     utils::{
         duration_until, last_minute, remove_slash_from_pair, FIFTEEN_MINUTES, ONE_MINUTE,
@@ -18,14 +18,15 @@ use crate::{
 };
 use async_trait::async_trait;
 use futures_util::future::join_all;
-use log::{info, warn};
+use log::{error, info, warn};
 use std::{collections::HashMap, sync::Arc};
 use time::OffsetDateTime;
 use tokio::{
-    join, spawn,
+    spawn,
     sync::{mpsc, oneshot, Mutex},
     task::{block_in_place, JoinHandle},
     time::sleep,
+    try_join,
 };
 
 pub enum Action {
@@ -64,9 +65,12 @@ impl Message {
 
 #[async_trait]
 pub trait Handler: Send + Sync {
-    async fn select_latest_backfill(&self, symbol: String) -> Option<crate::types::Backfill>;
-    async fn delete_backfills(&self, symbol: &[String]);
-    async fn delete_data(&self, symbol: &[String]);
+    async fn select_latest_backfill(
+        &self,
+        symbol: String,
+    ) -> Result<Option<Backfill>, clickhouse::error::Error>;
+    async fn delete_backfills(&self, symbol: &[String]) -> Result<(), clickhouse::error::Error>;
+    async fn delete_data(&self, symbol: &[String]) -> Result<(), clickhouse::error::Error>;
     async fn queue_backfill(&self, symbol: &str, fetch_to: OffsetDateTime);
     async fn backfill(&self, symbol: String, fetch_from: OffsetDateTime, fetch_to: OffsetDateTime);
     fn log_string(&self) -> &'static str;
@@ -111,13 +115,14 @@ async fn handle_backfill_message(
         backfill_jobs.insert(
             symbol.clone(),
             spawn(async move {
-                let fetch_from = handler
+                let fetch_from = match handler
                     .select_latest_backfill(symbol.clone())
                     .await
-                    .as_ref()
-                    .map_or(OffsetDateTime::UNIX_EPOCH, |backfill| {
-                        backfill.time + ONE_SECOND
-                    });
+                    .unwrap()
+                {
+                    Some(latest_backfill) => latest_backfill.time + ONE_SECOND,
+                    None => OffsetDateTime::UNIX_EPOCH,
+                };
 
                 let fetch_to = last_minute();
 
@@ -142,10 +147,11 @@ async fn handle_backfill_message(
                 }
             }
 
-            join!(
+            try_join!(
                 handler.delete_backfills(&message.symbols),
                 handler.delete_data(&message.symbols)
-            );
+            )
+            .unwrap();
         }
     }
 
@@ -153,10 +159,10 @@ async fn handle_backfill_message(
 }
 
 struct BarHandler {
-    app_config: Arc<Config>,
+    config: Arc<Config>,
     data_url: &'static str,
     api_query_constructor: fn(
-        app_config: &Arc<Config>,
+        config: &Arc<Config>,
         symbol: String,
         fetch_from: OffsetDateTime,
         fetch_to: OffsetDateTime,
@@ -165,7 +171,7 @@ struct BarHandler {
 }
 
 fn us_equity_query_constructor(
-    app_config: &Arc<Config>,
+    config: &Arc<Config>,
     symbol: String,
     fetch_from: OffsetDateTime,
     fetch_to: OffsetDateTime,
@@ -179,7 +185,7 @@ fn us_equity_query_constructor(
         limit: Some(10000),
         adjustment: None,
         asof: None,
-        feed: Some(app_config.alpaca_source),
+        feed: Some(config.alpaca_source),
         currency: None,
         page_token: next_page_token,
         sort: Some(Sort::Asc),
@@ -206,30 +212,33 @@ fn crypto_query_constructor(
 
 #[async_trait]
 impl Handler for BarHandler {
-    async fn select_latest_backfill(&self, symbol: String) -> Option<crate::types::Backfill> {
+    async fn select_latest_backfill(
+        &self,
+        symbol: String,
+    ) -> Result<Option<Backfill>, clickhouse::error::Error> {
         database::backfills::select_latest_where_symbol(
-            &self.app_config.clickhouse_client,
+            &self.config.clickhouse_client,
             &database::backfills::Table::Bars,
             &symbol,
         )
         .await
     }
 
-    async fn delete_backfills(&self, symbols: &[String]) {
+    async fn delete_backfills(&self, symbols: &[String]) -> Result<(), clickhouse::error::Error> {
         database::backfills::delete_where_symbols(
-            &self.app_config.clickhouse_client,
+            &self.config.clickhouse_client,
             &database::backfills::Table::Bars,
             symbols,
         )
-        .await;
+        .await
     }
 
-    async fn delete_data(&self, symbols: &[String]) {
-        database::bars::delete_where_symbols(&self.app_config.clickhouse_client, symbols).await;
+    async fn delete_data(&self, symbols: &[String]) -> Result<(), clickhouse::error::Error> {
+        database::bars::delete_where_symbols(&self.config.clickhouse_client, symbols).await
     }
 
     async fn queue_backfill(&self, symbol: &str, fetch_to: OffsetDateTime) {
-        if self.app_config.alpaca_source == Source::Iex {
+        if self.config.alpaca_source == Source::Iex {
             let run_delay = duration_until(fetch_to + FIFTEEN_MINUTES + ONE_MINUTE);
             info!("Queing bar backfill for {} in {:?}.", symbol, run_delay);
             sleep(run_delay).await;
@@ -243,18 +252,23 @@ impl Handler for BarHandler {
         let mut next_page_token = None;
 
         loop {
-            let message = alpaca::api::incoming::bar::get_historical(
-                &self.app_config,
+            let Ok(message) = alpaca::api::incoming::bar::get_historical(
+                &self.config,
                 self.data_url,
                 &(self.api_query_constructor)(
-                    &self.app_config,
+                    &self.config,
                     symbol.clone(),
                     fetch_from,
                     fetch_to,
                     next_page_token.clone(),
                 ),
+                None,
             )
-            .await;
+            .await
+            else {
+                error!("Failed to backfill bars for {}.", symbol);
+                return;
+            };
 
             message.bars.into_iter().for_each(|(symbol, bar_vec)| {
                 for bar in bar_vec {
@@ -274,13 +288,17 @@ impl Handler for BarHandler {
             }
 
             let backfill = bars.last().unwrap().clone().into();
-            database::bars::upsert_batch(&self.app_config.clickhouse_client, bars).await;
+
+            database::bars::upsert_batch(&self.config.clickhouse_client, bars)
+                .await
+                .unwrap();
             database::backfills::upsert(
-                &self.app_config.clickhouse_client,
+                &self.config.clickhouse_client,
                 &database::backfills::Table::Bars,
                 &backfill,
             )
-            .await;
+            .await
+            .unwrap();
 
             info!("Backfilled bars for {}.", symbol);
         }
@@ -291,31 +309,34 @@ impl Handler for BarHandler {
 }
 
 struct NewsHandler {
-    app_config: Arc<Config>,
+    config: Arc<Config>,
 }
 
 #[async_trait]
 impl Handler for NewsHandler {
-    async fn select_latest_backfill(&self, symbol: String) -> Option<crate::types::Backfill> {
+    async fn select_latest_backfill(
+        &self,
+        symbol: String,
+    ) -> Result<Option<Backfill>, clickhouse::error::Error> {
         database::backfills::select_latest_where_symbol(
-            &self.app_config.clickhouse_client,
+            &self.config.clickhouse_client,
             &database::backfills::Table::News,
             &symbol,
         )
         .await
     }
 
-    async fn delete_backfills(&self, symbols: &[String]) {
+    async fn delete_backfills(&self, symbols: &[String]) -> Result<(), clickhouse::error::Error> {
         database::backfills::delete_where_symbols(
-            &self.app_config.clickhouse_client,
+            &self.config.clickhouse_client,
             &database::backfills::Table::News,
             symbols,
        )
-        .await;
+        .await
     }
 
-    async fn delete_data(&self, symbols: &[String]) {
-        database::news::delete_where_symbols(&self.app_config.clickhouse_client, symbols).await;
+    async fn delete_data(&self, symbols: &[String]) -> Result<(), clickhouse::error::Error> {
+        database::news::delete_where_symbols(&self.config.clickhouse_client, symbols).await
     }
 
     async fn queue_backfill(&self, symbol: &str, fetch_to: OffsetDateTime) {
@@ -331,8 +352,8 @@ impl Handler for NewsHandler {
         let mut next_page_token = None;
 
         loop {
-            let message = alpaca::api::incoming::news::get_historical(
-                &self.app_config,
+            let Ok(message) = alpaca::api::incoming::news::get_historical(
+                &self.config,
                 &api::outgoing::news::News {
                     symbols: vec![remove_slash_from_pair(&symbol)],
                     start: Some(fetch_from),
@@ -343,8 +364,13 @@ impl Handler for NewsHandler {
                     page_token: next_page_token.clone(),
                     sort: Some(Sort::Asc),
                 },
+                None,
             )
-            .await;
+            .await
+            else {
+                error!("Failed to backfill news for {}.", symbol);
+                return;
+            };
 
             message.news.into_iter().for_each(|news_item| {
                 news.push(News::from(news_item));
@@ -366,23 +392,19 @@ impl Handler for NewsHandler {
                 .map(|news| format!("{}\n\n{}", news.headline, news.content))
                 .collect::<Vec<_>>();
 
-            let predictions = join_all(
-                inputs
-                    .chunks(self.app_config.max_bert_inputs)
-                    .map(|inputs| {
-                        let sequence_classifier = self.app_config.sequence_classifier.clone();
-                        async move {
-                            let sequence_classifier = sequence_classifier.lock().await;
-                            block_in_place(|| {
-                                sequence_classifier
-                                    .predict(inputs.iter().map(String::as_str).collect::<Vec<_>>())
-                                    .into_iter()
-                                    .map(|label| Prediction::try_from(label).unwrap())
-                                    .collect::<Vec<_>>()
-                            })
-                        }
-                    }),
-            )
+            let predictions = join_all(inputs.chunks(self.config.max_bert_inputs).map(|inputs| {
+                let sequence_classifier = self.config.sequence_classifier.clone();
+                async move {
+                    let sequence_classifier = sequence_classifier.lock().await;
+                    block_in_place(|| {
+                        sequence_classifier
+                            .predict(inputs.iter().map(String::as_str).collect::<Vec<_>>())
+                            .into_iter()
+                            .map(|label| Prediction::try_from(label).unwrap())
+                            .collect::<Vec<_>>()
+                    })
+                }
+            }))
             .await
             .into_iter()
             .flatten();
@@ -398,13 +420,17 @@ impl Handler for NewsHandler {
                 .collect::<Vec<_>>();
 
             let backfill = (news.last().unwrap().clone(), symbol.clone()).into();
-            database::news::upsert_batch(&self.app_config.clickhouse_client, news).await;
+
+            database::news::upsert_batch(&self.config.clickhouse_client, news)
+                .await
+                .unwrap();
             database::backfills::upsert(
-                &self.app_config.clickhouse_client,
+                &self.config.clickhouse_client,
                 &database::backfills::Table::News,
                 &backfill,
            )
-            .await;
+            .await
+            .unwrap();
 
             info!("Backfilled news for {}.", symbol);
         }
@@ -414,18 +440,18 @@ impl Handler for NewsHandler {
     }
 }
 
-pub fn create_handler(thread_type: ThreadType, app_config: Arc<Config>) -> Box<dyn Handler> {
+pub fn create_handler(thread_type: ThreadType, config: Arc<Config>) -> Box<dyn Handler> {
     match thread_type {
         ThreadType::Bars(Class::UsEquity) => Box::new(BarHandler {
-            app_config,
+            config,
             data_url: ALPACA_STOCK_DATA_URL,
             api_query_constructor: us_equity_query_constructor,
         }),
         ThreadType::Bars(Class::Crypto) => Box::new(BarHandler {
-            app_config,
+            config,
             data_url: ALPACA_CRYPTO_DATA_URL,
             api_query_constructor: crypto_query_constructor,
        }),
-        ThreadType::News => Box::new(NewsHandler { app_config }),
+        ThreadType::News => Box::new(NewsHandler { config }),
     }
 }
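
The recurring change in the backfill loops is the switch to a `let Ok(...) = ... else { ... }` binding: when the Alpaca request fails, the handler now logs the failure and returns early instead of assuming a value is present. Below is a minimal, self-contained sketch of that control flow (illustrative only, not the project's code; fetch_page, the fail flag, and the "AAPL" symbol are made up, and eprintln! stands in for the log::error! call used in the diff):

async fn fetch_page(fail: bool) -> Result<Vec<u64>, String> {
    // Hypothetical stand-in for a fallible API call such as get_historical.
    if fail { Err("request failed".into()) } else { Ok(vec![1, 2, 3]) }
}

async fn backfill(symbol: &str, fail: bool) {
    // `let ... else` requires the else arm to diverge; here it logs and returns.
    let Ok(bars) = fetch_page(fail).await else {
        eprintln!("Failed to backfill bars for {}.", symbol);
        return;
    };
    println!("fetched {} bars for {}", bars.len(), symbol);
}

#[tokio::main]
async fn main() {
    backfill("AAPL", false).await; // prints the fetched count
    backfill("AAPL", true).await;  // reports the failure and returns early
}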
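
The join! to try_join! change serves the same goal for the two ClickHouse deletes: tokio::try_join! drives both futures concurrently and short-circuits with the first Err, yielding a single Result that the commit then unwraps rather than discarding either error. A minimal sketch under the same caveat (delete_backfills and delete_data here are hypothetical stand-ins, not the project's database functions):

async fn delete_backfills(ok: bool) -> Result<(), String> {
    if ok { Ok(()) } else { Err("delete_backfills failed".into()) }
}

async fn delete_data(ok: bool) -> Result<(), String> {
    if ok { Ok(()) } else { Err("delete_data failed".into()) }
}

#[tokio::main]
async fn main() {
    // Both succeed: the combined result is Ok(((), ())).
    tokio::try_join!(delete_backfills(true), delete_data(true)).unwrap();

    // One fails: the error is surfaced instead of being silently dropped.
    assert!(tokio::try_join!(delete_backfills(true), delete_data(false)).is_err());
}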