Refactor threads to use trait implementations
Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>
@@ -12,6 +12,7 @@ use crate::{
     },
     utils::{duration_until, last_minute, remove_slash_from_pair, FIFTEEN_MINUTES, ONE_MINUTE},
 };
+use async_trait::async_trait;
 use backoff::{future::retry, ExponentialBackoff};
 use futures_util::future::join_all;
 use log::{error, info, warn};
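Editor's note: plain `async fn` in a trait cannot be used through a trait object, which is why this commit pulls in the `async_trait` macro. A minimal, self-contained sketch of the pattern (names here are hypothetical, not from this commit; deps assumed: async-trait, tokio):

use async_trait::async_trait;
use std::collections::HashMap;

#[async_trait]
trait Fetcher: Send + Sync {
    // The macro rewrites this to return Pin<Box<dyn Future<Output = Option<String>> + Send + '_>>,
    // which is what makes `Box<dyn Fetcher>` usable below.
    async fn fetch(&self, key: String) -> Option<String>;
}

struct MapFetcher(HashMap<String, String>);

#[async_trait]
impl Fetcher for MapFetcher {
    async fn fetch(&self, key: String) -> Option<String> {
        self.0.get(&key).cloned()
    }
}

#[tokio::main]
async fn main() {
    let fetcher: Box<dyn Fetcher> =
        Box::new(MapFetcher(HashMap::from([("AAPL".into(), "Apple Inc.".into())])));
    assert_eq!(fetcher.fetch("AAPL".into()).await.as_deref(), Some("Apple Inc."));
}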
@@ -49,28 +50,29 @@ impl Message {
     }
 }
 
+#[async_trait]
+pub trait Handler: Send + Sync {
+    async fn select_latest_backfill(&self, symbol: String) -> Option<crate::types::Backfill>;
+    async fn delete_backfills(&self, symbols: &[String]);
+    async fn delete_data(&self, symbols: &[String]);
+    async fn queue_backfill(&self, symbol: &str, fetch_to: OffsetDateTime);
+    async fn backfill(&self, symbol: String, fetch_from: OffsetDateTime, fetch_to: OffsetDateTime);
+    fn log_string(&self) -> &'static str;
+}
+
 pub async fn run(
-    app_config: Arc<Config>,
-    thread_type: ThreadType,
+    handler: Arc<Box<dyn Handler>>,
     guard: Arc<RwLock<Guard>>,
-    mut backfill_receiver: mpsc::Receiver<Message>,
+    mut receiver: mpsc::Receiver<Message>,
 ) {
     let backfill_jobs = Arc::new(Mutex::new(HashMap::new()));
 
-    let data_url = match thread_type {
-        ThreadType::Bars(Class::UsEquity) => ALPACA_STOCK_DATA_URL.to_string(),
-        ThreadType::Bars(Class::Crypto) => ALPACA_CRYPTO_DATA_URL.to_string(),
-        ThreadType::News => ALPACA_NEWS_DATA_URL.to_string(),
-    };
-
     loop {
-        let message = backfill_receiver.recv().await.unwrap();
+        let message = receiver.recv().await.unwrap();
 
         spawn(handle_backfill_message(
-            app_config.clone(),
-            thread_type,
+            handler.clone(),
            guard.clone(),
-            data_url.clone(),
             backfill_jobs.clone(),
             message,
         ));
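Editor's note: since `run` now only sees the `Handler` trait, the loop can be exercised with any stand-in implementation. A hypothetical no-op handler (not part of this commit) illustrates the contract:

struct NoopHandler;

#[async_trait]
impl Handler for NoopHandler {
    async fn select_latest_backfill(&self, _symbol: String) -> Option<crate::types::Backfill> {
        None // pretend nothing has been backfilled yet
    }
    async fn delete_backfills(&self, _symbols: &[String]) {}
    async fn delete_data(&self, _symbols: &[String]) {}
    async fn queue_backfill(&self, _symbol: &str, _fetch_to: OffsetDateTime) {}
    async fn backfill(&self, symbol: String, fetch_from: OffsetDateTime, fetch_to: OffsetDateTime) {
        log::info!("would backfill {} from {} to {}", symbol, fetch_from, fetch_to);
    }
    fn log_string(&self) -> &'static str {
        "noop"
    }
}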
@@ -80,10 +82,8 @@ pub async fn run(
 #[allow(clippy::significant_drop_tightening)]
 #[allow(clippy::too_many_lines)]
 async fn handle_backfill_message(
-    app_config: Arc<Config>,
-    thread_type: ThreadType,
+    handler: Arc<Box<dyn Handler>>,
     guard: Arc<RwLock<Guard>>,
-    data_url: String,
     backfill_jobs: Arc<Mutex<HashMap<String, JoinHandle<()>>>>,
     message: Message,
 ) {
@@ -109,50 +109,40 @@ async fn handle_backfill_message(
 
     match message.action {
         Action::Backfill => {
+            let log_string = handler.log_string();
+
             for symbol in symbols {
                 if let Some(job) = backfill_jobs.get(&symbol) {
                     if !job.is_finished() {
                         warn!(
-                            "{:?} - Backfill for {} is already running, skipping.",
-                            thread_type, symbol
+                            "Backfill for {} {} is already running, skipping.",
+                            symbol, log_string
                         );
                         continue;
                     }
                 }
 
-                let app_config = app_config.clone();
-                let data_url = data_url.clone();
-
+                let handler = handler.clone();
                 backfill_jobs.insert(
                     symbol.clone(),
                     spawn(async move {
-                        let (fetch_from, fetch_to) =
-                            queue_backfill(&app_config, thread_type, &symbol).await;
+                        let fetch_from = handler
+                            .select_latest_backfill(symbol.clone())
+                            .await
+                            .as_ref()
+                            .map_or(OffsetDateTime::UNIX_EPOCH, |backfill| {
+                                backfill.time + ONE_MINUTE
+                            });
 
-                        match thread_type {
-                            ThreadType::Bars(_) => {
-                                execute_backfill_bars(
-                                    app_config,
-                                    thread_type,
-                                    data_url,
-                                    symbol,
-                                    fetch_from,
-                                    fetch_to,
-                                )
-                                .await;
-                            }
-                            ThreadType::News => {
-                                execute_backfill_news(
-                                    app_config,
-                                    thread_type,
-                                    data_url,
-                                    symbol,
-                                    fetch_from,
-                                    fetch_to,
-                                )
-                                .await;
-                            }
-                        }
+                        let fetch_to = last_minute();
+
+                        if fetch_from > fetch_to {
+                            info!("No need to backfill {} {}.", symbol, log_string);
+                            return;
+                        }
+
+                        handler.queue_backfill(&symbol, fetch_to).await;
+                        handler.backfill(symbol, fetch_from, fetch_to).await;
                     }),
                 );
             }
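Editor's note: the window arithmetic above is the core of the incremental behaviour: resume one minute after the last recorded backfill (or from the epoch on first run), stop at the last whole minute, and skip when already caught up. A self-contained sketch of the same computation, assuming `last_minute()` truncates to the most recent whole minute (dep assumed: time 0.3):

use time::{Duration, OffsetDateTime};

const ONE_MINUTE: Duration = Duration::minutes(1);

// Assumed behaviour of the project's `last_minute()`: truncate to the last whole minute.
fn last_minute() -> OffsetDateTime {
    let now = OffsetDateTime::now_utc();
    now - Duration::seconds(i64::from(now.second())) - Duration::nanoseconds(i64::from(now.nanosecond()))
}

// Returns the inclusive window to fetch, or None when already caught up.
fn fetch_window(latest_backfill: Option<OffsetDateTime>) -> Option<(OffsetDateTime, OffsetDateTime)> {
    let fetch_from = latest_backfill.map_or(OffsetDateTime::UNIX_EPOCH, |t| t + ONE_MINUTE);
    let fetch_to = last_minute();
    (fetch_from <= fetch_to).then_some((fetch_from, fetch_to))
}

fn main() {
    // First run: everything since the epoch is in scope.
    assert!(fetch_window(None).is_some());
    // A backfill recorded "now" leaves nothing to do until the next minute ticks over.
    assert!(fetch_window(Some(OffsetDateTime::now_utc())).is_none());
}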
@@ -167,263 +157,326 @@ async fn handle_backfill_message(
             }
         }
 
-            let backfills_future = database::backfills::delete_where_symbols(
-                &app_config.clickhouse_client,
-                &thread_type,
-                &symbols,
+            join!(
+                handler.delete_backfills(&symbols),
+                handler.delete_data(&symbols)
             );
-
-            let data_future = async {
-                match thread_type {
-                    ThreadType::Bars(_) => {
-                        database::bars::delete_where_symbols(
-                            &app_config.clickhouse_client,
-                            &symbols,
-                        )
-                        .await;
-                    }
-                    ThreadType::News => {
-                        database::news::delete_where_symbols(
-                            &app_config.clickhouse_client,
-                            &symbols,
-                        )
-                        .await;
-                    }
-                }
-            };
-
-            join!(backfills_future, data_future);
         }
     }
 
     message.response.send(()).unwrap();
 }
 
-async fn queue_backfill(
-    app_config: &Arc<Config>,
+pub fn create_backfill_handler(
     thread_type: ThreadType,
-    symbol: &String,
-) -> (OffsetDateTime, OffsetDateTime) {
-    let latest_backfill = database::backfills::select_latest_where_symbol(
-        &app_config.clickhouse_client,
-        &thread_type,
-        &symbol,
-    )
-    .await;
+    app_config: Arc<Config>,
+) -> Box<dyn Handler> {
+    match thread_type {
+        ThreadType::Bars(Class::UsEquity) => Box::new(BarHandler {
+            app_config,
+            data_url: ALPACA_STOCK_DATA_URL,
+            api_query_constructor: us_equity_query_constructor,
+        }),
+        ThreadType::Bars(Class::Crypto) => Box::new(BarHandler {
+            app_config,
+            data_url: ALPACA_CRYPTO_DATA_URL,
+            api_query_constructor: crypto_query_constructor,
+        }),
+        ThreadType::News => Box::new(NewsHandler { app_config }),
+    }
+}
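Editor's note: with the factory in place, callers pick an implementation once and hand `run` a trait object. A hypothetical call site, assuming the spawning code elsewhere in the crate passes the arguments `run` expects after this refactor:

// Hypothetical wiring; names and types mirror the signatures in this diff.
fn spawn_backfill_thread(
    thread_type: ThreadType,
    app_config: Arc<Config>,
    guard: Arc<RwLock<Guard>>,
    receiver: mpsc::Receiver<Message>,
) {
    let handler = Arc::new(create_backfill_handler(thread_type, app_config));
    tokio::spawn(run(handler, guard, receiver));
}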
 
-    let fetch_from = latest_backfill
-        .as_ref()
-        .map_or(OffsetDateTime::UNIX_EPOCH, |backfill| {
-            backfill.time + ONE_MINUTE
-        });
+struct BarHandler {
+    app_config: Arc<Config>,
+    data_url: &'static str,
+    api_query_constructor: fn(
+        app_config: &Arc<Config>,
+        symbol: String,
+        fetch_from: OffsetDateTime,
+        fetch_to: OffsetDateTime,
+        next_page_token: Option<String>,
+    ) -> api::outgoing::bar::Bar,
+}
 
-    let fetch_to = last_minute();
+fn us_equity_query_constructor(
+    app_config: &Arc<Config>,
+    symbol: String,
+    fetch_from: OffsetDateTime,
+    fetch_to: OffsetDateTime,
+    next_page_token: Option<String>,
+) -> api::outgoing::bar::Bar {
+    api::outgoing::bar::Bar::UsEquity {
+        symbols: vec![symbol],
+        timeframe: ONE_MINUTE,
+        start: Some(fetch_from),
+        end: Some(fetch_to),
+        limit: Some(10000),
+        adjustment: None,
+        asof: None,
+        feed: Some(app_config.alpaca_source),
+        currency: None,
+        page_token: next_page_token,
+        sort: Some(Sort::Asc),
+    }
+}
 
-    if app_config.alpaca_source == Source::Iex {
+fn crypto_query_constructor(
+    _: &Arc<Config>,
+    symbol: String,
+    fetch_from: OffsetDateTime,
+    fetch_to: OffsetDateTime,
+    next_page_token: Option<String>,
+) -> api::outgoing::bar::Bar {
+    api::outgoing::bar::Bar::Crypto {
+        symbols: vec![symbol],
+        timeframe: ONE_MINUTE,
+        start: Some(fetch_from),
+        end: Some(fetch_to),
+        limit: Some(10000),
+        page_token: next_page_token,
+        sort: Some(Sort::Asc),
+    }
+}
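Editor's note: `BarHandler` injects the query builder as a plain `fn` pointer rather than a second trait, which keeps the struct `Send + Sync` for free (fn pointers are `Copy`, `Send`, and `Sync`). A minimal illustration of the pattern, with hypothetical names:

struct Greeter {
    // A plain function pointer: no boxing, no extra trait.
    make_greeting: fn(&str) -> String,
}

fn english(name: &str) -> String {
    format!("Hello, {name}!")
}

fn main() {
    let g = Greeter { make_greeting: english };
    // Parentheses around the field access are required to call it,
    // as with `(self.api_query_constructor)(...)` in the impl below.
    assert_eq!((g.make_greeting)("world"), "Hello, world!");
}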
 
+#[async_trait]
+impl Handler for BarHandler {
+    async fn select_latest_backfill(&self, symbol: String) -> Option<crate::types::Backfill> {
+        database::backfills::select_latest_where_symbol(
+            &self.app_config.clickhouse_client,
+            &database::backfills::Table::Bars,
+            &symbol,
+        )
+        .await
+    }
+
+    async fn delete_backfills(&self, symbols: &[String]) {
+        database::backfills::delete_where_symbols(
+            &self.app_config.clickhouse_client,
+            &database::backfills::Table::Bars,
+            symbols,
+        )
+        .await;
+    }
+
+    async fn delete_data(&self, symbols: &[String]) {
+        database::bars::delete_where_symbols(&self.app_config.clickhouse_client, symbols).await;
+    }
+
+    async fn queue_backfill(&self, symbol: &str, fetch_to: OffsetDateTime) {
+        if self.app_config.alpaca_source == Source::Iex {
+            let run_delay = duration_until(fetch_to + FIFTEEN_MINUTES + ONE_MINUTE);
+            info!("Queuing bar backfill for {} in {:?}.", symbol, run_delay);
+            sleep(run_delay).await;
+        }
+    }
+
+    async fn backfill(&self, symbol: String, fetch_from: OffsetDateTime, fetch_to: OffsetDateTime) {
+        info!("Backfilling bars for {}.", symbol);
+
+        let mut bars = Vec::new();
+        let mut next_page_token = None;
+
+        loop {
+            let message = retry(ExponentialBackoff::default(), || async {
+                self.app_config.alpaca_rate_limit.until_ready().await;
+                self.app_config
+                    .alpaca_client
+                    .get(self.data_url)
+                    .query(&(self.api_query_constructor)(
+                        &self.app_config,
+                        symbol.clone(),
+                        fetch_from,
+                        fetch_to,
+                        next_page_token.clone(),
+                    ))
+                    .send()
+                    .await?
+                    .error_for_status()?
+                    .json::<api::incoming::bar::Message>()
+                    .await
+                    .map_err(backoff::Error::Permanent)
+            })
+            .await;
+
+            let message = match message {
+                Ok(message) => message,
+                Err(e) => {
+                    error!("Failed to backfill bars for {}: {}.", symbol, e);
+                    return;
+                }
+            };
+
+            message.bars.into_iter().for_each(|(symbol, bar_vec)| {
+                for bar in bar_vec {
+                    bars.push(Bar::from((bar, symbol.clone())));
+                }
+            });
+
+            if message.next_page_token.is_none() {
+                break;
+            }
+            next_page_token = message.next_page_token;
+        }
+
+        if bars.is_empty() {
+            info!("No bars to backfill for {}.", symbol);
+            return;
+        }
+
+        let backfill = bars.last().unwrap().clone().into();
+        database::bars::upsert_batch(&self.app_config.clickhouse_client, bars).await;
+        database::backfills::upsert(
+            &self.app_config.clickhouse_client,
+            &database::backfills::Table::Bars,
+            &backfill,
+        )
+        .await;
+
+        info!("Backfilled bars for {}.", symbol);
+    }
+
+    fn log_string(&self) -> &'static str {
+        "bars"
+    }
+}
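Editor's note: one detail worth noting in the loop above: `?` on `send()` and `error_for_status()` converts request errors into *transient* backoff errors (retried with growing delay), while the JSON decode error is wrapped as `backoff::Error::Permanent` (aborts immediately). A stripped-down sketch of that split, assuming the backoff 0.4 API with its tokio feature enabled:

use backoff::{future::retry, ExponentialBackoff};
use std::sync::atomic::{AtomicU32, Ordering};

#[derive(Debug)]
struct FlakyError(&'static str);

impl std::fmt::Display for FlakyError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl std::error::Error for FlakyError {}

#[tokio::main]
async fn main() {
    let attempts = AtomicU32::new(0);

    let result: Result<u32, FlakyError> = retry(ExponentialBackoff::default(), || async {
        match attempts.fetch_add(1, Ordering::SeqCst) {
            // Transient: retried with a growing delay, like a failed `send()` above.
            0 | 1 => Err(backoff::Error::transient(FlakyError("network hiccup"))),
            // A decode failure would instead be `Err(backoff::Error::Permanent(e))`,
            // which stops retrying immediately.
            n => Ok(n),
        }
    })
    .await;

    assert_eq!(result.unwrap(), 2); // succeeded on the third try
}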
 
+struct NewsHandler {
+    app_config: Arc<Config>,
+}
+
+#[async_trait]
+impl Handler for NewsHandler {
+    async fn select_latest_backfill(&self, symbol: String) -> Option<crate::types::Backfill> {
+        database::backfills::select_latest_where_symbol(
+            &self.app_config.clickhouse_client,
+            &database::backfills::Table::News,
+            &symbol,
+        )
+        .await
+    }
+
+    async fn delete_backfills(&self, symbols: &[String]) {
+        database::backfills::delete_where_symbols(
+            &self.app_config.clickhouse_client,
+            &database::backfills::Table::News,
+            symbols,
+        )
+        .await;
+    }
+
+    async fn delete_data(&self, symbols: &[String]) {
+        database::news::delete_where_symbols(&self.app_config.clickhouse_client, symbols).await;
+    }
+
+    async fn queue_backfill(&self, symbol: &str, fetch_to: OffsetDateTime) {
+        let run_delay = duration_until(fetch_to + FIFTEEN_MINUTES + ONE_MINUTE);
-        info!(
-            "{:?} - Queing backfill for {} in {:?}.",
-            thread_type, symbol, run_delay
-        );
+        info!("Queuing news backfill for {} in {:?}.", symbol, run_delay);
+        sleep(run_delay).await;
+    }
-
-    (fetch_from, fetch_to)
-}
-
-async fn execute_backfill_bars(
-    app_config: Arc<Config>,
-    thread_type: ThreadType,
-    data_url: String,
-    symbol: String,
-    fetch_from: OffsetDateTime,
-    fetch_to: OffsetDateTime,
-) {
-    if fetch_from > fetch_to {
-        return;
-    }
-
-    info!("{:?} - Backfilling data for {}.", thread_type, symbol);
-
-    let mut bars = Vec::new();
-    let mut next_page_token = None;
-
-    loop {
-        let message = retry(ExponentialBackoff::default(), || async {
-            app_config.alpaca_rate_limit.until_ready().await;
-            app_config
-                .alpaca_client
-                .get(&data_url)
-                .query(&match thread_type {
-                    ThreadType::Bars(Class::UsEquity) => api::outgoing::bar::Bar::UsEquity {
-                        symbols: vec![symbol.clone()],
-                        timeframe: ONE_MINUTE,
-                        start: Some(fetch_from),
-                        end: Some(fetch_to),
-                        limit: Some(10000),
-                        adjustment: None,
-                        asof: None,
-                        feed: Some(app_config.alpaca_source),
-                        currency: None,
-                        page_token: next_page_token.clone(),
-                        sort: Some(Sort::Asc),
-                    },
-                    ThreadType::Bars(Class::Crypto) => api::outgoing::bar::Bar::Crypto {
-                        symbols: vec![symbol.clone()],
-                        timeframe: ONE_MINUTE,
-                        start: Some(fetch_from),
-                        end: Some(fetch_to),
-                        limit: Some(10000),
-                        page_token: next_page_token.clone(),
-                        sort: Some(Sort::Asc),
-                    },
-                    _ => unreachable!(),
-                })
-                .send()
-                .await?
-                .error_for_status()?
-                .json::<api::incoming::bar::Message>()
-                .await
-                .map_err(backoff::Error::Permanent)
-        })
-        .await;
-
-        let message = match message {
-            Ok(message) => message,
-            Err(e) => {
-                error!(
-                    "{:?} - Failed to backfill data for {}: {}.",
-                    thread_type, symbol, e
-                );
-                return;
-            }
-        };
-
-        message.bars.into_iter().for_each(|(symbol, bar_vec)| {
-            for bar in bar_vec {
-                bars.push(Bar::from((bar, symbol.clone())));
-            }
-        });
-
-        if message.next_page_token.is_none() {
-            break;
-        }
-        next_page_token = message.next_page_token;
-    }
-
-    if bars.is_empty() {
-        return;
-    }
-
-    let backfill = bars.last().unwrap().clone().into();
-    database::bars::upsert_batch(&app_config.clickhouse_client, bars).await;
-    database::backfills::upsert(&app_config.clickhouse_client, &thread_type, &backfill).await;
-
-    info!("{:?} - Backfilled data for {}.", thread_type, symbol);
-}
-
-async fn execute_backfill_news(
-    app_config: Arc<Config>,
-    thread_type: ThreadType,
-    data_url: String,
-    symbol: String,
-    fetch_from: OffsetDateTime,
-    fetch_to: OffsetDateTime,
-) {
-    if fetch_from > fetch_to {
-        return;
-    }
-
-    info!("{:?} - Backfilling data for {}.", thread_type, symbol);
-
-    let mut news = Vec::new();
-    let mut next_page_token = None;
-
-    loop {
-        let message = retry(ExponentialBackoff::default(), || async {
-            app_config.alpaca_rate_limit.until_ready().await;
-            app_config
-                .alpaca_client
-                .get(&data_url)
-                .query(&api::outgoing::news::News {
-                    symbols: vec![remove_slash_from_pair(&symbol)],
-                    start: Some(fetch_from),
-                    end: Some(fetch_to),
-                    limit: Some(50),
-                    include_content: Some(true),
-                    exclude_contentless: Some(false),
-                    page_token: next_page_token.clone(),
-                    sort: Some(Sort::Asc),
-                })
-                .send()
-                .await?
-                .error_for_status()?
-                .json::<api::incoming::news::Message>()
-                .await
-                .map_err(backoff::Error::Permanent)
-        })
-        .await;
-
-        let message = match message {
-            Ok(message) => message,
-            Err(e) => {
-                error!(
-                    "{:?} - Failed to backfill data for {}: {}.",
-                    thread_type, symbol, e
-                );
-                return;
-            }
-        };
-
-        message.news.into_iter().for_each(|news_item| {
-            news.push(News::from(news_item));
-        });
-
-        if message.next_page_token.is_none() {
-            break;
-        }
-        next_page_token = message.next_page_token;
-    }
-
-    if news.is_empty() {
-        return;
-    }
-
-    let inputs = news
-        .iter()
-        .map(|news| format!("{}\n\n{}", news.headline, news.content))
-        .collect::<Vec<_>>();
-
-    let predictions = join_all(inputs.chunks(app_config.max_bert_inputs).map(|inputs| {
-        let sequence_classifier = app_config.sequence_classifier.clone();
-        async move {
-            let sequence_classifier = sequence_classifier.lock().await;
-            block_in_place(|| {
-                sequence_classifier
-                    .predict(inputs.iter().map(String::as_str).collect::<Vec<_>>())
-                    .into_iter()
-                    .map(|label| Prediction::try_from(label).unwrap())
-                    .collect::<Vec<_>>()
-            })
-        }
-    }))
-    .await
-    .into_iter()
-    .flatten();
-
-    let news = news
-        .into_iter()
-        .zip(predictions)
-        .map(|(news, prediction)| News {
-            sentiment: prediction.sentiment,
-            confidence: prediction.confidence,
-            ..news
-        })
-        .collect::<Vec<_>>();
-
-    let backfill = (news.last().unwrap().clone(), symbol.clone()).into();
-    database::news::upsert_batch(&app_config.clickhouse_client, news).await;
-    database::backfills::upsert(&app_config.clickhouse_client, &thread_type, &backfill).await;
-
-    info!("{:?} - Backfilled data for {}.", thread_type, symbol);
-}
 
+    async fn backfill(&self, symbol: String, fetch_from: OffsetDateTime, fetch_to: OffsetDateTime) {
+        info!("Backfilling news for {}.", symbol);
+
+        let mut news = Vec::new();
+        let mut next_page_token = None;
+
+        loop {
+            let message = retry(ExponentialBackoff::default(), || async {
+                self.app_config.alpaca_rate_limit.until_ready().await;
+                self.app_config
+                    .alpaca_client
+                    .get(ALPACA_NEWS_DATA_URL)
+                    .query(&api::outgoing::news::News {
+                        symbols: vec![remove_slash_from_pair(&symbol)],
+                        start: Some(fetch_from),
+                        end: Some(fetch_to),
+                        limit: Some(50),
+                        include_content: Some(true),
+                        exclude_contentless: Some(false),
+                        page_token: next_page_token.clone(),
+                        sort: Some(Sort::Asc),
+                    })
+                    .send()
+                    .await?
+                    .error_for_status()?
+                    .json::<api::incoming::news::Message>()
+                    .await
+                    .map_err(backoff::Error::Permanent)
+            })
+            .await;
+
+            let message = match message {
+                Ok(message) => message,
+                Err(e) => {
+                    error!("Failed to backfill news for {}: {}.", symbol, e);
+                    return;
+                }
+            };
+
+            message.news.into_iter().for_each(|news_item| {
+                news.push(News::from(news_item));
+            });
+
+            if message.next_page_token.is_none() {
+                break;
+            }
+            next_page_token = message.next_page_token;
+        }
+
+        if news.is_empty() {
+            info!("No news to backfill for {}.", symbol);
+            return;
+        }
+
+        let inputs = news
+            .iter()
+            .map(|news| format!("{}\n\n{}", news.headline, news.content))
+            .collect::<Vec<_>>();
+
+        let predictions = join_all(
+            inputs
+                .chunks(self.app_config.max_bert_inputs)
+                .map(|inputs| {
+                    let sequence_classifier = self.app_config.sequence_classifier.clone();
+                    async move {
+                        let sequence_classifier = sequence_classifier.lock().await;
+                        block_in_place(|| {
+                            sequence_classifier
+                                .predict(inputs.iter().map(String::as_str).collect::<Vec<_>>())
+                                .into_iter()
+                                .map(|label| Prediction::try_from(label).unwrap())
+                                .collect::<Vec<_>>()
+                        })
+                    }
+                }),
+        )
+        .await
+        .into_iter()
+        .flatten();
+
+        let news = news
+            .into_iter()
+            .zip(predictions)
+            .map(|(news, prediction)| News {
+                sentiment: prediction.sentiment,
+                confidence: prediction.confidence,
+                ..news
+            })
+            .collect::<Vec<_>>();
+
+        let backfill = (news.last().unwrap().clone(), symbol.clone()).into();
+        database::news::upsert_batch(&self.app_config.clickhouse_client, news).await;
+        database::backfills::upsert(
+            &self.app_config.clickhouse_client,
+            &database::backfills::Table::News,
+            &backfill,
+        )
+        .await;
+
+        info!("Backfilled news for {}.", symbol);
+    }
+
+    fn log_string(&self) -> &'static str {
+        "news"
+    }
+}
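Editor's note: the prediction step above fans out over `inputs.chunks(...)` and reassembles results with `join_all` followed by `flatten()`, so predictions line up one-to-one with the news items they came from. A toy version of that shape (no BERT involved, just an uppercasing stand-in for the model; chunk size and names are illustrative; deps assumed: futures-util, tokio):

use futures_util::future::join_all;

#[tokio::main]
async fn main() {
    let inputs: Vec<String> = (1..=5).map(|i| format!("headline {i}")).collect();

    // One future per chunk, each yielding a Vec of per-item results.
    let predictions: Vec<String> = join_all(inputs.chunks(2).map(|chunk| async move {
        chunk
            .iter()
            .map(|s| s.to_uppercase()) // stand-in for sequence_classifier.predict(...)
            .collect::<Vec<_>>()
    }))
    .await
    .into_iter()
    .flatten() // Vec<Vec<_>> -> one flat sequence, in the original order
    .collect();

    // join_all preserves order, so zip pairs each input with its prediction.
    for (input, prediction) in inputs.iter().zip(&predictions) {
        println!("{input} -> {prediction}");
    }
}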