Optimize backfill by avoiding eager allocations

Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>
2024-03-11 20:41:59 +00:00
parent 2d8972dce2
commit d02f958865
2 changed files with 12 additions and 36 deletions


@@ -120,10 +120,7 @@ impl super::Handler for Handler {
         info!("Backfilling bars for {:?}.", symbols);
 
         let mut bars = Vec::with_capacity(database::bars::BATCH_FLUSH_SIZE);
-        let mut last_times = symbols
-            .iter()
-            .map(|symbol| (symbol.clone(), None))
-            .collect::<HashMap<_, _>>();
+        let mut last_times = HashMap::new();
         let mut next_page_token = None;
 
         loop {
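Not part of the commit, but to make the first change concrete: the handler previously pre-seeded `last_times` with a cloned key and a `None` placeholder for every symbol, and now starts from an empty map that is filled lazily. A minimal standalone sketch of the difference (symbol names and the value type are illustrative, not the handler's real ones):

    use std::collections::HashMap;

    fn main() {
        let symbols = vec!["AAPL".to_string(), "MSFT".to_string()];

        // Before: every symbol gets a cloned key and a None placeholder up
        // front, even if no bars ever arrive for it.
        let eager: HashMap<String, Option<i64>> = symbols
            .iter()
            .map(|symbol| (symbol.clone(), None))
            .collect();

        // After: start empty and insert lazily, only for symbols that
        // actually return data.
        let mut lazy: HashMap<String, i64> = HashMap::new();
        lazy.insert("AAPL".to_string(), 1_700_000_000);

        assert_eq!(eager.len(), 2);
        assert_eq!(lazy.len(), 1); // only symbols with data
    }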
@@ -150,7 +147,7 @@ impl super::Handler for Handler {
 
             for (symbol, bar_vec) in message.bars {
                 if let Some(last) = bar_vec.last() {
-                    last_times.insert(symbol.clone(), Some(last.time));
+                    last_times.insert(symbol.clone(), last.time);
                 }
 
                 for bar in bar_vec {
@@ -168,13 +165,8 @@ impl super::Handler for Handler {
                     .unwrap();
 
                 let backfilled = last_times
-                    .into_iter()
-                    .filter_map(|(symbol, time)| {
-                        if let Some(time) = time {
-                            return Some(Backfill { symbol, time });
-                        }
-                        None
-                    })
+                    .drain()
+                    .map(|(symbol, time)| Backfill { symbol, time })
                     .collect::<Vec<_>>();
 
                 database::backfills_bars::upsert_batch(
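Because the lazily filled map only ever holds symbols that actually produced bars, the `Option` handling in `filter_map` becomes dead weight and collapses to a plain `map`. Switching `into_iter()` to `drain()` also empties the map in place while keeping its allocated memory, so the next flush reuses it instead of building a fresh map. An illustrative sketch (the `Backfill` struct here is a stand-in for the real row type):

    use std::collections::HashMap;

    // Stand-in for the real Backfill row type.
    struct Backfill {
        symbol: String,
        time: i64,
    }

    fn main() {
        let mut last_times: HashMap<String, i64> = HashMap::new();
        last_times.insert("AAPL".to_string(), 1_700_000_000);

        // drain() yields owned (K, V) pairs and empties the map in place,
        // keeping the allocated memory for the next loop iteration.
        let backfilled = last_times
            .drain()
            .map(|(symbol, time)| Backfill { symbol, time })
            .collect::<Vec<_>>();

        assert_eq!(backfilled[0].symbol, "AAPL");
        assert_eq!(backfilled[0].time, 1_700_000_000);
        assert!(last_times.is_empty()); // reusable without reallocating
    }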
@@ -190,11 +182,7 @@ impl super::Handler for Handler {
             }
 
             next_page_token = message.next_page_token;
-            bars = Vec::with_capacity(database::bars::BATCH_FLUSH_SIZE);
-            last_times = symbols
-                .iter()
-                .map(|symbol| (symbol.clone(), None))
-                .collect::<HashMap<_, _>>();
+            bars.clear();
         }
     }
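The same reuse idea applies to the batch buffer: `Vec::clear` drops the elements but keeps the backing allocation, whereas the old code allocated a fresh `Vec::with_capacity` on every page. A small demonstration:

    fn main() {
        let mut bars: Vec<u64> = Vec::with_capacity(1024);
        bars.extend(0..1024);

        let ptr_before = bars.as_ptr();
        bars.clear(); // drops the elements, keeps the allocation

        assert_eq!(bars.len(), 0);
        assert_eq!(bars.capacity(), 1024);
        assert_eq!(bars.as_ptr(), ptr_before); // same buffer, no realloc
    }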


@@ -69,23 +69,20 @@ impl super::Handler for Handler {
         sleep(run_delay).await;
     }
 
-    #[allow(clippy::too_many_lines)]
     async fn backfill(&self, jobs: HashMap<String, Job>) {
         if jobs.is_empty() {
             return;
         }
 
         let symbols = jobs.keys().cloned().collect::<Vec<_>>();
+        let symbols_set = symbols.iter().collect::<std::collections::HashSet<_>>();
         let fetch_from = jobs.values().map(|job| job.fetch_from).min().unwrap();
         let fetch_to = jobs.values().map(|job| job.fetch_to).max().unwrap();
 
         info!("Backfilling news for {:?}.", symbols);
 
         let mut news = Vec::with_capacity(database::news::BATCH_FLUSH_SIZE);
-        let mut last_times = symbols
-            .iter()
-            .map(|symbol| (symbol.clone(), None))
-            .collect::<HashMap<_, _>>();
+        let mut last_times = HashMap::new();
         let mut next_page_token = None;
 
         loop {
@@ -115,8 +112,8 @@ impl super::Handler for Handler {
                 let news_item = News::from(news_item);
 
                 for symbol in &news_item.symbols {
-                    if last_times.contains_key(symbol) {
-                        last_times.insert(symbol.clone(), Some(news_item.time_created));
+                    if symbols_set.contains(symbol) {
+                        last_times.insert(symbol.clone(), news_item.time_created);
                     }
                 }
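One consequence of the lazy map shows up only in the news handler: `contains_key` on `last_times` previously doubled as an "is this a tracked symbol?" test, which worked only because every symbol was pre-seeded. The commit therefore introduces `symbols_set`, a borrowed `HashSet` view over `symbols`, and checks membership against that instead. A sketch of the pattern (the symbols and timestamp are made up):

    use std::collections::{HashMap, HashSet};

    fn main() {
        let symbols = vec!["AAPL".to_string(), "MSFT".to_string()];

        // Borrowed view: the set holds &String, so no keys are cloned.
        let symbols_set: HashSet<&String> = symbols.iter().collect();

        let mut last_times: HashMap<String, i64> = HashMap::new();

        // News items may mention symbols outside the backfill job; only
        // tracked symbols get a last-seen time recorded.
        let mentioned = vec!["AAPL".to_string(), "TSLA".to_string()];
        for symbol in &mentioned {
            if symbols_set.contains(symbol) {
                last_times.insert(symbol.clone(), 1_700_000_000);
            }
        }

        assert!(last_times.contains_key("AAPL"));
        assert!(!last_times.contains_key("TSLA")); // not tracked, skipped
    }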
@@ -165,13 +162,8 @@ impl super::Handler for Handler {
                     .unwrap();
 
                 let backfilled = last_times
-                    .into_iter()
-                    .filter_map(|(symbol, time)| {
-                        if let Some(time) = time {
-                            return Some(Backfill { symbol, time });
-                        }
-                        None
-                    })
+                    .drain()
+                    .map(|(symbol, time)| Backfill { symbol, time })
                     .collect::<Vec<_>>();
 
                 database::backfills_news::upsert_batch(
@@ -187,11 +179,7 @@ impl super::Handler for Handler {
             }
 
             next_page_token = message.next_page_token;
-            news = Vec::with_capacity(database::news::BATCH_FLUSH_SIZE);
-            last_times = symbols
-                .iter()
-                .map(|symbol| (symbol.clone(), None))
-                .collect::<HashMap<_, _>>();
+            news.clear();
         }
     }
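Taken together, the steady-state loop now does no per-page bookkeeping allocations: the buffer and the map are each allocated once and recycled via `clear()` and `drain()`. A condensed, self-contained sketch of the resulting shape, with hypothetical `Page`/`fetch` stand-ins for the real paging API:

    use std::collections::HashMap;

    // Hypothetical stand-ins for the real message/paging machinery.
    struct Page {
        bars: Vec<(String, i64)>,
        next_page_token: Option<String>,
    }

    fn fetch(_token: &Option<String>) -> Page {
        Page { bars: vec![("AAPL".to_string(), 1)], next_page_token: None }
    }

    fn main() {
        let mut bars: Vec<(String, i64)> = Vec::with_capacity(1024); // allocated once
        let mut last_times: HashMap<String, i64> = HashMap::new();   // allocated once
        let mut next_page_token: Option<String> = None;

        loop {
            let page = fetch(&next_page_token);
            for (symbol, time) in page.bars {
                last_times.insert(symbol.clone(), time); // lazy: data-bearing symbols only
                bars.push((symbol, time));
            }

            // Flush: drain() and clear() both empty their containers while
            // keeping the allocations for the next iteration.
            let _backfilled: Vec<(String, i64)> = last_times.drain().collect();
            bars.clear();

            next_page_token = page.next_page_token;
            if next_page_token.is_none() {
                break;
            }
        }
    }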