Merge live & historical handlers

Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>
2023-09-10 16:56:27 +03:00
parent 8a88d58192
commit 687fbb909f
20 changed files with 231 additions and 217 deletions
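
Merges the standalone historical backfill into the websocket market-data handler: data::historical and data::live collapse into a single data::market module, and unused derives are pruned from the type definitions. As a rough sketch of the merged entry point (names taken verbatim from the main.rs hunk below), each asset class now gets one spawned task:

    threads.push(spawn(data::market::run(
        app_config.clone(),
        Class::UsEquity,
        asset_broadcast_sender.clone(),
    )));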

View File

@@ -1,147 +0,0 @@
-use crate::{
-    config::{Config, ALPACA_CRYPTO_DATA_URL, ALPACA_STOCK_DATA_URL, ALPACA_TIMESTAMP_FORMAT},
-    database,
-    time::{next_minute, ONE_MINUTE},
-    types::{api::incoming, Asset, Bar, Class},
-};
-use http::StatusCode;
-use indexmap::IndexMap;
-use log::{error, info};
-use std::{collections::HashMap, sync::Arc};
-use time::OffsetDateTime;
-use tokio::{sync::RwLock, task::spawn_blocking, time::sleep};
-
-pub async fn backfill(
-    app_config: Arc<Config>,
-    asset: Asset,
-    backfilled: Arc<RwLock<HashMap<String, bool>>>,
-) {
-    info!("Backfilling historical data for {}...", asset.symbol);
-
-    let task_run_offsetdatetime = next_minute() + app_config.alpaca_historical_offset;
-    let fetch_from = asset.timestamp_last + ONE_MINUTE;
-    let fetch_until = task_run_offsetdatetime - app_config.alpaca_historical_offset - ONE_MINUTE;
-    if fetch_from > fetch_until {
-        return;
-    }
-
-    let mut current_time = fetch_from;
-    let asset_clone = asset.clone();
-    let mut bars = spawn_blocking(move || {
-        let mut bars = IndexMap::new();
-        while current_time <= fetch_until {
-            bars.insert(
-                current_time,
-                Bar::empty(current_time, asset_clone.symbol.clone()),
-            );
-            current_time += ONE_MINUTE;
-        }
-        bars
-    })
-    .await
-    .unwrap();
-
-    let wait_duration = task_run_offsetdatetime - OffsetDateTime::now_utc();
-    if wait_duration.is_positive() {
-        sleep(wait_duration.unsigned_abs()).await;
-    }
-
-    let mut next_page_token = None;
-    loop {
-        let request = app_config
-            .alpaca_client
-            .get(match asset.class {
-                Class::UsEquity => ALPACA_STOCK_DATA_URL,
-                Class::Crypto => ALPACA_CRYPTO_DATA_URL,
-            })
-            .query(&[
-                ("symbols", &asset.symbol),
-                ("timeframe", &String::from("1Min")),
-                (
-                    "start",
-                    &fetch_from
-                        .format(ALPACA_TIMESTAMP_FORMAT)
-                        .unwrap()
-                        .to_string(),
-                ),
-                (
-                    "end",
-                    &fetch_until
-                        .format(ALPACA_TIMESTAMP_FORMAT)
-                        .unwrap()
-                        .to_string(),
-                ),
-                ("limit", &String::from("10000")),
-                ("page_token", &next_page_token.clone().unwrap_or_default()),
-            ]);
-
-        app_config.alpaca_rate_limit.until_ready().await;
-        let response = request.send().await.unwrap();
-
-        let mut response = if response.status() == StatusCode::OK {
-            response.json::<incoming::bar::Message>().await.unwrap()
-        } else {
-            error!(
-                "Failed to backfill historical data for {} from {} to {}: {}",
-                asset.symbol,
-                fetch_from,
-                fetch_until,
-                response.text().await.unwrap()
-            );
-            break;
-        };
-
-        for bar in response
-            .bars
-            .remove(&asset.symbol)
-            .unwrap_or_default()
-            .unwrap_or_default()
-        {
-            bars.insert(bar.timestamp, Bar::from((bar, asset.symbol.clone())));
-        }
-
-        if response.next_page_token.is_none() {
-            break;
-        }
-        next_page_token = response.next_page_token;
-    }
-
-    let bars = bars.into_values().collect::<Vec<Bar>>();
-
-    let transaction = app_config.postgres_pool.begin().await.unwrap();
-
-    database::bars::upsert_batch(&app_config.postgres_pool, &bars, true).await;
-    database::assets::update_timestamp_last_where_symbol(
-        &app_config.postgres_pool,
-        &asset.symbol,
-        &fetch_until,
-    )
-    .await;
-
-    backfill_recent_nulls(&app_config, &asset, &fetch_until, &backfilled).await;
-
-    transaction.commit().await.unwrap();
-
-    info!("Backfilled historical data for {}.", asset.symbol);
-}
-
-#[allow(clippy::significant_drop_tightening)]
-async fn backfill_recent_nulls(
-    app_config: &Arc<Config>,
-    asset: &Asset,
-    from: &OffsetDateTime,
-    backfilled: &Arc<RwLock<HashMap<String, bool>>>,
-) {
-    let mut backfilled = backfilled.write().await;
-
-    let bars = database::bars::select_where_symbol_where_timestamp_larger_than(
-        &app_config.postgres_pool,
-        &asset.symbol,
-        from,
-    )
-    .await;
-
-    database::bars::upsert_batch(&app_config.postgres_pool, &bars, true).await;
-    database::assets::update_timestamp_last_where_symbol(
-        &app_config.postgres_pool,
-        &asset.symbol,
-        &bars.last().unwrap().timestamp,
-    )
-    .await;
-
-    backfilled.insert(asset.symbol.clone(), true);
-}

View File

@@ -1,12 +1,14 @@
 use crate::{
-    config::{Config, ALPACA_CRYPTO_WEBSOCKET_URL, ALPACA_STOCK_WEBSOCKET_URL},
-    data::historical::backfill,
+    config::{
+        Config, ALPACA_CRYPTO_DATA_URL, ALPACA_CRYPTO_WEBSOCKET_URL, ALPACA_STOCK_DATA_URL,
+        ALPACA_STOCK_WEBSOCKET_URL, ALPACA_TIMESTAMP_FORMAT,
+    },
     database,
-    time::{duration_until, last_minute, next_30s, ONE_MINUTE, THIRTY_SECONDS},
+    time::{duration_until, last_minute, next_30s, next_minute, ONE_MINUTE, THIRTY_SECONDS},
     types::{
-        asset,
-        websocket::{incoming, outgoing},
-        Bar, BroadcastMessage, Class,
+        api,
+        asset::{self, Asset},
+        websocket, Bar, BroadcastMessage, Class,
     },
 };
 use core::panic;
@@ -14,6 +16,8 @@ use futures_util::{
     stream::{SplitSink, SplitStream},
     SinkExt, StreamExt,
 };
+use http::StatusCode;
+use indexmap::IndexMap;
 use log::{error, info, warn};
 use serde_json::{from_str, to_string};
 use std::{
@@ -21,6 +25,7 @@ use std::{
     sync::Arc,
     time::Instant,
 };
+use time::OffsetDateTime;
 use tokio::{
     net::TcpStream,
     spawn,
@@ -28,7 +33,8 @@ use tokio::{
         broadcast::{Receiver, Sender},
         RwLock,
     },
-    time::interval_at,
+    task::spawn_blocking,
+    time::{interval_at, sleep},
 };
 use tokio_tungstenite::{connect_async, tungstenite::Message, MaybeTlsStream, WebSocketStream};
@@ -84,18 +90,24 @@ async fn authenticate_websocket(
 ) {
     match stream.next().await {
         Some(Ok(Message::Text(data)))
-            if from_str::<Vec<incoming::Message>>(&data).unwrap().get(0)
-                == Some(&incoming::Message::Success(incoming::success::Message {
-                    msg: incoming::success::MessageType::Connected,
-                })) => {}
+            if from_str::<Vec<websocket::incoming::Message>>(&data)
+                .unwrap()
+                .get(0)
+                == Some(&websocket::incoming::Message::Success(
+                    websocket::incoming::success::Message {
+                        msg: websocket::incoming::success::MessageType::Connected,
+                    },
+                )) => {}
         _ => panic!(),
     }
 
     sink.send(Message::Text(
-        to_string(&outgoing::Message::Auth(outgoing::auth::Message::new(
-            app_config.alpaca_api_key.clone(),
-            app_config.alpaca_api_secret.clone(),
-        )))
+        to_string(&websocket::outgoing::Message::Auth(
+            websocket::outgoing::auth::Message::new(
+                app_config.alpaca_api_key.clone(),
+                app_config.alpaca_api_secret.clone(),
+            ),
+        ))
         .unwrap(),
     ))
     .await
@@ -103,10 +115,14 @@ async fn authenticate_websocket(
     match stream.next().await {
         Some(Ok(Message::Text(data)))
-            if from_str::<Vec<incoming::Message>>(&data).unwrap().get(0)
-                == Some(&incoming::Message::Success(incoming::success::Message {
-                    msg: incoming::success::MessageType::Authenticated,
-                })) => {}
+            if from_str::<Vec<websocket::incoming::Message>>(&data)
+                .unwrap()
+                .get(0)
+                == Some(&websocket::incoming::Message::Success(
+                    websocket::incoming::success::Message {
+                        msg: websocket::incoming::success::MessageType::Authenticated,
+                    },
+                )) => {}
         _ => panic!(),
     };
 }
@@ -124,8 +140,8 @@ async fn websocket_broadcast_handler(
                 sink.write()
                     .await
                     .send(Message::Text(
-                        serde_json::to_string(&outgoing::Message::Subscribe(
-                            outgoing::subscribe::Message::new(asset.clone().symbol),
+                        serde_json::to_string(&websocket::outgoing::Message::Subscribe(
+                            websocket::outgoing::subscribe::Message::new(asset.clone().symbol),
                         ))
                         .unwrap(),
                     ))
@@ -138,8 +154,8 @@ async fn websocket_broadcast_handler(
                 sink.write()
                     .await
                     .send(Message::Text(
-                        serde_json::to_string(&outgoing::Message::Unsubscribe(
-                            outgoing::subscribe::Message::new(asset.clone().symbol),
+                        serde_json::to_string(&websocket::outgoing::Message::Unsubscribe(
+                            websocket::outgoing::subscribe::Message::new(asset.clone().symbol),
                         ))
                         .unwrap(),
                     ))
@@ -161,13 +177,13 @@ async fn websocket_message_handler(
     loop {
         match stream.next().await {
             Some(Ok(Message::Text(data))) => {
-                let parsed_data = from_str::<Vec<incoming::Message>>(&data);
+                let parsed_data = from_str::<Vec<websocket::incoming::Message>>(&data);
                 if let Err(e) = &parsed_data {
-                    warn!("Unparsed incoming message: {:?}: {}", data, e);
+                    warn!("Unparsed websocket::incoming message: {:?}: {}", data, e);
                 }
 
                 for message in parsed_data.unwrap_or_default() {
-                    handle_message(&app_config, class, message, &backfilled).await;
+                    websocket_handle_text_message(&app_config, class, message, &backfilled).await;
                 }
             }
             Some(Ok(Message::Ping(_))) => sink
@@ -176,20 +192,20 @@ async fn websocket_message_handler(
                 .send(Message::Pong(vec![]))
                 .await
                 .unwrap(),
-            Some(unknown) => error!("Unknown incoming message: {:?}", unknown),
+            Some(unknown) => error!("Unknown websocket::incoming message: {:?}", unknown),
             None => panic!(),
         }
     }
 }
 
-async fn handle_message(
+async fn websocket_handle_text_message(
     app_config: &Arc<Config>,
     class: Class,
-    message: incoming::Message,
+    message: websocket::incoming::Message,
     backfilled: &Arc<RwLock<HashMap<String, bool>>>,
 ) {
     match message {
-        incoming::Message::Subscription(subscription_message) => {
+        websocket::incoming::Message::Subscription(subscription_message) => {
             let old_assets = backfilled
                 .read()
                 .await
@@ -227,9 +243,12 @@ async fn handle_message(
                 class, added_assets, deleted_assets
             );
         }
-        incoming::Message::Bars(bar_message) => {
+        websocket::incoming::Message::Bars(bar_message) => {
             let bar = Bar::from(bar_message);
-            info!("Incoming bar for {}: {}", bar.asset_symbol, bar.timestamp);
+            info!(
+                "websocket::Incoming bar for {}: {}",
+                bar.asset_symbol, bar.timestamp
+            );
 
             database::bars::upsert(
                 &app_config.postgres_pool,
                 &bar,
@@ -237,9 +256,12 @@ async fn handle_message(
             )
             .await;
         }
-        incoming::Message::UpdatedBars(bar_message) => {
+        websocket::incoming::Message::UpdatedBars(bar_message) => {
             let bar = Bar::from(bar_message);
-            info!("Incoming bar for {}: {}", bar.asset_symbol, bar.timestamp);
+            info!(
+                "websocket::Incoming bar for {}: {}",
+                bar.asset_symbol, bar.timestamp
+            );
 
             let transaction = app_config.postgres_pool.begin().await.unwrap();
             let backfilled_asset_symbol = backfilled.read().await[&bar.asset_symbol];
@@ -254,18 +276,18 @@ async fn handle_message(
             }
             transaction.commit().await.unwrap();
         }
-        incoming::Message::Success(_) => {}
+        websocket::incoming::Message::Success(_) => {}
     }
 }
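+
+// Hoisted from inside null_handler to module scope; selects whether the 30-second
+// task handles the Bars or the UpdatedBars window next.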
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum NullHandlerState {
+    Bars,
+    UpdatedBars,
+}
+
 #[allow(clippy::significant_drop_in_scrutinee)]
 async fn null_handler(app_config: Arc<Config>, backfilled: Arc<RwLock<HashMap<String, bool>>>) {
-    #[derive(PartialEq)]
-    enum NullHandlerState {
-        Bars,
-        UpdatedBars,
-    }
-
     let next_30s = next_30s();
     let mut state = if next_30s.unix_timestamp() % 30 == 0 {
         NullHandlerState::Bars
@@ -309,3 +331,146 @@ async fn null_handler(app_config: Arc<Config>, backfilled: Arc<RwLock<HashMap<St
         };
     }
 }
+
+pub async fn backfill(
+    app_config: Arc<Config>,
+    asset: Asset,
+    backfilled: Arc<RwLock<HashMap<String, bool>>>,
+) {
+    info!("Backfilling historical data for {}...", asset.symbol);
+
+    let task_run_offsetdatetime = next_minute() + app_config.alpaca_historical_offset;
+    let fetch_from = asset.timestamp_last + ONE_MINUTE;
+    let fetch_until = task_run_offsetdatetime - app_config.alpaca_historical_offset - ONE_MINUTE;
+    if fetch_from > fetch_until {
+        return;
+    }
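+
+    // Sleep until the scheduled run time so the historical endpoint has data through fetch_until.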
+    let wait_duration = task_run_offsetdatetime - OffsetDateTime::now_utc();
+    if wait_duration.is_positive() {
+        sleep(wait_duration.unsigned_abs()).await;
+    }
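+
+    // Fetch the missing window, persist it, and advance the asset's timestamp_last watermark.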
+    let bars = backfill_bars_from_api(&app_config, &asset, fetch_from, fetch_until).await;
+
+    let transaction = app_config.postgres_pool.begin().await.unwrap();
+    database::bars::upsert_batch(&app_config.postgres_pool, &bars, true).await;
+    database::assets::update_timestamp_last_where_symbol(
+        &app_config.postgres_pool,
+        &asset.symbol,
+        &fetch_until,
+    )
+    .await;
+    derive_recent_nulls(&app_config, &asset, &fetch_until, &backfilled).await;
+    transaction.commit().await.unwrap();
+
+    info!("Backfilled historical data for {}.", asset.symbol);
+}
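+
+// Pre-fill one empty bar per minute; fetched data overwrites these, so untraded
+// minutes remain as explicit null bars.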
+fn generate_per_minute_bars(
+    from: OffsetDateTime,
+    until: OffsetDateTime,
+    asset: &Asset,
+) -> IndexMap<OffsetDateTime, Bar> {
+    let mut bars = IndexMap::new();
+    let mut current_time = from;
+    while current_time <= until {
+        bars.insert(current_time, Bar::empty(current_time, asset.symbol.clone()));
+        current_time += ONE_MINUTE;
+    }
+    bars
+}
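+
+// Fill the placeholder map with real bars from Alpaca's historical data API.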
+async fn backfill_bars_from_api(
+    app_config: &Arc<Config>,
+    asset: &Asset,
+    from: OffsetDateTime,
+    until: OffsetDateTime,
+) -> Vec<Bar> {
+    let asset_clone = asset.clone();
+    let mut bars = spawn_blocking(move || generate_per_minute_bars(from, until, &asset_clone))
+        .await
+        .unwrap();
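+
+    // Page through the endpoint, merging each page of bars over the placeholders.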
+    let mut next_page_token = None;
+    loop {
+        let request = app_config
+            .alpaca_client
+            .get(match asset.class {
+                Class::UsEquity => ALPACA_STOCK_DATA_URL,
+                Class::Crypto => ALPACA_CRYPTO_DATA_URL,
+            })
+            .query(&[
+                ("symbols", &asset.symbol),
+                ("timeframe", &String::from("1Min")),
+                (
+                    "start",
+                    &from.format(ALPACA_TIMESTAMP_FORMAT).unwrap().to_string(),
+                ),
+                (
+                    "end",
+                    &until.format(ALPACA_TIMESTAMP_FORMAT).unwrap().to_string(),
+                ),
+                ("limit", &String::from("10000")),
+                ("page_token", &next_page_token.clone().unwrap_or_default()),
+            ]);
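+
+        // Block on the shared rate limiter before sending each paginated request.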
+        app_config.alpaca_rate_limit.until_ready().await;
+        let response = request.send().await.unwrap();
+
+        let mut response = if response.status() == StatusCode::OK {
+            response
+                .json::<api::incoming::bar::Message>()
+                .await
+                .unwrap()
+        } else {
+            error!(
+                "Failed to backfill historical data for {} from {} to {}: {}",
+                asset.symbol,
+                from,
+                until,
+                response.text().await.unwrap()
+            );
+            break;
+        };
+
+        for bar in response
+            .bars
+            .remove(&asset.symbol)
+            .unwrap_or_default()
+            .unwrap_or_default()
+        {
+            bars.insert(bar.timestamp, Bar::from((bar, asset.symbol.clone())));
+        }
+
+        if response.next_page_token.is_none() {
+            break;
+        }
+        next_page_token = response.next_page_token;
+    }
+
+    bars.into_values().collect::<Vec<Bar>>()
+}
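+
+// Replay bars newer than the backfill window through the upsert path, then mark
+// the asset as backfilled so live writes can proceed.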
+#[allow(clippy::significant_drop_tightening)]
+async fn derive_recent_nulls(
+    app_config: &Arc<Config>,
+    asset: &Asset,
+    from: &OffsetDateTime,
+    backfilled: &Arc<RwLock<HashMap<String, bool>>>,
+) {
+    let mut backfilled = backfilled.write().await;
+
+    let bars = database::bars::select_where_symbol_where_timestamp_larger_than(
+        &app_config.postgres_pool,
+        &asset.symbol,
+        from,
+    )
+    .await;
+
+    database::bars::upsert_batch(&app_config.postgres_pool, &bars, true).await;
+    database::assets::update_timestamp_last_where_symbol(
+        &app_config.postgres_pool,
+        &asset.symbol,
+        &bars.last().unwrap().timestamp,
+    )
+    .await;
+
+    backfilled.insert(asset.symbol.clone(), true);
+}

View File

@@ -1,2 +1 @@
-pub mod historical;
-pub mod live;
+pub mod market;

View File

@@ -24,13 +24,13 @@ async fn main() -> Result<(), BoxDynError> {
     let (asset_broadcast_sender, _) = broadcast::channel::<BroadcastMessage>(100);
 
-    threads.push(spawn(data::live::run(
+    threads.push(spawn(data::market::run(
         app_config.clone(),
         Class::UsEquity,
         asset_broadcast_sender.clone(),
     )));
 
-    threads.push(spawn(data::live::run(
+    threads.push(spawn(data::market::run(
         app_config.clone(),
         Class::Crypto,
         asset_broadcast_sender.clone(),

View File

@@ -2,7 +2,7 @@ use serde::Deserialize;
 use std::collections::HashMap;
 use time::OffsetDateTime;
 
-#[derive(Debug, PartialEq, Deserialize)]
+#[derive(Deserialize)]
 pub struct Bar {
     #[serde(rename = "t")]
     #[serde(with = "time::serde::rfc3339")]
@@ -23,7 +23,7 @@ pub struct Bar {
     pub volume_weighted: f64,
 }
 
-#[derive(Debug, PartialEq, Deserialize)]
+#[derive(Deserialize)]
 pub struct Message {
     pub bars: HashMap<String, Option<Vec<Bar>>>,
     pub next_page_token: Option<String>,

View File

@@ -1,7 +1,7 @@
 use serde::{Deserialize, Deserializer};
 use time::{macros::format_description, Date, Time};
 
-#[derive(Debug, PartialEq, Eq, Deserialize)]
+#[derive(Deserialize)]
 pub struct CalendarDate {
     #[serde(deserialize_with = "deserialize_date")]
     pub date: Date,

View File

@@ -1,9 +1,9 @@
 use super::{api::incoming, class::Class, exchange::Exchange};
-use serde::{Deserialize, Serialize};
+use serde::Serialize;
 use sqlx::FromRow;
 use time::OffsetDateTime;
 
-#[derive(Clone, Debug, PartialEq, Eq, FromRow, Serialize, Deserialize, Hash)]
+#[derive(Clone, Debug, FromRow, Serialize)]
 pub struct Asset {
     pub symbol: String,
     pub class: Class,
@@ -28,10 +28,9 @@ impl From<(incoming::Asset, bool, OffsetDateTime)> for Asset {
     }
 }
 
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Clone, Debug)]
 pub enum BroadcastMessage {
     Added(Asset),
     Updated(Asset),
     Deleted(Asset),
-    Reset(Asset),
 }

View File

@@ -1,9 +1,9 @@
 use super::{api, websocket};
-use serde::{Deserialize, Serialize};
+use serde::Serialize;
 use sqlx::FromRow;
 use time::OffsetDateTime;
 
-#[derive(Clone, Debug, PartialEq, FromRow, Serialize, Deserialize)]
+#[derive(Clone, Debug, FromRow, Serialize)]
 pub struct Bar {
     pub timestamp: OffsetDateTime,
     pub asset_symbol: String,

View File

@@ -1,7 +1,7 @@
 use serde::{Deserialize, Serialize};
 use sqlx::Type;
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Type, Hash)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Type)]
 pub enum Class {
     #[sqlx(rename = "us_equity")]
     #[serde(rename = "us_equity")]

View File

@@ -1,7 +1,7 @@
 use serde::{Deserialize, Serialize};
 use sqlx::Type;
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Type, Hash)]
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, Type)]
 pub enum Exchange {
     #[sqlx(rename = "AMEX")]
     #[serde(rename = "AMEX")]

View File

@@ -14,9 +14,7 @@ pub use exchange::Exchange;
 pub use source::Source;
 pub use status::Status;
 
-use serde::{Deserialize, Serialize};
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Clone, Debug)]
 pub enum BroadcastMessage {
     Asset(asset::BroadcastMessage),
 }

View File

@@ -3,7 +3,7 @@ use std::{
     str::FromStr,
 };
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug)]
 pub enum Source {
     Iex,
     Sip,

View File

@@ -1,7 +1,7 @@
-use serde::{Deserialize, Serialize};
+use serde::Deserialize;
 use sqlx::Type;
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Type)]
+#[derive(PartialEq, Eq, Deserialize, Type)]
 pub enum Status {
     #[sqlx(rename = "active")]
     #[serde(rename = "active")]

View File

@@ -1,7 +1,7 @@
 use serde::Deserialize;
 use time::OffsetDateTime;
 
-#[derive(Debug, PartialEq, Deserialize)]
+#[derive(PartialEq, Deserialize)]
 pub struct Message {
     #[serde(rename = "t")]
     #[serde(with = "time::serde::rfc3339")]

View File

@@ -4,7 +4,7 @@ pub mod success;
 use serde::Deserialize;
 
-#[derive(Debug, Deserialize, PartialEq)]
+#[derive(PartialEq, Deserialize)]
 #[serde(tag = "T")]
 pub enum Message {
     #[serde(rename = "success")]

View File

@@ -1,6 +1,6 @@
 use serde::Deserialize;
 
-#[derive(Debug, PartialEq, Eq, Deserialize)]
+#[derive(PartialEq, Eq, Deserialize)]
 pub struct Message {
     pub trades: Vec<String>,
     pub quotes: Vec<String>,

View File

@@ -1,6 +1,6 @@
 use serde::Deserialize;
 
-#[derive(Debug, PartialEq, Eq, Deserialize)]
+#[derive(PartialEq, Eq, Deserialize)]
 pub enum MessageType {
     #[serde(rename = "connected")]
     Connected,
@@ -8,7 +8,7 @@ pub enum MessageType {
     Authenticated,
 }
 
-#[derive(Debug, PartialEq, Eq, Deserialize)]
+#[derive(PartialEq, Eq, Deserialize)]
 pub struct Message {
     pub msg: MessageType,
 }

View File

@@ -1,6 +1,6 @@
 use serde::Serialize;
 
-#[derive(Debug, Serialize)]
+#[derive(Serialize)]
 pub struct Message {
     key: String,
     secret: String,

View File

@@ -3,7 +3,7 @@ pub mod subscribe;
 use serde::Serialize;
 
-#[derive(Debug, Serialize)]
+#[derive(Serialize)]
 #[serde(tag = "action")]
 pub enum Message {
     #[serde(rename = "auth")]

View File

@@ -1,6 +1,6 @@
 use serde::Serialize;
 
-#[derive(Debug, Serialize)]
+#[derive(Serialize)]
 pub struct Message {
     bars: Vec<String>,
     #[serde(rename = "updatedBars")]