use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, ItemEnum, Lit, DeriveInput, Fields, Data};
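
/// Derives a `Queryable` impl that sends a GET request to the URL given in
/// `#[http_get(url = "...")]`, turning every field whose name starts with
/// `lnk_p_` into a query parameter.
///
/// A minimal usage sketch; the struct name, URL, and field are illustrative,
/// and `Queryable`/`Response` are assumed to be defined in the consuming crate:
///
/// ```ignore
/// #[derive(HttpRequest)]
/// #[http_get(url = "https://api.example.com/v2/assets")]
/// struct AssetQuery {
///     // Serialized as `?symbol=...` (the `lnk_p_` prefix is stripped).
///     lnk_p_symbol: String,
/// }
///
/// let response = AssetQuery { lnk_p_symbol: "AAPL".into() }.send(None)?;
/// ```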
#[proc_macro_derive(HttpRequest, attributes(http_get))]
pub fn derive_http_get_request(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = &input.ident;

    // Extract base_url from #[http_get(url = "...")]
    let mut base_url_lit = None;
    for attr in &input.attrs {
        if attr.path().is_ident("http_get") {
            let _ = attr.parse_nested_meta(|meta| {
                if meta.path.is_ident("url") {
                    let value: Lit = meta.value()?.parse()?;
                    if let Lit::Str(litstr) = value {
                        base_url_lit = Some(litstr);
                    }
                }
                Ok(())
            });
        }
    }

    let base_url_lit = base_url_lit.expect("Missing #[http_get(url = \"...\")] attribute");

    let expanded = match &input.data {
        Data::Struct(data_struct) => {
            let fields = match &data_struct.fields {
                Fields::Named(named) => &named.named,
                _ => panic!("#[derive(HttpRequest)] only supports structs with named fields"),
            };

            let query_param_code: Vec<_> = fields.iter().filter_map(|field| {
                let ident = field.ident.clone().unwrap();
                let field_name = ident.to_string();
                if field_name.starts_with("lnk_p_") {
                    let key = &field_name["lnk_p_".len()..];
                    Some(quote! {
                        query_params.push((#key.to_string(), self.#ident.to_string()));
                    })
                } else {
                    None
                }
            }).collect();

            quote! {
                impl Queryable for #name {
                    fn send(
                        &self,
                        headers: Option<Vec<(&str, &str)>>,
                    ) -> Result<Response, std::io::Error> {
                        use urlencoding::encode;
                        use awc::Client;

                        let mut query_params: Vec<(String, String)> = Vec::new();
                        #(#query_param_code)*

                        let mut url = #base_url_lit.to_string();
                        if !query_params.is_empty() {
                            let query_parts: Vec<String> = query_params.iter()
                                .map(|(k, v)| format!("{}={}", k, encode(v)))
                                .collect();
                            url.push('?');
                            url.push_str(&query_parts.join("&"));
                        }

                        let client = Client::default();
                        let mut request = client.get(url);

                        if let Some(hdrs) = headers {
                            for (k, v) in hdrs {
                                request = request.append_header((k, v));
                            }
                        }

                        // `ClientResponse::body()` takes `&mut self`, so bind the response mutably.
                        let mut response = actix_web::rt::System::new()
                            .block_on(async { request.send().await })
                            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;

                        let body_bytes = actix_web::rt::System::new()
                            .block_on(async { response.body().await })
                            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;

                        let body = Response::receive(body_bytes.to_vec())
                            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;

                        Ok(body)
                    }
                }
            }
        }

        Data::Enum(data_enum) => {
            let variant_arms: Vec<_> = data_enum.variants.iter().map(|variant| {
                let vname = &variant.ident;
                match &variant.fields {
                    Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
                        quote! {
                            #name::#vname(inner) => inner.send(headers.clone()),
                        }
                    }
                    _ => panic!("#[derive(HttpRequest)] enum variants must have a single unnamed field"),
                }
            }).collect();

            quote! {
                impl Queryable for #name {
                    fn send(
                        &self,
                        headers: Option<Vec<(&str, &str)>>,
                    ) -> Result<Response, std::io::Error> {
                        match self {
                            #(#variant_arms)*
                        }
                    }
                }
            }
        }

        _ => panic!("#[derive(HttpRequest)] only supports structs and enums"),
    };

    TokenStream::from(expanded)
}
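
/// Derives a `Responsable` impl that deserializes an HTTP client response body
/// as JSON into the annotated type (for enums, into the first tuple variant
/// whose inner type parses successfully).
///
/// A minimal usage sketch; the type name and field are illustrative, and the
/// `Responsable` trait is assumed to be defined in the consuming crate:
///
/// ```ignore
/// #[derive(HttpResponse, serde::Deserialize)]
/// struct AssetResponse {
///     symbol: String,
/// }
/// ```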
#[proc_macro_derive(HttpResponse)]
pub fn derive_http_response(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = &input.ident;

    let expanded = match &input.data {
        Data::Struct(_) => {
            quote! {
                impl Responsable for #name {
                    fn receive(mut resp: awc::ClientResponse) -> Result<Self, Box<dyn std::error::Error>> {
                        // `json()` borrows the response mutably while it reads the body.
                        let parsed = actix_web::rt::System::new()
                            .block_on(async { resp.json::<#name>().await })
                            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;
                        Ok(parsed)
                    }
                }
            }
        }

        Data::Enum(enum_data) => {
            let match_arms = enum_data.variants.iter().filter_map(|v| {
                let variant_name = &v.ident;
                match &v.fields {
                    syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
                        let inner_ty = &fields.unnamed.first().unwrap().ty;
                        Some(quote! {
                            if let Ok(inner) = <#inner_ty as Responsable>::receive(resp) {
                                return Ok(#name::#variant_name(inner));
                            }
                        })
                    }
                    _ => None,
                }
            });

            quote! {
                impl Responsable for #name {
                    fn receive(resp: awc::ClientResponse) -> Result<Self, Box<dyn std::error::Error>> {
                        // Try each tuple variant's inner type in order; the first
                        // successful parse wins.
                        #(#match_arms)*

                        Err(std::io::Error::new(
                            std::io::ErrorKind::Other,
                            format!("No matching variant found for {}", stringify!(#name)),
                        ).into())
                    }
                }
            }
        }

        _ => panic!("#[derive(HttpResponse)] only supports structs and enums with tuple variants"),
    };

    TokenStream::from(expanded)
}
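
/// Adds an inherent `send_vec` method that sends every item in a `Vec<Self>`
/// sequentially and collects the responses.
///
/// A minimal usage sketch; `Query`, `client`, and the key are illustrative, and
/// the deriving type is assumed to already provide the async
/// `send(client, headers, api_key)` method that the expansion calls:
///
/// ```ignore
/// let responses = Query::send_vec(queries, client.clone(), None, Some(&api_key)).await?;
/// ```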
#[proc_macro_derive(SendVec)]
pub fn derive_send_vec(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = &input.ident;

    let expanded = quote! {
        impl #name {
            /// Sends all items in the vec sequentially, awaiting each.
            pub async fn send_vec(
                items: Vec<Self>,
                client: std::sync::Arc<awc::Client>,
                headers: Option<Vec<(&str, &str)>>,
                api_key: Option<&str>,
            ) -> Result<Vec<awc::ClientResponse>, awc::error::SendRequestError> {
                let mut responses = Vec::with_capacity(items.len());
                for item in items {
                    let resp = item.send(client.clone(), headers.clone(), api_key).await?;
                    responses.push(resp);
                }
                Ok(responses)
            }
        }
    };

    TokenStream::from(expanded)
}
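
/// Adds an inherent `response_vec` method that JSON-decodes a batch of client
/// responses into `Vec<Self>`.
///
/// A minimal usage sketch; `Asset` and `responses` are illustrative, and `Self`
/// must implement `serde::de::DeserializeOwned`:
///
/// ```ignore
/// let assets: Vec<Asset> = Asset::response_vec(responses).await?;
/// ```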
#[proc_macro_derive(ResponseVec)]
pub fn derive_response_vec(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = &input.ident;

    let expanded = quote! {
        impl #name {
            /// Deserializes all responses sequentially into a Vec<Self>.
            /// Assumes `Self` implements `DeserializeOwned`.
            pub async fn response_vec(
                responses: Vec<awc::ClientResponse>,
            ) -> Result<Vec<Self>, Box<dyn std::error::Error>>
            where
                Self: Sized + serde::de::DeserializeOwned,
            {
                let mut results = Vec::with_capacity(responses.len());
                // `json()` needs a mutable response while it reads the body.
                for mut resp in responses {
                    let item = resp.json::<Self>().await?;
                    results.push(item);
                }
                Ok(results)
            }
        }
    };

    TokenStream::from(expanded)
}
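
/// Wraps a command enum in a `StructOpt` derive and generates a `main` that
/// dispatches each variant, with a special `Bulk { input }` variant that reads
/// a JSON array of queries from a file or stdin and fans it out across threads.
///
/// A minimal usage sketch; the variant and request types are illustrative. Note
/// that the generated bulk path deserializes into `Vec<Query>`, so the
/// annotated enum is expected to be named `Query`:
///
/// ```ignore
/// #[alpaca_cli]
/// enum Query {
///     Asset(AssetRequest),
///     Bulk { input: Option<std::path::PathBuf> },
/// }
/// ```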
#[proc_macro_attribute]
pub fn alpaca_cli(_attr: TokenStream, item: TokenStream) -> TokenStream {
    let input_enum = parse_macro_input!(item as ItemEnum);
    let enum_name = &input_enum.ident;
    let variants = &input_enum.variants;

    // Match arms for regular command variants (Single, Asset, etc.)
    let regular_arms = variants.iter().filter_map(|v| {
        let v_name = &v.ident;

        // Skip the Bulk variant; it is handled separately below.
        if v_name == "Bulk" {
            return None;
        }

        Some(quote! {
            #enum_name::#v_name(req) => {
                // `body()` takes `&mut self`, so bind the response mutably.
                let mut res = req.send(client.clone(), &api_key).await?;
                let body = res.body().await?;
                println!("{}", std::str::from_utf8(&body)?);
            }
        })
    });

    let expanded = quote! {
        #[derive(structopt::StructOpt, Debug)]
        #input_enum

        #[tokio::main]
        async fn main() -> Result<(), Box<dyn std::error::Error>> {
            use structopt::StructOpt;
            use std::fs::File;
            use std::io::{BufReader, Read};
            use std::sync::Arc;
            use std::thread;

            const THREADS: usize = 4;

            // Initialize shared HTTP client and API key
            let client = Arc::new(awc::Client::default());
            let api_key = std::env::var("APCA_API_KEY_ID")?;
            let cmd = #enum_name::from_args();

            match cmd {
                #(#regular_arms)*

                #enum_name::Bulk { input } => {
                    // Choose input source: file or stdin
                    let mut reader: Box<dyn Read> = match input {
                        Some(path) => Box::new(File::open(path)?),
                        None => Box::new(std::io::stdin()),
                    };

                    // Read input JSON into buffer
                    let mut buf = String::new();
                    reader.read_to_string(&mut buf)?;

                    // Deserialize into Vec<Query>
                    let queries: Vec<Query> = serde_json::from_str(&buf)?;
                    let total = queries.len();

                    if total == 0 {
                        eprintln!("No queries provided.");
                        return Ok(());
                    }

                    let shared_queries = Arc::new(queries);
                    let shared_key = Arc::new(api_key);

                    let per_thread = total / THREADS;

                    let mut handles = Vec::with_capacity(THREADS);
                    for i in 0..THREADS {
                        let queries_clone = Arc::clone(&shared_queries);
                        let client_clone = Arc::clone(&client);
                        let key_clone = Arc::clone(&shared_key);

                        let start_index = i * per_thread;
                        let end_index = if i == THREADS - 1 {
                            total // Last thread gets the remainder
                        } else {
                            start_index + per_thread
                        };

                        let handle = thread::spawn(move || {
                            let rt = tokio::runtime::Runtime::new().expect("Failed to create runtime");
                            for idx in start_index..end_index {
                                let query = &queries_clone[idx];
                                let send_result = rt.block_on(query.send(client_clone.clone(), &key_clone));

                                match send_result {
                                    Ok(mut response) => {
                                        // `body()` takes `&mut self`, so bind the response mutably.
                                        let body_result = rt.block_on(response.body());
                                        match body_result {
                                            Ok(body) => println!("{}", String::from_utf8_lossy(&body)),
                                            Err(e) => eprintln!("Error reading response body: {:?}", e),
                                        }
                                    }
                                    Err(e) => {
                                        eprintln!("Request failed: {:?}", e);
                                    }
                                }
                            }
                        });

                        handles.push(handle);
                    }

                    // Wait for all threads to complete
                    for handle in handles {
                        handle.join().expect("A thread panicked");
                    }
                }
            }

            Ok(())
        }
    };

    TokenStream::from(expanded)
}