feat: add adclicks example

This commit is contained in:
2025-02-01 00:34:01 +01:00
parent aa1cec9986
commit 50aa9c7a14
8 changed files with 1726 additions and 58 deletions

1237
client-application/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,19 @@
# Manifest for the adclicks example: a CLI that produces synthetic
# ad-click events to a Kafka topic on a fixed schedule.
[package]
name = "client-application"
version = "0.1.0"
edition = "2021"

[dependencies]
# Application-level error handling with context.
anyhow = "1.0.95"
# Timestamps for the generated events (serde feature for serialization).
chrono = { version = "0.4.39", features = ["serde"] }
# Derive-based CLI parsing; "env" allows args to fall back to env vars.
clap = { version = "4.5.27", features = ["derive", "env"] }
# Loads a local .env file at startup (best-effort).
dotenvy = "0.15.7"
# Kafka producer client.
kafka = "0.10.0"
# Drift-free periodic job scheduling.
nodrift = "0.3.0"
# Random user/ad ids for the synthetic events.
rand = "0.9.0"
serde = { version = "1.0.217", features = ["derive"] }
serde_json = "1.0.138"
# Async runtime; "full" pulls in time, signal handling, etc.
tokio = { version = "1.43.0", features = ["full"] }
tracing = { version = "0.1.41", features = ["log"] }
tracing-subscriber = "0.3.19"
uuid = { version = "1.12.1", features = ["serde"] }

View File

@@ -0,0 +1,17 @@
# Build stage: compile the release binary with the nightly toolchain.
FROM rustlang/rust:nightly AS builder
WORKDIR /mnt/src
# Copy manifest and lockfile first, then sources.
COPY Cargo.toml Cargo.toml
COPY Cargo.lock Cargo.lock
COPY src/ src/
RUN cargo build --release

# Runtime stage: plain Debian plus the OpenSSL development package the
# binary links against (presumably for Kafka TLS — TODO confirm which
# shared library is actually required; libssl3 may suffice).
FROM debian:bookworm AS production
# apt-get (not apt) is the stable CLI for scripts; skip recommended
# packages and clean the package lists to keep the image small.
RUN apt-get update && apt-get upgrade -y \
    && apt-get install -y --no-install-recommends libssl-dev \
    && rm -rf /var/lib/apt/lists/*
COPY --from=builder /mnt/src/target/release/client-application /usr/local/bin/client-application
ENTRYPOINT ["/usr/local/bin/client-application"]

View File

@@ -0,0 +1,111 @@
use std::time::Duration;
use anyhow::Context;
use chrono::{TimeDelta, Utc};
use clap::{Parser, Subcommand};
use kafka::producer::Record;
use rand::Rng;
use serde::Serialize;
/// Top-level CLI definition parsed by clap.
#[derive(Parser)]
#[command(author, version, about, long_about = None, subcommand_required = true)]
struct Command {
    /// The selected subcommand. `subcommand_required = true` above makes
    /// clap reject an invocation without one, so after a successful parse
    /// this is always `Some` (main relies on that when unwrapping).
    #[command(subcommand)]
    command: Option<Commands>,
}
/// Subcommands supported by the CLI.
#[derive(Subcommand)]
enum Commands {
    /// Periodically produce synthetic ad-click events to a Kafka topic.
    Produce {
        /// Kafka broker address (passed to `Producer::from_hosts`).
        #[arg(long)]
        host: String,
        /// Topic the serialized events are published to.
        #[arg(long)]
        topic: String,
        /// Interval between produced events, in milliseconds.
        #[arg(long = "delay-ms")]
        delay_ms: u64,
    },
}
/// One synthetic ad-click event, serialized to JSON for Kafka.
/// Field names are the wire format — renaming them changes the payload.
#[derive(Clone, Serialize, Debug)]
struct AdSource {
    // Randomly generated in main (range 0..64).
    user_id: i64,
    // Randomly generated in main (range 0..64).
    ad_id: i64,
    // Formatted as "%Y-%m-%dT%H:%M:%S" (no sub-second precision, no offset).
    click_timestamp: String,
    // Same format; stamped at event-creation time.
    impression_timestamp: String,
}
/// Entry point: parses the CLI and, for `produce`, schedules a job that
/// publishes a random `AdSource` event to Kafka every `delay_ms`
/// milliseconds until the job cancels itself or Ctrl-C is received.
///
/// # Errors
/// Returns any error surfaced by the scheduled job or by runtime setup.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Best-effort load of a local .env file; a missing file is fine.
    dotenvy::dotenv().ok();
    tracing_subscriber::fmt::init();
    let cli = Command::parse();
    tracing::debug!("Starting cli");
    // `subcommand_required = true` on `Command` means clap already rejected
    // any invocation without a subcommand, so this cannot panic in practice.
    match cli.command.expect("clap enforces subcommand_required") {
        Commands::Produce {
            topic,
            delay_ms,
            host,
        } => {
            let send_event =
                nodrift::schedule(std::time::Duration::from_millis(delay_ms), move || {
                    let host = host.clone();
                    let topic = topic.clone();
                    async move {
                        tracing::info!("sending event");
                        let mut rng = rand::rng();
                        // NOTE(review): a fresh producer is built on every
                        // tick. Fine for an example; a long-lived producer
                        // would avoid per-event connection overhead.
                        let mut producer = kafka::producer::Producer::from_hosts(vec![host])
                            .with_ack_timeout(Duration::from_secs(1))
                            .with_required_acks(kafka::client::RequiredAcks::One)
                            .create()
                            .map_err(|e| nodrift::DriftError::JobError(e.into()))?;
                        let msg = AdSource {
                            user_id: rng.random_range(0..64),
                            ad_id: rng.random_range(0..64),
                            // Click is stamped 500 ms after the impression;
                            // the second-resolution format usually truncates
                            // that away, so the two strings often match.
                            click_timestamp: Utc::now()
                                .checked_add_signed(TimeDelta::milliseconds(500))
                                .expect("now + 500ms cannot overflow chrono's range")
                                .format("%Y-%m-%dT%H:%M:%S")
                                .to_string(),
                            // Utc::now() is already UTC; no conversion needed.
                            impression_timestamp: Utc::now()
                                .format("%Y-%m-%dT%H:%M:%S")
                                .to_string(),
                        };
                        producer
                            .send(&Record::from_value(
                                &topic,
                                serde_json::to_string(&msg)
                                    .context("failed to serialize type")
                                    .map_err(nodrift::DriftError::JobError)?,
                            ))
                            .map_err(|e| nodrift::DriftError::JobError(e.into()))?;
                        Ok(())
                    }
                });
            println!("waiting for closure press ctrl-c to cancel");
            // Run until either the scheduled job cancels itself or the user
            // interrupts; both arms resolve to the function's return value.
            tokio::select! {
                _ = send_event.cancelled() => {
                    // Brief grace period so in-flight work can settle.
                    tokio::time::sleep(Duration::from_secs(5)).await;
                    Ok(())
                }
                _ = tokio::signal::ctrl_c() => {
                    send_event.cancel();
                    Ok(())
                }
            }
        }
    }
}