9 Commits

Author SHA1 Message Date
Tom Alexander
c04b4e8da5 Fix bug that prevented actions from triggering.
All checks were successful
semver Build semver has succeeded
format Build format has succeeded
build Build build has succeeded
clippy Build clippy has succeeded
rust-test Build rust-test has succeeded
2024-09-29 18:09:07 -04:00
Tom Alexander
69dd1ba156 Remove support for http2.
Nginx does not support http2 for upstream proxies because there is little benefit to it for low-latency connections.
2024-09-29 18:00:34 -04:00
Tom Alexander
65c964b329 Fix clippy lint. 2024-09-29 17:42:08 -04:00
Tom Alexander
613026b326 Adding repo whitelist.
Some checks failed
semver Build semver has succeeded
format Build format has succeeded
clippy Build clippy has failed
build Build build has succeeded
rust-test Build rust-test has succeeded
2024-09-29 16:54:58 -04:00
Tom Alexander
cd56bb2fe1 Fix debug build in docker container by adding cranelift.
All checks were successful
semver Build semver has succeeded
format Build format has succeeded
clippy Build clippy has succeeded
build Build build has succeeded
rust-test Build rust-test has succeeded
2024-09-29 15:31:24 -04:00
Tom Alexander
4bcf8b9ddc Merge branch 'lint'
Some checks failed
semver Build semver has succeeded
format Build format has succeeded
rust-test Build rust-test has failed
clippy Build clippy has failed
build Build build has succeeded
2024-09-29 15:03:19 -04:00
Tom Alexander
14b38b7fcd Add rustfmt config. 2024-09-29 15:03:07 -04:00
Tom Alexander
0602f8472b Separate out to two binaries.
Some checks failed
format Build format has succeeded
clippy Build clippy has failed
rust-test Build rust-test has failed
2024-09-29 14:59:39 -04:00
Tom Alexander
cdac8224c6 Fix clippy lints. 2024-09-29 14:08:05 -04:00
16 changed files with 287 additions and 154 deletions

View File

@@ -1,3 +1,5 @@
cargo-features = ["codegen-backend"]
[package]
name = "webhook_bridge"
version = "0.0.1"
@@ -17,8 +19,26 @@ include = [
"Cargo.lock"
]
[lib]
name = "webhookbridge"
path = "src/lib.rs"
[[bin]]
name = "webhook_bridge"
path = "src/main.rs"
[[bin]]
# This bin exists for development purposes only. The real target of this crate is the webhook_bridge server binary.
name = "local_trigger"
path = "src/bin_local_trigger.rs"
required-features = ["local_trigger"]
[features]
default = ["local_trigger"]
local_trigger = []
[dependencies]
axum = { version = "0.7.5", default-features = false, features = ["tokio", "http1", "http2", "json"] }
axum = { version = "0.7.5", default-features = false, features = ["tokio", "http1", "json"] }
base64 = "0.22.1"
hmac = "0.12.1"
http-body-util = "0.1.2"
@@ -41,3 +61,10 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
inherits = "release"
lto = true
strip = "symbols"
[profile.dev]
codegen-backend = "cranelift"
[profile.dev.package."*"]
codegen-backend = "llvm"
opt-level = 3

View File

@@ -33,3 +33,4 @@ format: ## Auto-format source files.
.PHONY: clean
clean:
> $(MAKE) -C docker/webhook_bridge_development clean
> rm -rf target

View File

@@ -6,7 +6,7 @@ RUN mkdir /source
WORKDIR /source
COPY . .
# TODO: Add static build, which currently errors due to proc_macro. RUSTFLAGS="-C target-feature=+crt-static"
RUN CARGO_TARGET_DIR=/target cargo build --profile release-lto
RUN CARGO_TARGET_DIR=/target cargo build --profile release-lto --bin webhook_bridge
FROM alpine:3.20 AS runner

View File

@@ -2,5 +2,6 @@ FROM rustlang/rust:nightly-alpine3.20 AS builder
RUN apk add --no-cache musl-dev pkgconfig libressl3.8-libssl libressl-dev
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache
RUN rustup component add rustc-codegen-cranelift
RUN rustup component add rustfmt
RUN rustup component add clippy

14
rustfmt.toml Normal file
View File

@@ -0,0 +1,14 @@
imports_granularity = "Item"
group_imports = "StdExternalCrate"
# In rustfmt 2.0 I will want to adjust these settings.
#
# max_width controls the max length of a line before rustfmt gives up
# but that also scales the length of a bunch of other lines
# automatically due to width_heuristics. I want to find a way to enable
# rustfmt to work on longer lines when necessary without making my
# regular code too wide.
#
# max_width = 100
# error_on_line_overflow = true
# width_heuristics = "Off"

13
src/app_state.rs Normal file
View File

@@ -0,0 +1,13 @@
use std::collections::HashSet;
use std::sync::Arc;
use kube::Client;
use crate::gitea_client::GiteaClient;
#[derive(Clone)]
pub(crate) struct AppState {
pub(crate) kubernetes_client: Client,
pub(crate) gitea: GiteaClient,
pub(crate) allowed_repos: Arc<HashSet<String>>,
}

12
src/bin_local_trigger.rs Normal file
View File

@@ -0,0 +1,12 @@
#![forbid(unsafe_code)]
use webhookbridge::init_tracing;
use webhookbridge::local_trigger;
const EXAMPLE_WEBHOOK_PAYLOAD: &str = include_str!("../example_tag_webhook_payload.json");
#[tokio::main]
#[allow(clippy::needless_return)]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
init_tracing().await?;
local_trigger(EXAMPLE_WEBHOOK_PAYLOAD).await
}

View File

@@ -17,14 +17,22 @@ use serde_json::Value;
#[serde(deny_unknown_fields)]
pub(crate) struct PipelineRunSpec {
/// Contents of the Pipeline
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) pipelineSpec: Option<Value>,
#[serde(
rename = "pipelineSpec",
default,
skip_serializing_if = "Option::is_none"
)]
pub(crate) pipeline_spec: Option<Value>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) timeouts: Option<Value>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) taskRunTemplate: Option<Value>,
#[serde(
rename = "taskRunTemplate",
default,
skip_serializing_if = "Option::is_none"
)]
pub(crate) task_run_template: Option<Value>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) workspaces: Option<Value>,

View File

@@ -1,13 +1,13 @@
use std::path::Path;
use std::path::PathBuf;
use regex::Regex;
use tracing::debug;
use crate::crd_pipeline_run::PipelineRun;
use crate::gitea_client::GiteaClient;
use crate::gitea_client::Tree;
use crate::gitea_client::TreeFileReference;
use crate::remote_config::RemoteConfig;
use regex::Regex;
use tracing::debug;
pub(crate) async fn discover_webhook_bridge_config(
gitea: &GiteaClient,
@@ -16,8 +16,7 @@ pub(crate) async fn discover_webhook_bridge_config(
let remote_config_reference = repo_tree
.files
.iter()
.filter(|file_reference| file_reference.path == ".webhook_bridge/webhook_bridge.toml")
.next()
.find(|file_reference| file_reference.path == ".webhook_bridge/webhook_bridge.toml")
.ok_or("File not found in remote repo: .webhook_bridge/webhook_bridge.toml.")?;
let remote_config_contents =
@@ -47,8 +46,7 @@ pub(crate) async fn discover_matching_push_triggers<RE: AsRef<str>>(
let pipeline_template = repo_tree
.files
.iter()
.filter(|file_reference| Path::new(&file_reference.path) == path_to_source.as_path())
.next()
.find(|file_reference| Path::new(&file_reference.path) == path_to_source.as_path())
.ok_or("Trigger source not found in remote repo.")?;
let pipeline_contents = String::from_utf8(gitea.read_file(pipeline_template).await?)?;
debug!("Pipeline template contents: {}", pipeline_contents);

View File

@@ -2,7 +2,9 @@ use std::error::Error;
#[derive(Debug, Clone)]
pub(crate) enum GiteaClientError {
#[allow(dead_code)]
Static(#[allow(dead_code)] &'static str),
#[allow(dead_code)]
String(#[allow(dead_code)] String),
NoTotalCountHeaderInResponse,
}

View File

@@ -1,4 +1,5 @@
use base64::{engine::general_purpose, Engine as _};
use base64::engine::general_purpose;
use base64::Engine as _;
use serde::Deserialize;
use tracing::debug;
@@ -37,7 +38,7 @@ impl GiteaClient {
owner = owner.as_ref(),
repo = repo.as_ref(),
commit = commit.as_ref(),
page = page.map(|num| format!("&page={}", num)).unwrap_or_else(|| String::new())
page = page.map(|num| format!("&page={}", num)).unwrap_or_default()
);
let response = self
.http_client
@@ -99,11 +100,18 @@ impl GiteaClient {
#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)]
struct ResponseGetTree {
#[allow(dead_code)]
sha: String,
#[allow(dead_code)]
url: String,
tree: Vec<ResponseObjectReference>,
#[allow(dead_code)]
truncated: bool,
page: u64,
#[allow(dead_code)]
total_count: u64,
}
@@ -111,10 +119,18 @@ struct ResponseGetTree {
#[serde(deny_unknown_fields)]
struct ResponseObjectReference {
path: String,
#[allow(dead_code)]
mode: String,
#[allow(dead_code)]
#[serde(rename = "type")]
object_type: String,
#[allow(dead_code)]
size: u64,
#[allow(dead_code)]
sha: String,
url: String,
}
@@ -150,7 +166,13 @@ impl TreeFileReference {
struct ResponseReadFile {
content: String,
encoding: String,
#[allow(dead_code)]
url: String,
#[allow(dead_code)]
sha: String,
#[allow(dead_code)]
size: u64,
}

View File

@@ -16,7 +16,7 @@ pub(crate) struct HookPush {
commits: Vec<HookCommit>,
total_commits: u64,
head_commit: HookCommit,
repository: HookRepository,
pub(crate) repository: HookRepository,
pusher: HookUser,
sender: HookUser,
}
@@ -55,7 +55,7 @@ pub(crate) struct HookRepository {
id: u64,
owner: HookUser,
name: String,
full_name: String,
pub(crate) full_name: String,
description: String,
empty: bool,
private: bool,

View File

@@ -55,7 +55,7 @@ pub(crate) async fn run_pipelines(
name: Some("REPO_URL".to_owned()),
value: pipeline
.clone_uri
.map(|uri| serde_json::Value::String(uri))
.map(serde_json::Value::String)
.or_else(|| Some(serde_json::Value::String(hook_repo_url.into_owned()))),
});
param_list.push(PipelineParam {

146
src/lib.rs Normal file
View File

@@ -0,0 +1,146 @@
#![forbid(unsafe_code)]
use std::collections::HashSet;
use std::sync::Arc;
use std::time::Duration;
use axum::http::StatusCode;
use axum::middleware;
use axum::routing::get;
use axum::routing::post;
use axum::Json;
use axum::Router;
use kube::Client;
use serde::Serialize;
use tokio::signal;
use tower_http::timeout::TimeoutLayer;
use tower_http::trace::TraceLayer;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use self::app_state::AppState;
use self::gitea_client::GiteaClient;
use self::hook_push::HookPush;
use self::webhook::handle_push;
use self::webhook::hook;
use self::webhook::verify_signature;
mod app_state;
mod crd_pipeline_run;
mod discovery;
mod gitea_client;
mod hook_push;
mod kubernetes;
mod remote_config;
mod webhook;
pub async fn init_tracing() -> Result<(), Box<dyn std::error::Error>> {
tracing_subscriber::registry()
.with(
tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| {
"webhookbridge=info,webhook_bridge=info,local_trigger=info,tower_http=debug,axum::rejection=trace"
.into()
}),
)
.with(tracing_subscriber::fmt::layer())
.init();
Ok(())
}
pub async fn launch_server() -> Result<(), Box<dyn std::error::Error>> {
let kubernetes_client: Client = Client::try_default()
.await
.expect("Set KUBECONFIG to a valid kubernetes config.");
let gitea_api_root = std::env::var("WEBHOOK_BRIDGE_API_ROOT")?;
let gitea_api_token = std::env::var("WEBHOOK_BRIDGE_OAUTH_TOKEN")?;
let gitea = GiteaClient::new(gitea_api_root, gitea_api_token);
let allowed_repos = std::env::var("WEBHOOK_BRIDGE_REPO_WHITELIST")?;
let allowed_repos: HashSet<_> = allowed_repos
.split(",")
.filter(|s| !s.is_empty())
.map(str::to_owned)
.collect();
tracing::debug!("Using repo whitelist: {:?}", allowed_repos);
let app = Router::new()
.route("/hook", post(hook))
.layer(middleware::from_fn(verify_signature))
.route("/health", get(health))
.layer((
TraceLayer::new_for_http(),
// Add a timeout layer so graceful shutdown can't wait forever.
TimeoutLayer::new(Duration::from_secs(600)),
))
.with_state(AppState {
kubernetes_client,
gitea,
allowed_repos: Arc::new(allowed_repos),
});
let listener = tokio::net::TcpListener::bind("0.0.0.0:9988").await?;
tracing::info!("listening on {}", listener.local_addr().unwrap());
axum::serve(listener, app)
.with_graceful_shutdown(shutdown_signal())
.await?;
Ok(())
}
pub async fn local_trigger(payload: &str) -> Result<(), Box<dyn std::error::Error>> {
let kubernetes_client: Client = Client::try_default()
.await
.expect("Set KUBECONFIG to a valid kubernetes config.");
let gitea_api_root = std::env::var("WEBHOOK_BRIDGE_API_ROOT")?;
let gitea_api_token = std::env::var("WEBHOOK_BRIDGE_OAUTH_TOKEN")?;
let gitea = GiteaClient::new(gitea_api_root, gitea_api_token);
let allowed_repos = std::env::var("WEBHOOK_BRIDGE_REPO_WHITELIST")
.ok()
.unwrap_or_default();
let allowed_repos: HashSet<_> = allowed_repos
.split(",")
.filter(|s| !s.is_empty())
.map(str::to_owned)
.collect();
tracing::debug!("Using repo whitelist: {:?}", allowed_repos);
let webhook_payload: HookPush = serde_json::from_str(payload)?;
handle_push(gitea, kubernetes_client, &allowed_repos, webhook_payload).await?;
Ok(())
}
async fn shutdown_signal() {
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
tokio::select! {
_ = ctrl_c => {},
_ = terminate => {},
}
}
async fn health() -> (StatusCode, Json<HealthResponse>) {
(StatusCode::OK, Json(HealthResponse { ok: true }))
}
#[derive(Serialize)]
struct HealthResponse {
ok: bool,
}

View File

@@ -1,136 +1,10 @@
#![forbid(unsafe_code)]
use std::time::Duration;
use axum::http::StatusCode;
use axum::middleware;
use axum::routing::get;
use axum::routing::post;
use axum::Json;
use axum::Router;
use kube::Client;
use serde::Serialize;
use tokio::signal;
use tower_http::timeout::TimeoutLayer;
use tower_http::trace::TraceLayer;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use self::discovery::discover_matching_push_triggers;
use self::discovery::discover_webhook_bridge_config;
use self::gitea_client::GiteaClient;
use self::hook_push::HookPush;
use self::hook_push::PipelineParamters;
use self::kubernetes::run_pipelines;
use self::webhook::handle_push;
use self::webhook::hook;
use self::webhook::verify_signature;
mod crd_pipeline_run;
mod discovery;
mod gitea_client;
mod hook_push;
mod kubernetes;
mod remote_config;
mod webhook;
const EXAMPLE_WEBHOOK_PAYLOAD: &'static str = include_str!("../example_tag_webhook_payload.json");
use webhookbridge::init_tracing;
use webhookbridge::launch_server;
#[tokio::main]
#[allow(clippy::needless_return)]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
tracing_subscriber::registry()
.with(
tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| {
"webhook_bridge=info,tower_http=debug,axum::rejection=trace".into()
}),
)
.with(tracing_subscriber::fmt::layer())
.init();
init_tracing().await?;
launch_server().await
}
async fn launch_server() -> Result<(), Box<dyn std::error::Error>> {
let kubernetes_client: Client = Client::try_default()
.await
.expect("Set KUBECONFIG to a valid kubernetes config.");
let gitea_api_root = std::env::var("WEBHOOK_BRIDGE_API_ROOT")?;
let gitea_api_token = std::env::var("WEBHOOK_BRIDGE_OAUTH_TOKEN")?;
let gitea = GiteaClient::new(gitea_api_root, gitea_api_token);
let app = Router::new()
.route("/hook", post(hook))
.layer(middleware::from_fn(verify_signature))
.route("/health", get(health))
.layer((
TraceLayer::new_for_http(),
// Add a timeout layer so graceful shutdown can't wait forever.
TimeoutLayer::new(Duration::from_secs(600)),
))
.with_state(AppState {
kubernetes_client,
gitea,
});
let listener = tokio::net::TcpListener::bind("0.0.0.0:9988").await?;
tracing::info!("listening on {}", listener.local_addr().unwrap());
axum::serve(listener, app)
.with_graceful_shutdown(shutdown_signal())
.await?;
Ok(())
}
async fn local_trigger() -> Result<(), Box<dyn std::error::Error>> {
let kubernetes_client: Client = Client::try_default()
.await
.expect("Set KUBECONFIG to a valid kubernetes config.");
let gitea_api_root = std::env::var("WEBHOOK_BRIDGE_API_ROOT")?;
let gitea_api_token = std::env::var("WEBHOOK_BRIDGE_OAUTH_TOKEN")?;
let gitea = GiteaClient::new(gitea_api_root, gitea_api_token);
let webhook_payload: HookPush = serde_json::from_str(EXAMPLE_WEBHOOK_PAYLOAD)?;
handle_push(gitea, kubernetes_client, webhook_payload).await?;
Ok(())
}
#[derive(Clone)]
struct AppState {
kubernetes_client: Client,
gitea: GiteaClient,
}
async fn shutdown_signal() {
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
tokio::select! {
_ = ctrl_c => {},
_ = terminate => {},
}
}
async fn health() -> (StatusCode, Json<HealthResponse>) {
(StatusCode::OK, Json(HealthResponse { ok: true }))
}
#[derive(Serialize)]
struct HealthResponse {
ok: bool,
}

View File

@@ -1,5 +1,6 @@
use std::borrow::Borrow;
use std::collections::HashSet;
use std::future::Future;
use std::sync::Arc;
use axum::async_trait;
use axum::body::Body;
@@ -14,7 +15,8 @@ use axum::response::IntoResponse;
use axum::response::Response;
use axum::Json;
use axum::RequestExt;
use base64::{engine::general_purpose, Engine as _};
use base64::engine::general_purpose;
use base64::Engine as _;
use hmac::Hmac;
use hmac::Mac;
use http_body_util::BodyExt;
@@ -22,13 +24,13 @@ use serde::Serialize;
use sha2::Sha256;
use tracing::debug;
use crate::app_state::AppState;
use crate::discovery::discover_matching_push_triggers;
use crate::discovery::discover_webhook_bridge_config;
use crate::gitea_client::GiteaClient;
use crate::hook_push::HookPush;
use crate::hook_push::PipelineParamters;
use crate::kubernetes::run_pipelines;
use crate::AppState;
type HmacSha256 = Hmac<Sha256>;
@@ -40,7 +42,12 @@ pub(crate) async fn hook(
debug!("REQ: {:?}", payload);
match payload {
HookRequest::Push(webhook_payload) => {
handle_push(state.gitea, state.kubernetes_client, webhook_payload)
handle_push(
state.gitea,
state.kubernetes_client,
state.allowed_repos.borrow(),
webhook_payload,
)
.await
.expect("Failed to handle push event.");
(
@@ -167,11 +174,19 @@ fn hex_to_bytes(s: &str) -> Option<Vec<u8>> {
pub(crate) async fn handle_push(
gitea: GiteaClient,
kubernetes_client: kube::Client,
allowed_repos: &HashSet<String>,
webhook_payload: HookPush,
) -> Result<(), Box<dyn std::error::Error>> {
let repo_owner = webhook_payload.get_repo_owner()?;
let repo_name = webhook_payload.get_repo_name()?;
let pull_base_sha = webhook_payload.get_pull_base_sha()?;
if !allowed_repos.contains(&webhook_payload.repository.full_name) {
tracing::info!(
"{} is not an allowed repository.",
webhook_payload.repository.full_name
);
return Ok(());
}
let repo_tree = gitea.get_tree(repo_owner, repo_name, pull_base_sha).await?;
let remote_config = discover_webhook_bridge_config(&gitea, &repo_tree).await?;
let pipelines = discover_matching_push_triggers(