14 Commits

Author SHA1 Message Date
Tom Alexander
1c082a5e24 Test: instantiate new clients for every request.
Some checks failed
semver Build semver has succeeded
format Build format has succeeded
build Build build has succeeded
clippy Build clippy has failed
rust-test Build rust-test has succeeded
Trying to figure out why I am getting the below error occasionally in gitea:

Delivery: Post "https://webhookbridge.fizz.buzz/hook": context deadline exceeded (Client.Timeout exceeded while awaiting headers)
2024-09-29 21:58:05 -04:00
Tom Alexander
9ed8905a5c Always return status code ok.
All checks were successful
semver Build semver has succeeded
format Build format has succeeded
clippy Build clippy has succeeded
build Build build has succeeded
rust-test Build rust-test has succeeded
2024-09-29 18:37:23 -04:00
Tom Alexander
8cb28459a0 Fix clippy lint. 2024-09-29 18:31:47 -04:00
Tom Alexander
753ad6dd05 Handle errors in push events.
Some checks failed
semver Build semver has succeeded
format Build format has succeeded
rust-test Build rust-test has failed
clippy Build clippy has failed
build Build build has succeeded
2024-09-29 18:24:50 -04:00
Tom Alexander
dd4c20f0a7 Remove log of secret.
All checks were successful
semver Build semver has succeeded
format Build format has succeeded
build Build build has succeeded
clippy Build clippy has succeeded
rust-test Build rust-test has succeeded
2024-09-29 18:14:36 -04:00
Tom Alexander
c04b4e8da5 Fix bug that prevented actions from triggering.
All checks were successful
semver Build semver has succeeded
format Build format has succeeded
build Build build has succeeded
clippy Build clippy has succeeded
rust-test Build rust-test has succeeded
2024-09-29 18:09:07 -04:00
Tom Alexander
69dd1ba156 Remove support for http2.
Nginx does not support http2 for upstream proxies because there is not much point for low-latency connections.
2024-09-29 18:00:34 -04:00
Tom Alexander
65c964b329 Fix clippy lint. 2024-09-29 17:42:08 -04:00
Tom Alexander
613026b326 Adding repo whitelist.
Some checks failed
semver Build semver has succeeded
format Build format has succeeded
clippy Build clippy has failed
build Build build has succeeded
rust-test Build rust-test has succeeded
2024-09-29 16:54:58 -04:00
Tom Alexander
cd56bb2fe1 Fix debug build in docker container by adding cranelift.
All checks were successful
semver Build semver has succeeded
format Build format has succeeded
clippy Build clippy has succeeded
build Build build has succeeded
rust-test Build rust-test has succeeded
2024-09-29 15:31:24 -04:00
Tom Alexander
4bcf8b9ddc Merge branch 'lint'
Some checks failed
semver Build semver has succeeded
format Build format has succeeded
rust-test Build rust-test has failed
clippy Build clippy has failed
build Build build has succeeded
2024-09-29 15:03:19 -04:00
Tom Alexander
14b38b7fcd Add rustfmt config. 2024-09-29 15:03:07 -04:00
Tom Alexander
0602f8472b Separate out to two binaries.
Some checks failed
format Build format has succeeded
clippy Build clippy has failed
rust-test Build rust-test has failed
2024-09-29 14:59:39 -04:00
Tom Alexander
cdac8224c6 Fix clippy lints. 2024-09-29 14:08:05 -04:00
16 changed files with 326 additions and 163 deletions

View File

@@ -1,3 +1,5 @@
cargo-features = ["codegen-backend"]
[package] [package]
name = "webhook_bridge" name = "webhook_bridge"
version = "0.0.1" version = "0.0.1"
@@ -17,8 +19,26 @@ include = [
"Cargo.lock" "Cargo.lock"
] ]
[lib]
name = "webhookbridge"
path = "src/lib.rs"
[[bin]]
name = "webhook_bridge"
path = "src/main.rs"
[[bin]]
# This bin exists for development purposes only. The real target of this crate is the webhook_bridge server binary.
name = "local_trigger"
path = "src/bin_local_trigger.rs"
required-features = ["local_trigger"]
[features]
default = ["local_trigger"]
local_trigger = []
[dependencies] [dependencies]
axum = { version = "0.7.5", default-features = false, features = ["tokio", "http1", "http2", "json"] } axum = { version = "0.7.5", default-features = false, features = ["tokio", "http1", "json"] }
base64 = "0.22.1" base64 = "0.22.1"
hmac = "0.12.1" hmac = "0.12.1"
http-body-util = "0.1.2" http-body-util = "0.1.2"
@@ -41,3 +61,10 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
inherits = "release" inherits = "release"
lto = true lto = true
strip = "symbols" strip = "symbols"
[target.'cfg(target_os = "linux")'.profile.dev]
codegen-backend = "cranelift"
[profile.dev.package."*"]
codegen-backend = "llvm"
opt-level = 3

View File

@@ -33,3 +33,4 @@ format: ## Auto-format source files.
.PHONY: clean .PHONY: clean
clean: clean:
> $(MAKE) -C docker/webhook_bridge_development clean > $(MAKE) -C docker/webhook_bridge_development clean
> rm -rf target

View File

@@ -6,7 +6,7 @@ RUN mkdir /source
WORKDIR /source WORKDIR /source
COPY . . COPY . .
# TODO: Add static build, which currently errors due to proc_macro. RUSTFLAGS="-C target-feature=+crt-static" # TODO: Add static build, which currently errors due to proc_macro. RUSTFLAGS="-C target-feature=+crt-static"
RUN CARGO_TARGET_DIR=/target cargo build --profile release-lto RUN CARGO_TARGET_DIR=/target cargo build --profile release-lto --bin webhook_bridge
FROM alpine:3.20 AS runner FROM alpine:3.20 AS runner

View File

@@ -2,5 +2,6 @@ FROM rustlang/rust:nightly-alpine3.20 AS builder
RUN apk add --no-cache musl-dev pkgconfig libressl3.8-libssl libressl-dev RUN apk add --no-cache musl-dev pkgconfig libressl3.8-libssl libressl-dev
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache
RUN rustup component add rustc-codegen-cranelift
RUN rustup component add rustfmt RUN rustup component add rustfmt
RUN rustup component add clippy RUN rustup component add clippy

14
rustfmt.toml Normal file
View File

@@ -0,0 +1,14 @@
imports_granularity = "Item"
group_imports = "StdExternalCrate"
# In rustfmt 2.0 I will want to adjust these settings.
#
# max_width controls the max length of a line before rustfmt gives up
# but that also scales the length of a bunch of other lines
# automatically due to width_heuristics. I want to find a way to enable
# rustfmt to work on longer lines when necessary without making my
# regular code too wide.
#
# max_width = 100
# error_on_line_overflow = true
# width_heuristics = "Off"

13
src/app_state.rs Normal file
View File

@@ -0,0 +1,13 @@
use std::collections::HashSet;
use std::sync::Arc;
use kube::Client;
use crate::gitea_client::GiteaClient;
#[derive(Clone)]
pub(crate) struct AppState {
pub(crate) kubernetes_client: Client,
pub(crate) gitea: GiteaClient,
pub(crate) allowed_repos: Arc<HashSet<String>>,
}

12
src/bin_local_trigger.rs Normal file
View File

@@ -0,0 +1,12 @@
#![forbid(unsafe_code)]
use webhookbridge::init_tracing;
use webhookbridge::local_trigger;
const EXAMPLE_WEBHOOK_PAYLOAD: &str = include_str!("../example_tag_webhook_payload.json");
#[tokio::main]
#[allow(clippy::needless_return)]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
init_tracing().await?;
local_trigger(EXAMPLE_WEBHOOK_PAYLOAD).await
}

View File

@@ -17,14 +17,22 @@ use serde_json::Value;
#[serde(deny_unknown_fields)] #[serde(deny_unknown_fields)]
pub(crate) struct PipelineRunSpec { pub(crate) struct PipelineRunSpec {
/// Contents of the Pipeline /// Contents of the Pipeline
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(
pub(crate) pipelineSpec: Option<Value>, rename = "pipelineSpec",
default,
skip_serializing_if = "Option::is_none"
)]
pub(crate) pipeline_spec: Option<Value>,
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) timeouts: Option<Value>, pub(crate) timeouts: Option<Value>,
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(
pub(crate) taskRunTemplate: Option<Value>, rename = "taskRunTemplate",
default,
skip_serializing_if = "Option::is_none"
)]
pub(crate) task_run_template: Option<Value>,
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) workspaces: Option<Value>, pub(crate) workspaces: Option<Value>,

View File

@@ -1,13 +1,13 @@
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use regex::Regex;
use tracing::debug;
use crate::crd_pipeline_run::PipelineRun; use crate::crd_pipeline_run::PipelineRun;
use crate::gitea_client::GiteaClient; use crate::gitea_client::GiteaClient;
use crate::gitea_client::Tree; use crate::gitea_client::Tree;
use crate::gitea_client::TreeFileReference;
use crate::remote_config::RemoteConfig; use crate::remote_config::RemoteConfig;
use regex::Regex;
use tracing::debug;
pub(crate) async fn discover_webhook_bridge_config( pub(crate) async fn discover_webhook_bridge_config(
gitea: &GiteaClient, gitea: &GiteaClient,
@@ -16,8 +16,7 @@ pub(crate) async fn discover_webhook_bridge_config(
let remote_config_reference = repo_tree let remote_config_reference = repo_tree
.files .files
.iter() .iter()
.filter(|file_reference| file_reference.path == ".webhook_bridge/webhook_bridge.toml") .find(|file_reference| file_reference.path == ".webhook_bridge/webhook_bridge.toml")
.next()
.ok_or("File not found in remote repo: .webhook_bridge/webhook_bridge.toml.")?; .ok_or("File not found in remote repo: .webhook_bridge/webhook_bridge.toml.")?;
let remote_config_contents = let remote_config_contents =
@@ -47,8 +46,7 @@ pub(crate) async fn discover_matching_push_triggers<RE: AsRef<str>>(
let pipeline_template = repo_tree let pipeline_template = repo_tree
.files .files
.iter() .iter()
.filter(|file_reference| Path::new(&file_reference.path) == path_to_source.as_path()) .find(|file_reference| Path::new(&file_reference.path) == path_to_source.as_path())
.next()
.ok_or("Trigger source not found in remote repo.")?; .ok_or("Trigger source not found in remote repo.")?;
let pipeline_contents = String::from_utf8(gitea.read_file(pipeline_template).await?)?; let pipeline_contents = String::from_utf8(gitea.read_file(pipeline_template).await?)?;
debug!("Pipeline template contents: {}", pipeline_contents); debug!("Pipeline template contents: {}", pipeline_contents);

View File

@@ -2,7 +2,9 @@ use std::error::Error;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) enum GiteaClientError { pub(crate) enum GiteaClientError {
#[allow(dead_code)]
Static(#[allow(dead_code)] &'static str), Static(#[allow(dead_code)] &'static str),
#[allow(dead_code)]
String(#[allow(dead_code)] String), String(#[allow(dead_code)] String),
NoTotalCountHeaderInResponse, NoTotalCountHeaderInResponse,
} }

View File

@@ -1,4 +1,5 @@
use base64::{engine::general_purpose, Engine as _}; use base64::engine::general_purpose;
use base64::Engine as _;
use serde::Deserialize; use serde::Deserialize;
use tracing::debug; use tracing::debug;
@@ -37,7 +38,7 @@ impl GiteaClient {
owner = owner.as_ref(), owner = owner.as_ref(),
repo = repo.as_ref(), repo = repo.as_ref(),
commit = commit.as_ref(), commit = commit.as_ref(),
page = page.map(|num| format!("&page={}", num)).unwrap_or_else(|| String::new()) page = page.map(|num| format!("&page={}", num)).unwrap_or_default()
); );
let response = self let response = self
.http_client .http_client
@@ -99,11 +100,18 @@ impl GiteaClient {
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)] #[serde(deny_unknown_fields)]
struct ResponseGetTree { struct ResponseGetTree {
#[allow(dead_code)]
sha: String, sha: String,
#[allow(dead_code)]
url: String, url: String,
tree: Vec<ResponseObjectReference>, tree: Vec<ResponseObjectReference>,
#[allow(dead_code)]
truncated: bool, truncated: bool,
page: u64, page: u64,
#[allow(dead_code)]
total_count: u64, total_count: u64,
} }
@@ -111,10 +119,18 @@ struct ResponseGetTree {
#[serde(deny_unknown_fields)] #[serde(deny_unknown_fields)]
struct ResponseObjectReference { struct ResponseObjectReference {
path: String, path: String,
#[allow(dead_code)]
mode: String, mode: String,
#[allow(dead_code)]
#[serde(rename = "type")] #[serde(rename = "type")]
object_type: String, object_type: String,
#[allow(dead_code)]
size: u64, size: u64,
#[allow(dead_code)]
sha: String, sha: String,
url: String, url: String,
} }
@@ -150,7 +166,13 @@ impl TreeFileReference {
struct ResponseReadFile { struct ResponseReadFile {
content: String, content: String,
encoding: String, encoding: String,
#[allow(dead_code)]
url: String, url: String,
#[allow(dead_code)]
sha: String, sha: String,
#[allow(dead_code)]
size: u64, size: u64,
} }

View File

@@ -16,7 +16,7 @@ pub(crate) struct HookPush {
commits: Vec<HookCommit>, commits: Vec<HookCommit>,
total_commits: u64, total_commits: u64,
head_commit: HookCommit, head_commit: HookCommit,
repository: HookRepository, pub(crate) repository: HookRepository,
pusher: HookUser, pusher: HookUser,
sender: HookUser, sender: HookUser,
} }
@@ -55,7 +55,7 @@ pub(crate) struct HookRepository {
id: u64, id: u64,
owner: HookUser, owner: HookUser,
name: String, name: String,
full_name: String, pub(crate) full_name: String,
description: String, description: String,
empty: bool, empty: bool,
private: bool, private: bool,

View File

@@ -55,7 +55,7 @@ pub(crate) async fn run_pipelines(
name: Some("REPO_URL".to_owned()), name: Some("REPO_URL".to_owned()),
value: pipeline value: pipeline
.clone_uri .clone_uri
.map(|uri| serde_json::Value::String(uri)) .map(serde_json::Value::String)
.or_else(|| Some(serde_json::Value::String(hook_repo_url.into_owned()))), .or_else(|| Some(serde_json::Value::String(hook_repo_url.into_owned()))),
}); });
param_list.push(PipelineParam { param_list.push(PipelineParam {

146
src/lib.rs Normal file
View File

@@ -0,0 +1,146 @@
#![forbid(unsafe_code)]
use std::collections::HashSet;
use std::sync::Arc;
use std::time::Duration;
use axum::http::StatusCode;
use axum::middleware;
use axum::routing::get;
use axum::routing::post;
use axum::Json;
use axum::Router;
use kube::Client;
use serde::Serialize;
use tokio::signal;
use tower_http::timeout::TimeoutLayer;
use tower_http::trace::TraceLayer;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use self::app_state::AppState;
use self::gitea_client::GiteaClient;
use self::hook_push::HookPush;
use self::webhook::handle_push;
use self::webhook::hook;
use self::webhook::verify_signature;
mod app_state;
mod crd_pipeline_run;
mod discovery;
mod gitea_client;
mod hook_push;
mod kubernetes;
mod remote_config;
mod webhook;
pub async fn init_tracing() -> Result<(), Box<dyn std::error::Error>> {
tracing_subscriber::registry()
.with(
tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| {
"webhookbridge=info,webhook_bridge=info,local_trigger=info,tower_http=debug,axum::rejection=trace"
.into()
}),
)
.with(tracing_subscriber::fmt::layer())
.init();
Ok(())
}
pub async fn launch_server() -> Result<(), Box<dyn std::error::Error>> {
let kubernetes_client: Client = Client::try_default()
.await
.expect("Set KUBECONFIG to a valid kubernetes config.");
let gitea_api_root = std::env::var("WEBHOOK_BRIDGE_API_ROOT")?;
let gitea_api_token = std::env::var("WEBHOOK_BRIDGE_OAUTH_TOKEN")?;
let gitea = GiteaClient::new(gitea_api_root, gitea_api_token);
let allowed_repos = std::env::var("WEBHOOK_BRIDGE_REPO_WHITELIST")?;
let allowed_repos: HashSet<_> = allowed_repos
.split(",")
.filter(|s| !s.is_empty())
.map(str::to_owned)
.collect();
tracing::debug!("Using repo whitelist: {:?}", allowed_repos);
let app = Router::new()
.route("/hook", post(hook))
.layer(middleware::from_fn(verify_signature))
.route("/health", get(health))
.layer((
TraceLayer::new_for_http(),
// Add a timeout layer so graceful shutdown can't wait forever.
TimeoutLayer::new(Duration::from_secs(600)),
))
.with_state(AppState {
kubernetes_client,
gitea,
allowed_repos: Arc::new(allowed_repos),
});
let listener = tokio::net::TcpListener::bind("0.0.0.0:9988").await?;
tracing::info!("listening on {}", listener.local_addr().unwrap());
axum::serve(listener, app)
.with_graceful_shutdown(shutdown_signal())
.await?;
Ok(())
}
pub async fn local_trigger(payload: &str) -> Result<(), Box<dyn std::error::Error>> {
let kubernetes_client: Client = Client::try_default()
.await
.expect("Set KUBECONFIG to a valid kubernetes config.");
let gitea_api_root = std::env::var("WEBHOOK_BRIDGE_API_ROOT")?;
let gitea_api_token = std::env::var("WEBHOOK_BRIDGE_OAUTH_TOKEN")?;
let gitea = GiteaClient::new(gitea_api_root, gitea_api_token);
let allowed_repos = std::env::var("WEBHOOK_BRIDGE_REPO_WHITELIST")
.ok()
.unwrap_or_default();
let allowed_repos: HashSet<_> = allowed_repos
.split(",")
.filter(|s| !s.is_empty())
.map(str::to_owned)
.collect();
tracing::debug!("Using repo whitelist: {:?}", allowed_repos);
let webhook_payload: HookPush = serde_json::from_str(payload)?;
handle_push(gitea, kubernetes_client, &allowed_repos, webhook_payload).await?;
Ok(())
}
async fn shutdown_signal() {
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
tokio::select! {
_ = ctrl_c => {},
_ = terminate => {},
}
}
async fn health() -> (StatusCode, Json<HealthResponse>) {
(StatusCode::OK, Json(HealthResponse { ok: true }))
}
#[derive(Serialize)]
struct HealthResponse {
ok: bool,
}

View File

@@ -1,136 +1,10 @@
#![forbid(unsafe_code)] #![forbid(unsafe_code)]
use std::time::Duration; use webhookbridge::init_tracing;
use webhookbridge::launch_server;
use axum::http::StatusCode;
use axum::middleware;
use axum::routing::get;
use axum::routing::post;
use axum::Json;
use axum::Router;
use kube::Client;
use serde::Serialize;
use tokio::signal;
use tower_http::timeout::TimeoutLayer;
use tower_http::trace::TraceLayer;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use self::discovery::discover_matching_push_triggers;
use self::discovery::discover_webhook_bridge_config;
use self::gitea_client::GiteaClient;
use self::hook_push::HookPush;
use self::hook_push::PipelineParamters;
use self::kubernetes::run_pipelines;
use self::webhook::handle_push;
use self::webhook::hook;
use self::webhook::verify_signature;
mod crd_pipeline_run;
mod discovery;
mod gitea_client;
mod hook_push;
mod kubernetes;
mod remote_config;
mod webhook;
const EXAMPLE_WEBHOOK_PAYLOAD: &'static str = include_str!("../example_tag_webhook_payload.json");
#[tokio::main] #[tokio::main]
#[allow(clippy::needless_return)]
async fn main() -> Result<(), Box<dyn std::error::Error>> { async fn main() -> Result<(), Box<dyn std::error::Error>> {
tracing_subscriber::registry() init_tracing().await?;
.with(
tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| {
"webhook_bridge=info,tower_http=debug,axum::rejection=trace".into()
}),
)
.with(tracing_subscriber::fmt::layer())
.init();
launch_server().await launch_server().await
} }
async fn launch_server() -> Result<(), Box<dyn std::error::Error>> {
let kubernetes_client: Client = Client::try_default()
.await
.expect("Set KUBECONFIG to a valid kubernetes config.");
let gitea_api_root = std::env::var("WEBHOOK_BRIDGE_API_ROOT")?;
let gitea_api_token = std::env::var("WEBHOOK_BRIDGE_OAUTH_TOKEN")?;
let gitea = GiteaClient::new(gitea_api_root, gitea_api_token);
let app = Router::new()
.route("/hook", post(hook))
.layer(middleware::from_fn(verify_signature))
.route("/health", get(health))
.layer((
TraceLayer::new_for_http(),
// Add a timeout layer so graceful shutdown can't wait forever.
TimeoutLayer::new(Duration::from_secs(600)),
))
.with_state(AppState {
kubernetes_client,
gitea,
});
let listener = tokio::net::TcpListener::bind("0.0.0.0:9988").await?;
tracing::info!("listening on {}", listener.local_addr().unwrap());
axum::serve(listener, app)
.with_graceful_shutdown(shutdown_signal())
.await?;
Ok(())
}
async fn local_trigger() -> Result<(), Box<dyn std::error::Error>> {
let kubernetes_client: Client = Client::try_default()
.await
.expect("Set KUBECONFIG to a valid kubernetes config.");
let gitea_api_root = std::env::var("WEBHOOK_BRIDGE_API_ROOT")?;
let gitea_api_token = std::env::var("WEBHOOK_BRIDGE_OAUTH_TOKEN")?;
let gitea = GiteaClient::new(gitea_api_root, gitea_api_token);
let webhook_payload: HookPush = serde_json::from_str(EXAMPLE_WEBHOOK_PAYLOAD)?;
handle_push(gitea, kubernetes_client, webhook_payload).await?;
Ok(())
}
#[derive(Clone)]
struct AppState {
kubernetes_client: Client,
gitea: GiteaClient,
}
async fn shutdown_signal() {
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
tokio::select! {
_ = ctrl_c => {},
_ = terminate => {},
}
}
async fn health() -> (StatusCode, Json<HealthResponse>) {
(StatusCode::OK, Json(HealthResponse { ok: true }))
}
#[derive(Serialize)]
struct HealthResponse {
ok: bool,
}

View File

@@ -1,5 +1,6 @@
use std::borrow::Borrow;
use std::collections::HashSet;
use std::future::Future; use std::future::Future;
use std::sync::Arc;
use axum::async_trait; use axum::async_trait;
use axum::body::Body; use axum::body::Body;
@@ -14,7 +15,8 @@ use axum::response::IntoResponse;
use axum::response::Response; use axum::response::Response;
use axum::Json; use axum::Json;
use axum::RequestExt; use axum::RequestExt;
use base64::{engine::general_purpose, Engine as _}; use base64::engine::general_purpose;
use base64::Engine as _;
use hmac::Hmac; use hmac::Hmac;
use hmac::Mac; use hmac::Mac;
use http_body_util::BodyExt; use http_body_util::BodyExt;
@@ -22,13 +24,13 @@ use serde::Serialize;
use sha2::Sha256; use sha2::Sha256;
use tracing::debug; use tracing::debug;
use crate::app_state::AppState;
use crate::discovery::discover_matching_push_triggers; use crate::discovery::discover_matching_push_triggers;
use crate::discovery::discover_webhook_bridge_config; use crate::discovery::discover_webhook_bridge_config;
use crate::gitea_client::GiteaClient; use crate::gitea_client::GiteaClient;
use crate::hook_push::HookPush; use crate::hook_push::HookPush;
use crate::hook_push::PipelineParamters; use crate::hook_push::PipelineParamters;
use crate::kubernetes::run_pipelines; use crate::kubernetes::run_pipelines;
use crate::AppState;
type HmacSha256 = Hmac<Sha256>; type HmacSha256 = Hmac<Sha256>;
@@ -40,19 +42,54 @@ pub(crate) async fn hook(
debug!("REQ: {:?}", payload); debug!("REQ: {:?}", payload);
match payload { match payload {
HookRequest::Push(webhook_payload) => { HookRequest::Push(webhook_payload) => {
handle_push(state.gitea, state.kubernetes_client, webhook_payload) let kubernetes_client: kube::Client = kube::Client::try_default()
.await .await
.expect("Failed to handle push event."); .expect("Set KUBECONFIG to a valid kubernetes config.");
(
let gitea_api_root = std::env::var("WEBHOOK_BRIDGE_API_ROOT");
let gitea_api_token = std::env::var("WEBHOOK_BRIDGE_OAUTH_TOKEN");
let (gitea_api_root, gitea_api_token) = match (gitea_api_root, gitea_api_token) {
(Ok(r), Ok(t)) => (r, t),
_ => {
return (
StatusCode::OK, StatusCode::OK,
Json(HookResponse { Json(HookResponse {
ok: true, ok: true,
message: None, message: None,
}), }),
);
}
};
let gitea = GiteaClient::new(gitea_api_root, gitea_api_token);
let push_result = handle_push(
gitea,
kubernetes_client,
state.allowed_repos.borrow(),
webhook_payload,
) )
.await;
match push_result {
Ok(_) => (
StatusCode::OK,
Json(HookResponse {
ok: true,
message: None,
}),
),
Err(_) => (
// StatusCode::INTERNAL_SERVER_ERROR,
StatusCode::OK,
Json(HookResponse {
ok: false,
message: Some("Failed to handle push event.".to_string()),
}),
),
}
} }
HookRequest::Unrecognized(payload) => ( HookRequest::Unrecognized(payload) => (
StatusCode::BAD_REQUEST, // StatusCode::BAD_REQUEST,
StatusCode::OK,
Json(HookResponse { Json(HookResponse {
ok: false, ok: false,
message: Some(format!("unrecognized event type: {payload}")), message: Some(format!("unrecognized event type: {payload}")),
@@ -139,9 +176,9 @@ where
} }
async fn check_hash(body: Bytes, secret: String, signature: Vec<u8>) -> Result<Bytes, Response> { async fn check_hash(body: Bytes, secret: String, signature: Vec<u8>) -> Result<Bytes, Response> {
tracing::info!("Checking signature {:02x?}", signature.as_slice()); tracing::debug!("Checking signature {:02x?}", signature.as_slice());
tracing::info!("Using secret {:?}", secret); // tracing::info!("Using secret {:?}", secret);
tracing::info!("and body {}", general_purpose::STANDARD.encode(&body)); tracing::debug!("and body {}", general_purpose::STANDARD.encode(&body));
let mut mac = HmacSha256::new_from_slice(secret.as_bytes()) let mut mac = HmacSha256::new_from_slice(secret.as_bytes())
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response())?; .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response())?;
mac.update(&body); mac.update(&body);
@@ -167,11 +204,19 @@ fn hex_to_bytes(s: &str) -> Option<Vec<u8>> {
pub(crate) async fn handle_push( pub(crate) async fn handle_push(
gitea: GiteaClient, gitea: GiteaClient,
kubernetes_client: kube::Client, kubernetes_client: kube::Client,
allowed_repos: &HashSet<String>,
webhook_payload: HookPush, webhook_payload: HookPush,
) -> Result<(), Box<dyn std::error::Error>> { ) -> Result<(), Box<dyn std::error::Error>> {
let repo_owner = webhook_payload.get_repo_owner()?; let repo_owner = webhook_payload.get_repo_owner()?;
let repo_name = webhook_payload.get_repo_name()?; let repo_name = webhook_payload.get_repo_name()?;
let pull_base_sha = webhook_payload.get_pull_base_sha()?; let pull_base_sha = webhook_payload.get_pull_base_sha()?;
if !allowed_repos.contains(&webhook_payload.repository.full_name) {
tracing::info!(
"{} is not an allowed repository.",
webhook_payload.repository.full_name
);
return Ok(());
}
let repo_tree = gitea.get_tree(repo_owner, repo_name, pull_base_sha).await?; let repo_tree = gitea.get_tree(repo_owner, repo_name, pull_base_sha).await?;
let remote_config = discover_webhook_bridge_config(&gitea, &repo_tree).await?; let remote_config = discover_webhook_bridge_config(&gitea, &repo_tree).await?;
let pipelines = discover_matching_push_triggers( let pipelines = discover_matching_push_triggers(