diff --git a/Cargo.lock b/Cargo.lock index eacd0e6bb..7fc49b5f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -650,6 +650,10 @@ version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +[[package]] +name = "bzr-store" +version = "0.1.0" + [[package]] name = "castaway" version = "0.1.2" @@ -2693,6 +2697,10 @@ dependencies = [ "maplit", ] +[[package]] +name = "janitor-git-store" +version = "0.1.0" + [[package]] name = "janitor-mail-filter" version = "0.0.0" @@ -2739,8 +2747,10 @@ name = "janitor-runner" version = "0.0.0" dependencies = [ "async-trait", + "axum", "breezyshim", "chrono", + "clap", "debian-control", "debversion", "janitor", diff --git a/Cargo.toml b/Cargo.toml index 0d93e1a78..e3d61d30b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -82,7 +82,10 @@ protobuf-codegen = "3" protoc-rust = "^2.0" [workspace] -members = [ "common-py", "differ", "mail-filter", "publish-py", "publish", "runner-py", "runner", "worker" ] +members = [ "common-py", "differ", "mail-filter", "publish-py", "publish", "runner-py", "runner", "worker" , "bzr-store", "git-store"] + +[workspace.package] +edition = "2021" [features] default = ["gcp", "gcs", "debian"] diff --git a/bzr-store/Cargo.toml b/bzr-store/Cargo.toml new file mode 100644 index 000000000..bd0821737 --- /dev/null +++ b/bzr-store/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "bzr-store" +version = "0.1.0" +edition.workspace = true + +[lib] + +[dependencies] diff --git a/bzr-store/src/lib.rs b/bzr-store/src/lib.rs new file mode 100644 index 000000000..e69de29bb diff --git a/common-py/Cargo.toml b/common-py/Cargo.toml index d9d6f46a3..00250efce 100644 --- a/common-py/Cargo.toml +++ b/common-py/Cargo.toml @@ -3,7 +3,7 @@ name = "common-py" version = "0.0.0" authors = ["Jelmer Vernooij "] publish = false -edition = "2021" +edition.workspace = true description = "Common bindings for the janitor - python" license = "GPL-3.0+" 
repository = "https://github.com/jelmer/janitor.git" diff --git a/differ/Cargo.toml b/differ/Cargo.toml index 363a7b8d4..599e2a0ea 100644 --- a/differ/Cargo.toml +++ b/differ/Cargo.toml @@ -2,7 +2,7 @@ name = "janitor-differ" version = "0.0.0" authors = ["Jelmer Vernooij "] -edition = "2021" +edition.workspace = true description = "Differ for the janitor" license = "GPL-3.0+" repository = "https://github.com/jelmer/janitor.git" diff --git a/git-store/Cargo.toml b/git-store/Cargo.toml new file mode 100644 index 000000000..725d9c963 --- /dev/null +++ b/git-store/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "janitor-git-store" +version = "0.1.0" +edition.workspace = true + +[lib] + +[dependencies] diff --git a/git-store/src/lib.rs b/git-store/src/lib.rs new file mode 100644 index 000000000..e69de29bb diff --git a/git-store/src/main.rs b/git-store/src/main.rs new file mode 100644 index 000000000..e7a11a969 --- /dev/null +++ b/git-store/src/main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("Hello, world!"); +} diff --git a/mail-filter/Cargo.toml b/mail-filter/Cargo.toml index 3d2c102b5..356ebe8aa 100644 --- a/mail-filter/Cargo.toml +++ b/mail-filter/Cargo.toml @@ -2,7 +2,7 @@ name = "janitor-mail-filter" version = "0.0.0" authors = ["Jelmer Vernooij "] -edition = "2018" +edition.workspace = true description = "Mail filter for the janitor" license = "Apache-2.0" repository = "https://github.com/jelmer/janitor.git" diff --git a/publish-py/Cargo.toml b/publish-py/Cargo.toml index 5831dfac7..53f42b62e 100644 --- a/publish-py/Cargo.toml +++ b/publish-py/Cargo.toml @@ -2,7 +2,7 @@ name = "publish-py" version = "0.0.0" authors = ["Jelmer Vernooij "] -edition = "2021" +edition.workspace = true description = "Publisher for the janitor - python bindings" publish = false license = "GPL-3.0+" diff --git a/publish/Cargo.toml b/publish/Cargo.toml index 4f23cd252..508c93f46 100644 --- a/publish/Cargo.toml +++ b/publish/Cargo.toml @@ -2,7 +2,7 @@ name = "janitor-publish" version = 
"0.0.0" authors = ["Jelmer Vernooij "] -edition = "2021" +edition.workspace = true description = "Publisher for the janitor" license = "GPL-3.0+" repository = "https://github.com/jelmer/janitor.git" diff --git a/publish/src/bin/janitor-publish.rs b/publish/src/bin/janitor-publish.rs index f72c7234e..efd9eb3eb 100644 --- a/publish/src/bin/janitor-publish.rs +++ b/publish/src/bin/janitor-publish.rs @@ -52,7 +52,7 @@ struct Args { /// Limit number of pushes per cycle. #[clap(long)] - push_limit: Option, + push_limit: Option, /// Require a binary diff when publishing merge requests. #[clap(long)] @@ -95,16 +95,16 @@ async fn main() -> Result<(), i32> { let config: &'static _ = Box::leak(config); - let bucket_rate_limiter: std::sync::Arc>> = - std::sync::Arc::new(std::sync::Mutex::new(if args.slowstart { - Box::new(SlowStartRateLimiter::new(args.max_mps_per_bucket)) + let bucket_rate_limiter: Mutex> = + std::sync::Mutex::new(if args.slowstart { + Box::new(SlowStartRateLimiter::new(args.max_mps_per_bucket)) as Box } else if let Some(max_mps_per_bucket) = args.max_mps_per_bucket { - Box::new(FixedRateLimiter::new(max_mps_per_bucket)) + Box::new(FixedRateLimiter::new(max_mps_per_bucket)) as Box } else { - Box::new(NonRateLimiter) - })); + Box::new(NonRateLimiter) as Box + }); - let forge_rate_limiter = Arc::new(Mutex::new(HashMap::new())); + let forge_rate_limiter = Mutex::new(HashMap::new()); let vcs_managers = Box::new(janitor::vcs::get_vcs_managers_from_config(config)); let vcs_managers: &'static _ = Box::leak(vcs_managers); @@ -147,15 +147,20 @@ async fn main() -> Result<(), i32> { .await, )); + let state = Arc::new(janitor_publish::AppState { + conn: db.clone(), + bucket_rate_limiter, + forge_rate_limiter, + push_limit: args.push_limit, + }); + if args.once { janitor_publish::publish_pending_ready( - db.clone(), + state, redis_async_connection.clone(), config, publish_worker.clone(), - bucket_rate_limiter.clone(), vcs_managers, - args.push_limit, 
args.require_binary_diff, ) .await @@ -176,44 +181,34 @@ async fn main() -> Result<(), i32> { } } else { tokio::spawn(janitor_publish::process_queue_loop( - db.clone(), + state.clone(), redis_async_connection.clone(), config, publish_worker.clone(), - bucket_rate_limiter.clone(), - forge_rate_limiter.clone(), vcs_managers, chrono::Duration::seconds(args.interval), !args.no_auto_publish, - args.push_limit, args.modify_mp_limit, args.require_binary_diff, )); - tokio::spawn(janitor_publish::refresh_bucket_mp_counts( - db.clone(), - bucket_rate_limiter.clone(), - )); + tokio::spawn(janitor_publish::refresh_bucket_mp_counts(state.clone())); tokio::spawn(janitor_publish::listen_to_runner( - db.clone(), + state.clone(), redis_async_connection.clone(), config, publish_worker.clone(), - bucket_rate_limiter.clone(), vcs_managers, args.require_binary_diff, )); let app = janitor_publish::web::app( + state.clone(), publish_worker.clone(), - bucket_rate_limiter.clone(), - forge_rate_limiter.clone(), vcs_managers, - db.clone(), args.require_binary_diff, args.modify_mp_limit, - args.push_limit, redis_async_connection.clone(), config, ); diff --git a/publish/src/lib.rs b/publish/src/lib.rs index da8bbc9e7..a7bee7437 100644 --- a/publish/src/lib.rs +++ b/publish/src/lib.rs @@ -574,16 +574,13 @@ fn get_merged_by_user_url(url: &url::Url, user: &str) -> Result } pub async fn process_queue_loop( - db: sqlx::PgPool, + state: Arc, redis: Option, config: &janitor::config::Config, publish_worker: Arc>, - bucket_rate_limiter: Arc>>, - forge_rate_limiter: Arc>>>, vcs_managers: &HashMap>, interval: chrono::Duration, auto_publish: bool, - push_limit: Option, modify_mp_limit: Option, require_binary_diff: bool, ) { @@ -591,22 +588,17 @@ pub async fn process_queue_loop( } pub async fn publish_pending_ready( - db: sqlx::PgPool, + state: Arc, redis: Option, config: &janitor::config::Config, publish_worker: Arc>, - bucket_rate_limiter: Arc>>, vcs_managers: &HashMap>, - push_limit: Option, 
require_binary_diff: bool, ) -> Result<(), PublishError> { todo!(); } -pub async fn refresh_bucket_mp_counts( - db: sqlx::PgPool, - bucket_rate_limiter: Arc>>, -) -> Result<(), sqlx::Error> { +pub async fn refresh_bucket_mp_counts(state: Arc) -> Result<(), sqlx::Error> { let mut per_bucket: HashMap> = HashMap::new(); @@ -620,7 +612,7 @@ pub async fn refresh_bucket_mp_counts( GROUP BY 1, 2 "#, ) - .fetch_all(&db) + .fetch_all(&state.conn) .await?; for row in rows { @@ -629,7 +621,8 @@ pub async fn refresh_bucket_mp_counts( .or_default() .insert(row.0, row.2 as usize); } - bucket_rate_limiter + state + .bucket_rate_limiter .lock() .unwrap() .set_mps_per_bucket(&per_bucket); @@ -637,11 +630,10 @@ pub async fn refresh_bucket_mp_counts( } pub async fn listen_to_runner( - db: sqlx::PgPool, + state: Arc, redis: Option, config: &janitor::config::Config, publish_worker: Arc>, - bucket_rate_limiter: Arc>>, vcs_managers: &HashMap>, require_binary_diff: bool, ) { @@ -678,3 +670,10 @@ mod tests { assert_eq!(finish_time + chrono::Duration::days(7), next_try_time); } } + +pub struct AppState { + pub conn: sqlx::PgPool, + pub bucket_rate_limiter: Mutex>, + pub forge_rate_limiter: Mutex>>, + pub push_limit: Option, +} diff --git a/publish/src/rate_limiter.rs b/publish/src/rate_limiter.rs index 07a2f5ff6..06743602f 100644 --- a/publish/src/rate_limiter.rs +++ b/publish/src/rate_limiter.rs @@ -16,6 +16,10 @@ pub trait RateLimiter: Send + Sync { fn inc(&mut self, bucket: &str); fn get_stats(&self) -> Option; + + fn get_max_open(&self, bucket: &str) -> Option { + None + } } pub struct NonRateLimiter; diff --git a/publish/src/state.rs b/publish/src/state.rs index cbd13422d..f96cc7aab 100644 --- a/publish/src/state.rs +++ b/publish/src/state.rs @@ -284,3 +284,46 @@ LIMIT 1 .fetch_optional(&*conn) .await } + +pub async fn get_publish_attempt_count( + conn: &PgPool, + revision: &RevisionId, + transient_result_codes: &[&str], +) -> Result { + Ok(sqlx::query_scalar::<_, i64>( + "select 
+count(*) from publish where revision = $1 and result_code != ALL($2::text[])",
+    )
+    .bind(revision)
+    .bind(transient_result_codes)
+    .fetch_one(&*conn)
+    .await? as usize)
+}
+
+pub async fn get_previous_mp_status(
+    conn: &PgPool,
+    codebase: &str,
+    campaign: &str,
+) -> Result, sqlx::Error> {
+    sqlx::query_as(
+        r#"
+WITH per_run_mps AS (
+    SELECT run.id AS run_id, run.finish_time,
+           merge_proposal.url AS mp_url, merge_proposal.status AS mp_status
+    FROM run
+    LEFT JOIN merge_proposal ON run.revision = merge_proposal.revision
+    WHERE run.codebase = $1
+    AND run.suite = $2
+    AND run.result_code = 'success'
+    AND merge_proposal.status NOT IN ('open', 'abandoned')
+    GROUP BY run.id, merge_proposal.url
+)
+SELECT mp_url, mp_status FROM per_run_mps
+WHERE run_id = (
+    SELECT run_id FROM per_run_mps ORDER BY finish_time DESC LIMIT 1)
+"#,
+    )
+    .bind(codebase)
+    .bind(campaign)
+    .fetch_all(&*conn)
+    .await
+}
diff --git a/publish/src/web.rs b/publish/src/web.rs
index 90217a80e..c3433f639 100644
--- a/publish/src/web.rs
+++ b/publish/src/web.rs
@@ -1,4 +1,9 @@
 use crate::rate_limiter::RateLimiter;
+use crate::AppState;
+use axum::extract::{Path, State};
+use axum::http::StatusCode;
+use axum::response::{IntoResponse, Json};
+use axum::routing::{delete, get, post, put};
 use axum::Router;
 use breezyshim::forge::Forge;
 use janitor::vcs::{VcsManager, VcsType};
@@ -6,17 +11,509 @@ use sqlx::PgPool;
 use std::collections::HashMap;
 use std::sync::{Arc, Mutex};
 
+async fn get_merge_proposals_by_campaign() {
+    unimplemented!()
+}
+
+async fn get_merge_proposals_by_codebase() {
+    unimplemented!()
+}
+
+async fn post_merge_proposal() {
+    unimplemented!()
+}
+
+async fn absorbed() {
+    unimplemented!()
+}
+
+async fn get_policy() {
+    unimplemented!()
+}
+
+async fn get_policies() {
+    unimplemented!()
+}
+
+async fn put_policy() {
+    unimplemented!()
+}
+
+async fn put_policies() {
+    unimplemented!()
+}
+
+async fn update_merge_proposal() {
+    unimplemented!()
+}
+
+async fn 
delete_policy() { + unimplemented!() +} + +async fn consider() { + unimplemented!() +} + +async fn get_publish_by_id() { + unimplemented!() +} + +async fn publish() { + unimplemented!() +} + +async fn get_credentials() { + unimplemented!() +} + +async fn health() -> &'static str { + "OK" +} + +async fn ready() -> &'static str { + "OK" +} + +async fn scan() { + unimplemented!() +} + +async fn check_stragglers() { + unimplemented!() +} + +async fn refresh_status() { + unimplemented!() +} + +async fn autopublish() { + unimplemented!() +} + +async fn get_rate_limit( + State(state): State>, + Path(bucket): Path, +) -> impl IntoResponse { + let stats = state.bucket_rate_limiter.lock().unwrap().get_stats(); + + if let Some(stats) = stats { + if let Some(current_open) = stats.per_bucket.get(&bucket) { + let max_open = state + .bucket_rate_limiter + .lock() + .unwrap() + .get_max_open(&bucket); + ( + StatusCode::OK, + Json( + serde_json::to_value(&BucketRateLimit { + open: Some(*current_open), + max_open, + remaining: max_open.map(|max_open| max_open - *current_open), + }) + .unwrap(), + ), + ) + } else { + ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "reason": "No such rate limit bucket", + "bucket": bucket, + })), + ) + } + } else { + ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "reason": "No rate limit stats available", + })), + ) + } +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct BucketRateLimit { + open: Option, + max_open: Option, + remaining: Option, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct RateLimitsInfo { + per_bucket: HashMap, + per_forge: HashMap>, + push_limit: Option, +} + +async fn get_all_rate_limits(State(state): State>) -> impl IntoResponse { + let stats = state.bucket_rate_limiter.lock().unwrap().get_stats(); + + let per_bucket = if let Some(stats) = stats { + let mut per_bucket = HashMap::new(); + for (bucket, current_open) in stats.per_bucket.iter() { + let max_open = state + .bucket_rate_limiter 
+ .lock() + .unwrap() + .get_max_open(bucket); + per_bucket.insert( + bucket.clone(), + BucketRateLimit { + open: Some(*current_open), + max_open, + remaining: max_open.map(|max_open| max_open - *current_open), + }, + ); + } + per_bucket + } else { + HashMap::new() + }; + + Json( + serde_json::to_value(&RateLimitsInfo { + per_bucket, + per_forge: state + .forge_rate_limiter + .lock() + .unwrap() + .iter() + .map(|(f, t)| (f.forge_name().to_string(), *t)) + .collect(), + push_limit: state.push_limit, + }) + .unwrap(), + ) +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct Blocker { + result: bool, + details: D, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct BlockerSuccessDetails { + result_code: String, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct BlockerInactiveDetails { + inactive: bool, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct BlockerCommandDetails { + correct: String, + actual: String, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct Review { + reviewer: String, + reviewed_at: chrono::DateTime, + comment: String, + verdict: String, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct BlockerPublishStatusDetails { + status: String, + reviews: HashMap, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct BlockerBackoffDetails { + attempt_count: usize, + next_try_time: chrono::DateTime, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct BlockerProposeRateLimitDetails { + open: Option, + max_open: Option, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct BlockerChangeSetDetails { + change_set_id: String, + change_set_state: String, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct BlockerPreviousMpDetails { + url: String, + status: String, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct BlockerInfo { + success: Blocker, + inactive: Blocker, + command: Blocker, + publish_status: Blocker, + backoff: Blocker, + 
    propose_rate_limit: Blocker,
+    change_set: Blocker,
+    previous_mp: Blocker>,
+}
+
+async fn get_blockers(
+    State(state): State>,
+    Path(id): Path,
+) -> impl IntoResponse {
+    #[derive(sqlx::FromRow)]
+    struct RunDetails {
+        id: String,
+        codebase: String,
+        campaign: String,
+        finish_time: chrono::DateTime,
+        run_command: String,
+        publish_status: String,
+        rate_limit_bucket: Option,
+        revision: Option,
+        policy_command: String,
+        result_code: String,
+        change_set_state: String,
+        change_set: String,
+        inactive: bool,
+    }
+
+    let run = sqlx::query_as::<_, RunDetails>(
+        r#"
+SELECT
+    run.id AS id,
+    run.codebase AS codebase,
+    run.suite AS campaign,
+    run.finish_time AS finish_time,
+    run.command AS run_command,
+    run.publish_status AS publish_status,
+    named_publish_policy.rate_limit_bucket AS rate_limit_bucket,
+    run.revision AS revision,
+    candidate.command AS policy_command,
+    run.result_code AS result_code,
+    change_set.state AS change_set_state,
+    change_set.id AS change_set,
+    codebase.inactive AS inactive
+FROM run
+INNER JOIN codebase ON codebase.name = run.codebase
+INNER JOIN candidate ON candidate.codebase = run.codebase AND candidate.suite = run.suite
+INNER JOIN named_publish_policy ON candidate.publish_policy = named_publish_policy.name
+INNER JOIN change_set ON change_set.id = run.change_set
+WHERE run.id = $1
+"#,
+    )
+    .bind(&id)
+    .fetch_optional(&state.conn)
+    .await
+    .unwrap();
+
+    let run = if let Some(run) = run {
+        run
+    } else {
+        return (
+            axum::http::StatusCode::NOT_FOUND,
+            Json(serde_json::json!({
+                "reason": "No such publish-ready run",
+                "run_id": id,
+            })),
+        );
+    };
+
+    #[derive(sqlx::FromRow)]
+    struct ReviewDetails {
+        reviewer: String,
+        reviewed_at: chrono::DateTime,
+        comment: String,
+        verdict: String,
+    }
+
+    let reviews = sqlx::query_as::<_, ReviewDetails>("SELECT * FROM review WHERE run_id = $1")
+        .bind(&id)
+        .fetch_all(&state.conn)
+        .await
+        .unwrap();
+
+    let attempt_count = if let Some(revision) = run.revision
{ + crate::state::get_publish_attempt_count(&state.conn, &revision, &["differ-unreachable"]) + .await + .unwrap() + } else { + 0 + }; + + let last_mps = crate::state::get_previous_mp_status(&state.conn, &run.codebase, &run.campaign) + .await + .unwrap(); + + let success = Blocker { + result: run.result_code == "success", + details: BlockerSuccessDetails { + result_code: run.result_code, + }, + }; + + let inactive = Blocker { + result: !run.inactive, + details: BlockerInactiveDetails { + inactive: run.inactive, + }, + }; + + let command = Blocker { + result: run.run_command == run.policy_command, + details: BlockerCommandDetails { + correct: run.policy_command, + actual: run.run_command, + }, + }; + + let publish_status = Blocker { + result: run.publish_status == "approved", + details: BlockerPublishStatusDetails { + status: run.publish_status, + reviews: reviews + .into_iter() + .map(|row| { + ( + row.reviewer.clone(), + Review { + reviewer: row.reviewer, + reviewed_at: row.reviewed_at, + comment: row.comment, + verdict: row.verdict, + }, + ) + }) + .collect(), + }, + }; + + let next_try_time = crate::calculate_next_try_time(run.finish_time, attempt_count); + + let backoff = Blocker { + result: chrono::Utc::now() >= next_try_time, + details: BlockerBackoffDetails { + attempt_count, + next_try_time, + }, + }; + + // TODO(jelmer): include forge rate limits? 
+ + let propose_rate_limit = { + if let Some(bucket) = run.rate_limit_bucket { + let open = state + .bucket_rate_limiter + .lock() + .unwrap() + .get_stats() + .and_then(|stats| stats.per_bucket.get(&bucket).cloned()); + let max_open = state + .bucket_rate_limiter + .lock() + .unwrap() + .get_max_open(&bucket); + Blocker { + result: state + .bucket_rate_limiter + .lock() + .unwrap() + .check_allowed(&bucket), + details: BlockerProposeRateLimitDetails { open, max_open }, + } + } else { + Blocker { + result: true, + details: BlockerProposeRateLimitDetails { + open: None, + max_open: None, + }, + } + } + }; + + let change_set = Blocker { + result: ["publishing", "ready"].contains(&run.change_set_state.as_str()), + details: BlockerChangeSetDetails { + change_set_id: run.change_set, + change_set_state: run.change_set_state, + }, + }; + + let previous_mp = Blocker { + result: last_mps + .iter() + .all(|last_mp| last_mp.1 != "rejected" && last_mp.1 != "closed"), + details: last_mps + .iter() + .map(|last_mp| BlockerPreviousMpDetails { + url: last_mp.0.clone(), + status: last_mp.1.clone(), + }) + .collect(), + }; + + ( + StatusCode::OK, + Json( + serde_json::to_value(&BlockerInfo { + success, + previous_mp, + change_set, + inactive, + command, + publish_status, + backoff, + propose_rate_limit, + }) + .unwrap(), + ), + ) +} + pub fn app( + state: Arc, worker: Arc>, - bucket_rate_limiter: Arc>>, - forge_rate_limiter: Arc>>>, vcs_managers: &HashMap>, - db: PgPool, require_binary_diff: bool, modify_mp_limit: Option, - push_limit: Option, redis: Option, config: &janitor::config::Config, ) -> Router { Router::new() + .route( + "/:campaign/merge-proposals", + get(get_merge_proposals_by_campaign), + ) + .route( + "/c/:codebase/merge-proposals", + get(get_merge_proposals_by_codebase), + ) + .route("/merge-proposals", get(post_merge_proposal)) + .route("/absorbed", get(absorbed)) + .route("/policy/:name", get(get_policy)) + .route("/policy", get(get_policies)) + 
    .route("/policy/:name", put(put_policy))
+        .route("/policy", put(put_policies))
+        .route("/merge-proposal", post(update_merge_proposal))
+        .route("/policy/:name", delete(delete_policy))
+        .route("/consider/:id", post(consider))
+        .route("/publish/:id", get(get_publish_by_id))
+        .route("/:campaign/:codebase/publish", post(publish))
+        .route("/credentials", get(get_credentials))
+        .route("/health", get(health))
+        .route("/ready", get(ready))
+        .route("/scan", post(scan))
+        .route("/check-stragglers", post(check_stragglers))
+        .route("/refresh-status", post(refresh_status))
+        .route("/autopublish", post(autopublish))
+        .route("/rate-limits/:bucket", get(get_rate_limit))
+        .route("/rate-limits", get(get_all_rate_limits))
+        .route("/blockers/:id", get(get_blockers))
+        .with_state(state)
+}
diff --git a/py/janitor/publish_one.py b/py/janitor/publish_one.py
deleted file mode 100644
index 9310221cb..000000000
--- a/py/janitor/publish_one.py
+++ /dev/null
@@ -1,660 +0,0 @@
-#!/usr/bin/python3
-# Copyright (C) 2019 Jelmer Vernooij 
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-
-"""Publisher for a single branch.
-
-This is the worker module for the publish service. For each branch that needs
-to be published, this module gets invoked. 
It accepts some JSON on stdin with a -request, and writes results to standard out as JSON. -""" - -import logging -import os -import shlex -import traceback -import urllib.error -import urllib.parse -import urllib.request -from contextlib import ExitStack -from email.utils import parseaddr -from typing import Any, Optional - -from breezy.branch import Branch -from breezy.errors import ( - DivergedBranches, - NoSuchRevision, - PermissionDenied, - UnexpectedHttpStatus, -) -from breezy.forge import ( - Forge, - ForgeLoginRequired, - MergeProposal, - MergeProposalExists, - NoSuchProject, - SourceNotDerivedFromTarget, - UnsupportedForge, - determine_title, - get_forge, -) -from breezy.git.remote import RemoteGitBranch, RemoteGitError -from breezy.plugins.gitlab.forge import ( - ForkingDisabled, - GitLabConflict, - ProjectCreationTimeout, -) -from breezy.transport import Transport -from jinja2 import ( - Environment, - FileSystemLoader, - Template, - TemplateNotFound, - TemplateSyntaxError, - select_autoescape, -) -from silver_platter import ( - BranchMissing, - BranchRateLimited, - BranchTemporarilyUnavailable, - BranchUnavailable, - EmptyMergeProposal, - InsufficientChangesForNewProposal, - PublishResult, - create_temp_sprout, - find_existing_proposed, - full_branch_url, - merge_conflicts, - open_branch, - publish_changes, -) - -from ._launchpad import override_launchpad_consumer_name -from .debian.debdiff import debdiff_is_empty, markdownify_debdiff - -override_launchpad_consumer_name() - - -MODE_SKIP = "skip" -MODE_BUILD_ONLY = "build-only" -MODE_PUSH = "push" -MODE_PUSH_DERIVED = "push-derived" -MODE_PROPOSE = "propose" -MODE_ATTEMPT_PUSH = "attempt-push" -MODE_BTS = "bts" -SUPPORTED_MODES = [ - MODE_PUSH_DERIVED, - MODE_PROPOSE, - MODE_PUSH, - MODE_BUILD_ONLY, - MODE_SKIP, -] - - -class PublishFailure(Exception): - def __init__(self, code, description) -> None: - self.code = code - self.description = description - - -class PublishNothingToDo(Exception): - def 
__init__(self, description) -> None: - self.description = description - - -class MergeConflict(Exception): - def __init__(self, target_branch, source_branch) -> None: - self.target_branch = target_branch - self.source_branch = source_branch - - -class DebdiffRetrievalError(Exception): - def __init__(self, reason) -> None: - self.reason = reason - - -def publish( - *, - template_env, - campaign: str, - commit_message_template: Optional[str], - title_template: Optional[str], - codemod_result: Any, - mode: str, - role: str, - forge: Forge, - target_branch: Branch, - source_branch: Branch, - derived_branch_name: str, - resume_branch: Optional[Branch] = None, - log_id: Optional[str] = None, - existing_proposal: Optional[MergeProposal] = None, - allow_create_proposal: bool = False, - debdiff: Optional[bytes] = None, - reviewers: Optional[list[str]] = None, - result_tags: Optional[dict[str, bytes]] = None, - stop_revision: Optional[bytes] = None, - extra_context: Optional[dict[str, Any]] = None, -): - def get_proposal_description(description_format, existing_proposal): - vs = { - "log_id": log_id, - "campaign": campaign, - "role": role, - } - if extra_context: - vs.update(extra_context) - if codemod_result: - vs.update(codemod_result) - vs["codemod"] = codemod_result - if debdiff: - vs["debdiff"] = debdiff.decode("utf-8", "replace") - if description_format == "markdown": - template = template_env.get_template(campaign + ".md") - else: - template = template_env.get_template(campaign + ".txt") - return template.render(vs) - - def get_proposal_commit_message(existing_proposal): - if commit_message_template: - template = Template(commit_message_template) - return template.render(codemod_result or {}) - else: - return None - - def get_proposal_title(existing_proposal): - if title_template: - template = Template(title_template) - return template.render(codemod_result or {}) - else: - try: - description = get_proposal_description("text", existing_proposal) - except 
TemplateNotFound: - description = get_proposal_description("markdown", existing_proposal) - return determine_title(description) - - with target_branch.lock_read(), source_branch.lock_read(): - try: - if merge_conflicts(target_branch, source_branch, stop_revision): - raise MergeConflict(target_branch, source_branch) - except NoSuchRevision as e: - raise PublishFailure( - description=f"Revision missing: {e.revision}", # type: ignore - code="revision-missing", - ) from e - - labels: Optional[list[str]] - - if forge and forge.supports_merge_proposal_labels: - labels = [campaign] - else: - labels = None - try: - return publish_changes( - local_branch=source_branch, - main_branch=target_branch, - resume_branch=resume_branch, - mode=mode, - name=derived_branch_name, - get_proposal_description=get_proposal_description, - get_proposal_commit_message=get_proposal_commit_message, - get_proposal_title=get_proposal_title, - forge=forge, - allow_create_proposal=allow_create_proposal, - overwrite_existing=True, - existing_proposal=existing_proposal, - labels=labels, - tags=result_tags, - allow_collaboration=True, - reviewers=reviewers, - stop_revision=stop_revision, - ) - except DivergedBranches as e: - raise PublishFailure( - description="Upstream branch has diverged from local changes.", - code="diverged-branches", - ) from e - except UnsupportedForge as e: - raise PublishFailure( - description=f"Forge unsupported: {target_branch.repository.user_url}.", - code="hoster-unsupported", - ) from e - except NoSuchProject as e: - raise PublishFailure( - description=f"project {e.project} was not found", code="project-not-found" - ) from e - except ForkingDisabled as e: - raise PublishFailure( - description=f"Forking disabled: {target_branch.repository.user_url}", - code="forking-disabled", - ) from e - except PermissionDenied as e: - raise PublishFailure(description=str(e), code="permission-denied") from e - except TemplateNotFound as e: - raise PublishFailure(description=str(e), 
code="template-not-found") from e - except TemplateSyntaxError as e: - raise PublishFailure(description=str(e), code="template-syntax-error") from e - except MergeProposalExists as e: - raise PublishFailure(description=str(e), code="merge-proposal-exists") from e - except GitLabConflict as e: - raise PublishFailure( - code="gitlab-conflict", - description=( - "Conflict during GitLab operation. " "Reached repository limit?" - ), - ) from e - except SourceNotDerivedFromTarget as e: - raise PublishFailure( - code="source-not-derived-from-target", - description=( - "The source repository is not a fork of the " "target repository." - ), - ) from e - except ProjectCreationTimeout as e: - raise PublishFailure( - code="project-creation-timeout", - description="Forking the project (to %s) timed out (%ds)" - % (e.project, e.timeout), - ) from e - except RemoteGitError as exc: - raise PublishFailure( - code="remote-git-error", description=f"remote git error: {exc}" - ) from exc - except InsufficientChangesForNewProposal as e: - raise PublishNothingToDo("not enough changes for a new merge proposal") from e - except BranchTemporarilyUnavailable as e: - raise PublishFailure("branch-temporarily-unavailable", str(e)) from e - except BranchUnavailable as e: - raise PublishFailure("branch-unavailable", str(e)) from e - - -class DebdiffMissingRun(Exception): - """Raised when the debdiff was missing a run.""" - - def __init__(self, missing_run_id) -> None: - self.missing_run_id = missing_run_id - - -class DifferUnavailable(Exception): - """The differ was unavailable.""" - - def __init__(self, reason) -> None: - self.reason = reason - - -def get_debdiff(differ_url: str, unchanged_id: str, log_id: str) -> bytes: - debdiff_url = urllib.parse.urljoin( - differ_url, f"/debdiff/{unchanged_id}/{log_id}?filter_boring=1" - ) - headers = {"Accept": "text/plain"} - - request = urllib.request.Request(debdiff_url, headers=headers) - try: - with urllib.request.urlopen(request) as f: - return 
f.read() - except urllib.error.HTTPError as e: - if e.code == 404: - if "unavailable_run_id" in e.headers: - raise DebdiffMissingRun(e.headers["unavailable_run_id"]) from e - raise - elif e.code in (400, 500, 502, 503, 504): - raise DebdiffRetrievalError( - "Error %d: %s" % (e.code, e.file.read().decode("utf-8", "replace")) # type: ignore - ) from e - else: - raise - except ConnectionResetError as e: - raise DifferUnavailable(str(e)) from e - except urllib.error.URLError as e: - raise DebdiffRetrievalError(str(e)) from e - - -def _drop_env(args): - while args and "=" in args[0]: - args.pop(0) - - -def publish_one( - template_env, - campaign: str, - command, - codemod_result, - target_branch_url: str, - mode: str, - role: str, - revision: bytes, - log_id: str, - unchanged_id: str, - source_branch_url: str, - differ_url: str, - derived_branch_name: str, - require_binary_diff: bool = False, - possible_forges: Optional[list[Forge]] = None, - possible_transports: Optional[list[Transport]] = None, - allow_create_proposal: bool = False, - reviewers: Optional[list[str]] = None, - result_tags: Optional[dict[str, bytes]] = None, - commit_message_template: Optional[str] = None, - title_template: Optional[str] = None, - existing_mp_url: Optional[str] = None, - extra_context: Optional[dict[str, Any]] = None, -) -> tuple[PublishResult, str]: - args = shlex.split(command) - _drop_env(args) - - with ExitStack() as es: - try: - source_branch = open_branch( - source_branch_url, possible_transports=possible_transports - ) - except BranchTemporarilyUnavailable as e: - raise PublishFailure("local-branch-temporarily-unavailable", str(e)) from e - except BranchUnavailable as e: - raise PublishFailure("local-branch-unavailable", str(e)) from e - except BranchMissing as e: - raise PublishFailure("local-branch-missing", str(e)) from e - - if isinstance(source_branch, RemoteGitBranch): - local_tree, destroy = create_temp_sprout(source_branch) - es.callback(destroy) - source_branch = 
local_tree.branch - - try: - target_branch = open_branch( - target_branch_url, possible_transports=possible_transports - ) - except BranchRateLimited as e: - raise PublishFailure("branch-rate-limited", str(e)) from e - except BranchTemporarilyUnavailable as e: - raise PublishFailure("branch-temporarily-unavailable", str(e)) from e - except BranchUnavailable as e: - raise PublishFailure("branch-unavailable", str(e)) from e - except BranchMissing as e: - raise PublishFailure("branch-missing", str(e)) from e - - try: - if mode == MODE_BTS: - raise NotImplementedError - else: - forge = get_forge(target_branch, possible_forges=possible_forges) - except UnsupportedForge as e: - if mode not in (MODE_PUSH, MODE_BUILD_ONLY): - netloc = urllib.parse.urlparse(target_branch.user_url).netloc - raise PublishFailure( - description=f"Forge unsupported: {netloc}.", - code="hoster-unsupported", - ) from e - # We can't figure out what branch to resume from when there's no forge - # that can tell us. - resume_branch = None - existing_proposal = None - if mode == MODE_PUSH: - logging.warning( - "Unsupported forge (%s), will attempt to push to %s", - e, - full_branch_url(target_branch), - ) - forge = None - except ForgeLoginRequired as e: - if mode not in (MODE_PUSH, MODE_BUILD_ONLY): - netloc = urllib.parse.urlparse(target_branch.user_url).netloc - raise PublishFailure( - description=f"Forge {netloc} supported but no login known.", - code="hoster-no-login", - ) from e - # We can't figure out what branch to resume from when there's no forge - # that can tell us. 
- resume_branch = None - existing_proposal = None - if mode == MODE_PUSH: - logging.warning( - "No login for forge (%s), will attempt to push to %s", - e, - full_branch_url(target_branch), - ) - forge = None - except UnexpectedHttpStatus as e: - if e.code == 502: - raise PublishFailure("bad-gateway", str(e)) from e - elif e.code == 429: - raise PublishFailure("too-many-requests", str(e)) from e - else: - traceback.print_exc() - raise PublishFailure(f"http-{e.code}", str(e)) from e - else: - if existing_mp_url is not None: - try: - existing_proposal = forge.get_proposal_by_url(existing_mp_url) - except UnsupportedForge as e: - raise PublishFailure("forge-mp-url-mismatch", str(e)) from e - overwrite: Optional[bool] = True - try: - resume_branch = open_branch( - existing_proposal.get_source_branch_url(), - possible_transports=possible_transports, - ) - except BranchRateLimited as e: - raise PublishFailure("resume-branch-rate-limited", str(e)) from e - except BranchTemporarilyUnavailable as e: - raise PublishFailure( - "resume-branch-temporarily-unavailable", str(e) - ) from e - except BranchUnavailable as e: - raise PublishFailure("resume-branch-unavailable", str(e)) from e - except BranchMissing as e: - raise PublishFailure("resume-branch-missing", str(e)) from e - else: - try: - ( - resume_branch, - overwrite, - existing_proposals, - ) = find_existing_proposed( - target_branch, forge, derived_branch_name - ) - except NoSuchProject as e: - if mode not in (MODE_PUSH, MODE_BUILD_ONLY): - raise PublishFailure( - description=f"Project {e.project} not found.", - code="project-not-found", - ) from e - resume_branch = None - existing_proposal = None - except ForgeLoginRequired as e: - raise PublishFailure( - description=f"Forge {forge} supported but no login known.", - code="hoster-no-login", - ) from e - except PermissionDenied as e: - raise PublishFailure( - description=( - f"Permission denied while finding existing proposal: {e.extra}" - ), - code="permission-denied", - 
) from e - else: - if existing_proposals and len(existing_proposals) > 1: - existing_proposal = existing_proposals[0] - logging.warning( - "Multiple existing proposals: %r. Using %r", - existing_proposals, - existing_proposal, - ) - elif existing_proposals and len(existing_proposals) > 0: - existing_proposal = existing_proposals[0] - else: - existing_proposal = None - - debdiff: Optional[bytes] - try: - debdiff = get_debdiff(differ_url, unchanged_id, log_id) - except DebdiffRetrievalError as e: - raise PublishFailure( - description=f"Error from differ for build diff: {e.reason}", - code="differ-error", - ) from e - except DifferUnavailable as e: - raise PublishFailure( - description=f"Unable to contact differ for build diff: {e.reason}", - code="differ-unreachable", - ) from e - except DebdiffMissingRun as e: - if mode in (MODE_PROPOSE, MODE_ATTEMPT_PUSH) and require_binary_diff: - if e.missing_run_id == log_id: - raise PublishFailure( - description=( - "Build diff is not available. " - f"Run ({log_id}) not yet published?" - ), - code="missing-build-diff-self", - ) from e - else: - raise PublishFailure( - description=( - "Binary debdiff is not available. " - f"Control run ({e.missing_run_id}) not published?" 
- ), - code="missing-build-diff-control", - ) from e - debdiff = None - - try: - publish_result = publish( - template_env=template_env, - campaign=campaign, - commit_message_template=commit_message_template, - title_template=title_template, - codemod_result=codemod_result, - mode=mode, - role=role, - forge=forge, - target_branch=target_branch, - source_branch=source_branch, - derived_branch_name=derived_branch_name, - resume_branch=resume_branch, - log_id=log_id, - existing_proposal=existing_proposal, - allow_create_proposal=allow_create_proposal, - debdiff=debdiff, - reviewers=reviewers, - result_tags=result_tags, - stop_revision=revision, - extra_context=extra_context, - ) - except EmptyMergeProposal as e: - raise PublishFailure( - code="empty-merge-proposal", - description=( - "No changes to propose; " "changes made independently upstream?" - ), - ) from e - except MergeConflict as e: - raise PublishFailure( - code="merge-conflict", - description="merge would conflict (upstream changes?)", - ) from e - - return publish_result, derived_branch_name - - -def load_template_env(path): - env = Environment( - loader=FileSystemLoader(path), - trim_blocks=True, - lstrip_blocks=True, - autoescape=select_autoescape(disabled_extensions=("txt", "md"), default=False), - ) - env.globals.update( - { - "debdiff_is_empty": debdiff_is_empty, - "markdownify_debdiff": markdownify_debdiff, - "parseaddr": parseaddr, - } - ) - return env - - -if __name__ == "__main__": - import argparse - import json - import sys - - parser = argparse.ArgumentParser() - parser.add_argument( - "--template-env-path", - type=str, - default=os.path.join(os.path.dirname(__file__), "..", "proposal-templates"), - help="Path to templates", - ) - args = parser.parse_args() - - logging.basicConfig(level=logging.INFO, stream=sys.stderr) - - request = json.load(sys.stdin) - - template_env = load_template_env(args.template_env_path) - template_env.globals["external_url"] = ( - request["external_url"].rstrip("/") if 
request["external_url"] else None - ) - - try: - publish_result, branch_name = publish_one( - template_env, - campaign=request["campaign"], - derived_branch_name=request["derived_branch_name"], - command=request["command"], - codemod_result=request["codemod_result"], - target_branch_url=request["target_branch_url"], - mode=request["mode"], - role=request["role"], - log_id=request["log_id"], - unchanged_id=request["unchanged_id"], - source_branch_url=request["source_branch_url"], - require_binary_diff=request["require-binary-diff"], - possible_forges=None, - possible_transports=None, - allow_create_proposal=request["allow_create_proposal"], - differ_url=request["differ_url"], - reviewers=request.get("reviewers"), - revision=request["revision"].encode("utf-8"), - result_tags=request.get("tags"), - commit_message_template=request.get("commit_message_template"), - title_template=request.get("title_template"), - existing_mp_url=request.get("existing_mp_url"), - extra_context=request.get("extra_context"), - ) - except PublishFailure as e: - json.dump({"code": e.code, "description": e.description}, sys.stdout) - sys.exit(1) - except PublishNothingToDo as e: - json.dump({"code": "nothing-to-do", "description": e.description}, sys.stdout) - sys.exit(1) - - result = {} - if publish_result.proposal: - result["proposal_url"] = publish_result.proposal.url - result["proposal_web_url"] = publish_result.proposal.get_web_url() - result["is_new"] = publish_result.is_new - result["branch_name"] = branch_name - result["target_branch_url"] = publish_result.target_branch.user_url.rstrip("/") - if publish_result.forge: - result["target_branch_web_url"] = publish_result.forge.get_web_url( - publish_result.target_branch - ) - - json.dump(result, sys.stdout) - - sys.exit(0) diff --git a/runner-py/Cargo.toml b/runner-py/Cargo.toml index 136c58b15..40130e087 100644 --- a/runner-py/Cargo.toml +++ b/runner-py/Cargo.toml @@ -3,7 +3,7 @@ name = "runner-py" version = "0.0.0" authors = ["Jelmer 
Vernooij "] publish = false -edition = "2021" +edition.workspace = true description = "Runner for the janitor - python bindings" license = "GPL-3.0+" repository = "https://github.com/jelmer/janitor.git" diff --git a/runner/Cargo.toml b/runner/Cargo.toml index 3ede282ee..c939587e6 100644 --- a/runner/Cargo.toml +++ b/runner/Cargo.toml @@ -2,7 +2,7 @@ name = "janitor-runner" version = "0.0.0" authors = ["Jelmer Vernooij "] -edition = "2021" +edition.workspace = true description = "Runner for the janitor" license = "GPL-3.0+" repository = "https://github.com/jelmer/janitor.git" @@ -26,6 +26,8 @@ debian-control = { version = "0.1.28", optional = true } log.workspace = true silver-platter = { workspace = true, features = ["debian"] } reqwest.workspace = true +clap = { workspace = true, features = ["derive"], optional = true } +axum.workspace = true [dev-dependencies] maplit = { workspace = true } @@ -33,3 +35,9 @@ maplit = { workspace = true } [features] default = ["debian"] debian = ["janitor/debian", "dep:debversion", "dep:debian-control"] +cli = ["dep:clap"] + +[[bin]] +name = "janitor-runner" +path = "src/main.rs" +required-features = ["cli"] diff --git a/runner/src/lib.rs b/runner/src/lib.rs index fdb5facd5..0be880392 100644 --- a/runner/src/lib.rs +++ b/runner/src/lib.rs @@ -6,6 +6,7 @@ use url::Url; pub mod backchannel; pub mod config_generator; +pub mod web; pub fn committer_env(committer: Option<&str>) -> HashMap { let mut env = HashMap::new(); @@ -276,6 +277,8 @@ pub struct ResultRemote { url: Url, } +pub struct AppState {} + #[cfg(test)] mod tests { use super::*; diff --git a/runner/src/main.rs b/runner/src/main.rs new file mode 100644 index 000000000..5ab397862 --- /dev/null +++ b/runner/src/main.rs @@ -0,0 +1,80 @@ +use clap::Parser; +use std::path::PathBuf; + +#[derive(Parser)] +struct Args { + #[clap(long, default_value = "localhost")] + listen_address: String, + + #[clap(long, default_value = "9911")] + port: u16, + + #[clap(long, default_value = 
"9919")] + public_port: u16, + + #[clap(long)] + /// Command to run to check codebase before pushing + post_check: Option, + + #[clap(long)] + /// Command to run to check whether to process codebase + pre_check: Option, + + #[clap(long)] + /// Use cached branches only. + use_cached_only: bool, + + #[clap(long, default_value = "janitor.conf")] + /// Path to configuration. + config: Option, + + #[clap(long)] + /// Backup directory to write files to if artifact or log manager is unreachable. + backup_directory: Option, + + #[clap(long)] + /// Public vcs location (used for URLs handed to worker) + public_vcs_location: Option, + + #[clap(long)] + /// Base location for our own APT archive + public_apt_archive_location: Option, + + #[clap(long)] + public_dep_server_url: Option, + + #[clap(flatten)] + logging: janitor::logging::LoggingArgs, + + #[clap(long)] + /// Print debugging info + debug: bool, + + #[clap(long, default_value = "60")] + /// Time before marking a run as having timed out (minutes) + run_timeout: u64, + + #[clap(long)] + /// Avoid processing runs on a host (e.g. 
'salsa.debian.org') + avoid_host: Vec, +} + +#[tokio::main] +async fn main() -> Result<(), i32> { + let args = Args::parse(); + + args.logging.init(); + + let state = Arc::new(AppState {}); + + let app = janitor_runner::web::app(state.clone()); + + // run it + let addr = SocketAddr::new(args.listen_address, args.new_port); + log::info!("listening on {}", addr); + + let listener = tokio::net::TcpListener::bind(addr).await?; + axum::serve(listener, app.into_make_service()).await?; + + Ok(()) +} diff --git a/runner/src/web.rs b/runner/src/web.rs new file mode 100644 index 000000000..2dd3adfa5 --- /dev/null +++ b/runner/src/web.rs @@ -0,0 +1,135 @@ +use crate::AppState; +use axum::{ + extract::Path, extract::State, response::IntoResponse, routing::delete, routing::get, + routing::post, Router, +}; +use std::sync::Arc; + +async fn queue_position(State(state): State>) { + unimplemented!() +} + +async fn schedule_control(State(state): State>) { + unimplemented!() +} + +async fn schedule(State(state): State>) { + unimplemented!() +} + +async fn status(State(state): State>) { + unimplemented!() +} + +async fn log_index(State(state): State>, Path(id): Path) { + unimplemented!() +} + +async fn log( + State(state): State>, + Path(id): Path, + Path(filename): Path, +) { + unimplemented!() +} + +async fn kill(State(state): State>, Path(id): Path) { + unimplemented!() +} + +async fn get_codebases(State(state): State>) { + unimplemented!() +} + +async fn update_codebases(State(state): State>) { + unimplemented!() +} + +async fn delete_candidate(State(state): State>, Path(id): Path) { + unimplemented!() +} + +async fn get_run(State(state): State>, Path(id): Path) { + unimplemented!() +} + +async fn update_run(State(state): State>, Path(id): Path) { + unimplemented!() +} + +async fn get_active_runs(State(state): State>) { + unimplemented!() +} + +async fn get_active_run(State(state): State>, Path(id): Path) { + unimplemented!() +} + +async fn peek_active_run(State(state): State>) { 
+ unimplemented!() +} + +async fn get_queue(State(state): State>) { + unimplemented!() +} + +async fn health() -> impl IntoResponse { + "OK" +} + +async fn ready() -> impl IntoResponse { + "OK" +} + +async fn finish_active_run(State(state): State>, Path(id): Path) { + unimplemented!() +} + +async fn public_root() -> impl IntoResponse { + "" +} + +async fn public_assign(State(state): State>) { + unimplemented!() +} + +async fn public_finish(State(state): State>, Path(id): Path) { + unimplemented!() +} + +async fn public_get_active_run(State(state): State>, Path(id): Path) { + unimplemented!() +} + +pub fn public_app(state: Arc) -> Router { + Router::new() + .route("/", get(public_root)) + .route("/runner/active-runs", post(public_assign)) + .route("/runner/active-runs/:id/finish", post(public_finish)) + .route("/runner/active-runs/:id", get(public_get_active_run)) + .with_state(state) +} + +pub fn app(state: Arc) -> Router { + Router::new() + .route("/queue/position", get(queue_position)) + .route("/schedule-control", post(schedule_control)) + .route("/schedule", post(schedule)) + .route("/status", get(status)) + .route("/log/:id", get(log_index)) + .route("/kill:id", post(kill)) + .route("/log/:id/:filename", get(log)) + .route("/codebases", get(get_codebases)) + .route("/codebases", post(update_codebases)) + .route("/candidates/:id", delete(delete_candidate)) + .route("/run/:id", get(get_run)) + .route("/run/:id", post(update_run)) + .route("/active-runs", get(get_active_runs)) + .route("/active-runs/:id", get(get_active_run)) + .route("/active-runs/:id/finish", post(finish_active_run)) + .route("/active-runs/+peek", get(peek_active_run)) + .route("/queue", get(get_queue)) + .route("/health", get(health)) + .route("/ready", get(ready)) + .with_state(state) +} diff --git a/worker/Cargo.toml b/worker/Cargo.toml index e94cc9516..d703d4055 100644 --- a/worker/Cargo.toml +++ b/worker/Cargo.toml @@ -2,7 +2,7 @@ name = "janitor-worker" version = "0.0.0" authors = 
["Jelmer Vernooij "] -edition = "2021" +edition.workspace = true description = "Worker for the janitor" license = "GPL-3.0+" repository = "https://github.com/jelmer/janitor.git"