outsource cancel to method and handle errors

lukas-heiligenbrunner 2024-02-27 22:02:47 +01:00 committed by Lukas-Heiligenbrunner
parent 114a34de8f
commit f6af87dc27
5 changed files with 38 additions and 30 deletions

View File

@@ -19,6 +19,7 @@ pub fn build_api() -> Vec<Route> {
         stats,
         get_build,
         get_package,
-        package_update_endpoint
+        package_update_endpoint,
+        cancel_build
     ]
 }
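With `cancel_build` registered here, the route becomes reachable over HTTP. For reference, a client-side sketch (not part of this commit; host, port, and mount prefix are assumptions, only the `/build/<buildid>/cancel` shape comes from the handler below):

use reqwest::Client;

// Hypothetical client call; the base URL is a guess, the path shape matches
// the #[post("/build/<buildid>/cancel")] attribute in this commit.
async fn request_cancel(build_id: i32) -> reqwest::Result<()> {
    Client::new()
        .post(format!("http://localhost:8000/build/{build_id}/cancel"))
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}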

View File

@@ -147,7 +147,7 @@ pub async fn delete_build(
 #[openapi(tag = "build")]
 #[post("/build/<buildid>/cancel")]
-pub async fn cancle_build(
+pub async fn cancel_build(
     db: &State<DatabaseConnection>,
     tx: &State<Sender<Action>>,
     buildid: i32,
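The rest of the handler body falls outside this hunk. Since the builder loop further down reacts to `Action::Cancel`, the endpoint presumably only needs to publish the id on the broadcast channel; a self-contained sketch of that interaction (the `Action` enum is redeclared here purely for illustration):

use tokio::sync::broadcast;

// Stand-in for crate::builder::types::Action, for illustration only.
#[derive(Clone, Debug)]
enum Action {
    Cancel(i32),
}

// What the endpoint likely boils down to: publish the id and let the builder
// loop do the actual teardown. send() only errors when there are no
// receivers, which a caller can treat as "nothing to cancel".
fn publish_cancel(tx: &broadcast::Sender<Action>, buildid: i32) -> bool {
    tx.send(Action::Cancel(buildid)).is_ok()
}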

View File

@ -43,7 +43,7 @@ pub async fn get_info_by_name(pkg_name: &str) -> anyhow::Result<Package> {
Ok(response) Ok(response)
} }
pub async fn download_pkgbuild(url: &str, dest_dir: &str) -> anyhow::Result<String> { pub async fn download_pkgbuild(url: &str, dest_dir: &str, clear_build_dir: bool) -> anyhow::Result<String> {
let (file_data, file_name) = match download_file(url).await { let (file_data, file_name) = match download_file(url).await {
Ok(data) => data, Ok(data) => data,
Err(e) => { Err(e) => {
@ -51,6 +51,10 @@ pub async fn download_pkgbuild(url: &str, dest_dir: &str) -> anyhow::Result<Stri
} }
}; };
if clear_build_dir {
fs::remove_dir_all(dest_dir)?;
}
// Check if the directory exists // Check if the directory exists
if fs::metadata(dest_dir).is_err() { if fs::metadata(dest_dir).is_err() {
// Create the directory if it does not exist // Create the directory if it does not exist

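One caveat with the new flag: `std::fs::remove_dir_all` returns an error when the path does not exist, so with `clear_build_dir = true` the `?` aborts the download on a fresh setup where `./builds` was never created. A guarded variant (a sketch, not what this commit does):

use std::fs;
use std::path::Path;

// Only wipe the build directory when it is actually present; a missing
// directory is simply skipped instead of turning into an Err.
fn clear_dir_if_present(dest_dir: &str) -> std::io::Result<()> {
    if Path::new(dest_dir).exists() {
        fs::remove_dir_all(dest_dir)?;
    }
    Ok(())
}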
View File

@ -1,5 +1,5 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::iter::Map; use std::ops::Add;
use crate::builder::types::Action; use crate::builder::types::Action;
use crate::db::builds::ActiveModel; use crate::db::builds::ActiveModel;
use crate::db::prelude::{Builds, Packages}; use crate::db::prelude::{Builds, Packages};
@ -7,7 +7,6 @@ use crate::db::{builds, packages, versions};
use crate::repo::repo::add_pkg; use crate::repo::repo::add_pkg;
use anyhow::anyhow; use anyhow::anyhow;
use sea_orm::{ActiveModelTrait, DatabaseConnection, EntityTrait, Set}; use sea_orm::{ActiveModelTrait, DatabaseConnection, EntityTrait, Set};
use std::ops::{Add, Deref};
use std::sync::Arc; use std::sync::Arc;
use std::time::{SystemTime, UNIX_EPOCH}; use std::time::{SystemTime, UNIX_EPOCH};
use tokio::sync::broadcast::error::RecvError; use tokio::sync::broadcast::error::RecvError;
@ -37,34 +36,38 @@ pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {
.await; .await;
} }
Action::Cancel(build_id) => { Action::Cancel(build_id) => {
let build = Builds::find_by_id(build_id) let _ = cancel_build(build_id, job_handles.clone(), db.clone()).await;
.one(&db)
.await
.expect("TODO: panic message")
.expect("TODO: panic message");
let mut build: builds::ActiveModel = build.into();
build.status = Set(Some(4));
build.end_time = Set(Some(
SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs() as u32,
));
let _ = build.clone().update(&db).await;
job_handles
.lock()
.await
.remove(&build.id.clone().unwrap())
.expect("TODO: panic message")
.abort();
} }
} }
} }
} }
} }
async fn cancel_build(build_id: i32, job_handles: Arc<Mutex<HashMap<i32, JoinHandle<()>>>>, db: DatabaseConnection) -> anyhow::Result<()> {
let build = Builds::find_by_id(build_id)
.one(&db)
.await?
.ok_or(anyhow!("No build found"))?;
let mut build: builds::ActiveModel = build.into();
build.status = Set(Some(4));
build.end_time = Set(Some(
SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs() as u32,
));
let _ = build.clone().update(&db).await;
job_handles
.lock()
.await
.remove(&build.id.clone().unwrap())
.ok_or(anyhow!("No build found"))?
.abort();
Ok(())
}
async fn queue_package( async fn queue_package(
name: String, name: String,
version: String, version: String,
@ -76,7 +79,6 @@ async fn queue_package(
job_handles: Arc<Mutex<HashMap<i32, JoinHandle<()>>>>, job_handles: Arc<Mutex<HashMap<i32, JoinHandle<()>>>>,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let permits = Arc::clone(&semaphore); let permits = Arc::clone(&semaphore);
let mut job_handles = Arc::clone(&job_handles);
let build_id = build_model.id.clone().unwrap(); let build_id = build_model.id.clone().unwrap();
// spawn new thread for each pkg build // spawn new thread for each pkg build
@ -116,7 +118,7 @@ async fn build_package(
pkg.status = Set(0); pkg.status = Set(0);
pkg = pkg.update(&db).await?.into(); pkg = pkg.update(&db).await?.into();
match add_pkg(url, version, name, tx).await { match add_pkg(url, version, name, tx, false).await {
Ok(pkg_file_name) => { Ok(pkg_file_name) => {
println!("successfully built package"); println!("successfully built package");
// update package success status // update package success status

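Note that the consumer still discards the `Result` of the new `cancel_build` (`let _ = ...`). If cancellation failures should at least be visible, a small wrapper could surface them (a sketch, assuming it sits in the same module as `cancel_build` so the existing imports apply, and using plain `eprintln!` rather than a logging crate):

// Hypothetical helper: run the cancellation and report failures instead of
// silently dropping them with `let _ =`.
async fn cancel_and_log(
    build_id: i32,
    job_handles: Arc<Mutex<HashMap<i32, JoinHandle<()>>>>,
    db: DatabaseConnection,
) {
    if let Err(e) = cancel_build(build_id, job_handles, db).await {
        eprintln!("failed to cancel build {build_id}: {e}");
    }
}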
View File

@@ -20,8 +20,9 @@ pub async fn add_pkg(
     version: String,
     name: String,
     tx: Sender<String>,
+    clear_build_dir: bool,
 ) -> anyhow::Result<String> {
-    let fname = download_pkgbuild(format!("{}{}", BASEURL, url).as_str(), "./builds").await?;
+    let fname = download_pkgbuild(format!("{}{}", BASEURL, url).as_str(), "./builds", clear_build_dir).await?;
     let pkg_file_names = build_pkgbuild(
         format!("./builds/{fname}"),
         version.as_str(),
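Within this commit the only caller, `build_package`, passes `false`, so nothing clears `./builds` yet. A hypothetical call site that forces a clean checkout would simply flip the flag (a sketch, assuming it lives in the same module so `add_pkg` and the `Sender` alias resolve):

// Hypothetical caller (not part of this commit) that wipes ./builds before
// downloading again.
async fn rebuild_clean(
    url: String,
    version: String,
    name: String,
    tx: Sender<String>,
) -> anyhow::Result<String> {
    add_pkg(url, version, name, tx, true).await
}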