store build logs in db
endpoint to get logs; check if pkg is already in db; force build flag
parent 18f35f948e
commit 406da5325f
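The two new read endpoints added here can be exercised with any HTTP client. A minimal client-side sketch (not part of this commit), assuming the Rocket server listens on http://localhost:8000, build_api() is mounted at the root, and the reqwest, tokio and anyhow crates are available; the add endpoint additionally accepts the new force_build flag in its JSON body to allow rebuilding an already-known version:

// hypothetical usage sketch; endpoint host/mount point are assumptions
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // list the builds recorded for package id 1 (GET /builds?<pkgid>)
    let builds = reqwest::get("http://localhost:8000/builds?pkgid=1")
        .await?
        .text()
        .await?;
    println!("{builds}");

    // fetch the stored log output of build id 1 (GET /builds/output?<buildid>)
    let log = reqwest::get("http://localhost:8000/builds/output?buildid=1")
        .await?
        .text()
        .await?;
    println!("{log}");

    Ok(())
}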
@@ -1,12 +1,14 @@
 use crate::aur::aur::get_info_by_name;
 use crate::builder::types::Action;
+use crate::db::prelude::{Packages, Versions};
 use crate::db::{packages, versions};
+use rocket::response::status::NotFound;
 use rocket::serde::json::Json;
 use rocket::serde::Deserialize;
 use rocket::{post, State};
 use rocket_okapi::okapi::schemars;
 use rocket_okapi::{openapi, JsonSchema};
-use sea_orm::ActiveModelTrait;
+use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, QueryFilter};
 use sea_orm::{DatabaseConnection, Set};
 use tokio::sync::broadcast::Sender;
 
@@ -14,6 +16,7 @@ use tokio::sync::broadcast::Sender;
 #[serde(crate = "rocket::serde")]
 pub struct AddBody {
     name: String,
+    force_build: bool,
 }
 
 #[openapi(tag = "test")]
@@ -22,28 +25,55 @@ pub async fn package_add(
     db: &State<DatabaseConnection>,
     input: Json<AddBody>,
     tx: &State<Sender<Action>>,
-) -> Result<(), String> {
+) -> Result<(), NotFound<String>> {
     let db = db as &DatabaseConnection;
-    let pkg_name = &input.name;
 
-    let pkg = get_info_by_name(pkg_name)
+    let pkt_model = match Packages::find()
+        .filter(packages::Column::Name.eq(input.name.clone()))
+        .one(db)
         .await
-        .map_err(|_| "couldn't download package metadata".to_string())?;
+        .map_err(|e| NotFound(e.to_string()))?
+    {
+        None => {
             let new_package = packages::ActiveModel {
-        name: Set(pkg_name.clone()),
+                name: Set(input.name.clone()),
                 ..Default::default()
             };
 
-    let pkt_model = new_package.save(db).await.expect("TODO: panic message");
+            new_package.save(db).await.expect("TODO: panic message")
+        }
+        Some(p) => p.into(),
+    };
 
+    let pkg = get_info_by_name(input.name.clone().as_str())
+        .await
+        .map_err(|_| NotFound("couldn't download package metadata".to_string()))?;
 
+    let version_model = match Versions::find()
+        .filter(versions::Column::Version.eq(pkg.version.clone()))
+        .one(db)
+        .await
+        .map_err(|e| NotFound(e.to_string()))?
+    {
+        None => {
             let new_version = versions::ActiveModel {
                 version: Set(pkg.version.clone()),
                 package_id: Set(pkt_model.id.clone().unwrap()),
                 ..Default::default()
             };
 
-    let version_model = new_version.save(db).await.expect("TODO: panic message");
+            new_version.save(db).await.expect("TODO: panic message")
+        }
+        Some(p) => {
+            // todo add check if this version was successfully built
+            // if not allow build
+            if input.force_build {
+                p.into()
+            } else {
+                return Err(NotFound("Version already existing".to_string()));
+            }
+        }
+    };
 
     let _ = tx.send(Action::Build(
         pkg.name,
@@ -1,7 +1,9 @@
 use crate::api::add::okapi_add_operation_for_package_add_;
 use crate::api::add::package_add;
-use crate::api::list::okapi_add_operation_for_package_list_;
-use crate::api::list::okapi_add_operation_for_search_;
+use crate::api::list::okapi_add_operation_for_build_output_;
+use crate::api::list::okapi_add_operation_for_list_builds_;
+use crate::api::list::{build_output, okapi_add_operation_for_package_list_};
+use crate::api::list::{list_builds, okapi_add_operation_for_search_};
 use crate::api::list::{package_list, search};
 use crate::api::remove::okapi_add_operation_for_package_del_;
 use crate::api::remove::okapi_add_operation_for_version_del_;
@@ -10,5 +12,13 @@ use rocket::Route;
 use rocket_okapi::openapi_get_routes;
 
 pub fn build_api() -> Vec<Route> {
-    openapi_get_routes![search, package_list, package_add, package_del, version_del]
+    openapi_get_routes![
+        search,
+        package_list,
+        package_add,
+        package_del,
+        version_del,
+        build_output,
+        list_builds
+    ]
 }
@@ -1,13 +1,14 @@
 use crate::aur::aur::query_aur;
 use crate::db::migration::JoinType;
-use crate::db::prelude::Packages;
-use crate::db::{packages, versions};
+use crate::db::prelude::{Builds, Packages};
+use crate::db::{builds, packages, versions};
+use rocket::response::status::NotFound;
 use rocket::serde::json::Json;
 use rocket::serde::{Deserialize, Serialize};
 use rocket::{get, State};
 use rocket_okapi::okapi::schemars;
 use rocket_okapi::{openapi, JsonSchema};
-use sea_orm::ColumnTrait;
+use sea_orm::{ColumnTrait, QueryFilter};
 use sea_orm::{DatabaseConnection, EntityTrait, FromQueryResult, QuerySelect, RelationTrait};
 
 #[derive(Serialize, JsonSchema)]
@@ -20,7 +21,7 @@ pub struct ApiPackage {
 #[openapi(tag = "test")]
 #[get("/search?<query>")]
 pub async fn search(query: &str) -> Result<Json<Vec<ApiPackage>>, String> {
-    match query_aur(query).await {
+    return match query_aur(query).await {
         Ok(v) => {
             let mapped = v
                 .iter()
@@ -29,19 +30,19 @@ pub async fn search(query: &str) -> Result<Json<Vec<ApiPackage>>, String> {
                     version: x.version.clone(),
                 })
                 .collect();
-            return Ok(Json(mapped));
-        }
-        Err(e) => {
-            return Err(format!("{}", e));
-        }
+            Ok(Json(mapped))
         }
+        Err(e) => Err(format!("{}", e)),
+    };
 }
 
 #[derive(FromQueryResult, Deserialize, JsonSchema, Serialize)]
 #[serde(crate = "rocket::serde")]
 pub struct ListPackageModel {
+    id: i32,
     name: String,
     count: i32,
+    status: i32,
 }
 
 #[openapi(tag = "test")]
@@ -56,6 +57,8 @@ pub async fn package_list(
         .select_only()
         .column_as(versions::Column::Id.count(), "count")
         .column(packages::Column::Name)
+        .column(packages::Column::Id)
+        .column(packages::Column::Status)
         .group_by(packages::Column::Name)
         .into_model::<ListPackageModel>()
         .all(db)
@@ -64,3 +67,56 @@
 
     Ok(Json(all))
 }
+
+#[openapi(tag = "test")]
+#[get("/builds/output?<buildid>")]
+pub async fn build_output(
+    db: &State<DatabaseConnection>,
+    buildid: i32,
+) -> Result<String, NotFound<String>> {
+    let db = db as &DatabaseConnection;
+
+    let build = Builds::find_by_id(buildid)
+        .one(db)
+        .await
+        .map_err(|e| NotFound(e.to_string()))?
+        .ok_or(NotFound("couldn't find id".to_string()))?;
+
+    build.ouput.ok_or(NotFound("No Output".to_string()))
+}
+
+#[derive(FromQueryResult, Deserialize, JsonSchema, Serialize)]
+#[serde(crate = "rocket::serde")]
+pub struct ListBuildsModel {
+    id: i32,
+    pkg_id: i32,
+    version_id: i32,
+    status: Option<i32>,
+}
+
+#[openapi(tag = "test")]
+#[get("/builds?<pkgid>")]
+pub async fn list_builds(
+    db: &State<DatabaseConnection>,
+    pkgid: i32,
+) -> Result<Json<Vec<ListBuildsModel>>, NotFound<String>> {
+    let db = db as &DatabaseConnection;
+
+    let build = Builds::find()
+        .filter(builds::Column::PkgId.eq(pkgid))
+        .all(db)
+        .await
+        .map_err(|e| NotFound(e.to_string()))?;
+
+    Ok(Json(
+        build
+            .iter()
+            .map(|x| ListBuildsModel {
+                id: x.id,
+                status: x.status,
+                pkg_id: x.pkg_id,
+                version_id: x.version_id,
+            })
+            .collect::<Vec<_>>(),
+    ))
+}
@@ -1,6 +1,12 @@
 use crate::builder::types::Action;
+use crate::db::prelude::{Builds, Packages};
+use crate::db::{builds, packages};
 use crate::repo::repo::add_pkg;
-use sea_orm::{ActiveModelTrait, DatabaseConnection, Set};
+use anyhow::anyhow;
+use sea_orm::{ActiveModelTrait, DatabaseConnection, EntityTrait, Set};
+use std::ops::Add;
+use tokio::sync::broadcast;
+use tokio::sync::broadcast::error::RecvError;
 use tokio::sync::broadcast::Sender;
 
 pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {
@@ -11,20 +17,65 @@ pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {
         Action::Build(name, version, url, mut version_model) => {
             let db = db.clone();
 
+            let build = builds::ActiveModel {
+                pkg_id: version_model.package_id.clone(),
+                version_id: version_model.id.clone(),
+                ouput: Set(None),
+                status: Set(Some(0)),
+                ..Default::default()
+            };
+            let new_build = build.save(&db).await.unwrap();
+
             // spawn new thread for each pkg build
+            // todo add queue and build two packages in parallel
             tokio::spawn(async move {
-                match add_pkg(url, version, name).await {
+                let (tx, mut rx) = broadcast::channel::<String>(3);
+
+                let db2 = db.clone();
+                tokio::spawn(async move {
+                    loop {
+                        match rx.recv().await {
+                            Ok(output_line) => {
+                                println!("{output_line}");
+
+                                let _ = append_db_log_output(
+                                    &db2,
+                                    output_line,
+                                    new_build.id.clone().unwrap(),
+                                )
+                                .await;
+                            }
+                            Err(e) => match e {
+                                RecvError::Closed => {
+                                    break;
+                                }
+                                RecvError::Lagged(_) => {}
+                            },
+                        }
+                    }
+                });
+
+                match add_pkg(url, version, name, tx).await {
                     Ok(pkg_file_name) => {
                         println!("successfully built package");
+                        let _ = set_pkg_status(
+                            &db,
+                            version_model.package_id.clone().unwrap(),
+                            1,
+                        )
+                        .await;
 
-                        // update status
-                        version_model.status = Set(Some(1));
                         version_model.file_name = Set(Some(pkg_file_name));
-                        version_model.update(&db).await.unwrap();
+                        let _ = version_model.update(&db).await;
                     }
                     Err(e) => {
-                        version_model.status = Set(Some(2));
-                        version_model.update(&db).await.unwrap();
+                        let _ = set_pkg_status(
+                            &db,
+                            version_model.package_id.clone().unwrap(),
+                            2,
+                        )
+                        .await;
+                        let _ = version_model.update(&db).await;
 
                         println!("Error: {e}")
                     }
@@ -35,3 +86,47 @@ pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {
         }
     }
 }
+
+// todo maybe move to helper file
+async fn set_pkg_status(
+    db: &DatabaseConnection,
+    package_id: i32,
+    status: i32,
+) -> anyhow::Result<()> {
+    let mut pkg = Packages::find_by_id(package_id)
+        .one(db)
+        .await?
+        .ok_or(anyhow!("no package with id {package_id} found"))?;
+
+    pkg.status = status;
+
+    let pkg: packages::ActiveModel = pkg.into();
+
+    pkg.update(db).await?;
+    Ok(())
+}
+
+async fn append_db_log_output(
+    db: &DatabaseConnection,
+    text: String,
+    build_id: i32,
+) -> anyhow::Result<()> {
+    let build = Builds::find_by_id(build_id)
+        .one(db)
+        .await?
+        .ok_or(anyhow!("build not found"))?;
+
+    let mut build: builds::ActiveModel = build.into();
+
+    match build.ouput.unwrap() {
+        None => {
+            build.ouput = Set(Some(text.add("\n")));
+        }
+        Some(s) => {
+            build.ouput = Set(Some(s.add(text.as_str()).add("\n")));
+        }
+    }
+
+    build.update(db).await?;
+    Ok(())
+}
@@ -26,7 +26,8 @@ create table packages
 (
     id integer not null
         primary key autoincrement,
-    name text not null
+    name text not null,
+    status integer default 0 not null
 );
 
 create table status
@@ -44,8 +45,7 @@ create table versions
         primary key autoincrement,
     version TEXT not null,
     package_id integer not null,
-    file_name TEXT,
-    status INTEGER
+    file_name TEXT
 );
 "#,
         )
@@ -11,6 +11,7 @@ pub struct Model {
     #[sea_orm(primary_key)]
     pub id: i32,
     pub name: String,
+    pub status: i32,
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -13,7 +13,6 @@ pub struct Model {
     pub version: String,
     pub package_id: i32,
     pub file_name: Option<String>,
-    pub status: Option<i32>,
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -1,13 +1,15 @@
 use anyhow::anyhow;
 use std::fs;
-use std::io::{BufRead, BufReader};
-use std::process::Command;
+use std::process::Stdio;
 use std::time::SystemTime;
+use tokio::io::{AsyncBufReadExt, BufReader, Lines};
+use tokio::sync::broadcast::Sender;
 
-pub fn build_pkgbuild(
+pub async fn build_pkgbuild(
     folder_path: String,
     pkg_vers: &str,
     pkg_name: &str,
+    tx: Sender<String>,
 ) -> anyhow::Result<String> {
     let makepkg = include_str!("../../scripts/makepkg");
 
@@ -15,34 +17,39 @@ pub fn build_pkgbuild(
     let script_file = std::env::temp_dir().join("makepkg_custom.sh");
     fs::write(&script_file, makepkg).expect("Unable to write script to file");
 
-    let mut output = Command::new("bash")
+    let mut child = tokio::process::Command::new("bash")
         .args(&[
             script_file.as_os_str().to_str().unwrap(),
             "-f",
             "--noconfirm",
-            "-s",
-            "-c",
+            "--nocolor",
+            "-s",              // install required deps
+            "-c",              // cleanup leftover files and dirs
+            "--rmdeps",        // remove installed deps with -s
+            "--noprogressbar", // pacman shouldn't display a progressbar
         ])
        .current_dir(folder_path.clone())
-        .spawn()
-        .unwrap();
+        .stdout(Stdio::piped())
+        .stderr(Stdio::piped())
+        .spawn()?;
 
-    if let Some(stdout) = output.stdout.take() {
-        let reader = BufReader::new(stdout);
+    let stderr = child
+        .stderr
+        .take()
+        .ok_or(anyhow!("failed to take stderr"))?;
+    let stdout = child
+        .stdout
+        .take()
+        .ok_or(anyhow!("failed to take stdout"))?;
 
-        // Iterate through each line of output
-        for line in reader.lines() {
-            if let Ok(line_content) = line {
-                // Print the line to the terminal
-                println!("{}", line_content);
+    let stderr = BufReader::new(stderr).lines();
+    let stdout = BufReader::new(stdout).lines();
 
-                // todo store line to database for being fetchable from api
-            }
-        }
-    }
+    let tx1 = tx.clone();
+    spawn_broadcast_sender(stderr, tx1);
+    spawn_broadcast_sender(stdout, tx);
 
-    // Ensure the command completes
-    let result = output.wait();
+    let result = child.wait().await;
 
     match result {
         Ok(result) => {
@@ -59,6 +66,18 @@ pub fn build_pkgbuild(
     locate_built_package(pkg_name.to_string(), pkg_vers.to_string(), folder_path)
 }
 
+fn spawn_broadcast_sender<R: tokio::io::AsyncRead + Unpin + Send + 'static>(
+    mut reader: Lines<BufReader<R>>,
+    tx: Sender<String>,
+) {
+    tokio::spawn(async move {
+        while let Ok(Some(line)) = reader.next_line().await {
+            // println!("directerr: {line}");
+            let _ = tx.send(line);
+        }
+    });
+}
+
 fn locate_built_package(
     pkg_name: String,
     pkg_vers: String,
@@ -7,14 +7,25 @@ use anyhow::anyhow;
 use sea_orm::{ColumnTrait, DatabaseConnection, EntityTrait, ModelTrait, QueryFilter};
 use std::fs;
 use std::process::Command;
+use tokio::sync::broadcast::Sender;
 
 static REPO_NAME: &str = "repo";
 static BASEURL: &str = "https://aur.archlinux.org";
 
-pub async fn add_pkg(url: String, version: String, name: String) -> anyhow::Result<String> {
+pub async fn add_pkg(
+    url: String,
+    version: String,
+    name: String,
+    tx: Sender<String>,
+) -> anyhow::Result<String> {
     let fname = download_pkgbuild(format!("{}{}", BASEURL, url).as_str(), "./builds").await?;
-    let pkg_file_name =
-        build_pkgbuild(format!("./builds/{fname}"), version.as_str(), name.as_str())?;
+    let pkg_file_name = build_pkgbuild(
+        format!("./builds/{fname}"),
+        version.as_str(),
+        name.as_str(),
+        tx,
+    )
+    .await?;
 
     // todo force overwrite if file already exists
     fs::copy(