add frontend and put backend in separate folder
backend/Cargo.lock (3825 lines, generated, Normal file)
File diff suppressed because it is too large

backend/Cargo.toml (31 lines, Normal file)
@@ -0,0 +1,31 @@
[package]
# todo rename to AURCentral
name = "untitled"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
aur-rs = "0.1.1"
tokio = "1.35.1"
anyhow = "1.0.76"

reqwest = { version = "0.11.23", features = ["blocking", "gzip"] }
flate2 = "1.0.28"
tar = "0.4.40"

rocket = "0.5.0-rc.3"
rocket_okapi = { version = "0.8.0-rc.3", features = ["swagger"] }
sea-orm = { version = "0.12", features = [ "sqlx-sqlite", "runtime-tokio-rustls", "macros" ] }
sea-orm-migration = {version = "0.12", features = ["sqlx-sqlite", "runtime-tokio-rustls"]}
serde = "1.0.193"
rust-embed = "8.1.0"

[[bin]]
name = "untitled"
path = "src/main.rs"

[features]
default = []
static = []

backend/scripts/README.md (2 lines, Normal file)
@@ -0,0 +1,2 @@
This is a patched makepkg version to allow being run as root.
Especially in containers this makes things a lot easier and shouldn't be a security concern there.

backend/scripts/makepkg (1483 lines, Executable file)
File diff suppressed because it is too large

backend/src/api/add.rs (86 lines, Normal file)
@@ -0,0 +1,86 @@
use crate::aur::aur::get_info_by_name;
use crate::builder::types::Action;
use crate::db::prelude::{Packages, Versions};
use crate::db::{packages, versions};
use rocket::response::status::BadRequest;
use rocket::serde::json::Json;
use rocket::serde::Deserialize;
use rocket::{post, State};
use rocket_okapi::okapi::schemars;
use rocket_okapi::{openapi, JsonSchema};
use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, QueryFilter};
use sea_orm::{DatabaseConnection, Set};
use tokio::sync::broadcast::Sender;

#[derive(Deserialize, JsonSchema)]
#[serde(crate = "rocket::serde")]
pub struct AddBody {
    name: String,
    force_build: bool,
}

#[openapi(tag = "test")]
#[post("/packages/add", data = "<input>")]
pub async fn package_add(
    db: &State<DatabaseConnection>,
    input: Json<AddBody>,
    tx: &State<Sender<Action>>,
) -> Result<(), BadRequest<String>> {
    let db = db as &DatabaseConnection;

    let pkt_model = match Packages::find()
        .filter(packages::Column::Name.eq(input.name.clone()))
        .one(db)
        .await
        .map_err(|e| BadRequest(Some(e.to_string())))?
    {
        None => {
            let new_package = packages::ActiveModel {
                name: Set(input.name.clone()),
                ..Default::default()
            };

            new_package.save(db).await.expect("TODO: panic message")
        }
        Some(p) => p.into(),
    };

    let pkg = get_info_by_name(input.name.clone().as_str())
        .await
        .map_err(|_| BadRequest(Some("couldn't download package metadata".to_string())))?;

    let version_model = match Versions::find()
        .filter(versions::Column::Version.eq(pkg.version.clone()))
        .one(db)
        .await
        .map_err(|e| BadRequest(Some(e.to_string())))?
    {
        None => {
            let new_version = versions::ActiveModel {
                version: Set(pkg.version.clone()),
                package_id: Set(pkt_model.id.clone().unwrap()),
                ..Default::default()
            };

            new_version.save(db).await.expect("TODO: panic message")
        }
        Some(p) => {
            // todo add check if this version was successfully built
            // if not allow build
            if input.force_build {
                p.into()
            } else {
                return Err(BadRequest(Some("Version already existing".to_string())));
            }
        }
    };

    let _ = tx.send(Action::Build(
        pkg.name,
        pkg.version,
        pkg.url_path.unwrap(),
        version_model,
    ));

    Ok(())
}

backend/src/api/backend.rs (25 lines, Normal file)
@@ -0,0 +1,25 @@
use crate::api::add::okapi_add_operation_for_package_add_;
use crate::api::add::package_add;
use crate::api::list::okapi_add_operation_for_build_output_;
use crate::api::list::okapi_add_operation_for_list_builds_;
use crate::api::list::{build_output, okapi_add_operation_for_package_list_};
use crate::api::list::{list_builds, okapi_add_operation_for_search_};
use crate::api::list::{package_list, search};
use crate::api::remove::okapi_add_operation_for_package_del_;
use crate::api::remove::okapi_add_operation_for_version_del_;
use crate::api::remove::{package_del, version_del};
use rocket::Route;
use rocket_okapi::openapi_get_routes;

pub fn build_api() -> Vec<Route> {
    openapi_get_routes![
        search,
        package_list,
        package_add,
        package_del,
        version_del,
        build_output,
        list_builds,
    ]
}

backend/src/api/embed.rs (51 lines, Normal file)
@@ -0,0 +1,51 @@
use rocket::http::uri::fmt::Path;
use rocket::http::uri::Segments;
use rocket::http::{ContentType, Method, Status};
use rocket::route::{Handler, Outcome};
use rocket::{Data, Request, Response, Route};
use rust_embed::RustEmbed;
use std::io::Cursor;

#[derive(RustEmbed)]
#[folder = "web"]
struct Asset;

#[derive(Clone)]
pub struct CustomHandler {}

impl Into<Vec<Route>> for CustomHandler {
    fn into(self) -> Vec<Route> {
        vec![Route::ranked(-2, Method::Get, "/<path..>", self)]
    }
}

#[rocket::async_trait]
impl Handler for CustomHandler {
    async fn handle<'r>(&self, request: &'r Request<'_>, _: Data<'r>) -> Outcome<'r> {
        let mut path = request
            .segments::<Segments<'_, Path>>(0..)
            .ok()
            .and_then(|segments| segments.to_path_buf(true).ok())
            .unwrap();

        if path.is_dir() || path.to_str() == Some("") {
            path = path.join("index.html")
        }

        match <Asset as RustEmbed>::get(path.to_string_lossy().as_ref()) {
            None => Outcome::Failure(Status::NotFound),
            Some(file_content) => {
                let content_type: ContentType = path
                    .extension()
                    .map(|x| x.to_string_lossy())
                    .and_then(|x| ContentType::from_extension(&x))
                    .unwrap_or(ContentType::Plain);
                let rsp = Response::build()
                    .header(content_type)
                    .sized_body(file_content.data.len(), Cursor::new(file_content.data))
                    .finalize();
                Outcome::Success(rsp)
            }
        }
    }
}

backend/src/api/list.rs (148 lines, Normal file)
@@ -0,0 +1,148 @@
use crate::aur::aur::query_aur;
use crate::db::migration::JoinType;
use crate::db::prelude::{Builds, Packages};
use crate::db::{builds, packages, versions};
use rocket::response::status::NotFound;
use rocket::serde::json::Json;
use rocket::serde::{Deserialize, Serialize};
use rocket::{get, State};
use rocket_okapi::okapi::schemars;
use rocket_okapi::{openapi, JsonSchema};
use sea_orm::{ColumnTrait, QueryFilter};
use sea_orm::{DatabaseConnection, EntityTrait, FromQueryResult, QuerySelect, RelationTrait};

#[derive(Serialize, JsonSchema)]
#[serde(crate = "rocket::serde")]
pub struct ApiPackage {
    name: String,
    version: String,
}

#[openapi(tag = "test")]
#[get("/search?<query>")]
pub async fn search(query: &str) -> Result<Json<Vec<ApiPackage>>, String> {
    return match query_aur(query).await {
        Ok(v) => {
            let mapped = v
                .iter()
                .map(|x| ApiPackage {
                    name: x.name.clone(),
                    version: x.version.clone(),
                })
                .collect();
            Ok(Json(mapped))
        }
        Err(e) => Err(format!("{}", e)),
    };
}

#[derive(FromQueryResult, Deserialize, JsonSchema, Serialize)]
#[serde(crate = "rocket::serde")]
pub struct ListPackageModel {
    id: i32,
    name: String,
    count: i32,
    status: i32,
}

#[openapi(tag = "test")]
#[get("/packages/list")]
pub async fn package_list(
    db: &State<DatabaseConnection>,
) -> Result<Json<Vec<ListPackageModel>>, String> {
    let db = db as &DatabaseConnection;

    let all: Vec<ListPackageModel> = Packages::find()
        .join_rev(JoinType::InnerJoin, versions::Relation::Packages.def())
        .select_only()
        .column_as(versions::Column::Id.count(), "count")
        .column(packages::Column::Name)
        .column(packages::Column::Id)
        .column(packages::Column::Status)
        .group_by(packages::Column::Name)
        .into_model::<ListPackageModel>()
        .all(db)
        .await
        .unwrap();

    Ok(Json(all))
}

#[openapi(tag = "test")]
#[get("/builds/output?<buildid>&<startline>")]
pub async fn build_output(
    db: &State<DatabaseConnection>,
    buildid: i32,
    startline: Option<i32>,
) -> Result<String, NotFound<String>> {
    let db = db as &DatabaseConnection;

    let build = Builds::find_by_id(buildid)
        .one(db)
        .await
        .map_err(|e| NotFound(e.to_string()))?
        .ok_or(NotFound("couldn't find id".to_string()))?;

    return match build.ouput {
        None => Err(NotFound("No Output".to_string())),
        Some(v) => match startline {
            None => Ok(v),
            Some(startline) => {
                let output: Vec<String> = v.split("\n").map(|x| x.to_string()).collect();
                let len = output.len();
                let len_missing = len as i32 - startline;

                let output = output
                    .iter()
                    .rev()
                    .take(if len_missing > 0 {
                        len_missing as usize
                    } else {
                        0
                    })
                    .rev()
                    .map(|x1| x1.clone())
                    .collect::<Vec<_>>();

                let output = output.join("\n");
                Ok(output)
            }
        },
    };
}

#[derive(FromQueryResult, Deserialize, JsonSchema, Serialize)]
#[serde(crate = "rocket::serde")]
pub struct ListBuildsModel {
    id: i32,
    pkg_id: i32,
    version_id: i32,
    status: Option<i32>,
}

#[openapi(tag = "test")]
#[get("/builds?<pkgid>")]
pub async fn list_builds(
    db: &State<DatabaseConnection>,
    pkgid: i32,
) -> Result<Json<Vec<ListBuildsModel>>, NotFound<String>> {
    let db = db as &DatabaseConnection;

    let build = Builds::find()
        .filter(builds::Column::PkgId.eq(pkgid))
        .all(db)
        .await
        .map_err(|e| NotFound(e.to_string()))?;

    Ok(Json(
        build
            .iter()
            .map(|x| ListBuildsModel {
                id: x.id,
                status: x.status,
                pkg_id: x.pkg_id,
                version_id: x.version_id,
            })
            .collect::<Vec<_>>(),
    ))
}

backend/src/api/mod.rs (6 lines, Normal file)
@@ -0,0 +1,6 @@
mod add;
pub mod backend;
mod list;
mod remove;
#[cfg(feature = "static")]
pub mod embed;

backend/src/api/remove.rs (37 lines, Normal file)
@@ -0,0 +1,37 @@
use crate::repo::repo::{remove_pkg, remove_version};
use rocket::serde::json::Json;
use rocket::serde::Deserialize;
use rocket::{post, State};
use rocket_okapi::okapi::schemars;
use rocket_okapi::{openapi, JsonSchema};
use sea_orm::DatabaseConnection;

#[derive(Deserialize, JsonSchema)]
#[serde(crate = "rocket::serde")]
pub struct DelBody {
    id: i32,
}

#[openapi(tag = "test")]
#[post("/packages/delete", data = "<input>")]
pub async fn package_del(
    db: &State<DatabaseConnection>,
    input: Json<DelBody>,
) -> Result<(), String> {
    let db = db as &DatabaseConnection;
    let pkg_id = input.id.clone();

    remove_pkg(db, pkg_id).await.map_err(|e| e.to_string())?;

    Ok(())
}

#[openapi(tag = "test")]
#[post("/versions/delete/<id>")]
pub async fn version_del(db: &State<DatabaseConnection>, id: i32) -> Result<(), String> {
    let db = db as &DatabaseConnection;

    remove_version(db, id).await.map_err(|e| e.to_string())?;

    Ok(())
}

backend/src/aur/aur.rs (99 lines, Normal file)
@@ -0,0 +1,99 @@
use anyhow::anyhow;
use aur_rs::{Package, Request};
use flate2::bufread::GzDecoder;
use std::fs;
use std::path::Path;
use tar::Archive;

pub async fn query_aur(query: &str) -> anyhow::Result<Vec<Package>> {
    let request = Request::default();
    let response = request.search_package_by_name(query).await;

    let response = match response {
        Ok(v) => v,
        Err(_) => {
            return Err(anyhow!("failed to search"));
        }
    };

    let mut response = response.results;
    response.sort_by(|x, x1| x.popularity.partial_cmp(&x1.popularity).unwrap().reverse());

    Ok(response)
}

pub async fn get_info_by_name(pkg_name: &str) -> anyhow::Result<Package> {
    let request = Request::default();
    let response = request.search_info_by_name(pkg_name).await;

    let mut response = match response {
        Ok(v) => v,
        Err(_) => {
            return Err(anyhow!("failed to get package"));
        }
    };

    let response = match response.results.pop() {
        None => {
            return Err(anyhow!("no package found"));
        }
        Some(v) => v,
    };

    Ok(response)
}

pub async fn download_pkgbuild(url: &str, dest_dir: &str) -> anyhow::Result<String> {
    let (file_data, file_name) = match download_file(url).await {
        Ok(data) => data,
        Err(e) => {
            return Err(anyhow!("Error downloading file: {}", e));
        }
    };

    // Check if the directory exists
    if !fs::metadata(dest_dir).is_ok() {
        // Create the directory if it does not exist
        fs::create_dir(dest_dir)?;
    }

    unpack_tar_gz(&file_data, dest_dir)?;
    Ok(file_name)
}

async fn download_file(url: &str) -> anyhow::Result<(Vec<u8>, String)> {
    let response = reqwest::get(url).await?;

    // extract name of file without extension
    // todo might be also possible here to use package name
    let t = response
        .url()
        .path_segments()
        .and_then(|segments| segments.last())
        .ok_or(anyhow!("no segments"))?
        .split(".")
        .collect::<Vec<&str>>()
        .first()
        .ok_or(anyhow!(""))?
        .to_string();

    println!("{}", t);

    let r = response.bytes().await?;
    Ok((r.to_vec(), t))
}

fn unpack_tar_gz(data: &[u8], target_dir: &str) -> anyhow::Result<()> {
    let tar = GzDecoder::new(data);
    let mut archive = Archive::new(tar);

    for entry in archive.entries()? {
        let mut entry = entry?;
        let path = entry.path()?;
        let entry_path = Path::new(target_dir).join(path);

        entry.unpack(entry_path)?;
    }

    Ok(())
}

backend/src/aur/mod.rs (1 line, Normal file)
@@ -0,0 +1 @@
pub mod aur;

backend/src/builder/builder.rs (138 lines, Normal file)
@@ -0,0 +1,138 @@
use crate::builder::types::Action;
use crate::db::prelude::{Builds, Packages};
use crate::db::{builds, packages};
use crate::repo::repo::add_pkg;
use anyhow::anyhow;
use sea_orm::{ActiveModelTrait, DatabaseConnection, EntityTrait, Set};
use std::ops::Add;
use tokio::sync::broadcast;
use tokio::sync::broadcast::error::RecvError;
use tokio::sync::broadcast::Sender;

pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {
    loop {
        if let Ok(_result) = tx.subscribe().recv().await {
            match _result {
                // add a package to parallel build
                Action::Build(name, version, url, mut version_model) => {
                    let db = db.clone();

                    let build = builds::ActiveModel {
                        pkg_id: version_model.package_id.clone(),
                        version_id: version_model.id.clone(),
                        ouput: Set(None),
                        status: Set(Some(0)),
                        ..Default::default()
                    };
                    let mut new_build = build.save(&db).await.unwrap();

                    // spawn new thread for each pkg build
                    // todo add queue and build two packages in parallel
                    tokio::spawn(async move {
                        let (tx, mut rx) = broadcast::channel::<String>(3);

                        let db2 = db.clone();
                        let new_build2 = new_build.clone();
                        tokio::spawn(async move {
                            loop {
                                match rx.recv().await {
                                    Ok(output_line) => {
                                        println!("{output_line}");

                                        let _ = append_db_log_output(
                                            &db2,
                                            output_line,
                                            new_build2.id.clone().unwrap(),
                                        )
                                        .await;
                                    }
                                    Err(e) => match e {
                                        RecvError::Closed => {
                                            break;
                                        }
                                        RecvError::Lagged(_) => {}
                                    },
                                }
                            }
                        });

                        match add_pkg(url, version, name, tx).await {
                            Ok(pkg_file_name) => {
                                println!("successfully built package");
                                let _ = set_pkg_status(
                                    &db,
                                    version_model.package_id.clone().unwrap(),
                                    1,
                                )
                                .await;

                                version_model.file_name = Set(Some(pkg_file_name));
                                let _ = version_model.update(&db).await;

                                new_build.status = Set(Some(1));
                                let _ = new_build.update(&db).await;
                            }
                            Err(e) => {
                                let _ = set_pkg_status(
                                    &db,
                                    version_model.package_id.clone().unwrap(),
                                    2,
                                )
                                .await;
                                let _ = version_model.update(&db).await;

                                new_build.status = Set(Some(1));
                                let _ = new_build.update(&db).await;

                                println!("Error: {e}")
                            }
                        }
                    });
                }
            }
        }
    }
}

// todo maybe move to helper file
async fn set_pkg_status(
    db: &DatabaseConnection,
    package_id: i32,
    status: i32,
) -> anyhow::Result<()> {
    let mut pkg: packages::ActiveModel = Packages::find_by_id(package_id)
        .one(db)
        .await?
        .ok_or(anyhow!("no package with id {package_id} found"))?
        .into();

    pkg.status = Set(status);
    pkg.update(db).await?;
    Ok(())
}

async fn append_db_log_output(
    db: &DatabaseConnection,
    text: String,
    build_id: i32,
) -> anyhow::Result<()> {
    let build = Builds::find_by_id(build_id)
        .one(db)
        .await?
        .ok_or(anyhow!("build not found"))?;

    let mut build: builds::ActiveModel = build.into();

    match build.ouput.unwrap() {
        None => {
            build.ouput = Set(Some(text.add("\n")));
        }
        Some(s) => {
            build.ouput = Set(Some(s.add(text.as_str()).add("\n")));
        }
    }

    build.update(db).await?;
    Ok(())
}

backend/src/builder/mod.rs (2 lines, Normal file)
@@ -0,0 +1,2 @@
pub mod builder;
pub mod types;

backend/src/builder/types.rs (6 lines, Normal file)
@@ -0,0 +1,6 @@
use crate::db::versions;

#[derive(Clone)]
pub enum Action {
    Build(String, String, String, versions::ActiveModel),
}

backend/src/db/builds.rs (22 lines, Normal file)
@@ -0,0 +1,22 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2

use rocket::serde::Serialize;
use rocket_okapi::okapi::schemars;
use rocket_okapi::JsonSchema;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, JsonSchema)]
#[sea_orm(table_name = "builds")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub pkg_id: i32,
    pub version_id: i32,
    pub ouput: Option<String>,
    pub status: Option<i32>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

backend/src/db/migration/create.rs (59 lines, Normal file)
@@ -0,0 +1,59 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let db = manager.get_connection();

        // Use `execute_unprepared` if the SQL statement doesn't have value bindings
        db.execute_unprepared(
            r#"
            create table builds
            (
                id integer not null
                    constraint builds_pk
                    primary key autoincrement,
                pkg_id integer not null,
                version_id integer not null,
                ouput TEXT,
                status integer
            );

            create table packages
            (
                id integer not null
                    primary key autoincrement,
                name text not null,
                status integer default 0 not null
            );

            create table status
            (
                id integer not null
                    constraint status_pk
                    primary key autoincrement,
                value TEXT
            );

            create table versions
            (
                id integer not null
                    constraint versions_pk
                    primary key autoincrement,
                version TEXT not null,
                package_id integer not null,
                file_name TEXT
            );
            "#,
        )
        .await?;
        Ok(())
    }

    async fn down(&self, _: &SchemaManager) -> Result<(), DbErr> {
        Ok(())
    }
}

backend/src/db/migration/mod.rs (12 lines, Normal file)
@@ -0,0 +1,12 @@
pub use sea_orm_migration::prelude::*;

mod create;

pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![Box::new(create::Migration)]
    }
}

backend/src/db/mod.rs (9 lines, Normal file)
@@ -0,0 +1,9 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2

pub mod prelude;

pub mod builds;
pub mod migration;
pub mod packages;
pub mod status;
pub mod versions;

backend/src/db/packages.rs (29 lines, Normal file)
@@ -0,0 +1,29 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2

use rocket::serde::Serialize;
use rocket_okapi::okapi::schemars;
use rocket_okapi::JsonSchema;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, JsonSchema)]
#[sea_orm(table_name = "packages")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub name: String,
    pub status: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::versions::Entity")]
    Versions,
}

impl ActiveModelBehavior for ActiveModel {}

impl Related<super::versions::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Versions.def()
    }
}

backend/src/db/prelude.rs (6 lines, Normal file)
@@ -0,0 +1,6 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2

pub use super::builds::Entity as Builds;
pub use super::packages::Entity as Packages;
pub use super::status::Entity as Status;
pub use super::versions::Entity as Versions;

backend/src/db/status.rs (19 lines, Normal file)
@@ -0,0 +1,19 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2

use rocket::serde::Serialize;
use rocket_okapi::okapi::schemars;
use rocket_okapi::JsonSchema;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, JsonSchema)]
#[sea_orm(table_name = "status")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub value: Option<String>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

backend/src/db/versions.rs (35 lines, Normal file)
@@ -0,0 +1,35 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2

use rocket::serde::Serialize;
use rocket_okapi::okapi::schemars;
use rocket_okapi::JsonSchema;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, JsonSchema)]
#[sea_orm(table_name = "versions")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub version: String,
    pub package_id: i32,
    pub file_name: Option<String>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::packages::Entity",
        from = "Column::PackageId",
        to = "super::packages::Column::Id"
    )]
    Packages,
}

// `Related` trait has to be implemented by hand
impl Related<super::packages::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Packages.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

backend/src/main.rs (98 lines, Normal file)
@@ -0,0 +1,98 @@
mod api;
mod aur;
mod builder;
mod db;
mod pkgbuild;
mod repo;

use crate::api::backend;
use crate::builder::types::Action;
use crate::db::migration::Migrator;
use rocket::config::Config;
use rocket::fs::FileServer;
use rocket::futures::future::join_all;
use rocket_okapi::swagger_ui::{make_swagger_ui, SwaggerUIConfig};
use sea_orm::{Database, DatabaseConnection};
use sea_orm_migration::MigratorTrait;
use std::fs;
use tokio::sync::broadcast;
#[cfg(feature = "static")]
use crate::api::embed::CustomHandler;

fn main() {
    let t = tokio::runtime::Runtime::new().unwrap();

    let (tx, _) = broadcast::channel::<Action>(32);

    t.block_on(async move {
        // create folder for db stuff
        if !fs::metadata("./db").is_ok() {
            fs::create_dir("./db").unwrap();
        }

        let db: DatabaseConnection = Database::connect("sqlite://db/db.sqlite?mode=rwc")
            .await
            .unwrap();

        Migrator::up(&db, None).await.unwrap();

        // create repo folder
        if !fs::metadata("./repo").is_ok() {
            fs::create_dir("./repo").unwrap();
        }

        let db2 = db.clone();
        let tx2 = tx.clone();
        tokio::spawn(async move {
            builder::builder::init(db2, tx2).await;
        });

        let backend_handle = tokio::spawn(async {
            let mut config = Config::default();
            config.address = "0.0.0.0".parse().unwrap();
            config.port = 8081;

            let rock = rocket::custom(config)
                .manage(db)
                .manage(tx)
                .mount("/", backend::build_api())
                .mount(
                    "/docs/",
                    make_swagger_ui(&SwaggerUIConfig {
                        url: "../openapi.json".to_owned(),
                        ..Default::default()
                    }),
                );
            #[cfg(feature = "static")]
            let rock = rock.mount("/", CustomHandler {});

            let rock = rock.launch().await;
            match rock {
                Ok(_) => println!("Rocket shut down gracefully."),
                Err(err) => println!("Rocket had an error: {}", err),
            };
        });

        let repo_handle = tokio::spawn(async {
            let mut config = Config::default();
            config.address = "0.0.0.0".parse().unwrap();
            config.port = 8080;

            let launch_result = rocket::custom(config)
                .mount("/", FileServer::from("./repo"))
                .launch()
                .await;
            match launch_result {
                Ok(_) => println!("Rocket shut down gracefully."),
                Err(err) => println!("Rocket had an error: {}", err),
            };
        });

        join_all([repo_handle, backend_handle]).await;
    });

    return;
}

backend/src/pkgbuild/build.rs (135 lines, Normal file)
@@ -0,0 +1,135 @@
use anyhow::anyhow;
use std::fs;
use std::process::Stdio;
use std::time::SystemTime;
use tokio::io::{AsyncBufReadExt, BufReader, Lines};
use tokio::sync::broadcast::Sender;

pub async fn build_pkgbuild(
    folder_path: String,
    pkg_vers: &str,
    pkg_name: &str,
    tx: Sender<String>,
) -> anyhow::Result<String> {
    let makepkg = include_str!("../../scripts/makepkg");

    // Create a temporary file to store the bash script content
    let script_file = std::env::temp_dir().join("makepkg_custom.sh");
    fs::write(&script_file, makepkg).expect("Unable to write script to file");

    let mut child = tokio::process::Command::new("bash")
        .args(&[
            script_file.as_os_str().to_str().unwrap(),
            "-f",
            "--noconfirm",
            "--nocolor",
            "-s",              // install required deps
            "-c",              // cleanup leftover files and dirs
            "--rmdeps",        // remove installed deps with -s
            "--noprogressbar", // pacman shouldn't display a progressbar
        ])
        .current_dir(folder_path.clone())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()?;

    let stderr = child
        .stderr
        .take()
        .ok_or(anyhow!("failed to take stderr"))?;
    let stdout = child
        .stdout
        .take()
        .ok_or(anyhow!("failed to take stdout"))?;

    let stderr = BufReader::new(stderr).lines();
    let stdout = BufReader::new(stdout).lines();

    let tx1 = tx.clone();
    spawn_broadcast_sender(stderr, tx1);
    spawn_broadcast_sender(stdout, tx);

    let result = child.wait().await;

    match result {
        Ok(result) => {
            if !result.success() {
                return Err(anyhow!("failed to build package"));
            }
        }
        Err(err) => {
            eprintln!("Failed to execute makepkg: {}", err);
            return Err(anyhow!("failed to build package"));
        }
    }

    locate_built_package(pkg_name.to_string(), pkg_vers.to_string(), folder_path)
}

fn spawn_broadcast_sender<R: tokio::io::AsyncRead + Unpin + Send + 'static>(
    mut reader: Lines<BufReader<R>>,
    tx: Sender<String>,
) {
    tokio::spawn(async move {
        while let Ok(Some(line)) = reader.next_line().await {
            // println!("directerr: {line}");
            let _ = tx.send(line);
        }
    });
}

fn locate_built_package(
    pkg_name: String,
    pkg_vers: String,
    folder_path: String,
) -> anyhow::Result<String> {
    // check if expected built package exists
    let built_name = build_expected_repo_packagename(pkg_name.to_string(), pkg_vers.to_string());
    if fs::metadata(format!("{folder_path}/{built_name}")).is_ok() {
        println!("Built {built_name}");
        return Ok(built_name.to_string());
    }

    // the naming might not always contain the build version
    // e.g. mesa-git --> match pkgname and extension; if multiple match, return the latest
    if let Ok(paths) = fs::read_dir(folder_path) {
        let mut candidate_filename: Option<String> = None;
        let mut candidate_timestamp = SystemTime::UNIX_EPOCH;

        for path in paths {
            if let Ok(path) = path {
                let path = path.path();
                if let Some(file_name) = path.file_name() {
                    let file_name = file_name.to_str().unwrap();

                    if file_name.ends_with("-x86_64.pkg.tar.zst")
                        && file_name.starts_with(pkg_name.as_str())
                    {
                        if let Ok(metadata) = path.metadata() {
                            if let Ok(modified_time) = metadata.modified() {
                                // Update the candidate filename and timestamp if the current file is newer
                                if modified_time > candidate_timestamp {
                                    candidate_filename = Some(file_name.to_string());
                                    candidate_timestamp = modified_time;
                                }
                            }
                        }
                    }
                }
            }
        }

        if candidate_filename.is_some() {
            println!("Built {}", candidate_filename.clone().unwrap());
            return Ok(candidate_filename.unwrap());
        }
    }

    Err(anyhow!("Built package not found"))
}

/// Don't rely on this package name actually existing;
/// the PKGBUILD might build a different version name.
pub fn build_expected_repo_packagename(pkg_name: String, pkg_vers: String) -> String {
    format!("{pkg_name}-{pkg_vers}-x86_64.pkg.tar.zst")
}

backend/src/pkgbuild/mod.rs (1 line, Normal file)
@@ -0,0 +1 @@
pub mod build;

backend/src/repo/mod.rs (1 line, Normal file)
@@ -0,0 +1 @@
pub mod repo;

backend/src/repo/repo.rs (132 lines, Normal file)
@@ -0,0 +1,132 @@
use crate::aur::aur::download_pkgbuild;
use crate::db::prelude::Packages;
use crate::db::prelude::Versions;
use crate::db::versions;
use crate::pkgbuild::build::build_pkgbuild;
use anyhow::anyhow;
use sea_orm::{ColumnTrait, DatabaseConnection, EntityTrait, ModelTrait, QueryFilter};
use std::fs;
use std::process::Command;
use tokio::sync::broadcast::Sender;

static REPO_NAME: &str = "repo";
static BASEURL: &str = "https://aur.archlinux.org";

pub async fn add_pkg(
    url: String,
    version: String,
    name: String,
    tx: Sender<String>,
) -> anyhow::Result<String> {
    let fname = download_pkgbuild(format!("{}{}", BASEURL, url).as_str(), "./builds").await?;
    let pkg_file_name = build_pkgbuild(
        format!("./builds/{fname}"),
        version.as_str(),
        name.as_str(),
        tx,
    )
    .await?;

    // todo force overwrite if file already exists
    fs::copy(
        format!("./builds/{fname}/{pkg_file_name}"),
        format!("./repo/{pkg_file_name}"),
    )?;
    fs::remove_file(format!("./builds/{fname}/{pkg_file_name}"))?;

    repo_add(pkg_file_name.clone())?;

    Ok(pkg_file_name)
}

fn repo_add(pkg_file_name: String) -> anyhow::Result<()> {
    let db_file = format!("{REPO_NAME}.db.tar.gz");

    let output = Command::new("repo-add")
        .args(&[db_file.clone(), pkg_file_name, "--nocolor".to_string()])
        .current_dir("./repo/")
        .output()?;

    if !output.status.success() {
        return Err(anyhow!(
            "Error exit code when repo-add: {}{}",
            String::from_utf8_lossy(output.stdout.as_slice()),
            String::from_utf8_lossy(output.stderr.as_slice())
        ));
    }

    println!("{db_file} updated successfully");
    Ok(())
}

fn repo_remove(pkg_file_name: String) -> anyhow::Result<()> {
    let db_file = format!("{REPO_NAME}.db.tar.gz");

    let output = Command::new("repo-remove")
        .args(&[db_file.clone(), pkg_file_name, "--nocolor".to_string()])
        .current_dir("./repo/")
        .output()?;

    if !output.status.success() {
        return Err(anyhow!(
            "Error exit code when repo-remove: {}{}",
            String::from_utf8_lossy(output.stdout.as_slice()),
            String::from_utf8_lossy(output.stderr.as_slice())
        ));
    }

    println!("{db_file} updated successfully");
    Ok(())
}

pub async fn remove_pkg(db: &DatabaseConnection, pkg_id: i32) -> anyhow::Result<()> {
    let pkg = Packages::find_by_id(pkg_id)
        .one(db)
        .await?
        .ok_or(anyhow!("id not found"))?;

    fs::remove_dir_all(format!("./builds/{}", pkg.name))?;

    let versions = Versions::find()
        .filter(versions::Column::PackageId.eq(pkg.id))
        .all(db)
        .await?;

    for v in versions {
        rem_ver(db, v).await?;
    }

    // remove package db entry
    pkg.delete(db).await?;

    Ok(())
}

pub async fn remove_version(db: &DatabaseConnection, version_id: i32) -> anyhow::Result<()> {
    let version = Versions::find()
        .filter(versions::Column::PackageId.eq(version_id))
        .one(db)
        .await?;
    if let Some(version) = version {
        rem_ver(db, version).await?;
    }
    Ok(())
}

async fn rem_ver(db: &DatabaseConnection, version: versions::Model) -> anyhow::Result<()> {
    if let Some(filename) = version.file_name.clone() {
        // so repo-remove only supports passing a package name and removing the whole package
        // it seems that repo-add removes an older version when called
        // todo fix in future by implementing in rust
        if let Some(pkg) = Packages::find_by_id(version.package_id).one(db).await? {
            // remove from repo db
            repo_remove(pkg.name)?;

            // remove from fs
            fs::remove_file(format!("./repo/{filename}"))?;
        }
    }

    version.delete(db).await?;
    Ok(())
}