init
commit 2447399b46
Dockerfile (Normal file, 33 lines)
@@ -0,0 +1,33 @@
# Stage 1: Build the Rust binary
FROM rust AS builder

# Install necessary tools and dependencies

WORKDIR /app

# Copy the Rust project files
COPY ./src /app/src
COPY ./scripts /app/scripts
COPY Cargo.lock /app
COPY Cargo.toml /app

# Build the Rust binary
RUN cargo build --release

# Stage 2: Create the final image
FROM archlinux

# Copy the built binary from the previous stage
COPY --from=builder /app/target/release/untitled /usr/local/bin/untitled

RUN pacman -Syyu --noconfirm
RUN pacman -S --noconfirm base-devel git
RUN pacman -Sc

# Set any additional configurations or dependencies if required

# Example: Expose a port if your application listens on a specific port
# EXPOSE 8080

# Set the entry point or default command to run your application
CMD ["untitled"]
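For reference, a minimal sketch of building and running this image locally; the image tag is arbitrary, and the published ports assume the hard-coded 8081 (API) and 8080 (repo file server) from src/main.rs in this commit:

    docker build -t untitled .
    docker run -p 8080:8080 -p 8081:8081 untitled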
src/api/backend.rs (Normal file, 124 lines)
@@ -0,0 +1,124 @@
use crate::aur::aur::get_info_by_name;
use crate::builder::types::Action;
use crate::db::packages;
use crate::query_aur;
use rocket::serde::json::Json;
use rocket::serde::{Deserialize, Serialize};
use rocket::State;
use rocket::{get, post, Route};
use rocket_okapi::okapi::schemars;
use rocket_okapi::{openapi, openapi_get_routes, JsonSchema};
use sea_orm::{ActiveModelTrait, DatabaseConnection, Set};
use sea_orm::{DeleteResult, EntityTrait, ModelTrait};
use tokio::sync::broadcast::Sender;

use crate::db::prelude::Packages;
use crate::repo::repo::remove_pkg;

#[derive(Serialize, JsonSchema)]
#[serde(crate = "rocket::serde")]
struct ApiPackage {
    name: String,
    version: String,
}

#[openapi(tag = "test")]
#[get("/search?<query>")]
async fn search(query: &str) -> Result<Json<Vec<ApiPackage>>, String> {
    match query_aur(query).await {
        Ok(v) => {
            let mapped = v
                .iter()
                .map(|x| ApiPackage {
                    name: x.name.clone(),
                    version: x.version.clone(),
                })
                .collect();
            return Ok(Json(mapped));
        }
        Err(e) => {
            return Err(format!("{}", e));
        }
    }
}

#[openapi(tag = "test")]
#[get("/packages/list")]
async fn package_list(
    db: &State<DatabaseConnection>,
) -> Result<Json<Vec<packages::Model>>, String> {
    let db = db as &DatabaseConnection;

    let all: Vec<packages::Model> = Packages::find().all(db).await.unwrap();

    Ok(Json(all))
}

#[derive(Deserialize, JsonSchema)]
#[serde(crate = "rocket::serde")]
struct AddBody {
    name: String,
}

#[openapi(tag = "test")]
#[post("/packages/add", data = "<input>")]
async fn package_add(
    db: &State<DatabaseConnection>,
    input: Json<AddBody>,
    tx: &State<Sender<Action>>,
) -> Result<(), String> {
    let db = db as &DatabaseConnection;

    let pkg_name = &input.name;

    let pkg = get_info_by_name(pkg_name)
        .await
        .map_err(|_| "couldn't download package metadata".to_string())?;

    let new_package = packages::ActiveModel {
        name: Set(pkg_name.clone()),
        version: Set(pkg.version.clone()),
        ..Default::default()
    };

    let t = new_package.save(db).await.expect("TODO: panic message");

    let _ = tx.send(Action::Build(
        pkg.name,
        pkg.version,
        pkg.url_path.unwrap(),
        t.id.unwrap(),
    ));

    Ok(())
}

#[derive(Deserialize, JsonSchema)]
#[serde(crate = "rocket::serde")]
struct DelBody {
    id: i32,
}

#[openapi(tag = "test")]
#[post("/packages/delete", data = "<input>")]
async fn package_del(db: &State<DatabaseConnection>, input: Json<DelBody>) -> Result<(), String> {
    let db = db as &DatabaseConnection;
    let pkg_id = &input.id;

    let pkg = Packages::find_by_id(*pkg_id)
        .one(db)
        .await
        .unwrap()
        .unwrap();

    // remove folders
    remove_pkg(pkg.name.to_string(), pkg.version.to_string()).await;

    // remove package db entry
    let res: DeleteResult = pkg.delete(db).await.unwrap();
    Ok(())
}

pub fn build_api() -> Vec<Route> {
    openapi_get_routes![search, package_list, package_add, package_del]
}
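A sketch of how these routes could be exercised once the backend is up, assuming the 0.0.0.0:8081 Rocket config from src/main.rs; the package name and id below are placeholders:

    curl 'http://localhost:8081/search?query=yay'
    curl 'http://localhost:8081/packages/list'
    curl -X POST 'http://localhost:8081/packages/add' -H 'Content-Type: application/json' -d '{"name": "yay"}'
    curl -X POST 'http://localhost:8081/packages/delete' -H 'Content-Type: application/json' -d '{"id": 1}'

The same routes are also reachable through the Swagger UI mounted at /docs/ in src/main.rs.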
src/api/mod.rs (Normal file, 2 lines)
@@ -0,0 +1,2 @@
pub mod backend;
pub mod repository;
src/api/repository.rs (Normal file, 5 lines)
@@ -0,0 +1,5 @@
use rocket::fs::FileServer;

pub fn build_api() -> FileServer {
    FileServer::from("./repo")
}
src/aur/aur.rs (Normal file, 99 lines)
@@ -0,0 +1,99 @@
use anyhow::anyhow;
use aur_rs::{Package, Request};
use flate2::bufread::GzDecoder;
use std::fs;
use std::path::Path;
use tar::Archive;

pub async fn query_aur(query: &str) -> anyhow::Result<Vec<Package>> {
    let request = Request::default();
    let response = request.search_package_by_name(query).await;

    let response = match response {
        Ok(v) => v,
        Err(_) => {
            return Err(anyhow!("failed to search"));
        }
    };

    let mut response = response.results;
    response.sort_by(|x, x1| x.popularity.partial_cmp(&x1.popularity).unwrap().reverse());

    Ok(response)
}

pub async fn get_info_by_name(pkg_name: &str) -> anyhow::Result<Package> {
    let request = Request::default();
    let response = request.search_info_by_name(pkg_name).await;

    let mut response = match response {
        Ok(v) => v,
        Err(_) => {
            return Err(anyhow!("failed to get package"));
        }
    };

    let response = match response.results.pop() {
        None => {
            return Err(anyhow!("no package found"));
        }
        Some(v) => v,
    };

    Ok(response)
}

pub async fn download_pkgbuild(url: &str, dest_dir: &str) -> anyhow::Result<String> {
    let (file_data, file_name) = match download_file(url).await {
        Ok(data) => data,
        Err(e) => {
            return Err(anyhow!("Error downloading file: {}", e));
        }
    };

    // Check if the directory exists
    if !fs::metadata(dest_dir).is_ok() {
        // Create the directory if it does not exist
        fs::create_dir(dest_dir)?;
    }

    unpack_tar_gz(&file_data, dest_dir)?;
    Ok(file_name)
}

async fn download_file(url: &str) -> anyhow::Result<(Vec<u8>, String)> {
    let response = reqwest::get(url).await?;

    // extract the name of the file without its extension
    // todo: it might also be possible to use the package name here
    let t = response
        .url()
        .path_segments()
        .and_then(|segments| segments.last())
        .ok_or(anyhow!("no segments"))?
        .split(".")
        .collect::<Vec<&str>>()
        .first()
        .ok_or(anyhow!(""))?
        .to_string();

    println!("{}", t);

    let r = response.bytes().await?;
    Ok((r.to_vec(), t))
}

fn unpack_tar_gz(data: &[u8], target_dir: &str) -> anyhow::Result<()> {
    let tar = GzDecoder::new(data);
    let mut archive = Archive::new(tar);

    for entry in archive.entries()? {
        let mut entry = entry?;
        let path = entry.path()?;
        let entry_path = Path::new(target_dir).join(path);

        entry.unpack(entry_path)?;
    }

    Ok(())
}
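For illustration: the AUR RPC typically reports a URLPath such as /cgit/aur.git/snapshot/<pkgbase>.tar.gz, so combined with the BASEURL in src/repo/repo.rs the archive fetched by download_pkgbuild would look roughly like this (package name is a placeholder):

    curl -LO 'https://aur.archlinux.org/cgit/aur.git/snapshot/yay.tar.gz'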
src/aur/mod.rs (Normal file, 1 line)
@@ -0,0 +1 @@
pub mod aur;
src/builder/builder.rs (Normal file, 39 lines)
@@ -0,0 +1,39 @@
use crate::builder::types::Action;
use crate::db::packages;
use crate::db::prelude::Packages;
use crate::repo::repo::add_pkg;
use sea_orm::{ActiveModelTrait, DatabaseConnection, EntityTrait, Set};
use tokio::sync::broadcast::Sender;

pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {
    loop {
        if let Ok(_result) = tx.subscribe().recv().await {
            match _result {
                // add a package to parallel build
                Action::Build(name, version, url, id) => {
                    let db = db.clone();
                    tokio::spawn(async move {
                        match add_pkg(url, version, name).await {
                            Ok(_) => {
                                println!("successfully built package");

                                let mut pkg: packages::ActiveModel = Packages::find_by_id(id)
                                    .one(&db)
                                    .await
                                    .unwrap()
                                    .unwrap()
                                    .into();

                                pkg.status = Set(2);
                                let pkg: packages::Model = pkg.update(&db).await.unwrap();
                            }
                            Err(e) => {
                                println!("Error: {e}")
                            }
                        }
                    });
                }
            }
        }
    }
}
src/builder/mod.rs (Normal file, 2 lines)
@@ -0,0 +1,2 @@
pub mod builder;
pub mod types;
src/builder/types.rs (Normal file, 4 lines)
@@ -0,0 +1,4 @@
#[derive(Clone)]
pub enum Action {
    Build(String, String, String, i32),
}
src/db/migration/m20220101_000001_create_table.rs (Normal file, 49 lines)
@@ -0,0 +1,49 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Packages::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(Packages::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(Packages::Version).string().not_null())
                    .col(ColumnDef::new(Packages::name).string().not_null())
                    .col(
                        ColumnDef::new(Packages::Status)
                            .integer()
                            .not_null()
                            .default(0),
                    )
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Packages::Table).to_owned())
            .await
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum Packages {
    Table,
    name,
    Version,
    Id,
    Status,
}
src/db/migration/mod.rs (Normal file, 12 lines)
@@ -0,0 +1,12 @@
pub use sea_orm_migration::prelude::*;

mod m20220101_000001_create_table;

pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![Box::new(m20220101_000001_create_table::Migration)]
    }
}
src/db/mod.rs (Normal file, 6 lines)
@@ -0,0 +1,6 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2

pub mod prelude;

pub mod migration;
pub mod packages;
src/db/packages.rs (Normal file, 21 lines)
@@ -0,0 +1,21 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2

use rocket::serde::Serialize;
use rocket_okapi::okapi::schemars;
use rocket_okapi::JsonSchema;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, JsonSchema)]
#[sea_orm(table_name = "packages")]
pub struct Model {
    pub name: String,
    pub version: String,
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: i32,
    pub status: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
src/db/prelude.rs (Normal file, 3 lines)
@@ -0,0 +1,3 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2

pub use super::packages::Entity as Packages;
src/main.rs (Normal file, 89 lines)
@@ -0,0 +1,89 @@
mod api;
mod aur;
mod builder;
mod db;
mod pkgbuild;
mod repo;

use crate::api::{backend, repository};
use crate::aur::aur::query_aur;
use crate::builder::types::Action;
use crate::db::migration::Migrator;
use rocket::config::Config;
use rocket::futures::future::join_all;
use rocket_okapi::swagger_ui::{make_swagger_ui, SwaggerUIConfig};
use sea_orm::{Database, DatabaseConnection};
use sea_orm_migration::MigratorTrait;
use std::fs;
use tokio::sync::broadcast;

fn main() {
    let t = tokio::runtime::Runtime::new().unwrap();

    let (tx, _) = broadcast::channel::<Action>(32);

    t.block_on(async move {
        //build_package("sea-orm-cli").await;

        let db: DatabaseConnection = Database::connect("sqlite://db.sqlite?mode=rwc")
            .await
            .unwrap();

        Migrator::up(&db, None).await.unwrap();

        // Check if the directory exists
        if !fs::metadata("./repo").is_ok() {
            // Create the directory if it does not exist
            fs::create_dir("./repo").unwrap();
        }

        let db2 = db.clone();
        let tx2 = tx.clone();
        tokio::spawn(async move {
            builder::builder::init(db2, tx2).await;
        });

        let backend_handle = tokio::spawn(async {
            let mut config = Config::default();
            config.address = "0.0.0.0".parse().unwrap();
            config.port = 8081;

            let launch_result = rocket::custom(config)
                .manage(db)
                .manage(tx)
                .mount("/", backend::build_api())
                .mount(
                    "/docs/",
                    make_swagger_ui(&SwaggerUIConfig {
                        url: "../openapi.json".to_owned(),
                        ..Default::default()
                    }),
                )
                .launch()
                .await;
            match launch_result {
                Ok(_) => println!("Rocket shut down gracefully."),
                Err(err) => println!("Rocket had an error: {}", err),
            };
        });

        let repo_handle = tokio::spawn(async {
            let mut config = Config::default();
            config.address = "0.0.0.0".parse().unwrap();
            config.port = 8080;

            let launch_result = rocket::custom(config)
                .mount("/", repository::build_api())
                .launch()
                .await;
            match launch_result {
                Ok(_) => println!("Rocket shut down gracefully."),
                Err(err) => println!("Rocket had an error: {}", err),
            };
        });

        join_all([repo_handle, backend_handle]).await;
    });

    return;
}
src/pkgbuild/build.rs (Normal file, 95 lines)
@@ -0,0 +1,95 @@
use anyhow::anyhow;
use std::fs;
use std::process::{Command, Stdio};
use std::time::SystemTime;

pub fn build_pkgbuild(
    folder_path: String,
    pkg_vers: &str,
    pkg_name: &str,
) -> anyhow::Result<String> {
    let makepkg = include_str!("../../scripts/makepkg");

    // Create a temporary file to store the bash script content
    let script_file = std::env::temp_dir().join("makepkg_custom.sh");
    fs::write(&script_file, makepkg).expect("Unable to write script to file");

    let output = Command::new("bash")
        .args(&[
            script_file.as_os_str().to_str().unwrap(),
            "-f",
            "--noconfirm",
            "-s",
            "-c",
        ])
        .current_dir(folder_path.clone())
        .stdout(Stdio::inherit())
        .spawn()
        .unwrap();
    let output = output.wait_with_output();

    match output {
        Ok(output) => {
            if output.status.success() {
                let stdout = String::from_utf8_lossy(&output.stdout);
                println!("makepkg output: {}", stdout);
            } else {
                let stderr = String::from_utf8_lossy(&output.stderr);
                eprintln!("makepkg error: {}", stderr);

                return Err(anyhow!("failed to build package"));
            }
        }
        Err(err) => {
            eprintln!("Failed to execute makepkg: {}", err);
            return Err(anyhow!("failed to build package"));
        }
    }

    // check if the expected built file exists
    let built_name = build_repo_packagename(pkg_name.to_string(), pkg_vers.to_string());
    if fs::metadata(format!("{folder_path}/{built_name}")).is_ok() {
        println!("Built {built_name}");
        return Ok(built_name.to_string());
    }

    // the file name might not always contain the expected build version
    // e.g. mesa-git --> match package name and extension; if multiple files match, return the latest
    if let Ok(paths) = fs::read_dir(folder_path) {
        let mut candidate_filename: Option<String> = None;
        let mut candidate_timestamp = SystemTime::UNIX_EPOCH;

        for path in paths {
            if let Ok(path) = path {
                let path = path.path();
                if let Some(file_name) = path.file_name() {
                    let file_name = file_name.to_str().unwrap();

                    if file_name.ends_with("-x86_64.pkg.tar.zst") && file_name.starts_with(pkg_name)
                    {
                        if let Ok(metadata) = path.metadata() {
                            if let Ok(modified_time) = metadata.modified() {
                                // Update the candidate filename and timestamp if the current file is newer
                                if modified_time > candidate_timestamp {
                                    candidate_filename = Some(file_name.to_string());
                                    candidate_timestamp = modified_time;
                                }
                            }
                        }
                    }
                }
            }
        }

        if candidate_filename.is_some() {
            println!("Built {}", candidate_filename.clone().unwrap());
            return Ok(candidate_filename.unwrap());
        }
    }

    Err(anyhow!("No package built"))
}

pub fn build_repo_packagename(pkg_name: String, pkg_vers: String) -> String {
    format!("{pkg_name}-{pkg_vers}-x86_64.pkg.tar.zst")
}
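A worked example of the two lookups above, with placeholder values (package yay, version 12.0.5):

    expected name from build_repo_packagename:  yay-12.0.5-x86_64.pkg.tar.zst
    fallback directory scan matches, e.g.:      yay-12.0.5-1-x86_64.pkg.tar.zst  (makepkg output including a pkgrel; the newest file with the matching prefix and suffix wins)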
src/pkgbuild/mod.rs (Normal file, 1 line)
@@ -0,0 +1 @@
pub mod build;
src/repo/mod.rs (Normal file, 1 line)
@@ -0,0 +1 @@
pub mod repo;
src/repo/repo.rs (Normal file, 76 lines)
@@ -0,0 +1,76 @@
use crate::aur::aur::download_pkgbuild;
use crate::pkgbuild::build::{build_pkgbuild, build_repo_packagename};
use anyhow::anyhow;
use std::fs;
use std::process::Command;

static REPO_NAME: &str = "repo";
static BASEURL: &str = "https://aur.archlinux.org";

pub async fn add_pkg(url: String, version: String, name: String) -> anyhow::Result<()> {
    let fname = download_pkgbuild(format!("{}{}", BASEURL, url).as_str(), "./builds").await?;
    let pkg_file_name =
        build_pkgbuild(format!("./builds/{fname}"), version.as_str(), name.as_str())?;

    // todo force overwrite if file already exists
    fs::copy(
        format!("./builds/{fname}/{pkg_file_name}"),
        format!("./repo/{pkg_file_name}"),
    )?;
    fs::remove_file(format!("./builds/{fname}/{pkg_file_name}"))?;

    repo_add(pkg_file_name)?;

    Ok(())
}

fn repo_add(pkg_file_name: String) -> anyhow::Result<()> {
    let db_file = format!("{REPO_NAME}.db.tar.gz");

    let output = Command::new("repo-add")
        .args(&[db_file.clone(), pkg_file_name])
        .current_dir("./repo/")
        .output()?;

    if !output.status.success() {
        return Err(anyhow!(
            "Error exit code when repo-add: {}{}",
            String::from_utf8_lossy(output.stdout.as_slice()),
            String::from_utf8_lossy(output.stderr.as_slice())
        ));
    }

    println!("{db_file} updated successfully");
    Ok(())
}

fn repo_remove(pkg_file_name: String) -> anyhow::Result<()> {
    let db_file = format!("{REPO_NAME}.db.tar.gz");

    let output = Command::new("repo-remove")
        .args(&[db_file.clone(), pkg_file_name])
        .current_dir("./repo/")
        .output()?;

    if !output.status.success() {
        return Err(anyhow!(
            "Error exit code when repo-remove: {}{}",
            String::from_utf8_lossy(output.stdout.as_slice()),
            String::from_utf8_lossy(output.stderr.as_slice())
        ));
    }

    println!("{db_file} updated successfully");
    Ok(())
}

pub async fn remove_pkg(pkg_name: String, pkg_version: String) -> anyhow::Result<()> {
    fs::remove_dir_all(format!("./builds/{pkg_name}"))?;

    let filename = build_repo_packagename(pkg_name.clone(), pkg_version);
    fs::remove_file(format!("./repo/{filename}"))?;

    repo_remove(pkg_name)?;

    Ok(())
}
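A sketch of how a client machine might consume the resulting repository, assuming the "repo" database name from REPO_NAME, the file server on port 8080 from src/main.rs, unsigned packages (hence the relaxed SigLevel), and a placeholder host name in pacman.conf:

    [repo]
    SigLevel = Optional TrustAll
    Server = http://example.host:8080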