From 2447399b46525f05ba017866254406849b1c4625 Mon Sep 17 00:00:00 2001
From: lukas-heiligenbrunner
Date: Sat, 23 Dec 2023 19:27:36 +0100
Subject: [PATCH] init

---
 Dockerfile                            |  33 +++++
 src/api/backend.rs                    | 124 ++++++++++++++++++
 src/api/mod.rs                        |   2 +
 src/api/repository.rs                 |   5 +
 src/aur/aur.rs                        |  99 ++++++++++++++
 src/aur/mod.rs                        |   1 +
 src/builder/builder.rs                |  39 ++++++
 src/builder/mod.rs                    |   2 +
 src/builder/types.rs                  |   4 +
 .../m20220101_000001_create_table.rs  |  49 +++++++
 src/db/migration/mod.rs               |  12 ++
 src/db/mod.rs                         |   6 +
 src/db/packages.rs                    |  21 +++
 src/db/prelude.rs                     |   3 +
 src/main.rs                           |  89 +++++++++++++
 src/pkgbuild/build.rs                 |  95 ++++++++++++++
 src/pkgbuild/mod.rs                   |   1 +
 src/repo/mod.rs                       |   1 +
 src/repo/repo.rs                      |  76 +++++++++++
 19 files changed, 662 insertions(+)
 create mode 100644 Dockerfile
 create mode 100644 src/api/backend.rs
 create mode 100644 src/api/mod.rs
 create mode 100644 src/api/repository.rs
 create mode 100644 src/aur/aur.rs
 create mode 100644 src/aur/mod.rs
 create mode 100644 src/builder/builder.rs
 create mode 100644 src/builder/mod.rs
 create mode 100644 src/builder/types.rs
 create mode 100644 src/db/migration/m20220101_000001_create_table.rs
 create mode 100644 src/db/migration/mod.rs
 create mode 100644 src/db/mod.rs
 create mode 100644 src/db/packages.rs
 create mode 100644 src/db/prelude.rs
 create mode 100644 src/main.rs
 create mode 100644 src/pkgbuild/build.rs
 create mode 100644 src/pkgbuild/mod.rs
 create mode 100644 src/repo/mod.rs
 create mode 100644 src/repo/repo.rs

diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..735da29
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,33 @@
+# Stage 1: Build the Rust binary
+FROM rust AS builder
+
+# Install necessary tools and dependencies
+
+WORKDIR /app
+
+# Copy the Rust project files
+COPY ./src /app/src
+COPY ./scripts /app/scripts
+COPY Cargo.lock /app
+COPY Cargo.toml /app
+
+# Build the Rust binary
+RUN cargo build --release
+
+# Stage 2: Create the final image
+FROM archlinux
+
+# Copy the built binary from the previous stage
+COPY --from=builder /app/target/release/untitled /usr/local/bin/untitled
+
+# Install build tooling needed for makepkg, then clean the package cache
+RUN pacman -Syyu --noconfirm
+RUN pacman -S --noconfirm base-devel git
+RUN pacman -Sc --noconfirm
+
+# Set any additional configurations or dependencies if required
+
+# Example: Expose a port if your application listens on a specific port
+# EXPOSE 8080
+
+# Set the entry point or default command to run your application
+CMD ["untitled"]
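+
+# Note: stock makepkg refuses to run as root. Unless the bundled scripts/makepkg lifts that
+# restriction, builds triggered inside this container will likely need an unprivileged
+# user, e.g. (sketch):
+#   RUN useradd -m builder
+#   USER builder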
diff --git a/src/api/backend.rs b/src/api/backend.rs
new file mode 100644
index 0000000..bed600c
--- /dev/null
+++ b/src/api/backend.rs
@@ -0,0 +1,124 @@
+use crate::aur::aur::get_info_by_name;
+use crate::builder::types::Action;
+use crate::db::packages;
+use crate::query_aur;
+use rocket::serde::json::Json;
+use rocket::serde::{Deserialize, Serialize};
+use rocket::State;
+use rocket::{get, post, Route};
+use rocket_okapi::okapi::schemars;
+use rocket_okapi::{openapi, openapi_get_routes, JsonSchema};
+use sea_orm::{ActiveModelTrait, DatabaseConnection, Set};
+use sea_orm::{DeleteResult, EntityTrait, ModelTrait};
+use tokio::sync::broadcast::Sender;
+
+use crate::db::prelude::Packages;
+use crate::repo::repo::remove_pkg;
+
+#[derive(Serialize, JsonSchema)]
+#[serde(crate = "rocket::serde")]
+struct ApiPackage {
+    name: String,
+    version: String,
+}
+
+#[openapi(tag = "test")]
+#[get("/search?<query>")]
+async fn search(query: &str) -> Result<Json<Vec<ApiPackage>>, String> {
+    match query_aur(query).await {
+        Ok(v) => {
+            let mapped = v
+                .iter()
+                .map(|x| ApiPackage {
+                    name: x.name.clone(),
+                    version: x.version.clone(),
+                })
+                .collect();
+            Ok(Json(mapped))
+        }
+        Err(e) => Err(format!("{}", e)),
+    }
+}
+
+#[openapi(tag = "test")]
+#[get("/packages/list")]
+async fn package_list(
+    db: &State<DatabaseConnection>,
+) -> Result<Json<Vec<packages::Model>>, String> {
+    let db = db as &DatabaseConnection;
+
+    let all: Vec<packages::Model> = Packages::find()
+        .all(db)
+        .await
+        .map_err(|e| e.to_string())?;
+
+    Ok(Json(all))
+}
+
+#[derive(Deserialize, JsonSchema)]
+#[serde(crate = "rocket::serde")]
+struct AddBody {
+    name: String,
+}
+
+#[openapi(tag = "test")]
+#[post("/packages/add", data = "<input>")]
+async fn package_add(
+    db: &State<DatabaseConnection>,
+    input: Json<AddBody>,
+    tx: &State<Sender<Action>>,
+) -> Result<(), String> {
+    let db = db as &DatabaseConnection;
+
+    let pkg_name = &input.name;
+
+    let pkg = get_info_by_name(pkg_name)
+        .await
+        .map_err(|_| "couldn't download package metadata".to_string())?;
+
+    let new_package = packages::ActiveModel {
+        name: Set(pkg_name.clone()),
+        version: Set(pkg.version.clone()),
+        ..Default::default()
+    };
+
+    let t = new_package.save(db).await.map_err(|e| e.to_string())?;
+
+    let _ = tx.send(Action::Build(
+        pkg.name,
+        pkg.version,
+        pkg.url_path.unwrap(),
+        t.id.unwrap(),
+    ));
+
+    Ok(())
+}
+
+#[derive(Deserialize, JsonSchema)]
+#[serde(crate = "rocket::serde")]
+struct DelBody {
+    id: i32,
+}
+
+#[openapi(tag = "test")]
+#[post("/packages/delete", data = "<input>")]
+async fn package_del(db: &State<DatabaseConnection>, input: Json<DelBody>) -> Result<(), String> {
+    let db = db as &DatabaseConnection;
+    let pkg_id = &input.id;
+
+    let pkg = Packages::find_by_id(*pkg_id)
+        .one(db)
+        .await
+        .map_err(|e| e.to_string())?
+        .ok_or("package not found".to_string())?;
+
+    // remove folders
+    remove_pkg(pkg.name.to_string(), pkg.version.to_string())
+        .await
+        .map_err(|e| e.to_string())?;
+
+    // remove package db entry
+    let _res: DeleteResult = pkg.delete(db).await.map_err(|e| e.to_string())?;
+    Ok(())
+}
+
+pub fn build_api() -> Vec<Route> {
+    openapi_get_routes![search, package_list, package_add, package_del]
+}
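+
+// Example requests against these routes, assuming the API is mounted at "/" on port 8081
+// as done in main.rs ("paru" is just an illustrative AUR package name):
+//   curl 'http://localhost:8081/search?query=paru'
+//   curl -X POST http://localhost:8081/packages/add    -H 'Content-Type: application/json' -d '{"name":"paru"}'
+//   curl -X POST http://localhost:8081/packages/delete -H 'Content-Type: application/json' -d '{"id":1}'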
diff --git a/src/api/mod.rs b/src/api/mod.rs
new file mode 100644
index 0000000..f771c2f
--- /dev/null
+++ b/src/api/mod.rs
@@ -0,0 +1,2 @@
+pub mod backend;
+pub mod repository;
diff --git a/src/api/repository.rs b/src/api/repository.rs
new file mode 100644
index 0000000..2e6a364
--- /dev/null
+++ b/src/api/repository.rs
@@ -0,0 +1,5 @@
+use rocket::fs::FileServer;
+
+pub fn build_api() -> FileServer {
+    FileServer::from("./repo")
+}
diff --git a/src/aur/aur.rs b/src/aur/aur.rs
new file mode 100644
index 0000000..fd063f1
--- /dev/null
+++ b/src/aur/aur.rs
@@ -0,0 +1,99 @@
+use anyhow::anyhow;
+use aur_rs::{Package, Request};
+use flate2::bufread::GzDecoder;
+use std::fs;
+use std::path::Path;
+use tar::Archive;
+
+pub async fn query_aur(query: &str) -> anyhow::Result<Vec<Package>> {
+    let request = Request::default();
+    let response = request.search_package_by_name(query).await;
+
+    let response = match response {
+        Ok(v) => v,
+        Err(_) => {
+            return Err(anyhow!("failed to search"));
+        }
+    };
+
+    let mut response = response.results;
+    response.sort_by(|x, x1| x.popularity.partial_cmp(&x1.popularity).unwrap().reverse());
+
+    Ok(response)
+}
+
+pub async fn get_info_by_name(pkg_name: &str) -> anyhow::Result<Package> {
+    let request = Request::default();
+    let response = request.search_info_by_name(pkg_name).await;
+
+    let mut response = match response {
+        Ok(v) => v,
+        Err(_) => {
+            return Err(anyhow!("failed to get package"));
+        }
+    };
+
+    let response = match response.results.pop() {
+        None => {
+            return Err(anyhow!("no package found"));
+        }
+        Some(v) => v,
+    };
+
+    Ok(response)
+}
+
+pub async fn download_pkgbuild(url: &str, dest_dir: &str) -> anyhow::Result<String> {
+    let (file_data, file_name) = match download_file(url).await {
+        Ok(data) => data,
+        Err(e) => {
+            return Err(anyhow!("Error downloading file: {}", e));
+        }
+    };
+
+    // Create the destination directory if it does not exist yet
+    if fs::metadata(dest_dir).is_err() {
+        fs::create_dir_all(dest_dir)?;
+    }
+
+    unpack_tar_gz(&file_data, dest_dir)?;
+    Ok(file_name)
+}
+
+async fn download_file(url: &str) -> anyhow::Result<(Vec<u8>, String)> {
+    let response = reqwest::get(url).await?;
+
+    // extract name of file without extension
+    // todo might be also possible here to use package name
+    let file_name = response
+        .url()
+        .path_segments()
+        .and_then(|segments| segments.last())
+        .ok_or(anyhow!("no segments"))?
+        .split('.')
+        .collect::<Vec<&str>>()
+        .first()
+        .ok_or(anyhow!("empty file name"))?
+        .to_string();
+
+    println!("{}", file_name);
+
+    let bytes = response.bytes().await?;
+    Ok((bytes.to_vec(), file_name))
+}
+
+fn unpack_tar_gz(data: &[u8], target_dir: &str) -> anyhow::Result<()> {
+    let tar = GzDecoder::new(data);
+    let mut archive = Archive::new(tar);
+
+    for entry in archive.entries()? {
+        let mut entry = entry?;
+        let path = entry.path()?;
+        let entry_path = Path::new(target_dir).join(path);
+
+        entry.unpack(entry_path)?;
+    }
+
+    Ok(())
+}
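+
+// Note: `download_pkgbuild` receives the `url_path` field of the AUR RPC response
+// (typically "/cgit/aur.git/snapshot/<pkgbase>.tar.gz") prefixed with the AUR base url by
+// `add_pkg` in src/repo/repo.rs, so the returned `file_name` is normally the pkgbase of the
+// downloaded snapshot.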
diff --git a/src/aur/mod.rs b/src/aur/mod.rs
new file mode 100644
index 0000000..5f6c405
--- /dev/null
+++ b/src/aur/mod.rs
@@ -0,0 +1 @@
+pub mod aur;
diff --git a/src/builder/builder.rs b/src/builder/builder.rs
new file mode 100644
index 0000000..c9e70e2
--- /dev/null
+++ b/src/builder/builder.rs
@@ -0,0 +1,39 @@
+use crate::builder::types::Action;
+use crate::db::packages;
+use crate::db::prelude::Packages;
+use crate::repo::repo::add_pkg;
+use sea_orm::{ActiveModelTrait, DatabaseConnection, EntityTrait, Set};
+use tokio::sync::broadcast::Sender;
+
+pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {
+    // subscribe once, so queued build actions are not lost between loop iterations
+    let mut rx = tx.subscribe();
+    loop {
+        if let Ok(action) = rx.recv().await {
+            match action {
+                // add a package to parallel build
+                Action::Build(name, version, url, id) => {
+                    let db = db.clone();
+                    tokio::spawn(async move {
+                        match add_pkg(url, version, name).await {
+                            Ok(_) => {
+                                println!("successfully built package");
+
+                                let mut pkg: packages::ActiveModel = Packages::find_by_id(id)
+                                    .one(&db)
+                                    .await
+                                    .unwrap()
+                                    .unwrap()
+                                    .into();
+
+                                pkg.status = Set(2);
+                                pkg.update(&db).await.unwrap();
+                            }
+                            Err(e) => {
+                                println!("Error: {e}")
+                            }
+                        }
+                    });
+                }
+            }
+        }
+    }
+}
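+
+// Status convention, inferred from the migration default and the update above:
+// 0 = queued/building, 2 = built. A failed build only logs the error here and leaves the
+// row at its previous status.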
diff --git a/src/builder/mod.rs b/src/builder/mod.rs
new file mode 100644
index 0000000..ab228e5
--- /dev/null
+++ b/src/builder/mod.rs
@@ -0,0 +1,2 @@
+pub mod builder;
+pub mod types;
diff --git a/src/builder/types.rs b/src/builder/types.rs
new file mode 100644
index 0000000..fd2a5c4
--- /dev/null
+++ b/src/builder/types.rs
@@ -0,0 +1,4 @@
+#[derive(Clone)]
+pub enum Action {
+    // (package name, version, AUR url_path, database id)
+    Build(String, String, String, i32),
+}
diff --git a/src/db/migration/m20220101_000001_create_table.rs b/src/db/migration/m20220101_000001_create_table.rs
new file mode 100644
index 0000000..4d1b5b3
--- /dev/null
+++ b/src/db/migration/m20220101_000001_create_table.rs
@@ -0,0 +1,49 @@
+use sea_orm_migration::prelude::*;
+
+#[derive(DeriveMigrationName)]
+pub struct Migration;
+
+#[async_trait::async_trait]
+impl MigrationTrait for Migration {
+    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
+        manager
+            .create_table(
+                Table::create()
+                    .table(Packages::Table)
+                    .if_not_exists()
+                    .col(
+                        ColumnDef::new(Packages::Id)
+                            .integer()
+                            .not_null()
+                            .auto_increment()
+                            .primary_key(),
+                    )
+                    .col(ColumnDef::new(Packages::Version).string().not_null())
+                    .col(ColumnDef::new(Packages::Name).string().not_null())
+                    .col(
+                        ColumnDef::new(Packages::Status)
+                            .integer()
+                            .not_null()
+                            .default(0),
+                    )
+                    .to_owned(),
+            )
+            .await
+    }
+
+    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
+        manager
+            .drop_table(Table::drop().table(Packages::Table).to_owned())
+            .await
+    }
+}
+
+/// Learn more at https://docs.rs/sea-query#iden
+#[derive(Iden)]
+enum Packages {
+    Table,
+    Name,
+    Version,
+    Id,
+    Status,
+}
diff --git a/src/db/migration/mod.rs b/src/db/migration/mod.rs
new file mode 100644
index 0000000..2c605af
--- /dev/null
+++ b/src/db/migration/mod.rs
@@ -0,0 +1,12 @@
+pub use sea_orm_migration::prelude::*;
+
+mod m20220101_000001_create_table;
+
+pub struct Migrator;
+
+#[async_trait::async_trait]
+impl MigratorTrait for Migrator {
+    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
+        vec![Box::new(m20220101_000001_create_table::Migration)]
+    }
+}
diff --git a/src/db/mod.rs b/src/db/mod.rs
new file mode 100644
index 0000000..45f86e6
--- /dev/null
+++ b/src/db/mod.rs
@@ -0,0 +1,6 @@
+//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2
+
+pub mod prelude;
+
+pub mod migration;
+pub mod packages;
diff --git a/src/db/packages.rs b/src/db/packages.rs
new file mode 100644
index 0000000..cea5136
--- /dev/null
+++ b/src/db/packages.rs
@@ -0,0 +1,21 @@
+//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2
+
+use rocket::serde::Serialize;
+use rocket_okapi::okapi::schemars;
+use rocket_okapi::JsonSchema;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, JsonSchema)]
+#[sea_orm(table_name = "packages")]
+pub struct Model {
+    pub name: String,
+    pub version: String,
+    #[sea_orm(primary_key, auto_increment = false)]
+    pub id: i32,
+    pub status: i32,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {}
+
+impl ActiveModelBehavior for ActiveModel {}
diff --git a/src/db/prelude.rs b/src/db/prelude.rs
new file mode 100644
index 0000000..078bce5
--- /dev/null
+++ b/src/db/prelude.rs
@@ -0,0 +1,3 @@
+//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.2
+
+pub use super::packages::Entity as Packages;
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..dda3237
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,89 @@
+mod api;
+mod aur;
+mod builder;
+mod db;
+mod pkgbuild;
+mod repo;
+
+use crate::api::{backend, repository};
+use crate::aur::aur::query_aur;
+use crate::builder::types::Action;
+use crate::db::migration::Migrator;
+use rocket::config::Config;
+use rocket::futures::future::join_all;
+use rocket_okapi::swagger_ui::{make_swagger_ui, SwaggerUIConfig};
+use sea_orm::{Database, DatabaseConnection};
+use sea_orm_migration::MigratorTrait;
+use std::fs;
+use tokio::sync::broadcast;
+
+fn main() {
+    let runtime = tokio::runtime::Runtime::new().unwrap();
+
+    let (tx, _) = broadcast::channel::<Action>(32);
+
+    runtime.block_on(async move {
+        //build_package("sea-orm-cli").await;
+
+        let db: DatabaseConnection = Database::connect("sqlite://db.sqlite?mode=rwc")
+            .await
+            .unwrap();
+
+        Migrator::up(&db, None).await.unwrap();
+
+        // Create the repo directory if it does not exist yet
+        if fs::metadata("./repo").is_err() {
+            fs::create_dir("./repo").unwrap();
+        }
+
+        let db2 = db.clone();
+        let tx2 = tx.clone();
+        tokio::spawn(async move {
+            builder::builder::init(db2, tx2).await;
+        });
+
+        let backend_handle = tokio::spawn(async {
+            let mut config = Config::default();
+            config.address = "0.0.0.0".parse().unwrap();
+            config.port = 8081;
+
+            let launch_result = rocket::custom(config)
+                .manage(db)
+                .manage(tx)
+                .mount("/", backend::build_api())
+                .mount(
+                    "/docs/",
+                    make_swagger_ui(&SwaggerUIConfig {
+                        url: "../openapi.json".to_owned(),
+                        ..Default::default()
+                    }),
+                )
+                .launch()
+                .await;
+            match launch_result {
+                Ok(_) => println!("Rocket shut down gracefully."),
+                Err(err) => println!("Rocket had an error: {}", err),
+            };
+        });
+
+        let repo_handle = tokio::spawn(async {
+            let mut config = Config::default();
+            config.address = "0.0.0.0".parse().unwrap();
+            config.port = 8080;
+
+            let launch_result = rocket::custom(config)
+                .mount("/", repository::build_api())
+                .launch()
+                .await;
+            match launch_result {
+                Ok(_) => println!("Rocket shut down gracefully."),
+                Err(err) => println!("Rocket had an error: {}", err),
+            };
+        });
+
+        join_all([repo_handle, backend_handle]).await;
+    });
+}
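+
+// Runtime layout as configured above: the JSON API and the Swagger UI under /docs/ listen
+// on port 8081, the plain file server exposing ./repo (the pacman repository) listens on
+// port 8080, and the broadcast channel is the only link between the API and the build loop.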
Err(anyhow!("failed to build package")); + } + } + Err(err) => { + eprintln!("Failed to execute makepkg: {}", err); + return Err(anyhow!("failed to build package")); + } + } + + // check if expected built dir exists + let built_name = build_repo_packagename(pkg_name.to_string(), pkg_vers.to_string()); + if fs::metadata(format!("{folder_path}/{built_name}")).is_ok() { + println!("Built {built_name}"); + return Ok(built_name.to_string()); + } + + // the naming might not always contain the build version + // eg. mesa-git --> match pkgname and extension if multiple return latest + if let Ok(paths) = fs::read_dir(folder_path) { + let mut candidate_filename: Option = None; + let mut candidate_timestamp = SystemTime::UNIX_EPOCH; + + for path in paths { + if let Ok(path) = path { + let path = path.path(); + if let Some(file_name) = path.file_name() { + let file_name = file_name.to_str().unwrap(); + + if file_name.ends_with("-x86_64.pkg.tar.zst") && file_name.starts_with(pkg_name) + { + if let Ok(metadata) = path.metadata() { + if let Ok(modified_time) = metadata.modified() { + // Update the candidate filename and timestamp if the current file is newer + if modified_time > candidate_timestamp { + candidate_filename = Some(file_name.to_string()); + candidate_timestamp = modified_time; + } + } + } + } + } + } + } + + if candidate_filename.is_some() { + println!("Built {}", candidate_filename.clone().unwrap()); + return Ok(candidate_filename.unwrap()); + } + } + + Err(anyhow!("No package built")) +} + +pub fn build_repo_packagename(pkg_name: String, pkg_vers: String) -> String { + format!("{pkg_name}-{pkg_vers}-x86_64.pkg.tar.zst") +} diff --git a/src/pkgbuild/mod.rs b/src/pkgbuild/mod.rs new file mode 100644 index 0000000..76f610c --- /dev/null +++ b/src/pkgbuild/mod.rs @@ -0,0 +1 @@ +pub mod build; diff --git a/src/repo/mod.rs b/src/repo/mod.rs new file mode 100644 index 0000000..c426b23 --- /dev/null +++ b/src/repo/mod.rs @@ -0,0 +1 @@ +pub mod repo; diff --git a/src/repo/repo.rs b/src/repo/repo.rs new file mode 100644 index 0000000..c416499 --- /dev/null +++ b/src/repo/repo.rs @@ -0,0 +1,76 @@ +use crate::aur::aur::download_pkgbuild; +use crate::pkgbuild::build::{build_pkgbuild, build_repo_packagename}; +use anyhow::anyhow; +use std::fs; +use std::process::Command; + +static REPO_NAME: &str = "repo"; +static BASEURL: &str = "https://aur.archlinux.org"; + +pub async fn add_pkg(url: String, version: String, name: String) -> anyhow::Result<()> { + let fname = download_pkgbuild(format!("{}{}", BASEURL, url).as_str(), "./builds").await?; + let pkg_file_name = + build_pkgbuild(format!("./builds/{fname}"), version.as_str(), name.as_str())?; + + // todo force overwrite if file already exists + fs::copy( + format!("./builds/{fname}/{pkg_file_name}"), + format!("./repo/{pkg_file_name}"), + )?; + fs::remove_file(format!("./builds/{fname}/{pkg_file_name}"))?; + + repo_add(pkg_file_name)?; + + Ok(()) +} + +fn repo_add(pkg_file_name: String) -> anyhow::Result<()> { + let db_file = format!("{REPO_NAME}.db.tar.gz"); + + let output = Command::new("repo-add") + .args(&[db_file.clone(), pkg_file_name]) + .current_dir("./repo/") + .output()?; + + if !output.status.success() { + return Err(anyhow!( + "Error exit code when repo-add: {}{}", + String::from_utf8_lossy(output.stdout.as_slice()), + String::from_utf8_lossy(output.stderr.as_slice()) + )); + } + + println!("{db_file} updated successfully"); + Ok(()) +} + +fn repo_remove(pkg_file_name: String) -> anyhow::Result<()> { + let db_file = 
format!("{REPO_NAME}.db.tar.gz"); + + let output = Command::new("repo-remove") + .args(&[db_file.clone(), pkg_file_name]) + .current_dir("./repo/") + .output()?; + + if !output.status.success() { + return Err(anyhow!( + "Error exit code when repo-remove: {}{}", + String::from_utf8_lossy(output.stdout.as_slice()), + String::from_utf8_lossy(output.stderr.as_slice()) + )); + } + + println!("{db_file} updated successfully"); + Ok(()) +} + +pub async fn remove_pkg(pkg_name: String, pkg_version: String) -> anyhow::Result<()> { + fs::remove_dir_all(format!("./builds/{pkg_name}"))?; + + let filename = build_repo_packagename(pkg_name.clone(), pkg_version); + fs::remove_file(format!("./repo/{filename}"))?; + + repo_remove(pkg_name)?; + + Ok(()) +}