add queue mechanism for builds and only allow one package to be built at a time for now.
This commit is contained in:
parent 59d2094225
commit 63eea3c822
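The core of the change is in the builder: incoming `Action::Build` messages are no longer built immediately, but queued behind a `tokio::sync::Semaphore` holding a single permit, so only one package builds at a time (raising the permit count later would allow parallel builds, as the in-code todo hints). A minimal, self-contained sketch of that pattern, with illustrative names rather than the project's actual API:

```rust
use std::sync::Arc;
use tokio::sync::{broadcast, Semaphore};

#[tokio::main]
async fn main() {
    // One permit == one concurrent build; raising this would allow parallel builds.
    let semaphore = Arc::new(Semaphore::new(1));
    let (tx, mut rx) = broadcast::channel::<String>(16);

    // Hypothetical producer: queue three "packages" to build.
    for name in ["foo", "bar", "baz"] {
        tx.send(name.to_string()).unwrap();
    }
    drop(tx); // no more jobs; the receive loop below will end with RecvError::Closed

    let mut handles = Vec::new();
    // Every received job is spawned right away (it is now "queued"), but its body
    // only runs once the semaphore hands out its single permit.
    while let Ok(pkg) = rx.recv().await {
        let permits = Arc::clone(&semaphore);
        handles.push(tokio::spawn(async move {
            let _permit = permits.acquire().await.unwrap();
            println!("building {pkg}");
            // ... run the actual build here; the permit is released when it drops ...
        }));
    }
    for h in handles {
        h.await.unwrap();
    }
}
```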
@@ -41,7 +41,7 @@ pub async fn package_add(
        None => {
            let new_package = packages::ActiveModel {
                name: Set(input.name.clone()),
                status: Set(0),
                status: Set(3),
                latest_aur_version: Set(pkg.version.clone()),
                ..Default::default()
            };
@@ -77,7 +77,7 @@ pub async fn package_add(
        }
    };

    pkg_model.status = Set(0);
    pkg_model.status = Set(3);
    pkg_model.latest_version_id = Set(Some(version_model.id.clone().unwrap()));
    pkg_model.save(db).await.expect("todo error message");

@@ -84,14 +84,16 @@ pub async fn get_package(
    let db = db as &DatabaseConnection;

    let all: ListPackageModel = Packages::find()
        .join_rev(JoinType::InnerJoin, versions::Relation::Packages.def())
        .join_rev(JoinType::LeftJoin, versions::Relation::LatestPackage.def())
        .filter(packages::Column::Id.eq(id))
        .select_only()
        .column_as(versions::Column::Id.count(), "count")
        .column(packages::Column::Name)
        .column(packages::Column::Id)
        .column(packages::Column::Status)
        .group_by(packages::Column::Name)
        .column_as(packages::Column::OutOfDate, "outofdate")
        .column_as(packages::Column::LatestAurVersion, "latest_aur_version")
        .column_as(versions::Column::Version, "latest_version")
        .column_as(packages::Column::LatestVersionId, "latest_version_id")
        .into_model::<ListPackageModel>()
        .one(db)
        .await

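For context, `get_package` builds `ListPackageModel` from a custom projection rather than from the entity models themselves. A stripped-down sketch of that SeaORM pattern, using a hypothetical `packages`-like entity and DTO (not the project's actual types):

```rust
use sea_orm::entity::prelude::*;
use sea_orm::{DatabaseConnection, DbErr, FromQueryResult, QueryFilter, QuerySelect};

// Hypothetical entity standing in for the project's `packages` table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "packages")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub name: String,
    pub status: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

// DTO populated by column name / alias, analogous to ListPackageModel.
#[derive(Debug, FromQueryResult)]
pub struct PackageRow {
    pub id: i32,
    pub name: String,
    pub status: i32,
}

// `select_only()` drops the default columns, `column`/`column_as` pick the
// projection, and `into_model` maps each row onto the DTO by name.
pub async fn fetch_package(db: &DatabaseConnection, id: i32) -> Result<Option<PackageRow>, DbErr> {
    Entity::find()
        .filter(Column::Id.eq(id))
        .select_only()
        .column(Column::Id)
        .column(Column::Name)
        .column_as(Column::Status, "status")
        .into_model::<PackageRow>()
        .one(db)
        .await
}
```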
@@ -1,27 +1,54 @@
use crate::builder::types::Action;
use crate::db::builds::ActiveModel;
use crate::db::prelude::{Builds, Packages};
use crate::db::{builds, packages};
use crate::db::{builds, packages, versions};
use crate::repo::repo::add_pkg;
use anyhow::anyhow;
use sea_orm::{ActiveModelTrait, DatabaseConnection, EntityTrait, Set};
use std::ops::Add;
use std::sync::Arc;
use std::time::{SystemTime, UNIX_EPOCH};
use tokio::sync::broadcast;
use tokio::sync::broadcast::error::RecvError;
use tokio::sync::broadcast::Sender;
use tokio::sync::broadcast::{Receiver, Sender};
use tokio::sync::{broadcast, Semaphore};

pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {
    let semaphore = Arc::new(Semaphore::new(1));

    loop {
        if let Ok(_result) = tx.subscribe().recv().await {
            match _result {
                // add a package to parallel build
                Action::Build(name, version, url, mut version_model) => {
                    let db = db.clone();
                    let _ = queue_package(
                        name,
                        version,
                        url,
                        version_model,
                        db.clone(),
                        semaphore.clone(),
                    )
                    .await;
                }
            }
        }
    }
}

async fn queue_package(
    name: String,
    version: String,
    url: String,
    mut version_model: versions::ActiveModel,
    db: DatabaseConnection,
    semaphore: Arc<Semaphore>,
) -> anyhow::Result<()> {
    // set build status to pending
    let build = builds::ActiveModel {
        pkg_id: version_model.package_id.clone(),
        version_id: version_model.id.clone(),
        ouput: Set(None),
        status: Set(Some(0)),
        status: Set(Some(3)),
        start_time: Set(Some(
            SystemTime::now()
                .duration_since(UNIX_EPOCH)
@@ -32,47 +59,52 @@ pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {
    };
    let mut new_build = build.save(&db).await.unwrap();

    let permits = Arc::clone(&semaphore);

    // spawn new thread for each pkg build
    // todo add queue and build two packages in parallel
    tokio::spawn(async move {
        let (tx, mut rx) = broadcast::channel::<String>(3);
        let _permit = permits.acquire().await.unwrap();

        let db2 = db.clone();
        let new_build2 = new_build.clone();
        tokio::spawn(async move {
            loop {
                match rx.recv().await {
                    Ok(output_line) => {
                        println!("{output_line}");
                        // set build status to building
                        new_build.status = Set(Some(0));
                        new_build = new_build.save(&db).await.unwrap();

                        let _ = append_db_log_output(
                            &db2,
                            output_line,
                            new_build2.id.clone().unwrap(),
                        )
                        .await;
                    }
                    Err(e) => match e {
                        RecvError::Closed => {
                            break;
                        }
                        RecvError::Lagged(_) => {}
                    },
                }
            }
        build_package(new_build, db, version_model, version, name, url).await;
    });
    Ok(())
}

async fn build_package(
    mut new_build: builds::ActiveModel,
    db: DatabaseConnection,
    mut version_model: versions::ActiveModel,
    version: String,
    name: String,
    url: String,
) -> anyhow::Result<()> {
    let (tx, rx) = broadcast::channel::<String>(3);
    spawn_log_appender(db.clone(), new_build.clone(), rx);

    let package_id = version_model.package_id.clone().unwrap();
    let mut pkg: packages::ActiveModel = Packages::find_by_id(package_id)
        .one(&db)
        .await?
        .ok_or(anyhow!("no package with id {package_id} found"))?
        .into();

    // update status to building
    pkg.status = Set(0);
    pkg = pkg.update(&db).await?.into();

    match add_pkg(url, version, name, tx).await {
        Ok(pkg_file_name) => {
            println!("successfully built package");
            let _ = set_pkg_status(
                &db,
                version_model.package_id.clone().unwrap(),
                version_model.id.clone().unwrap(),
                Some(false),
                1,
            )
            .await;
            // update package success status
            pkg.status = Set(1);
            pkg.latest_version_id = Set(Some(version_model.id.clone().unwrap()));
            pkg.out_of_date = Set(false as i32);
            pkg.update(&db).await?;

            version_model.file_name = Set(Some(pkg_file_name));
            let _ = version_model.update(&db).await;
@@ -87,14 +119,10 @@ pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {
            let _ = new_build.update(&db).await;
        }
        Err(e) => {
            let _ = set_pkg_status(
                &db,
                version_model.package_id.clone().unwrap(),
                version_model.id.clone().unwrap(),
                None,
                2,
            )
            .await;
            pkg.status = Set(2);
            pkg.latest_version_id = Set(Some(version_model.id.clone().unwrap()));
            pkg.update(&db).await?;

            let _ = version_model.update(&db).await;

            new_build.status = Set(Some(2));
@@ -108,36 +136,30 @@ pub async fn init(db: DatabaseConnection, tx: Sender<Action>) {

            println!("Error: {e}")
        }
    };
    Ok(())
}

fn spawn_log_appender(db2: DatabaseConnection, new_build2: ActiveModel, mut rx: Receiver<String>) {
    tokio::spawn(async move {
        loop {
            match rx.recv().await {
                Ok(output_line) => {
                    println!("{output_line}");

                    let _ = append_db_log_output(&db2, output_line, new_build2.id.clone().unwrap())
                        .await;
                }
                Err(e) => match e {
                    RecvError::Closed => {
                        break;
                    }
                    RecvError::Lagged(_) => {}
                },
            }
        }
    });
}
                }
            }
        }
    }
}

// todo maybe move to helper file
async fn set_pkg_status(
    db: &DatabaseConnection,
    package_id: i32,
    version_id: i32,
    outofdate: Option<bool>,
    status: i32,
) -> anyhow::Result<()> {
    let mut pkg: packages::ActiveModel = Packages::find_by_id(package_id)
        .one(db)
        .await?
        .ok_or(anyhow!("no package with id {package_id} found"))?
        .into();

    pkg.status = Set(status);
    pkg.latest_version_id = Set(Some(version_id));
    if outofdate.is_some() {
        pkg.out_of_date = Set(outofdate.unwrap() as i32)
    }
    pkg.update(db).await?;
    Ok(())
}

async fn append_db_log_output(
    db: &DatabaseConnection,

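The build output handling above is a simple fan-out: `add_pkg` sends each output line into a `broadcast` channel, and `spawn_log_appender` drains it in its own task, persisting every line until the sender side closes (`RecvError::Closed` marks the end of the build, lagged messages are skipped). A stripped-down sketch of that pattern, with the database swapped for an in-memory log buffer and hypothetical names:

```rust
use std::sync::{Arc, Mutex};
use tokio::sync::broadcast::{self, error::RecvError, Receiver};

// Drain a broadcast receiver into a shared log buffer until the sender closes;
// the real builder appends each line to the build row in the database instead.
fn spawn_log_appender(log: Arc<Mutex<String>>, mut rx: Receiver<String>) -> tokio::task::JoinHandle<()> {
    tokio::spawn(async move {
        loop {
            match rx.recv().await {
                Ok(line) => {
                    println!("{line}");
                    let mut buf = log.lock().unwrap();
                    buf.push_str(&line);
                    buf.push('\n');
                }
                // Sender dropped: the build finished, stop appending.
                Err(RecvError::Closed) => break,
                // We fell behind; skip the lost lines rather than abort.
                Err(RecvError::Lagged(_)) => {}
            }
        }
    })
}

#[tokio::main]
async fn main() {
    let (tx, rx) = broadcast::channel::<String>(3);
    let log = Arc::new(Mutex::new(String::new()));
    let appender = spawn_log_appender(Arc::clone(&log), rx);

    // Hypothetical build producing a couple of output lines.
    for line in ["==> Making package...", "==> Finished making package."] {
        tx.send(line.to_string()).unwrap();
    }
    drop(tx); // closes the channel, letting the appender task exit

    appender.await.unwrap();
    assert!(log.lock().unwrap().contains("Finished"));
}
```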
@@ -156,6 +156,8 @@ IconData switchSuccessIcon(int status) {
      return Icons.check_circle_outline;
    case 2:
      return Icons.cancel_outlined;
    case 3:
      return Icons.pause_circle_outline;
    default:
      return Icons.question_mark_outlined;
  }
@@ -169,6 +171,8 @@ Color switchSuccessColor(int status) {
      return const Color(0xFF0A6900);
    case 2:
      return const Color(0xff760707);
    case 3:
      return const Color(0xFF0044AA);
    default:
      return const Color(0xFF9D8D00);
  }

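Taken together with the `Set(0)`/`Set(1)`/`Set(2)`/`Set(3)` calls in the builder, the frontend switches above suggest a shared integer status convention: 0 = building, 1 = success, 2 = failed, and the new 3 = queued. The project passes raw integers around; purely as an illustration, the mapping could be captured in one place like this hypothetical Rust enum:

```rust
/// Hypothetical enum mirroring the integer status codes that the API, builder
/// and frontend appear to agree on (the project itself stores plain i32 values).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(i32)]
enum BuildStatus {
    Building = 0,
    Success = 1,
    Failed = 2,
    Queued = 3,
}

impl BuildStatus {
    fn from_i32(v: i32) -> Option<Self> {
        match v {
            0 => Some(Self::Building),
            1 => Some(Self::Success),
            2 => Some(Self::Failed),
            3 => Some(Self::Queued),
            _ => None,
        }
    }
}

fn main() {
    // A freshly added package/build now starts out queued ...
    let status = BuildStatus::Queued;
    assert_eq!(status as i32, 3);
    // ... and unknown values fall through to a default icon/colour in the UI.
    assert_eq!(BuildStatus::from_i32(7), None);
}
```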
@@ -24,7 +24,7 @@ class _BuildScreenState extends State<BuildScreen> {
      body: APIBuilder<BuildProvider, Build, BuildDTO>(
          dto: BuildDTO(buildID: widget.buildID),
          interval: const Duration(seconds: 10),
          onLoad: () => const Text("no data"),
          onLoad: () => const Text("loading"),
          onData: (buildData) {
            final start_time = DateTime.fromMillisecondsSinceEpoch(
                (buildData.start_time ?? 0) * 1000);
@@ -64,11 +64,23 @@ class _BuildScreenState extends State<BuildScreen> {
                const SizedBox(
                  height: 15,
                ),
                BuildOutput(build: buildData)
                _buildPage(buildData)
              ],
            );
          }),
      appBar: AppBar(),
    );
  }

  Widget _buildPage(Build build) {
    switch (build.status) {
      case 3:
        return const Text("in Queue");
      case 0:
      case 1:
      case 2:
      default:
        return BuildOutput(build: build);
    }
  }
}

@@ -1,6 +1,3 @@
import 'dart:async';

import 'package:aurcache/api/builds.dart';
import 'package:aurcache/api/packages.dart';
import 'package:aurcache/components/api/APIBuilder.dart';
import 'package:aurcache/providers/builds_provider.dart';