
Comparing changes

base repository: NixOS/ofborg
base: b0ad48860b3e
head repository: NixOS/ofborg
compare: 1d906ff67c90

Commits on Jan 3, 2019

  1. 1b5287d (Verified: signed with the committer's verified signature, Mic92 / Jörg Thalheim)
  2. 9be551f
  3. 0263690
  4. 8fb7eab
  5. cb7012e
  6. 1ceae29
  7. 0c0cf89
  8. 7eb3b81
  9. clippy: &Vec<T> -> &[T]
     grahamc committed Jan 3, 2019 (848f1a0)
  10. 99159dd
  11. clippy: useless use of vec[]
      grahamc committed Jan 3, 2019 (48d2815)
  12. a9e2c02
  13. clippy: map .clone -> cloned
      grahamc committed Jan 3, 2019 (0ab2ce4)
  14. fc8494b
  15. 2a90813
  16. clippy: don't clone a copy
      grahamc committed Jan 3, 2019 (8fe6899)
  17. 2b338c3
  18. f2a0ca2
  19. clippy: identical conversion
      grahamc committed Jan 3, 2019 (91e23d1)
  20. clippy: drop &ref
      grahamc committed Jan 3, 2019 (e9f437d)
  21. clippy: Unneeded lifetime
      grahamc committed Jan 3, 2019 (c73cf2e)
  22. clippy: write -> write_all
      grahamc committed Jan 3, 2019 (1d906ff)
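The commit subjects above name the clippy lints being fixed, and the bulk of the diff below removes needless return statements. As a rough illustration of what those lints ask for, the sketch below uses hypothetical functions; it is not code from this PR.

use std::io::Write;

// clippy: &Vec<T> -> &[T]: accept a slice rather than a reference to a Vec.
fn sum_old(xs: &Vec<u64>) -> u64 { xs.iter().sum() }
fn sum_new(xs: &[u64]) -> u64 { xs.iter().sum() }

// clippy: map .clone -> cloned: .cloned() replaces .map(|x| x.clone()).
fn copy_old(xs: &[String]) -> Vec<String> { xs.iter().map(|x| x.clone()).collect() }
fn copy_new(xs: &[String]) -> Vec<String> { xs.iter().cloned().collect() }

// Needless return: the last expression of a block is already its value.
fn answer_old() -> u64 { return 42; }
fn answer_new() -> u64 { 42 }

// clippy: write -> write_all: write() may write only part of the buffer,
// write_all() keeps writing until everything is out or an error occurs.
fn emit(mut out: impl Write) -> std::io::Result<()> {
    out.write_all(b"done\n")
}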
10 changes: 5 additions & 5 deletions ofborg/build.rs
@@ -429,7 +429,7 @@ pub enum Event {
f.write_all("\n}\n\n".as_bytes()).unwrap();

f.write_all(b"pub fn event_metric_name(event: &Event) -> String {
-match event {
+match *event {
").unwrap();

let variants: Vec<String> = events()
@@ -452,7 +452,7 @@ pub enum Event {
}


format!(" &Event::{} => String::from(\"{}\")",
format!(" Event::{} => String::from(\"{}\")",
&variant_match,
&mtype.metric_name(),
)
@@ -568,7 +568,7 @@ impl MetricCollector {
index_fields.push("instance".to_owned());
let ref_index_fields: Vec<String> = index_fields
.iter()
.map(|m| format!("ref {}", m))
.map(|m| format!("{}", m))
.collect();

let for_matcher: String;
@@ -591,7 +591,7 @@ impl MetricCollector {
.expect(\"Failed to unwrap metric mutex for {}\");
let values: Vec<String> = (*table)
.iter()
.map(|(&{}, value)| {{
.map(|({}, value)| {{
let kvs: Vec<String> = vec![
{}
];
@@ -615,7 +615,7 @@ impl MetricCollector {


f.write_all(variants.join("\n").as_bytes()).unwrap();
f.write_all("return output;\n }".as_bytes()).unwrap();
f.write_all("output\n }".as_bytes()).unwrap();
f.write_all("\n}".as_bytes()).unwrap();

}
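For context on the build.rs hunks above: clippy's match_ref_pats lint prefers dereferencing the value being matched over writing a reference pattern in every arm, which is what the generated event_metric_name code now does. A minimal sketch on a hypothetical two-variant enum (the variant names are made up; the real generated code covers many more events):

enum Event { BuildStarted, BuildFinished }

// Before: every arm matches a reference pattern.
fn metric_name_old(event: &Event) -> String {
    match event {
        &Event::BuildStarted => String::from("build_started"),
        &Event::BuildFinished => String::from("build_finished"),
    }
}

// After: dereference once and match plain variants.
fn metric_name_new(event: &Event) -> String {
    match *event {
        Event::BuildStarted => String::from("build_started"),
        Event::BuildFinished => String::from("build_finished"),
    }
}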
13 changes: 6 additions & 7 deletions ofborg/src/acl.rs
@@ -13,11 +13,12 @@ impl ACL {
) -> ACL {
trusted_users.iter_mut().map(|x| *x = x.to_lowercase()).last();
known_users.iter_mut().map(|x| *x = x.to_lowercase()).last();
-return ACL {
+
+ACL {
trusted_users,
known_users,
repos,
-};
+}
}

pub fn is_repo_eligible(&self, name: &str) -> bool {
@@ -46,16 +47,14 @@ impl ACL {
return false;
}

-return self.known_users.contains(&user.to_lowercase());
+self.known_users.contains(&user.to_lowercase())
}

pub fn can_build_unrestricted(&self, user: &str, repo: &str) -> bool {
if repo.to_lowercase() == "nixos/nixpkgs" {
-return self.trusted_users.contains(&user.to_lowercase());
-} else if user == "grahamc" {
-return true;
+self.trusted_users.contains(&user.to_lowercase())
} else {
-return false;
+user == "grahamc"
}
}
}
10 changes: 4 additions & 6 deletions ofborg/src/asynccmd.rs
@@ -166,7 +166,7 @@ impl AsyncCmd {
}
}

-if waiters.len() == 0 {
+if waiters.is_empty() {
debug!("Closing up the waiter receiver thread, no more waiters.");
break;
}
@@ -177,7 +177,7 @@ impl AsyncCmd {
waiters.len()
);

-return return_status;
+return_status
});

SpawnedAsyncCmd {
@@ -189,20 +189,18 @@ impl AsyncCmd {


impl SpawnedAsyncCmd {
-pub fn lines<'a>(&'a mut self) -> mpsc::Iter<'a, String> {
+pub fn lines(&mut self) -> mpsc::Iter<'_, String> {
self.rx.iter()
}

pub fn wait(self) -> Result<ExitStatus, io::Error> {
self.waiter.join()
.map_err(|_err| io::Error::new(io::ErrorKind::Other, "Couldn't join thread."))
-.and_then(|opt| opt.ok_or(io::Error::new(io::ErrorKind::Other, "Thread didn't return an exit status.")))
+.and_then(|opt| opt.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Thread didn't return an exit status.")))
.and_then(|res| res)
}
}



#[cfg(test)]
mod tests {
use super::AsyncCmd;
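The last asynccmd.rs hunk swaps ok_or for ok_or_else. The difference is evaluation: ok_or(expr) builds its error value even when the Option is Some, while ok_or_else(|| expr) only runs the closure in the None case. A small sketch with a hypothetical Option, not the ofborg code itself:

use std::io;

fn require(opt: Option<u32>) -> Result<u32, io::Error> {
    // The closure, and therefore the io::Error construction, only runs when
    // `opt` is None.
    opt.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "missing value"))
}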
13 changes: 6 additions & 7 deletions ofborg/src/bin/builder.rs
@@ -51,9 +51,8 @@ fn main() {
})
.unwrap();

-let queue_name: String;
-if cfg.runner.build_all_jobs != Some(true) {
-queue_name = channel
+let queue_name: String = if cfg.runner.build_all_jobs != Some(true) {
+channel
.declare_queue(easyamqp::QueueConfig {
queue: format!("build-inputs-{}", cfg.nix.system.clone()),
passive: false,
@@ -63,11 +62,11 @@ fn main() {
no_wait: false,
arguments: None,
})
-.unwrap().queue;
+.unwrap().queue
} else {
warn!("Building all jobs, please don't use this unless you're");
warn!("developing and have Graham's permission!");
-queue_name = channel
+channel
.declare_queue(easyamqp::QueueConfig {
queue: "".to_owned(),
passive: false,
@@ -77,8 +76,8 @@ fn main() {
no_wait: false,
arguments: None,
})
-.unwrap().queue;
-}
+.unwrap().queue
+};

channel
.bind_queue(easyamqp::BindQueueConfig {
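The builder.rs change above replaces a deferred let queue_name: String; plus per-branch assignments with a single binding initialized from the if expression. Roughly, on a hypothetical helper (build_all_jobs and system stand in for the config fields; this is not the actual builder.rs code):

fn pick_queue_name(build_all_jobs: bool, system: &str) -> String {
    // Both branches yield a String, so the binding can be initialized directly
    // from the if expression instead of being declared first and assigned later.
    let queue_name: String = if !build_all_jobs {
        format!("build-inputs-{}", system)
    } else {
        // An empty queue name (as in the else branch above) typically lets the
        // broker generate one.
        String::new()
    };
    queue_name
}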
2 changes: 1 addition & 1 deletion ofborg/src/bin/mass-rebuilder.rs
@@ -45,7 +45,7 @@ fn main() {

let mrw = tasks::massrebuilder::MassRebuildWorker::new(
cloner,
-nix,
+&nix,
cfg.github(),
cfg.acl(),
cfg.runner.identity.clone(),
2 changes: 1 addition & 1 deletion ofborg/src/bin/simple-build.rs
@@ -32,5 +32,5 @@ fn main() {
fn file_to_str(f: &mut File) -> String {
let mut buffer = Vec::new();
f.read_to_end(&mut buffer).expect("Reading eval output");
-return String::from(String::from_utf8_lossy(&buffer));
+String::from(String::from_utf8_lossy(&buffer))
}
64 changes: 32 additions & 32 deletions ofborg/src/checkout.rs
@@ -13,7 +13,7 @@ pub struct CachedCloner {
}

pub fn cached_cloner(path: &Path) -> CachedCloner {
-return CachedCloner { root: path.to_path_buf() };
+CachedCloner { root: path.to_path_buf() }
}

pub struct CachedProject {
@@ -29,7 +29,7 @@ pub struct CachedProjectCo {
}

impl CachedCloner {
-pub fn project(&self, name: String, clone_url: String) -> CachedProject {
+pub fn project(&self, name: &str, clone_url: String) -> CachedProject {
// <root>/repo/<hash>/clone
// <root>/repo/<hash>/clone.lock
// <root>/repo/<hash>/<type>/<id>
@@ -39,10 +39,10 @@ impl CachedCloner {
new_root.push("repo");
new_root.push(format!("{:x}", md5::compute(&name)));

-return CachedProject {
+CachedProject {
root: new_root,
clone_url,
-};
+}
}
}

@@ -53,12 +53,12 @@ impl CachedProject {
let mut new_root = self.root.clone();
new_root.push(use_category);

-return Ok(CachedProjectCo {
+Ok(CachedProjectCo {
root: new_root,
id,
clone_url: self.clone_from().clone(),
local_reference: self.clone_to().clone(),
-});
+})
}

fn prefetch_cache(&self) -> Result<PathBuf, Error> {
@@ -67,7 +67,7 @@ impl CachedProject {
self.clone_repo()?;
self.fetch_repo()?;

-return Ok(self.clone_to());
+Ok(self.clone_to())
}
}

@@ -89,7 +89,7 @@ impl CachedProjectCo {

// let build_dir = self.build_dir();

-return Ok(self.clone_to().to_str().unwrap().to_string());
+Ok(self.clone_to().to_str().unwrap().to_string())
}

pub fn fetch_pr(&self, pr_id: u64) -> Result<(), Error> {
@@ -105,9 +105,9 @@ impl CachedProjectCo {
lock.unlock();

if result.success() {
-return Ok(());
+Ok(())
} else {
return Err(Error::new(ErrorKind::Other, "Failed to fetch PR"));
Err(Error::new(ErrorKind::Other, "Failed to fetch PR"))
}
}

@@ -124,7 +124,7 @@ impl CachedProjectCo {

lock.unlock();

-return result.success();
+result.success()
}

pub fn merge_commit(&self, commit: &OsStr) -> Result<(), Error> {
@@ -142,9 +142,9 @@ impl CachedProjectCo {
lock.unlock();

if result.success() {
-return Ok(());
+Ok(())
} else {
return Err(Error::new(ErrorKind::Other, "Failed to merge"));
Err(Error::new(ErrorKind::Other, "Failed to merge"))
}
}

@@ -161,17 +161,17 @@ impl CachedProjectCo {
lock.unlock();

if result.status.success() {
-return Ok(
+Ok(
String::from_utf8_lossy(&result.stdout)
.lines()
.map(|l| l.to_owned())
.collect(),
-);
+)
} else {
-return Err(Error::new(
+Err(Error::new(
ErrorKind::Other,
String::from_utf8_lossy(&result.stderr).to_lowercase(),
-));
+))
}
}

@@ -188,67 +188,67 @@ impl CachedProjectCo {
lock.unlock();

if result.status.success() {
-return Ok(
+Ok(
String::from_utf8_lossy(&result.stdout)
.lines()
.map(|l| l.to_owned())
.collect(),
-);
+)
} else {
-return Err(Error::new(
+Err(Error::new(
ErrorKind::Other,
String::from_utf8_lossy(&result.stderr).to_lowercase(),
-));
+))
}
}
}

impl clone::GitClonable for CachedProjectCo {
fn clone_from(&self) -> String {
-return self.clone_url.clone();
+self.clone_url.clone()
}

fn clone_to(&self) -> PathBuf {
let mut clone_path = self.root.clone();
clone_path.push(&self.id);
-return clone_path;
+clone_path
}

fn lock_path(&self) -> PathBuf {
let mut lock_path = self.root.clone();
lock_path.push(format!("{}.lock", self.id));
-return lock_path;
+lock_path
}

fn extra_clone_args(&self) -> Vec<&OsStr> {
let local_ref = self.local_reference.as_ref();
-return vec![
+vec![
OsStr::new("--shared"),
OsStr::new("--reference-if-able"),
local_ref,
-];
+]
}
}

impl clone::GitClonable for CachedProject {
fn clone_from(&self) -> String {
-return self.clone_url.clone();
+self.clone_url.clone()
}

fn clone_to(&self) -> PathBuf {
let mut clone_path = self.root.clone();
clone_path.push("clone");
-return clone_path;
+clone_path
}

fn lock_path(&self) -> PathBuf {
let mut clone_path = self.root.clone();
clone_path.push("clone.lock");
-return clone_path;
+clone_path
}

fn extra_clone_args(&self) -> Vec<&OsStr> {
-return vec![OsStr::new("--bare")];
+vec![OsStr::new("--bare")]
}
}

@@ -290,7 +290,7 @@ mod tests {
let hash = make_pr_repo(&bare.path(), &mk_co.path());

let cloner = cached_cloner(&workingdir.path());
let project = cloner.project("commit-msg-list".to_owned(), bare.string());
let project = cloner.project("commit-msg-list", bare.string());
let working_co = project
.clone_for("testing-commit-msgs".to_owned(), "123".to_owned())
.expect("clone should work");
@@ -317,7 +317,7 @@ mod tests {
let hash = make_pr_repo(&bare.path(), &mk_co.path());

let cloner = cached_cloner(&workingdir.path());
let project = cloner.project("commit-files-changed-list".to_owned(), bare.string());
let project = cloner.project("commit-files-changed-list", bare.string());
let working_co = project
.clone_for("testing-files-changed".to_owned(), "123".to_owned())
.expect("clone should work");