refactor engine in preparation for multi-crate repo support

Sam Rijs 2018-02-11 13:24:24 +11:00
parent da700e8095
commit 54e8dfa662
12 changed files with 231 additions and 79 deletions

@@ -0,0 +1,43 @@
use failure::Error;
use futures::{Future, Poll, Stream, stream};
use ::models::crates::{AnalyzedDependencies, CrateDeps};
use super::super::Engine;
use super::super::machines::analyzer::DependencyAnalyzer;
const FETCH_RELEASES_CONCURRENCY: usize = 10;
pub struct AnalyzeDependenciesFuture {
inner: Box<Future<Item=AnalyzedDependencies, Error=Error>>
}
impl AnalyzeDependenciesFuture {
pub fn new(engine: &Engine, deps: CrateDeps) -> Self {
let analyzer = DependencyAnalyzer::new(&deps);
let main_deps = deps.main.into_iter().map(|(name, _)| name);
let dev_deps = deps.dev.into_iter().map(|(name, _)| name);
let build_deps = deps.build.into_iter().map(|(name, _)| name);
let release_futures = engine.fetch_releases(main_deps.chain(dev_deps).chain(build_deps));
let analyzed_deps_future = stream::iter_ok::<_, Error>(release_futures)
.buffer_unordered(FETCH_RELEASES_CONCURRENCY)
.fold(analyzer, |mut analyzer, releases| { analyzer.process(releases); Ok(analyzer) as Result<_, Error> })
.map(|analyzer| analyzer.finalize());
AnalyzeDependenciesFuture {
inner: Box::new(analyzed_deps_future)
}
}
}
impl Future for AnalyzeDependenciesFuture {
type Item = AnalyzedDependencies;
type Error = Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
self.inner.poll()
}
}
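
AnalyzeDependenciesFuture above packages a common futures 0.1 pattern: turn the per-dependency release lookups into a stream, drive at most FETCH_RELEASES_CONCURRENCY of them concurrently with buffer_unordered, and fold every result into the DependencyAnalyzer. The following standalone sketch is not part of this commit; it uses plain integers (and assumes the futures 0.1 API used throughout this change) just to show the same buffer_unordered + fold shape:

extern crate futures; // assumes futures = "0.1"

use futures::{Future, Stream, future, stream};

fn main() {
    // Stand-ins for the per-dependency "fetch releases" futures.
    let fetches = (0..5).map(|i| future::ok::<u32, ()>(i * 10));

    // Drive at most two lookups at a time and fold each completed result
    // into an accumulator, mirroring how AnalyzeDependenciesFuture folds
    // releases into its DependencyAnalyzer.
    let total = stream::iter_ok::<_, ()>(fetches)
        .buffer_unordered(2)
        .fold(0u32, |acc, value| Ok::<_, ()>(acc + value))
        .wait()
        .expect("all lookups succeed");

    assert_eq!(total, 100);
}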

@@ -0,0 +1,56 @@
use std::mem;
use failure::Error;
use futures::{Async, Future, Poll, Stream};
use futures::stream::FuturesUnordered;
use ::models::repo::RepoPath;
use super::super::Engine;
use super::super::machines::crawler::ManifestCrawler;
pub use super::super::machines::crawler::ManifestCrawlerOutput;
pub struct CrawlManifestFuture {
repo_path: RepoPath,
engine: Engine,
crawler: ManifestCrawler,
unordered: FuturesUnordered<Box<Future<Item=(String, String), Error=Error>>>
}
impl CrawlManifestFuture {
pub fn new(engine: &Engine, repo_path: RepoPath, entry_point: String) -> Self {
let future: Box<Future<Item=_, Error=_>> = Box::new(engine.retrieve_file_at_path(&repo_path, &entry_point)
.map(move |contents| (entry_point, contents)));
let engine = engine.clone();
let crawler = ManifestCrawler::new();
let mut unordered = FuturesUnordered::new();
unordered.push(future);
CrawlManifestFuture {
repo_path, engine, crawler, unordered
}
}
}
impl Future for CrawlManifestFuture {
type Item = ManifestCrawlerOutput;
type Error = Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
match try_ready!(self.unordered.poll()) {
None => {
let crawler = mem::replace(&mut self.crawler, ManifestCrawler::new());
Ok(Async::Ready(crawler.finalize()))
},
Some((path, raw_manifest)) => {
let output = self.crawler.step(path, raw_manifest)?;
for path in output.paths_of_interest.into_iter() {
let future: Box<Future<Item=_, Error=_>> = Box::new(self.engine.retrieve_file_at_path(&self.repo_path, &path)
.map(move |contents| (path, contents)));
self.unordered.push(future);
}
self.poll()
}
}
}
}
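
CrawlManifestFuture::poll is written against futures 0.1 and leans on the try_ready! macro, which is why a later hunk in this commit changes extern crate futures; to #[macro_use] extern crate futures;. try_ready!(expr) yields the ready value and returns early on NotReady or on an error. A minimal standalone illustration of that hand-written-future pattern (not code from this commit):

#[macro_use]
extern crate futures; // assumes futures = "0.1"

use futures::{Async, Future, Poll};
use futures::future::{self, FutureResult};

// A trivial wrapper future: try_ready! bails out on NotReady or Err and
// otherwise hands back the inner value, just as CrawlManifestFuture uses it
// to pull each completed (path, contents) pair out of its FuturesUnordered.
struct AddOne(FutureResult<u32, ()>);

impl Future for AddOne {
    type Item = u32;
    type Error = ();

    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        let value = try_ready!(self.0.poll());
        Ok(Async::Ready(value + 1))
    }
}

fn main() {
    assert_eq!(AddOne(future::ok(41)).wait(), Ok(42));
}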

@@ -0,0 +1,5 @@
mod crawl;
mod analyze;
pub use self::crawl::CrawlManifestFuture;
pub use self::analyze::AnalyzeDependenciesFuture;

@@ -0,0 +1,47 @@
use std::collections::HashMap;
use failure::Error;
use ::parsers::manifest::parse_manifest_toml;
use ::models::crates::{CrateDeps, CrateName, CrateManifest};
pub struct ManifestCrawlerOutput {
pub crates: Vec<(CrateName, CrateDeps)>
}
pub struct ManifestCrawlerStepOutput {
pub paths_of_interest: Vec<String>
}
pub struct ManifestCrawler {
manifests: HashMap<String, CrateManifest>,
leaf_crates: Vec<(CrateName, CrateDeps)>
}
impl ManifestCrawler {
pub fn new() -> ManifestCrawler {
ManifestCrawler {
manifests: HashMap::new(),
leaf_crates: vec![]
}
}
pub fn step(&mut self, path: String, raw_manifest: String) -> Result<ManifestCrawlerStepOutput, Error> {
let manifest = parse_manifest_toml(&raw_manifest)?;
self.manifests.insert(path, manifest.clone());
match manifest {
CrateManifest::Crate(name, deps) => {
self.leaf_crates.push((name, deps));
}
}
Ok(ManifestCrawlerStepOutput {
paths_of_interest: vec![]
})
}
pub fn finalize(self) -> ManifestCrawlerOutput {
ManifestCrawlerOutput {
crates: self.leaf_crates
}
}
}

@@ -0,0 +1,2 @@
pub mod crawler;
pub mod analyzer;

@@ -2,27 +2,28 @@ use std::sync::Arc;
use std::time::Duration;
use failure::Error;
-use futures::{Future, Stream, stream};
+use futures::Future;
+use futures::future::join_all;
use hyper::Client;
use hyper::client::HttpConnector;
use hyper_tls::HttpsConnector;
use slog::Logger;
use tokio_service::Service;
-mod analyzer;
+mod machines;
+mod futures;
use ::utils::throttle::Throttle;
use ::models::repo::{Repository, RepoPath};
-use ::models::crates::{CrateName, CrateRelease, CrateManifest, AnalyzedDependencies};
-use ::parsers::manifest::parse_manifest_toml;
+use ::models::crates::{CrateName, CrateRelease, AnalyzedDependencies};
use ::interactors::crates::query_crate;
use ::interactors::github::retrieve_file_at_path;
use ::interactors::github::GetPopularRepos;
-use self::analyzer::DependencyAnalyzer;
+use self::futures::AnalyzeDependenciesFuture;
+use self::futures::CrawlManifestFuture;
#[derive(Clone, Debug)]
pub struct Engine {
@@ -42,11 +43,14 @@ impl Engine {
}
}
-const FETCH_RELEASES_CONCURRENCY: usize = 10;
pub struct AnalyzeDependenciesOutcome {
-pub name: CrateName,
-pub deps: AnalyzedDependencies
+pub crates: Vec<(CrateName, AnalyzedDependencies)>
}
+impl AnalyzeDependenciesOutcome {
+pub fn any_outdated(&self) -> bool {
+self.crates.iter().any(|&(_, ref deps)| deps.any_outdated())
+}
+}
impl Engine {
@@ -60,30 +64,17 @@ impl Engine {
pub fn analyze_dependencies(&self, repo_path: RepoPath) ->
impl Future<Item=AnalyzeDependenciesOutcome, Error=Error>
{
-let manifest_future = self.retrieve_manifest(&repo_path);
+let manifest_future = CrawlManifestFuture::new(self, repo_path, "Cargo.toml".to_string());
let engine = self.clone();
-manifest_future.and_then(move |manifest| {
-let CrateManifest::Crate(crate_name, deps) = manifest;
-let analyzer = DependencyAnalyzer::new(&deps);
+manifest_future.and_then(move |manifest_output| {
+let futures = manifest_output.crates.into_iter().map(move |(crate_name, deps)| {
+let analyzed_deps_future = AnalyzeDependenciesFuture::new(&engine, deps);
-let main_deps = deps.main.into_iter().map(|(name, _)| name);
-let dev_deps = deps.dev.into_iter().map(|(name, _)| name);
-let build_deps = deps.build.into_iter().map(|(name, _)| name);
+analyzed_deps_future.map(move |analyzed_deps| (crate_name, analyzed_deps))
+});
-let release_futures = engine.fetch_releases(main_deps.chain(dev_deps).chain(build_deps));
-let analyzed_deps_future = stream::iter_ok::<_, Error>(release_futures)
-.buffer_unordered(FETCH_RELEASES_CONCURRENCY)
-.fold(analyzer, |mut analyzer, releases| { analyzer.process(releases); Ok(analyzer) as Result<_, Error> })
-.map(|analyzer| analyzer.finalize());
-analyzed_deps_future.map(move |analyzed_deps| {
-AnalyzeDependenciesOutcome {
-name: crate_name,
-deps: analyzed_deps
-}
-})
+join_all(futures).map(|crates| AnalyzeDependenciesOutcome { crates })
})
}
@@ -97,12 +88,9 @@ impl Engine {
})
}
-fn retrieve_manifest(&self, repo_path: &RepoPath) ->
-impl Future<Item=CrateManifest, Error=Error>
+fn retrieve_file_at_path(&self, repo_path: &RepoPath, path: &str) ->
+impl Future<Item=String, Error=Error>
{
-retrieve_file_at_path(self.client.clone(), &repo_path, "Cargo.toml").from_err()
-.and_then(|manifest_source| {
-parse_manifest_toml(&manifest_source).map_err(|err| err.into())
-})
+retrieve_file_at_path(self.client.clone(), &repo_path, path).from_err()
}
}
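
With crawling and per-crate analysis factored out into the new futures, analyze_dependencies now only maps each crawled crate to an AnalyzeDependenciesFuture and gathers the results with futures::future::join_all. A small standalone sketch of that gathering step (not code from this commit; placeholder crate names and a dummy usize stand in for AnalyzedDependencies):

extern crate futures; // assumes futures = "0.1"

use futures::Future;
use futures::future::{self, join_all};

fn main() {
    // One already-resolved future per crate, each yielding a (name, analysis) pair.
    let per_crate = vec!["app", "tools"].into_iter()
        .map(|name| future::ok::<_, ()>((name, 3usize)));

    // join_all drives them all and returns the results in order, which the
    // new analyze_dependencies wraps into AnalyzeDependenciesOutcome { crates }.
    let crates = join_all(per_crate).wait().expect("all analyses succeed");
    assert_eq!(crates, vec![("app", 3usize), ("tools", 3usize)]);
}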

@@ -5,7 +5,7 @@
extern crate base64;
#[macro_use] extern crate failure;
-extern crate futures;
+#[macro_use] extern crate futures;
extern crate hyper;
extern crate hyper_tls;
#[macro_use] extern crate lazy_static;

@@ -49,7 +49,7 @@ pub struct CrateRelease {
pub yanked: bool
}
-#[derive(Debug, Default)]
+#[derive(Clone, Debug, Default)]
pub struct CrateDeps {
pub main: BTreeMap<CrateName, VersionReq>,
pub dev: BTreeMap<CrateName, VersionReq>,
@@ -109,7 +109,7 @@ impl AnalyzedDependencies {
}
}
-#[derive(Debug)]
+#[derive(Clone, Debug)]
pub enum CrateManifest {
Crate(CrateName, CrateDeps)
}
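
As a side note, the Clone derives added to CrateDeps and CrateManifest above appear to be what lets the new ManifestCrawler::step keep a copy of each parsed manifest (manifest.clone()) in its map before destructuring it into a crate name and its dependencies.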

@@ -5,10 +5,31 @@ use hyper::Response;
use maud::{Markup, html};
use ::engine::AnalyzeDependenciesOutcome;
-use ::models::crates::{CrateName, AnalyzedDependency};
+use ::models::crates::{CrateName, AnalyzedDependency, AnalyzedDependencies};
use ::models::repo::RepoPath;
use ::server::assets;
+fn dependency_tables(crate_name: CrateName, deps: AnalyzedDependencies) -> Markup {
+html! {
+h2 class="title is-3" {
+"Crate "
+code (crate_name.as_ref())
+}
+@if !deps.main.is_empty() {
+(dependency_table("Dependencies", deps.main))
+}
+@if !deps.dev.is_empty() {
+(dependency_table("Dev dependencies", deps.dev))
+}
+@if !deps.build.is_empty() {
+(dependency_table("Build dependencies", deps.build))
+}
+}
+}
fn dependency_table(title: &str, deps: BTreeMap<CrateName, AnalyzedDependency>) -> Markup {
let count_total = deps.len();
let count_outdated = deps.iter().filter(|&(_, dep)| dep.is_outdated()).count();
@@ -65,7 +86,7 @@ pub fn render(analysis_outcome: AnalyzeDependenciesOutcome, repo_path: RepoPath)
let status_base_url = format!("{}/{}", &super::SELF_BASE_URL as &str, self_path);
let title = format!("{} / {}", repo_path.qual.as_ref(), repo_path.name.as_ref());
-let (hero_class, status_asset) = if analysis_outcome.deps.any_outdated() {
+let (hero_class, status_asset) = if analysis_outcome.any_outdated() {
("is-warning", assets::BADGE_OUTDATED_SVG.as_ref())
} else {
("is-success", assets::BADGE_UPTODATE_SVG.as_ref())
@@ -98,21 +119,8 @@ pub fn render(analysis_outcome: AnalyzeDependenciesOutcome, repo_path: RepoPath)
}
section class="section" {
div class="container" {
-h2 class="title is-3" {
-"Crate "
-code (analysis_outcome.name.as_ref())
-}
-@if !analysis_outcome.deps.main.is_empty() {
-(dependency_table("Dependencies", analysis_outcome.deps.main))
-}
-@if !analysis_outcome.deps.dev.is_empty() {
-(dependency_table("Dev dependencies", analysis_outcome.deps.dev))
-}
-@if !analysis_outcome.deps.build.is_empty() {
-(dependency_table("Build dependencies", analysis_outcome.deps.build))
+@for (crate_name, deps) in analysis_outcome.crates {
+(dependency_tables(crate_name, deps))
}
}
}

@@ -29,29 +29,32 @@ struct AnalyzeDependenciesResponse {
}
pub fn status_json(analysis_outcome: AnalyzeDependenciesOutcome) -> Response {
+let crates = analysis_outcome.crates.into_iter().map(|(crate_name, analyzed_deps)| {
let single = AnalyzeDependenciesResponseSingle {
-dependencies: analysis_outcome.deps.main.into_iter()
+dependencies: analyzed_deps.main.into_iter()
.map(|(name, analyzed)| (name.into(), AnalyzeDependenciesResponseDetail {
outdated: analyzed.is_outdated(),
required: analyzed.required,
latest: analyzed.latest
})).collect(),
-dev_dependencies: analysis_outcome.deps.dev.into_iter()
+dev_dependencies: analyzed_deps.dev.into_iter()
.map(|(name, analyzed)| (name.into(), AnalyzeDependenciesResponseDetail {
outdated: analyzed.is_outdated(),
required: analyzed.required,
latest: analyzed.latest
})).collect(),
-build_dependencies: analysis_outcome.deps.build.into_iter()
+build_dependencies: analyzed_deps.build.into_iter()
.map(|(name, analyzed)| (name.into(), AnalyzeDependenciesResponseDetail {
outdated: analyzed.is_outdated(),
required: analyzed.required,
latest: analyzed.latest
})).collect()
};
+(crate_name.into(), single)
+});
let multi = AnalyzeDependenciesResponse {
-crates: vec![(analysis_outcome.name.into(), single)].into_iter().collect()
+crates: crates.collect()
};
Response::new()

@@ -8,7 +8,7 @@ pub fn status_svg(analysis_outcome: Option<AnalyzeDependenciesOutcome>) -> Respo
let mut response = Response::new()
.with_header(ContentType("image/svg+xml;charset=utf-8".parse().unwrap()));
if let Some(outcome) = analysis_outcome {
-if outcome.deps.any_outdated() {
+if outcome.any_outdated() {
response.set_body(assets::BADGE_OUTDATED_SVG.to_vec());
} else {
response.set_body(assets::BADGE_UPTODATE_SVG.to_vec());