Mirror of https://github.com/deps-rs/deps.rs.git, synced 2024-11-22 02:16:30 +00:00

refactor engine in preparation for multi-crate repo support

This commit is contained in: parent da700e8095, commit 54e8dfa662
12 changed files with 231 additions and 79 deletions
src/engine/futures/analyze.rs (new file, 43 lines)
@@ -0,0 +1,43 @@
use failure::Error;
use futures::{Future, Poll, Stream, stream};

use ::models::crates::{AnalyzedDependencies, CrateDeps};

use super::super::Engine;
use super::super::machines::analyzer::DependencyAnalyzer;

const FETCH_RELEASES_CONCURRENCY: usize = 10;

pub struct AnalyzeDependenciesFuture {
    inner: Box<Future<Item=AnalyzedDependencies, Error=Error>>
}

impl AnalyzeDependenciesFuture {
    pub fn new(engine: &Engine, deps: CrateDeps) -> Self {
        let analyzer = DependencyAnalyzer::new(&deps);

        let main_deps = deps.main.into_iter().map(|(name, _)| name);
        let dev_deps = deps.dev.into_iter().map(|(name, _)| name);
        let build_deps = deps.build.into_iter().map(|(name, _)| name);

        let release_futures = engine.fetch_releases(main_deps.chain(dev_deps).chain(build_deps));

        let analyzed_deps_future = stream::iter_ok::<_, Error>(release_futures)
            .buffer_unordered(FETCH_RELEASES_CONCURRENCY)
            .fold(analyzer, |mut analyzer, releases| { analyzer.process(releases); Ok(analyzer) as Result<_, Error> })
            .map(|analyzer| analyzer.finalize());

        AnalyzeDependenciesFuture {
            inner: Box::new(analyzed_deps_future)
        }
    }
}

impl Future for AnalyzeDependenciesFuture {
    type Item = AnalyzedDependencies;
    type Error = Error;

    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        self.inner.poll()
    }
}
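Note: the future above wraps a common futures 0.1 pattern: turn the per-dependency fetch futures into a stream, poll at most FETCH_RELEASES_CONCURRENCY of them at a time, and fold each result into the analyzer. A minimal, self-contained sketch of that pattern, with toy numeric futures standing in for the release fetches (the values and names below are illustrative, not from the codebase):

extern crate futures;

use futures::{Future, Stream, future, stream};

fn main() {
    // Stand-ins for the per-dependency "fetch releases" futures.
    let fetches = (0..25).map(|i| future::ok::<u32, ()>(i));

    let total = stream::iter_ok::<_, ()>(fetches)
        // Poll at most 10 fetches at a time, like FETCH_RELEASES_CONCURRENCY above.
        .buffer_unordered(10)
        // Fold each completed result into an accumulator, as the analyzer does.
        .fold(0u32, |acc, n| Ok::<_, ()>(acc + n))
        .wait()
        .unwrap();

    assert_eq!(total, 300);
}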
src/engine/futures/crawl.rs (new file, 56 lines)
@@ -0,0 +1,56 @@
use std::mem;

use failure::Error;
use futures::{Async, Future, Poll, Stream};
use futures::stream::FuturesUnordered;

use ::models::repo::RepoPath;

use super::super::Engine;
use super::super::machines::crawler::ManifestCrawler;
pub use super::super::machines::crawler::ManifestCrawlerOutput;

pub struct CrawlManifestFuture {
    repo_path: RepoPath,
    engine: Engine,
    crawler: ManifestCrawler,
    unordered: FuturesUnordered<Box<Future<Item=(String, String), Error=Error>>>
}

impl CrawlManifestFuture {
    pub fn new(engine: &Engine, repo_path: RepoPath, entry_point: String) -> Self {
        let future: Box<Future<Item=_, Error=_>> = Box::new(engine.retrieve_file_at_path(&repo_path, &entry_point)
            .map(move |contents| (entry_point, contents)));
        let engine = engine.clone();
        let crawler = ManifestCrawler::new();
        let mut unordered = FuturesUnordered::new();
        unordered.push(future);

        CrawlManifestFuture {
            repo_path, engine, crawler, unordered
        }
    }
}

impl Future for CrawlManifestFuture {
    type Item = ManifestCrawlerOutput;
    type Error = Error;

    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        match try_ready!(self.unordered.poll()) {
            None => {
                let crawler = mem::replace(&mut self.crawler, ManifestCrawler::new());
                Ok(Async::Ready(crawler.finalize()))
            },
            Some((path, raw_manifest)) => {
                let output = self.crawler.step(path, raw_manifest)?;
                for path in output.paths_of_interest.into_iter() {
                    let future: Box<Future<Item=_, Error=_>> = Box::new(self.engine.retrieve_file_at_path(&self.repo_path, &path)
                        .map(move |contents| (path, contents)));
                    self.unordered.push(future);
                }
                self.poll()
            }
        }
    }
}
src/engine/futures/mod.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
mod crawl;
mod analyze;

pub use self::crawl::CrawlManifestFuture;
pub use self::analyze::AnalyzeDependenciesFuture;
src/engine/machines/crawler.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
use std::collections::HashMap;

use failure::Error;

use ::parsers::manifest::parse_manifest_toml;
use ::models::crates::{CrateDeps, CrateName, CrateManifest};

pub struct ManifestCrawlerOutput {
    pub crates: Vec<(CrateName, CrateDeps)>
}

pub struct ManifestCrawlerStepOutput {
    pub paths_of_interest: Vec<String>
}

pub struct ManifestCrawler {
    manifests: HashMap<String, CrateManifest>,
    leaf_crates: Vec<(CrateName, CrateDeps)>
}

impl ManifestCrawler {
    pub fn new() -> ManifestCrawler {
        ManifestCrawler {
            manifests: HashMap::new(),
            leaf_crates: vec![]
        }
    }

    pub fn step(&mut self, path: String, raw_manifest: String) -> Result<ManifestCrawlerStepOutput, Error> {
        let manifest = parse_manifest_toml(&raw_manifest)?;
        self.manifests.insert(path, manifest.clone());
        match manifest {
            CrateManifest::Crate(name, deps) => {
                self.leaf_crates.push((name, deps));
            }
        }
        Ok(ManifestCrawlerStepOutput {
            paths_of_interest: vec![]
        })
    }

    pub fn finalize(self) -> ManifestCrawlerOutput {
        ManifestCrawlerOutput {
            crates: self.leaf_crates
        }
    }
}
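Note: ManifestCrawler is a pure state machine: step() ingests one fetched manifest and reports further paths to fetch, finalize() returns the collected crates, and all I/O stays in the driving future (CrawlManifestFuture above). A toy sketch of the same split, with a simplified crawler and a synchronous driver standing in for CrawlManifestFuture (all names below are hypothetical, not deps.rs types):

use std::collections::VecDeque;

// Simplified stand-in for ManifestCrawler: pure state, no I/O.
struct ToyCrawler {
    visited: Vec<String>,
}

struct ToyStepOutput {
    paths_of_interest: Vec<String>,
}

impl ToyCrawler {
    fn new() -> Self {
        ToyCrawler { visited: Vec::new() }
    }

    // Ingest one fetched file; report further paths the driver should fetch.
    fn step(&mut self, path: String, contents: String) -> ToyStepOutput {
        self.visited.push(path);
        // Pretend a workspace manifest lists one member directory per line.
        let paths_of_interest = contents
            .lines()
            .map(|member| format!("{}/Cargo.toml", member))
            .collect();
        ToyStepOutput { paths_of_interest }
    }

    fn finalize(self) -> Vec<String> {
        self.visited
    }
}

fn main() {
    // The driver owns the I/O (a canned lookup here) and the work queue,
    // much as CrawlManifestFuture owns retrieve_file_at_path and FuturesUnordered.
    let fetch = |path: &str| match path {
        "Cargo.toml" => "crates/a\ncrates/b".to_string(),
        _ => String::new(),
    };

    let mut crawler = ToyCrawler::new();
    let mut queue = VecDeque::new();
    queue.push_back("Cargo.toml".to_string());

    while let Some(path) = queue.pop_front() {
        let contents = fetch(&path);
        let output = crawler.step(path, contents);
        queue.extend(output.paths_of_interest);
    }

    assert_eq!(
        crawler.finalize(),
        vec!["Cargo.toml", "crates/a/Cargo.toml", "crates/b/Cargo.toml"]
    );
}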
src/engine/machines/mod.rs (new file, 2 lines)
@@ -0,0 +1,2 @@
pub mod crawler;
pub mod analyzer;
@@ -2,27 +2,28 @@ use std::sync::Arc;
 use std::time::Duration;
 
 use failure::Error;
-use futures::{Future, Stream, stream};
+use futures::Future;
+use futures::future::join_all;
 use hyper::Client;
 use hyper::client::HttpConnector;
 use hyper_tls::HttpsConnector;
 use slog::Logger;
 use tokio_service::Service;
 
-mod analyzer;
+mod machines;
+mod futures;
 
 use ::utils::throttle::Throttle;
 
 use ::models::repo::{Repository, RepoPath};
-use ::models::crates::{CrateName, CrateRelease, CrateManifest, AnalyzedDependencies};
+use ::models::crates::{CrateName, CrateRelease, AnalyzedDependencies};
 
-use ::parsers::manifest::parse_manifest_toml;
-
 use ::interactors::crates::query_crate;
 use ::interactors::github::retrieve_file_at_path;
 use ::interactors::github::GetPopularRepos;
 
-use self::analyzer::DependencyAnalyzer;
+use self::futures::AnalyzeDependenciesFuture;
+use self::futures::CrawlManifestFuture;
 
 #[derive(Clone, Debug)]
 pub struct Engine {
@@ -42,12 +43,15 @@ impl Engine {
     }
 }
 
-const FETCH_RELEASES_CONCURRENCY: usize = 10;
-
 pub struct AnalyzeDependenciesOutcome {
-    pub name: CrateName,
-    pub deps: AnalyzedDependencies
+    pub crates: Vec<(CrateName, AnalyzedDependencies)>
+}
+
+impl AnalyzeDependenciesOutcome {
+    pub fn any_outdated(&self) -> bool {
+        self.crates.iter().any(|&(_, ref deps)| deps.any_outdated())
+    }
 }
 
 impl Engine {
     pub fn get_popular_repos(&self) ->
@@ -60,30 +64,17 @@ impl Engine {
     pub fn analyze_dependencies(&self, repo_path: RepoPath) ->
         impl Future<Item=AnalyzeDependenciesOutcome, Error=Error>
     {
-        let manifest_future = self.retrieve_manifest(&repo_path);
+        let manifest_future = CrawlManifestFuture::new(self, repo_path, "Cargo.toml".to_string());
 
         let engine = self.clone();
-        manifest_future.and_then(move |manifest| {
-            let CrateManifest::Crate(crate_name, deps) = manifest;
-            let analyzer = DependencyAnalyzer::new(&deps);
-
-            let main_deps = deps.main.into_iter().map(|(name, _)| name);
-            let dev_deps = deps.dev.into_iter().map(|(name, _)| name);
-            let build_deps = deps.build.into_iter().map(|(name, _)| name);
-
-            let release_futures = engine.fetch_releases(main_deps.chain(dev_deps).chain(build_deps));
-
-            let analyzed_deps_future = stream::iter_ok::<_, Error>(release_futures)
-                .buffer_unordered(FETCH_RELEASES_CONCURRENCY)
-                .fold(analyzer, |mut analyzer, releases| { analyzer.process(releases); Ok(analyzer) as Result<_, Error> })
-                .map(|analyzer| analyzer.finalize());
-
-            analyzed_deps_future.map(move |analyzed_deps| {
-                AnalyzeDependenciesOutcome {
-                    name: crate_name,
-                    deps: analyzed_deps
-                }
-            })
+        manifest_future.and_then(move |manifest_output| {
+            let futures = manifest_output.crates.into_iter().map(move |(crate_name, deps)| {
+                let analyzed_deps_future = AnalyzeDependenciesFuture::new(&engine, deps);
+
+                analyzed_deps_future.map(move |analyzed_deps| (crate_name, analyzed_deps))
+            });
+
+            join_all(futures).map(|crates| AnalyzeDependenciesOutcome { crates })
         })
     }
 
@@ -97,12 +88,9 @@ impl Engine {
         })
     }
 
-    fn retrieve_manifest(&self, repo_path: &RepoPath) ->
-        impl Future<Item=CrateManifest, Error=Error>
+    fn retrieve_file_at_path(&self, repo_path: &RepoPath, path: &str) ->
+        impl Future<Item=String, Error=Error>
     {
-        retrieve_file_at_path(self.client.clone(), &repo_path, "Cargo.toml").from_err()
-            .and_then(|manifest_source| {
-                parse_manifest_toml(&manifest_source).map_err(|err| err.into())
-            })
+        retrieve_file_at_path(self.client.clone(), &repo_path, path).from_err()
     }
 }
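Note: with this change, analyze_dependencies builds one AnalyzeDependenciesFuture per crawled crate and combines them with futures::future::join_all, so the outcome carries a Vec of per-crate results. A minimal sketch of that aggregation step in futures 0.1, with stand-in values instead of real analyses (names below are illustrative):

extern crate futures;

use futures::Future;
use futures::future::{self, join_all};

fn main() {
    // One future per crawled crate, each yielding a (name, result) pair;
    // the values here are stand-ins for real AnalyzedDependencies.
    let per_crate = vec!["app", "lib"].into_iter()
        .map(|name| future::ok::<_, ()>((name, 0usize)));

    // join_all resolves every per-crate future and collects the results,
    // which is how the new AnalyzeDependenciesOutcome::crates vector is filled.
    let crates = join_all(per_crate).wait().unwrap();
    assert_eq!(crates, vec![("app", 0), ("lib", 0)]);
}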
@@ -5,7 +5,7 @@
 
 extern crate base64;
 #[macro_use] extern crate failure;
-extern crate futures;
+#[macro_use] extern crate futures;
 extern crate hyper;
 extern crate hyper_tls;
 #[macro_use] extern crate lazy_static;
@@ -49,7 +49,7 @@ pub struct CrateRelease {
     pub yanked: bool
 }
 
-#[derive(Debug, Default)]
+#[derive(Clone, Debug, Default)]
 pub struct CrateDeps {
     pub main: BTreeMap<CrateName, VersionReq>,
     pub dev: BTreeMap<CrateName, VersionReq>,
@@ -109,7 +109,7 @@ impl AnalyzedDependencies {
     }
 }
 
-#[derive(Debug)]
+#[derive(Clone, Debug)]
 pub enum CrateManifest {
     Crate(CrateName, CrateDeps)
 }
@@ -5,10 +5,31 @@ use hyper::Response;
 use maud::{Markup, html};
 
 use ::engine::AnalyzeDependenciesOutcome;
-use ::models::crates::{CrateName, AnalyzedDependency};
+use ::models::crates::{CrateName, AnalyzedDependency, AnalyzedDependencies};
 use ::models::repo::RepoPath;
 use ::server::assets;
 
+fn dependency_tables(crate_name: CrateName, deps: AnalyzedDependencies) -> Markup {
+    html! {
+        h2 class="title is-3" {
+            "Crate "
+            code (crate_name.as_ref())
+        }
+
+        @if !deps.main.is_empty() {
+            (dependency_table("Dependencies", deps.main))
+        }
+
+        @if !deps.dev.is_empty() {
+            (dependency_table("Dev dependencies", deps.dev))
+        }
+
+        @if !deps.build.is_empty() {
+            (dependency_table("Build dependencies", deps.build))
+        }
+    }
+}
+
 fn dependency_table(title: &str, deps: BTreeMap<CrateName, AnalyzedDependency>) -> Markup {
     let count_total = deps.len();
     let count_outdated = deps.iter().filter(|&(_, dep)| dep.is_outdated()).count();
@@ -65,7 +86,7 @@ pub fn render(analysis_outcome: AnalyzeDependenciesOutcome, repo_path: RepoPath)
     let status_base_url = format!("{}/{}", &super::SELF_BASE_URL as &str, self_path);
     let title = format!("{} / {}", repo_path.qual.as_ref(), repo_path.name.as_ref());
 
-    let (hero_class, status_asset) = if analysis_outcome.deps.any_outdated() {
+    let (hero_class, status_asset) = if analysis_outcome.any_outdated() {
         ("is-warning", assets::BADGE_OUTDATED_SVG.as_ref())
     } else {
         ("is-success", assets::BADGE_UPTODATE_SVG.as_ref())
@@ -98,21 +119,8 @@ pub fn render(analysis_outcome: AnalyzeDependenciesOutcome, repo_path: RepoPath)
             }
             section class="section" {
                 div class="container" {
-                    h2 class="title is-3" {
-                        "Crate "
-                        code (analysis_outcome.name.as_ref())
-                    }
-
-                    @if !analysis_outcome.deps.main.is_empty() {
-                        (dependency_table("Dependencies", analysis_outcome.deps.main))
-                    }
-
-                    @if !analysis_outcome.deps.dev.is_empty() {
-                        (dependency_table("Dev dependencies", analysis_outcome.deps.dev))
-                    }
-
-                    @if !analysis_outcome.deps.build.is_empty() {
-                        (dependency_table("Build dependencies", analysis_outcome.deps.build))
-                    }
+                    @for (crate_name, deps) in analysis_outcome.crates {
+                        (dependency_tables(crate_name, deps))
+                    }
                 }
             }
@@ -29,29 +29,32 @@ struct AnalyzeDependenciesResponse {
 }
 
 pub fn status_json(analysis_outcome: AnalyzeDependenciesOutcome) -> Response {
-    let single = AnalyzeDependenciesResponseSingle {
-        dependencies: analysis_outcome.deps.main.into_iter()
-            .map(|(name, analyzed)| (name.into(), AnalyzeDependenciesResponseDetail {
-                outdated: analyzed.is_outdated(),
-                required: analyzed.required,
-                latest: analyzed.latest
-            })).collect(),
-        dev_dependencies: analysis_outcome.deps.dev.into_iter()
-            .map(|(name, analyzed)| (name.into(), AnalyzeDependenciesResponseDetail {
-                outdated: analyzed.is_outdated(),
-                required: analyzed.required,
-                latest: analyzed.latest
-            })).collect(),
-        build_dependencies: analysis_outcome.deps.build.into_iter()
-            .map(|(name, analyzed)| (name.into(), AnalyzeDependenciesResponseDetail {
-                outdated: analyzed.is_outdated(),
-                required: analyzed.required,
-                latest: analyzed.latest
-            })).collect()
-    };
+    let crates = analysis_outcome.crates.into_iter().map(|(crate_name, analyzed_deps)| {
+        let single = AnalyzeDependenciesResponseSingle {
+            dependencies: analyzed_deps.main.into_iter()
+                .map(|(name, analyzed)| (name.into(), AnalyzeDependenciesResponseDetail {
+                    outdated: analyzed.is_outdated(),
+                    required: analyzed.required,
+                    latest: analyzed.latest
+                })).collect(),
+            dev_dependencies: analyzed_deps.dev.into_iter()
+                .map(|(name, analyzed)| (name.into(), AnalyzeDependenciesResponseDetail {
+                    outdated: analyzed.is_outdated(),
+                    required: analyzed.required,
+                    latest: analyzed.latest
+                })).collect(),
+            build_dependencies: analyzed_deps.build.into_iter()
+                .map(|(name, analyzed)| (name.into(), AnalyzeDependenciesResponseDetail {
+                    outdated: analyzed.is_outdated(),
+                    required: analyzed.required,
+                    latest: analyzed.latest
+                })).collect()
+        };
+        (crate_name.into(), single)
+    });
 
     let multi = AnalyzeDependenciesResponse {
-        crates: vec![(analysis_outcome.name.into(), single)].into_iter().collect()
+        crates: crates.collect()
     };
 
     Response::new()
@@ -8,7 +8,7 @@ pub fn status_svg(analysis_outcome: Option<AnalyzeDependenciesOutcome>) -> Respo
     let mut response = Response::new()
         .with_header(ContentType("image/svg+xml;charset=utf-8".parse().unwrap()));
     if let Some(outcome) = analysis_outcome {
-        if outcome.deps.any_outdated() {
+        if outcome.any_outdated() {
             response.set_body(assets::BADGE_OUTDATED_SVG.to_vec());
         } else {
             response.set_body(assets::BADGE_UPTODATE_SVG.to_vec());