Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 14 additions & 4 deletions database/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -739,10 +739,8 @@ impl Index {
commits
}

// FIXME: in theory this won't scale indefinitely as there's potentially
// millions of queries and labels and iterating all of them is eventually
// going to be impractical. But for now it performs quite well, so we'll go
// for it as keeping indices around would be annoying.
// Note: this function (like the functions below) iterates through all possible combinations
// of benchmark parameters, so it should ideally not be called very often.
pub fn compile_metrics(&self) -> Vec<String> {
self.pstat_series
.map
Expand All @@ -765,6 +763,18 @@ impl Index {
.collect()
}

/// All targets for which we have some compile-time benchmark result data.
///
/// Deduplicates the target component of every pstat series key; the order of
/// the returned targets is unspecified.
pub fn compile_targets(&self) -> Vec<Target> {
    let mut unique_targets = std::collections::HashSet::new();
    for (_, _, _, _, target, _) in self.pstat_series.map.keys() {
        unique_targets.insert(target);
    }
    unique_targets.into_iter().cloned().collect()
}

// FIXME: in theory this won't scale indefinitely as there's potentially
// millions of queries and labels and iterating all of them is eventually
// going to be impractical. But for now it performs quite well, so we'll go
Expand Down
2 changes: 0 additions & 2 deletions site/frontend/src/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,6 @@ export interface BenchmarkInfo {
compile_metrics: [string];
// Known runtime metrics from the DB
runtime_metrics: [string];
// Last loaded run date
as_of: string | null;
}

export async function loadBenchmarkInfo(): Promise<BenchmarkInfo> {
Expand Down
12 changes: 0 additions & 12 deletions site/frontend/src/components/as-of.vue

This file was deleted.

2 changes: 0 additions & 2 deletions site/frontend/src/pages/compare/page.vue
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
<script setup lang="ts">
import {loadBenchmarkInfo} from "../../api";
import AsOf from "../../components/as-of.vue";
import {
changeUrl,
createUrlWithAppendedParams,
Expand Down Expand Up @@ -182,5 +181,4 @@ let info = await loadBenchmarkInfo();
</div>
</div>
<br />
<AsOf :info="info" />
</template>
2 changes: 0 additions & 2 deletions site/frontend/src/pages/graphs/page.vue
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@ import {
} from "../../utils/navigation";
import {renderPlots} from "../../graph/render";
import {BenchmarkInfo, loadBenchmarkInfo} from "../../api";
import AsOf from "../../components/as-of.vue";
import {loadGraphs} from "../../graph/api";

function loadSelectorFromUrl(urlParams: Dict<string>): GraphsSelector {
Expand Down Expand Up @@ -180,7 +179,6 @@ loadGraphData(selector, loading);
</div>
<div id="size-charts"></div>
</div>
<AsOf :info="info" />
</div>
</div>
</template>
Expand Down
5 changes: 2 additions & 3 deletions site/src/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@ where
}

pub mod info {
use database::Date;
use serde::Serialize;

#[derive(Debug, Clone, PartialEq, Serialize)]
Expand All @@ -53,8 +52,8 @@ pub mod info {
/// Sorted list of known runtime metrics
pub runtime_metrics: Vec<String>,

/// Chronologically last loaded run date.
pub as_of: Option<Date>,
/// List of known targets for compile-time benchmarks
pub compile_targets: Vec<String>,
}
}

Expand Down
50 changes: 49 additions & 1 deletion site/src/load.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ use serde::Deserialize;
use crate::self_profile::SelfProfileCache;
use collector::compile::benchmark::category::Category;
use collector::{Bound, MasterCommit, SelfProfileStorage};
use database::Pool;
pub use database::{ArtifactId, Benchmark, Commit};
use database::{Pool, Target};

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TryCommit {
Expand Down Expand Up @@ -81,6 +81,8 @@ pub struct SiteCtxt {
pub landing_page: ArcSwap<Option<Arc<crate::api::graphs::Response>>>,
/// Index of various common queries
pub index: ArcSwap<database::Index>,
/// Summary of some basic data present in the database.
pub data_summary: BenchmarkDataSummary,
/// Cached master-branch Rust commits
pub master_commits: Arc<ArcSwap<MasterCommitCache>>, // outer Arc enables mutation in background task
/// Cache for self profile data
Expand Down Expand Up @@ -132,9 +134,27 @@ impl SiteCtxt {

let master_commits = MasterCommitCache::download().await?;

// We load this data only at website start, as they change very infrequently
let compile_metrics = {
let mut metrics = index.compile_metrics();
metrics.sort();
metrics
};
let runtime_metrics = {
let mut metrics = index.runtime_metrics();
metrics.sort();
metrics
};
let compile_targets = index.compile_targets();

Ok(Self {
config,
index: ArcSwap::new(Arc::new(index)),
data_summary: BenchmarkDataSummary {
compile_metrics,
runtime_metrics,
compile_targets,
},
master_commits: Arc::new(ArcSwap::new(Arc::new(master_commits))),
pool,
landing_page: ArcSwap::new(Arc::new(None)),
Expand Down Expand Up @@ -183,3 +203,31 @@ impl SiteCtxt {
commits
}
}

/// Summary of data contained in the database which changes *very* infrequently, so it can be
/// aggressively cached - we only load the summary when starting the website.
/// Currently, it contains compile/runtime metrics and the set of compile-time benchmark targets.
pub struct BenchmarkDataSummary {
    /// All known compile benchmark metrics (e.g. instruction count, cycles, etc.) for which we have
    /// some benchmark results in the DB.
    compile_metrics: Vec<String>,
    /// All known runtime benchmark metrics (e.g. instruction count, cycles, etc.) for which we have
    /// some benchmark results in the DB.
    runtime_metrics: Vec<String>,
    /// All known targets for which we have some compile-time benchmark results in the DB.
    compile_targets: Vec<Target>,
}

impl BenchmarkDataSummary {
    /// All known compile-time benchmark metrics, sorted at construction time.
    pub fn compile_metrics(&self) -> &[String] {
        &self.compile_metrics
    }

    /// All known runtime benchmark metrics, sorted at construction time.
    pub fn runtime_metrics(&self) -> &[String] {
        &self.runtime_metrics
    }

    /// All known compile-time benchmark targets (unsorted).
    pub fn compile_targets(&self) -> &[Target] {
        &self.compile_targets
    }
}
23 changes: 9 additions & 14 deletions site/src/request_handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,23 +17,18 @@ pub use self_profile::{
};
pub use status_page::handle_status_page;

use crate::api::{info, ServerResult};
use crate::api::info;
use crate::load::SiteCtxt;

/// Builds the `/perf/info` response from the cached `BenchmarkDataSummary`.
///
/// The summary is computed once at site startup, so this handler no longer
/// needs to load and scan the database index on every request.
pub fn handle_info(ctxt: &SiteCtxt) -> info::Response {
    info::Response {
        // Metric lists are pre-sorted when the summary is built.
        compile_metrics: ctxt.data_summary.compile_metrics().to_vec(),
        runtime_metrics: ctxt.data_summary.runtime_metrics().to_vec(),
        // Targets are stored as `database::Target`; the API exposes them as strings.
        compile_targets: ctxt
            .data_summary
            .compile_targets()
            .iter()
            .map(|t| t.to_string())
            .collect(),
    }
}

// NOTE(review): this handler appears to be a no-op stub, and the `/perf/collected`
// route that dispatched to it looks like it was removed in this change — verify
// whether this function (and the `ServerResult` import it needs) is still referenced
// anywhere before keeping it.
pub async fn handle_collected() -> ServerResult<()> {
    Ok(())
}
31 changes: 1 addition & 30 deletions site/src/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use std::sync::{Arc, LazyLock};
use std::{fmt, str};

use futures::{future::FutureExt, stream::StreamExt};
use headers::{Authorization, CacheControl, ContentType, ETag, Header, HeaderMapExt, IfNoneMatch};
use headers::{CacheControl, ContentType, ETag, HeaderMapExt, IfNoneMatch};
use http::header::CACHE_CONTROL;
use hyper::StatusCode;
use log::{error, info};
Expand Down Expand Up @@ -107,23 +107,6 @@ impl Server {
Ok(response)
}

fn check_auth(&self, req: &http::request::Parts) -> bool {
if let Some(auth) = req
.headers
.get(Authorization::<headers::authorization::Bearer>::name())
{
let ctxt = self.ctxt.read();
let ctxt = ctxt.as_ref().unwrap();
let auth = Authorization::<headers::authorization::Bearer>::decode(
&mut Some(auth).into_iter(),
)
.unwrap();
return auth.0.token() == *ctxt.config.keys.github_webhook_secret.as_ref().unwrap();
}

false
}

async fn handle_metrics(&self, _req: Request) -> Response {
use prometheus::Encoder;
let ctxt: Arc<SiteCtxt> = self.ctxt.read().as_ref().unwrap().clone();
Expand Down Expand Up @@ -342,18 +325,6 @@ async fn serve_req(server: Server, req: Request) -> Result<Response, ServerError
crate::comparison::handle_compare(check!(parse_body(&body)), &ctxt).await,
&compression,
)),
"/perf/collected" => {
if !server.check_auth(&req) {
return Ok(http::Response::builder()
.status(StatusCode::UNAUTHORIZED)
.body(hyper::Body::empty())
.unwrap());
}
Ok(to_response(
request_handlers::handle_collected().await,
&compression,
))
}
"/perf/github-hook" => {
if !verify_gh(&ctxt.config, &req, &body) {
return Ok(http::Response::builder()
Expand Down
Loading