Skip to content

Miscellaneous site crate cleanups #1029

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Sep 24, 2021
63 changes: 32 additions & 31 deletions site/src/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,32 +4,8 @@
//!
//! The responses are calculated in the server.rs file.

use database::Benchmark;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::result::Result as StdResult;

/// A benchmark name paired with the profile it was built under.
///
/// Serialized and displayed as the single string `{name}-{profile}`
/// (see the `Display` and `Serialize` impls below).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct StyledBenchmarkName {
    /// The benchmark identifier (project-declared `database::Benchmark`).
    pub name: Benchmark,
    /// The compilation profile — see `crate::db::Profile` for variants.
    pub profile: crate::db::Profile,
}

/// Renders the pair as `{name}-{profile}`, e.g. joining the two parts
/// with a single hyphen.
impl fmt::Display for StyledBenchmarkName {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}-{}", self.name, self.profile)
    }
}

/// Serializes through the `Display` impl above, so the wire format is
/// the flat string `{name}-{profile}` rather than a struct with fields.
impl Serialize for StyledBenchmarkName {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::ser::Serializer,
    {
        // `collect_str` serializes any `Display` value as a string.
        serializer.collect_str(&self)
    }
}

/// Result alias used by the site's request handlers: errors are plain
/// `String` messages suitable for returning to the client.
pub type ServerResult<T> = StdResult<T, String>;

pub mod info {
Expand Down Expand Up @@ -65,11 +41,6 @@ pub mod dashboard {
}
}

/// Response carrying an optionally-resolved commit.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct CommitResponse {
    /// The resolved commit, or `None` if no matching commit was found.
    /// Presumably a commit SHA — confirm against server.rs, where the
    /// responses are calculated.
    pub commit: Option<String>,
}

pub mod graph {
use collector::Bound;
use serde::{Deserialize, Serialize};
Expand Down Expand Up @@ -263,7 +234,38 @@ pub mod self_profile_raw {
pub cids: Vec<i32>,
pub cid: i32,
pub url: String,
pub is_tarball: bool,
}
}

/// Request/response types for the endpoint that post-processes raw
/// self-profile data into a downloadable artifact (crox trace,
/// flamegraph, or codegen schedule).
pub mod self_profile_processed {
    use serde::{Deserialize, Serialize};

    /// Which post-processor to apply to the raw self-profile data.
    ///
    /// Variants (de)serialize in lowercase ("crox", "flamegraph"),
    /// except `CodegenSchedule`, which is explicitly renamed to the
    /// hyphenated "codegen-schedule".
    #[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)]
    #[serde(rename_all = "lowercase")]
    pub enum ProcessorType {
        #[serde(rename = "codegen-schedule")]
        CodegenSchedule,
        Crox,
        Flamegraph,
    }

    #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
    pub struct Request {
        // Commit to fetch self-profile data for.
        pub commit: String,
        // Benchmark name within that commit's results.
        pub benchmark: String,
        // Specific run (e.g. a scenario name) — exact semantics live in
        // the handler; see request_handlers/self_profile.rs.
        pub run_name: String,
        // Optional collection id; `None` lets the server pick.
        pub cid: Option<i32>,
        // Serialized under the key `type` (a Rust keyword, hence the rename).
        #[serde(rename = "type")]
        pub processor_type: ProcessorType,
        // Any remaining query parameters are flattened into this map and
        // forwarded as processor-specific options; defaults to empty.
        #[serde(default, flatten)]
        pub params: std::collections::HashMap<String, String>,
    }

    #[derive(Debug, Clone, Serialize)]
    pub struct Response {
        // All collection ids available for this commit/benchmark.
        pub cids: Vec<i32>,
        // The collection id actually selected.
        pub cid: i32,
        // Location of the processed artifact.
        pub url: String,
    }
}

Expand All @@ -277,7 +279,6 @@ pub mod self_profile {
pub base_commit: Option<String>,
pub benchmark: String,
pub run_name: String,

pub sort_idx: String,
}

Expand Down
20 changes: 14 additions & 6 deletions site/src/comparison.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,13 @@ type BoxedError = Box<dyn Error + Send + Sync>;
pub async fn handle_triage(
body: api::triage::Request,
ctxt: &SiteCtxt,
) -> Result<api::triage::Response, BoxedError> {
) -> api::ServerResult<api::triage::Response> {
log::info!("handle_triage({:?})", body);
let start = body.start;
let end = body.end;
let master_commits = collector::master_commits().await?;
let master_commits = collector::master_commits()
.await
.map_err(|e| format!("error retrieving master commit list: {}", e))?;

let start_artifact = ctxt
.artifact_id_for_bound(start.clone(), true)
Expand All @@ -47,7 +50,8 @@ pub async fn handle_triage(
&master_commits,
body.calcNewSig.unwrap_or(false),
)
.await?
.await
.map_err(|e| format!("error comparing commits: {}", e))?
{
Some(c) => c,
None => {
Expand Down Expand Up @@ -87,8 +91,11 @@ pub async fn handle_triage(
pub async fn handle_compare(
body: api::comparison::Request,
ctxt: &SiteCtxt,
) -> Result<api::comparison::Response, BoxedError> {
let master_commits = collector::master_commits().await?;
) -> api::ServerResult<api::comparison::Response> {
log::info!("handle_compare({:?})", body);
let master_commits = collector::master_commits()
.await
.map_err(|e| format!("error retrieving master commit list: {}", e))?;
let end = body.end;
let comparison = compare_given_commits(
body.start,
Expand All @@ -98,7 +105,8 @@ pub async fn handle_compare(
&master_commits,
body.calcNewSig.unwrap_or(false),
)
.await?
.await
.map_err(|e| format!("error comparing commits: {}", e))?
.ok_or_else(|| format!("could not find end commit for bound {:?}", end))?;

let conn = ctxt.conn().await;
Expand Down
4 changes: 2 additions & 2 deletions site/src/request_handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ pub use github::handle_github;
pub use graph::{handle_graph, handle_graph_new};
pub use next_commit::handle_next_commit;
pub use self_profile::{
get_self_profile_raw, handle_self_profile, handle_self_profile_processed_download,
handle_self_profile_raw, handle_self_profile_raw_download,
handle_self_profile, handle_self_profile_processed_download, handle_self_profile_raw,
handle_self_profile_raw_download,
};
pub use status_page::handle_status_page;

Expand Down
1 change: 1 addition & 0 deletions site/src/request_handlers/github.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ pub async fn handle_github(
request: github::Request,
ctxt: Arc<SiteCtxt>,
) -> ServerResult<github::Response> {
log::info!("handle_github({:?})", request);
if request.comment.body.contains(" homu: ") {
if let Some(sha) = parse_homu_comment(&request.comment.body).await {
enqueue_sha(request.issue, &ctxt, sha).await?;
Expand Down
122 changes: 36 additions & 86 deletions site/src/request_handlers/self_profile.rs
Original file line number Diff line number Diff line change
@@ -1,31 +1,30 @@
use std::collections::{HashMap, HashSet};
use std::collections::HashSet;
use std::io::Read;
use std::sync::Arc;
use std::time::{Duration, Instant};

use bytes::Buf;
use headers::{ContentType, Header};
use hyper::StatusCode;
use log::error;

use crate::api::{self_profile, self_profile_raw, ServerResult};
use crate::api::{self_profile, self_profile_processed, self_profile_raw, ServerResult};
use crate::db::ArtifactId;
use crate::load::SiteCtxt;
use crate::selector::{self, Tag};
use crate::server::{Request, Response, ResponseHeaders};
use crate::server::{Response, ResponseHeaders};

pub async fn handle_self_profile_processed_download(
body: self_profile_raw::Request,
mut params: HashMap<String, String>,
body: self_profile_processed::Request,
ctxt: &SiteCtxt,
) -> http::Response<hyper::Body> {
log::info!("handle_self_profile_processed_download({:?})", body);
let mut params = body.params.clone();
let diff_against = params.remove("base_commit");
if params
.get("type")
.map_or(false, |t| t != "codegen-schedule")

if body.processor_type != self_profile_processed::ProcessorType::CodegenSchedule
&& diff_against.is_some()
{
let mut resp = Response::new("Only codegen_schedule supports diffing right now.".into());
let mut resp = Response::new("Only codegen-schedule supports diffing right now.".into());
*resp.status_mut() = StatusCode::BAD_REQUEST;
return resp;
}
Expand Down Expand Up @@ -75,7 +74,17 @@ pub async fn handle_self_profile_processed_download(
None
};

let data = match handle_self_profile_raw(body, ctxt).await {
let data = match handle_self_profile_raw(
self_profile_raw::Request {
commit: body.commit,
benchmark: body.benchmark.clone(),
run_name: body.run_name.clone(),
cid: body.cid,
},
ctxt,
)
.await
{
Ok(v) => match get_self_profile_raw_data(&v.url).await {
Ok(v) => v,
Err(e) => return e,
Expand All @@ -89,15 +98,16 @@ pub async fn handle_self_profile_processed_download(

log::trace!("got data in {:?}", start.elapsed());

let output = match crate::self_profile::generate(&title, base_data, data, params) {
Ok(c) => c,
Err(e) => {
log::error!("Failed to generate json {:?}", e);
let mut resp = http::Response::new(format!("{:?}", e).into());
*resp.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
return resp;
}
};
let output =
match crate::self_profile::generate(&title, body.processor_type, base_data, data, params) {
Ok(c) => c,
Err(e) => {
log::error!("Failed to generate json {:?}", e);
let mut resp = http::Response::new(format!("{:?}", e).into());
*resp.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
return resp;
}
};
let mut builder = http::Response::builder()
.header_typed(if output.filename.ends_with("json") {
ContentType::json()
Expand Down Expand Up @@ -390,9 +400,9 @@ pub async fn handle_self_profile_raw_download(
body: self_profile_raw::Request,
ctxt: &SiteCtxt,
) -> http::Response<hyper::Body> {
let res = handle_self_profile_raw(body, ctxt).await;
let (url, is_tarball) = match res {
Ok(v) => (v.url, v.is_tarball),
log::info!("handle_self_profile_raw_download({:?})", body);
let url = match handle_self_profile_raw(body, ctxt).await {
Ok(v) => v.url,
Err(e) => {
let mut resp = http::Response::new(e.into());
*resp.status_mut() = StatusCode::BAD_REQUEST;
Expand Down Expand Up @@ -427,75 +437,16 @@ pub async fn handle_self_profile_raw_download(
.insert(hyper::header::CONTENT_TYPE, header.pop().unwrap());
server_resp.headers_mut().insert(
hyper::header::CONTENT_DISPOSITION,
hyper::header::HeaderValue::from_maybe_shared(format!(
"attachment; filename=\"{}\"",
if is_tarball {
"self-profile.tar"
} else {
"self-profile.mm_profdata"
}
))
hyper::header::HeaderValue::from_maybe_shared(
"attachment; filename=\"self-profile.mm_profdata\"",
)
.expect("valid header"),
);
*server_resp.status_mut() = StatusCode::OK;
tokio::spawn(tarball(resp, sender));
server_resp
}

/// Parses a `self_profile_raw::Request` from the query string of `req`.
///
/// On success, returns the parsed request together with any *leftover*
/// query parameters (everything except `commit`, `benchmark`, and
/// `run_name`), so callers can forward extra options. On failure —
/// an unparseable URL or a missing required key — returns a fully-built
/// `400 Bad Request` response ready to send to the client. `cid` is
/// always `None`; callers set it separately if needed.
pub fn get_self_profile_raw(
    req: &Request,
) -> Result<(HashMap<String, String>, self_profile_raw::Request), http::Response<hyper::Body>> {
    // FIXME: how should this look?
    // `req.uri()` is only path + query; prepend a dummy scheme/authority
    // so `url::Url::parse` accepts it.
    let url = match url::Url::parse(&format!("http://example.com{}", req.uri())) {
        Ok(v) => v,
        Err(e) => {
            error!("failed to parse url {}: {:?}", req.uri(), e);
            return Err(http::Response::builder()
                .header_typed(ContentType::text_utf8())
                .status(StatusCode::BAD_REQUEST)
                .body(hyper::Body::from(format!(
                    "failed to parse url {}: {:?}",
                    req.uri(),
                    e
                )))
                .unwrap());
        }
    };
    // Collect every query pair; required keys are removed below and
    // whatever remains is handed back to the caller.
    let mut parts = url
        .query_pairs()
        .into_owned()
        .collect::<HashMap<String, String>>();
    // Pops the named key from `parts`, or early-returns a 400 response
    // that names the missing key in both the log and the body.
    macro_rules! key_or_error {
        ($ident:ident) => {
            if let Some(v) = parts.remove(stringify!($ident)) {
                v
            } else {
                error!(
                    "failed to deserialize request {}: missing {} in query string",
                    req.uri(),
                    stringify!($ident)
                );
                return Err(http::Response::builder()
                    .header_typed(ContentType::text_utf8())
                    .status(StatusCode::BAD_REQUEST)
                    .body(hyper::Body::from(format!(
                        "failed to deserialize request {}: missing {} in query string",
                        req.uri(),
                        stringify!($ident)
                    )))
                    .unwrap());
            }
        };
    }
    let request = self_profile_raw::Request {
        commit: key_or_error!(commit),
        benchmark: key_or_error!(benchmark),
        run_name: key_or_error!(run_name),
        // Never parsed from the query string here.
        cid: None,
    };
    return Ok((parts, request));
}

async fn tarball(resp: reqwest::Response, mut sender: hyper::body::Sender) {
// Ideally, we would stream the response though the snappy decoding, but
// snappy doesn't support that AFAICT -- we'd need it to implement AsyncRead
Expand Down Expand Up @@ -615,7 +566,6 @@ pub async fn handle_self_profile_raw(
cids: cids.to_vec(),
cid,
url,
is_tarball: false,
})
}
}
Expand Down
14 changes: 8 additions & 6 deletions site/src/self_profile.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ mod codegen_schedule;
pub mod crox;
pub mod flamegraph;

pub type ProcessorType = crate::api::self_profile_processed::ProcessorType;

pub struct Output {
pub data: Vec<u8>,
pub filename: &'static str,
Expand All @@ -16,13 +18,13 @@ pub struct Output {

pub fn generate(
title: &str,
processor_type: ProcessorType,
self_profile_base_data: Option<Vec<u8>>,
self_profile_data: Vec<u8>,
mut params: HashMap<String, String>,
params: HashMap<String, String>,
) -> anyhow::Result<Output> {
let removed = params.remove("type");
match removed.as_deref() {
Some("crox") => {
match processor_type {
ProcessorType::Crox => {
let opt = serde_json::from_str(&serde_json::to_string(&params).unwrap())
.context("crox opts")?;
Ok(Output {
Expand All @@ -31,7 +33,7 @@ pub fn generate(
is_download: true,
})
}
Some("flamegraph") => {
ProcessorType::Flamegraph => {
let opt = serde_json::from_str(&serde_json::to_string(&params).unwrap())
.context("flame opts")?;
Ok(Output {
Expand All @@ -40,7 +42,7 @@ pub fn generate(
is_download: false,
})
}
Some("codegen-schedule") => {
ProcessorType::CodegenSchedule => {
let opt =
serde_json::from_str(&serde_json::to_string(&params).unwrap()).context("params")?;
Ok(Output {
Expand Down
Loading