@@ -2,12 +2,14 @@ use collector::Bound;
 use std::collections::HashMap;
 use std::sync::Arc;
 
+use crate::api::detail::{CompilationSection, CompilationSections};
 use crate::api::graphs::GraphKind;
 use crate::api::{detail, graphs, ServerResult};
 use crate::db::{self, ArtifactId, Profile, Scenario};
 use crate::interpolate::IsInterpolated;
 use crate::load::SiteCtxt;
 use crate::selector::{CompileBenchmarkQuery, CompileTestCase, Selector, SeriesResponse};
+use crate::self_profile::{download_and_analyze_self_profile, SelfProfileWithAnalysis};
 
 /// Returns data for detailed information when comparing a single test result
 /// for a compile-time benchmark.
@@ -23,12 +25,13 @@ pub async fn handle_compile_detail(
         request.end,
     ));
 
+    let scenario = request.scenario.parse()?;
     let interpolated_responses: Vec<_> = ctxt
         .statistic_series(
             CompileBenchmarkQuery::default()
-                .benchmark(Selector::One(request.benchmark))
+                .benchmark(Selector::One(request.benchmark.clone()))
                 .profile(Selector::One(request.profile.parse()?))
-                .scenario(Selector::One(request.scenario.parse()?))
+                .scenario(Selector::One(scenario))
                 .metric(Selector::One(request.stat.parse()?)),
             artifact_ids.clone(),
         )
@@ -37,6 +40,51 @@ pub async fn handle_compile_detail(
         .map(|sr| sr.interpolate().map(|series| series.collect::<Vec<_>>()))
         .collect();
 
+    async fn calculate_sections(
+        ctxt: &SiteCtxt,
+        aid: Option<&ArtifactId>,
+        benchmark: &str,
+        profile: &str,
+        scenario: Scenario,
+    ) -> Option<CompilationSections> {
+        match aid {
+            Some(aid) => download_and_analyze_self_profile(
+                ctxt,
+                aid.clone(),
+                benchmark,
+                profile,
+                scenario,
+                None,
+            )
+            .await
+            .ok()
+            .map(|profile| compute_sections(&profile)),
+            None => None,
+        }
+    }
+
+    // Doc queries are not split into the classic frontend/backend/linker parts.
+    let (sections_before, sections_after) = if request.profile != "doc" {
+        tokio::join!(
+            calculate_sections(
+                &ctxt,
+                artifact_ids.get(0),
+                &request.benchmark,
+                &request.profile,
+                scenario,
+            ),
+            calculate_sections(
+                &ctxt,
+                artifact_ids.get(1),
+                &request.benchmark,
+                &request.profile,
+                scenario,
+            )
+        )
+    } else {
+        (None, None)
+    };
+
     let mut graphs = Vec::new();
 
     let mut interpolated_responses = interpolated_responses.into_iter();
@@ -51,9 +99,64 @@ pub async fn handle_compile_detail(
     Ok(detail::Response {
         commits: artifact_ids_to_commits(artifact_ids),
         graphs,
+        sections_before,
+        sections_after,
     })
 }
 
+/// Heuristically tries to determine which queries belong to individual high-level
+/// "compilation sections" (parsing, analysis, codegen, linker etc.).
+fn compute_sections(profile: &SelfProfileWithAnalysis) -> CompilationSections {
+    let mut parsing = 0;
+    let mut metadata = 0;
+    let mut analysis = 0;
+    let mut codegen = 0;
+    let mut linker = 0;
+
+    for query in &profile.profile.query_data {
+        let query_name = query.label.as_str().to_lowercase();
+        if query_name == "parse_crate" {
+            parsing += query.time;
+        } else if query_name == "resolver_for_lowering" {
+            metadata += query.time;
+        } else if query_name == "analysis" {
+            analysis += query.time;
+        } else if query_name == "link_crate" {
+            // The link query overlaps codegen, so we want to take the duration of the `link_crate`
+            // query instead.
+            linker += query.time;
+        } else if query_name == "codegen_crate" || query_name == "llvm_passes" {
+            // The codegen is divided between rustc part and codegen backend part.
+            // FIXME: these two queries can overlap, this should be calculated by a better analysis
+            codegen += query.time;
+        }
+    }
+    CompilationSections {
+        sections: vec![
+            CompilationSection {
+                name: "Parsing".to_string(),
+                value: parsing,
+            },
+            CompilationSection {
+                name: "Metadata".to_string(),
+                value: metadata,
+            },
+            CompilationSection {
+                name: "Analysis".to_string(),
+                value: analysis,
+            },
+            CompilationSection {
+                name: "Codegen".to_string(),
+                value: codegen,
+            },
+            CompilationSection {
+                name: "Linker".to_string(),
+                value: linker,
+            },
+        ],
+    }
+}
+
 pub async fn handle_graphs(
     request: graphs::Request,
     ctxt: Arc<SiteCtxt>,
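
Illustration only (not part of the diff above): a minimal, self-contained sketch of how the per-section totals produced by `compute_sections` could be turned into percentage shares for display. The `CompilationSection` stand-in below only mirrors the two fields the diff uses (`name` plus an integer `value`); the real type lives in `crate::api::detail`, and the concrete duration unit of `value` is an assumption here.

```rust
// Stand-in for crate::api::detail::CompilationSection (assumed shape:
// a section name plus an integer duration; the real field types may differ).
struct CompilationSection {
    name: String,
    value: u64,
}

// Convert absolute section durations into percentage shares of the total,
// which is how a UI might present the frontend/backend/linker split.
fn section_shares(sections: &[CompilationSection]) -> Vec<(String, f64)> {
    let total: u64 = sections.iter().map(|s| s.value).sum();
    if total == 0 {
        // Avoid dividing by zero when no sections were recorded (e.g. doc builds).
        return Vec::new();
    }
    sections
        .iter()
        .map(|s| (s.name.clone(), s.value as f64 * 100.0 / total as f64))
        .collect()
}

fn main() {
    let sections = vec![
        CompilationSection { name: "Parsing".to_string(), value: 50 },
        CompilationSection { name: "Analysis".to_string(), value: 300 },
        CompilationSection { name: "Codegen".to_string(), value: 150 },
    ];
    for (name, pct) in section_shares(&sections) {
        println!("{name}: {pct:.1}%");
    }
}
```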