'use strict';

+/* eslint-disable no-for-of-loops/no-for-of-loops */
+
// Hi, if this is your first time editing/reading a Dangerfile, here's a summary:
// It's a JS runtime which helps you provide continuous feedback inside GitHub.
//
// `DANGER_GITHUB_API_TOKEN=[ENV_ABOVE] yarn danger pr https://github.com/facebook/react/pull/11865`

const {markdown, danger, warn} = require('danger');
-
-const {generateResultsArray} = require('./scripts/rollup/stats');
-const {readFileSync, readdirSync} = require('fs');
-const path = require('path');
-
-/**
- * Generates a Markdown table
- * @param {string[]} headers
- * @param {string[][]} body
- */
-function generateMDTable(headers, body) {
-  const tableHeaders = [
-    headers.join(' | '),
-    headers.map(() => ' --- ').join(' | '),
-  ];
-
-  const tablebody = body.map(r => r.join(' | '));
-  return tableHeaders.join('\n') + '\n' + tablebody.join('\n');
+const {promisify} = require('util');
+const glob = promisify(require('glob'));
+const gzipSize = require('gzip-size');
+
+const {readFileSync, statSync} = require('fs');
+
+const BASE_DIR = 'base-build';
+const HEAD_DIR = 'build2';
+
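+// Changes above CRITICAL_THRESHOLD are always called out at the top of the
+// report; changes above SIGNIFICANCE_THRESHOLD appear in the collapsed
+// "significant" table further down.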
+const CRITICAL_THRESHOLD = 0.02;
+const SIGNIFICANCE_THRESHOLD = 0.002;
+const CRITICAL_ARTIFACT_PATHS = new Set([
+  // We always report changes to these bundles, even if the change is
+  // insignificant or non-existent.
+  'oss-stable/react-dom/cjs/react-dom.production.min.js',
+  'oss-experimental/react-dom/cjs/react-dom.production.min.js',
+  'facebook-www/ReactDOM-prod.classic.js',
+  'facebook-www/ReactDOM-prod.modern.js',
+  'facebook-www/ReactDOMForked-prod.classic.js',
+]);
+
+const kilobyteFormatter = new Intl.NumberFormat('en', {
+  style: 'unit',
+  unit: 'kilobyte',
+  minimumFractionDigits: 2,
+  maximumFractionDigits: 2,
+});
+
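+// Formats a byte count as a fixed-precision kilobyte string, e.g. "12.34 kB".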
+function kbs(bytes) {
+  return kilobyteFormatter.format(bytes / 1000);
}

-/**
- * Generates a user-readable string from a percentage change
- * @param {number} change
- * @param {boolean} includeEmoji
- */
-function addPercent(change, includeEmoji) {
-  if (!isFinite(change)) {
-    // When a new package is created
-    return 'n/a';
-  }
-  const formatted = (change * 100).toFixed(1);
-  if (/^-|^0(?:\.0+)$/.test(formatted)) {
-    return `${formatted}%`;
-  } else {
-    if (includeEmoji) {
-      return `:small_red_triangle:+${formatted}%`;
-    } else {
-      return `+${formatted}%`;
-    }
-  }
-}
+const percentFormatter = new Intl.NumberFormat('en', {
+  style: 'percent',
+  signDisplay: 'exceptZero',
+  minimumFractionDigits: 2,
+  maximumFractionDigits: 2,
+});

-function setBoldness(row, isBold) {
-  if (isBold) {
-    return row.map(element => `**${element}**`);
-  } else {
-    return row;
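+// Formats a fractional size change for the report. Infinity marks an artifact
+// that only exists in the head build and -1 marks one that was deleted.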
+function change(decimal) {
+  if (decimal === Infinity) {
+    return 'New file';
  }
-}
-
-function getBundleSizes(pathToSizesDir) {
-  const filenames = readdirSync(pathToSizesDir);
-  let bundleSizes = [];
-  for (let i = 0; i < filenames.length; i++) {
-    const filename = filenames[i];
-    if (filename.endsWith('.json')) {
-      const json = readFileSync(path.join(pathToSizesDir, filename));
-      bundleSizes.push(...JSON.parse(json).bundleSizes);
-    }
+  if (decimal === -1) {
+    return 'Deleted';
+  }
+  if (Math.abs(decimal) < 0.0001) {
+    return '=';
  }
-  return {bundleSizes};
+  return percentFormatter.format(decimal);
}

-async function printResultsForChannel(baseResults, headResults) {
-  // Take the JSON of the build response and
-  // make an array comparing the results for printing
-  const results = generateResultsArray(headResults, baseResults);
-
-  const packagesToShow = results
-    .filter(
-      r =>
-        Math.abs(r.prevFileSizeAbsoluteChange) >= 300 || // bytes
-        Math.abs(r.prevGzipSizeAbsoluteChange) >= 100 // bytes
-    )
-    .map(r => r.packageName);
-
-  if (packagesToShow.length) {
-    let allTables = [];
-
-    // Highlight React and React DOM changes inline
-    // e.g. react: `react.production.min.js`: -3%, `react.development.js`: +4%
-
-    if (packagesToShow.includes('react')) {
-      const reactProd = results.find(
-        r => r.bundleType === 'UMD_PROD' && r.packageName === 'react'
-      );
-      if (
-        reactProd.prevFileSizeChange !== 0 ||
-        reactProd.prevGzipSizeChange !== 0
-      ) {
-        const changeSize = addPercent(reactProd.prevFileSizeChange, true);
-        const changeGzip = addPercent(reactProd.prevGzipSizeChange, true);
-        markdown(`React: size: ${changeSize}, gzip: ${changeGzip}`);
-      }
-    }
-
-    if (packagesToShow.includes('react-dom')) {
-      const reactDOMProd = results.find(
-        r => r.bundleType === 'UMD_PROD' && r.packageName === 'react-dom'
-      );
-      if (
-        reactDOMProd.prevFileSizeChange !== 0 ||
-        reactDOMProd.prevGzipSizeChange !== 0
-      ) {
-        const changeSize = addPercent(reactDOMProd.prevFileSizeChange, true);
-        const changeGzip = addPercent(reactDOMProd.prevGzipSizeChange, true);
-        markdown(`ReactDOM: size: ${changeSize}, gzip: ${changeGzip}`);
-      }
-    }
-
-    // Show a hidden summary table for all diffs
-
-    // eslint-disable-next-line no-var,no-for-of-loops/no-for-of-loops
-    for (var name of new Set(packagesToShow)) {
-      const thisBundleResults = results.filter(r => r.packageName === name);
-      const changedFiles = thisBundleResults.filter(
-        r => r.prevFileSizeChange !== 0 || r.prevGzipSizeChange !== 0
-      );
-
-      const mdHeaders = [
-        'File',
-        'Filesize Diff',
-        'Gzip Diff',
-        'Prev Size',
-        'Current Size',
-        'Prev Gzip',
-        'Current Gzip',
-        'ENV',
-      ];
-
-      const mdRows = changedFiles.map(r => {
-        const isProd = r.bundleType.includes('PROD');
-        return setBoldness(
-          [
-            r.filename,
-            addPercent(r.prevFileSizeChange, isProd),
-            addPercent(r.prevGzipSizeChange, isProd),
-            r.prevSize,
-            r.prevFileSize,
-            r.prevGzip,
-            r.prevGzipSize,
-            r.bundleType,
-          ],
-          isProd
-        );
-      });
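+// Markdown table header shared by the critical and significant sections.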
+const header = `
+  | Name | +/- | Base | Current | +/- gzip | Base gzip | Current gzip |
+  | ---- | --- | ---- | ------- | -------- | --------- | ------------ |`;

-      allTables.push(`\n## ${name}`);
-      allTables.push(generateMDTable(mdHeaders, mdRows));
-    }
-
-    const summary = `
-  <details>
-  <summary>Details of bundled changes.</summary>
-
-  ${allTables.join('\n')}
-
-  </details>
-  `;
-    return summary;
-  } else {
-    return 'No significant bundle size changes to report.';
-  }
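+// Renders a single artifact comparison as one row of the markdown table.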
+function row(result) {
+  // prettier-ignore
+  return `| ${result.path} | **${change(result.change)}** | ${kbs(result.baseSize)} | ${kbs(result.headSize)} | ${change(result.changeGzip)} | ${kbs(result.baseSizeGzip)} | ${kbs(result.headSizeGzip)}`;
}

(async function() {
@@ -202,21 +100,10 @@ async function printResultsForChannel(baseResults, headResults) {
  }

  let headSha;
-  let headSizesStable;
-  let headSizesExperimental;
-
  let baseSha;
-  let baseSizesStable;
-  let baseSizesExperimental;
-
  try {
-    headSha = (readFileSync('./build2/COMMIT_SHA') + '').trim();
-    headSizesStable = getBundleSizes('./build2/sizes-stable');
-    headSizesExperimental = getBundleSizes('./build2/sizes-experimental');
-
-    baseSha = (readFileSync('./base-build/COMMIT_SHA') + '').trim();
-    baseSizesStable = getBundleSizes('./base-build/sizes-stable');
-    baseSizesExperimental = getBundleSizes('./base-build/sizes-experimental');
+    headSha = (readFileSync(HEAD_DIR + '/COMMIT_SHA') + '').trim();
+    baseSha = (readFileSync(BASE_DIR + '/COMMIT_SHA') + '').trim();
  } catch {
    warn(
      "Failed to read build artifacts. It's possible a build configuration " +
@@ -226,17 +113,135 @@ async function printResultsForChannel(baseResults, headResults) {
    return;
  }

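+  // Maps each artifact path to its size comparison between the base and head builds.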
+  const resultsMap = new Map();
+
+  // Find all the head (current) artifacts paths.
+  const headArtifactPaths = await glob('**/*.js', {cwd: 'build2'});
+  for (const artifactPath of headArtifactPaths) {
+    try {
+      // This will throw if there's no matching base artifact
+      const baseSize = statSync(BASE_DIR + '/' + artifactPath).size;
+      const baseSizeGzip = gzipSize.fileSync(BASE_DIR + '/' + artifactPath);
+
+      const headSize = statSync(HEAD_DIR + '/' + artifactPath).size;
+      const headSizeGzip = gzipSize.fileSync(HEAD_DIR + '/' + artifactPath);
+      resultsMap.set(artifactPath, {
+        path: artifactPath,
+        headSize,
+        headSizeGzip,
+        baseSize,
+        baseSizeGzip,
+        change: (headSize - baseSize) / baseSize,
+        changeGzip: (headSizeGzip - baseSizeGzip) / baseSizeGzip,
+      });
+    } catch {
+      // There's no matching base artifact. This is a new file.
+      const baseSize = 0;
+      const baseSizeGzip = 0;
+      const headSize = statSync(HEAD_DIR + '/' + artifactPath).size;
+      const headSizeGzip = gzipSize.fileSync(HEAD_DIR + '/' + artifactPath);
+      resultsMap.set(artifactPath, {
+        path: artifactPath,
+        headSize,
+        headSizeGzip,
+        baseSize,
+        baseSizeGzip,
+        change: Infinity,
+        changeGzip: Infinity,
+      });
+    }
+  }
+
+  // Check for base artifacts that were deleted in the head.
+  const baseArtifactPaths = await glob('**/*.js', {cwd: 'base-build'});
+  for (const artifactPath of baseArtifactPaths) {
+    if (!resultsMap.has(artifactPath)) {
+      const baseSize = statSync(BASE_DIR + '/' + artifactPath).size;
+      const baseSizeGzip = gzipSize.fileSync(BASE_DIR + '/' + artifactPath);
+      const headSize = 0;
+      const headSizeGzip = 0;
+      resultsMap.set(artifactPath, {
+        path: artifactPath,
+        headSize,
+        headSizeGzip,
+        baseSize,
+        baseSizeGzip,
+        change: -1,
+        changeGzip: -1,
+      });
+    }
+  }
+
+  const results = Array.from(resultsMap.values());
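+  // Sort by relative change, largest increases first.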
+  results.sort((a, b) => b.change - a.change);
+
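+  // The critical bundles are always reported first, in a fixed order, followed
+  // by any other artifact whose change crosses the critical threshold.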
+  let criticalResults = [];
+  for (const artifactPath of CRITICAL_ARTIFACT_PATHS) {
+    const result = resultsMap.get(artifactPath);
+    if (result === undefined) {
+      throw new Error(
+        'Missing expected bundle. If this was an intentional change to the ' +
+          'build configuration, update Dangerfile.js accordingly: ' +
+          artifactPath
+      );
+    }
+    criticalResults.push(row(result));
+  }
+
+  let significantResults = [];
+  for (const result of results) {
+    // If result exceeds critical threshold, add to top section.
+    if (
+      (result.change > CRITICAL_THRESHOLD ||
+        0 - result.change > CRITICAL_THRESHOLD ||
+        // New file
+        result.change === Infinity ||
+        // Deleted file
+        result.change === -1) &&
+      // Skip critical artifacts. We added those earlier, in a fixed order.
+      !CRITICAL_ARTIFACT_PATHS.has(result.path)
+    ) {
+      criticalResults.push(row(result));
+    }
+
+    // Do the same for results that exceed the significant threshold. These
+    // will go into the bottom, collapsed section. Intentionally including
+    // critical artifacts in this section, too.
+    if (
+      result.change > SIGNIFICANCE_THRESHOLD ||
+      0 - result.change > SIGNIFICANCE_THRESHOLD ||
+      result.change === Infinity ||
+      result.change === -1
+    ) {
+      significantResults.push(row(result));
+    }
+  }
+
  markdown(`
-## Size changes
+Comparing: ${baseSha}...${headSha}
+
+## Critical size changes

-<p>Comparing: ${baseSha}...${headSha}</p>
+Includes critical production bundles, as well as any change greater than ${CRITICAL_THRESHOLD *
+  100}%:

-### Stable channel
+${header}
+${criticalResults.join('\n')}

-${await printResultsForChannel(baseSizesStable, headSizesStable)}
+## Significant size changes

-### Experimental channel
+Includes any change greater than ${SIGNIFICANCE_THRESHOLD * 100}%:

-${await printResultsForChannel(baseSizesExperimental, headSizesExperimental)}
+${
+  significantResults.length > 0
+    ? `
+<details>
+<summary>Expand to show</summary>
+${header}
+${significantResults.join('\n')}
+</details>
+`
+    : '(No significant changes)'
+}
`);
})();