未验证 提交 da5a05dc 编写于 作者: J JJ Kasper 提交者: GitHub

Migrate PR stats action into Next.js repo (#13177)

* Migrate PR stats into Next.js repo

* Update running prettier in local mode
上级 beeefaae
......@@ -8,3 +8,4 @@ packages/next/compiled/**/*
packages/react-refresh-utils/**/*.js
packages/react-dev-overlay/lib/**
**/__tmp__/**
.github/actions/next-stats-action/.work
\ No newline at end of file
**/node_modules
out.md
.work
\ No newline at end of file
# Container image for the Next.js PR stats GitHub Action.
FROM node:10-buster
LABEL com.github.actions.name="Next.js PR Stats"
LABEL com.github.actions.description="Compares stats of a PR with the main branch"
LABEL repository="https://github.com/zeit/next-stats-action"
COPY . /next-stats
# Install node_modules
RUN cd /next-stats && yarn install --production
# Identity used for the git merges/commits the stats runner performs
RUN git config --global user.email 'stats@localhost'
RUN git config --global user.name 'next stats'
# Install `ab` (apache2-utils) used for benchmarking. Update + install are
# combined into a single RUN so a cached `apt update` layer can never be
# paired with a fresh `apt install` (stale package index).
RUN apt update && apt install -y apache2-utils
COPY entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]
# Next.js Stats GitHub Action
> Downloads and runs project with provided configs gathering stats to compare branches
See it in action at the Next.js repository: https://github.com/zeit/next.js
## Getting Started
1. Add a `.stats-app` folder to your project containing a [`stats-config.js`](#stats-config) and any files to run against — for example, a test app to be built
2. Add the action to your [workflow](https://help.github.com/en/articles/configuring-a-workflow)
3. Enjoy the stats
## Stats Config
```TypeScript
const StatsConfig = {
// the Heading to show at the top of stats comments
commentHeading: 'Stats from current PR' | undefined,
commentReleaseHeading: 'Stats from current release' | undefined,
// the command to build your project if not done on post install
initialBuildCommand: undefined | string,
skipInitialInstall: undefined | boolean,
// the command to build the app (app source should be in `.stats-app`)
appBuildCommand: string,
appStartCommand: string | undefined,
// the main branch to compare against (what PRs will be merging into)
mainBranch: 'canary',
// the main repository path (relative to https://github.com/)
mainRepo: 'zeit/next.js',
// whether to attempt auto merging the main branch into PR before running stats
autoMergeMain: boolean | undefined,
// an array of configs for each run
configs: [
{ // first run's config
// title of the run
title: 'fastMode stats',
// whether to diff the outputted files (default: onOutputChange)
diff: 'onOutputChange' | false | undefined,
// config files to add before running diff (if `undefined` uses `configFiles`)
diffConfigFiles: [] | undefined,
// renames to apply to make file names deterministic
renames: [
{
srcGlob: 'main-*.js',
dest: 'main.js'
}
],
// config files to add before running (removed before successive runs)
configFiles: [
{
path: './next.config.js',
content: 'module.exports = { fastMode: true }'
}
],
// an array of file groups to diff/track
filesToTrack: [
{
name: 'Pages',
globs: [
'build/pages/**/*.js'
]
}
],
// an array of URLs to fetch while `appStartCommand` is running
// will be output to fetched-pages/${pathname}.html
pagesToFetch: [
'https://localhost:$PORT/page-1'
]
},
{ // second run's config
title: 'slowMode stats',
diff: false,
configFiles: [
{
path: './next.config.js',
content: 'module.exports = { slowMode: true }'
}
],
filesToTrack: [
{
name: 'Main Bundles',
globs: [
'build/runtime/webpack-*.js',
'build/runtime/main-*.js',
]
}
]
},
]
}
module.exports = StatsConfig
```
#!/bin/bash
set -eu # stop on error
# Run as root inside the action container so git/yarn have a writable HOME
export HOME=/root
# Entry point: runs the stats pipeline (clone, build, compare, comment)
node /next-stats/src/index.js
{
"name": "get-stats",
"version": "1.0.0",
"main": "src/index.js",
"license": "MIT",
"dependencies": {
"async-sema": "^3.1.0",
"fs-extra": "^8.1.0",
"get-port": "^5.0.0",
"glob": "^7.1.4",
"gzip-size": "^5.1.1",
"minimatch": "^3.0.4",
"node-fetch": "^2.6.0",
"prettier": "^1.18.2",
"pretty-bytes": "^5.3.0",
"pretty-ms": "^5.0.0"
}
}
const path = require('path')
const fs = require('fs-extra')
const fetch = require('node-fetch')
const prettyMs = require('pretty-ms')
const logger = require('./util/logger')
const prettyBytes = require('pretty-bytes')
const { benchTitle } = require('./constants')
const gzipIgnoreRegex = new RegExp(`(General|^Serverless|${benchTitle})`)
// Format a numeric stat for display: bytes by default, milliseconds when
// `type` is 'ms'. Missing (non-numeric) values render as "N/A".
const prettify = (val, type = 'bytes') => {
  if (typeof val !== 'number') return 'N/A'
  if (type === 'bytes') return prettyBytes(val)
  return prettyMs(val)
}
// Round `num` to `places` decimal places.
const round = (num, places) => {
  const factor = 10 ** places
  return Math.round(num * factor) / factor
}
// Shorten long item labels (e.g. hashed bundle file names) to
// "first 12 chars..last 12 chars" so stats tables stay readable.
// Uses `slice` instead of the deprecated `String.prototype.substr`.
const shortenLabel = (itemKey) =>
  itemKey.length > 24
    ? `${itemKey.slice(0, 12)}..${itemKey.slice(-12)}`
    : itemKey
const twoMB = 2 * 1024 * 1024
module.exports = async function addComment(
results = [],
actionInfo,
statsConfig
) {
let comment = `# ${
actionInfo.isRelease
? statsConfig.commentReleaseHeading || 'Stats from current release'
: statsConfig.commentHeading || 'Stats from current PR'
}\n\n`
const tableHead = `| | ${statsConfig.mainRepo} ${statsConfig.mainBranch} ${
actionInfo.lastStableTag || ''
} | ${actionInfo.prRepo} ${actionInfo.prRef} | Change |\n| - | - | - | - |\n`
for (let i = 0; i < results.length; i++) {
const result = results[i]
const isLastResult = i === results.length - 1
let resultHasIncrease = false
let resultHasDecrease = false
let resultContent = ''
Object.keys(result.mainRepoStats).forEach((groupKey) => {
const isBenchmark = groupKey === benchTitle
const mainRepoGroup = result.mainRepoStats[groupKey]
const diffRepoGroup = result.diffRepoStats[groupKey]
const itemKeys = new Set([
...Object.keys(mainRepoGroup),
...Object.keys(diffRepoGroup),
])
let groupTable = tableHead
let mainRepoTotal = 0
let diffRepoTotal = 0
let totalChange = 0
itemKeys.forEach((itemKey) => {
const prettyType = itemKey.match(/(length|duration)/i) ? 'ms' : 'bytes'
const isGzipItem = itemKey.endsWith('gzip')
const mainItemVal = mainRepoGroup[itemKey]
const diffItemVal = diffRepoGroup[itemKey]
const useRawValue = isBenchmark && prettyType !== 'ms'
const mainItemStr = useRawValue
? mainItemVal
: prettify(mainItemVal, prettyType)
const diffItemStr = useRawValue
? diffItemVal
: prettify(diffItemVal, prettyType)
let change = ''
// Don't show gzip values for serverless as they aren't
// deterministic currently
if (groupKey.startsWith('Serverless') && isGzipItem) return
// otherwise only show gzip values
else if (!isGzipItem && !groupKey.match(gzipIgnoreRegex)) return
if (
itemKey !== 'buildDuration' ||
(isBenchmark && itemKey.match(/req\/sec/))
) {
if (typeof mainItemVal === 'number') mainRepoTotal += mainItemVal
if (typeof diffItemVal === 'number') diffRepoTotal += diffItemVal
}
// calculate the change
if (mainItemVal !== diffItemVal) {
if (
typeof mainItemVal === 'number' &&
typeof diffItemVal === 'number'
) {
change = round(diffItemVal - mainItemVal, 2)
// check if there is still a change after rounding
if (change !== 0) {
const absChange = Math.abs(change)
change = `${change < 0 ? '-' : '⚠️ +'}${
useRawValue ? absChange : prettify(absChange, prettyType)
}`
}
} else {
change = 'N/A'
}
}
groupTable += `| ${
isBenchmark ? itemKey : shortenLabel(itemKey)
} | ${mainItemStr} | ${diffItemStr} | ${change} |\n`
})
let groupTotalChange = ''
totalChange = diffRepoTotal - mainRepoTotal
if (totalChange !== 0) {
if (totalChange < 0) {
resultHasDecrease = true
groupTotalChange = ` Overall decrease ${isBenchmark ? '⚠️' : ''}`
} else {
if (
(groupKey !== 'General' && totalChange > 5) ||
totalChange > twoMB
) {
resultHasIncrease = true
}
groupTotalChange = ` Overall increase ${isBenchmark ? '' : '⚠️'}`
}
}
if (groupKey !== 'General' && groupKey !== benchTitle) {
let totalChangeSign = ''
if (totalChange === 0) {
totalChange = ''
} else {
totalChangeSign = totalChange < 0 ? '-' : '⚠️ +'
}
totalChange = `${totalChangeSign}${
typeof totalChange === 'number'
? prettify(Math.abs(totalChange))
: totalChange
}`
groupTable += `| Overall change | ${prettyBytes(
round(mainRepoTotal, 2)
)} | ${prettyBytes(round(diffRepoTotal, 2))} | ${totalChange} |\n`
}
if (itemKeys.size > 0) {
resultContent += `<details>\n`
resultContent += `<summary><strong>${groupKey}</strong>${groupTotalChange}</summary>\n\n`
resultContent += groupTable
resultContent += `\n</details>\n\n`
}
})
// add diffs
if (result.diffs) {
const diffHeading = '#### Diffs\n'
let diffContent = diffHeading
Object.keys(result.diffs).forEach((itemKey) => {
const curDiff = result.diffs[itemKey]
diffContent += `<details>\n`
diffContent += `<summary>Diff for <strong>${shortenLabel(
itemKey
)}</strong></summary>\n\n`
if (curDiff.length > 36 * 1000) {
diffContent += 'Diff too large to display'
} else {
diffContent += `\`\`\`diff\n${curDiff}\n\`\`\``
}
diffContent += `\n</details>\n`
})
if (diffContent !== diffHeading) {
resultContent += diffContent
}
}
let increaseDecreaseNote = ''
if (resultHasIncrease) {
increaseDecreaseNote = ' (Increase detected ⚠️)'
} else if (resultHasDecrease) {
increaseDecreaseNote = ' (Decrease detected ✓)'
}
comment += `<details>\n`
comment += `<summary><strong>${result.title}</strong>${increaseDecreaseNote}</summary>\n\n<br/>\n\n`
comment += resultContent
comment += '</details>\n'
if (!isLastResult) {
comment += `<hr/>\n`
}
}
if (process.env.LOCAL_STATS) {
const statsPath = path.resolve('pr-stats.md')
await fs.writeFile(statsPath, comment)
console.log(`Output PR stats to ${statsPath}`)
} else {
logger('\n--stats start--\n', comment, '\n--stats end--\n')
}
if (
actionInfo.customCommentEndpoint ||
(actionInfo.githubToken && actionInfo.commentEndpoint)
) {
logger(`Posting results to ${actionInfo.commentEndpoint}`)
const body = {
body: comment,
...(!actionInfo.githubToken
? {
isRelease: actionInfo.isRelease,
commitId: actionInfo.commitId,
issueId: actionInfo.issueId,
}
: {}),
}
if (actionInfo.customCommentEndpoint) {
logger(`Using body ${JSON.stringify({ ...body, body: 'OMITTED' })}`)
}
try {
const res = await fetch(actionInfo.commentEndpoint, {
method: 'POST',
headers: {
...(actionInfo.githubToken
? {
Authorization: `bearer ${actionInfo.githubToken}`,
}
: {
'content-type': 'application/json',
}),
},
body: JSON.stringify(body),
})
if (!res.ok) {
logger.error(`Failed to post results ${res.status}`)
try {
logger.error(await res.text())
} catch (_) {
/* no-op */
}
} else {
logger('Successfully posted results')
}
} catch (err) {
logger.error(`Error occurred posting results`, err)
}
} else {
logger(
`Not posting results`,
actionInfo.githubToken ? 'No comment endpoint' : 'no GitHub token'
)
}
}
const path = require('path')
// Heading used for the page-load benchmark stats group
const benchTitle = 'Page Load Tests'
// Scratch directory all clones, builds, and diffs happen inside
const workDir = path.join(__dirname, '../.work')
const mainRepoName = 'main-repo'
const diffRepoName = 'diff-repo'
// Checkout locations for the main branch and the PR (diff) branch
const mainRepoDir = path.join(workDir, mainRepoName)
const diffRepoDir = path.join(workDir, diffRepoName)
// The app being measured is copied here for each run
const statsAppDir = path.join(workDir, 'stats-app')
// git-backed directory used to diff tracked output files between runs
const diffingDir = path.join(workDir, 'diff')
// Locations (relative to the PR repo root) searched for stats-config.js
const allowedConfigLocations = [
  './',
  '.stats-app',
  'test/.stats-app',
  '.github/.stats-app',
]
module.exports = {
  benchTitle,
  workDir,
  diffingDir,
  mainRepoName,
  diffRepoName,
  mainRepoDir,
  diffRepoDir,
  statsAppDir,
  allowedConfigLocations,
}
// Entry point for the stats action: clones the PR and main repos, builds
// both, runs every stats config, and posts the comparison comment.
const exec = require('./util/exec')
const logger = require('./util/logger')
const runConfigs = require('./run')
const addComment = require('./add-comment')
const actionInfo = require('./prepare/action-info')()
const { mainRepoDir, diffRepoDir } = require('./constants')
const loadStatsConfig = require('./prepare/load-stats-config')
const {
  cloneRepo,
  checkoutRef,
  mergeBranch,
  getCommitId,
  linkPackages,
  getLastStable,
} = require('./prepare/repo-setup')(actionInfo)
// only run for PR sync/open events or releases
const allowedActions = new Set(['synchronize', 'opened'])
if (!allowedActions.has(actionInfo.actionName) && !actionInfo.isRelease) {
  logger(
    `Not running for ${actionInfo.actionName} event action on repo: ${actionInfo.prRepo} and ref ${actionInfo.prRef}`
  )
  process.exit(0)
}
;(async () => {
  try {
    // sanity check that a git identity is configured (needed for merges)
    const { stdout: gitName } = await exec(
      'git config user.name && git config user.email'
    )
    console.log('git author result:', gitName)
    // clone PR/newer repository/ref first to get settings
    if (!actionInfo.skipClone) {
      await cloneRepo(actionInfo.prRepo, diffRepoDir)
      await checkoutRef(actionInfo.prRef, diffRepoDir)
    }
    // load stats config from allowed locations
    const { statsConfig, relativeStatsAppDir } = loadStatsConfig()
    if (actionInfo.prRef === statsConfig.mainBranch) {
      throw new Error(
        `'GITHUB_REF' can not be the same as mainBranch in 'stats-config.js'.\n` +
          `This will result in comparing against the same branch`
      )
    }
    // clone main repository/ref
    if (!actionInfo.skipClone) {
      await cloneRepo(statsConfig.mainRepo, mainRepoDir)
      await checkoutRef(statsConfig.mainBranch, mainRepoDir)
    }
    /* eslint-disable-next-line */
    actionInfo.commitId = await getCommitId(diffRepoDir)
    if (!actionInfo.skipClone) {
      if (actionInfo.isRelease) {
        // releases compare against the last stable tag instead of HEAD
        logger('Release detected, resetting mainRepo to last stable tag')
        const lastStableTag = await getLastStable(mainRepoDir, actionInfo.prRef)
        if (!lastStableTag) throw new Error('failed to get last stable tag')
        await checkoutRef(lastStableTag, mainRepoDir)
        /* eslint-disable-next-line */
        actionInfo.lastStableTag = lastStableTag
        /* eslint-disable-next-line */
        actionInfo.commitId = await getCommitId(diffRepoDir)
        if (!actionInfo.customCommentEndpoint) {
          // post release stats as a commit comment on the main repo
          /* eslint-disable-next-line */
          actionInfo.commentEndpoint = `https://api.github.com/repos/${statsConfig.mainRepo}/commits/${actionInfo.commitId}/comments`
        }
      } else if (statsConfig.autoMergeMain) {
        logger('Attempting auto merge of main branch')
        await mergeBranch(statsConfig.mainBranch, mainRepoDir, diffRepoDir)
      }
    }
    let mainRepoPkgPaths
    let diffRepoPkgPaths
    // run install/initialBuildCommand
    const repoDirs = [mainRepoDir, diffRepoDir]
    for (const dir of repoDirs) {
      logger(`Running initial build for ${dir}`)
      if (!actionInfo.skipClone) {
        let buildCommand = `cd ${dir}${
          !statsConfig.skipInitialInstall ? ' && yarn install' : ''
        }`
        if (statsConfig.initialBuildCommand) {
          buildCommand += ` && ${statsConfig.initialBuildCommand}`
        }
        await exec(buildCommand)
      }
      // pack local packages so the stats app can install them as tarballs
      logger(`Linking packages in ${dir}`)
      const pkgPaths = await linkPackages(dir)
      if (dir === mainRepoDir) mainRepoPkgPaths = pkgPaths
      else diffRepoPkgPaths = pkgPaths
    }
    // run the configs and post the comment
    const results = await runConfigs(statsConfig.configs, {
      statsConfig,
      mainRepoPkgPaths,
      diffRepoPkgPaths,
      relativeStatsAppDir,
    })
    await addComment(results, actionInfo, statsConfig)
    logger('finished')
    process.exit(0)
  } catch (err) {
    console.error('Error occurred generating stats:')
    console.error(err)
    process.exit(1)
  }
})()
const path = require('path')
const logger = require('../util/logger')
const { execSync } = require('child_process')
const releaseTypes = new Set(['release', 'published'])
// Gathers info about the triggering GitHub event (PR repo/ref, tokens,
// comment endpoint) from environment variables and the event payload file.
module.exports = function actionInfo() {
  let {
    ISSUE_ID,
    SKIP_CLONE,
    GITHUB_REF,
    LOCAL_STATS,
    GIT_ROOT_DIR,
    GITHUB_ACTION,
    COMMENT_ENDPOINT,
    GITHUB_REPOSITORY,
    GITHUB_EVENT_PATH,
    PR_STATS_COMMENT_TOKEN,
  } = process.env
  // remove tokens from the environment so child processes can't read them
  delete process.env.GITHUB_TOKEN
  delete process.env.PR_STATS_COMMENT_TOKEN
  // only use custom endpoint if we don't have a token
  const commentEndpoint = !PR_STATS_COMMENT_TOKEN && COMMENT_ENDPOINT
  // LOCAL_STATS mode derives missing values from the local git checkout
  if (LOCAL_STATS === 'true') {
    const cwd = process.cwd()
    const parentDir = path.join(cwd, '../..')
    if (!GITHUB_REF) {
      // get the current branch name
      GITHUB_REF = execSync(`cd "${cwd}" && git rev-parse --abbrev-ref HEAD`)
        .toString()
        .trim()
    }
    if (!GIT_ROOT_DIR) {
      GIT_ROOT_DIR = path.join(parentDir, '/')
    }
    if (!GITHUB_REPOSITORY) {
      GITHUB_REPOSITORY = path.relative(parentDir, cwd)
    }
    if (!GITHUB_ACTION) {
      GITHUB_ACTION = 'opened'
    }
  }
  const info = {
    commentEndpoint,
    skipClone: SKIP_CLONE,
    actionName: GITHUB_ACTION,
    githubToken: PR_STATS_COMMENT_TOKEN,
    customCommentEndpoint: !!commentEndpoint,
    gitRoot: GIT_ROOT_DIR || 'https://github.com/',
    prRepo: GITHUB_REPOSITORY,
    prRef: GITHUB_REF,
    commitId: null,
    issueId: ISSUE_ID,
    isRelease: releaseTypes.has(GITHUB_ACTION),
  }
  // get comment
  if (GITHUB_EVENT_PATH) {
    // the event payload is a JSON file whose path GitHub provides
    const event = require(GITHUB_EVENT_PATH)
    info.actionName = event.action || info.actionName
    if (releaseTypes.has(info.actionName)) {
      info.isRelease = true
    } else {
      // Since GITHUB_REPOSITORY and REF might not match the fork
      // use event data to get repository and ref info
      const prData = event['pull_request']
      if (prData) {
        info.prRepo = prData.head.repo.full_name
        info.prRef = prData.head.ref
        info.issueId = prData.number
        if (!info.commentEndpoint) {
          info.commentEndpoint = prData._links.comments || ''
        }
        // comment endpoint might be under `href`
        if (typeof info.commentEndpoint === 'object') {
          info.commentEndpoint = info.commentEndpoint.href
        }
      }
    }
  }
  logger('Got actionInfo:')
  // log presence of the token only, never its value
  logger.json({
    ...info,
    githubToken: PR_STATS_COMMENT_TOKEN ? 'found' : 'missing',
  })
  return info
}
const path = require('path')
const logger = require('../util/logger')
const { diffRepoDir, allowedConfigLocations } = require('../constants')
// load stats-config
// Searches `allowedConfigLocations` inside the cloned PR repo for a
// stats-config.js; returns the loaded config and the directory it was
// found in (relative to the repo root). Throws if none is found.
function loadStatsConfig() {
  let statsConfig
  let relativeStatsAppDir
  for (const configPath of allowedConfigLocations) {
    try {
      relativeStatsAppDir = configPath
      statsConfig = require(path.join(
        diffRepoDir,
        configPath,
        'stats-config.js'
      ))
      break
    } catch (_) {
      // a missing config at this location is expected; try the next one
      /* */
    }
  }
  if (!statsConfig) {
    throw new Error(
      `Failed to locate \`.stats-app\`, allowed locations are: ${allowedConfigLocations.join(
        ', '
      )}`
    )
  }
  logger(
    'Got statsConfig at',
    path.join(relativeStatsAppDir, 'stats-config.js'),
    statsConfig,
    '\n'
  )
  return { statsConfig, relativeStatsAppDir }
}
module.exports = loadStatsConfig
const path = require('path')
const fs = require('fs-extra')
const exec = require('../util/exec')
const { remove } = require('fs-extra')
const logger = require('../util/logger')
module.exports = (actionInfo) => {
return {
async cloneRepo(repoPath = '', dest = '') {
await remove(dest)
await exec(`git clone ${actionInfo.gitRoot}${repoPath} ${dest}`)
},
async checkoutRef(ref = '', repoDir = '') {
await exec(`cd ${repoDir} && git fetch && git checkout ${ref}`)
},
async getLastStable(repoDir = '', ref) {
const { stdout } = await exec(`cd ${repoDir} && git tag -l`)
const tags = stdout.trim().split('\n')
let lastStableTag
for (let i = tags.length - 1; i >= 0; i--) {
const curTag = tags[i]
// stable doesn't include `-canary` or `-beta`
if (!curTag.includes('-') && !ref.includes(curTag)) {
lastStableTag = curTag
break
}
}
return lastStableTag
},
async getCommitId(repoDir = '') {
const { stdout } = await exec(`cd ${repoDir} && git rev-parse HEAD`)
return stdout.trim()
},
async resetToRef(ref = '', repoDir = '') {
await exec(`cd ${repoDir} && git reset --hard ${ref}`)
},
async mergeBranch(ref = '', origRepoDir = '', destRepoDir = '') {
await exec(`cd ${destRepoDir} && git remote add upstream ${origRepoDir}`)
await exec(`cd ${destRepoDir} && git fetch upstream`)
try {
await exec(`cd ${destRepoDir} && git merge upstream/${ref}`)
logger('Auto merge of main branch successful')
} catch (err) {
logger.error('Failed to auto merge main branch:', err)
if (err.stdout && err.stdout.includes('CONFLICT')) {
await exec(`cd ${destRepoDir} && git merge --abort`)
logger('aborted auto merge')
}
}
},
async linkPackages(repoDir = '') {
const pkgPaths = new Map()
const pkgDatas = new Map()
let pkgs
try {
pkgs = await fs.readdir(path.join(repoDir, 'packages'))
} catch (err) {
if (err.code === 'ENOENT') {
console.log('no packages to link')
return pkgPaths
}
throw err
}
for (const pkg of pkgs) {
const pkgPath = path.join(repoDir, 'packages', pkg)
const packedPkgPath = path.join(pkgPath, `${pkg}-packed.tgz`)
// pack the package with yarn
await exec(`cd ${pkgPath} && yarn pack -f ${pkg}-packed.tgz`)
const pkgDataPath = path.join(pkgPath, 'package.json')
const pkgData = require(pkgDataPath)
const { name } = pkgData
pkgDatas.set(name, { pkgDataPath, pkgData, packedPkgPath })
pkgPaths.set(name, packedPkgPath)
}
for (const pkg of pkgDatas.keys()) {
const { pkgDataPath, pkgData } = pkgDatas.get(pkg)
for (const pkg of pkgDatas.keys()) {
const { packedPkgPath } = pkgDatas.get(pkg)
if (!pkgData.dependencies || !pkgData.dependencies[pkg]) continue
pkgData.dependencies[pkg] = packedPkgPath
}
await fs.writeFile(
pkgDataPath,
JSON.stringify(pkgData, null, 2),
'utf8'
)
}
return pkgPaths
},
}
}
const exec = require('../util/exec')
// Extract the first whitespace-delimited token that follows the last
// occurrence of `field` in `stdout` (used to parse `ab` summary output).
const parseField = (stdout = '', field = '') => {
  const afterField = stdout.split(field).pop()
  const [firstToken] = afterField.trim().split(/\s/)
  return firstToken.trim()
}
// benchmark a url
// Runs ApacheBench (`ab`) against `url` and parses its summary output.
// Returns { totalTime (seconds), avgReqPerSec, failedRequests }.
async function benchmarkUrl(
  url = '',
  options = {
    reqTimeout: 60,
    concurrency: 50,
    numRequests: 2500,
  }
) {
  const { numRequests, concurrency, reqTimeout } = options
  const { stdout } = await exec(
    `ab -n ${numRequests} -c ${concurrency} -s ${reqTimeout} "${url}"`
  )
  // parseFloat takes no radix argument (a stray `10` was previously passed
  // here and silently ignored); parseInt does and must be given base 10
  const totalTime = parseFloat(parseField(stdout, 'Time taken for tests:'))
  const failedRequests = parseInt(parseField(stdout, 'Failed requests:'), 10)
  const avgReqPerSec = parseFloat(parseField(stdout, 'Requests per second:'))
  return {
    totalTime,
    avgReqPerSec,
    failedRequests,
  }
}
module.exports = benchmarkUrl
const path = require('path')
const fs = require('fs-extra')
const exec = require('../util/exec')
const glob = require('../util/glob')
const logger = require('../util/logger')
const { statsAppDir, diffingDir } = require('../constants')
// Copies the tracked output files into a git-backed diffing directory and
// returns a map of fileKey -> diff text (plus a `_renames` list), using git
// to detect renames, deletions, and content changes between runs.
// `initial = true` (re)creates the directory and records the baseline commit.
module.exports = async function collectDiffs(
  filesToTrack = [],
  initial = false
) {
  if (initial) {
    logger('Setting up directory for diffing')
    // set-up diffing directory
    await fs.remove(diffingDir)
    await fs.mkdirp(diffingDir)
    await exec(`cd ${diffingDir} && git init`)
  } else {
    // remove any previous files in case they won't be overwritten
    const toRemove = await glob('!(.git)', { cwd: diffingDir, dot: true })
    await Promise.all(
      toRemove.map((file) => fs.remove(path.join(diffingDir, file)))
    )
  }
  const diffs = {}
  await Promise.all(
    filesToTrack.map(async (fileGroup) => {
      const { globs } = fileGroup
      const curFiles = []
      await Promise.all(
        globs.map(async (pattern) => {
          curFiles.push(...(await glob(pattern, { cwd: statsAppDir })))
        })
      )
      // copy each matched file from the stats app into the diffing dir
      for (let file of curFiles) {
        const absPath = path.join(statsAppDir, file)
        const diffDest = path.join(diffingDir, file)
        await fs.copy(absPath, diffDest)
      }
      // normalize formatting with prettier so diffs only show real changes
      if (curFiles.length > 0) {
        await exec(
          `cd "${process.env.LOCAL_STATS ? process.cwd() : diffingDir}" && ` +
            `yarn prettier --write ${curFiles
              .map((f) => path.join(diffingDir, f))
              .join(' ')}`
        )
      }
    })
  )
  await exec(`cd ${diffingDir} && git add .`, true)
  if (initial) {
    await exec(`cd ${diffingDir} && git commit -m 'initial commit'`)
  } else {
    // detect renames (git status lines starting with "R") and move the
    // files back to their previous names so content diffs line up
    let { stdout: renamedFiles } = await exec(
      `cd ${diffingDir} && git diff --name-status HEAD`
    )
    renamedFiles = renamedFiles
      .trim()
      .split('\n')
      .filter((line) => line.startsWith('R'))
    diffs._renames = []
    for (const line of renamedFiles) {
      const [, prev, cur] = line.split('\t')
      await fs.move(path.join(diffingDir, cur), path.join(diffingDir, prev))
      diffs._renames.push({
        prev,
        cur,
      })
    }
    await exec(`cd ${diffingDir} && git add .`)
    let { stdout: changedFiles } = await exec(
      `cd ${diffingDir} && git diff --name-only HEAD`
    )
    changedFiles = changedFiles.trim().split('\n')
    for (const file of changedFiles) {
      const fileKey = path.basename(file)
      const hasFile = await fs.exists(path.join(diffingDir, file))
      if (!hasFile) {
        diffs[fileKey] = 'deleted'
        continue
      }
      try {
        let { stdout } = await exec(
          `cd ${diffingDir} && git diff --minimal HEAD ${file}`
        )
        // strip the diff header (everything up to the file name)
        stdout = (stdout.split(file).pop() || '').trim()
        if (stdout.length > 0) {
          diffs[fileKey] = stdout
        }
      } catch (err) {
        console.error(`Failed to diff ${file}: ${err.message}`)
        diffs[fileKey] = `failed to diff`
      }
    }
  }
  return diffs
}
const path = require('path')
const fs = require('fs-extra')
const getPort = require('get-port')
const fetch = require('node-fetch')
const glob = require('../util/glob')
const gzipSize = require('gzip-size')
const logger = require('../util/logger')
const { spawn } = require('../util/exec')
const { parse: urlParse } = require('url')
const benchmarkUrl = require('./benchmark-url')
const { statsAppDir, diffingDir, benchTitle } = require('../constants')
// Collects stats for one run: optionally starts the app to fetch pages and
// run `ab` benchmarks, then records file size (and gzip size) for every
// tracked file group. Returns the stats re-keyed into config order with the
// benchmark group last.
module.exports = async function collectStats(
  runConfig = {},
  statsConfig = {},
  fromDiff = false
) {
  const stats = {
    [benchTitle]: {},
  }
  const orderedStats = {
    [benchTitle]: {},
  }
  const curDir = fromDiff ? diffingDir : statsAppDir
  const hasPagesToFetch =
    Array.isArray(runConfig.pagesToFetch) && runConfig.pagesToFetch.length > 0
  const hasPagesToBench =
    Array.isArray(runConfig.pagesToBench) && runConfig.pagesToBench.length > 0
  // only start the app when there is something to fetch/bench and we are
  // not collecting stats for a diff run
  if (
    !fromDiff &&
    statsConfig.appStartCommand &&
    (hasPagesToFetch || hasPagesToBench)
  ) {
    const port = await getPort()
    const child = spawn(statsConfig.appStartCommand, {
      cwd: curDir,
      env: {
        PORT: port,
      },
      stdio: 'pipe',
    })
    let exitCode = null
    let logStderr = true
    child.stdout.on('data', (data) => process.stdout.write(data))
    child.stderr.on('data', (data) => logStderr && process.stderr.write(data))
    child.on('exit', (code) => {
      exitCode = code
    })
    // give app a second to start up
    await new Promise((resolve) => setTimeout(() => resolve(), 1500))
    // a non-null exit code at this point means the app already crashed
    if (exitCode !== null) {
      throw new Error(
        `Failed to run \`${statsConfig.appStartCommand}\` process exited with code ${exitCode}`
      )
    }
    if (hasPagesToFetch) {
      const fetchedPagesDir = path.join(curDir, 'fetched-pages')
      await fs.mkdirp(fetchedPagesDir)
      for (let url of runConfig.pagesToFetch) {
        url = url.replace('$PORT', port)
        const { pathname } = urlParse(url)
        try {
          const res = await fetch(url)
          if (!res.ok) {
            throw new Error(`Failed to fetch ${url} got status: ${res.status}`)
          }
          const responseText = (await res.text()).trim()
          // map "/" to index and strip any trailing slash for the file name
          let fileName = pathname === '/' ? '/index' : pathname
          if (fileName.endsWith('/'))
            fileName = fileName.substr(0, fileName.length - 1)
          logger(
            `Writing file to ${path.join(fetchedPagesDir, `${fileName}.html`)}`
          )
          await fs.writeFile(
            path.join(fetchedPagesDir, `${fileName}.html`),
            responseText,
            'utf8'
          )
        } catch (err) {
          logger.error(err)
        }
      }
    }
    if (hasPagesToBench) {
      // disable stderr so we don't clobber logs while benchmarking
      // any pages that create logs
      logStderr = false
      for (let url of runConfig.pagesToBench) {
        url = url.replace('$PORT', port)
        logger(`Benchmarking ${url}`)
        const results = await benchmarkUrl(url, runConfig.benchOptions)
        logger(`Finished benchmarking ${url}`)
        const { pathname: key } = urlParse(url)
        stats[benchTitle][`${key} failed reqs`] = results.failedRequests
        stats[benchTitle][`${key} total time (seconds)`] = results.totalTime
        stats[benchTitle][`${key} avg req/sec`] = results.avgReqPerSec
      }
    }
    child.kill()
  }
  // record raw size and gzip size for every tracked file group
  for (const fileGroup of runConfig.filesToTrack) {
    const { name, globs } = fileGroup
    const groupStats = {}
    const curFiles = new Set()
    for (const pattern of globs) {
      const results = await glob(pattern, { cwd: curDir, nodir: true })
      results.forEach((result) => curFiles.add(result))
    }
    for (const file of curFiles) {
      const fileKey = path.basename(file)
      const absPath = path.join(curDir, file)
      try {
        const fileInfo = await fs.stat(absPath)
        groupStats[fileKey] = fileInfo.size
        groupStats[`${fileKey} gzip`] = await gzipSize.file(absPath)
      } catch (err) {
        logger.error('Failed to get file stats', err)
      }
    }
    stats[name] = groupStats
  }
  // emit groups in config declaration order, with benchmarks last
  for (const fileGroup of runConfig.filesToTrack) {
    const { name } = fileGroup
    orderedStats[name] = stats[name]
  }
  if (stats[benchTitle]) {
    orderedStats[benchTitle] = stats[benchTitle]
  }
  return orderedStats
}
const path = require('path')
const fs = require('fs-extra')
// getDirSize recursively gets size of all files in a directory
// `ctx.size` is a shared accumulator threaded through the recursive calls;
// returns the total size in bytes.
async function getDirSize(dir, ctx = { size: 0 }) {
  let subDirs = await fs.readdir(dir)
  subDirs = subDirs.map((d) => path.join(dir, d))
  // stat all entries in parallel, recursing into subdirectories
  await Promise.all(
    subDirs.map(async (curDir) => {
      const fileStat = await fs.stat(curDir)
      if (fileStat.isDirectory()) {
        return getDirSize(curDir, ctx)
      }
      ctx.size += fileStat.size
    })
  )
  return ctx.size
}
module.exports = getDirSize
const path = require('path')
const fs = require('fs-extra')
const glob = require('../util/glob')
const exec = require('../util/exec')
const logger = require('../util/logger')
const getDirSize = require('./get-dir-size')
const collectStats = require('./collect-stats')
const collectDiffs = require('./collect-diffs')
const { statsAppDir, diffRepoDir, mainRepoDir } = require('../constants')
// Runs each stats config against the main repo and then the PR (diff) repo:
// copies the stats app, applies config files, links packed packages, builds,
// applies renames for deterministic names, collects stats, and — when a
// change is detected — recursively re-runs in `diffing` mode to capture
// file diffs. Returns one result entry per config.
async function runConfigs(
  configs = [],
  { statsConfig, relativeStatsAppDir, mainRepoPkgPaths, diffRepoPkgPaths },
  diffing = false
) {
  const results = []
  for (const config of configs) {
    logger(`Running config: ${config.title}${diffing ? ' (diff)' : ''}`)
    let mainRepoStats
    let diffRepoStats
    let diffs
    // first iteration measures the main repo, second the PR repo
    for (const pkgPaths of [mainRepoPkgPaths, diffRepoPkgPaths]) {
      let curStats = {
        General: {
          buildDuration: null,
          nodeModulesSize: null,
        },
      }
      // if stats-config is in root of project we're analyzing
      // the whole project so copy from each repo
      const curStatsAppPath =
        relativeStatsAppDir === './'
          ? mainRepoStats
            ? diffRepoDir
            : mainRepoDir
          : path.join(diffRepoDir, relativeStatsAppDir)
      // clean statsAppDir
      await fs.remove(statsAppDir)
      await fs.copy(curStatsAppPath, statsAppDir)
      logger(`Copying ${curStatsAppPath} ${statsAppDir}`)
      // apply config files
      for (const configFile of config.configFiles || []) {
        const filePath = path.join(statsAppDir, configFile.path)
        await fs.writeFile(filePath, configFile.content, 'utf8')
      }
      // links local builds of the packages and installs dependencies
      await linkPkgs(statsAppDir, pkgPaths)
      if (!diffing) {
        curStats.General.nodeModulesSize = await getDirSize(
          path.join(statsAppDir, 'node_modules')
        )
      }
      const buildStart = new Date().getTime()
      await exec(`cd ${statsAppDir} && ${statsConfig.appBuildCommand}`)
      curStats.General.buildDuration = new Date().getTime() - buildStart
      // apply renames to get deterministic output names
      // (`renames` is optional in stats-config, so default to an empty
      // list instead of throwing when it is omitted)
      for (const rename of config.renames || []) {
        const results = await glob(rename.srcGlob, { cwd: statsAppDir })
        if (results.length === 0 || results[0] === rename.dest) continue
        await fs.move(
          path.join(statsAppDir, results[0]),
          path.join(statsAppDir, rename.dest)
        )
      }
      const collectedStats = await collectStats(config, statsConfig)
      curStats = {
        ...curStats,
        ...collectedStats,
      }
      // map renamed files back to their previous keys in the stats object
      const applyRenames = (renames, stats) => {
        if (renames) {
          for (const rename of renames) {
            Object.keys(stats).forEach((group) => {
              Object.keys(stats[group]).forEach((item) => {
                let { cur, prev } = rename
                cur = path.basename(cur)
                prev = path.basename(prev)
                if (cur === item) {
                  stats[group][prev] = stats[group][item]
                  stats[group][prev + ' gzip'] = stats[group][item + ' gzip']
                  delete stats[group][item]
                  delete stats[group][item + ' gzip']
                }
              })
            })
          }
        }
      }
      if (mainRepoStats) {
        diffRepoStats = curStats
        if (!diffing && config.diff !== false) {
          for (const groupKey of Object.keys(curStats)) {
            if (groupKey === 'General') continue
            let changeDetected = config.diff === 'always'
            const curDiffs = await collectDiffs(config.filesToTrack)
            changeDetected = changeDetected || Object.keys(curDiffs).length > 0
            applyRenames(curDiffs._renames, diffRepoStats)
            delete curDiffs._renames
            if (changeDetected) {
              logger('Detected change, running diff')
              diffs = await runConfigs(
                [
                  {
                    ...config,
                    configFiles: config.diffConfigFiles,
                  },
                ],
                {
                  statsConfig,
                  mainRepoPkgPaths,
                  diffRepoPkgPaths,
                  relativeStatsAppDir,
                },
                true
              )
              delete diffs._renames
              break
            }
          }
        }
        if (diffing) {
          // copy new files and get diff results
          return collectDiffs(config.filesToTrack)
        }
      } else {
        // set up diffing folder and copy initial files
        await collectDiffs(config.filesToTrack, true)
        /* eslint-disable-next-line */
        mainRepoStats = curStats
      }
    }
    logger(`Finished running: ${config.title}`)
    results.push({
      title: config.title,
      mainRepoStats,
      diffRepoStats,
      diffs,
    })
  }
  return results
}
// Rewrites the stats app's package.json so dependencies on locally packed
// packages point at their tarball paths, then installs with yarn.
async function linkPkgs(pkgDir = '', pkgPaths) {
  await fs.remove(path.join(pkgDir, 'node_modules'))
  const pkgJsonPath = path.join(pkgDir, 'package.json')
  const pkgData = require(pkgJsonPath)
  // nothing to link or install when the app declares no dependencies
  if (!pkgData.dependencies && !pkgData.devDependencies) return
  for (const pkg of pkgPaths.keys()) {
    const pkgPath = pkgPaths.get(pkg)
    if (pkgData.dependencies && pkgData.dependencies[pkg]) {
      pkgData.dependencies[pkg] = pkgPath
    } else if (pkgData.devDependencies && pkgData.devDependencies[pkg]) {
      pkgData.devDependencies[pkg] = pkgPath
    }
  }
  await fs.writeFile(pkgJsonPath, JSON.stringify(pkgData, null, 2), 'utf8')
  await exec(`cd ${pkgDir} && yarn install`)
}
module.exports = runConfigs
const logger = require('./logger')
const { promisify } = require('util')
const { exec: execOrig, spawn: spawnOrig } = require('child_process')
const execP = promisify(execOrig)
// Copy of the process environment with auth tokens blanked out so child
// processes cannot leak them into logs or spawned commands
const env = {
  ...process.env,
  GITHUB_TOKEN: '',
  PR_STATS_COMMENT_TOKEN: '',
}
// Run a shell command as a promise, logging it first unless noLog is set.
// Returns the promisified child_process.exec result ({ stdout, stderr }).
function exec(command, noLog = false) {
  // three-minute ceiling keeps a runaway command from hanging the action
  const timeout = 180 * 1000
  if (!noLog) {
    logger(`exec: ${command}`)
  }
  return execP(command, { env, timeout })
}
// Spawn a long-running command through bash, inheriting stdio by default.
// Logs the command on start and its (code, signal) on exit; returns the child.
exec.spawn = function spawn(command = '', opts = {}) {
  logger(`spawn: ${command}`)

  // caller-provided env entries win over the sanitized base env
  const spawnOptions = {
    ...opts,
    env: {
      ...env,
      ...opts.env,
    },
    stdio: opts.stdio || 'inherit',
  }

  const child = spawnOrig('/bin/bash', ['-c', command], spawnOptions)
  child.on('exit', (code, signal) => {
    logger(`spawn exit (${code}, ${signal}): ${command}`)
  })
  return child
}
module.exports = exec
// Promise-returning wrapper around glob so callers can `await glob(pattern)`
const globOrig = require('glob')
const { promisify } = require('util')
module.exports = promisify(globOrig)
// Thin console wrappers so all call sites share a single logging entry point.
function logger(...messages) {
  console.log(...messages)
}

// Pretty-print an object as 2-space-indented JSON surrounded by blank lines.
logger.json = function json(obj) {
  logger('\n', JSON.stringify(obj, null, 2), '\n')
}

logger.error = function error(...messages) {
  console.error(...messages)
}

logger.warn = function warn(...messages) {
  console.warn(...messages)
}
module.exports = logger
......@@ -9,4 +9,5 @@ jobs:
name: PR Stats
runs-on: ubuntu-latest
steps:
- uses: zeit/next-stats-action@master
- uses: actions/checkout@v2
- uses: ./.github/actions/next-stats-action
......@@ -7,6 +7,7 @@ jobs:
name: Release Stats
runs-on: ubuntu-latest
steps:
- uses: zeit/next-stats-action@master
- uses: actions/checkout@v2
- uses: ./.github/actions/next-stats-action
env:
PR_STATS_COMMENT_TOKEN: ${{ secrets.PR_STATS_COMMENT_TOKEN }}
......@@ -29,3 +29,4 @@ test/**/next-env.d.ts
examples/**/out
examples/**/.env*.local
pr-stats.md
......@@ -8,3 +8,4 @@ packages/react-refresh-utils/**/*.d.ts
packages/react-dev-overlay/lib/**
**/__tmp__/**
lerna.json
.github/actions/next-stats-action/.work
\ No newline at end of file
......@@ -14,6 +14,7 @@
"testfirefox": "cross-env BROWSER_NAME=firefox yarn testonly",
"testie": "cross-env BROWSER_NAME=\"internet explorer\" yarn testonly",
"testall": "yarn run testonly -- --ci --forceExit",
"genstats": "cross-env LOCAL_STATS=true node .github/actions/next-stats-action/src/index.js",
"pretest": "yarn run lint",
"git-reset": "git reset --hard HEAD",
"git-clean": "git clean -d -x -e node_modules -e packages -f",
......@@ -79,6 +80,8 @@
"firebase": "6.3.4",
"fs-extra": "9.0.0",
"get-port": "5.1.1",
"glob": "7.1.6",
"gzip-size": "5.1.1",
"isomorphic-unfetch": "3.0.0",
"jest-circus": "26.0.1",
"jest-cli": "24.9.0",
......@@ -87,6 +90,7 @@
"lerna": "3.14.1",
"lint-staged": "10.1.7",
"lost": "8.3.1",
"minimatch": "3.0.4",
"moment": "^2.24.0",
"node-fetch": "2.6.0",
"node-notifier": "5.4.0",
......@@ -100,6 +104,8 @@
"postcss-trolling": "0.1.7",
"pre-commit": "1.2.2",
"prettier": "2.0.5",
"pretty-bytes": "5.3.0",
"pretty-ms": "7.0.0",
"react": "16.12.0",
"react-dom": "16.12.0",
"react-ssr-prepass": "1.0.8",
......
......@@ -7418,9 +7418,10 @@ glob-to-regexp@^0.4.1:
version "0.4.1"
resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e"
glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@~7.1.1:
glob@7.1.6, glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@~7.1.1:
version "7.1.6"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6"
integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==
dependencies:
fs.realpath "^1.0.0"
inflight "^1.0.4"
......@@ -7574,6 +7575,7 @@ grpc@1.22.2:
gzip-size@5.1.1, gzip-size@^5.0.0:
version "5.1.1"
resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-5.1.1.tgz#cb9bee692f87c0612b232840a873904e4c135274"
integrity sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA==
dependencies:
duplexer "^0.1.1"
pify "^4.0.1"
......@@ -10462,9 +10464,10 @@ minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a"
minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4, minimatch@~3.0.2:
minimatch@3.0.4, minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.4, minimatch@~3.0.2:
version "3.0.4"
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
dependencies:
brace-expansion "^1.1.7"
......@@ -11578,6 +11581,11 @@ parse-json@^5.0.0:
json-parse-better-errors "^1.0.1"
lines-and-columns "^1.1.6"
parse-ms@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/parse-ms/-/parse-ms-2.1.0.tgz#348565a753d4391fa524029956b172cb7753097d"
integrity sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==
parse-passwd@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6"
......@@ -12778,16 +12786,17 @@ prettier@2.0.5:
version "2.0.5"
resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.0.5.tgz#d6d56282455243f2f92cc1716692c08aa31522d4"
pretty-bytes@5.3.0, pretty-bytes@^5.1.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.3.0.tgz#f2849e27db79fb4d6cfe24764fc4134f165989f2"
integrity sha512-hjGrh+P926p4R4WbaB6OckyRtO0F0/lQBiT+0gnxjV+5kjPBrfVBFCsCLbMqVQeydvIoouYTCmmEURiH3R1Bdg==
pretty-bytes@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-3.0.1.tgz#27d0008d778063a0b4811bb35c79f1bd5d5fbccf"
dependencies:
number-is-nan "^1.0.0"
pretty-bytes@^5.1.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.3.0.tgz#f2849e27db79fb4d6cfe24764fc4134f165989f2"
pretty-format@^24.9.0:
version "24.9.0"
resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-24.9.0.tgz#12fac31b37019a4eea3c11aa9a959eb7628aa7c9"
......@@ -12810,6 +12819,13 @@ pretty-hrtime@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz#b7e3ea42435a4c9b2759d99e0f201eb195802ee1"
pretty-ms@7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/pretty-ms/-/pretty-ms-7.0.0.tgz#45781273110caf35f55cab21a8a9bd403a233dc0"
integrity sha512-J3aPWiC5e9ZeZFuSeBraGxSkGMOvulSWsxDByOcbD1Pr75YL3LSNIKIb52WXbCLE1sS5s4inBBbryjF4Y05Ceg==
dependencies:
parse-ms "^2.1.0"
private@^0.1.8:
version "0.1.8"
resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff"
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册