Merge branch 'canary' into fix/antd
shuding authored Nov 13, 2023
2 parents 7bc4f18 + a01beaf commit c4bc5b2
Showing 493 changed files with 361,545 additions and 10,846 deletions.
2 changes: 1 addition & 1 deletion .github/actions/next-stats-action/src/index.js
@@ -133,7 +133,7 @@ if (!allowedActions.has(actionInfo.actionName) && !actionInfo.isRelease) {
const isMainRepo = dir === mainRepoDir
const pkgPaths = await linkPackages({
repoDir: dir,
nextSwcVersion: isMainRepo ? mainNextSwcVersion : undefined,
nextSwcVersion: isMainRepo ? mainNextSwcVersion : null,
})

if (isMainRepo) mainRepoPkgPaths = pkgPaths
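For context, the undefined-to-null swap here lines up with the nextSwcVersion: null | string JSDoc type that repo-setup.js documents below. A minimal sketch of that contract; the function body is illustrative only and not taken from the diff:

/**
 * @param {{ repoDir: string, nextSwcVersion: null | string }} options
 */
async function linkPackages({ repoDir, nextSwcVersion }) {
  // An explicit null matches the documented `null | string` type more
  // cleanly than `undefined` when no @next/swc version is pinned.
  if (nextSwcVersion !== null) {
    // pin the @next/swc-* platform packages to the published version
  }
}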
148 changes: 85 additions & 63 deletions .github/actions/next-stats-action/src/prepare/repo-setup.js
@@ -1,5 +1,5 @@
const path = require('path')
const fs = require('fs/promises')
const fs = require('fs')
const { existsSync } = require('fs')
const exec = require('../util/exec')
const logger = require('../util/logger')
@@ -8,7 +8,7 @@ const execa = require('execa')
module.exports = (actionInfo) => {
return {
async cloneRepo(repoPath = '', dest = '', branch = '', depth = '20') {
await fs.rm(dest, { recursive: true, force: true })
await fs.promises.rm(dest, { recursive: true, force: true })
await exec(
`git clone ${actionInfo.gitRoot}${repoPath} --single-branch --branch ${branch} --depth=${depth} ${dest}`
)
@@ -52,25 +52,23 @@ module.exports = (actionInfo) => {
}
}
},
/**
* Runs `pnpm pack` on each package in the `packages` folder of the provided `repoDir`
* @param {{ repoDir: string, nextSwcVersion: null | string }} options Required options
* @returns {Promise<Map<string, string>>} Map of packed packages; the key is the package name, the value is the path to the packed tar file.
*/
async linkPackages({ repoDir, nextSwcVersion }) {
/** @type {Map<string, string>} */
const pkgPaths = new Map()

/**
* @typedef {Object} PkgData
* @property {string} pkgDataPath Where the package.json file is located
* @property {string} pkg The folder name of the package
* @property {string} pkgPath The path to the package folder
* @property {any} pkgData The content of package.json
* @property {string} packedPkgPath The npm pack output .tgz file path
*/

/** @type {Map<string, PkgData>} */
/** @type {Map<string, { packageJsonPath: string, packagePath: string, packageJson: any, packedPackageTarPath: string }>} */
const pkgDatas = new Map()

let pkgs
let packageFolders

try {
pkgs = await fs.readdir(path.join(repoDir, 'packages'))
packageFolders = await fs.promises.readdir(
path.join(repoDir, 'packages')
)
} catch (err) {
if (err.code === 'ENOENT') {
require('console').log('no packages to link')
@@ -79,50 +77,60 @@ module.exports = (actionInfo) => {
throw err
}

await Promise.all(
pkgs.map(async (pkg) => {
const pkgPath = path.join(repoDir, 'packages', pkg)
const packedPkgPath = path.join(pkgPath, `${pkg}-packed.tgz`)

const pkgDataPath = path.join(pkgPath, 'package.json')
if (existsSync(pkgDataPath)) {
const pkgData = JSON.parse(await fs.readFile(pkgDataPath))
const { name } = pkgData

pkgDatas.set(name, {
pkgDataPath,
pkg,
pkgPath,
pkgData,
packedPkgPath,
})
pkgPaths.set(name, packedPkgPath)
} else {
require('console').log(`Skipping ${pkgDataPath}`)
}
for (const packageFolder of packageFolders) {
const packagePath = path.join(repoDir, 'packages', packageFolder)
const packedPackageTarPath = path.join(
packagePath,
`${packageFolder}-packed.tgz`
)
const packageJsonPath = path.join(packagePath, 'package.json')

if (!existsSync(packageJsonPath)) {
require('console').log(`Skipping ${packageFolder}, no package.json`)
continue
}

const packageJson = JSON.parse(fs.readFileSync(packageJsonPath))
const { name: packageName } = packageJson

pkgDatas.set(packageName, {
packageJsonPath,
packagePath,
packageJson,
packedPackageTarPath,
})
)
pkgPaths.set(packageName, packedPackageTarPath)
}

for (const [
pkg,
{ pkgDataPath, pkgData, pkgPath },
packageName,
{ packageJsonPath, packagePath, packageJson },
] of pkgDatas.entries()) {
// update the current package dependencies to point to packed tgz path
for (const [pkg, { packedPkgPath }] of pkgDatas.entries()) {
if (!pkgData.dependencies || !pkgData.dependencies[pkg]) continue
pkgData.dependencies[pkg] = packedPkgPath
// Loop through all packages to get the packedPackageTarPath of each one and point the matching packageJson.dependencies entry at it
for (const [
packageName,
{ packedPackageTarPath },
] of pkgDatas.entries()) {
if (
!packageJson.dependencies ||
!packageJson.dependencies[packageName]
)
continue
// Edit the packageJson of the current item to point to the packed tgz
packageJson.dependencies[packageName] = packedPackageTarPath
}

// make sure native binaries are included in local linking
if (pkg === '@next/swc') {
pkgData.files ||= []
if (packageName === '@next/swc') {
packageJson.files ||= []

pkgData.files.push('native')
packageJson.files.push('native')

try {
const swcBinariesDirContents = await fs.readdir(
path.join(pkgPath, 'native')
)
const swcBinariesDirContents = (
await fs.promises.readdir(path.join(packagePath, 'native'))
).filter((file) => file !== '.gitignore' && file !== 'index.d.ts')

require('console').log(
'using swc binaries: ',
swcBinariesDirContents.join(', ')
@@ -133,29 +141,28 @@ module.exports = (actionInfo) => {
}
throw err
}
} else if (pkg === 'next') {
} else if (packageName === 'next') {
const nextSwcPkg = pkgDatas.get('@next/swc')

console.log('using swc dep', {
nextSwcVersion,
nextSwcPkg,
})
if (nextSwcVersion) {
Object.assign(pkgData.dependencies, {
Object.assign(packageJson.dependencies, {
'@next/swc-linux-x64-gnu': nextSwcVersion,
})
} else {
if (nextSwcPkg) {
pkgData.dependencies['@next/swc'] = nextSwcPkg.packedPkgPath
} else {
pkgData.files.push('native')
packageJson.dependencies['@next/swc'] =
nextSwcPkg.packedPackageTarPath
}
}
}

await fs.writeFile(
pkgDataPath,
JSON.stringify(pkgData, null, 2),
await fs.promises.writeFile(
packageJsonPath,
JSON.stringify(packageJson, null, 2),
'utf8'
)
}
@@ -164,11 +171,14 @@ module.exports = (actionInfo) => {
// to the correct versions
await Promise.all(
Array.from(pkgDatas.entries()).map(
async ([pkgName, { pkgPath, packedPkgPath }]) => {
async ([
packageName,
{ packagePath: pkgPath, packedPackageTarPath: packedPkgPath },
]) => {
/** @type {null | () => Promise<void>} */
let cleanup = null

if (pkgName === '@next/swc') {
if (packageName === '@next/swc') {
// next-swc uses a gitignore to prevent the committing of native builds but it doesn't
// use files in package.json because it publishes to individual packages based on architecture.
// When we used yarn to pack these packages the gitignore was ignored so the native builds were packed
@@ -184,9 +194,15 @@ module.exports = (actionInfo) => {
'disabled-native-gitignore'
)

await fs.rename(nativeGitignorePath, renamedGitignorePath)
await fs.promises.rename(
nativeGitignorePath,
renamedGitignorePath
)
cleanup = async () => {
await fs.rename(renamedGitignorePath, nativeGitignorePath)
await fs.promises.rename(
renamedGitignorePath,
nativeGitignorePath
)
}
}

@@ -198,13 +214,19 @@ module.exports = (actionInfo) => {
},
})

return Promise.all([
fs.rename(path.resolve(pkgPath, stdout.trim()), packedPkgPath),
const packedFileName = stdout.trim()

await Promise.all([
fs.promises.rename(
path.join(pkgPath, packedFileName),
packedPkgPath
),
cleanup?.(),
])
}
)
)

return pkgPaths
},
}
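After the rename, linkPackages still does the same job: read packages/, run pnpm pack on each workspace package, rewrite intra-repo dependencies to point at the packed tarballs, and return a Map of package name to tarball path. A rough sketch of a consumer of that map; the app directory and install command below are assumptions, not part of this diff:

const { execSync } = require('child_process')

// pkgPaths is the Map<string, string> returned by linkPackages():
// package name -> path of its packed .tgz file.
function installPackedNext(appDir, pkgPaths) {
  const packedNext = pkgPaths.get('next')
  if (!packedNext) throw new Error('the next package was not packed')
  // Install the freshly packed tarball instead of the registry build.
  execSync(`pnpm add ${packedNext}`, { cwd: appDir, stdio: 'inherit' })
}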
5 changes: 4 additions & 1 deletion .github/actions/next-stats-action/src/run/collect-diffs.js
@@ -39,6 +39,9 @@ module.exports = async function collectDiffs(
})
)

logger('Tracking the following files:')
logger(curFiles)

for (let file of curFiles) {
const absPath = path.join(statsAppDir, file)

@@ -53,7 +56,7 @@
)
await exec(
`cd "${process.env.LOCAL_STATS ? process.cwd() : diffingDir}" && ` +
`${prettierPath} --write ${curFiles
`${prettierPath} --write --no-error-on-unmatched-pattern ${curFiles
.map((f) => path.join(diffingDir, f))
.join(' ')}`
)
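The collect-diffs change logs which files are being tracked and passes prettier's --no-error-on-unmatched-pattern flag, presumably so that a tracked file that has disappeared from the diffing directory no longer fails the whole formatting step. A standalone sketch of the command being assembled, with placeholder paths and file names:

const path = require('path')

const diffingDir = '/tmp/diffing' // placeholder
const prettierPath = 'npx prettier' // placeholder for the resolved binary
const curFiles = ['.next/build-manifest.json', '.next/routes-manifest.json'] // example tracked files

const cmd =
  `cd "${diffingDir}" && ` +
  `${prettierPath} --write --no-error-on-unmatched-pattern ${curFiles
    .map((f) => path.join(diffingDir, f))
    .join(' ')}`

console.log(cmd)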
2 changes: 1 addition & 1 deletion .github/labeler.json
@@ -44,7 +44,7 @@
{ "type": "user", "pattern": "s3prototype" },
{ "type": "user", "pattern": "manovotny" }
],
"created-by: web-tooling team": [
"created-by: turbopack team": [
{ "type": "user", "pattern": "ForsakenHarmony" },
{ "type": "user", "pattern": "jridgewell" },
{ "type": "user", "pattern": "kdy1" },
10 changes: 8 additions & 2 deletions .github/workflows/update-turbopack-test-manifest.yml
@@ -3,8 +3,8 @@ name: Update Turbopack test manifest

on:
schedule:
# Every hour
- cron: '0 * * * *'
# Every day at 9AM https://crontab.guru/#0_9_*_*_*
- cron: '0 9 * * *'
workflow_dispatch:

jobs:
@@ -25,6 +25,12 @@ jobs:
node-version: ${{ env.NODE_LTS_VERSION }}
check-latest: true

- run: corepack enable

- name: Install dependencies
shell: bash
run: pnpm i

- name: Create Pull Request
shell: bash
run: node scripts/automated-update-workflow.js
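For reference, the new schedule 0 9 * * * fires once per day at 09:00 UTC (minute 0, hour 9, every day of month, every month, every day of week), whereas the previous 0 * * * * fired at the top of every hour; GitHub Actions evaluates cron schedules in UTC. The added corepack enable and pnpm i steps make pnpm available and install dependencies before scripts/automated-update-workflow.js opens the pull request.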