Skip to content

Commit

Permalink
fix: cleanup (#1457)
Browse files Browse the repository at this point in the history
  • Loading branch information
dnalborczyk authored May 30, 2022
1 parent 5faa4d1 commit 4e29466
Show file tree
Hide file tree
Showing 6 changed files with 94 additions and 82 deletions.
14 changes: 7 additions & 7 deletions src/events/http/lambda-events/LambdaProxyIntegrationEventV2.js
Original file line number Diff line number Diff line change
Expand Up @@ -159,15 +159,16 @@ export default class LambdaProxyIntegrationEventV2 {
})

return {
version: '2.0',
routeKey: this.#routeKey,
rawPath: this.#request.url.pathname,
rawQueryString: this.#request.url.searchParams.toString(),
body,
cookies,
headers,
isBase64Encoded: false,
pathParameters: nullIfEmpty(pathParams),
queryStringParameters: this.#request.url.search
? fromEntries(Array.from(this.#request.url.searchParams))
: null,
rawPath: this.#request.url.pathname,
rawQueryString: this.#request.url.searchParams.toString(),
requestContext: {
accountId: 'offlineContext_accountId',
apiId: 'offlineContext_apiId',
Expand Down Expand Up @@ -195,10 +196,9 @@ export default class LambdaProxyIntegrationEventV2 {
time: requestTime,
timeEpoch: requestTimeEpoch,
},
body,
pathParameters: nullIfEmpty(pathParams),
isBase64Encoded: false,
routeKey: this.#routeKey,
stageVariables: this.#stageVariables,
version: '2.0',
}
}
}
3 changes: 2 additions & 1 deletion src/lambda/handler-runner/docker-runner/DockerContainer.js
Original file line number Diff line number Diff line change
Expand Up @@ -346,7 +346,8 @@ export default class DockerContainer {
return
}

const fileStream = createWriteStream(`${layerZipFile}`)
const fileStream = createWriteStream(layerZipFile)

await new Promise((resolve, reject) => {
res.body.pipe(fileStream)
res.body.on('error', (err) => {
Expand Down
73 changes: 4 additions & 69 deletions src/lambda/handler-runner/in-process-runner/InProcessRunner.js
Original file line number Diff line number Diff line change
@@ -1,92 +1,27 @@
import { readdirSync } from 'node:fs'
import { createRequire } from 'node:module'
import { dirname, resolve } from 'node:path'
import { performance } from 'node:perf_hooks'
import process from 'node:process'
import clearModule from './clearModule.js'

const { assign, keys } = Object
const { assign } = Object

const require = createRequire(import.meta.url)

function clearModule(fP, opts) {
  // Null-safe options object; `opts` itself may be undefined.
  const options = opts ?? {}
  // Local CJS require gives access to the process-wide module cache
  // (Module._cache), which is shared across all createRequire instances.
  const require = createRequire(import.meta.url)
  let filePath = fP

  // The handler path may be given without an extension; probe the directory
  // for a cached sibling such as `<filePath>.js` and evict that instead.
  if (!require.cache[filePath]) {
    const dirName = dirname(filePath)
    for (const fn of readdirSync(dirName)) {
      const fullPath = resolve(dirName, fn)
      if (fullPath.startsWith(`${filePath}.`) && require.cache[fullPath]) {
        filePath = fullPath
        break
      }
    }
  }

  const cached = require.cache[filePath]
  if (!cached) {
    return
  }

  // Detach the module from its parent's children list so the parent no
  // longer pins it in memory.
  if (cached.parent) {
    const siblings = cached.parent.children
    for (let i = siblings.length - 1; i >= 0; i -= 1) {
      if (siblings[i].id === filePath) {
        siblings.splice(i, 1)
      }
    }
  }

  const children = cached.children
  delete require.cache[filePath]

  for (const child of children) {
    // Recursively unload any non-node_modules, non-binary children.
    if (
      !child.filename.match(/\/node_modules\//i) &&
      !child.filename.match(/\.node$/i)
    ) {
      clearModule(child.id, { ...options, cleanup: false })
    }
  }

  // BUGFIX: was `opts.cleanup`, which throws a TypeError when the function
  // is called without an options argument; use the null-safe `options`.
  if (options.cleanup) {
    // Repeatedly sweep the cache for entries whose parent is gone (orphans)
    // until a full pass removes nothing.
    let removedAny
    do {
      removedAny = false
      for (const fn of Object.keys(require.cache)) {
        const entry = require.cache[fn]
        if (
          entry &&
          entry.id !== '.' &&
          entry.parent &&
          entry.parent.id !== '.' &&
          !require.cache[entry.parent.id] &&
          !fn.match(/\/node_modules\//i) &&
          !fn.match(/\.node$/i)
        ) {
          delete require.cache[fn]
          removedAny = true
        }
      }
    } while (removedAny)
  }
}

export default class InProcessRunner {
#allowCache = false
#env = null
#functionKey = null
#handlerName = null
#handlerPath = null
#timeout = null
#allowCache = false

constructor(functionKey, handlerPath, handlerName, env, timeout, allowCache) {
this.#allowCache = allowCache
this.#env = env
this.#functionKey = functionKey
this.#handlerName = handlerName
this.#handlerPath = handlerPath
this.#timeout = timeout
this.#allowCache = allowCache
}

// no-op
Expand Down
71 changes: 71 additions & 0 deletions src/lambda/handler-runner/in-process-runner/clearModule.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import { readdirSync } from 'node:fs'
import { createRequire } from 'node:module'
import { dirname, resolve } from 'node:path'

const { keys } = Object

const require = createRequire(import.meta.url)

export default function clearModule(fP, opts) {
const options = opts ?? {}
let filePath = fP
if (!require.cache[filePath]) {
const dirName = dirname(filePath)
for (const fn of readdirSync(dirName)) {
const fullPath = resolve(dirName, fn)
if (
fullPath.substr(0, filePath.length + 1) === `${filePath}.` &&
require.cache[fullPath]
) {
filePath = fullPath
break
}
}
}
if (require.cache[filePath]) {
// Remove file from parent cache
if (require.cache[filePath].parent) {
let i = require.cache[filePath].parent.children.length
if (i) {
do {
i -= 1
if (require.cache[filePath].parent.children[i].id === filePath) {
require.cache[filePath].parent.children.splice(i, 1)
}
} while (i)
}
}
const cld = require.cache[filePath].children
delete require.cache[filePath]
for (const c of cld) {
// Unload any non node_modules and non-binary children
if (
!c.filename.match(/\/node_modules\//i) &&
!c.filename.match(/\.node$/i)
) {
clearModule(c.id, { ...options, cleanup: false })
}
}
if (opts.cleanup) {
// Cleanup any node_modules that are orphans
let cleanup = false
do {
cleanup = false
for (const fn of keys(require.cache)) {
if (
require.cache[fn] &&
require.cache[fn].id !== '.' &&
require.cache[fn].parent &&
require.cache[fn].parent.id !== '.' &&
!require.cache[require.cache[fn].parent.id] &&
!fn.match(/\/node_modules\//i) &&
!fn.match(/\.node$/i)
) {
delete require.cache[fn]
cleanup = true
}
}
} while (cleanup)
}
}
}
12 changes: 7 additions & 5 deletions tests/integration/_testHelpers/compressArtifact.js
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
import { createWriteStream } from 'node:fs'
import { stat } from 'node:fs/promises'
import { dirname, resolve } from 'node:path'
import archiver from 'archiver'
import fs from 'fs-extra'
import { ensureDir } from 'fs-extra'

export default async function compressArtifact(baseDir, dest, src = []) {
const destPath = resolve(baseDir, dest)
await fs.ensureDir(dirname(destPath))
await ensureDir(dirname(destPath))

return new Promise((_resolve, reject) => {
const output = fs.createWriteStream(destPath)
const output = createWriteStream(destPath)
const archive = archiver('zip', {
zlib: { level: 9 },
})
Expand All @@ -18,7 +20,7 @@ export default async function compressArtifact(baseDir, dest, src = []) {
await Promise.all(
src.map(async (filename) => {
const filepath = resolve(baseDir, filename)
const stats = await fs.stat(filepath)
const stats = await stat(filepath)
if (stats.isDirectory()) {
archive.directory(filepath, filename)
return
Expand All @@ -27,7 +29,7 @@ export default async function compressArtifact(baseDir, dest, src = []) {
}),
)

archive.finalize()
await archive.finalize()
})

archive.on('error', (err) => reject(err))
Expand Down
3 changes: 3 additions & 0 deletions tests/integration/_testHelpers/setupTeardown.js
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ export async function setup(options) {

await new Promise((res, reject) => {
let stdData = ''

serverlessProcess.on('close', (code) => {
if (code) {
console.error(`Output: ${stdData}`)
Expand All @@ -35,13 +36,15 @@ export async function setup(options) {
reject(new Error('serverless offline ended prematurely'))
}
})

serverlessProcess.stderr.on('data', (data) => {
if (shouldPrintOfflineOutput) process._rawDebug(String(data))
stdData += data
if (String(data).includes('Server ready:')) {
res()
}
})

serverlessProcess.stdout.on('data', (data) => {
if (shouldPrintOfflineOutput) process._rawDebug(String(data))
stdData += data
Expand Down

0 comments on commit 4e29466

Please sign in to comment.