Skip to content

Commit 1752315

Browse files
authored
Branch was updated using the 'autoupdate branch' Actions workflow.
2 parents 6d40923 + 72140bb commit 1752315

3 files changed

Lines changed: 120 additions & 1 deletion

File tree

Procfile

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1 +1,3 @@
11
web: NODE_ENV=production node server.js
2+
3+
release: NODE_ENV=production node script/purge-redis-pages.js

data/release-notes/2-22/0.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ sections:
55
- heading: GitHub Actions Beta
66
notes:
77
- |
8-
[GitHub Actions](https://github.com/features/actions) is a powerful, flexible solution for CI/CD and workflow automation. GitHub Actions on Enteprise Server includes tools to help you manage the service, including key metrics in the Management Console, audit logs and access controls to help you control the roll out.
8+
[GitHub Actions](https://github.com/features/actions) is a powerful, flexible solution for CI/CD and workflow automation. GitHub Actions on Enterprise Server includes tools to help you manage the service, including key metrics in the Management Console, audit logs and access controls to help you control the roll out.
99
1010
You will need to provide your own [storage](https://docs.github.com/en/enterprise/2.22/admin/github-actions/enabling-github-actions-and-configuring-storage) and runners for GitHub Actions. AWS S3, Azure Blob Storage and MinIO are supported. Please review the [updated minimum requirements for your platform](https://docs.github.com/en/enterprise/2.22/admin/installation/setting-up-a-github-enterprise-server-instance) before you turn on GitHub Actions. To learn more, contact the GitHub Sales team or [sign up for the beta](https://resources.github.com/beta-signup/). {% comment %} https://github.com/github/releases/issues/775 {% endcomment %}
1111

script/purge-redis-pages.js

Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,117 @@
1+
#!/usr/bin/env node

// [start-readme]
//
// Run this script to manually purge the Redis rendered page cache.
// This will typically only be run by Heroku during the deployment process,
// as triggered via our Procfile's "release" phase configuration.
//
// [end-readme]

const Redis = require('ioredis')

const { REDIS_URL, HEROKU_RELEASE_VERSION, HEROKU_PRODUCTION_APP } = process.env
const isHerokuProd = HEROKU_PRODUCTION_APP === 'true'
// The rendered-page cache lives in Redis logical database 1
const pageCacheDatabaseNumber = 1
// On Heroku releases the keys carry a release-version prefix ("<version>:rp:...");
// without HEROKU_RELEASE_VERSION, match the unprefixed "rp:*" keys instead
const keyScanningPattern = HEROKU_RELEASE_VERSION ? '*:rp:*' : 'rp:*'
// COUNT hint for each SCAN cursor iteration
const scanSetSize = 250

const startTime = Date.now()
const expirationDuration = 30 * 60 * 1000 // 30 minutes
const expirationTimestamp = startTime + expirationDuration // 30 minutes from now

// Print keys to be purged without actually purging.
// Accept -d/--dry-run anywhere on the command line (the previous check only
// recognized the flag when it was the first argument).
const dryRun = process.argv.slice(2).some(arg => ['-d', '--dry-run'].includes(arg))

// verify environment variables
if (!REDIS_URL) {
  if (isHerokuProd) {
    // Missing Redis in production is a hard failure: abort the release
    console.error('Error: you must specify the REDIS_URL environment variable.\n')
    process.exit(1)
  } else {
    // Outside production this script is a no-op; exit successfully
    console.warn('Warning: you did not specify a REDIS_URL environment variable. Exiting...\n')
    process.exit(0)
  }
}

console.log({
  HEROKU_RELEASE_VERSION,
  HEROKU_PRODUCTION_APP
})

purgeRenderedPageCache()
43+
44+
// Walk every key in the page-cache database matching keyScanningPattern and
// set a TTL (expirationTimestamp, ~30 minutes out) on the stale ones, so the
// old rendered pages age out instead of being deleted in one blocking sweep.
// Exits the process itself: 0 when the scan completes, 1 on stream error.
function purgeRenderedPageCache () {
  const redisClient = new Redis(REDIS_URL, { db: pageCacheDatabaseNumber })
  let totalKeyCount = 0
  let iteration = 0

  // Create a readable stream (object mode) for the SCAN cursor
  const scanStream = redisClient.scanStream({
    match: keyScanningPattern,
    count: scanSetSize
  })

  scanStream.on('end', function () {
    console.log(`Done purging keys; affected total: ${totalKeyCount}`)
    console.log(`Time elapsed: ${Date.now() - startTime} ms`)

    // This seems to be unexpectedly necessary
    // NOTE(review): the 'data' handler below is async; 'end' can fire while a
    // final batch is still awaiting its pipelines, so this exit may cut that
    // work short and under-report totalKeyCount — confirm whether the
    // pause()/resume() calls fully prevent that race.
    process.exit(0)
  })

  scanStream.on('error', function (error) {
    console.error('An unexpected error occurred!\n' + error.stack)
    console.error('\nAborting...')
    process.exit(1)
  })

  // Each 'data' event delivers one SCAN batch: an array of key names.
  scanStream.on('data', async function (keys) {
    console.log(`[Iteration ${iteration++}] Received ${keys.length} keys...`)

    // NOTE: It is possible for a SCAN cursor iteration to return 0 keys when
    // using a MATCH because it is applied after the elements are retrieved
    if (keys.length === 0) return

    // Dry-run mode: report the candidate keys for this batch and touch nothing.
    // "up to" because the PTTL / version-prefix filtering below is skipped here.
    if (dryRun) {
      console.log(`DRY RUN! This iteration might have set TTL for up to ${keys.length} keys:\n - ${keys.join('\n - ')}`)
      return
    }

    // Pause the SCAN stream while we set a TTL on these keys
    scanStream.pause()

    // Find existing TTLs to ensure we aren't extending the TTL if it's already set
    // PTTL mykey // only operate on -1 result values or those greater than ONE_HOUR_FROM_NOW
    // (PTTL returns -1 for keys with no expiry; pipeline.exec() yields
    // [error, result] pairs in the same order as the queued commands.)
    const pttlPipeline = redisClient.pipeline()
    keys.forEach(key => pttlPipeline.pttl(key))
    const pttlResults = await pttlPipeline.exec()

    // Update pertinent keys to have TTLs set
    let updatingKeyCount = 0
    const pexpireAtPipeline = redisClient.pipeline()
    keys.forEach((key, i) => {
      const [error, pttl] = pttlResults[i]
      // Only shorten: act on keys with no TTL (-1) or a TTL longer than ours
      const needsShortenedTtl = error == null && (pttl === -1 || pttl > expirationDuration)
      // Keys prefixed with the current release version are fresh — leave them.
      // With no HEROKU_RELEASE_VERSION set, every matched key counts as old.
      const isOldKey = !HEROKU_RELEASE_VERSION || !key.startsWith(`${HEROKU_RELEASE_VERSION}:`)

      if (needsShortenedTtl && isOldKey) {
        pexpireAtPipeline.pexpireat(key, expirationTimestamp)
        updatingKeyCount += 1
      }
    })

    // Only update TTLs if there are records worth updating
    if (updatingKeyCount > 0) {
      // Set all the TTLs
      const pexpireAtResults = await pexpireAtPipeline.exec()
      // PEXPIREAT returns 1 on success, 0 if the key vanished meanwhile
      const updatedResults = pexpireAtResults.filter(([error, result]) => error == null && result === 1)

      // Count only the entries whose TTLs were successfully updated
      totalKeyCount += updatedResults.length
    }

    // Resume the SCAN stream
    scanStream.resume()
  })
}

0 commit comments

Comments
 (0)