Skip to content

Commit 1f89a0e

Browse files
authored
Branch was updated using the 'autoupdate branch' Actions workflow.
2 parents b3a5a59 + bca0682 commit 1f89a0e

4 files changed

Lines changed: 164 additions & 0 deletions

File tree

.github/allowed-actions.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ module.exports = [
1212
'actions/setup-ruby@5f29a1cd8dfebf420691c4c9a0e832e2fae5a526', //actions/setup-ruby@v1.1.2
1313
'actions/stale@af4072615903a8b031f986d25b1ae3bf45ec44d4', //actions/stale@v3.0.13
1414
'crowdin/github-action@fd9429dd63d6c0f8a8cb4b93ad8076990bd6e688',
15+
'crykn/copy_folder_to_another_repo_action@abc264e1c16eb3d7b1f7763bfdb0e1699ad43120',
1516
'dawidd6/action-delete-branch@47743101a121ad657031e6704086271ca81b1911',
1617
'docker://chinthakagodawita/autoupdate-action:v1',
1718
'fkirc/skip-duplicate-actions@36feb0d8d062137530c2e00bd278d138fe191289',
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
name: site-policy-sync

# Controls when the action will run.
on:
  # Triggers the workflow when pull requests against the main branch are closed
  pull_request:
    branches:
      - main
    types:
      - closed

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "copy-file"
  copy-file:
    # FIX: `types: [closed]` also fires for pull requests that are closed
    # WITHOUT being merged. Only mirror content for real merges (or when the
    # workflow is dispatched manually).
    if: github.event_name == 'workflow_dispatch' || github.event.pull_request.merged == true
    # The type of runner that the job will run on
    runs-on: ubuntu-latest

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f

      # Pushes the site-policy content folder to the github/site-policy repo
      - name: Push folder to another repository
        uses: crykn/copy_folder_to_another_repo_action@abc264e1c16eb3d7b1f7763bfdb0e1699ad43120
        env:
          API_TOKEN_GITHUB: ${{ secrets.API_TOKEN_SITEPOLICY }}
        with:
          source_folder: 'content/github/site-policy'
          destination_repo: 'github/site-policy'
          destination_branch: 'non-substantive-changes'
          destination_folder: 'Policies'
          user_email: 'pcihon@users.noreply.github.com'
          user_name: 'pcihon'
          commit_msg: 'Mirroring non-substantive changes.'

Procfile

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1 +1,3 @@
web: NODE_ENV=production node server.js
release: NODE_ENV=production node script/purge-redis-pages.js

script/purge-redis-pages.js

Lines changed: 122 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,122 @@
#!/usr/bin/env node

// [start-readme]
//
// Run this script to manually purge the Redis rendered page cache.
// This will typically only be run by Heroku during the deployment process,
// as triggered via our Procfile's "release" phase configuration.
//
// [end-readme]

const program = require('commander')
const Redis = require('ioredis')

// Environment configuration supplied by Heroku at release time.
const { REDIS_URL, HEROKU_RELEASE_VERSION, HEROKU_PRODUCTION_APP } = process.env
const isHerokuProd = HEROKU_PRODUCTION_APP === 'true'

// Rendered pages live in Redis database 1; keys are namespaced with "rp:"
// (optionally prefixed by a release version when one is set).
const pageCacheDatabaseNumber = 1
const keyScanningPattern = HEROKU_RELEASE_VERSION ? '*:rp:*' : 'rp:*'
const scanSetSize = 250

// Expire matching keys 30 minutes from the moment this script starts.
const startTime = Date.now()
const expirationDuration = 30 * 60 * 1000 // 30 minutes
const expirationTimestamp = startTime + expirationDuration // 30 minutes from now

program
  .description('Purge the Redis rendered page cache')
  .option('-d, --dry-run', 'print keys to be purged without actually purging')
  .parse(process.argv)

const dryRun = program.dryRun

// Verify environment variables before touching Redis.
if (!REDIS_URL) {
  // Outside production this is a soft no-op; in production it is fatal.
  if (!isHerokuProd) {
    console.warn('Warning: you did not specify a REDIS_URL environment variable. Exiting...\n')
    process.exit(0)
  }
  console.error('Error: you must specify the REDIS_URL environment variable.\n')
  process.exit(1)
}

console.log({
  HEROKU_RELEASE_VERSION,
  HEROKU_PRODUCTION_APP
})

purgeRenderedPageCache()
// Walk the rendered-page cache database with a SCAN cursor and put a short
// TTL on every matching key (instead of deleting outright), so stale pages
// expire gracefully after a deploy. Exits the process when the scan ends.
function purgeRenderedPageCache () {
  const redisClient = new Redis(REDIS_URL, { db: pageCacheDatabaseNumber })
  let totalKeyCount = 0
  let iteration = 0

  // Create a readable stream (object mode) for the SCAN cursor
  const scanStream = redisClient.scanStream({
    match: keyScanningPattern,
    count: scanSetSize
  })

  scanStream.on('end', function () {
    console.log(`Done purging keys; affected total: ${totalKeyCount}`)
    console.log(`Time elapsed: ${Date.now() - startTime} ms`)

    // This seems to be unexpectedly necessary
    process.exit(0)
  })

  scanStream.on('error', function (error) {
    console.error('An unexpected error occurred!\n' + error.stack)
    console.error('\nAborting...')
    process.exit(1)
  })

  scanStream.on('data', async function (keys) {
    console.log(`[Iteration ${iteration++}] Received ${keys.length} keys...`)

    // NOTE: It is possible for a SCAN cursor iteration to return 0 keys when
    // using a MATCH because it is applied after the elements are retrieved
    if (keys.length === 0) return

    if (dryRun) {
      console.log(`DRY RUN! This iteration might have set TTL for up to ${keys.length} keys:\n - ${keys.join('\n - ')}`)
      return
    }

    // Pause the SCAN stream while we set a TTL on these keys
    scanStream.pause()

    // FIX: the awaited pipeline calls below previously ran without a
    // try/catch inside this async event handler, so any rejection became an
    // unhandled promise rejection and left the stream paused forever.
    // Fail loudly and exit instead.
    try {
      // Find existing TTLs to ensure we aren't extending the TTL if it's already set
      // PTTL mykey // only operate on -1 result values or those greater than ONE_HOUR_FROM_NOW
      const pttlPipeline = redisClient.pipeline()
      keys.forEach(key => pttlPipeline.pttl(key))
      const pttlResults = await pttlPipeline.exec()

      // Update pertinent keys to have TTLs set
      let updatingKeyCount = 0
      const pexpireAtPipeline = redisClient.pipeline()
      keys.forEach((key, i) => {
        const [error, pttl] = pttlResults[i]
        // Only shorten TTLs: keys with no expiry (-1) or an expiry further
        // out than our 30-minute window
        const needsShortenedTtl = error == null && (pttl === -1 || pttl > expirationDuration)
        // Keys belonging to the current release are left untouched
        const isOldKey = !HEROKU_RELEASE_VERSION || !key.startsWith(`${HEROKU_RELEASE_VERSION}:`)

        if (needsShortenedTtl && isOldKey) {
          pexpireAtPipeline.pexpireat(key, expirationTimestamp)
          updatingKeyCount += 1
        }
      })

      // Only update TTLs if there are records worth updating
      if (updatingKeyCount > 0) {
        // Set all the TTLs
        const pexpireAtResults = await pexpireAtPipeline.exec()
        const updatedResults = pexpireAtResults.filter(([error, result]) => error == null && result === 1)

        // Count only the entries whose TTLs were successfully updated
        totalKeyCount += updatedResults.length
      }
    } catch (error) {
      console.error('An unexpected error occurred while setting TTLs!\n' + error.stack)
      console.error('\nAborting...')
      process.exit(1)
    }

    // Resume the SCAN stream
    scanStream.resume()
  })
}

0 commit comments

Comments
 (0)