Skip to content

Commit de48c17

Browse files
authored
Merge pull request #1 from pre-commit-ci/initial-commit
initial commit
2 parents a650a91 + f8a7442 commit de48c17

5 files changed

Lines changed: 377 additions & 0 deletions

File tree

.pre-commit-config.yaml

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
# pre-commit configuration for this repository's own sources.
repos:
# generic hygiene hooks: whitespace, EOF newlines, yaml syntax, stray
# breakpoints, quote style, test naming, requirements ordering
-   repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.3.0
    hooks:
    -   id: trailing-whitespace
    -   id: end-of-file-fixer
    -   id: check-yaml
    -   id: debug-statements
    -   id: double-quote-string-fixer
    -   id: name-tests-test
    -   id: requirements-txt-fixer
# keep imports one-per-line, sorted, with the future-annotations import added
-   repo: https://github.com/asottile/reorder_python_imports
    rev: v3.9.0
    hooks:
    -   id: reorder-python-imports
        args: [--py37-plus, --add-import, 'from __future__ import annotations']
-   repo: https://github.com/asottile/add-trailing-comma
    rev: v2.3.0
    hooks:
    -   id: add-trailing-comma
        args: [--py36-plus]
# rewrite syntax up to the minimum supported python (3.7)
-   repo: https://github.com/asottile/pyupgrade
    rev: v3.2.0
    hooks:
    -   id: pyupgrade
        args: [--py37-plus]
# formatting / lint / typing
-   repo: https://github.com/pre-commit/mirrors-autopep8
    rev: v2.0.0
    hooks:
    -   id: autopep8
-   repo: https://github.com/PyCQA/flake8
    rev: 5.0.4
    hooks:
    -   id: flake8
-   repo: https://github.com/pre-commit/mirrors-mypy
    rev: v0.982
    hooks:
    -   id: mypy

action.yml

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
# GitHub Action metadata for pre-commit.ci lite.
name: pre-commit.ci lite
description: pre-commit.ci lite
inputs:
  msg:
    description: commit message to use for autofixing
    required: false
    default: '[pre-commit.ci lite] apply automatic fixes'
runs:
  using: node16
  main: main.mjs
  post: post.mjs
  # only run the post step when main actually uploaded an artifact
  # (main.mjs / bin/main set this env var after a successful upload)
  post-if: always() && env.PRE_COMMIT_CI_LITE_ARTIFACT == 'true'

bin/main

Lines changed: 298 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,298 @@
1+
#!/usr/bin/env python3
2+
from __future__ import annotations
3+
4+
import argparse
5+
import base64
6+
import io
7+
import json
8+
import os.path
9+
import shutil
10+
import subprocess
11+
import tempfile
12+
import urllib.request
13+
from typing import Any
14+
from typing import Iterable
15+
from typing import NamedTuple
16+
from typing import Sequence
17+
18+
_VERSION = 'v0.0.0'
19+
20+
_GIT = (
21+
'git',
22+
'-c', 'user.name=does-not-matter',
23+
'-c', 'user.email=does-not-matter@example.com',
24+
'-c', 'protocol.version-2',
25+
)
26+
27+
28+
def _has_changes(*, src_repo: str) -> bool:
    """Return True when the working tree of *src_repo* differs from HEAD."""
    # `git diff-index --quiet` exits 1 when there are differences, 0 if clean
    ret = subprocess.call((
        *_GIT, '-C', src_repo, 'diff-index',
        '--quiet', '--no-ext-diff', 'HEAD', '--',
    ))
    return ret == 1
34+
35+
36+
def _rev_parse(*, repo: str, ref: str) -> str:
    """Resolve *ref* to its full object id in *repo*."""
    out = subprocess.check_output((*_GIT, '-C', repo, 'rev-parse', ref))
    return out.decode().strip()
39+
40+
41+
def _fetch_pr(*, src_repo: str, pr: int) -> str:
    """Shallow-fetch the head of pull request *pr* and return its sha."""
    fetch_cmd = (
        *_GIT, '-C', src_repo, 'fetch',
        '--quiet', '--depth=1',
        'origin', f'+refs/pull/{pr}/head',
    )
    subprocess.check_call(fetch_cmd)
    # the just-fetched ref is available as FETCH_HEAD
    return _rev_parse(repo=src_repo, ref='FETCH_HEAD')
48+
49+
50+
def _make_commit(*, src_repo: str, head: str, clone: str) -> str:
    """Commit the current modifications of *src_repo* on top of *head*.

    Copies the source repo's .git into a scratch *clone*, commits the
    working-tree changes into the clone (via GIT_DIR, leaving the source
    repo's history untouched), then cherry-picks that commit onto the
    fetched PR *head*.  Returns the sha of the resulting commit.
    """
    os.makedirs(clone, exist_ok=True)

    # ~essentially make a worktree and commit what's currently modified
    git_dir = os.path.join(clone, '.git')
    shutil.copytree(os.path.join(src_repo, '.git'), git_dir)

    # GIT_DIR redirects the commit into the clone's object database while
    # --all picks up the modifications present in src_repo's working tree
    subprocess.check_call(
        (
            *_GIT, '-C', src_repo, 'commit',
            '--all', '--quiet', '--no-edit', '--no-verify', '--message=hi',
        ),
        env={**os.environ, 'GIT_DIR': os.path.join(clone, '.git')},
    )
    commit = _rev_parse(repo=clone, ref='HEAD')

    # need to clean out deletes
    subprocess.check_call((*_GIT, '-C', clone, 'checkout', '--', '.'))
    subprocess.check_call((*_GIT, '-C', clone, 'clean', '-qfxfd'))

    # replay the autofix commit onto the PR head and return the new sha
    subprocess.check_call((*_GIT, '-C', clone, 'checkout', head, '--quiet'))
    cmd = (*_GIT, '-C', clone, 'cherry-pick', '--quiet', '--no-edit', commit)
    subprocess.check_call(cmd, stdout=subprocess.DEVNULL)
    return _rev_parse(repo=clone, ref='HEAD')
74+
75+
76+
def _changed_files(*, repo: str, commit: str) -> list[str]:
    """Return the paths touched by *commit* (renames as delete + add)."""
    # NUL-separated, names only, no commit header (--format=)
    out = subprocess.check_output((
        *_GIT, '-C', repo, 'show',
        '--name-only', '-z', '--no-renames', '--format=', commit,
    ))
    if out:
        return out.rstrip(b'\0').decode().split('\0')
    return []
85+
86+
87+
class Index(NamedTuple):
    """A single (mode, path, oid) entry parsed from a git tree object."""
    mode: str  # file mode string, e.g. '100644', or '40000' for a subtree
    path: str  # entry name relative to the containing tree
    oid: str   # hex sha1 of the referenced object


class Object(NamedTuple):
    """A raw git object as produced by `git cat-file --batch`."""
    oid: str     # hex sha1 of the object
    tp: str      # object type reported by cat-file ('blob', 'tree', ...)
    data: bytes  # raw object payload

    def as_index(self) -> list[Index]:
        """Parse tree-object bytes into a list of Index entries.

        The binary tree format is a sequence of records of the form
        ``<mode> <path>\\0<20-byte binary sha1>`` with no separator
        between records.
        """
        entries = []
        remaining = self.data
        while b'\0' in remaining:
            header, remaining = remaining.split(b'\0', 1)
            mode, _, path = header.decode().partition(' ')
            raw_oid, remaining = remaining[:20], remaining[20:]
            entries.append(Index(mode=mode, path=path, oid=raw_oid.hex()))
        return entries
115+
116+
117+
def _read_obj(bio: io.BytesIO) -> Object | None:
    """Read one `cat-file --batch` record from *bio*; None when missing."""
    header = bio.readline().strip().decode()
    if header.endswith(' missing'):
        return None

    oid, tp, size = header.split()
    payload = bio.read(int(size))
    bio.read(1)  # discard the newline that follows the payload
    return Object(oid=oid, tp=tp, data=payload)
126+
127+
128+
def _query_objects(
        *,
        repo: str,
        objects: Sequence[str],
) -> dict[str, Object | None]:
    """Batch-fetch git objects by name; unresolvable names map to None."""
    names = '\n'.join(objects) + '\n'
    proc = subprocess.run(
        (*_GIT, '-C', repo, 'cat-file', '--batch'),
        input=names.encode(),
        stdout=subprocess.PIPE,
        check=True,
    )
    # cat-file emits one record per requested name, in request order
    stream = io.BytesIO(proc.stdout)
    return {name: _read_obj(stream) for name in objects}
142+
143+
144+
def _structure_for(
        *,
        repo: str,
        ref: str,
        files: Iterable[str],
) -> dict[str, dict[str, Index]]:
    """Map each parent directory of *files* to its tree entries at *ref*.

    Directories that do not exist at *ref* are omitted from the result.
    """
    # dedupe: several files may share the same parent directory
    wanted = {f'{ref}:{os.path.dirname(f)}' for f in files}
    objs = _query_objects(repo=repo, objects=tuple(wanted))
    ret = {}
    for name, obj in objs.items():
        if obj is None:
            continue  # tree missing at ref
        dirname = name.partition(':')[2]
        ret[dirname] = {entry.path: entry for entry in obj.as_index()}
    return ret
157+
158+
159+
def _get_data(
        *,
        msg: str,
        clone: str,
        head: str,
        commit: str,
) -> dict[str, Any]:
    """Build the artifact payload describing the autofix *commit*.

    Returns a dict with the commit message, the base tree of *head*,
    deleted paths, and the changed files split into text (raw contents)
    and binary (base64-encoded contents) lists.
    """
    files = _changed_files(repo=clone, commit=commit)

    dir_structure = _structure_for(repo=clone, ref=commit, files=files)
    deletes = []
    entries = []
    for filename in files:
        dirname, basename = os.path.split(filename)
        try:
            entries.append((filename, dir_structure[dirname][basename]))
        except KeyError:
            # absent from the commit's tree: the file was deleted
            deletes.append(filename)

    binary = []
    text = []
    file_oids = [idx.oid for _, idx in entries]
    file_objects = _query_objects(repo=clone, objects=file_oids)
    for filename, entry in entries:
        # fix: look each entry up by oid instead of zipping with
        # file_objects.values() -- when two changed files have identical
        # contents they share an oid, the dict collapses them to one key,
        # and the zip silently dropped trailing files
        obj = file_objects[entry.oid]
        assert obj is not None
        try:
            contents = obj.data.decode()
        except UnicodeDecodeError:
            # not valid utf-8: ship as base64
            b64 = base64.b64encode(obj.data).decode()
            binary.append((filename, entry.mode, b64))
        else:
            text.append((filename, entry.mode, contents))

    return {
        'action_version': _VERSION,
        'msg': msg,
        'base_tree': _rev_parse(repo=clone, ref=f'{head}:'),
        'delete': deletes,
        'binary': binary,
        'text': text,
    }
200+
201+
202+
def _save_artifact(
        data: dict[str, Any],
        pr: int,
        run_id: int,
        url: str,
        token: str,
) -> None:
    """Upload *data* as a workflow artifact via the Actions runtime API.

    Three calls: create the artifact container, PUT the json payload into
    it, then PATCH the total size to finalize.  The artifact name encodes
    the PR number and run id so it can be located later.

    NOTE(review): this targets the internal pipelines artifact endpoint
    (api-version 6.0-preview) -- presumably mirroring what
    actions/upload-artifact did at the time; confirm it still works
    against current runners.
    """
    # compact separators keep the uploaded payload small
    contents = json.dumps(data, separators=(',', ':')).encode()

    artifact_name = f'pre-commit-ci-lite-{pr}-{run_id}'

    headers = {
        'Accept': 'application/json;api-version=6.0-preview',
        'Authorization': f'Bearer {token}',
    }

    base_url = f'{url}_apis/pipelines/workflows/{run_id}/artifacts?api-version=6.0-preview'  # noqa: E501

    # step 1: create the artifact container (short retention: the backend
    # consumes it almost immediately)
    req_create = urllib.request.Request(
        base_url,
        method='POST',
        headers={**headers, 'Content-Type': 'application/json'},
        data=json.dumps({
            'type': 'actions_storage',
            'name': artifact_name,
            'retentionDays': 1,
        }).encode(),
    )
    resp_create = json.load(urllib.request.urlopen(req_create))

    # step 2: upload the payload into the container as data.json
    req_upload = urllib.request.Request(
        f'{resp_create["fileContainerResourceUrl"]}?itemPath={artifact_name}/data.json',  # noqa: E501
        method='PUT',
        headers={
            **headers,
            'Content-Type': 'application/octet-stream',
            'Content-Range': f'bytes 0-{len(contents) - 1}/{len(contents)}',
        },
        data=contents,
    )
    urllib.request.urlopen(req_upload)

    # step 3: finalize the artifact by reporting its total size
    req_finish = urllib.request.Request(
        f'{base_url}&artifactName={artifact_name}',
        method='PATCH',
        headers={**headers, 'Content-Type': 'application/json'},
        data=json.dumps({'size': len(contents)}).encode(),
    )
    urllib.request.urlopen(req_finish)
251+
252+
253+
def main(argv: Sequence[str] | None = None) -> int:
    """Entry point: detect hook-made changes and upload them as an artifact.

    Exits early (0) when the working tree is clean; otherwise fetches the
    PR head, rebuilds the changes as a commit on top of it, and either
    prints the payload (--dry-run) or uploads it via the runtime API.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--src-repo', default='.')
    parser.add_argument('--dry-run', action='store_true')
    parser.add_argument('--msg', required=True)
    parser.add_argument('--pr', type=int, required=True)
    parser.add_argument('--run-id', type=int, required=True)
    parser.add_argument('--runtime-token', required=True)
    parser.add_argument('--runtime-url', required=True)
    args = parser.parse_args(argv)

    # hooks modified nothing: no artifact needed
    if not _has_changes(src_repo=args.src_repo):
        print('nothing to do: no changes!')
        return 0

    head = _fetch_pr(src_repo=args.src_repo, pr=args.pr)

    with tempfile.TemporaryDirectory() as clone:
        # commit the fixes onto the PR head in a scratch clone
        commit = _make_commit(src_repo=args.src_repo, head=head, clone=clone)

        data = _get_data(
            msg=args.msg,
            clone=clone,
            head=head,
            commit=commit,
        )

        if args.dry_run:
            print('would create artifact with data:')
            print(json.dumps(data, indent=2))
        else:
            _save_artifact(
                data=data,
                pr=args.pr,
                run_id=args.run_id,
                url=args.runtime_url,
                token=args.runtime_token,
            )
            # signal the post step (gated by post-if in action.yml) that
            # an artifact was actually uploaded
            with open(os.environ['GITHUB_ENV'], 'a+') as f:
                f.write('PRE_COMMIT_CI_LITE_ARTIFACT=true\n')

    return 0


if __name__ == '__main__':
    raise SystemExit(main())

main.mjs

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
// Action "main" step: decide whether to run, then delegate to bin/main.
import child_process from 'child_process';
import fs from 'fs';
import path from 'path';
import url from 'url';

// webhook payload that triggered this workflow run
const event = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH));

// only act on pull requests opened by humans
if (process.env.GITHUB_EVENT_NAME !== 'pull_request') {
  console.log('skip: not a pull request');
  process.exit(0);
} else if (event.sender.type !== 'User') {
  console.log('skip: triggered by a bot');
  process.exit(0);
}

// resolve bin/main relative to this action's own checkout
// (ES modules do not get __dirname for free)
const __dirname = path.dirname(url.fileURLToPath(import.meta.url));
const args = [
  // -u: unbuffered output, -S: don't prepend script dir to sys.path
  '-uS', path.resolve(__dirname, 'bin/main'),
  '--msg', process.env.INPUT_MSG,
  '--pr', event.number.toString(10),
  '--run-id', process.env.GITHUB_RUN_ID,
  '--runtime-token', process.env.ACTIONS_RUNTIME_TOKEN,
  '--runtime-url', process.env.ACTIONS_RUNTIME_URL,
];
try {
  child_process.execFileSync('python3', args, {stdio: 'inherit'});
} catch (e) {
  // propagate the python script's exit status
  process.exit(e.status);
}

post.mjs

Whitespace-only changes.

0 commit comments

Comments
 (0)