from __future__ import annotations

import asyncio
from asyncio.subprocess import DEVNULL
import os
import tempfile
from typing import Literal, Optional, Sequence
import zipfile

from art.errors import ForbiddenBucketCreationError
from art.utils.output_dirs import (
    get_output_dir_from_model_properties,
    get_step_checkpoint_dir,
)

from ..utils import limit_concurrency

__all__: Sequence[str] = ("s3_sync",)

ExcludableOption = Literal["checkpoints", "logs", "trajectories"]


class S3SyncError(RuntimeError):
    """Raised when the underlying *aws s3 sync* command exits with a non-zero status."""


def build_s3_path(
    *,
    model_name: str,
    project: str,
    step: int | None = None,
    s3_bucket: str | None = None,
    prefix: str | None = None,
) -> str:
    """Return the fully-qualified S3 URI for this model directory."""
    if s3_bucket is None:
        s3_bucket = os.environ["BACKUP_BUCKET"]
    prefix_part = f"{prefix.strip('/')}/" if prefix else ""
    path = f"s3://{s3_bucket}/{prefix_part}{project}/models/{model_name}"
    if step is not None:
        # Use the new checkpoint structure in S3
        path += f"/checkpoints/{step:04d}"
    return path
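
# Hypothetical example of the resulting URI (bucket and names are illustrative,
# not from this repo); with BACKUP_BUCKET="my-bucket":
#     build_s3_path(model_name="my-model", project="my-project", step=42)
#     # -> "s3://my-bucket/my-project/models/my-model/checkpoints/0042"
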

def build_s3_zipped_step_path(
    *,
    model_name: str,
    project: str,
    step: int,
    s3_bucket: str | None = None,
    prefix: str | None = None,
) -> str:
    """Return the fully-qualified S3 URI for a zipped step in a model directory."""
    base_path = build_s3_path(
        model_name=model_name,
        project=project,
        s3_bucket=s3_bucket,
        prefix=prefix,
    )
    return f"{base_path}/zipped-steps/{step:04d}.zip"

@limit_concurrency(1)
async def s3_sync(
    source: str,
    destination: str,
    *,
    profile: Optional[str] = None,
    endpoint_url: Optional[str] = None,
    verbose: bool = False,
    delete: bool = False,
    exclude: list[ExcludableOption] | None = None,
) -> None:
    """Synchronise *source* and *destination* using the AWS CLI.

    Either *source* or *destination* (or both) can point to an S3 URI, making it
    possible to copy from local disk to S3 or from S3 to local disk.

    The function is asynchronous: while the `aws` process runs, control is
    yielded back to the event loop so other tasks can continue executing.

    Args:
        source: The *from* path. Can be a local path or an ``s3://`` URI.
        destination: The *to* path. Can be a local path or an ``s3://`` URI.
        profile: Optional AWS profile name to pass to the CLI.
        endpoint_url: Optional custom endpoint URL for S3-compatible services
            (e.g. MinIO). Falls back to the ``AWS_ENDPOINT_URL`` environment
            variable when not provided.
        verbose: When *True*, the output of the AWS CLI is streamed to the
            calling process; otherwise it is suppressed.
        delete: When *True*, delete files in *destination* that don't exist in
            *source*. Only applies to directory syncs; it is ignored when
            *source* is a single file.
        exclude: List of directories to exclude from sync.

    Raises:
        S3SyncError: If the *aws s3 sync* command exits with a non-zero status.
    """
    if endpoint_url is None:
        endpoint_url = os.environ.get("AWS_ENDPOINT_URL")

    cmd: list[str] = ["aws"]
    if profile:
        cmd += ["--profile", profile]
    if endpoint_url:
        cmd += ["--endpoint-url", endpoint_url]
    cmd += ["s3"]

    # Use cp for single files, sync for directories.
    if os.path.isfile(source):
        cmd += ["cp"]
    else:
        cmd += ["sync"]
        # --delete is only valid for `aws s3 sync`, not `aws s3 cp`.
        if delete:
            cmd.append("--delete")

    # Add exclude patterns for each excluded directory
    if exclude:
        for excluded_dir in exclude:
            cmd.extend(["--exclude", f"{excluded_dir}/*"])

    cmd += [source, destination]

    # Suppress output unless verbose mode is requested.
    stdout = None if verbose else DEVNULL
    stderr = None if verbose else DEVNULL

    process = await asyncio.create_subprocess_exec(*cmd, stdout=stdout, stderr=stderr)
    return_code = await process.wait()

    if return_code != 0:
        raise S3SyncError(f"{' '.join(cmd)} exited with status {return_code}")
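
# A hypothetical call (paths are illustrative), awaited from async code:
#     await s3_sync(
#         ".art/my-project/models/my-model",
#         "s3://my-bucket/my-project/models/my-model",
#         exclude=["logs"],
#         delete=True,
#     )
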

async def ensure_bucket_exists(
    s3_bucket: str | None = None,
    profile: str | None = None,
    endpoint_url: str | None = None,
) -> None:
    """Check that *s3_bucket* exists, creating it if it does not.

    Falls back to the ``BACKUP_BUCKET`` and ``AWS_ENDPOINT_URL`` environment
    variables when the corresponding arguments are omitted.

    Raises:
        ForbiddenBucketCreationError: If the bucket cannot be created.
    """
    if s3_bucket is None:
        s3_bucket = os.environ["BACKUP_BUCKET"]
    if endpoint_url is None:
        endpoint_url = os.environ.get("AWS_ENDPOINT_URL")

    # Check if the bucket exists
    cmd = ["aws"]
    if profile:
        cmd += ["--profile", profile]
    if endpoint_url:
        cmd += ["--endpoint-url", endpoint_url]
    cmd += ["s3api", "head-bucket", "--bucket", s3_bucket]

    result = await asyncio.create_subprocess_exec(*cmd, stdout=DEVNULL, stderr=DEVNULL)
    return_code = await result.wait()
    if return_code == 0:
        return  # Bucket exists

    # Try to create the bucket
    print(f"S3 bucket {s3_bucket} does not exist, creating it")
    cmd = ["aws"]
    if profile:
        cmd += ["--profile", profile]
    if endpoint_url:
        cmd += ["--endpoint-url", endpoint_url]
    cmd += ["s3api", "create-bucket", "--bucket", s3_bucket]

    result = await asyncio.create_subprocess_exec(*cmd)
    return_code = await result.wait()
    if return_code != 0:
        raise ForbiddenBucketCreationError(
            message=f"Failed to create bucket {s3_bucket}. It may already exist and "
            "belong to another user, or your credentials may be insufficient to "
            "create an S3 bucket."
        )
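
# Hypothetical call (bucket name and endpoint are illustrative):
#     await ensure_bucket_exists("my-bucket", endpoint_url="http://localhost:9000")
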

async def pull_model_from_s3(
    model_name: str,
    project: str,
    step: int | None = None,
    s3_bucket: str | None = None,
    prefix: str | None = None,
    verbose: bool = False,
    delete: bool = False,
    art_path: str | None = None,
    exclude: list[ExcludableOption] | None = None,
) -> str:
    """Pull a model from S3 to the local directory.

    Args:
        model_name: The name of the model to pull.
        project: The project name.
        step: A specific step to pull from S3. If None, all steps will be pulled.
        s3_bucket: The S3 bucket to pull from. Defaults to the ``BACKUP_BUCKET``
            environment variable.
        prefix: The S3 prefix to pull from.
        verbose: When *True*, the output of the AWS CLI is streamed to the
            calling process; otherwise it is suppressed.
        delete: When *True*, delete local files that no longer exist in S3.
        art_path: The path to the ART directory.
        exclude: List of directories to exclude from sync.

    Returns:
        The local model directory path.
    """
    local_model_dir = get_output_dir_from_model_properties(
        project=project,
        name=model_name,
        art_path=art_path,
    )
    os.makedirs(local_model_dir, exist_ok=True)

    # Use the new checkpoint structure
    if step is not None:
        # Pull directly to the new checkpoint structure
        checkpoint_dir = get_step_checkpoint_dir(local_model_dir, step)
        os.makedirs(checkpoint_dir, exist_ok=True)
        local_dir = checkpoint_dir
    else:
        local_dir = local_model_dir

    s3_path = build_s3_path(
        model_name=model_name,
        project=project,
        step=step,
        s3_bucket=s3_bucket,
        prefix=prefix,
    )
    await ensure_bucket_exists(s3_bucket)

    if verbose:
        print(f"DEBUG: S3 sync from {s3_path} to {local_dir}")
    await s3_sync(s3_path, local_dir, verbose=verbose, delete=delete, exclude=exclude)
    if verbose:
        print(
            f"DEBUG: After sync, local_dir contents: "
            f"{os.listdir(local_dir) if os.path.exists(local_dir) else 'Does not exist'}"
        )
    return local_model_dir
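
# Hypothetical usage (names illustrative):
#     local_dir = await pull_model_from_s3(
#         "my-model", "my-project", step=42, exclude=["trajectories"]
#     )
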

async def push_model_to_s3(
    model_name: str,
    project: str,
    s3_bucket: str | None = None,
    prefix: str | None = None,
    verbose: bool = False,
    delete: bool = False,
    art_path: str | None = None,
) -> None:
    """Push a model to S3.

    Args:
        model_name: The name of the model to push.
        project: The project name.
        s3_bucket: The S3 bucket to push to. Defaults to the ``BACKUP_BUCKET``
            environment variable.
        prefix: The S3 prefix to push to.
        verbose: When *True*, the output of the AWS CLI is streamed to the
            calling process; otherwise it is suppressed.
        delete: When *True*, delete files in S3 that no longer exist locally.
        art_path: The path to the ART directory.

    Raises:
        FileNotFoundError: If the local model directory does not exist.
    """
    local_model_dir = get_output_dir_from_model_properties(
        project=project,
        name=model_name,
        art_path=art_path,
    )
    if not os.path.exists(local_model_dir):
        raise FileNotFoundError(
            f"Local model directory {local_model_dir} does not exist."
        )

    s3_path = build_s3_path(
        model_name=model_name,
        project=project,
        s3_bucket=s3_bucket,
        prefix=prefix,
    )
    await ensure_bucket_exists(s3_bucket)

    if verbose:
        print(f"DEBUG: S3 sync from {local_model_dir} to {s3_path}")
    await s3_sync(local_model_dir, s3_path, verbose=verbose, delete=delete)
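
# Hypothetical usage (names illustrative):
#     await push_model_to_s3("my-model", "my-project", delete=True)
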

async def archive_and_presign_step_url(
    model_name: str,
    project: str,
    step: int,
    s3_bucket: str | None = None,
    prefix: str | None = None,
    verbose: bool = False,
    delete: bool = False,
    art_path: str | None = None,
    checkpoint_path: str | None = None,
) -> str:
    """Zip a checkpoint step, upload the archive to S3, and return a presigned URL.

    Args:
        model_name: Name of the model.
        project: Project name.
        step: Step number.
        s3_bucket: S3 bucket to upload to.
        prefix: S3 prefix.
        verbose: Whether to print verbose output.
        delete: Passed through to :func:`s3_sync`; has no effect for a
            single-file upload.
        art_path: Path to ART directory (used if checkpoint_path not provided).
        checkpoint_path: Direct path to the checkpoint directory. If provided,
            uses this instead of constructing from art_path.

    Returns:
        A presigned URL, valid for one hour, for downloading the zipped step.
    """
    if checkpoint_path is None:
        model_output_dir = get_output_dir_from_model_properties(
            project=project,
            name=model_name,
            art_path=art_path,
        )
        checkpoint_path = get_step_checkpoint_dir(model_output_dir, step)
    if not os.path.exists(checkpoint_path):
        raise ValueError(f"Local step directory does not exist: {checkpoint_path}")

    s3_step_path = build_s3_zipped_step_path(
        model_name=model_name,
        project=project,
        step=step,
        s3_bucket=s3_bucket,
        prefix=prefix,
    )

    # Create a temporary directory for the zip file
    with tempfile.TemporaryDirectory() as temp_dir:
        # Create the zip archive
        archive_path = os.path.join(temp_dir, "model.zip")
        with zipfile.ZipFile(archive_path, "w", zipfile.ZIP_DEFLATED) as zipf:
            for root, _, files in os.walk(checkpoint_path):
                for file in files:
                    file_path = os.path.join(root, file)
                    # Add the file to the zip with a path relative to the checkpoint dir
                    arcname = os.path.relpath(file_path, checkpoint_path)
                    zipf.write(file_path, arcname)

        await ensure_bucket_exists(s3_bucket)
        await s3_sync(archive_path, s3_step_path, verbose=verbose, delete=delete)

    # Strip the s3:// scheme; `aws s3 presign` accepts the bucket/key form.
    s3_key = s3_step_path.removeprefix("s3://")

    # Generate a presigned URL with a 1 hour expiration
    cmd = ["aws", "s3", "presign", s3_key, "--expires-in", "3600"]
    process = await asyncio.create_subprocess_exec(
        *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
    )
    stdout, stderr = await process.communicate()
    if process.returncode != 0:
        raise RuntimeError(f"Failed to generate presigned URL: {stderr.decode()}")

    presigned_url = stdout.decode().strip()
    if verbose:
        print("presigned_url", presigned_url)
    return presigned_url
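
# Hypothetical usage (names illustrative):
#     url = await archive_and_presign_step_url("my-model", "my-project", step=42)
#     # `url` can then be shared for a one-hour download window.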