Skip to content

Commit e49bce0

Browse files
committed
style: replace non-ASCII em dashes with ASCII -- in comments and strings
1 parent 4e9c3ad commit e49bce0

File tree

1 file changed

+5
-5
lines changed

1 file changed

+5
-5
lines changed

src/apm_cli/core/script_runner.py

Lines changed: 5 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -111,7 +111,7 @@ def run_script(self, script_name: str, params: Dict[str, str]) -> bool:
111111
error_msg += f"Available scripts in apm.yml: {available}\n"
112112
error_msg += f"\nTo get started, create a prompt file first:\n"
113113
error_msg += f" echo '# My agent prompt' > {script_name}.prompt.md\n"
114-
error_msg += f"\nThen run again APM will auto-discover it.\n"
114+
error_msg += f"\nThen run again -- APM will auto-discover it.\n"
115115
error_msg += f"\nOr define a script explicitly in apm.yml:\n"
116116
error_msg += f" scripts:\n"
117117
error_msg += f" {script_name}: copilot {script_name}.prompt.md\n"
@@ -886,7 +886,7 @@ def _detect_installed_runtime(self) -> str:
886886
system-level stubs (e.g. GitHub CLI copilot extensions).
887887
888888
Priority:
889-
1. APM runtimes dir: copilot (codex excluded v0.116+ is
889+
1. APM runtimes dir: copilot (codex excluded -- v0.116+ is
890890
incompatible with GitHub Models' Chat Completions API)
891891
2. PATH: llm > copilot > codex (llm uses Chat Completions, works
892892
with GitHub Models even when codex dropped that API)
@@ -902,7 +902,7 @@ def _detect_installed_runtime(self) -> str:
902902
apm_runtimes = Path.home() / ".apm" / "runtimes"
903903

904904
# 1. Check APM-managed runtimes directory first (highest priority).
905-
# Only copilot is checked here codex installed via APM runtimes
905+
# Only copilot is checked here -- codex installed via APM runtimes
906906
# will be v0.116+ which dropped Chat Completions support and is
907907
# incompatible with GitHub Models.
908908
# llm is checked via PATH only (installed as a Python package).
@@ -918,7 +918,7 @@ def _detect_installed_runtime(self) -> str:
918918
if exe.stat().st_size > 0:
919919
return name
920920

921-
# 2. Fall back to PATH prefer llm (uses Chat Completions, works with
921+
# 2. Fall back to PATH -- prefer llm (uses Chat Completions, works with
922922
# GitHub Models even when codex has dropped that API format)
923923
if shutil.which("llm"):
924924
return "llm"
@@ -952,7 +952,7 @@ def _generate_runtime_command(self, runtime: str, prompt_file: Path) -> str:
952952
# Codex CLI with default sandbox and git repo check skip
953953
return f"codex -s workspace-write --skip-git-repo-check {prompt_file}"
954954
elif runtime == "llm":
955-
# llm CLI uses Chat Completions, compatible with GitHub Models
955+
# llm CLI -- uses Chat Completions, compatible with GitHub Models
956956
return f"llm -m github/gpt-4o {prompt_file}"
957957
else:
958958
raise ValueError(f"Unsupported runtime: {runtime}")

0 commit comments

Comments (0)