fix(discord): preserve URLs when escaping markdown

- Replace sed-based esc() with awk version that skips content inside
  <URL> wrappers, preventing URL corruption from backslash escaping
- Reorder pipeline: wrap_urls | esc (wrap first, then escape; see the sketch below)
- Update comment: "partial" → "incomplete" for clarity
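
A minimal sketch of why the order matters, assuming wrap_urls wraps bare URLs
in <> as its comment in the helper file describes (the sample text and URL
below are hypothetical):

```sh
text='release notes at https://example.com/tag_1.2_(beta)'

# Old order: esc backslash-escapes _ ( ) inside the bare URL before it is
# wrapped, so Discord receives a corrupted link, e.g.
#   release notes at <https://example.com/tag\_1.2\_\(beta\)>
printf '%s' "$text" | esc | wrap_urls

# New order: the URL is wrapped first, and the awk-based esc copies
# everything between < and > verbatim, so the link survives:
#   release notes at <https://example.com/tag_1.2_(beta)>
printf '%s' "$text" | wrap_urls | esc
```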

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Alex Verkhovsky 2025-12-14 14:06:35 -07:00
parent ba7e672a46
commit 761a4a1f25
2 changed files with 23 additions and 10 deletions

View File

@@ -2,8 +2,21 @@
 # Discord notification helper functions
 # Escape markdown special chars and @mentions for safe Discord display
-# Bracket expression: ] must be first, then other chars. In POSIX bracket expr, \ is literal.
-esc() { sed -e 's/[][\*_()~`>]/\\&/g' -e 's/@/@ /g'; }
+# Skips content inside <URL> wrappers to preserve URLs intact
+esc() {
+  awk '{
+    result = ""; in_url = 0; n = length($0)
+    for (i = 1; i <= n; i++) {
+      c = substr($0, i, 1)
+      if (c == "<" && substr($0, i, 8) ~ /^<https?:/) in_url = 1
+      if (in_url) { result = result c; if (c == ">") in_url = 0 }
+      else if (c == "@") result = result "@ "
+      else if (index("[]\\*_()~`", c) > 0) result = result "\\" c
+      else result = result c
+    }
+    print result
+  }'
+}
 # Truncate to $1 chars (or 80 if wall-of-text with <3 spaces)
 trunc() {
@@ -14,7 +27,7 @@ trunc() {
   printf '%s' "$txt"
 }
-# Remove partial URL at end of truncated text (partial URLs are useless)
+# Remove incomplete URL at end of truncated text (incomplete URLs are useless)
 strip_trailing_url() { sed -E 's~<?https?://[^[:space:]]*$~~'; }
 # Wrap URLs in <> to suppress Discord embeds (keeps links clickable)
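
For reference, a quick check of the new esc() once a URL is already wrapped;
the sample input is hypothetical and assumes the helper file above has been
sourced so esc is defined:

```sh
# Markdown chars and @mentions outside the wrapper are escaped,
# while the <...>-wrapped URL passes through untouched.
printf '%s\n' 'ping @alice: docs at <https://example.com/a_b_(c)> *soon*' | esc
# -> ping @ alice: docs at <https://example.com/a_b_(c)> \*soon\*
```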

View File

@@ -57,7 +57,7 @@ jobs:
 if [ -n "$PR_BODY" ] && [ ${#PR_BODY} -gt $MAX_BODY ]; then
   BODY=$(printf '%s' "$BODY" | strip_trailing_url)
 fi
-BODY=$(printf '%s' "$BODY" | esc | wrap_urls)
+BODY=$(printf '%s' "$BODY" | wrap_urls | esc)
 [ -n "$PR_BODY" ] && [ ${#PR_BODY} -gt $MAX_BODY ] && BODY="${BODY}..."
 [ -n "$BODY" ] && BODY=" · $BODY"
 USER=$(printf '%s' "$PR_USER" | esc)
@@ -99,7 +99,7 @@ jobs:
 if [ -n "$ISSUE_BODY" ] && [ ${#ISSUE_BODY} -gt $MAX_BODY ]; then
   BODY=$(printf '%s' "$BODY" | strip_trailing_url)
 fi
-BODY=$(printf '%s' "$BODY" | esc | wrap_urls)
+BODY=$(printf '%s' "$BODY" | wrap_urls | esc)
 [ -n "$ISSUE_BODY" ] && [ ${#ISSUE_BODY} -gt $MAX_BODY ] && BODY="${BODY}..."
 [ -n "$BODY" ] && BODY=" · $BODY"
 USER=$(printf '%s' "$USER" | esc)
@@ -138,7 +138,7 @@ jobs:
 if [ ${#COMMENT_BODY} -gt $MAX_BODY ]; then
   BODY=$(printf '%s' "$BODY" | strip_trailing_url)
 fi
-BODY=$(printf '%s' "$BODY" | esc | wrap_urls)
+BODY=$(printf '%s' "$BODY" | wrap_urls | esc)
 [ ${#COMMENT_BODY} -gt $MAX_BODY ] && BODY="${BODY}..."
 USER=$(printf '%s' "$COMMENT_USER" | esc)
@@ -178,7 +178,7 @@ jobs:
 if [ -n "$REVIEW_BODY" ] && [ ${#REVIEW_BODY} -gt $MAX_BODY ]; then
   BODY=$(printf '%s' "$BODY" | strip_trailing_url)
 fi
-BODY=$(printf '%s' "$BODY" | esc | wrap_urls)
+BODY=$(printf '%s' "$BODY" | wrap_urls | esc)
 [ -n "$REVIEW_BODY" ] && [ ${#REVIEW_BODY} -gt $MAX_BODY ] && BODY="${BODY}..."
 [ -n "$BODY" ] && BODY=": $BODY"
 USER=$(printf '%s' "$REVIEW_USER" | esc)
@@ -214,7 +214,7 @@ jobs:
 if [ ${#COMMENT_BODY} -gt $MAX_BODY ]; then
   BODY=$(printf '%s' "$BODY" | strip_trailing_url)
 fi
-BODY=$(printf '%s' "$BODY" | esc | wrap_urls)
+BODY=$(printf '%s' "$BODY" | wrap_urls | esc)
 [ ${#COMMENT_BODY} -gt $MAX_BODY ] && BODY="${BODY}..."
 USER=$(printf '%s' "$COMMENT_USER" | esc)
@@ -248,7 +248,7 @@ jobs:
 if [ -n "$RELEASE_BODY" ] && [ ${#RELEASE_BODY} -gt $MAX_BODY ]; then
   BODY=$(printf '%s' "$BODY" | strip_trailing_url)
 fi
-BODY=$(printf '%s' "$BODY" | esc | wrap_urls)
+BODY=$(printf '%s' "$BODY" | wrap_urls | esc)
 [ -n "$RELEASE_BODY" ] && [ ${#RELEASE_BODY} -gt $MAX_BODY ] && BODY="${BODY}..."
 [ -n "$BODY" ] && BODY=" · $BODY"
 TAG_ESC=$(printf '%s' "$TAG" | esc)
@@ -299,7 +299,7 @@ jobs:
 run: |
   set -o pipefail
   [ -z "$WEBHOOK" ] && exit 0
-  esc() { sed -e 's/[][\*_()~`>]/\\&/g' -e 's/@/@ /g'; }
+  esc() { sed -e 's/[][\*_()~`]/\\&/g' -e 's/@/@ /g'; }
   trunc() { tr '\n\r' ' ' | cut -c1-"$1"; }
   REF_TRUNC=$(printf '%s' "$REF" | trunc 100)
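
For comparison, the simplified esc() in this last job no longer escapes `>`
(it was dropped from the bracket expression) but still escapes the other
markdown characters and pads @mentions; a hypothetical sample run:

```sh
printf '%s\n' 'deploy *v1.2* by @bob -> prod' | esc
# -> deploy \*v1.2\* by @ bob -> prod   (the > is now left as-is)
```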