Fix URL detector truncating wrapped URLs arriving in separate PTY chunks
The PTY may deliver a long URL across multiple chunks with enough delay that the debounce fires between them, emitting a truncated URL. Fixed by: (1) stripping the trailing empty strings that `split` produces for a trailing `\n`, and (2) deferring emission when a URL candidate reaches the end of the buffer — a 500 ms confirmation timer waits for more data before emitting. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -4,6 +4,10 @@
|
||||
* The Linux PTY hard-wraps long lines with \r\n at the terminal column width,
|
||||
* which breaks xterm.js WebLinksAddon URL detection. This class reassembles
|
||||
* those wrapped URLs and fires a callback for ones >= 100 chars.
|
||||
*
|
||||
* Two-phase approach: when a URL candidate extends to the end of the buffer,
|
||||
* emission is deferred (the rest of the URL may arrive in the next PTY chunk).
|
||||
* A confirmation timer emits the pending URL if no further data arrives.
|
||||
*/
|
||||
|
||||
const ANSI_RE =
|
||||
@@ -11,6 +15,7 @@ const ANSI_RE =
|
||||
|
||||
const MAX_BUFFER = 8 * 1024; // 8 KB rolling buffer cap
|
||||
const DEBOUNCE_MS = 300;
|
||||
const CONFIRM_MS = 500; // extra wait when URL reaches end of buffer
|
||||
const MIN_URL_LENGTH = 100;
|
||||
|
||||
export type UrlCallback = (url: string) => void;
|
||||
@@ -19,7 +24,9 @@ export class UrlDetector {
|
||||
private decoder = new TextDecoder();
|
||||
private buffer = "";
|
||||
private timer: ReturnType<typeof setTimeout> | null = null;
|
||||
private confirmTimer: ReturnType<typeof setTimeout> | null = null;
|
||||
private lastEmitted = "";
|
||||
private pendingUrl: string | null = null;
|
||||
private callback: UrlCallback;
|
||||
|
||||
constructor(callback: UrlCallback) {
|
||||
@@ -35,8 +42,14 @@ export class UrlDetector {
|
||||
this.buffer = this.buffer.slice(-MAX_BUFFER);
|
||||
}
|
||||
|
||||
// Debounce — scan after 300 ms of silence
|
||||
// Cancel pending timers — new data arrived, rescan from scratch
|
||||
if (this.timer !== null) clearTimeout(this.timer);
|
||||
if (this.confirmTimer !== null) {
|
||||
clearTimeout(this.confirmTimer);
|
||||
this.confirmTimer = null;
|
||||
}
|
||||
|
||||
// Debounce — scan after 300 ms of silence
|
||||
this.timer = setTimeout(() => {
|
||||
this.timer = null;
|
||||
this.scan();
|
||||
@@ -47,26 +60,64 @@ export class UrlDetector {
|
||||
const clean = this.buffer.replace(ANSI_RE, "");
|
||||
const lines = clean.replace(/\r\n/g, "\n").replace(/\r/g, "\n").split("\n");
|
||||
|
||||
// Remove trailing empty elements (artifacts of trailing \n from split)
|
||||
while (lines.length > 0 && lines[lines.length - 1] === "") {
|
||||
lines.pop();
|
||||
}
|
||||
|
||||
if (lines.length === 0) return;
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const match = lines[i].match(/https?:\/\/[^\s'"]+/);
|
||||
if (!match) continue;
|
||||
|
||||
// Start with the URL fragment found on this line
|
||||
let url = match[0];
|
||||
let lastLineIndex = i;
|
||||
|
||||
// Concatenate subsequent continuation lines (non-empty, no spaces, no leading whitespace)
|
||||
for (let j = i + 1; j < lines.length; j++) {
|
||||
const next = lines[j];
|
||||
if (!next || next.startsWith(" ") || next.includes(" ")) break;
|
||||
url += next;
|
||||
lastLineIndex = j;
|
||||
i = j; // skip this line in the outer loop
|
||||
}
|
||||
|
||||
if (url.length >= MIN_URL_LENGTH && url !== this.lastEmitted) {
|
||||
if (url.length < MIN_URL_LENGTH) continue;
|
||||
|
||||
// If the URL reaches the last line of the buffer, the rest may still
|
||||
// be arriving in the next PTY chunk — defer emission.
|
||||
if (lastLineIndex >= lines.length - 1) {
|
||||
this.pendingUrl = url;
|
||||
this.confirmTimer = setTimeout(() => {
|
||||
this.confirmTimer = null;
|
||||
this.emitPending();
|
||||
}, CONFIRM_MS);
|
||||
return;
|
||||
}
|
||||
|
||||
// URL is clearly complete (more content follows it in the buffer)
|
||||
this.pendingUrl = null;
|
||||
if (url !== this.lastEmitted) {
|
||||
this.lastEmitted = url;
|
||||
this.callback(url);
|
||||
}
|
||||
}
|
||||
|
||||
// Scan finished without finding a URL reaching the buffer end.
|
||||
// If we had a pending URL from a previous scan, it's now confirmed complete.
|
||||
if (this.pendingUrl) {
|
||||
this.emitPending();
|
||||
}
|
||||
}
|
||||
|
||||
private emitPending(): void {
|
||||
if (this.pendingUrl && this.pendingUrl !== this.lastEmitted) {
|
||||
this.lastEmitted = this.pendingUrl;
|
||||
this.callback(this.pendingUrl);
|
||||
}
|
||||
this.pendingUrl = null;
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
@@ -74,5 +125,9 @@ export class UrlDetector {
|
||||
clearTimeout(this.timer);
|
||||
this.timer = null;
|
||||
}
|
||||
if (this.confirmTimer !== null) {
|
||||
clearTimeout(this.confirmTimer);
|
||||
this.confirmTimer = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user