2026-01-11 21:22:49 +08:00
|
|
|
/**
|
|
|
|
|
* @license
|
|
|
|
|
* Copyright 2025 Google LLC
|
|
|
|
|
* SPDX-License-Identifier: Apache-2.0
|
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
import http from 'node:http';
|
|
|
|
|
import https from 'node:https';
|
|
|
|
|
import zlib from 'node:zlib';
|
|
|
|
|
import fs from 'node:fs';
|
|
|
|
|
import path from 'node:path';
|
|
|
|
|
import { EventEmitter } from 'node:events';
|
2026-02-10 08:54:23 -08:00
|
|
|
import {
|
|
|
|
|
CoreEvent,
|
|
|
|
|
coreEvents,
|
|
|
|
|
debugLogger,
|
|
|
|
|
type ConsoleLogPayload,
|
|
|
|
|
type Config,
|
|
|
|
|
} from '@google/gemini-cli-core';
|
2026-02-06 16:20:22 -08:00
|
|
|
import WebSocket from 'ws';
|
2026-01-11 21:22:49 +08:00
|
|
|
|
|
|
|
|
const ACTIVITY_ID_HEADER = 'x-activity-request-id';
|
2026-02-06 16:20:22 -08:00
|
|
|
const MAX_BUFFER_SIZE = 100;
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
/** Type guard: Array.isArray doesn't narrow readonly arrays in TS 5.8 */
|
|
|
|
|
function isHeaderRecord(
|
|
|
|
|
h: http.OutgoingHttpHeaders | readonly string[],
|
|
|
|
|
): h is http.OutgoingHttpHeaders {
|
|
|
|
|
return !Array.isArray(h);
|
|
|
|
|
}
|
|
|
|
|
|
2026-01-11 21:22:49 +08:00
|
|
|
export interface NetworkLog {
|
|
|
|
|
id: string;
|
|
|
|
|
timestamp: number;
|
|
|
|
|
method: string;
|
|
|
|
|
url: string;
|
|
|
|
|
headers: Record<string, string>;
|
|
|
|
|
body?: string;
|
|
|
|
|
pending?: boolean;
|
2026-02-06 16:20:22 -08:00
|
|
|
chunk?: {
|
|
|
|
|
index: number;
|
|
|
|
|
data: string;
|
|
|
|
|
timestamp: number;
|
|
|
|
|
};
|
2026-01-11 21:22:49 +08:00
|
|
|
response?: {
|
|
|
|
|
status: number;
|
|
|
|
|
headers: Record<string, string>;
|
|
|
|
|
body?: string;
|
|
|
|
|
durationMs: number;
|
|
|
|
|
};
|
|
|
|
|
error?: string;
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
/** Partial update to an existing network log. */
|
|
|
|
|
export type PartialNetworkLog = { id: string } & Partial<NetworkLog>;
|
|
|
|
|
|
2026-01-11 21:22:49 +08:00
|
|
|
/**
|
|
|
|
|
* Capture utility for session activities (network and console).
|
|
|
|
|
* Provides a stream of events that can be persisted for analysis or inspection.
|
|
|
|
|
*/
|
|
|
|
|
export class ActivityLogger extends EventEmitter {
|
|
|
|
|
private static instance: ActivityLogger;
|
|
|
|
|
private isInterceptionEnabled = false;
|
|
|
|
|
private requestStartTimes = new Map<string, number>();
|
2026-02-06 16:20:22 -08:00
|
|
|
private networkLoggingEnabled = false;
|
2026-01-11 21:22:49 +08:00
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
private networkBufferMap = new Map<
|
|
|
|
|
string,
|
|
|
|
|
Array<NetworkLog | PartialNetworkLog>
|
|
|
|
|
>();
|
|
|
|
|
private networkBufferIds: string[] = [];
|
|
|
|
|
private consoleBuffer: Array<ConsoleLogPayload & { timestamp: number }> = [];
|
|
|
|
|
private readonly bufferLimit = 10;
|
|
|
|
|
|
2026-01-11 21:22:49 +08:00
|
|
|
static getInstance(): ActivityLogger {
|
|
|
|
|
if (!ActivityLogger.instance) {
|
|
|
|
|
ActivityLogger.instance = new ActivityLogger();
|
|
|
|
|
}
|
|
|
|
|
return ActivityLogger.instance;
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
enableNetworkLogging() {
|
|
|
|
|
this.networkLoggingEnabled = true;
|
|
|
|
|
this.emit('network-logging-enabled');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
disableNetworkLogging() {
|
|
|
|
|
this.networkLoggingEnabled = false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
isNetworkLoggingEnabled(): boolean {
|
|
|
|
|
return this.networkLoggingEnabled;
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
/**
|
|
|
|
|
* Atomically returns and clears all buffered logs.
|
|
|
|
|
* Prevents data loss from events emitted between get and clear.
|
|
|
|
|
*/
|
|
|
|
|
drainBufferedLogs(): {
|
|
|
|
|
network: Array<NetworkLog | PartialNetworkLog>;
|
|
|
|
|
console: Array<ConsoleLogPayload & { timestamp: number }>;
|
|
|
|
|
} {
|
|
|
|
|
const network: Array<NetworkLog | PartialNetworkLog> = [];
|
|
|
|
|
for (const id of this.networkBufferIds) {
|
|
|
|
|
const events = this.networkBufferMap.get(id);
|
|
|
|
|
if (events) network.push(...events);
|
|
|
|
|
}
|
|
|
|
|
const console = [...this.consoleBuffer];
|
|
|
|
|
this.networkBufferMap.clear();
|
|
|
|
|
this.networkBufferIds = [];
|
|
|
|
|
this.consoleBuffer = [];
|
|
|
|
|
return { network, console };
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
getBufferedLogs(): {
|
|
|
|
|
network: Array<NetworkLog | PartialNetworkLog>;
|
|
|
|
|
console: Array<ConsoleLogPayload & { timestamp: number }>;
|
|
|
|
|
} {
|
|
|
|
|
const network: Array<NetworkLog | PartialNetworkLog> = [];
|
|
|
|
|
for (const id of this.networkBufferIds) {
|
|
|
|
|
const events = this.networkBufferMap.get(id);
|
|
|
|
|
if (events) network.push(...events);
|
|
|
|
|
}
|
|
|
|
|
return {
|
|
|
|
|
network,
|
|
|
|
|
console: [...this.consoleBuffer],
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
clearBufferedLogs(): void {
|
|
|
|
|
this.networkBufferMap.clear();
|
|
|
|
|
this.networkBufferIds = [];
|
|
|
|
|
this.consoleBuffer = [];
|
|
|
|
|
}
|
|
|
|
|
|
2026-01-11 21:22:49 +08:00
|
|
|
private stringifyHeaders(headers: unknown): Record<string, string> {
|
|
|
|
|
const result: Record<string, string> = {};
|
|
|
|
|
if (!headers) return result;
|
|
|
|
|
|
|
|
|
|
if (headers instanceof Headers) {
|
|
|
|
|
headers.forEach((v, k) => {
|
|
|
|
|
result[k.toLowerCase()] = v;
|
|
|
|
|
});
|
|
|
|
|
} else if (typeof headers === 'object' && headers !== null) {
|
|
|
|
|
for (const [key, val] of Object.entries(headers)) {
|
|
|
|
|
result[key.toLowerCase()] = Array.isArray(val)
|
|
|
|
|
? val.join(', ')
|
|
|
|
|
: String(val);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return result;
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
private sanitizeNetworkLog(
|
|
|
|
|
log: NetworkLog | PartialNetworkLog,
|
|
|
|
|
): NetworkLog | PartialNetworkLog {
|
2026-01-11 21:22:49 +08:00
|
|
|
if (!log || typeof log !== 'object') return log;
|
|
|
|
|
|
|
|
|
|
const sanitized = { ...log };
|
|
|
|
|
|
|
|
|
|
// Sanitize request headers
|
2026-02-10 08:54:23 -08:00
|
|
|
if ('headers' in sanitized && sanitized.headers) {
|
2026-01-11 21:22:49 +08:00
|
|
|
const headers = { ...sanitized.headers };
|
|
|
|
|
for (const key of Object.keys(headers)) {
|
|
|
|
|
if (
|
|
|
|
|
['authorization', 'cookie', 'x-goog-api-key'].includes(
|
|
|
|
|
key.toLowerCase(),
|
|
|
|
|
)
|
|
|
|
|
) {
|
|
|
|
|
headers[key] = '[REDACTED]';
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
sanitized.headers = headers;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Sanitize response headers
|
2026-02-10 08:54:23 -08:00
|
|
|
if ('response' in sanitized && sanitized.response?.headers) {
|
2026-01-11 21:22:49 +08:00
|
|
|
const resHeaders = { ...sanitized.response.headers };
|
|
|
|
|
for (const key of Object.keys(resHeaders)) {
|
|
|
|
|
if (['set-cookie'].includes(key.toLowerCase())) {
|
|
|
|
|
resHeaders[key] = '[REDACTED]';
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
sanitized.response = { ...sanitized.response, headers: resHeaders };
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return sanitized;
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
/** @internal Emit a network event — public for testing only. */
|
|
|
|
|
emitNetworkEvent(payload: NetworkLog | PartialNetworkLog) {
|
|
|
|
|
this.safeEmitNetwork(payload);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private safeEmitNetwork(payload: NetworkLog | PartialNetworkLog) {
|
|
|
|
|
const sanitized = this.sanitizeNetworkLog(payload);
|
|
|
|
|
const id = sanitized.id;
|
|
|
|
|
|
|
|
|
|
if (!this.networkBufferMap.has(id)) {
|
|
|
|
|
this.networkBufferIds.push(id);
|
|
|
|
|
this.networkBufferMap.set(id, []);
|
|
|
|
|
// Evict oldest request group if over limit
|
|
|
|
|
if (this.networkBufferIds.length > this.bufferLimit) {
|
|
|
|
|
const evictId = this.networkBufferIds.shift()!;
|
|
|
|
|
this.networkBufferMap.delete(evictId);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
this.networkBufferMap.get(id)!.push(sanitized);
|
|
|
|
|
|
|
|
|
|
this.emit('network', sanitized);
|
2026-01-11 21:22:49 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
enable() {
|
|
|
|
|
if (this.isInterceptionEnabled) return;
|
|
|
|
|
this.isInterceptionEnabled = true;
|
|
|
|
|
|
|
|
|
|
this.patchGlobalFetch();
|
|
|
|
|
this.patchNodeHttp();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private patchGlobalFetch() {
|
|
|
|
|
if (!global.fetch) return;
|
|
|
|
|
const originalFetch = global.fetch;
|
|
|
|
|
|
|
|
|
|
global.fetch = async (input: RequestInfo | URL, init?: RequestInit) => {
|
|
|
|
|
const url =
|
|
|
|
|
typeof input === 'string'
|
|
|
|
|
? input
|
|
|
|
|
: input instanceof URL
|
|
|
|
|
? input.toString()
|
2026-02-10 08:54:23 -08:00
|
|
|
: input.url;
|
2026-02-06 16:20:22 -08:00
|
|
|
if (url.includes('127.0.0.1') || url.includes('localhost'))
|
|
|
|
|
return originalFetch(input, init);
|
2026-01-11 21:22:49 +08:00
|
|
|
|
|
|
|
|
const id = Math.random().toString(36).substring(7);
|
|
|
|
|
const method = (init?.method || 'GET').toUpperCase();
|
|
|
|
|
|
|
|
|
|
const newInit = { ...init };
|
|
|
|
|
const headers = new Headers(init?.headers || {});
|
|
|
|
|
headers.set(ACTIVITY_ID_HEADER, id);
|
|
|
|
|
newInit.headers = headers;
|
|
|
|
|
|
|
|
|
|
let reqBody = '';
|
|
|
|
|
if (init?.body) {
|
|
|
|
|
if (typeof init.body === 'string') reqBody = init.body;
|
|
|
|
|
else if (init.body instanceof URLSearchParams)
|
|
|
|
|
reqBody = init.body.toString();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
this.requestStartTimes.set(id, Date.now());
|
|
|
|
|
this.safeEmitNetwork({
|
|
|
|
|
id,
|
|
|
|
|
timestamp: Date.now(),
|
|
|
|
|
method,
|
|
|
|
|
url,
|
|
|
|
|
headers: this.stringifyHeaders(newInit.headers),
|
|
|
|
|
body: reqBody,
|
|
|
|
|
pending: true,
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
const response = await originalFetch(input, newInit);
|
|
|
|
|
const clonedRes = response.clone();
|
|
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
// Stream chunks if body is available
|
|
|
|
|
if (clonedRes.body) {
|
|
|
|
|
const reader = clonedRes.body.getReader();
|
|
|
|
|
const decoder = new TextDecoder();
|
|
|
|
|
const chunks: string[] = [];
|
|
|
|
|
let chunkIndex = 0;
|
|
|
|
|
|
|
|
|
|
const readStream = async () => {
|
|
|
|
|
try {
|
|
|
|
|
while (true) {
|
|
|
|
|
const { done, value } = await reader.read();
|
|
|
|
|
if (done) break;
|
|
|
|
|
|
|
|
|
|
const chunkData = decoder.decode(value, { stream: true });
|
|
|
|
|
chunks.push(chunkData);
|
|
|
|
|
|
|
|
|
|
// Emit chunk update
|
|
|
|
|
this.safeEmitNetwork({
|
|
|
|
|
id,
|
|
|
|
|
pending: true,
|
|
|
|
|
chunk: {
|
|
|
|
|
index: chunkIndex++,
|
|
|
|
|
data: chunkData,
|
|
|
|
|
timestamp: Date.now(),
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Final update with complete response
|
|
|
|
|
const startTime = this.requestStartTimes.get(id);
|
|
|
|
|
const durationMs = startTime ? Date.now() - startTime : 0;
|
|
|
|
|
this.requestStartTimes.delete(id);
|
|
|
|
|
|
|
|
|
|
this.safeEmitNetwork({
|
|
|
|
|
id,
|
|
|
|
|
pending: false,
|
|
|
|
|
response: {
|
|
|
|
|
status: response.status,
|
|
|
|
|
headers: this.stringifyHeaders(response.headers),
|
|
|
|
|
body: chunks.join(''),
|
|
|
|
|
durationMs,
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
} catch (err) {
|
|
|
|
|
const message = err instanceof Error ? err.message : String(err);
|
|
|
|
|
this.safeEmitNetwork({
|
|
|
|
|
id,
|
|
|
|
|
pending: false,
|
|
|
|
|
error: `Failed to read response body: ${message}`,
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
};
|
2026-01-11 21:22:49 +08:00
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
void readStream();
|
|
|
|
|
} else {
|
|
|
|
|
// Fallback for responses without body stream
|
|
|
|
|
clonedRes
|
|
|
|
|
.text()
|
|
|
|
|
.then((text) => {
|
|
|
|
|
const startTime = this.requestStartTimes.get(id);
|
|
|
|
|
const durationMs = startTime ? Date.now() - startTime : 0;
|
|
|
|
|
this.requestStartTimes.delete(id);
|
|
|
|
|
|
|
|
|
|
this.safeEmitNetwork({
|
|
|
|
|
id,
|
|
|
|
|
pending: false,
|
|
|
|
|
response: {
|
|
|
|
|
status: response.status,
|
|
|
|
|
headers: this.stringifyHeaders(response.headers),
|
|
|
|
|
body: text,
|
|
|
|
|
durationMs,
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
})
|
|
|
|
|
.catch((err) => {
|
|
|
|
|
const message = err instanceof Error ? err.message : String(err);
|
|
|
|
|
this.safeEmitNetwork({
|
|
|
|
|
id,
|
|
|
|
|
pending: false,
|
|
|
|
|
error: `Failed to read response body: ${message}`,
|
|
|
|
|
});
|
2026-01-11 21:22:49 +08:00
|
|
|
});
|
2026-02-06 16:20:22 -08:00
|
|
|
}
|
2026-01-11 21:22:49 +08:00
|
|
|
|
|
|
|
|
return response;
|
|
|
|
|
} catch (err: unknown) {
|
|
|
|
|
this.requestStartTimes.delete(id);
|
|
|
|
|
const message = err instanceof Error ? err.message : String(err);
|
|
|
|
|
this.safeEmitNetwork({ id, pending: false, error: message });
|
|
|
|
|
throw err;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private patchNodeHttp() {
|
2026-02-10 08:54:23 -08:00
|
|
|
// eslint-disable-next-line @typescript-eslint/no-this-alias
|
2026-01-11 21:22:49 +08:00
|
|
|
const self = this;
|
|
|
|
|
const originalRequest = http.request;
|
|
|
|
|
const originalHttpsRequest = https.request;
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
const wrapRequest = (
|
|
|
|
|
originalFn: typeof http.request,
|
|
|
|
|
args: unknown[],
|
|
|
|
|
protocol: string,
|
|
|
|
|
) => {
|
|
|
|
|
const firstArg = args[0];
|
|
|
|
|
let options: http.RequestOptions | string | URL;
|
|
|
|
|
if (typeof firstArg === 'string') {
|
|
|
|
|
options = firstArg;
|
|
|
|
|
} else if (firstArg instanceof URL) {
|
|
|
|
|
options = firstArg;
|
|
|
|
|
} else {
|
|
|
|
|
options = (firstArg ?? {}) as http.RequestOptions;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let url = '';
|
|
|
|
|
if (typeof options === 'string') {
|
|
|
|
|
url = options;
|
|
|
|
|
} else if (options instanceof URL) {
|
|
|
|
|
url = options.href;
|
|
|
|
|
} else {
|
|
|
|
|
// Some callers pass URL-like objects that include href
|
|
|
|
|
const href =
|
|
|
|
|
'href' in options && typeof options.href === 'string'
|
|
|
|
|
? options.href
|
|
|
|
|
: '';
|
|
|
|
|
url =
|
|
|
|
|
href ||
|
|
|
|
|
`${protocol}//${options.hostname || options.host || 'localhost'}${options.path || '/'}`;
|
|
|
|
|
}
|
2026-01-11 21:22:49 +08:00
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
if (url.includes('127.0.0.1') || url.includes('localhost'))
|
2026-02-10 08:54:23 -08:00
|
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-type-assertion
|
|
|
|
|
return originalFn.apply(http, args as any);
|
|
|
|
|
|
|
|
|
|
const rawHeaders =
|
|
|
|
|
typeof options === 'object' &&
|
|
|
|
|
options !== null &&
|
|
|
|
|
!(options instanceof URL)
|
|
|
|
|
? options.headers
|
|
|
|
|
: undefined;
|
|
|
|
|
let headers: http.OutgoingHttpHeaders = {};
|
|
|
|
|
if (rawHeaders && isHeaderRecord(rawHeaders)) {
|
|
|
|
|
headers = rawHeaders;
|
|
|
|
|
}
|
2026-01-11 21:22:49 +08:00
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
if (headers[ACTIVITY_ID_HEADER]) {
|
2026-01-11 21:22:49 +08:00
|
|
|
delete headers[ACTIVITY_ID_HEADER];
|
2026-02-10 08:54:23 -08:00
|
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-type-assertion
|
|
|
|
|
return originalFn.apply(http, args as any);
|
2026-01-11 21:22:49 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const id = Math.random().toString(36).substring(7);
|
2026-02-10 08:54:23 -08:00
|
|
|
this.requestStartTimes.set(id, Date.now());
|
|
|
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-type-assertion
|
|
|
|
|
const req = originalFn.apply(http, args as any);
|
2026-01-11 21:22:49 +08:00
|
|
|
const requestChunks: Buffer[] = [];
|
|
|
|
|
|
|
|
|
|
const oldWrite = req.write;
|
|
|
|
|
const oldEnd = req.end;
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
req.write = function (chunk: unknown, ...etc: unknown[]) {
|
2026-01-11 21:22:49 +08:00
|
|
|
if (chunk) {
|
|
|
|
|
const encoding =
|
2026-02-10 00:10:15 +00:00
|
|
|
// eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion
|
2026-01-11 21:22:49 +08:00
|
|
|
typeof etc[0] === 'string' ? (etc[0] as BufferEncoding) : undefined;
|
|
|
|
|
requestChunks.push(
|
2026-02-10 08:54:23 -08:00
|
|
|
Buffer.isBuffer(chunk)
|
|
|
|
|
? chunk
|
|
|
|
|
: typeof chunk === 'string'
|
|
|
|
|
? Buffer.from(chunk, encoding)
|
|
|
|
|
: Buffer.from(
|
|
|
|
|
chunk instanceof Uint8Array ? chunk : String(chunk),
|
|
|
|
|
),
|
2026-01-11 21:22:49 +08:00
|
|
|
);
|
|
|
|
|
}
|
2026-02-10 08:54:23 -08:00
|
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-type-assertion
|
|
|
|
|
return oldWrite.apply(this, [chunk, ...etc] as any);
|
2026-01-11 21:22:49 +08:00
|
|
|
};
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
req.end = function (
|
|
|
|
|
this: http.ClientRequest,
|
|
|
|
|
chunk: unknown,
|
|
|
|
|
...etc: unknown[]
|
|
|
|
|
) {
|
2026-01-11 21:22:49 +08:00
|
|
|
if (chunk && typeof chunk !== 'function') {
|
|
|
|
|
const encoding =
|
2026-02-10 00:10:15 +00:00
|
|
|
// eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion
|
2026-01-11 21:22:49 +08:00
|
|
|
typeof etc[0] === 'string' ? (etc[0] as BufferEncoding) : undefined;
|
|
|
|
|
requestChunks.push(
|
2026-02-10 08:54:23 -08:00
|
|
|
Buffer.isBuffer(chunk)
|
|
|
|
|
? chunk
|
|
|
|
|
: typeof chunk === 'string'
|
|
|
|
|
? Buffer.from(chunk, encoding)
|
|
|
|
|
: Buffer.from(
|
|
|
|
|
chunk instanceof Uint8Array ? chunk : String(chunk),
|
|
|
|
|
),
|
2026-01-11 21:22:49 +08:00
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
const body = Buffer.concat(requestChunks).toString('utf8');
|
|
|
|
|
|
|
|
|
|
self.safeEmitNetwork({
|
|
|
|
|
id,
|
|
|
|
|
timestamp: Date.now(),
|
|
|
|
|
method: req.method || 'GET',
|
|
|
|
|
url,
|
|
|
|
|
headers: self.stringifyHeaders(req.getHeaders()),
|
|
|
|
|
body,
|
|
|
|
|
pending: true,
|
|
|
|
|
});
|
2026-02-10 08:54:23 -08:00
|
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-type-assertion
|
|
|
|
|
return (oldEnd as any).apply(this, [chunk, ...etc]);
|
2026-01-11 21:22:49 +08:00
|
|
|
};
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
req.on('response', (res: http.IncomingMessage) => {
|
2026-01-11 21:22:49 +08:00
|
|
|
const responseChunks: Buffer[] = [];
|
2026-02-06 16:20:22 -08:00
|
|
|
let chunkIndex = 0;
|
|
|
|
|
|
|
|
|
|
res.on('data', (chunk: Buffer) => {
|
|
|
|
|
const chunkBuffer = Buffer.from(chunk);
|
|
|
|
|
responseChunks.push(chunkBuffer);
|
|
|
|
|
|
|
|
|
|
// Emit chunk update for streaming
|
|
|
|
|
self.safeEmitNetwork({
|
|
|
|
|
id,
|
|
|
|
|
pending: true,
|
|
|
|
|
chunk: {
|
|
|
|
|
index: chunkIndex++,
|
|
|
|
|
data: chunkBuffer.toString('utf8'),
|
|
|
|
|
timestamp: Date.now(),
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
2026-01-11 21:22:49 +08:00
|
|
|
res.on('end', () => {
|
|
|
|
|
const buffer = Buffer.concat(responseChunks);
|
|
|
|
|
const encoding = res.headers['content-encoding'];
|
|
|
|
|
|
|
|
|
|
const processBuffer = (finalBuffer: Buffer) => {
|
|
|
|
|
const resBody = finalBuffer.toString('utf8');
|
|
|
|
|
const startTime = self.requestStartTimes.get(id);
|
|
|
|
|
const durationMs = startTime ? Date.now() - startTime : 0;
|
|
|
|
|
self.requestStartTimes.delete(id);
|
|
|
|
|
|
|
|
|
|
self.safeEmitNetwork({
|
|
|
|
|
id,
|
|
|
|
|
pending: false,
|
|
|
|
|
response: {
|
2026-02-10 08:54:23 -08:00
|
|
|
status: res.statusCode || 0,
|
2026-01-11 21:22:49 +08:00
|
|
|
headers: self.stringifyHeaders(res.headers),
|
|
|
|
|
body: resBody,
|
|
|
|
|
durationMs,
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
if (encoding === 'gzip') {
|
|
|
|
|
zlib.gunzip(buffer, (err, decompressed) => {
|
|
|
|
|
processBuffer(err ? buffer : decompressed);
|
|
|
|
|
});
|
|
|
|
|
} else if (encoding === 'deflate') {
|
|
|
|
|
zlib.inflate(buffer, (err, decompressed) => {
|
|
|
|
|
processBuffer(err ? buffer : decompressed);
|
|
|
|
|
});
|
|
|
|
|
} else {
|
|
|
|
|
processBuffer(buffer);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
req.on('error', (err: Error) => {
|
2026-01-11 21:22:49 +08:00
|
|
|
self.requestStartTimes.delete(id);
|
2026-02-10 08:54:23 -08:00
|
|
|
const message = err.message;
|
|
|
|
|
self.safeEmitNetwork({
|
|
|
|
|
id,
|
|
|
|
|
pending: false,
|
|
|
|
|
error: message,
|
|
|
|
|
});
|
2026-01-11 21:22:49 +08:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
return req;
|
|
|
|
|
};
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-type-assertion
|
|
|
|
|
(http as any).request = (...args: unknown[]) =>
|
2026-01-11 21:22:49 +08:00
|
|
|
wrapRequest(originalRequest, args, 'http:');
|
2026-02-10 08:54:23 -08:00
|
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-type-assertion
|
|
|
|
|
(https as any).request = (...args: unknown[]) =>
|
|
|
|
|
wrapRequest(originalHttpsRequest as typeof http.request, args, 'https:');
|
2026-01-11 21:22:49 +08:00
|
|
|
}
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
logConsole(payload: ConsoleLogPayload) {
|
|
|
|
|
const enriched = { ...payload, timestamp: Date.now() };
|
|
|
|
|
this.consoleBuffer.push(enriched);
|
|
|
|
|
if (this.consoleBuffer.length > this.bufferLimit) {
|
|
|
|
|
this.consoleBuffer.shift();
|
|
|
|
|
}
|
|
|
|
|
this.emit('console', enriched);
|
2026-01-11 21:22:49 +08:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2026-02-06 16:20:22 -08:00
|
|
|
* Setup file-based logging to JSONL
|
2026-01-11 21:22:49 +08:00
|
|
|
*/
|
2026-02-06 16:20:22 -08:00
|
|
|
function setupFileLogging(
|
|
|
|
|
capture: ActivityLogger,
|
|
|
|
|
config: Config,
|
|
|
|
|
customPath?: string,
|
|
|
|
|
) {
|
|
|
|
|
const logFile =
|
|
|
|
|
customPath ||
|
|
|
|
|
(config.storage
|
|
|
|
|
? path.join(
|
|
|
|
|
config.storage.getProjectTempLogsDir(),
|
|
|
|
|
`session-${config.getSessionId()}.jsonl`,
|
|
|
|
|
)
|
|
|
|
|
: null);
|
|
|
|
|
|
|
|
|
|
if (!logFile) return;
|
|
|
|
|
|
|
|
|
|
const logsDir = path.dirname(logFile);
|
|
|
|
|
if (!fs.existsSync(logsDir)) {
|
|
|
|
|
fs.mkdirSync(logsDir, { recursive: true });
|
|
|
|
|
}
|
2026-01-11 21:22:49 +08:00
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
const writeToLog = (type: 'console' | 'network', payload: unknown) => {
|
|
|
|
|
try {
|
|
|
|
|
const entry =
|
|
|
|
|
JSON.stringify({
|
|
|
|
|
type,
|
|
|
|
|
payload,
|
|
|
|
|
sessionId: config.getSessionId(),
|
|
|
|
|
timestamp: Date.now(),
|
|
|
|
|
}) + '\n';
|
|
|
|
|
|
|
|
|
|
fs.promises.appendFile(logFile, entry).catch((err) => {
|
|
|
|
|
debugLogger.error('Failed to write to activity log:', err);
|
|
|
|
|
});
|
|
|
|
|
} catch (err) {
|
|
|
|
|
debugLogger.error('Failed to prepare activity log entry:', err);
|
2026-01-11 21:22:49 +08:00
|
|
|
}
|
2026-02-06 16:20:22 -08:00
|
|
|
};
|
2026-01-11 21:22:49 +08:00
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
capture.on('console', (payload) => writeToLog('console', payload));
|
|
|
|
|
capture.on('network', (payload) => writeToLog('network', payload));
|
|
|
|
|
}
|
2026-01-28 09:02:41 -08:00
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
/**
|
|
|
|
|
* Setup network-based logging via WebSocket
|
|
|
|
|
*/
|
|
|
|
|
function setupNetworkLogging(
|
|
|
|
|
capture: ActivityLogger,
|
|
|
|
|
host: string,
|
|
|
|
|
port: number,
|
|
|
|
|
config: Config,
|
2026-02-09 14:03:10 -08:00
|
|
|
onReconnectFailed?: () => void,
|
2026-02-06 16:20:22 -08:00
|
|
|
) {
|
2026-02-10 08:54:23 -08:00
|
|
|
const transportBuffer: object[] = [];
|
2026-02-06 16:20:22 -08:00
|
|
|
let ws: WebSocket | null = null;
|
|
|
|
|
let reconnectTimer: NodeJS.Timeout | null = null;
|
|
|
|
|
let sessionId: string | null = null;
|
|
|
|
|
let pingInterval: NodeJS.Timeout | null = null;
|
2026-02-09 14:03:10 -08:00
|
|
|
let reconnectAttempts = 0;
|
|
|
|
|
const MAX_RECONNECT_ATTEMPTS = 2;
|
2026-02-06 16:20:22 -08:00
|
|
|
|
|
|
|
|
const connect = () => {
|
|
|
|
|
try {
|
|
|
|
|
ws = new WebSocket(`ws://${host}:${port}/ws`);
|
|
|
|
|
|
|
|
|
|
ws.on('open', () => {
|
|
|
|
|
debugLogger.debug(`WebSocket connected to ${host}:${port}`);
|
2026-02-09 14:03:10 -08:00
|
|
|
reconnectAttempts = 0;
|
2026-02-06 16:20:22 -08:00
|
|
|
// Register with CLI's session ID
|
|
|
|
|
sendMessage({
|
|
|
|
|
type: 'register',
|
|
|
|
|
sessionId: config.getSessionId(),
|
|
|
|
|
timestamp: Date.now(),
|
2026-01-11 21:22:49 +08:00
|
|
|
});
|
2026-02-06 16:20:22 -08:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
ws.on('message', (data: Buffer) => {
|
|
|
|
|
try {
|
2026-02-10 08:54:23 -08:00
|
|
|
const parsed: unknown = JSON.parse(data.toString());
|
|
|
|
|
if (
|
|
|
|
|
typeof parsed === 'object' &&
|
|
|
|
|
parsed !== null &&
|
|
|
|
|
'type' in parsed &&
|
|
|
|
|
typeof parsed.type === 'string'
|
|
|
|
|
) {
|
|
|
|
|
handleServerMessage({
|
|
|
|
|
type: parsed.type,
|
|
|
|
|
sessionId:
|
|
|
|
|
'sessionId' in parsed && typeof parsed.sessionId === 'string'
|
|
|
|
|
? parsed.sessionId
|
|
|
|
|
: undefined,
|
|
|
|
|
});
|
|
|
|
|
}
|
2026-02-06 16:20:22 -08:00
|
|
|
} catch (err) {
|
|
|
|
|
debugLogger.debug('Invalid WebSocket message:', err);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
ws.on('close', () => {
|
|
|
|
|
debugLogger.debug(`WebSocket disconnected from ${host}:${port}`);
|
|
|
|
|
cleanup();
|
|
|
|
|
scheduleReconnect();
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
ws.on('error', (err) => {
|
|
|
|
|
debugLogger.debug(`WebSocket error:`, err);
|
|
|
|
|
});
|
|
|
|
|
} catch (err) {
|
|
|
|
|
debugLogger.debug(`Failed to connect WebSocket:`, err);
|
|
|
|
|
scheduleReconnect();
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
const handleServerMessage = (message: {
|
|
|
|
|
type: string;
|
|
|
|
|
sessionId?: string;
|
|
|
|
|
}) => {
|
2026-02-06 16:20:22 -08:00
|
|
|
switch (message.type) {
|
|
|
|
|
case 'registered':
|
2026-02-10 08:54:23 -08:00
|
|
|
sessionId = message.sessionId || null;
|
2026-02-06 16:20:22 -08:00
|
|
|
debugLogger.debug(`WebSocket session registered: ${sessionId}`);
|
|
|
|
|
|
|
|
|
|
// Start ping interval
|
|
|
|
|
if (pingInterval) clearInterval(pingInterval);
|
|
|
|
|
pingInterval = setInterval(() => {
|
|
|
|
|
sendMessage({ type: 'pong', timestamp: Date.now() });
|
|
|
|
|
}, 15000);
|
|
|
|
|
|
|
|
|
|
// Flush buffered logs
|
|
|
|
|
flushBuffer();
|
|
|
|
|
break;
|
|
|
|
|
|
|
|
|
|
case 'ping':
|
|
|
|
|
sendMessage({ type: 'pong', timestamp: Date.now() });
|
|
|
|
|
break;
|
|
|
|
|
|
|
|
|
|
default:
|
|
|
|
|
// Ignore unknown message types
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
const sendMessage = (message: object) => {
|
2026-02-06 16:20:22 -08:00
|
|
|
if (ws && ws.readyState === WebSocket.OPEN) {
|
|
|
|
|
ws.send(JSON.stringify(message));
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
const sendToNetwork = (type: 'console' | 'network', payload: object) => {
|
2026-02-06 16:20:22 -08:00
|
|
|
const message = {
|
|
|
|
|
type,
|
|
|
|
|
payload,
|
|
|
|
|
sessionId: sessionId || config.getSessionId(),
|
|
|
|
|
timestamp: Date.now(),
|
2026-01-11 21:22:49 +08:00
|
|
|
};
|
|
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
// If not connected or network logging not enabled, buffer
|
|
|
|
|
if (
|
|
|
|
|
!ws ||
|
|
|
|
|
ws.readyState !== WebSocket.OPEN ||
|
|
|
|
|
!capture.isNetworkLoggingEnabled()
|
|
|
|
|
) {
|
2026-02-10 08:54:23 -08:00
|
|
|
transportBuffer.push(message);
|
|
|
|
|
if (transportBuffer.length > MAX_BUFFER_SIZE) transportBuffer.shift();
|
2026-02-06 16:20:22 -08:00
|
|
|
return;
|
|
|
|
|
}
|
2026-01-11 21:22:49 +08:00
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
sendMessage(message);
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
const flushBuffer = () => {
|
|
|
|
|
if (
|
|
|
|
|
!ws ||
|
|
|
|
|
ws.readyState !== WebSocket.OPEN ||
|
|
|
|
|
!capture.isNetworkLoggingEnabled()
|
|
|
|
|
) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-10 08:54:23 -08:00
|
|
|
const { network, console: consoleLogs } = capture.drainBufferedLogs();
|
|
|
|
|
const allInitialLogs: Array<{
|
|
|
|
|
type: 'network' | 'console';
|
|
|
|
|
payload: object;
|
|
|
|
|
timestamp: number;
|
|
|
|
|
}> = [
|
|
|
|
|
...network.map((l) => ({
|
|
|
|
|
type: 'network' as const,
|
|
|
|
|
payload: l,
|
|
|
|
|
timestamp: 'timestamp' in l && l.timestamp ? l.timestamp : Date.now(),
|
|
|
|
|
})),
|
|
|
|
|
...consoleLogs.map((l) => ({
|
|
|
|
|
type: 'console' as const,
|
|
|
|
|
payload: l,
|
|
|
|
|
timestamp: l.timestamp,
|
|
|
|
|
})),
|
|
|
|
|
].sort((a, b) => a.timestamp - b.timestamp);
|
|
|
|
|
|
|
|
|
|
debugLogger.debug(
|
|
|
|
|
`Flushing ${allInitialLogs.length} initial buffered logs and ${transportBuffer.length} transport buffered logs...`,
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
for (const log of allInitialLogs) {
|
|
|
|
|
sendMessage({
|
|
|
|
|
type: log.type,
|
|
|
|
|
payload: log.payload,
|
|
|
|
|
sessionId: sessionId || config.getSessionId(),
|
|
|
|
|
timestamp: Date.now(),
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
while (transportBuffer.length > 0) {
|
|
|
|
|
const message = transportBuffer.shift()!;
|
2026-02-06 16:20:22 -08:00
|
|
|
sendMessage(message);
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
const cleanup = () => {
|
|
|
|
|
if (pingInterval) {
|
|
|
|
|
clearInterval(pingInterval);
|
|
|
|
|
pingInterval = null;
|
|
|
|
|
}
|
|
|
|
|
ws = null;
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
const scheduleReconnect = () => {
|
|
|
|
|
if (reconnectTimer) return;
|
|
|
|
|
|
2026-02-09 14:03:10 -08:00
|
|
|
reconnectAttempts++;
|
|
|
|
|
if (reconnectAttempts > MAX_RECONNECT_ATTEMPTS && onReconnectFailed) {
|
|
|
|
|
debugLogger.debug(
|
|
|
|
|
`WebSocket reconnect failed after ${MAX_RECONNECT_ATTEMPTS} attempts, promoting to server...`,
|
|
|
|
|
);
|
|
|
|
|
onReconnectFailed();
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
reconnectTimer = setTimeout(() => {
|
|
|
|
|
reconnectTimer = null;
|
|
|
|
|
debugLogger.debug('Reconnecting WebSocket...');
|
|
|
|
|
connect();
|
2026-02-09 14:03:10 -08:00
|
|
|
}, 1000);
|
2026-02-06 16:20:22 -08:00
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Initial connection
|
|
|
|
|
connect();
|
|
|
|
|
|
|
|
|
|
capture.on('console', (payload) => sendToNetwork('console', payload));
|
|
|
|
|
capture.on('network', (payload) => sendToNetwork('network', payload));
|
2026-02-10 08:54:23 -08:00
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
capture.on('network-logging-enabled', () => {
|
|
|
|
|
debugLogger.debug('Network logging enabled, flushing buffer...');
|
|
|
|
|
flushBuffer();
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Cleanup on process exit
|
|
|
|
|
process.on('exit', () => {
|
|
|
|
|
if (reconnectTimer) clearTimeout(reconnectTimer);
|
|
|
|
|
if (ws) ws.close();
|
|
|
|
|
cleanup();
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-09 14:03:10 -08:00
|
|
|
let bridgeAttached = false;
|
|
|
|
|
|
2026-02-06 16:20:22 -08:00
|
|
|
/**
|
2026-02-09 14:03:10 -08:00
|
|
|
* Bridge coreEvents to the ActivityLogger singleton (guarded — only once).
|
2026-02-06 16:20:22 -08:00
|
|
|
*/
|
2026-02-09 14:03:10 -08:00
|
|
|
function bridgeCoreEvents(capture: ActivityLogger) {
|
|
|
|
|
if (bridgeAttached) return;
|
|
|
|
|
bridgeAttached = true;
|
|
|
|
|
coreEvents.on(CoreEvent.ConsoleLog, (payload) => {
|
|
|
|
|
capture.logConsole(payload);
|
|
|
|
|
});
|
|
|
|
|
}
|
2026-02-06 16:20:22 -08:00
|
|
|
|
2026-02-09 14:03:10 -08:00
|
|
|
/**
|
|
|
|
|
* Initialize the activity logger with a specific transport mode.
|
|
|
|
|
*
|
|
|
|
|
* @param config CLI configuration
|
|
|
|
|
* @param options Transport configuration: network (WebSocket) or file (JSONL)
|
|
|
|
|
*/
|
|
|
|
|
export function initActivityLogger(
|
|
|
|
|
config: Config,
|
|
|
|
|
options:
|
|
|
|
|
| {
|
|
|
|
|
mode: 'network';
|
|
|
|
|
host: string;
|
|
|
|
|
port: number;
|
|
|
|
|
onReconnectFailed?: () => void;
|
|
|
|
|
}
|
2026-02-10 08:54:23 -08:00
|
|
|
| { mode: 'file'; filePath?: string }
|
|
|
|
|
| { mode: 'buffer' },
|
2026-02-09 14:03:10 -08:00
|
|
|
): void {
|
2026-02-06 16:20:22 -08:00
|
|
|
const capture = ActivityLogger.getInstance();
|
|
|
|
|
capture.enable();
|
|
|
|
|
|
2026-02-09 14:03:10 -08:00
|
|
|
if (options.mode === 'network') {
|
|
|
|
|
setupNetworkLogging(
|
|
|
|
|
capture,
|
|
|
|
|
options.host,
|
|
|
|
|
options.port,
|
|
|
|
|
config,
|
|
|
|
|
options.onReconnectFailed,
|
|
|
|
|
);
|
2026-02-06 16:20:22 -08:00
|
|
|
capture.enableNetworkLogging();
|
2026-02-10 08:54:23 -08:00
|
|
|
} else if (options.mode === 'file') {
|
2026-02-09 14:03:10 -08:00
|
|
|
setupFileLogging(capture, config, options.filePath);
|
2026-02-06 16:20:22 -08:00
|
|
|
}
|
2026-02-10 08:54:23 -08:00
|
|
|
// buffer mode: no transport, just intercept + bridge
|
2026-02-06 16:20:22 -08:00
|
|
|
|
2026-02-09 14:03:10 -08:00
|
|
|
bridgeCoreEvents(capture);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Add a network (WebSocket) transport to the existing ActivityLogger singleton.
|
|
|
|
|
* Used for promotion re-entry without re-bridging coreEvents.
|
|
|
|
|
*/
|
|
|
|
|
export function addNetworkTransport(
|
|
|
|
|
config: Config,
|
|
|
|
|
host: string,
|
|
|
|
|
port: number,
|
|
|
|
|
onReconnectFailed?: () => void,
|
|
|
|
|
): void {
|
|
|
|
|
const capture = ActivityLogger.getInstance();
|
|
|
|
|
setupNetworkLogging(capture, host, port, config, onReconnectFailed);
|
2026-01-11 21:22:49 +08:00
|
|
|
}
|