Add DNS traffic recorder plugin (#1748)

This commit is contained in:
Nick Sweeting
2025-12-31 11:02:43 -08:00
committed by GitHub
18 changed files with 616 additions and 748 deletions

View File

@@ -1512,6 +1512,173 @@ async function installExtensionWithCache(extension, options = {}) {
return installedExt;
}
// ============================================================================
// Snapshot Hook Utilities (for CDP-based plugins like ssl, responses, dns)
// ============================================================================
/**
* Parse command line arguments into an object.
* Handles --key=value and --flag formats.
*
* @returns {Object} - Parsed arguments object
*/
function parseArgs() {
  const parsed = {};
  for (const token of process.argv.slice(2)) {
    if (!token.startsWith('--')) continue;
    // Split only on the first '=' so values may themselves contain '='
    const [name, ...rest] = token.slice(2).split('=');
    // Normalize kebab-case flags to snake_case keys; bare flags become true
    parsed[name.replace(/-/g, '_')] = rest.join('=') || true;
  }
  return parsed;
}
/**
* Wait for Chrome session files to be ready.
* Polls for cdp_url.txt and target_id.txt in the chrome session directory.
*
* @param {string} chromeSessionDir - Path to chrome session directory (e.g., '../chrome')
* @param {number} [timeoutMs=60000] - Timeout in milliseconds
* @returns {Promise<boolean>} - True if files are ready, false if timeout
*/
async function waitForChromeSession(chromeSessionDir, timeoutMs = 60000) {
  // Both files must exist before the session is considered usable
  const requiredFiles = [
    path.join(chromeSessionDir, 'cdp_url.txt'),
    path.join(chromeSessionDir, 'target_id.txt'),
  ];
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    if (requiredFiles.every((f) => fs.existsSync(f))) {
      return true;
    }
    // Poll every 100ms
    await new Promise((resolve) => setTimeout(resolve, 100));
  }
  return false;
}
/**
* Read CDP WebSocket URL from chrome session directory.
*
* @param {string} chromeSessionDir - Path to chrome session directory
* @returns {string|null} - CDP URL or null if not found
*/
function readCdpUrl(chromeSessionDir) {
  const cdpFile = path.join(chromeSessionDir, 'cdp_url.txt');
  // A missing file means the chrome plugin has not published a session yet
  if (!fs.existsSync(cdpFile)) {
    return null;
  }
  return fs.readFileSync(cdpFile, 'utf8').trim();
}
/**
* Read target ID from chrome session directory.
*
* @param {string} chromeSessionDir - Path to chrome session directory
* @returns {string|null} - Target ID or null if not found
*/
function readTargetId(chromeSessionDir) {
  const targetIdFile = path.join(chromeSessionDir, 'target_id.txt');
  // A missing file means chrome_navigate has not recorded its tab yet
  if (!fs.existsSync(targetIdFile)) {
    return null;
  }
  return fs.readFileSync(targetIdFile, 'utf8').trim();
}
/**
* Connect to Chrome browser and find the target page.
* This is a high-level utility that handles all the connection logic:
* 1. Wait for chrome session files
* 2. Connect to browser via CDP
* 3. Find the target page by ID
*
* @param {Object} options - Connection options
* @param {string} [options.chromeSessionDir='../chrome'] - Path to chrome session directory
* @param {number} [options.timeoutMs=60000] - Timeout for waiting
* @param {Object} [options.puppeteer] - Puppeteer module (must be passed in)
* @returns {Promise<Object>} - { browser, page, targetId, cdpUrl }
* @throws {Error} - If connection fails or page not found
*/
async function connectToPage(options = {}) {
  const {
    chromeSessionDir = '../chrome',
    timeoutMs = 60000,
    puppeteer,
  } = options;
  if (!puppeteer) {
    throw new Error('puppeteer module must be passed to connectToPage()');
  }
  // Block until the chrome plugin has written its session files
  const ready = await waitForChromeSession(chromeSessionDir, timeoutMs);
  if (!ready) {
    throw new Error(`Chrome session not ready after ${timeoutMs/1000}s (chrome plugin must run first)`);
  }
  const cdpUrl = readCdpUrl(chromeSessionDir);
  if (!cdpUrl) {
    throw new Error('No Chrome session found (cdp_url.txt missing)');
  }
  const targetId = readTargetId(chromeSessionDir);
  // Attach to the already-running browser over its CDP websocket
  const browser = await puppeteer.connect({ browserWSEndpoint: cdpUrl });
  const pages = await browser.pages();
  // Prefer the tab whose CDP target id matches (note: _targetId is a
  // puppeteer-internal field); otherwise fall back to the newest tab
  let page = null;
  if (targetId) {
    page = pages.find((p) => p.target()?._targetId === targetId) ?? null;
  }
  if (!page) {
    page = pages.at(-1) ?? null;
  }
  if (!page) {
    throw new Error('No page found in browser');
  }
  return { browser, page, targetId, cdpUrl };
}
/**
* Wait for page navigation to complete.
* Polls for page_loaded.txt marker file written by chrome_navigate.
*
* @param {string} chromeSessionDir - Path to chrome session directory
* @param {number} [timeoutMs=120000] - Timeout in milliseconds
* @param {number} [postLoadDelayMs=0] - Additional delay after page load marker
* @returns {Promise<void>}
* @throws {Error} - If timeout waiting for navigation
*/
async function waitForPageLoaded(chromeSessionDir, timeoutMs = 120000, postLoadDelayMs = 0) {
  const marker = path.join(chromeSessionDir, 'page_loaded.txt');
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  const pollInterval = 100;
  // Poll until chrome_navigate drops its completion marker or we time out
  for (let elapsed = 0; !fs.existsSync(marker) && elapsed < timeoutMs; elapsed += pollInterval) {
    await sleep(pollInterval);
  }
  if (!fs.existsSync(marker)) {
    throw new Error('Timeout waiting for navigation (chrome_navigate did not complete)');
  }
  // Optional grace period so late responses can still be captured
  if (postLoadDelayMs > 0) {
    await sleep(postLoadDelayMs);
  }
}
// Export all functions
module.exports = {
// Environment helpers
@@ -1559,6 +1726,13 @@ module.exports = {
installExtensionWithCache,
// Deprecated - use enableExtensions option instead
getExtensionLaunchArgs,
// Snapshot hook utilities (for CDP-based plugins)
parseArgs,
waitForChromeSession,
readCdpUrl,
readTargetId,
connectToPage,
waitForPageLoaded,
};
// CLI usage

View File

@@ -7,75 +7,31 @@
* navigation and capture all console output.
*
* Usage: on_Snapshot__21_consolelog.js --url=<url> --snapshot-id=<uuid>
* Output: Writes console.jsonl + listener.pid
* Output: Writes console.jsonl
*/
const fs = require('fs');
const path = require('path');
// Add NODE_MODULES_DIR to module resolution paths if set
if (process.env.NODE_MODULES_DIR) module.paths.unshift(process.env.NODE_MODULES_DIR);
const puppeteer = require('puppeteer-core');
// Import shared utilities from chrome_utils.js
const {
getEnvBool,
getEnvInt,
parseArgs,
connectToPage,
waitForPageLoaded,
} = require('../chrome/chrome_utils.js');
const PLUGIN_NAME = 'consolelog';
const OUTPUT_DIR = '.';
const OUTPUT_FILE = 'console.jsonl';
// PID file is now written by run_hook() with hook-specific name
const CHROME_SESSION_DIR = '../chrome';
function parseArgs() {
const args = {};
process.argv.slice(2).forEach(arg => {
if (arg.startsWith('--')) {
const [key, ...valueParts] = arg.slice(2).split('=');
args[key.replace(/-/g, '_')] = valueParts.join('=') || true;
}
});
return args;
}
function getEnv(name, defaultValue = '') {
  // Unset or empty vars fall back to defaultValue; always strip whitespace
  const value = process.env[name] || defaultValue;
  return value.trim();
}
function getEnvBool(name, defaultValue = false) {
  const normalized = getEnv(name, '').toLowerCase();
  const truthy = ['true', '1', 'yes', 'on'];
  const falsy = ['false', '0', 'no', 'off'];
  if (truthy.includes(normalized)) {
    return true;
  }
  if (falsy.includes(normalized)) {
    return false;
  }
  // Unrecognized or unset values fall back to the caller's default
  return defaultValue;
}
async function waitForChromeTabOpen(timeoutMs = 60000) {
  // Both session files must exist before the tab is considered open
  const requiredFiles = [
    path.join(CHROME_SESSION_DIR, 'cdp_url.txt'),
    path.join(CHROME_SESSION_DIR, 'target_id.txt'),
  ];
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    if (requiredFiles.every((f) => fs.existsSync(f))) {
      return true;
    }
    // Poll every 100ms until the chrome plugin writes both files
    await new Promise((resolve) => setTimeout(resolve, 100));
  }
  return false;
}
function getCdpUrl() {
  const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
  // A missing file means the chrome plugin has not started a session yet
  if (!fs.existsSync(cdpFile)) {
    return null;
  }
  return fs.readFileSync(cdpFile, 'utf8').trim();
}
function getPageId() {
  const targetIdFile = path.join(CHROME_SESSION_DIR, 'target_id.txt');
  // A missing file means chrome_navigate has not recorded its tab yet
  if (!fs.existsSync(targetIdFile)) {
    return null;
  }
  return fs.readFileSync(targetIdFile, 'utf8').trim();
}
async function serializeArgs(args) {
const serialized = [];
for (const arg of args) {
@@ -95,39 +51,16 @@ async function serializeArgs(args) {
async function setupListeners() {
const outputPath = path.join(OUTPUT_DIR, OUTPUT_FILE);
const timeout = getEnvInt('CONSOLELOG_TIMEOUT', 30) * 1000;
fs.writeFileSync(outputPath, ''); // Clear existing
// Wait for chrome tab to be open (up to 60s)
const tabOpen = await waitForChromeTabOpen(60000);
if (!tabOpen) {
throw new Error('Chrome tab not open after 60s (chrome plugin must run first)');
}
const cdpUrl = getCdpUrl();
if (!cdpUrl) {
throw new Error('No Chrome session found');
}
const browser = await puppeteer.connect({ browserWSEndpoint: cdpUrl });
// Find our page
const pages = await browser.pages();
const targetId = getPageId();
let page = null;
if (targetId) {
page = pages.find(p => {
const target = p.target();
return target && target._targetId === targetId;
});
}
if (!page) {
page = pages[pages.length - 1];
}
if (!page) {
throw new Error('No page found');
}
// Connect to Chrome page using shared utility
const { browser, page } = await connectToPage({
chromeSessionDir: CHROME_SESSION_DIR,
timeoutMs: timeout,
puppeteer,
});
// Set up listeners that write directly to file
page.on('console', async (msg) => {
@@ -178,27 +111,6 @@ async function setupListeners() {
return { browser, page };
}
async function waitForNavigation() {
  // chrome_navigate signals completion by writing page_loaded.txt
  const pageLoadedMarker = path.join('../chrome', 'page_loaded.txt');
  const maxWait = 120000; // 2 minutes
  const pollInterval = 100;
  for (let waited = 0; !fs.existsSync(pageLoadedMarker) && waited < maxWait; waited += pollInterval) {
    await new Promise((resolve) => setTimeout(resolve, pollInterval));
  }
  if (!fs.existsSync(pageLoadedMarker)) {
    throw new Error('Timeout waiting for navigation (chrome_navigate did not complete)');
  }
  // Grace period so console output emitted just after load is captured
  await new Promise((resolve) => setTimeout(resolve, 500));
}
async function main() {
const args = parseArgs();
const url = args.url;
@@ -215,22 +127,16 @@ async function main() {
process.exit(0);
}
const startTs = new Date();
const timeout = getEnvInt('CONSOLELOG_TIMEOUT', 30) * 1000;
try {
// Set up listeners BEFORE navigation
await setupListeners();
// Note: PID file is written by run_hook() with hook-specific name
// Snapshot.cleanup() kills all *.pid processes when done
// Wait for chrome_navigate to complete (BLOCKING)
await waitForNavigation();
await waitForPageLoaded(CHROME_SESSION_DIR, timeout * 4, 500);
// Report success
const endTs = new Date();
// Output clean JSONL (no RESULT_JSON= prefix)
// Output clean JSONL
console.log(JSON.stringify({
type: 'ArchiveResult',
status: 'succeeded',
@@ -243,7 +149,6 @@ async function main() {
const error = `${e.name}: ${e.message}`;
console.error(`ERROR: ${error}`);
// Output clean JSONL (no RESULT_JSON= prefix)
console.log(JSON.stringify({
type: 'ArchiveResult',
status: 'failed',

View File

@@ -0,0 +1,21 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"additionalProperties": false,
"required_plugins": ["chrome"],
"properties": {
"DNS_ENABLED": {
"type": "boolean",
"default": true,
"x-aliases": ["SAVE_DNS", "USE_DNS"],
"description": "Enable DNS traffic recording during page load"
},
"DNS_TIMEOUT": {
"type": "integer",
"default": 30,
"minimum": 5,
"x-fallback": "TIMEOUT",
"description": "Timeout for DNS recording in seconds"
}
}
}

View File

@@ -0,0 +1,240 @@
#!/usr/bin/env node
/**
* Record all DNS traffic (hostname -> IP resolutions) during page load.
*
* This hook sets up CDP listeners BEFORE chrome_navigate loads the page,
* then waits for navigation to complete. The listeners capture all DNS
* resolutions by extracting hostname/IP pairs from network responses.
*
* Usage: on_Snapshot__22_dns.js --url=<url> --snapshot-id=<uuid>
* Output: Writes dns.jsonl with one line per DNS resolution record
*/
const fs = require('fs');
const path = require('path');
// Add NODE_MODULES_DIR to module resolution paths if set
if (process.env.NODE_MODULES_DIR) module.paths.unshift(process.env.NODE_MODULES_DIR);
const puppeteer = require('puppeteer-core');
// Import shared utilities from chrome_utils.js
const {
getEnvBool,
getEnvInt,
parseArgs,
connectToPage,
waitForPageLoaded,
} = require('../chrome/chrome_utils.js');
const PLUGIN_NAME = 'dns';
const OUTPUT_DIR = '.';
const OUTPUT_FILE = 'dns.jsonl';
const CHROME_SESSION_DIR = '../chrome';
function extractHostname(url) {
  try {
    // WHATWG URL parsing; throws on anything that is not an absolute URL
    return new URL(url).hostname;
  } catch (e) {
    return null;
  }
}
/**
 * Attach CDP network listeners that record DNS resolutions to dns.jsonl.
 *
 * Must be called BEFORE chrome_navigate loads the page so that no
 * request/response events are missed.
 *
 * @param {string} targetUrl - The snapshot URL (currently unused; kept for
 *   signature parity with the other snapshot hooks).
 * @returns {Promise<Object>} - { browser, page, client }
 */
async function setupListener(targetUrl) {
  const outputPath = path.join(OUTPUT_DIR, OUTPUT_FILE);
  const timeout = getEnvInt('DNS_TIMEOUT', 30) * 1000;
  // Initialize (truncate) the output file so each run starts clean
  fs.writeFileSync(outputPath, '');
  // Dedupe hostname->IP pairs so each resolution is recorded only once
  const seenResolutions = new Set();
  // requestId -> URL, needed to recover hostnames for failed requests
  const requestUrls = new Map();
  // Connect to the page opened by the chrome plugin
  const { browser, page } = await connectToPage({
    chromeSessionDir: CHROME_SESSION_DIR,
    timeoutMs: timeout,
    puppeteer,
  });
  // Raw CDP session: the high-level puppeteer API does not expose remoteIPAddress
  const client = await page.target().createCDPSession();
  await client.send('Network.enable');
  // Track every request's URL for later correlation (e.g. on loadingFailed)
  client.on('Network.requestWillBeSent', (params) => {
    requestUrls.set(params.requestId, params.request.url);
  });
  // Successful responses carry the resolved remote IP address and port
  client.on('Network.responseReceived', (params) => {
    try {
      const { url, remoteIPAddress, remotePort } = params.response;
      if (!url || !remoteIPAddress) return;
      const hostname = extractHostname(url);
      if (!hostname) return;
      // If the "hostname" is already a literal IP there was no DNS lookup
      if (hostname === remoteIPAddress) return;
      const resolutionKey = `${hostname}:${remoteIPAddress}`;
      if (seenResolutions.has(resolutionKey)) return;
      seenResolutions.add(resolutionKey);
      // IPv6 literals contain ':'; classify record type accordingly
      const recordType = remoteIPAddress.includes(':') ? 'AAAA' : 'A';
      const dnsRecord = {
        ts: new Date().toISOString(),
        hostname: hostname,
        ip: remoteIPAddress,
        port: remotePort || null,
        type: recordType,
        protocol: url.startsWith('https://') ? 'https' : 'http',
        url: url,
        requestId: params.requestId,
      };
      fs.appendFileSync(outputPath, JSON.stringify(dnsRecord) + '\n');
    } catch (e) {
      // Best-effort capture: never let a malformed event kill the listener
    }
  });
  // Failed requests may still represent DNS activity (NXDOMAIN etc.)
  client.on('Network.loadingFailed', (params) => {
    try {
      const url = requestUrls.get(params.requestId);
      if (!url) return;
      const hostname = extractHostname(url);
      if (!hostname) return;
      // Only record failures that are specifically name-resolution errors
      const errorText = params.errorText || '';
      const isDnsFailure =
        errorText.includes('net::ERR_NAME_NOT_RESOLVED') ||
        errorText.includes('net::ERR_NAME_RESOLUTION_FAILED');
      if (!isDnsFailure) return;
      const resolutionKey = `${hostname}:NXDOMAIN`;
      if (seenResolutions.has(resolutionKey)) return;
      seenResolutions.add(resolutionKey);
      const dnsRecord = {
        ts: new Date().toISOString(),
        hostname: hostname,
        ip: null,
        port: null,
        type: 'NXDOMAIN',
        protocol: url.startsWith('https://') ? 'https' : 'http',
        url: url,
        requestId: params.requestId,
        error: errorText,
      };
      fs.appendFileSync(outputPath, JSON.stringify(dnsRecord) + '\n');
    } catch (e) {
      // Best-effort capture: never let a malformed event kill the listener
    }
  });
  return { browser, page, client };
}
async function main() {
  const args = parseArgs();
  const { url, snapshot_id: snapshotId } = args;
  if (!url || !snapshotId) {
    console.error('Usage: on_Snapshot__22_dns.js --url=<url> --snapshot-id=<uuid>');
    process.exit(1);
  }
  if (!getEnvBool('DNS_ENABLED', true)) {
    console.error('Skipping (DNS_ENABLED=False)');
    console.log(JSON.stringify({type: 'ArchiveResult', status: 'skipped', output_str: 'DNS_ENABLED=False'}));
    process.exit(0);
  }
  const timeout = getEnvInt('DNS_TIMEOUT', 30) * 1000;
  try {
    // Listeners must be attached BEFORE chrome_navigate starts loading
    await setupListener(url);
    // Block until chrome_navigate reports the page as loaded
    await waitForPageLoaded(CHROME_SESSION_DIR, timeout * 4, 500);
    // Summarize how many resolutions were captured
    const outputPath = path.join(OUTPUT_DIR, OUTPUT_FILE);
    let recordCount = 0;
    if (fs.existsSync(outputPath)) {
      recordCount = fs
        .readFileSync(outputPath, 'utf8')
        .split('\n')
        .filter((line) => line.trim())
        .length;
    }
    // Emit the result as clean JSONL on stdout
    console.log(JSON.stringify({
      type: 'ArchiveResult',
      status: 'succeeded',
      output_str: `${OUTPUT_FILE} (${recordCount} DNS records)`,
    }));
    process.exit(0);
  } catch (e) {
    const error = `${e.name}: ${e.message}`;
    console.error(`ERROR: ${error}`);
    console.log(JSON.stringify({
      type: 'ArchiveResult',
      status: 'failed',
      output_str: error,
    }));
    process.exit(1);
  }
}
// Top-level entry point: any rejection escaping main() is reported and the
// process exits non-zero so the hook runner records a failure.
main().catch(e => {
console.error(`Fatal error: ${e.message}`);
process.exit(1);
});

View File

@@ -29,6 +29,8 @@ const {
getEnvBool,
getEnvInt,
parseResolution,
parseArgs,
readCdpUrl,
} = require('../chrome/chrome_utils.js');
// Check if DOM is enabled BEFORE requiring puppeteer
@@ -47,18 +49,6 @@ const OUTPUT_DIR = '.';
const OUTPUT_FILE = 'output.html';
const CHROME_SESSION_DIR = '../chrome';
// Parse command line arguments
function parseArgs() {
const args = {};
process.argv.slice(2).forEach(arg => {
if (arg.startsWith('--')) {
const [key, ...valueParts] = arg.slice(2).split('=');
args[key.replace(/-/g, '_')] = valueParts.join('=') || true;
}
});
return args;
}
// Check if staticfile extractor already downloaded this URL
const STATICFILE_DIR = '../staticfile';
function hasStaticFileOutput() {
@@ -81,15 +71,6 @@ async function waitForChromeTabLoaded(timeoutMs = 60000) {
return false;
}
// Get CDP URL from chrome plugin if available
function getCdpUrl() {
const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
if (fs.existsSync(cdpFile)) {
return fs.readFileSync(cdpFile, 'utf8').trim();
}
return null;
}
async function dumpDom(url) {
const timeout = (getEnvInt('CHROME_TIMEOUT') || getEnvInt('TIMEOUT', 60)) * 1000;
const resolution = getEnv('CHROME_RESOLUTION') || getEnv('RESOLUTION', '1440,2000');
@@ -108,7 +89,7 @@ async function dumpDom(url) {
try {
// Try to connect to existing Chrome session
const cdpUrl = getCdpUrl();
const cdpUrl = readCdpUrl(CHROME_SESSION_DIR);
if (cdpUrl) {
try {
browser = await puppeteer.connect({
@@ -212,7 +193,7 @@ async function main() {
}
// Only wait for page load if using shared Chrome session
const cdpUrl = getCdpUrl();
const cdpUrl = readCdpUrl(CHROME_SESSION_DIR);
if (cdpUrl) {
// Wait for page to be fully loaded
const pageLoaded = await waitForChromeTabLoaded(60000);

View File

@@ -19,6 +19,15 @@ const fs = require('fs');
const path = require('path');
const https = require('https');
const http = require('http');
// Add NODE_MODULES_DIR to module resolution paths if set
if (process.env.NODE_MODULES_DIR) module.paths.unshift(process.env.NODE_MODULES_DIR);
const {
getEnv,
getEnvBool,
getEnvInt,
parseArgs,
} = require('../chrome/chrome_utils.js');
// Extractor metadata
const PLUGIN_NAME = 'headers';
@@ -27,35 +36,6 @@ const OUTPUT_FILE = 'headers.json';
const CHROME_SESSION_DIR = '../chrome';
const CHROME_HEADERS_FILE = 'response_headers.json';
// Parse command line arguments
function parseArgs() {
const args = {};
process.argv.slice(2).forEach(arg => {
if (arg.startsWith('--')) {
const [key, ...valueParts] = arg.slice(2).split('=');
args[key.replace(/-/g, '_')] = valueParts.join('=') || true;
}
});
return args;
}
// Get environment variable with default
function getEnv(name, defaultValue = '') {
return (process.env[name] || defaultValue).trim();
}
function getEnvBool(name, defaultValue = false) {
const val = getEnv(name, '').toLowerCase();
if (['true', '1', 'yes', 'on'].includes(val)) return true;
if (['false', '0', 'no', 'off'].includes(val)) return false;
return defaultValue;
}
// Read an environment variable as a base-10 integer.
// Unset or non-numeric values fall back to defaultValue.
function getEnvInt(name, defaultValue = 0) {
const val = parseInt(getEnv(name, String(defaultValue)), 10);
return isNaN(val) ? defaultValue : val;
}
// Get headers from chrome plugin if available
function getHeadersFromChromeSession() {
const headersFile = path.join(CHROME_SESSION_DIR, CHROME_HEADERS_FILE);

View File

@@ -20,21 +20,20 @@
* MODALCLOSER_POLL_INTERVAL: How often to check for CSS modals in ms (default: 500)
*/
function getEnv(name, defaultValue = '') {
return (process.env[name] || defaultValue).trim();
}
const fs = require('fs');
const path = require('path');
function getEnvBool(name, defaultValue = false) {
const val = getEnv(name, '').toLowerCase();
if (['true', '1', 'yes', 'on'].includes(val)) return true;
if (['false', '0', 'no', 'off'].includes(val)) return false;
return defaultValue;
}
// Add NODE_MODULES_DIR to module resolution paths if set
if (process.env.NODE_MODULES_DIR) module.paths.unshift(process.env.NODE_MODULES_DIR);
function getEnvInt(name, defaultValue = 0) {
const val = parseInt(getEnv(name, String(defaultValue)), 10);
return isNaN(val) ? defaultValue : val;
}
// Import shared utilities from chrome_utils.js
const {
getEnvBool,
getEnvInt,
parseArgs,
readCdpUrl,
readTargetId,
} = require('../chrome/chrome_utils.js');
// Check if modalcloser is enabled BEFORE requiring puppeteer
if (!getEnvBool('MODALCLOSER_ENABLED', true)) {
@@ -42,42 +41,11 @@ if (!getEnvBool('MODALCLOSER_ENABLED', true)) {
process.exit(0);
}
const fs = require('fs');
const path = require('path');
// Add NODE_MODULES_DIR to module resolution paths if set
if (process.env.NODE_MODULES_DIR) module.paths.unshift(process.env.NODE_MODULES_DIR);
const puppeteer = require('puppeteer-core');
const PLUGIN_NAME = 'modalcloser';
const CHROME_SESSION_DIR = '../chrome';
function parseArgs() {
const args = {};
process.argv.slice(2).forEach(arg => {
if (arg.startsWith('--')) {
const [key, ...valueParts] = arg.slice(2).split('=');
args[key.replace(/-/g, '_')] = valueParts.join('=') || true;
}
});
return args;
}
function getCdpUrl() {
const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
if (fs.existsSync(cdpFile)) {
return fs.readFileSync(cdpFile, 'utf8').trim();
}
return null;
}
function getPageId() {
const targetIdFile = path.join(CHROME_SESSION_DIR, 'target_id.txt');
if (fs.existsSync(targetIdFile)) {
return fs.readFileSync(targetIdFile, 'utf8').trim();
}
return null;
}
// Promise-based delay helper: resolves after ms milliseconds
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
@@ -267,7 +235,7 @@ async function main() {
const dialogTimeout = getEnvInt('MODALCLOSER_TIMEOUT', 1250);
const pollInterval = getEnvInt('MODALCLOSER_POLL_INTERVAL', 500);
const cdpUrl = getCdpUrl();
const cdpUrl = readCdpUrl(CHROME_SESSION_DIR);
if (!cdpUrl) {
console.error('ERROR: Chrome CDP URL not found (chrome plugin must run first)');
process.exit(1);
@@ -307,7 +275,7 @@ async function main() {
}
// Find the right page by target ID
const targetId = getPageId();
const targetId = readTargetId(CHROME_SESSION_DIR);
let page = null;
if (targetId) {
page = pages.find(p => {

View File

@@ -29,6 +29,8 @@ const {
getEnvBool,
getEnvInt,
parseResolution,
parseArgs,
readCdpUrl,
} = require('../chrome/chrome_utils.js');
// Check if PDF is enabled BEFORE requiring puppeteer
@@ -47,18 +49,6 @@ const OUTPUT_DIR = '.';
const OUTPUT_FILE = 'output.pdf';
const CHROME_SESSION_DIR = '../chrome';
// Parse command line arguments
function parseArgs() {
const args = {};
process.argv.slice(2).forEach(arg => {
if (arg.startsWith('--')) {
const [key, ...valueParts] = arg.slice(2).split('=');
args[key.replace(/-/g, '_')] = valueParts.join('=') || true;
}
});
return args;
}
// Check if staticfile extractor already downloaded this URL
const STATICFILE_DIR = '../staticfile';
function hasStaticFileOutput() {
@@ -81,15 +71,6 @@ async function waitForChromeTabLoaded(timeoutMs = 60000) {
return false;
}
// Get CDP URL from chrome plugin if available
function getCdpUrl() {
const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
if (fs.existsSync(cdpFile)) {
return fs.readFileSync(cdpFile, 'utf8').trim();
}
return null;
}
async function printToPdf(url) {
const timeout = (getEnvInt('CHROME_TIMEOUT') || getEnvInt('TIMEOUT', 60)) * 1000;
const resolution = getEnv('CHROME_RESOLUTION') || getEnv('RESOLUTION', '1440,2000');
@@ -108,7 +89,7 @@ async function printToPdf(url) {
try {
// Try to connect to existing Chrome session
const cdpUrl = getCdpUrl();
const cdpUrl = readCdpUrl(CHROME_SESSION_DIR);
if (cdpUrl) {
try {
browser = await puppeteer.connect({
@@ -221,7 +202,7 @@ async function main() {
}
// Only wait for page load if using shared Chrome session
const cdpUrl = getCdpUrl();
const cdpUrl = readCdpUrl(CHROME_SESSION_DIR);
if (cdpUrl) {
// Wait for page to be fully loaded
const pageLoaded = await waitForChromeTabLoaded(60000);

View File

@@ -89,9 +89,12 @@ class TestPipProviderHook(TestCase):
except json.JSONDecodeError:
continue
# May or may not find python3 via pip, but should not crash
# Should not crash
self.assertNotIn('Traceback', result.stderr)
# Should find python3 via pip or env provider
self.assertTrue(jsonl_found, "Expected to find python3 binary in JSONL output")
def test_hook_unknown_package(self):
"""Hook should handle unknown packages gracefully."""
env = os.environ.copy()

View File

@@ -6,20 +6,30 @@
* redirect chain from the initial request. It stays alive through navigation
* and emits JSONL on SIGTERM.
*
* Usage: on_Snapshot__25_chrome_redirects.bg.js --url=<url> --snapshot-id=<uuid>
* Output: Writes redirects.jsonl + hook.pid
* Usage: on_Snapshot__31_redirects.bg.js --url=<url> --snapshot-id=<uuid>
* Output: Writes redirects.jsonl
*/
const fs = require('fs');
const path = require('path');
// Add NODE_MODULES_DIR to module resolution paths if set
if (process.env.NODE_MODULES_DIR) module.paths.unshift(process.env.NODE_MODULES_DIR);
const puppeteer = require('puppeteer-core');
// Import shared utilities from chrome_utils.js
const {
getEnvBool,
getEnvInt,
parseArgs,
connectToPage,
waitForPageLoaded,
} = require('../chrome/chrome_utils.js');
const PLUGIN_NAME = 'redirects';
const OUTPUT_DIR = '.';
const OUTPUT_FILE = 'redirects.jsonl';
// PID file is now written by run_hook() with hook-specific name
const CHROME_SESSION_DIR = '../chrome';
// Global state
@@ -29,94 +39,20 @@ let finalUrl = '';
let page = null;
let browser = null;
function parseArgs() {
const args = {};
process.argv.slice(2).forEach(arg => {
if (arg.startsWith('--')) {
const [key, ...valueParts] = arg.slice(2).split('=');
args[key.replace(/-/g, '_')] = valueParts.join('=') || true;
}
});
return args;
}
function getEnv(name, defaultValue = '') {
return (process.env[name] || defaultValue).trim();
}
function getEnvBool(name, defaultValue = false) {
const val = getEnv(name, '').toLowerCase();
if (['true', '1', 'yes', 'on'].includes(val)) return true;
if (['false', '0', 'no', 'off'].includes(val)) return false;
return defaultValue;
}
async function waitForChromeTabOpen(timeoutMs = 60000) {
const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
const targetIdFile = path.join(CHROME_SESSION_DIR, 'target_id.txt');
const startTime = Date.now();
while (Date.now() - startTime < timeoutMs) {
if (fs.existsSync(cdpFile) && fs.existsSync(targetIdFile)) {
return true;
}
// Wait 100ms before checking again
await new Promise(resolve => setTimeout(resolve, 100));
}
return false;
}
function getCdpUrl() {
const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
if (fs.existsSync(cdpFile)) {
return fs.readFileSync(cdpFile, 'utf8').trim();
}
return null;
}
function getPageId() {
const targetIdFile = path.join(CHROME_SESSION_DIR, 'target_id.txt');
if (fs.existsSync(targetIdFile)) {
return fs.readFileSync(targetIdFile, 'utf8').trim();
}
return null;
}
async function setupRedirectListener() {
const outputPath = path.join(OUTPUT_DIR, OUTPUT_FILE);
const timeout = getEnvInt('REDIRECTS_TIMEOUT', 30) * 1000;
fs.writeFileSync(outputPath, ''); // Clear existing
// Wait for chrome tab to be open (up to 60s)
const tabOpen = await waitForChromeTabOpen(60000);
if (!tabOpen) {
throw new Error('Chrome tab not open after 60s (chrome plugin must run first)');
}
const cdpUrl = getCdpUrl();
if (!cdpUrl) {
throw new Error('No Chrome session found');
}
browser = await puppeteer.connect({ browserWSEndpoint: cdpUrl });
// Find our page
const pages = await browser.pages();
const targetId = getPageId();
if (targetId) {
page = pages.find(p => {
const target = p.target();
return target && target._targetId === targetId;
});
}
if (!page) {
page = pages[pages.length - 1];
}
if (!page) {
throw new Error('No page found');
}
// Connect to Chrome page using shared utility
const connection = await connectToPage({
chromeSessionDir: CHROME_SESSION_DIR,
timeoutMs: timeout,
puppeteer,
});
browser = connection.browser;
page = connection.page;
// Enable CDP Network domain to capture redirects
const client = await page.target().createCDPSession();
@@ -208,27 +144,6 @@ async function setupRedirectListener() {
return { browser, page };
}
async function waitForNavigation() {
// Wait for chrome_navigate to complete
const navDir = '../chrome';
const pageLoadedMarker = path.join(navDir, 'page_loaded.txt');
const maxWait = 120000; // 2 minutes
const pollInterval = 100;
let waitTime = 0;
while (!fs.existsSync(pageLoadedMarker) && waitTime < maxWait) {
await new Promise(resolve => setTimeout(resolve, pollInterval));
waitTime += pollInterval;
}
if (!fs.existsSync(pageLoadedMarker)) {
throw new Error('Timeout waiting for navigation (chrome_navigate did not complete)');
}
// Wait a bit longer for any post-load analysis
await new Promise(resolve => setTimeout(resolve, 1000));
}
function handleShutdown(signal) {
console.error(`\nReceived ${signal}, emitting final results...`);
@@ -254,7 +169,7 @@ async function main() {
const snapshotId = args.snapshot_id;
if (!url || !snapshotId) {
console.error('Usage: on_Snapshot__25_chrome_redirects.bg.js --url=<url> --snapshot-id=<uuid>');
console.error('Usage: on_Snapshot__31_redirects.bg.js --url=<url> --snapshot-id=<uuid>');
process.exit(1);
}
@@ -266,6 +181,8 @@ async function main() {
process.exit(0);
}
const timeout = getEnvInt('REDIRECTS_TIMEOUT', 30) * 1000;
// Register signal handlers for graceful shutdown
process.on('SIGTERM', () => handleShutdown('SIGTERM'));
process.on('SIGINT', () => handleShutdown('SIGINT'));
@@ -274,11 +191,8 @@ async function main() {
// Set up redirect listener BEFORE navigation
await setupRedirectListener();
// Note: PID file is written by run_hook() with hook-specific name
// Snapshot.cleanup() kills all *.pid processes when done
// Wait for chrome_navigate to complete (BLOCKING)
await waitForNavigation();
await waitForPageLoaded(CHROME_SESSION_DIR, timeout * 4, 1000);
// Keep process alive until killed by cleanup
console.error('Redirect tracking complete, waiting for cleanup signal...');
@@ -290,7 +204,6 @@ async function main() {
const error = `${e.name}: ${e.message}`;
console.error(`ERROR: ${error}`);
// Output clean JSONL (no RESULT_JSON= prefix)
console.log(JSON.stringify({
type: 'ArchiveResult',
status: 'failed',

View File

@@ -7,83 +7,35 @@
* responses during the navigation.
*
* Usage: on_Snapshot__24_responses.js --url=<url> --snapshot-id=<uuid>
* Output: Creates responses/ directory with index.jsonl + listener.pid
* Output: Creates responses/ directory with index.jsonl
*/
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
// Add NODE_MODULES_DIR to module resolution paths if set
if (process.env.NODE_MODULES_DIR) module.paths.unshift(process.env.NODE_MODULES_DIR);
const puppeteer = require('puppeteer-core');
// Import shared utilities from chrome_utils.js
const {
getEnv,
getEnvBool,
getEnvInt,
parseArgs,
connectToPage,
waitForPageLoaded,
} = require('../chrome/chrome_utils.js');
const PLUGIN_NAME = 'responses';
const OUTPUT_DIR = '.';
// PID file is now written by run_hook() with hook-specific name
const CHROME_SESSION_DIR = '../chrome';
// Resource types to capture (by default, capture everything)
const DEFAULT_TYPES = ['script', 'stylesheet', 'font', 'image', 'media', 'xhr', 'websocket'];
function parseArgs() {
    // Collect --key=value and bare --flag CLI arguments into a plain object.
    // Dashes in key names are normalized to underscores; bare flags become `true`.
    const parsed = {};
    for (const token of process.argv.slice(2)) {
        if (!token.startsWith('--')) continue;
        const [name, ...rest] = token.slice(2).split('=');
        parsed[name.replace(/-/g, '_')] = rest.join('=') || true;
    }
    return parsed;
}
function getEnv(name, defaultValue = '') {
    // Read an environment variable (falling back to defaultValue when unset
    // or empty) and strip surrounding whitespace.
    const raw = process.env[name];
    return (raw || defaultValue).trim();
}
function getEnvBool(name, defaultValue = false) {
    // Interpret common truthy/falsy env-var spellings; anything else (including
    // an unset variable) yields defaultValue.
    const normalized = (process.env[name] || '').trim().toLowerCase();
    switch (normalized) {
        case 'true': case '1': case 'yes': case 'on':
            return true;
        case 'false': case '0': case 'no': case 'off':
            return false;
        default:
            return defaultValue;
    }
}
function getEnvInt(name, defaultValue = 0) {
    // Parse an integer env var (base 10); missing or non-numeric values
    // fall back to defaultValue.
    const raw = (process.env[name] || String(defaultValue)).trim();
    const parsed = parseInt(raw, 10);
    return Number.isNaN(parsed) ? defaultValue : parsed;
}
async function waitForChromeTabOpen(timeoutMs = 60000) {
    // Poll until the chrome plugin has published both its CDP endpoint and
    // target id files, or give up after timeoutMs. Returns true when ready.
    const requiredFiles = [
        path.join(CHROME_SESSION_DIR, 'cdp_url.txt'),
        path.join(CHROME_SESSION_DIR, 'target_id.txt'),
    ];
    const deadline = Date.now() + timeoutMs;
    while (Date.now() < deadline) {
        if (requiredFiles.every(f => fs.existsSync(f))) {
            return true;
        }
        // Re-check every 100ms
        await new Promise(resolve => setTimeout(resolve, 100));
    }
    return false;
}
function getCdpUrl() {
    // Return the CDP WebSocket URL written by the chrome plugin, or null
    // when the session file has not been created yet.
    const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
    return fs.existsSync(cdpFile) ? fs.readFileSync(cdpFile, 'utf8').trim() : null;
}
function getPageId() {
    // Return the CDP target id written by the chrome plugin, or null when
    // the session file has not been created yet.
    const targetIdFile = path.join(CHROME_SESSION_DIR, 'target_id.txt');
    return fs.existsSync(targetIdFile) ? fs.readFileSync(targetIdFile, 'utf8').trim() : null;
}
function getExtensionFromMimeType(mimeType) {
const mimeMap = {
'text/html': 'html',
@@ -150,6 +102,7 @@ async function createSymlink(target, linkPath) {
}
async function setupListener() {
const timeout = getEnvInt('RESPONSES_TIMEOUT', 30) * 1000;
const typesStr = getEnv('RESPONSES_TYPES', DEFAULT_TYPES.join(','));
const typesToSave = typesStr.split(',').map(t => t.trim().toLowerCase());
@@ -162,37 +115,12 @@ async function setupListener() {
const indexPath = path.join(OUTPUT_DIR, 'index.jsonl');
fs.writeFileSync(indexPath, '');
// Wait for chrome tab to be open (up to 60s)
const tabOpen = await waitForChromeTabOpen(60000);
if (!tabOpen) {
throw new Error('Chrome tab not open after 60s (chrome plugin must run first)');
}
const cdpUrl = getCdpUrl();
if (!cdpUrl) {
throw new Error('No Chrome session found');
}
const browser = await puppeteer.connect({ browserWSEndpoint: cdpUrl });
// Find our page
const pages = await browser.pages();
const targetId = getPageId();
let page = null;
if (targetId) {
page = pages.find(p => {
const target = p.target();
return target && target._targetId === targetId;
});
}
if (!page) {
page = pages[pages.length - 1];
}
if (!page) {
throw new Error('No page found');
}
// Connect to Chrome page using shared utility
const { browser, page } = await connectToPage({
chromeSessionDir: CHROME_SESSION_DIR,
timeoutMs: timeout,
puppeteer,
});
// Set up response listener
page.on('response', async (response) => {
@@ -280,27 +208,6 @@ async function setupListener() {
return { browser, page };
}
async function waitForNavigation() {
    // Block until chrome_navigate signals completion by writing page_loaded.txt,
    // polling every 100ms for up to 2 minutes.
    const pageLoadedMarker = path.join('../chrome', 'page_loaded.txt');
    const deadline = Date.now() + 120000;
    while (!fs.existsSync(pageLoadedMarker)) {
        if (Date.now() >= deadline) {
            throw new Error('Timeout waiting for navigation (chrome_navigate did not complete)');
        }
        await new Promise(resolve => setTimeout(resolve, 100));
    }
    // Wait a bit longer for any post-load responses
    await new Promise(resolve => setTimeout(resolve, 1000));
}
async function main() {
const args = parseArgs();
const url = args.url;
@@ -317,22 +224,17 @@ async function main() {
process.exit(0);
}
const startTs = new Date();
const timeout = getEnvInt('RESPONSES_TIMEOUT', 30) * 1000;
try {
// Set up listener BEFORE navigation
await setupListener();
// Note: PID file is written by run_hook() with hook-specific name
// Snapshot.cleanup() kills all *.pid processes when done
// Wait for chrome_navigate to complete (BLOCKING)
await waitForNavigation();
// Extra 1s delay for late responses
await waitForPageLoaded(CHROME_SESSION_DIR, timeout * 4, 1000);
// Report success
const endTs = new Date();
// Output clean JSONL (no RESULT_JSON= prefix)
// Output clean JSONL
console.log(JSON.stringify({
type: 'ArchiveResult',
status: 'succeeded',
@@ -345,7 +247,6 @@ async function main() {
const error = `${e.name}: ${e.message}`;
console.error(`ERROR: ${error}`);
// Output clean JSONL (no RESULT_JSON= prefix)
console.log(JSON.stringify({
type: 'ArchiveResult',
status: 'failed',

View File

@@ -29,6 +29,8 @@ const {
getEnvBool,
getEnvInt,
parseResolution,
parseArgs,
readCdpUrl,
} = require('../chrome/chrome_utils.js');
// Check if screenshot is enabled BEFORE requiring puppeteer
@@ -47,18 +49,6 @@ const OUTPUT_DIR = '.';
const OUTPUT_FILE = 'screenshot.png';
const CHROME_SESSION_DIR = '../chrome';
// Parse command line arguments
function parseArgs() {
    // Turn --key=value and bare --flag CLI arguments into an object.
    // Dashes in key names become underscores; bare flags map to `true`.
    const parsed = {};
    for (const token of process.argv.slice(2)) {
        if (!token.startsWith('--')) continue;
        const [name, ...rest] = token.slice(2).split('=');
        parsed[name.replace(/-/g, '_')] = rest.join('=') || true;
    }
    return parsed;
}
// Check if staticfile extractor already downloaded this URL
const STATICFILE_DIR = '../staticfile';
function hasStaticFileOutput() {
@@ -81,15 +71,6 @@ async function waitForChromeTabLoaded(timeoutMs = 60000) {
return false;
}
// Get CDP URL from chrome plugin if available
function getCdpUrl() {
    // Read the CDP WebSocket URL published by the chrome plugin; null when
    // no shared Chrome session exists yet.
    const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
    if (!fs.existsSync(cdpFile)) {
        return null;
    }
    return fs.readFileSync(cdpFile, 'utf8').trim();
}
async function takeScreenshot(url) {
const timeout = (getEnvInt('CHROME_TIMEOUT') || getEnvInt('TIMEOUT', 60)) * 1000;
const resolution = getEnv('CHROME_RESOLUTION') || getEnv('RESOLUTION', '1440,2000');
@@ -108,7 +89,7 @@ async function takeScreenshot(url) {
try {
// Try to connect to existing Chrome session
const cdpUrl = getCdpUrl();
const cdpUrl = readCdpUrl(CHROME_SESSION_DIR);
if (cdpUrl) {
try {
browser = await puppeteer.connect({
@@ -214,7 +195,7 @@ async function main() {
}
// Only wait for page load if using shared Chrome session
const cdpUrl = getCdpUrl();
const cdpUrl = readCdpUrl(CHROME_SESSION_DIR);
if (cdpUrl) {
// Wait for page to be fully loaded
const pageLoaded = await waitForChromeTabLoaded(60000);

View File

@@ -119,11 +119,12 @@ class TestSEOWithChrome(TestCase):
self.assertNotIn('Traceback', result.stderr)
self.assertNotIn('Error:', result.stderr)
# example.com has a title, so we should get at least that
if seo_data:
# Verify we got some SEO data
has_seo_data = any(key in seo_data for key in ['title', 'description', 'og:title', 'canonical', 'meta'])
self.assertTrue(has_seo_data, f"No SEO data extracted: {seo_data}")
# example.com has a title, so we MUST get SEO data
self.assertIsNotNone(seo_data, "No SEO data extracted from file or stdout")
# Verify we got some SEO data
has_seo_data = any(key in seo_data for key in ['title', 'description', 'og:title', 'canonical', 'meta'])
self.assertTrue(has_seo_data, f"No SEO data extracted: {seo_data}")
except RuntimeError as e:
if 'Chrome' in str(e) or 'CDP' in str(e):

View File

@@ -7,114 +7,46 @@
* during the navigation request.
*
* Usage: on_Snapshot__23_ssl.js --url=<url> --snapshot-id=<uuid>
* Output: Writes ssl.json + listener.pid
* Output: Writes ssl.jsonl
*/
const fs = require('fs');
const path = require('path');
// Add NODE_MODULES_DIR to module resolution paths if set
if (process.env.NODE_MODULES_DIR) module.paths.unshift(process.env.NODE_MODULES_DIR);
const puppeteer = require('puppeteer-core');
// Import shared utilities from chrome_utils.js
const {
getEnvBool,
getEnvInt,
parseArgs,
connectToPage,
waitForPageLoaded,
} = require('../chrome/chrome_utils.js');
const PLUGIN_NAME = 'ssl';
const OUTPUT_DIR = '.';
const OUTPUT_FILE = 'ssl.jsonl';
// PID file is now written by run_hook() with hook-specific name
const CHROME_SESSION_DIR = '../chrome';
function parseArgs() {
    // Parse process.argv into { key: value } pairs: --key=value keeps the
    // value (joining any extra '='), --flag alone becomes true, and dashes
    // in names are rewritten to underscores.
    const parsed = {};
    for (const token of process.argv.slice(2)) {
        if (!token.startsWith('--')) continue;
        const eq = token.indexOf('=', 2);
        const name = (eq === -1 ? token.slice(2) : token.slice(2, eq)).replace(/-/g, '_');
        parsed[name] = eq === -1 ? true : (token.slice(eq + 1) || true);
    }
    return parsed;
}
function getEnv(name, defaultValue = '') {
    // Trimmed environment-variable lookup; unset/empty vars use defaultValue.
    const value = process.env[name] || defaultValue;
    return value.trim();
}
function getEnvBool(name, defaultValue = false) {
    // Map common boolean env-var spellings to true/false; unrecognized or
    // unset values return defaultValue.
    const val = (process.env[name] || '').trim().toLowerCase();
    const truthy = ['true', '1', 'yes', 'on'];
    const falsy = ['false', '0', 'no', 'off'];
    if (truthy.includes(val)) return true;
    if (falsy.includes(val)) return false;
    return defaultValue;
}
async function waitForChromeTabOpen(timeoutMs = 60000) {
    // Wait (polling every 100ms) for the chrome plugin to publish its CDP
    // URL and target id; false if they do not appear within timeoutMs.
    const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
    const targetIdFile = path.join(CHROME_SESSION_DIR, 'target_id.txt');
    const deadline = Date.now() + timeoutMs;
    while (Date.now() < deadline) {
        const ready = fs.existsSync(cdpFile) && fs.existsSync(targetIdFile);
        if (ready) return true;
        await new Promise(resolve => setTimeout(resolve, 100));
    }
    return false;
}
function getCdpUrl() {
    // Fetch the Chrome DevTools WebSocket URL from the shared session dir;
    // null when the chrome plugin has not written it yet.
    const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
    if (!fs.existsSync(cdpFile)) return null;
    return fs.readFileSync(cdpFile, 'utf8').trim();
}
function getPageId() {
    // Fetch the CDP target id from the shared session dir; null when the
    // chrome plugin has not written it yet.
    const targetIdFile = path.join(CHROME_SESSION_DIR, 'target_id.txt');
    if (!fs.existsSync(targetIdFile)) return null;
    return fs.readFileSync(targetIdFile, 'utf8').trim();
}
async function setupListener(url) {
const outputPath = path.join(OUTPUT_DIR, OUTPUT_FILE);
const timeout = getEnvInt('SSL_TIMEOUT', 30) * 1000;
// Only extract SSL for HTTPS URLs
if (!url.startsWith('https://')) {
throw new Error('URL is not HTTPS');
}
// Wait for chrome tab to be open (up to 60s)
const tabOpen = await waitForChromeTabOpen(60000);
if (!tabOpen) {
throw new Error('Chrome tab not open after 60s (chrome plugin must run first)');
}
const cdpUrl = getCdpUrl();
if (!cdpUrl) {
throw new Error('No Chrome session found');
}
const browser = await puppeteer.connect({ browserWSEndpoint: cdpUrl });
// Find our page
const pages = await browser.pages();
const targetId = getPageId();
let page = null;
if (targetId) {
page = pages.find(p => {
const target = p.target();
return target && target._targetId === targetId;
});
}
if (!page) {
page = pages[pages.length - 1];
}
if (!page) {
throw new Error('No page found');
}
// Connect to Chrome page using shared utility
const { browser, page } = await connectToPage({
chromeSessionDir: CHROME_SESSION_DIR,
timeoutMs: timeout,
puppeteer,
});
// Set up listener to capture SSL details during navigation
page.on('response', async (response) => {
@@ -171,24 +103,6 @@ async function setupListener(url) {
return { browser, page };
}
async function waitForNavigation() {
    // Block until chrome_navigate finishes (it writes page_loaded.txt),
    // polling every 100ms for up to 2 minutes before giving up.
    const pageLoadedMarker = path.join('../chrome', 'page_loaded.txt');
    const deadline = Date.now() + 120000;
    while (!fs.existsSync(pageLoadedMarker)) {
        if (Date.now() >= deadline) {
            throw new Error('Timeout waiting for navigation (chrome_navigate did not complete)');
        }
        await new Promise(resolve => setTimeout(resolve, 100));
    }
}
async function main() {
const args = parseArgs();
const url = args.url;
@@ -205,22 +119,16 @@ async function main() {
process.exit(0);
}
const startTs = new Date();
const timeout = getEnvInt('SSL_TIMEOUT', 30) * 1000;
try {
// Set up listener BEFORE navigation
await setupListener(url);
// Note: PID file is written by run_hook() with hook-specific name
// Snapshot.cleanup() kills all *.pid processes when done
// Wait for chrome_navigate to complete (BLOCKING)
await waitForNavigation();
await waitForPageLoaded(CHROME_SESSION_DIR, timeout * 4);
// Report success
const endTs = new Date();
// Output clean JSONL (no RESULT_JSON= prefix)
// Output clean JSONL
console.log(JSON.stringify({
type: 'ArchiveResult',
status: 'succeeded',
@@ -233,7 +141,6 @@ async function main() {
const error = `${e.name}: ${e.message}`;
console.error(`ERROR: ${error}`);
// Output clean JSONL (no RESULT_JSON= prefix)
console.log(JSON.stringify({
type: 'ArchiveResult',
status: 'failed',

View File

@@ -117,17 +117,20 @@ class TestSSLWithChrome(TestCase):
except json.JSONDecodeError:
continue
# Verify we got SSL data from HTTPS URL
if ssl_data:
# example.com uses HTTPS, should get certificate info
self.assertIn('protocol', ssl_data, f"SSL data missing protocol: {ssl_data}")
self.assertTrue(
ssl_data['protocol'].startswith('TLS') or ssl_data['protocol'].startswith('SSL'),
f"Unexpected protocol: {ssl_data['protocol']}"
)
else:
# If no SSL data, at least verify hook ran without crashing
self.assertEqual(result.returncode, 0, f"Hook failed: {result.stderr}")
# Verify hook ran successfully
self.assertEqual(result.returncode, 0, f"Hook failed: {result.stderr}")
self.assertNotIn('Traceback', result.stderr)
self.assertNotIn('Error:', result.stderr)
# example.com uses HTTPS, so we MUST get SSL certificate data
self.assertIsNotNone(ssl_data, "No SSL data extracted from HTTPS URL")
# Verify we got certificate info
self.assertIn('protocol', ssl_data, f"SSL data missing protocol: {ssl_data}")
self.assertTrue(
ssl_data['protocol'].startswith('TLS') or ssl_data['protocol'].startswith('SSL'),
f"Unexpected protocol: {ssl_data['protocol']}"
)
except RuntimeError as e:
if 'Chrome' in str(e) or 'CDP' in str(e):

View File

@@ -6,19 +6,29 @@
* Content-Type from the initial response. If it's a static file (PDF, image, etc.),
* it downloads the content directly using CDP.
*
* Usage: on_Snapshot__26_chrome_staticfile.bg.js --url=<url> --snapshot-id=<uuid>
* Output: Downloads static file + writes hook.pid
* Usage: on_Snapshot__31_staticfile.bg.js --url=<url> --snapshot-id=<uuid>
* Output: Downloads static file
*/
const fs = require('fs');
const path = require('path');
// Add NODE_MODULES_DIR to module resolution paths if set
if (process.env.NODE_MODULES_DIR) module.paths.unshift(process.env.NODE_MODULES_DIR);
const puppeteer = require('puppeteer-core');
// Import shared utilities from chrome_utils.js
const {
getEnvBool,
getEnvInt,
parseArgs,
connectToPage,
waitForPageLoaded,
} = require('../chrome/chrome_utils.js');
const PLUGIN_NAME = 'staticfile';
const OUTPUT_DIR = '.';
// PID file is now written by run_hook() with hook-specific name
const CHROME_SESSION_DIR = '../chrome';
// Content-Types that indicate static files
@@ -107,65 +117,6 @@ let downloadError = null;
let page = null;
let browser = null;
function parseArgs() {
    // Reduce CLI tokens into an options object: --key=value pairs keep their
    // value, bare --flags become true, dashes in names become underscores.
    return process.argv.slice(2).reduce((parsed, token) => {
        if (token.startsWith('--')) {
            const [name, ...rest] = token.slice(2).split('=');
            parsed[name.replace(/-/g, '_')] = rest.join('=') || true;
        }
        return parsed;
    }, {});
}
function getEnv(name, defaultValue = '') {
    // Look up an env var, substituting defaultValue for unset/empty values,
    // and strip surrounding whitespace.
    return (process.env[name] ? process.env[name] : defaultValue).trim();
}
function getEnvBool(name, defaultValue = false) {
    // Boolean env-var lookup: accepts true/1/yes/on and false/0/no/off
    // (case-insensitive); anything else returns defaultValue.
    const normalized = (process.env[name] || '').trim().toLowerCase();
    if (['true', '1', 'yes', 'on'].indexOf(normalized) !== -1) {
        return true;
    }
    if (['false', '0', 'no', 'off'].indexOf(normalized) !== -1) {
        return false;
    }
    return defaultValue;
}
function getEnvInt(name, defaultValue = 0) {
    // Integer env-var lookup (base 10); falls back to defaultValue when the
    // variable is unset or does not parse as a number.
    const source = process.env[name] || String(defaultValue);
    const value = parseInt(source.trim(), 10);
    if (Number.isNaN(value)) {
        return defaultValue;
    }
    return value;
}
async function waitForChromeTabOpen(timeoutMs = 60000) {
    // Poll (100ms interval) for the chrome plugin's session files; resolves
    // true once both exist, false after timeoutMs elapses.
    const sessionFiles = ['cdp_url.txt', 'target_id.txt']
        .map(name => path.join(CHROME_SESSION_DIR, name));
    const deadline = Date.now() + timeoutMs;
    while (Date.now() < deadline) {
        if (sessionFiles.every(f => fs.existsSync(f))) {
            return true;
        }
        await new Promise(resolve => setTimeout(resolve, 100));
    }
    return false;
}
function getCdpUrl() {
    // CDP WebSocket URL from the shared chrome session, or null if absent.
    const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
    return fs.existsSync(cdpFile)
        ? fs.readFileSync(cdpFile, 'utf8').trim()
        : null;
}
function getPageId() {
    // CDP target id from the shared chrome session, or null if absent.
    const targetIdFile = path.join(CHROME_SESSION_DIR, 'target_id.txt');
    return fs.existsSync(targetIdFile)
        ? fs.readFileSync(targetIdFile, 'utf8').trim()
        : null;
}
function isStaticContentType(contentType) {
if (!contentType) return false;
@@ -199,36 +150,16 @@ function getFilenameFromUrl(url) {
}
async function setupStaticFileListener() {
// Wait for chrome tab to be open (up to 60s)
const tabOpen = await waitForChromeTabOpen(60000);
if (!tabOpen) {
throw new Error('Chrome tab not open after 60s (chrome plugin must run first)');
}
const timeout = getEnvInt('STATICFILE_TIMEOUT', 30) * 1000;
const cdpUrl = getCdpUrl();
if (!cdpUrl) {
throw new Error('No Chrome session found');
}
browser = await puppeteer.connect({ browserWSEndpoint: cdpUrl });
// Find our page
const pages = await browser.pages();
const targetId = getPageId();
if (targetId) {
page = pages.find(p => {
const target = p.target();
return target && target._targetId === targetId;
});
}
if (!page) {
page = pages[pages.length - 1];
}
if (!page) {
throw new Error('No page found');
}
// Connect to Chrome page using shared utility
const connection = await connectToPage({
chromeSessionDir: CHROME_SESSION_DIR,
timeoutMs: timeout,
puppeteer,
});
browser = connection.browser;
page = connection.page;
// Track the first response to check Content-Type
let firstResponseHandled = false;
@@ -296,27 +227,6 @@ async function setupStaticFileListener() {
return { browser, page };
}
async function waitForNavigation() {
    // Block until chrome_navigate completes (signalled by page_loaded.txt),
    // polling every 100ms for up to 2 minutes.
    const pageLoadedMarker = path.join('../chrome', 'page_loaded.txt');
    const deadline = Date.now() + 120000;
    while (!fs.existsSync(pageLoadedMarker)) {
        if (Date.now() >= deadline) {
            throw new Error('Timeout waiting for navigation (chrome_navigate did not complete)');
        }
        await new Promise(resolve => setTimeout(resolve, 100));
    }
    // Wait a bit longer to ensure response handler completes
    await new Promise(resolve => setTimeout(resolve, 500));
}
function handleShutdown(signal) {
console.error(`\nReceived ${signal}, emitting final results...`);
@@ -378,7 +288,7 @@ async function main() {
const snapshotId = args.snapshot_id;
if (!url || !snapshotId) {
console.error('Usage: on_Snapshot__26_chrome_staticfile.bg.js --url=<url> --snapshot-id=<uuid>');
console.error('Usage: on_Snapshot__31_staticfile.bg.js --url=<url> --snapshot-id=<uuid>');
process.exit(1);
}
@@ -390,6 +300,8 @@ async function main() {
process.exit(0);
}
const timeout = getEnvInt('STATICFILE_TIMEOUT', 30) * 1000;
// Register signal handlers for graceful shutdown
process.on('SIGTERM', () => handleShutdown('SIGTERM'));
process.on('SIGINT', () => handleShutdown('SIGINT'));
@@ -398,11 +310,8 @@ async function main() {
// Set up static file listener BEFORE navigation
await setupStaticFileListener();
// Note: PID file is written by run_hook() with hook-specific name
// Snapshot.cleanup() kills all *.pid processes when done
// Wait for chrome_navigate to complete (BLOCKING)
await waitForNavigation();
await waitForPageLoaded(CHROME_SESSION_DIR, timeout * 4, 500);
// Keep process alive until killed by cleanup
console.error('Static file detection complete, waiting for cleanup signal...');