Mirror of https://github.com/ArchiveBox/ArchiveBox.git, synced 2026-01-05 02:16:27 +10:00

Commit: new gallerydl plugin and more
on_Snapshot__21_consolelog.js
@@ -1,9 +1,10 @@
#!/usr/bin/env node
/**
- * Capture console output from a page (DAEMON MODE).
+ * Capture console output from a page.
 *
- * This hook daemonizes and stays alive to capture console logs throughout
- * the snapshot lifecycle. It's killed by chrome_cleanup at the end.
+ * This hook sets up CDP listeners BEFORE chrome_navigate loads the page,
+ * then waits for navigation to complete. The listeners stay active through
+ * navigation and capture all console output.
 *
 * Usage: on_Snapshot__21_consolelog.js --url=<url> --snapshot-id=<uuid>
 * Output: Writes console.jsonl + listener.pid
@@ -150,10 +151,30 @@ async function setupListeners() {
    }
  });

  // Don't disconnect - keep browser connection alive
  return { browser, page };
}

+async function waitForNavigation() {
+  // Wait for chrome_navigate to complete (it writes page_loaded.txt)
+  const navDir = path.join(CHROME_SESSION_DIR, '../chrome_navigate');
+  const pageLoadedMarker = path.join(navDir, 'page_loaded.txt');
+  const maxWait = 120000; // 2 minutes
+  const pollInterval = 100;
+  let waitTime = 0;
+
+  while (!fs.existsSync(pageLoadedMarker) && waitTime < maxWait) {
+    await new Promise(resolve => setTimeout(resolve, pollInterval));
+    waitTime += pollInterval;
+  }
+
+  if (!fs.existsSync(pageLoadedMarker)) {
+    throw new Error('Timeout waiting for navigation (chrome_navigate did not complete)');
+  }
+
+  // Wait a bit longer for any post-load console output
+  await new Promise(resolve => setTimeout(resolve, 500));
+}
+
async function main() {
  const args = parseArgs();
  const url = args.url;
@@ -179,13 +200,16 @@ async function main() {
  const startTs = new Date();

  try {
-    // Set up listeners
+    // Set up listeners BEFORE navigation
    await setupListeners();

-    // Write PID file so chrome_cleanup can kill us
+    // Write PID file so chrome_cleanup can kill any remaining processes
    fs.writeFileSync(path.join(OUTPUT_DIR, PID_FILE), String(process.pid));

-    // Report success immediately (we're staying alive in background)
+    // Wait for chrome_navigate to complete (BLOCKING)
+    await waitForNavigation();
+
+    // Report success
    const endTs = new Date();
    const duration = (endTs - startTs) / 1000;

@@ -207,18 +231,7 @@ async function main() {
    };
    console.log(`RESULT_JSON=${JSON.stringify(result)}`);

-    // Daemonize: detach from parent and keep running
-    // This process will be killed by chrome_cleanup
-    if (process.stdin.isTTY) {
-      process.stdin.pause();
-    }
-    process.stdin.unref();
-    process.stdout.end();
-    process.stderr.end();
-
-    // Keep the process alive indefinitely
-    // Will be killed by chrome_cleanup via the PID file
-    setInterval(() => {}, 1000);
+    process.exit(0);

  } catch (e) {
    const error = `${e.name}: ${e.message}`;
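The Chrome listener hooks touched by this commit all coordinate with chrome_navigate through the same page_loaded.txt marker file. A minimal sketch of that handshake, in Python for illustration (the real hooks are Node.js; only the relative paths come from the diff above, everything else is an assumption):

import time
from pathlib import Path

# Each hook runs inside its own output dir, next to ../chrome_navigate/ (per the diff).
PAGE_LOADED_MARKER = Path('../chrome_navigate/page_loaded.txt')

def wait_for_navigation(max_wait: float = 120.0, poll: float = 0.1) -> None:
    """Block until chrome_navigate writes its marker file, or time out."""
    waited = 0.0
    while not PAGE_LOADED_MARKER.exists() and waited < max_wait:
        time.sleep(poll)
        waited += poll
    if not PAGE_LOADED_MARKER.exists():
        raise TimeoutError('chrome_navigate did not complete')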
archivebox/plugins/gallerydl/config.json (new file, 45 lines)
@@ -0,0 +1,45 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "type": "object",
  "additionalProperties": false,
  "properties": {
    "SAVE_GALLERY_DL": {
      "type": "boolean",
      "default": true,
      "x-aliases": ["USE_GALLERY_DL", "FETCH_GALLERY"],
      "description": "Enable gallery downloading with gallery-dl"
    },
    "GALLERY_DL_BINARY": {
      "type": "string",
      "default": "gallery-dl",
      "description": "Path to gallery-dl binary"
    },
    "GALLERY_DL_TIMEOUT": {
      "type": "integer",
      "default": 3600,
      "minimum": 30,
      "x-fallback": "TIMEOUT",
      "description": "Timeout for gallery downloads in seconds"
    },
    "GALLERY_DL_CHECK_SSL_VALIDITY": {
      "type": "boolean",
      "default": true,
      "x-fallback": "CHECK_SSL_VALIDITY",
      "description": "Whether to verify SSL certificates"
    },
    "GALLERY_DL_ARGS": {
      "type": "array",
      "items": {"type": "string"},
      "default": [
        "--write-metadata",
        "--write-info-json"
      ],
      "description": "Default gallery-dl arguments"
    },
    "GALLERY_DL_EXTRA_ARGS": {
      "type": "string",
      "default": "",
      "description": "Extra arguments for gallery-dl (space-separated)"
    }
  }
}
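The x-aliases and x-fallback keys in this schema describe which other environment variables can satisfy a setting. A minimal sketch of one way to resolve a value against the schema (the resolve_option helper is hypothetical, not ArchiveBox's actual config loader):

import json
import os
from pathlib import Path

def resolve_option(schema_path: str, key: str) -> str | None:
    """Resolve a value: exact env var, then x-aliases, then x-fallback, then schema default."""
    spec = json.loads(Path(schema_path).read_text())['properties'][key]
    candidates = [key, *spec.get('x-aliases', [])]
    if 'x-fallback' in spec:
        candidates.append(spec['x-fallback'])
    for name in candidates:
        if name in os.environ:
            return os.environ[name]
    default = spec.get('default')
    return None if default is None else str(default)

# e.g. GALLERY_DL_TIMEOUT falls back to TIMEOUT; SAVE_GALLERY_DL to USE_GALLERY_DL / FETCH_GALLERY
print(resolve_option('archivebox/plugins/gallerydl/config.json', 'GALLERY_DL_TIMEOUT'))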
archivebox/plugins/gallerydl/on_Crawl__00_validate_gallerydl.py (new executable file, 129 lines)
@@ -0,0 +1,129 @@
#!/usr/bin/env python3
"""
Validation hook for gallery-dl.

Runs at crawl start to verify gallery-dl binary is available.
Outputs JSONL for InstalledBinary and Machine config updates.
"""

import os
import sys
import json
import shutil
import hashlib
import subprocess
from pathlib import Path


def get_binary_version(abspath: str, version_flag: str = '--version') -> str | None:
    """Get version string from binary."""
    try:
        result = subprocess.run(
            [abspath, version_flag],
            capture_output=True,
            text=True,
            timeout=5,
        )
        if result.returncode == 0 and result.stdout:
            first_line = result.stdout.strip().split('\n')[0]
            return first_line[:64]
    except Exception:
        pass
    return None


def get_binary_hash(abspath: str) -> str | None:
    """Get SHA256 hash of binary."""
    try:
        with open(abspath, 'rb') as f:
            return hashlib.sha256(f.read()).hexdigest()
    except Exception:
        return None


def find_gallerydl() -> dict | None:
    """Find gallery-dl binary."""
    try:
        from abx_pkg import Binary, PipProvider, EnvProvider

        class GalleryDlBinary(Binary):
            name: str = 'gallery-dl'
            binproviders_supported = [PipProvider(), EnvProvider()]

        binary = GalleryDlBinary()
        loaded = binary.load()
        if loaded and loaded.abspath:
            return {
                'name': 'gallery-dl',
                'abspath': str(loaded.abspath),
                'version': str(loaded.version) if loaded.version else None,
                'sha256': loaded.sha256 if hasattr(loaded, 'sha256') else None,
                'binprovider': loaded.binprovider.name if loaded.binprovider else 'env',
            }
    except ImportError:
        pass
    except Exception:
        pass

    # Fallback to shutil.which
    abspath = shutil.which('gallery-dl') or os.environ.get('GALLERY_DL_BINARY', '')
    if abspath and Path(abspath).is_file():
        return {
            'name': 'gallery-dl',
            'abspath': abspath,
            'version': get_binary_version(abspath),
            'sha256': get_binary_hash(abspath),
            'binprovider': 'env',
        }

    return None


def main():
    # Check for gallery-dl (required)
    gallerydl_result = find_gallerydl()

    missing_deps = []

    # Emit results for gallery-dl
    if gallerydl_result and gallerydl_result.get('abspath'):
        print(json.dumps({
            'type': 'InstalledBinary',
            'name': gallerydl_result['name'],
            'abspath': gallerydl_result['abspath'],
            'version': gallerydl_result['version'],
            'sha256': gallerydl_result['sha256'],
            'binprovider': gallerydl_result['binprovider'],
        }))

        print(json.dumps({
            'type': 'Machine',
            '_method': 'update',
            'key': 'config/GALLERY_DL_BINARY',
            'value': gallerydl_result['abspath'],
        }))

        if gallerydl_result['version']:
            print(json.dumps({
                'type': 'Machine',
                '_method': 'update',
                'key': 'config/GALLERY_DL_VERSION',
                'value': gallerydl_result['version'],
            }))
    else:
        print(json.dumps({
            'type': 'Dependency',
            'bin_name': 'gallery-dl',
            'bin_providers': 'pip,env',
        }))
        missing_deps.append('gallery-dl')

    if missing_deps:
        print(f"Missing dependencies: {', '.join(missing_deps)}", file=sys.stderr)
        sys.exit(1)
    else:
        sys.exit(0)


if __name__ == '__main__':
    main()
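This hook reports entirely over stdout as JSON Lines, one record per line. A minimal sketch of consuming that output (how ArchiveBox actually ingests these records is not shown in the commit; the subprocess invocation below is only illustrative):

import json
import subprocess

proc = subprocess.run(
    ['python3', 'archivebox/plugins/gallerydl/on_Crawl__00_validate_gallerydl.py'],
    capture_output=True, text=True,
)
for line in proc.stdout.splitlines():
    record = json.loads(line)
    if record['type'] == 'InstalledBinary':
        print(f"found {record['name']} {record['version']} at {record['abspath']}")
    elif record['type'] == 'Machine':
        print(f"config update: {record['key']} = {record['value']}")
    elif record['type'] == 'Dependency':
        print(f"missing dependency: {record['bin_name']} (providers: {record['bin_providers']})")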
archivebox/plugins/gallerydl/on_Snapshot__52_gallerydl.py (new executable file, 299 lines)
@@ -0,0 +1,299 @@
#!/usr/bin/env python3
"""
Download image galleries from a URL using gallery-dl.

Usage: on_Snapshot__gallerydl.py --url=<url> --snapshot-id=<uuid>
Output: Downloads gallery images to $PWD/gallerydl/

Environment variables:
    GALLERY_DL_BINARY: Path to gallery-dl binary
    GALLERY_DL_TIMEOUT: Timeout in seconds (default: 3600 for large galleries)
    GALLERY_DL_CHECK_SSL_VALIDITY: Whether to check SSL certificates (default: True)
    GALLERY_DL_EXTRA_ARGS: Extra arguments for gallery-dl (space-separated)

    # Gallery-dl feature toggles
    USE_GALLERY_DL: Enable gallery-dl gallery extraction (default: True)
    SAVE_GALLERY_DL: Alias for USE_GALLERY_DL

    # Fallback to ARCHIVING_CONFIG values if GALLERY_DL_* not set:
    GALLERY_DL_TIMEOUT: Fallback timeout for gallery downloads
    TIMEOUT: Fallback timeout
    CHECK_SSL_VALIDITY: Fallback SSL check
"""

import json
import os
import shutil
import subprocess
import sys
from datetime import datetime, timezone
from pathlib import Path

import rich_click as click


# Extractor metadata
EXTRACTOR_NAME = 'gallerydl'
BIN_NAME = 'gallery-dl'
BIN_PROVIDERS = 'pip,env'
OUTPUT_DIR = '.'


def get_env(name: str, default: str = '') -> str:
    return os.environ.get(name, default).strip()


def get_env_bool(name: str, default: bool = False) -> bool:
    val = get_env(name, '').lower()
    if val in ('true', '1', 'yes', 'on'):
        return True
    if val in ('false', '0', 'no', 'off'):
        return False
    return default


def get_env_int(name: str, default: int = 0) -> int:
    try:
        return int(get_env(name, str(default)))
    except ValueError:
        return default


STATICFILE_DIR = '../staticfile'
MEDIA_DIR = '../media'

def has_staticfile_output() -> bool:
    """Check if staticfile extractor already downloaded this URL."""
    staticfile_dir = Path(STATICFILE_DIR)
    return staticfile_dir.exists() and any(staticfile_dir.iterdir())


def has_media_output() -> bool:
    """Check if media extractor already downloaded this URL."""
    media_dir = Path(MEDIA_DIR)
    return media_dir.exists() and any(media_dir.iterdir())


def find_gallerydl() -> str | None:
    """Find gallery-dl binary."""
    gallerydl = get_env('GALLERY_DL_BINARY')
    if gallerydl and os.path.isfile(gallerydl):
        return gallerydl

    binary = shutil.which('gallery-dl')
    if binary:
        return binary

    return None


def get_version(binary: str) -> str:
    """Get gallery-dl version."""
    try:
        result = subprocess.run([binary, '--version'], capture_output=True, text=True, timeout=10)
        return result.stdout.strip()[:64]
    except Exception:
        return ''


# Default gallery-dl args
def get_gallerydl_default_args() -> list[str]:
    """Build default gallery-dl arguments."""
    return [
        '--write-metadata',
        '--write-info-json',
    ]

def save_gallery(url: str, binary: str) -> tuple[bool, str | None, str]:
    """
    Download gallery using gallery-dl.

    Returns: (success, output_path, error_message)
    """
    # Get config from env (with GALLERY_DL_ prefix or fallback to ARCHIVING_CONFIG style)
    timeout = get_env_int('GALLERY_DL_TIMEOUT') or get_env_int('TIMEOUT', 3600)
    check_ssl = get_env_bool('GALLERY_DL_CHECK_SSL_VALIDITY', get_env_bool('CHECK_SSL_VALIDITY', True))
    extra_args = get_env('GALLERY_DL_EXTRA_ARGS', '')

    # Output directory is current directory (hook already runs in output dir)
    output_dir = Path(OUTPUT_DIR)

    # Build command (later options take precedence)
    cmd = [
        binary,
        *get_gallerydl_default_args(),
        '-d', str(output_dir),
    ]

    if not check_ssl:
        cmd.append('--no-check-certificate')

    if extra_args:
        cmd.extend(extra_args.split())

    cmd.append(url)

    try:
        result = subprocess.run(cmd, capture_output=True, timeout=timeout, text=True)

        # Check if any gallery files were downloaded
        gallery_extensions = (
            '.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.svg',
            '.mp4', '.webm', '.mkv', '.avi', '.mov', '.flv',
            '.json', '.txt', '.zip',
        )

        downloaded_files = [
            f for f in output_dir.glob('*')
            if f.is_file() and f.suffix.lower() in gallery_extensions
        ]

        if downloaded_files:
            # Return first image file, or first file if no images
            image_files = [
                f for f in downloaded_files
                if f.suffix.lower() in ('.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp')
            ]
            output = str(image_files[0]) if image_files else str(downloaded_files[0])
            return True, output, ''
        else:
            stderr = result.stderr

            # These are NOT errors - page simply has no downloadable gallery
            # Return success with no output (legitimate "nothing to download")
            if 'unsupported url' in stderr.lower():
                return True, None, ''  # Not a gallery site - success, no output
            if 'no results' in stderr.lower():
                return True, None, ''  # No gallery found - success, no output
            if result.returncode == 0:
                return True, None, ''  # gallery-dl exited cleanly, just no gallery - success

            # These ARE errors - something went wrong
            if '404' in stderr:
                return False, None, '404 Not Found'
            if '403' in stderr:
                return False, None, '403 Forbidden'
            if 'Unable to extract' in stderr:
                return False, None, 'Unable to extract gallery info'

            return False, None, f'gallery-dl error: {stderr[:200]}'

    except subprocess.TimeoutExpired:
        return False, None, f'Timed out after {timeout} seconds'
    except Exception as e:
        return False, None, f'{type(e).__name__}: {e}'

@click.command()
@click.option('--url', required=True, help='URL to download gallery from')
@click.option('--snapshot-id', required=True, help='Snapshot UUID')
def main(url: str, snapshot_id: str):
    """Download image gallery from a URL using gallery-dl."""

    start_ts = datetime.now(timezone.utc)
    version = ''
    output = None
    status = 'failed'
    error = ''
    binary = None
    cmd_str = ''

    try:
        # Check if gallery-dl is enabled
        if not (get_env_bool('USE_GALLERY_DL', True) and get_env_bool('SAVE_GALLERY_DL', True)):
            print('Skipping gallery-dl (USE_GALLERY_DL=False or SAVE_GALLERY_DL=False)')
            status = 'skipped'
            end_ts = datetime.now(timezone.utc)
            print(f'START_TS={start_ts.isoformat()}')
            print(f'END_TS={end_ts.isoformat()}')
            print(f'STATUS={status}')
            print(f'RESULT_JSON={json.dumps({"extractor": EXTRACTOR_NAME, "status": status, "url": url, "snapshot_id": snapshot_id})}')
            sys.exit(0)

        # Check if staticfile or media extractors already handled this (skip)
        if has_staticfile_output():
            print(f'Skipping gallery-dl - staticfile extractor already downloaded this')
            status = 'skipped'
            print(f'START_TS={start_ts.isoformat()}')
            print(f'END_TS={datetime.now(timezone.utc).isoformat()}')
            print(f'STATUS={status}')
            print(f'RESULT_JSON={json.dumps({"extractor": EXTRACTOR_NAME, "status": status, "url": url, "snapshot_id": snapshot_id})}')
            sys.exit(0)

        if has_media_output():
            print(f'Skipping gallery-dl - media extractor already downloaded this')
            status = 'skipped'
            print(f'START_TS={start_ts.isoformat()}')
            print(f'END_TS={datetime.now(timezone.utc).isoformat()}')
            print(f'STATUS={status}')
            print(f'RESULT_JSON={json.dumps({"extractor": EXTRACTOR_NAME, "status": status, "url": url, "snapshot_id": snapshot_id})}')
            sys.exit(0)

        # Find binary
        binary = find_gallerydl()
        if not binary:
            print(f'ERROR: {BIN_NAME} binary not found', file=sys.stderr)
            print(f'DEPENDENCY_NEEDED={BIN_NAME}', file=sys.stderr)
            print(f'BIN_PROVIDERS={BIN_PROVIDERS}', file=sys.stderr)
            print(f'INSTALL_HINT=pip install gallery-dl', file=sys.stderr)
            sys.exit(1)

        version = get_version(binary)
        cmd_str = f'{binary} {url}'

        # Run extraction
        success, output, error = save_gallery(url, binary)
        status = 'succeeded' if success else 'failed'

        if success:
            output_dir = Path(OUTPUT_DIR)
            files = list(output_dir.glob('*'))
            file_count = len([f for f in files if f.is_file()])
            if file_count > 0:
                print(f'gallery-dl completed: {file_count} files downloaded')
            else:
                print(f'gallery-dl completed: no gallery found on page (this is normal)')

    except Exception as e:
        error = f'{type(e).__name__}: {e}'
        status = 'failed'

    # Print results
    end_ts = datetime.now(timezone.utc)
    duration = (end_ts - start_ts).total_seconds()

    print(f'START_TS={start_ts.isoformat()}')
    print(f'END_TS={end_ts.isoformat()}')
    print(f'DURATION={duration:.2f}')
    if cmd_str:
        print(f'CMD={cmd_str}')
    if version:
        print(f'VERSION={version}')
    if output:
        print(f'OUTPUT={output}')
    print(f'STATUS={status}')

    if error:
        print(f'ERROR={error}', file=sys.stderr)

    # Print JSON result
    result_json = {
        'extractor': EXTRACTOR_NAME,
        'url': url,
        'snapshot_id': snapshot_id,
        'status': status,
        'start_ts': start_ts.isoformat(),
        'end_ts': end_ts.isoformat(),
        'duration': round(duration, 2),
        'cmd_version': version,
        'output': output,
        'error': error or None,
    }
    print(f'RESULT_JSON={json.dumps(result_json)}')

    sys.exit(0 if status == 'succeeded' else 1)


if __name__ == '__main__':
    main()
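Like the validation hook, this extractor reports through plain KEY=value lines plus a final RESULT_JSON line on stdout. A minimal sketch of driving it and picking out the result (the paths and URL below are placeholders; the real snapshot worker's invocation may differ):

import json
import subprocess

# The hook downloads into its current working directory, so a real caller would
# run it from the snapshot's gallerydl/ output dir; paths here are placeholders.
proc = subprocess.run(
    ['python3', '/path/to/archivebox/plugins/gallerydl/on_Snapshot__52_gallerydl.py',
     '--url=https://example.com/gallery', '--snapshot-id=example-snapshot-uuid'],
    capture_output=True, text=True,
)
result = {}
for line in proc.stdout.splitlines():
    if line.startswith('RESULT_JSON='):
        result = json.loads(line.removeprefix('RESULT_JSON='))
print(result.get('status'), result.get('output'))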
on_Snapshot__24_responses.js
@@ -1,9 +1,10 @@
#!/usr/bin/env node
/**
- * Archive all network responses during page load (DAEMON MODE).
+ * Archive all network responses during page load.
 *
- * This hook daemonizes and stays alive to capture network responses throughout
- * the snapshot lifecycle. It's killed by chrome_cleanup at the end.
+ * This hook sets up CDP listeners BEFORE chrome_navigate loads the page,
+ * then waits for navigation to complete. The listeners capture all network
+ * responses during the navigation.
 *
 * Usage: on_Snapshot__24_responses.js --url=<url> --snapshot-id=<uuid>
 * Output: Creates responses/ directory with index.jsonl + listener.pid
@@ -14,7 +15,6 @@ const path = require('path');
const crypto = require('crypto');
const puppeteer = require('puppeteer-core');

-// Extractor metadata
const EXTRACTOR_NAME = 'responses';
const OUTPUT_DIR = '.';
const PID_FILE = 'listener.pid';
@@ -23,7 +23,6 @@ const CHROME_SESSION_DIR = '../chrome_session';
// Resource types to capture (by default, capture everything)
const DEFAULT_TYPES = ['script', 'stylesheet', 'font', 'image', 'media', 'xhr', 'websocket'];

-// Parse command line arguments
function parseArgs() {
  const args = {};
  process.argv.slice(2).forEach(arg => {
@@ -35,7 +34,6 @@ function parseArgs() {
  return args;
}

-// Get environment variable with default
function getEnv(name, defaultValue = '') {
  return (process.env[name] || defaultValue).trim();
}
@@ -52,7 +50,6 @@ function getEnvInt(name, defaultValue = 0) {
  return isNaN(val) ? defaultValue : val;
}

-// Get CDP URL from chrome_session
function getCdpUrl() {
  const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
  if (fs.existsSync(cdpFile)) {
@@ -69,7 +66,6 @@ function getPageId() {
  return null;
}

-// Get file extension from MIME type
function getExtensionFromMimeType(mimeType) {
  const mimeMap = {
    'text/html': 'html',
@@ -101,7 +97,6 @@ function getExtensionFromMimeType(mimeType) {
  return mimeMap[mimeBase] || '';
}

-// Get extension from URL path
function getExtensionFromUrl(url) {
  try {
    const pathname = new URL(url).pathname;
@@ -112,49 +107,42 @@ function getExtensionFromUrl(url) {
  }
}

-// Sanitize filename
function sanitizeFilename(str, maxLen = 200) {
  return str
    .replace(/[^a-zA-Z0-9._-]/g, '_')
    .slice(0, maxLen);
}

-// Create symlink (handle errors gracefully)
async function createSymlink(target, linkPath) {
  try {
-    // Create parent directory
    const dir = path.dirname(linkPath);
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir, { recursive: true });
    }

-    // Remove existing symlink/file if present
    if (fs.existsSync(linkPath)) {
      fs.unlinkSync(linkPath);
    }

-    // Create relative symlink
    const relativePath = path.relative(dir, target);
    fs.symlinkSync(relativePath, linkPath);
  } catch (e) {
-    // Ignore symlink errors (file conflicts, permissions, etc.)
+    // Ignore symlink errors
  }
}

-// Set up response listener
async function setupListener() {
  const typesStr = getEnv('RESPONSES_TYPES', DEFAULT_TYPES.join(','));
  const typesToSave = typesStr.split(',').map(t => t.trim().toLowerCase());

-  // Create subdirectories for organizing responses
+  // Create subdirectories
  const allDir = path.join(OUTPUT_DIR, 'all');
  if (!fs.existsSync(allDir)) {
    fs.mkdirSync(allDir, { recursive: true });
  }

-  // Create index file
  const indexPath = path.join(OUTPUT_DIR, 'index.jsonl');
-  fs.writeFileSync(indexPath, ''); // Clear existing
+  fs.writeFileSync(indexPath, '');

  const cdpUrl = getCdpUrl();
  if (!cdpUrl) {
@@ -182,7 +170,7 @@ async function setupListener() {
    throw new Error('No page found');
  }

-  // Set up response listener to capture network traffic
+  // Set up response listener
  page.on('response', async (response) => {
    try {
      const request = response.request();
@@ -205,7 +193,6 @@ async function setupListener() {
      try {
        bodyBuffer = await response.buffer();
      } catch (e) {
-        // Some responses can't be captured (already consumed, etc.)
        return;
      }

@@ -234,7 +221,6 @@ async function setupListener() {
      const filename = path.basename(pathname) || 'index' + (extension ? '.' + extension : '');
      const dirPath = path.dirname(pathname);

-      // Create symlink: responses/<type>/<hostname>/<path>/<filename>
      const symlinkDir = path.join(OUTPUT_DIR, resourceType, hostname, dirPath);
      const symlinkPath = path.join(symlinkDir, filename);
      await createSymlink(uniquePath, symlinkPath);
@@ -250,7 +236,7 @@ async function setupListener() {
      const indexEntry = {
        ts: timestamp,
        method,
-        url: method === 'DATA' ? url.slice(0, 128) : url, // Truncate data: URLs
+        url: method === 'DATA' ? url.slice(0, 128) : url,
        urlSha256,
        status,
        resourceType,
@@ -267,10 +253,30 @@ async function setupListener() {
    }
  });

  // Don't disconnect - keep browser connection alive
  return { browser, page };
}

+async function waitForNavigation() {
+  // Wait for chrome_navigate to complete
+  const navDir = path.join(CHROME_SESSION_DIR, '../chrome_navigate');
+  const pageLoadedMarker = path.join(navDir, 'page_loaded.txt');
+  const maxWait = 120000; // 2 minutes
+  const pollInterval = 100;
+  let waitTime = 0;
+
+  while (!fs.existsSync(pageLoadedMarker) && waitTime < maxWait) {
+    await new Promise(resolve => setTimeout(resolve, pollInterval));
+    waitTime += pollInterval;
+  }
+
+  if (!fs.existsSync(pageLoadedMarker)) {
+    throw new Error('Timeout waiting for navigation (chrome_navigate did not complete)');
+  }
+
+  // Wait a bit longer for any post-load responses
+  await new Promise(resolve => setTimeout(resolve, 1000));
+}
+
async function main() {
  const args = parseArgs();
  const url = args.url;
@@ -296,13 +302,16 @@ async function main() {
  const startTs = new Date();

  try {
-    // Set up listener
+    // Set up listener BEFORE navigation
    await setupListener();

-    // Write PID file so chrome_cleanup can kill us
+    // Write PID file
    fs.writeFileSync(path.join(OUTPUT_DIR, PID_FILE), String(process.pid));

-    // Report success immediately (we're staying alive in background)
+    // Wait for chrome_navigate to complete (BLOCKING)
+    await waitForNavigation();
+
+    // Report success
    const endTs = new Date();
    const duration = (endTs - startTs) / 1000;

@@ -324,18 +333,7 @@ async function main() {
    };
    console.log(`RESULT_JSON=${JSON.stringify(result)}`);

-    // Daemonize: detach from parent and keep running
-    // This process will be killed by chrome_cleanup
-    if (process.stdin.isTTY) {
-      process.stdin.pause();
-    }
-    process.stdin.unref();
-    process.stdout.end();
-    process.stderr.end();
-
-    // Keep the process alive indefinitely
-    // Will be killed by chrome_cleanup via the PID file
-    setInterval(() => {}, 1000);
+    process.exit(0);

  } catch (e) {
    const error = `${e.name}: ${e.message}`;
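Each captured response becomes one JSON object in responses/index.jsonl; the hunk above shows at least the ts, method, url, urlSha256, status, and resourceType fields. A minimal sketch of scanning that index (no fields beyond those visible in the diff are assumed):

import json
from pathlib import Path

for line in Path('responses/index.jsonl').read_text().splitlines():
    entry = json.loads(line)
    print(entry['status'], entry['resourceType'], entry['url'][:80])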
on_Snapshot__23_ssl.js
@@ -1,9 +1,10 @@
#!/usr/bin/env node
/**
- * Extract SSL/TLS certificate details from a URL (DAEMON MODE).
+ * Extract SSL/TLS certificate details from a URL.
 *
- * This hook daemonizes and stays alive to capture SSL details throughout
- * the snapshot lifecycle. It's killed by chrome_cleanup at the end.
+ * This hook sets up CDP listeners BEFORE chrome_navigate loads the page,
+ * then waits for navigation to complete. The listener captures SSL details
+ * during the navigation request.
 *
 * Usage: on_Snapshot__23_ssl.js --url=<url> --snapshot-id=<uuid>
 * Output: Writes ssl.json + listener.pid
@@ -13,14 +14,12 @@ const fs = require('fs');
const path = require('path');
const puppeteer = require('puppeteer-core');

-// Extractor metadata
const EXTRACTOR_NAME = 'ssl';
const OUTPUT_DIR = '.';
const OUTPUT_FILE = 'ssl.json';
const PID_FILE = 'listener.pid';
const CHROME_SESSION_DIR = '../chrome_session';

-// Parse command line arguments
function parseArgs() {
  const args = {};
  process.argv.slice(2).forEach(arg => {
@@ -32,7 +31,6 @@ function parseArgs() {
  return args;
}

-// Get environment variable with default
function getEnv(name, defaultValue = '') {
  return (process.env[name] || defaultValue).trim();
}
@@ -44,7 +42,6 @@ function getEnvBool(name, defaultValue = false) {
  return defaultValue;
}

-// Get CDP URL from chrome_session
function getCdpUrl() {
  const cdpFile = path.join(CHROME_SESSION_DIR, 'cdp_url.txt');
  if (fs.existsSync(cdpFile)) {
@@ -61,7 +58,6 @@ function getPageId() {
  return null;
}

-// Set up SSL listener
async function setupListener(url) {
  const outputPath = path.join(OUTPUT_DIR, OUTPUT_FILE);

@@ -96,7 +92,7 @@ async function setupListener(url) {
    throw new Error('No page found');
  }

-  // Set up listener to capture SSL details when chrome_navigate loads the page
+  // Set up listener to capture SSL details during navigation
  page.on('response', async (response) => {
    try {
      const request = response.request();
@@ -148,10 +144,27 @@ async function setupListener(url) {
    }
  });

  // Don't disconnect - keep browser connection alive
  return { browser, page };
}

+async function waitForNavigation() {
+  // Wait for chrome_navigate to complete (it writes page_loaded.txt)
+  const navDir = path.join(CHROME_SESSION_DIR, '../chrome_navigate');
+  const pageLoadedMarker = path.join(navDir, 'page_loaded.txt');
+  const maxWait = 120000; // 2 minutes
+  const pollInterval = 100;
+  let waitTime = 0;
+
+  while (!fs.existsSync(pageLoadedMarker) && waitTime < maxWait) {
+    await new Promise(resolve => setTimeout(resolve, pollInterval));
+    waitTime += pollInterval;
+  }
+
+  if (!fs.existsSync(pageLoadedMarker)) {
+    throw new Error('Timeout waiting for navigation (chrome_navigate did not complete)');
+  }
+}
+
async function main() {
  const args = parseArgs();
  const url = args.url;
@@ -177,13 +190,16 @@ async function main() {
  const startTs = new Date();

  try {
-    // Set up listener
+    // Set up listener BEFORE navigation
    await setupListener(url);

-    // Write PID file so chrome_cleanup can kill us
+    // Write PID file so chrome_cleanup can kill any remaining processes
    fs.writeFileSync(path.join(OUTPUT_DIR, PID_FILE), String(process.pid));

-    // Report success immediately (we're staying alive in background)
+    // Wait for chrome_navigate to complete (BLOCKING)
+    await waitForNavigation();
+
+    // Report success
    const endTs = new Date();
    const duration = (endTs - startTs) / 1000;

@@ -205,18 +221,7 @@ async function main() {
    };
    console.log(`RESULT_JSON=${JSON.stringify(result)}`);

-    // Daemonize: detach from parent and keep running
-    // This process will be killed by chrome_cleanup
-    if (process.stdin.isTTY) {
-      process.stdin.pause();
-    }
-    process.stdin.unref();
-    process.stdout.end();
-    process.stderr.end();
-
-    // Keep the process alive indefinitely
-    // Will be killed by chrome_cleanup via the PID file
-    setInterval(() => {}, 1000);
+    process.exit(0);

  } catch (e) {
    const error = `${e.name}: ${e.message}`;