Hacker News AI Dimmer
// ==UserScript==
// @name         Hacker News AI Content Dimmer
// @namespace    http://tampermonkey.net/
// @version      v19
// @description  Dims AI-related headlines on Hacker News with streaming per-headline scoring
// @match        https://news.ycombinator.com/*
// @grant        GM_xmlhttpRequest
// @grant        GM_getValue
// @grant        GM_setValue
// @connect      api.openai.com
// @run-at       document-end
// ==/UserScript==

const VERSION = 'v19';

(function () {
    'use strict';

    const OPENAI_API_KEY = GM_getValue('OPENAI_API_KEY', 'YOUR_API_KEY_HERE');
    const OPENAI_MODEL = 'gpt-5-mini';

    if (OPENAI_API_KEY === 'YOUR_API_KEY_HERE') {
        console.error('[HN AI Dimmer] Please set your OpenAI API key in the script settings');
        alert('HN AI Dimmer: Please edit the script and set your OPENAI_API_KEY');
        return;
    }

    // Processing state
    let processedCount = 0;
    let totalHeadlines = 0;

    // Timer tracking
    let timerStartTime = null;
    let timerInterval = null;

    // Show global progress indicator
    function showProgressIndicator() {
        let indicator = document.getElementById('hn-ai-dimmer-indicator');
        if (!indicator) {
            indicator = document.createElement('div');
            indicator.id = 'hn-ai-dimmer-indicator';
            indicator.style.cssText = `
                position: absolute;
                top: 40px;
                right: 10px;
                background: #ff6600;
                color: black;
                padding: 8px 12px;
                border-radius: 4px;
                font-family: Verdana, Geneva, sans-serif;
                font-size: 11px;
                z-index: 10000;
                box-shadow: 0 2px 8px rgba(0,0,0,0.2);
            `;

            // Find the beige content area (the inner table)
            const contentTable = document.querySelector('center > table > tbody > tr > td > table');
            if (contentTable) {
                contentTable.style.position = 'relative';
                contentTable.appendChild(indicator);
            } else {
                // Fallback to body
                indicator.style.position = 'fixed';
                document.body.appendChild(indicator);
            }

            // Start timer
            timerStartTime = performance.now();
            updateTimer();
        }
        return indicator;
    }

    function updateTimer() {
        const indicator = document.getElementById('hn-ai-dimmer-indicator');
        if (!indicator) return;

        if (timerInterval) {
            clearInterval(timerInterval);
        }

        timerInterval = setInterval(() => {
            if (!timerStartTime) return;
            const elapsed = (performance.now() - timerStartTime) / 1000;
            indicator.textContent = `Scoring: ${processedCount}/${totalHeadlines} (${elapsed.toFixed(1)}s)`;
        }, 100);
    }

    function finalizeIndicator() {
        const indicator = document.getElementById('hn-ai-dimmer-indicator');
        if (indicator && timerStartTime) {
            if (timerInterval) {
                clearInterval(timerInterval);
                timerInterval = null;
            }
            const elapsed = (performance.now() - timerStartTime) / 1000;
            indicator.textContent = `Complete (${elapsed.toFixed(1)}s)`;
            indicator.style.cursor = 'pointer';

            // Add click handler to clear cache
            indicator.onclick = function () {
                GM_setValue(SCORE_CACHE_KEY, {});
                indicator.textContent = 'Cache cleared!';
                console.log('[HN AI Dimmer] Cache cleared');
            };
        }
    }

    // Create score badge for a headline
    function createScoreBadge(row) {
        const badge = document.createElement('span');
        badge.className = 'ai-score-badge';
        badge.textContent = '•';
        badge.style.cssText = `
        `;

        // Find the subtext row (next sibling of the story row)
        const subtextRow = row.nextElementSibling;
        if (subtextRow) {
            const subtext = subtextRow.querySelector('.subtext');
            if (subtext) {
                // Append after the last element in subtext
                subtext.appendChild(document.createTextNode(' | '));
                subtext.appendChild(badge);
                badge.appendChild(document.createTextNode(' AI'));
            }
        }
        return badge;
    }

    // Update badge with loading state
    function setBadgeLoading(badge) {
        if (badge) {
            badge.textContent = '...';
        }
    }

    // Update badge with score
    function setBadgeScore(badge, score) {
        if (badge) {
            // Display as 0.0 to 1.0
            const displayScore = (score / 100).toFixed(1);
            badge.textContent = displayScore + " AI";
        }
    }

    // Get all headlines from the page
    function getHeadlines() {
        const headlines = [];
        const titleLinks = document.querySelectorAll('.titleline > a');
        titleLinks.forEach((link, index) => {
            const row = link.closest('tr.athing');
            if (row) {
                // Create badge immediately
                const badge = createScoreBadge(row);
                headlines.push({
                    element: row,
                    link: link,
                    badge: badge,
                    text: link.textContent.trim(),
                    url: link.href,
                    index: index
                });
            }
        });
        return headlines;
    }

    // Helper to call OpenAI Responses API
    function callResponsesAPI(requestBody) {
        return new Promise((resolve, reject) => {
            // console.log('[HN AI Dimmer] Request body:', JSON.stringify(requestBody, null, 2));
            GM_xmlhttpRequest({
                method: 'POST',
                url: 'https://api.openai.com/v1/responses',
                headers: {
                    'Content-Type': 'application/json',
                    'Authorization': `Bearer ${OPENAI_API_KEY}`
                },
                data: JSON.stringify(requestBody),
                onload: function (response) {
                    try {
                        const data = JSON.parse(response.responseText);
                        if (data.error) {
                            reject(new Error(data.error.message));
                            return;
                        }
                        resolve(data);
                    } catch (error) {
                        console.error('[HN AI Dimmer] Error parsing API response:', error);
                        console.error('[HN AI Dimmer] Response:', response.responseText);
                        reject(error);
                    }
                },
                onerror: function (error) {
                    console.error('[HN AI Dimmer] API request failed:', error);
                    reject(error);
                }
            });
        });
    }

    // Response state
    let responseId = null;
    const CACHE_KEY = 'hn-ai-scorer-instructions-v1';
    const SCORE_CACHE_KEY = 'hn-ai-scorer-cache-v1';

    const instructions = `You are a Hacker News headline AI-relevance scorer. For each message I send, you'll receive a headline and URL. Score based on BOTH the headline text AND the URL domain/path. Respond with ONLY a number from 0-100:
- 0 = Not AI-related at all (default for most topics - technology, programming, startups, privacy, open source, etc.)
- 50 = Moderately AI-focused (AI is a significant part but not the main topic)
- 100 = Primarily or exclusively about AI, LLMs, ML, GPTs, neural networks, or AI research
Consider URL context: domains like openai.com, anthropic.com, huggingface.co, or paths containing /ai/, /ml/, /llm/ suggest AI relevance.
Those numbers are merely examples of how the spectrum should be arranged. You should reply with a score that is fine-grained and not simply multiples of 25.
Respond with ONLY the number, no explanation or other text.`;

    // Simple hash function for headlines
    function hashHeadline(text) {
        let hash = 0;
        for (let i = 0; i < text.length; i++) {
            const char = text.charCodeAt(i);
            hash = ((hash << 5) - hash) + char;
            hash = hash & hash; // Convert to 32-bit integer
        }
        return hash.toString(36);
    }

    // Get cached score for a headline
    function getCachedScore(headlineText, url) {
        const cacheKey = headlineText + '||' + url;
        const hash = hashHeadline(cacheKey);
        const cache = GM_getValue(SCORE_CACHE_KEY, {});
        return cache[hash];
    }

    // Cache score for a headline
    function setCachedScore(headlineText, url, score) {
        const cacheKey = headlineText + '||' + url;
        const hash = hashHeadline(cacheKey);
        const cache = GM_getValue(SCORE_CACHE_KEY, {});
        cache[hash] = score;
        GM_setValue(SCORE_CACHE_KEY, cache);
    }

    // Initialize response session
    async function initializeResponse() {
        const requestBody = {
            model: OPENAI_MODEL,
            instructions: instructions,
            prompt_cache_key: CACHE_KEY,
            reasoning: {
                effort: 'medium'
            },
            input: "Ready to score headlines."
        };
        const response = await callResponsesAPI(requestBody);
        responseId = response.id;
        console.log('[HN AI Dimmer] Initialized response session:', responseId);
    }

    // Score a single headline
    async function scoreHeadline(headline) {
        // Check cache first
        const cachedScore = getCachedScore(headline.text, headline.url);
        if (cachedScore !== undefined) {
            console.log(`[HN AI Dimmer] Using cached score for "${headline.text.substring(0, 50)}..." - Score: ${cachedScore}`);
            setBadgeScore(headline.badge, cachedScore);
            applyTransparency(headline.element, cachedScore);
            processedCount++;
            return cachedScore;
        }

        setBadgeLoading(headline.badge);

        try {
            const input = `Headline: ${headline.text}\nURL: ${headline.url}`;
            const requestBody = {
                model: OPENAI_MODEL,
                instructions: instructions,
                prompt_cache_key: CACHE_KEY,
                reasoning: {
                    effort: 'low'
                },
                input: input,
                previous_response_id: responseId
            };

            const response = await callResponsesAPI(requestBody);

            // Update response ID for conversation continuity
            responseId = response.id;

            // Get the output text from the response
            let scoreText = '';
            if (response.output && Array.isArray(response.output)) {
                // Find the message block
                const messageBlock = response.output.find(block => block.type === 'message');
                if (messageBlock && messageBlock.content && Array.isArray(messageBlock.content)) {
                    // Find the output_text in the content array
                    const outputText = messageBlock.content.find(item => item.type === 'output_text');
                    if (outputText && outputText.text) {
                        scoreText = outputText.text.trim();
                    }
                }
            }

            console.log('[HN AI Dimmer] Score text:', scoreText);

            // Parse score - extract first number found
            const numberMatch = scoreText.match(/\b(\d+(?:\.\d+)?)\b/);
            if (!numberMatch) {
                throw new Error(`No number found in response: ${scoreText}`);
            }
            const score = parseFloat(numberMatch[1]);
            if (isNaN(score)) {
                throw new Error(`Invalid score: ${scoreText}`);
            }

            // Cache the score
            setCachedScore(headline.text, headline.url, score);

            // Update badge
            setBadgeScore(headline.badge, score);

            // Apply transparency
            applyTransparency(headline.element, score);

            // Increment processed count
            processedCount++;

            // console.log(`[HN AI Dimmer] "${headline.text.substring(0, 50)}..." - Score: ${score}`);
            return score;
        } catch (error) {
            console.error(`[HN AI Dimmer] Failed to score headline "${headline.text}":`, error);
            headline.badge.textContent = '?';
            processedCount++;
            return 0;
        }
    }
    // Apply transparency based on AI score
    function applyTransparency(element, score) {
        // Calculate opacity based on score (0-100):
        //   Score 0   (0.0 AI) = 1.0 opacity (fully visible)
        //   Score 20  (0.2 AI) = 0.81 opacity (slightly dimmed)
        //   Score 50  (0.5 AI) = 0.525 opacity (about half transparent)
        //   Score 100 (1.0 AI) = 0.05 opacity (barely visible)
        let opacity;
        if (score === 0) {
            opacity = 1.0;
        } else {
            // Linear interpolation from 1.0 at score 0 to 0.05 at score 100:
            // opacity = 1.0 - (score / 100) * 0.95
            opacity = Math.max(0.05, 1.0 - (score / 100) * 0.95);
        }
        element.style.opacity = opacity.toFixed(2);
        element.style.transition = 'opacity 0.3s ease-in-out';
        element.setAttribute('data-ai-score', score);
    }
    // Process headlines with concurrency control
    async function processHeadlines(headlines, concurrency = 3) {
        const processing = new Set();
        let needsAPI = false;

        for (const headline of headlines) {
            // Check if this headline needs API call
            if (getCachedScore(headline.text, headline.url) === undefined) {
                needsAPI = true;
            }

            // Wait if we're at max concurrency
            while (processing.size >= concurrency) {
                await Promise.race(processing);
            }

            // Start processing this headline
            const promise = scoreHeadline(headline)
                .finally(() => {
                    processing.delete(promise);
                });
            processing.add(promise);
        }

        // Wait for all remaining headlines to complete
        await Promise.all(processing);
        return needsAPI;
    }

    // Main execution
    async function main() {
        console.log(`[HN AI Dimmer v${VERSION}] Starting analysis...`);

        const headlines = getHeadlines();
        if (headlines.length === 0) {
            console.log('[HN AI Dimmer] No headlines found on page');
            return;
        }

        totalHeadlines = headlines.length;
        processedCount = 0;

        // Check how many headlines need API calls
        const uncachedCount = headlines.filter(h => getCachedScore(h.text, h.url) === undefined).length;
        console.log(`[HN AI Dimmer] Found ${headlines.length} headlines (${uncachedCount} uncached)`);

        showProgressIndicator();

        try {
            // Only initialize API if we have uncached headlines
            if (uncachedCount > 0) {
                await initializeResponse();
            }
            await processHeadlines(headlines, 3);
            console.log('[HN AI Dimmer] Analysis complete!');
            finalizeIndicator();
        } catch (error) {
            console.error('[HN AI Dimmer] Failed to analyze headlines:', error);
            const indicator = document.getElementById('hn-ai-dimmer-indicator');
            if (indicator) {
                if (timerInterval) {
                    clearInterval(timerInterval);
                    timerInterval = null;
                }
                indicator.textContent = 'Error!';
            }
        }
    }

    // Run when page loads
    if (document.readyState === 'loading') {
        document.addEventListener('DOMContentLoaded', main);
    } else {
        main();
    }

    console.log(`[HN AI Dimmer v${VERSION}] Loaded`);
})();
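Setup note: the script reads the key with GM_getValue('OPENAI_API_KEY', ...), so besides editing the default value in the source you can seed the key into Tampermonkey's script storage once and leave the source untouched. A minimal sketch of that approach (the key string below is a hypothetical placeholder, not part of the script): paste the line near the top of the installed script, load news.ycombinator.com once so it runs, then delete the line so the key is not left in the source.

// One-time setup sketch (remove after a single page load):
// stores the key in Tampermonkey storage so GM_getValue('OPENAI_API_KEY') finds it later.
GM_setValue('OPENAI_API_KEY', 'sk-your-key-here'); // hypothetical placeholder value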