// ==UserScript==
// @name         UnsafeYT
// @namespace    unsafe-yt-userscript
// @license      MIT
// @version      1.5
// @description  Full port of the UnsafeYT content script to Tampermonkey. Automatically toggles ON when a valid token is detected in the video description (first line starts with "token:"). Made By ChatGPT
// @match        https://www.youtube.com/watch*
// @match        https://m.youtube.com/watch*
// @grant        none
// @run-at       document-idle
// @downloadURL  none
// ==/UserScript==

/* =====================================================================================
   SUMMARY
   - Full WebGL (video) + audio-graph pipeline restored from the extension's content script.
   - Auto-detects tokens in the video's description when the first line begins with "token:".
   - Adds two buttons beside the Like/Dislike bar:
       1) Toggle Effects (transparent bg, white text; outline red = off, green = on)
       2) Enter Token (transparent bg, white text; opens a manual prompt)
   - Default: OFF. If a token is auto-detected, the script automatically enables effects.
   - Heavily commented, with a clear structure for easy review.
   ===================================================================================== */

(function () {
  "use strict";

  /************************************************************************
   * SECTION A — CONFIG & SHADERS (embedded)
   ************************************************************************/

  // Vertex shader (screen quad) - WebGL2 (#version 300 es)
  const VERT_SHADER_SRC = `#version 300 es
in vec2 a_position;
in vec2 a_texCoord;
out vec2 v_texCoord;
void main() {
  gl_Position = vec4(a_position, 0.0, 1.0);
  v_texCoord = a_texCoord;
}`;

  // Fragment shader (the decoding/visual effect). This is the original .frag you gave.
  const FRAG_SHADER_SRC = `#version 300 es
precision highp float;

in vec2 v_texCoord;
out vec4 fragColor;

uniform sampler2D u_sampler;
uniform sampler2D u_shuffle;

vec2 getNormal(vec2 uv) {
  vec2 offset = vec2(0.0065, 0.0065);
  vec2 center = round((uv + offset) * 80.0) / 80.0;
  return (center - (uv + offset)) * 80.0;
}

float getAxis(vec2 uv) {
  vec2 normal = getNormal(uv);
  float axis = abs(normal.x) > 0.435 ? 1.0 : 0.0;
  return abs(normal.y) > 0.4 ? 2.0 : axis;
}

float getGrid(vec2 uv) {
  float axis = getAxis(uv);
  return axis > 0.0 ? 1.0 : 0.0;
}

vec4 getColor(vec2 uv) {
  vec2 shuffle_sample = texture(u_shuffle, uv).rg;
  vec2 base_new_uv = uv + shuffle_sample;
  vec4 c = texture(u_sampler, base_new_uv);
  return vec4(1.0 - c.rgb, c.a);
}

vec4 getGridFix(vec2 uv) {
  vec2 normal = getNormal(uv);
  vec4 base = getColor(uv);
  vec4 outline = getColor(uv + normal * 0.002);
  float grid = getGrid(uv);
  return mix(base, outline, grid);
}

vec4 getSmoothed(vec2 uv, float power, float slice) {
  vec4 result = vec4(0.0, 0.0, 0.0, 0.0);
  float PI = 3.14159265;
  float TAU = PI * 2.0;
  for (float i = 0.0; i < 8.0; i++) {
    float angle = ((i / 8.0) * TAU) + (PI / 2.0) + slice;
    vec2 normal = vec2(sin(angle), cos(angle) * 1.002);
    result += getGridFix(uv + normal * power);
  }
  return result / 8.0;
}

void main() {
  vec2 uv = vec2(v_texCoord.x, -v_texCoord.y + 1.0);
  float axis = getAxis(uv);
  float grid = axis > 0.0 ? 1.0 : 0.0;
  float slices[3] = float[3](0.0, 0.0, 3.14159265);
  vec4 main = getGridFix(uv);
  vec4 outline = getSmoothed(uv, 0.001, slices[int(axis)]);
  main = mix(main, outline, grid);
  fragColor = main;
}`;
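
  // How the fragment shader decodes a frame (a plain-English reading of the code above):
  //   1. u_shuffle is a texture of per-pixel UV offsets. getColor() adds the sampled
  //      offset to the current UV before reading the video texture, undoing the
  //      pixel shuffle that the encoder applied.
  //   2. The recovered color is then inverted (1.0 - c.rgb).
  //   3. getNormal()/getAxis() detect pixels near the seams of the 80x80 block grid,
  //      and getGridFix()/getSmoothed() resample around those seams to hide grid artifacts.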

  /************************************************************************
   * SECTION B — GLOBAL STATE (clear names)
   ************************************************************************/

  // Token / state
  let currentToken = "";      // the decode token (from description or manual)
  let savedDescription = "";  // last observed description (avoid repeated parsing)
  let isRendering = false;    // whether effects currently active

  // Video / WebGL / Audio objects (reset in cleanup)
  let activeCanvas = null;
  let activeGl = null;
  let activeAudioCtx = null;
  let activeSrcNode = null;
  let activeGainNode = null;
  let activeOutputGainNode = null;
  let activeNotchFilters = [];
  let resizeIntervalId = null;
  let renderFrameId = null;
  let originalVideoContainerStyle = null;
  let resizeCanvasListener = null;
  let currentNode = null;

  // URL tracking (YouTube SPA)
  let currentUrl = location.href.split("&")[0].split("#")[0];

  /************************************************************************
   * SECTION C — SMALL UTILITIES (readable, documented)
   ************************************************************************/

  /**
   * deterministicHash(s, prime, modulus)
   * - Deterministic numeric hash scaled to [0, 1)
   * - Used by the shuffle map generator
   */
  function deterministicHash(s, prime = 31, modulus = Math.pow(2, 32)) {
    let h = 0;
    modulus = Math.floor(modulus);
    for (let i = 0; i < s.length; i++) {
      const charCode = s.charCodeAt(i);
      h = (h * prime + charCode) % modulus;
      if (h < 0) h += modulus;
    }
    return h / modulus;
  }
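
  // Worked example of deterministicHash (the values follow directly from the loop above):
  //   deterministicHash("abc") -> 'a'=97, then 97*31+98 = 3105, then 3105*31+99 = 96354,
  //   so the result is 96354 / 2^32 ≈ 0.0000224. Same input, same output, every time,
  //   which is what lets two parties derive the same shuffle map from a shared token.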

  /**
   * _generateUnshuffleOffsetMapFloat32Array(seedToken, width, height)
   * - Produces a Float32Array of length width*height*2 containing
   *   normalized offsets used by the shader to unshuffle pixels.
   */
  function _generateUnshuffleOffsetMapFloat32Array(seedToken, width, height) {
    if (width <= 0 || height <= 0) {
      throw new Error("Width and height must be positive integers.");
    }
    if (typeof seedToken !== 'string' || seedToken.length === 0) {
      throw new Error("Seed string is required for deterministic generation.");
    }

    const totalPixels = width * height;

    // Two independent deterministic hashes
    const startHash = deterministicHash(seedToken, 31, Math.pow(2, 32) - 1);
    const stepSeed = seedToken + "_step";
    const stepHash = deterministicHash(stepSeed, 37, Math.pow(2, 32) - 2);

    // Angle and increment used to produce per-index pseudo-random numbers
    const startAngle = startHash * Math.PI * 2.0;
    const angleIncrement = stepHash * Math.PI / Math.max(width, height);

    // Generate values and their original index
    const indexedValues = [];
    for (let i = 0; i < totalPixels; i++) {
      const value = Math.sin(startAngle + i * angleIncrement);
      indexedValues.push({ value: value, index: i });
    }

    // Sort by value to create a deterministic 'shuffle' permutation
    indexedValues.sort((a, b) => a.value - b.value);

    // pLinearized maps originalIndex -> shuffledIndex
    const pLinearized = new Array(totalPixels);
    for (let k = 0; k < totalPixels; k++) {
      const originalIndex = indexedValues[k].index;
      const shuffledIndex = k;
      pLinearized[originalIndex] = shuffledIndex;
    }

    // Create the offset map: for each original pixel compute where it should sample from
    const offsetMapFloats = new Float32Array(totalPixels * 2);
    for (let oy = 0; oy < height; oy++) {
      for (let ox = 0; ox < width; ox++) {
        const originalLinearIndex = oy * width + ox;
        const shuffledLinearIndex = pLinearized[originalLinearIndex];
        const sy_shuffled = Math.floor(shuffledLinearIndex / width);
        const sx_shuffled = shuffledLinearIndex % width;
        // offsets normalized relative to texture size (so shader can add to UV)
        const offsetX = (sx_shuffled - ox) / width;
        const offsetY = (sy_shuffled - oy) / height;
        const pixelDataIndex = (oy * width + ox) * 2;
        offsetMapFloats[pixelDataIndex] = offsetX;
        offsetMapFloats[pixelDataIndex + 1] = offsetY;
      }
    }

    return offsetMapFloats;
  }
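
  /**
   * DEBUG HELPER (illustrative only; not called anywhere in this script).
   * Sanity-checks a generated offset map: converting each (offsetX, offsetY)
   * back to a source pixel must visit every pixel exactly once, because the
   * map is built from a permutation. Handy when experimenting with map sizes.
   */
  function _debugOffsetMapIsPermutation(seedToken, width, height) {
    const map = _generateUnshuffleOffsetMapFloat32Array(seedToken, width, height);
    const seen = new Uint8Array(width * height);
    for (let oy = 0; oy < height; oy++) {
      for (let ox = 0; ox < width; ox++) {
        const i = (oy * width + ox) * 2;
        const sx = Math.round(map[i] * width) + ox;      // undo the normalization above
        const sy = Math.round(map[i + 1] * height) + oy;
        if (sx < 0 || sx >= width || sy < 0 || sy >= height) return false;
        if (seen[sy * width + sx]) return false;         // a source pixel was used twice
        seen[sy * width + sx] = 1;
      }
    }
    return true; // every source pixel used exactly once
  }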

  /**
   * sleep(ms) — small helper to await timeouts
   */
  function sleep(ms) {
    return new Promise((r) => setTimeout(r, ms));
  }

  /************************************************************************
   * SECTION D — TOKEN DETECTION (strict: first line must start with "token:")
   ************************************************************************/

  /**
   * extractTokenFromText(text)
   * - If text's first line starts with "token:" (case-insensitive), returns the token after the colon.
   * - Otherwise returns an empty string.
   */
  function extractTokenFromText(text) {
    if (!text) return "";
    const trimmed = text.trim();
    const firstLine = trimmed.split(/\r?\n/)[0] || "";
    if (firstLine.toLowerCase().startsWith("token:")) {
      return firstLine.substring(6).trim();
    }
    return "";
  }

  /**
   * findDescriptionToken()
   * - Attempts multiple selectors that may contain the description.
   * - Returns the token, or an empty string.
   */
  function findDescriptionToken() {
    // Known description selectors
    const selectors = [
      "#description yt-formatted-string",
      "#description",
      "ytd-video-secondary-info-renderer #description",
      "#meta-contents yt-formatted-string",
      "#meta-contents #description",
      "ytd-expander #content" // fallback
    ];
    for (const sel of selectors) {
      const el = document.querySelector(sel);
      if (el && el.innerText) {
        const tok = extractTokenFromText(el.innerText);
        if (tok) return tok;
      }
    }
    // As a last resort, scan elements that commonly hold text
    const candidates = document.querySelectorAll('yt-formatted-string, yt-attributed-string, .content, #description');
    for (const el of candidates) {
      if (!el || !el.innerText) continue;
      const tok = extractTokenFromText(el.innerText);
      if (tok) return tok;
    }
    return "";
  }
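
  // Behavior of the detection above, traced from the code (no additional logic):
  //   extractTokenFromText("token: abc123\nsome description")  -> "abc123"
  //   extractTokenFromText("TOKEN:xyz")                        -> "xyz"  (case-insensitive)
  //   extractTokenFromText("see token: abc123")                -> ""     (must start the first line)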

  /************************************************************************
   * SECTION E — UI (buttons & indicators)
   ************************************************************************/

  /**
   * createControlButtons()
   * - Inserts the Toggle & Enter Token buttons beside the like/dislike controls.
   * - Idempotent: will not duplicate the UI.
   */
  function createControlButtons() {
    // Avoid duplicates
    if (document.querySelector("#unsafeyt-controls")) return;

    // Find the top-level button bar
    const bar = document.querySelector("#top-level-buttons-computed");
    if (!bar) return; // if not found, bail (YouTube layout may differ)

    // Container
    const container = document.createElement("div");
    container.id = "unsafeyt-controls";
    container.style.display = "flex";
    container.style.gap = "8px";
    container.style.alignItems = "center";
    container.style.marginLeft = "12px";

    // Toggle button (transparent, white text, outline shows state)
    const toggle = document.createElement("button");
    toggle.id = "unsafeyt-toggle";
    toggle.type = "button";
    toggle.innerText = "Toggle Effects";
    _styleControlButton(toggle);
    _setToggleOutline(toggle, false); // default OFF
    toggle.addEventListener("click", async () => {
      if (isRendering) {
        // If running, turn off
        removeEffects();
      } else {
        // If not running and no token was found, ask for manual entry
        if (!currentToken || currentToken.length < 1) {
          const manual = prompt("No token auto-detected. Enter token manually:");
          if (!manual) return;
          currentToken = manual.trim();
        }
        await applyEffects(currentToken);
      }
    });

    // Manual entry button
    const manual = document.createElement("button");
    manual.id = "unsafeyt-manual";
    manual.type = "button";
    manual.innerText = "Enter Token";
    _styleControlButton(manual);
    manual.style.border = "1px solid rgba(255,255,255,0.2)";
    manual.addEventListener("click", () => {
      const v = prompt("Enter token (first line of description can also be 'token:...'):");
      if (v && v.trim().length > 0) {
        currentToken = v.trim();
        // Auto-enable when a manual token is entered
        applyEffects(currentToken);
      }
    });

    // Token indicator (small circle, green if a token is present)
    const indicator = document.createElement("div");
    indicator.id = "unsafeyt-token-indicator";
    indicator.style.width = "10px";
    indicator.style.height = "10px";
    indicator.style.borderRadius = "50%";
    indicator.style.marginLeft = "6px";
    indicator.style.background = "transparent";
    indicator.title = "Token presence";

    // Append and insert
    container.appendChild(toggle);
    container.appendChild(manual);
    container.appendChild(indicator);
    bar.appendChild(container);
  }

  /**
   * _styleControlButton(btn)
   * - Common visual styling for both buttons (transparent bg + white text).
   */
  function _styleControlButton(btn) {
    btn.style.background = "transparent";
    btn.style.color = "white";
    btn.style.padding = "6px 8px";
    btn.style.borderRadius = "6px";
    btn.style.cursor = "pointer";
    btn.style.fontSize = "12px";
    btn.style.fontWeight = "600";
    btn.style.outline = "none";
  }

  /**
   * _setToggleOutline(btn, on)
   * - Visual cue: green outline if ON, red if OFF
   */
  function _setToggleOutline(btn, on) {
    if (!btn) return;
    if (on) {
      btn.style.border = "2px solid rgba(0,200,0,0.95)";
      btn.style.boxShadow = "0 0 8px rgba(0,200,0,0.25)";
    } else {
      btn.style.border = "2px solid rgba(200,0,0,0.95)";
      btn.style.boxShadow = "none";
    }
  }

  /**
   * _updateTokenIndicator(present)
   * - Green dot if a token is detected, transparent otherwise.
   */
  function _updateTokenIndicator(present) {
    const el = document.querySelector("#unsafeyt-token-indicator");
    if (!el) return;
    el.style.background = present ? "limegreen" : "transparent";
  }

  /************************************************************************
   * SECTION F — CLEANUP: removeEffects()
   * - Thorough cleanup of WebGL and Audio state
   ************************************************************************/
  function removeEffects() {
    // If not rendering, still try to close the audio context if one is open
    if (!isRendering && activeAudioCtx) {
      try { activeAudioCtx.close().catch(() => {}); } catch (e) {}
      activeAudioCtx = null;
    }

    // Mark as not rendering
    isRendering = false;

    // Note: the original extension cleared the token on remove; we keep
    // currentToken so the user can re-apply effects without re-entering it.
    // currentToken = "";

    // Remove the canvas
    if (activeCanvas) {
      try { activeCanvas.remove(); } catch (e) {}
      activeCanvas = null;
    }

    // Clear timers / rAF
    if (resizeIntervalId !== null) {
      clearInterval(resizeIntervalId);
      resizeIntervalId = null;
    }
    if (renderFrameId !== null) {
      cancelAnimationFrame(renderFrameId);
      renderFrameId = null;
    }

    // Remove the resize listener
    if (resizeCanvasListener) {
      window.removeEventListener("resize", resizeCanvasListener);
      resizeCanvasListener = null;
    }

    // Lose the GL context
    if (activeGl) {
      try {
        const lose = activeGl.getExtension('WEBGL_lose_context');
        if (lose) lose.loseContext();
      } catch (e) {}
      activeGl = null;
    }

    // Restore the html5 container style
    const html5_video_container = document.getElementsByClassName("html5-video-container")[0];
    if (html5_video_container && originalVideoContainerStyle) {
      try { Object.assign(html5_video_container.style, originalVideoContainerStyle); } catch (e) {}
      originalVideoContainerStyle = null;
    }

    // Audio node cleanup
    if (activeAudioCtx) {
      const video = document.querySelector(".video-stream");
      if (video && activeSrcNode) {
        try { activeSrcNode.disconnect(); } catch (e) {}
        activeSrcNode = null;
      }
      if (activeGainNode) {
        try { activeGainNode.disconnect(); } catch (e) {}
        activeGainNode = null;
      }
      activeNotchFilters.forEach(filter => {
        try { filter.disconnect(); } catch (e) {}
      });
      activeNotchFilters = [];
      if (activeOutputGainNode) {
        try { activeOutputGainNode.disconnect(); } catch (e) {}
        activeOutputGainNode = null;
      }
      // Try closing the audio context, then reload the video to restore default audio routing
      activeAudioCtx.close().then(() => {
        activeAudioCtx = null;
        if (video) {
          try {
            const currentSrc = video.src;
            video.src = '';
            video.load();
            video.src = currentSrc;
            video.load();
          } catch (e) {}
        }
      }).catch(() => {
        activeAudioCtx = null;
      });
      currentNode = null;
    }

    // Update UI to OFF
    _setToggleOutline(document.querySelector("#unsafeyt-toggle"), false);
    _updateTokenIndicator(Boolean(currentToken && currentToken.length > 0));

    console.log("[UnsafeYT] Removed applied effects.");
  }
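
  /**
   * ILLUSTRATION ONLY (not called anywhere in this script): a minimal sketch of
   * the kind of Web Audio graph that Section F tears down. The real construction
   * happens inside applyEffects; the frequencies, Q values, and gains used there
   * may differ, so treat the numbers below as placeholders.
   */
  function _sketchNotchGraph(videoEl) {
    const ctx = new AudioContext();
    const src = ctx.createMediaElementSource(videoEl); // takes over the element's audio output
    const inputGain = ctx.createGain();
    // A chain of notch filters, each cutting a narrow frequency band
    const notches = [440, 880, 1760].map((freq) => {
      const f = ctx.createBiquadFilter();
      f.type = "notch";
      f.frequency.value = freq; // placeholder frequency
      f.Q.value = 30;           // narrow band
      return f;
    });
    const outputGain = ctx.createGain();
    // Wire: source -> gain -> notch -> notch -> ... -> gain -> speakers
    let node = src.connect(inputGain);
    for (const f of notches) node = node.connect(f);
    node.connect(outputGain).connect(ctx.destination);
    return { ctx, src, inputGain, notches, outputGain };
  }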

  /************************************************************************
   * SECTION G — CORE: applyEffects(token)
   * - Sets up the WebGL pipeline, creates and uploads the shuffle map, starts the render loop
   * - Creates an AudioContext graph with notch filters and connects it to the destination
   ************************************************************************/
  async function applyEffects(seedToken) {
    // Prevent double-apply
    if (isRendering) {
      console.log("[UnsafeYT] Effects already running.");
      return;
    }

    // Remove any partial state (defensive)
    removeEffects();

    // Validate token
    if (typeof seedToken !== 'string' || seedToken.length < 3) {
      console.warn("[UnsafeYT] Invalid or empty token. Effects will not be applied.");
      return;
    }
    currentToken = seedToken;
    console.log(`[UnsafeYT] Applying effects with token: "${currentToken}"`);

    // Find video & container
    const video = document.getElementsByClassName("video-stream")[0];
    const html5_video_container = document.getElementsByClassName("html5-video-container")[0];
    if (!video || !html5_video_container) {
      console.error("[UnsafeYT] Cannot find video or container elements.");
      return;
    }

    // Ensure crossOrigin so texImage2D can read from the video element
    video.crossOrigin = "anonymous";

    /* ---------------------------
       Create overlay canvas and style
       --------------------------- */
    activeCanvas = document.createElement("canvas");
    activeCanvas.id = "unsafeyt-glcanvas";

    // Positioning differs for mobile and desktop
    if (location.href.includes("m.youtube")) {
      Object.assign(activeCanvas.style, {
        position: "absolute",
        top: "0%",
        left: "50%",
        transform: "translateY(0%) translateX(-50%)",
        pointerEvents: "none",
        zIndex: 9999,
        touchAction: "none"
      });
    } else {
      Object.assign(activeCanvas.style, {
        position: "absolute",
        top: "50%",
        left: "50%",
        transform: "translateY(-50%) translateX(-50%)",
        pointerEvents: "none",
        zIndex: 9999,
        touchAction: "none"
      });
    }

    // Save and change the container style so the canvas overlays correctly
    if (html5_video_container && !originalVideoContainerStyle) {
      originalVideoContainerStyle = {
        position: html5_video_container.style.position,
        height: html5_video_container.style.height,
      };
    }
    Object.assign(html5_video_container.style, {
      position: "relative",
      height: "100%",
    });
    html5_video_container.appendChild(activeCanvas);

    // Create a WebGL2 context, or fall back to WebGL1.
    // Note: the embedded shaders target WebGL2 ("#version 300 es"), so on a
    // WebGL1-only browser the compile step below will fail and the effects
    // will be cleanly removed.
    activeGl = activeCanvas.getContext("webgl2", { alpha: false }) ||
               activeCanvas.getContext("webgl", { alpha: false });
    if (!activeGl) {
      console.error("[UnsafeYT] WebGL not supported in this browser.");
      removeEffects();
      return;
    }

    // For WebGL1 we may need OES_texture_float to upload float textures
    let oesTextureFloatExt = null;
    if (activeGl instanceof WebGLRenderingContext) {
      oesTextureFloatExt = activeGl.getExtension('OES_texture_float');
      if (!oesTextureFloatExt) {
        console.warn("[UnsafeYT] OES_texture_float not available: float textures may not work.");
      }
    }

    /* ---------------------------
       Resize handling
       --------------------------- */
    resizeCanvasListener = () => {
      if (!activeCanvas || !video) return;
      activeCanvas.width = video.offsetWidth || video.videoWidth || 640;
      activeCanvas.height = video.offsetHeight || video.videoHeight || 360;
      if (activeGl) {
        try { activeGl.viewport(0, 0, activeGl.drawingBufferWidth, activeGl.drawingBufferHeight); } catch (e) {}
      }
    };
    window.addEventListener("resize", resizeCanvasListener);
    resizeCanvasListener();
    resizeIntervalId = setInterval(resizeCanvasListener, 2500);
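
    // Why both a resize listener and a 2.5s interval: the YouTube player can change
    // size without any window resize (theater mode, miniplayer, layout shifts), and
    // the <video> element itself fires no resize event, so the interval acts as a
    // low-cost safety net that keeps the canvas matched to the video's box.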

    /* ---------------------------
       Shader compile / program helpers
       --------------------------- */
    function compileShader(type, src) {
      if (!activeGl) return null;
      const shader = activeGl.createShader(type);
      if (!shader) {
        console.error("[UnsafeYT] Failed to create shader.");
        return null;
      }
      activeGl.shaderSource(shader, src);
      activeGl.compileShader(shader);
      if (!activeGl.getShaderParameter(shader, activeGl.COMPILE_STATUS)) {
        console.error("[UnsafeYT] Shader compile error:", activeGl.getShaderInfoLog(shader));
        activeGl.deleteShader(shader);
        return null;
      }
      return shader;
    }

    function createProgram(vsSrc, fsSrc) {
      if (!activeGl) return null;
      const vs = compileShader(activeGl.VERTEX_SHADER, vsSrc);
      const fs = compileShader(activeGl.FRAGMENT_SHADER, fsSrc);
      if (!vs || !fs) {
        console.error("[UnsafeYT] Shader creation failed.");
        return null;
      }
      const program = activeGl.createProgram();
      activeGl.attachShader(program, vs);
      activeGl.attachShader(program, fs);
      activeGl.linkProgram(program);
      if (!activeGl.getProgramParameter(program, activeGl.LINK_STATUS)) {
        console.error("[UnsafeYT] Program link error:", activeGl.getProgramInfoLog(program));
        try { activeGl.deleteProgram(program); } catch (e) {}
        try { activeGl.deleteShader(vs); activeGl.deleteShader(fs); } catch (e) {}
        return null;
      }
      activeGl.useProgram(program);
      try { activeGl.deleteShader(vs); activeGl.deleteShader(fs); } catch (e) {}
      return program;
    }

    /* ---------------------------
       Create/compile the program using the embedded shaders
       --------------------------- */
    try {
      const program = createProgram(VERT_SHADER_SRC, FRAG_SHADER_SRC);
      if (!program) {
        removeEffects();
        return;
      }

      // Attribute/uniform locations
      const posLoc = activeGl.getAttribLocation(program, "a_position");
      const texLoc = activeGl.getAttribLocation(program, "a_texCoord");
      const videoSamplerLoc = activeGl.getUniformLocation(program, "u_sampler");
      const shuffleSamplerLoc = activeGl.getUniformLocation(program, "u_shuffle");

      // Fullscreen quad (positions + texcoords)
      const quadVerts = new Float32Array([
        -1, -1, 0, 0,
         1, -1, 1, 0,
        -1,  1, 0, 1,
        -1,  1, 0, 1,
         1, -1, 1, 0,
         1,  1, 1, 1,
      ]);
      const buf = activeGl.createBuffer();
      activeGl.bindBuffer(activeGl.ARRAY_BUFFER, buf);
      activeGl.bufferData(activeGl.ARRAY_BUFFER, quadVerts, activeGl.STATIC_DRAW);
      activeGl.enableVertexAttribArray(posLoc);
      activeGl.vertexAttribPointer(posLoc, 2, activeGl.FLOAT, false, 4 * Float32Array.BYTES_PER_ELEMENT, 0);
      activeGl.enableVertexAttribArray(texLoc);
      activeGl.vertexAttribPointer(texLoc, 2, activeGl.FLOAT, false, 4 * Float32Array.BYTES_PER_ELEMENT, 2 * Float32Array.BYTES_PER_ELEMENT);

      // Video texture: we'll update it every frame from the