is this thing on

This commit is contained in:
2025-10-14 01:28:54 -04:00
commit 4c7648e6c5
3 changed files with 297 additions and 0 deletions

3
.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
node_modules/
package.json
package-lock.json

13
README.md Normal file
View File

@@ -0,0 +1,13 @@
# GFucker
GFucker is a tool that attaches to the Chrome debugger to generate Google search tokens. WIP lolololo
## Requirements
Set `CHROME_PATH` near the top of `control.js` to your chrome/chromium path. Run the following garbage:
```sh
npm install ws node-fetch
node control.js # This should shit out a cookie & raw search page, assuming your IP isn't b& :')
```
## Loicense
AGPL

281
control.js Normal file
View File

@@ -0,0 +1,281 @@
import { spawn } from 'child_process';
import WebSocket from 'ws';
import fetch from 'node-fetch';
// Path to the Chrome/Chromium binary to launch.
const CHROME_PATH = 'chromium';
// Port the remote debugging protocol listens on.
const DEBUG_PORT = 9222;
// Monotonically increasing id for DevTools protocol messages.
let msgId = 1;
// In-flight command promises, keyed by message id.
const pending = new Map();
// frameId -> requestId for top-level document requests.
const rawhtml = new Map();
// Debugger websocket; assigned once connected.
let ws = null;
//
// Close chrome on script exit
//
// On script exit, kill the whole detached Chrome process group
// (negative pid targets the group, not just the parent).
process.on("exit", () => {
	try {
		process.kill(-chrome.pid);
	} catch {
		console.log("Chrome is still running somehow, kill it");
	}
});
//
// FUNctions
//
// Poll the DevTools HTTP endpoint until it answers, or give up.
// Resolves once http://localhost:<port>/json/version returns OK;
// rejects with an Error after `timeout` ms of failed attempts.
async function poll_debugger(port, timeout = 20000){
	const deadline = Date.now() + timeout;
	for(;;){
		try{
			const res = await fetch(`http://localhost:${port}/json/version`);
			if(res.ok){
				return;
			}
		}catch{
			// endpoint not listening yet; keep polling
		}
		if(Date.now() > deadline){
			throw new Error("Chrome debugger did not respond");
		}
		// brief pause between attempts
		await new Promise((wake) => setTimeout(wake, 150));
	}
}
// Fire a DevTools-protocol command without waiting for its reply.
// Returns the message id so callers can correlate the response.
function send_command(method, params = {}){
	const id = msgId++;
	const frame = { id, method, params };
	ws.send(JSON.stringify(frame));
	return id;
}
// Send a DevTools-protocol command and await its reply.
// The websocket "message" handler resolves/rejects via `pending`;
// rejects with an Error if no reply arrives within `timeout` ms.
async function send_command_wait(method, params = {}, timeout = 5000) {
	const id = send_command(method, params);
	return new Promise((resolve, reject) => {
		const timer = setTimeout(() => {
			pending.delete(id);
			reject(new Error(`Timeout waiting for ${method}`));
		}, timeout);
		pending.set(id, { resolve, reject, timer });
	});
}
//
// Spawn chrome
//
// Launch a detached Chrome with the remote-debugging port open.
console.log(`<GFucker> Creating chrome instance @ ${CHROME_PATH} with debugger port ${DEBUG_PORT}`);
const chrome = spawn(
	CHROME_PATH,
	[
		`--remote-debugging-port=${DEBUG_PORT}`,
		'--incognito',
		'--no-first-run',
		'--no-default-browser-check',
		'--password-store=basic',
		//'--proxy-server=https://blade1.frankfurt-rack444.nodes.gen4.ninja:9002',
		//'--user-data-dir=/tmp/chrome-temp-profile',
		'--window-size=1200,800'
	],
	{
		// detached: own process group, so process.kill(-chrome.pid) in the
		// exit handler takes down the whole Chrome tree.
		detached: true,
		stdio: "ignore"
	}
);
// Don't let the Chrome child handle keep this script's event loop alive.
chrome.unref();
//
// Connect to debugger
//
// Connect to the Chrome debugger, drive a Google search, and dump the
// resulting cookies + raw search-page HTML to the console. Kills Chrome
// (via the process-group pid) on success, captcha, or failure.
async function doshit(){
	console.log("<GFucker> Waiting for chrome bloatware...");
	await poll_debugger(DEBUG_PORT);
	console.log("<GFucker> Debugger alive");
	const targets = await(await fetch(`http://localhost:${DEBUG_PORT}/json`)).json();
	const page = targets.find(t => t.type === "page");
	if(!page){
		console.log("<GFucker> Failed to find a page object");
		process.kill(-chrome.pid);
		// BUGFIX: must bail out here — previously execution fell through
		// and crashed on page.webSocketDebuggerUrl below.
		return;
	}
	ws = new WebSocket(page.webSocketDebuggerUrl);
	//
	// Navigate to the page on open
	//
	ws.on("open", async function(){
		console.log(`<GFucker> Connected to debugger @ "${page.title}" (${page.url})`);
		await send_command_wait("Network.enable");
		await send_command_wait("Page.enable");
		console.log("<GFucker> Registered to debugger events");
		await send_command_wait(
			"Page.navigate",
			{
				url: "https://www.google.com"
			}
		);
	});
	//
	// Handle incoming websocket messages
	//
	ws.on("message", async function(msg){
		const data = JSON.parse(msg);
		// Resolve/reject the pending promise for a command reply, if any
		if(data.id && pending.has(data.id)){
			const {resolve, reject, timer} = pending.get(data.id);
			clearTimeout(timer);
			pending.delete(data.id);
			if(data.error){
				reject(data.error);
			}else{
				resolve(data.result);
			}
		}
		// Record frameId -> requestId for top-level document loads so the
		// response body can be fetched after navigation completes
		if(data.method === "Network.requestWillBeSent"){
			if(data.params.type === "Document"){
				rawhtml.set(data.params.frameId, data.params.requestId);
			}
		}
		// React to top-frame navigations (no parentId == main frame)
		if(
			data.method === "Page.frameNavigated" &&
			!data.params.frame.parentId
		){
			const url = data.params.frame.url;
			if(url.match(/^https?:\/\/(www\.)?google\.[a-z]{1,10}\/search\?/g)){
				if(url.match(/sei=/)){
					console.log("<GFucker> Google returned a search page!");
					// Randomized delay so the page settles before scraping
					setTimeout(async function(){
						// dump cookies
						const cookie_raw = await send_command_wait("Network.getCookies", {urls: ["https://google.com"]}, 3000);
						const cookie = cookie_raw.cookies
							.map((c) => c.name + "=" + c.value)
							.join("; ");
						console.log("<GFucker> Scraped cookies");
						// dump raw html
						let html = await send_command_wait(
							"Network.getResponseBody",
							{
								requestId: rawhtml.get(data.params.frame.id)
							}
						);
						// BUGFIX: atob() returns a latin-1 string and mangles
						// UTF-8 pages; decode base64 with Buffer instead.
						html = html.base64Encoded
							? Buffer.from(html.body, "base64").toString("utf8")
							: html.body;
						console.log("<GFucker> Scraped HTML");
						process.kill(-chrome.pid);
						console.log({
							status: "ok",
							cookie: cookie,
							raw_html: html
						});
					}, 1000 + Math.random());
				}else{
					// search URL without sei= — token not issued yet
					console.log("<GFucker> Obtaining search token...");
				}
			}else if(url.match(/\/sorry\//)){
				// Google's captcha interstitial; nothing to do but give up
				console.log("<GFucker> FAIL: Got captcha page");
				process.kill(-chrome.pid);
			}else if(url === "https://www.google.com/"){
				// Dismiss the GDPR consent prompt once the page loads
				console.log("<GFucker> Clearing GPDR prompt");
				await send_command_wait(
					"Runtime.evaluate",
					{
						expression:
							'window.addEventListener("load", function(){' +
							// click out of the GDPR consent prompt, if it's there
							'var btns = Array.from(document.getElementsByTagName("button"));' +
							'if(btns.length >= 7){' +
							'btns[5].click();' +
							'}' +
							'});'
					}
				);
				console.log("<GFucker> Sleeping for ~3 seconds");
				// do actual search
				setTimeout(async function(){
					console.log("<GFucker> Sending search command");
					await send_command_wait(
						"Runtime.evaluate",
						{
							expression:
								'document.getElementsByTagName("textarea")[0].value = "asmr";' +
								'document.getElementsByName("btnK")[0].click();'
						}
					);
				}, 3100 + Math.random());
			}else{
				// this shouldn't really trigger
				console.log(`<GFucker> Visited ${data.params.frame.url}`);
			}
		}
	});
}
// Entry point. BUGFIX: the promise was previously floating — a rejection
// (debugger never came up, websocket failed) became an unhandled promise
// rejection. Surface it and exit; the "exit" handler kills Chrome.
doshit().catch(function(err){
	console.log(`<GFucker> Fatal: ${err}`);
	process.exit(1);
});