lotocamion

Hcaptcha Solver

Aug 20th, 2022 (edited)
  1. // ==UserScript==
  2. // @name Hcaptcha Solver Workaround
  3. // @namespace hcaptcha.workaround
  4. // @version 2.3
  5. // @description Hcaptcha Solver in Browser | Automatically solves Hcaptcha in browser
  6. // @author unknow
  7. // @match https://*.hcaptcha.com/*hcaptcha-challenge*
  8. // @match https://*.hcaptcha.com/*hcaptcha*
  9. // @match https://*.hcaptcha.com/*checkbox*
  10. // @grant GM_xmlhttpRequest
  11. // @grant GM_setValue
  12. // @grant GM_getValue
  13. // @run-at document-start
  14. // @connect www.imageidentify.com
  15. // @connect cdnjs.cloudflare.com
  16. // @connect cdn.jsdelivr.net
  17. // @connect unpkg.com
  18. // @connect hcaptcha.com
  19. // @require https://unpkg.com/[email protected]/browser/lib/jimp.min.js
  20. // @require https://cdnjs.cloudflare.com/ajax/libs/tesseract.js/2.0.0-alpha.2/tesseract.min.js
  21. // @require https://cdn.jsdelivr.net/npm/@tensorflow/[email protected]/dist/tf.min.js
  22. // @require https://cdn.jsdelivr.net/npm/@tensorflow-models/[email protected]/dist/coco-ssd.min.js
  23. // @require https://cdn.jsdelivr.net/npm/@tensorflow-models/[email protected]/dist/mobilenet.min.js
  24. // ==/UserScript==
  25. (async function() {
  26.  
  27. //TODO: Enable debug mode to print console logs
  28. //TODO: Refactor Code for different models
  29. 'use strict';
  30. var selectedImageCount = 0;
  31. var tensorFlowModel = undefined;
  32. var tensorFlowMobileNetModel = undefined;
  33. var worker = undefined;
  34.  
  35. var identifiedObjectsList = [];
  36. var exampleImageList = [];
  37. var identifyObjectsFromImagesCompleted = false;
  38. var currentExampleUrls = [];
  39.  
  40. //Default Language for hcaptcha
  41. const LANG_ENGLISH = "English"
  42. const DEFAULT_LANGUAGE = LANG_ENGLISH;
  43. const ENABLE_DEFAULT_LANGUAGE = true;
  44.  
  45. //Guess/Match New Images
  46. const MATCH_IMAGES_USING_TRAINER = false;
  47. const GUESS_NEW_IMAGE_TYPE = false;
  48.  
  49. //Node Selectors
  50. const CHECK_BOX = "#checkbox";
  51. const SUBMIT_BUTTON = ".button-submit";
  52. const REFRESH_CHALLENGE_BUTTON = ".refresh.button";
  53. const TASK_IMAGE_BORDER = ".task-image .border";
  54. const IMAGE = ".task-image .image";
  55. const TASK_IMAGE = ".task-image";
  56. const PROMPT_TEXT = ".prompt-text";
  57. const NO_SELECTION = ".no-selection";
  58. const CHALLENGE_INPUT_FIELD = ".challenge-input .input-field";
  59. const CHALLENGE_INPUT = ".challenge-input";
  60. const CHALLENGE_IMAGE = ".challenge-example .image .image";
  61. const IMAGE_FOR_OCR = ".challenge-image .zoom-image";
  62. const LANGUAGE_SELECTOR = "#language-list .scroll-container .option span";
  63.  
  64. //Attributes
  65. const ARIA_CHECKED = "aria-checked";
  66. const ARIA_HIDDEN = "aria-hidden";
  67.  
  68. //Values that can be changed for other languages
  69. const AIRPLANE = "airplane";
  70. const BICYCLE = "bicycle";
  71. const BOAT = "boat";
  72. const BUS = "bus";
  73. const CAR = "car";
  74. const MOTORBUS = "motorbus";
  75. const MOTORCYCLE = "motorcycle";
  76. const SURFBOARD = "surfboard";
  77. const TRAIN = "train";
  78. const TRUCK = "truck";
  79. const TRIMARAN = "trimaran";
  80. const SEAPLANE = "seaplane";
  81. const SPEEDBOAT = "speedboat";
  82.  
  83. const BRIDGE = "bridge";
  84. const BEDROOM = "bedroom";
  85. const LIVING_ROOM = "living room";
  86. const CONFERENCE_ROOM = "conference room";
  87.  
  88. const HORSE = "һorse";
  89. const LION = "lion";
  90. const DOMESTIC_CAT = "domestic cat";
  91. const DOG = "dog";
  92. const ELEPHANT = "elephant";
  93. const HORSE_CLOUDS = "horse made of clouds";
  94. const ELEPHANT_CLOUDS = "elephant made of clouds";
  95. const PARROT = "parrot";
  96. const BIRD = "bird";
  97. const CANINE = "canine";
  98.  
  99. //Living Room Objects
  100. const BED = "bed";
  101. const BOOK = "book";
  102. const CHAIR = "chair";
  103. const CLOCK = "clock";
  104. const COUCH = "couch";
  105. const DINING_TABLE = "dining table";
  106. const POTTED_PLANT = "potted plant";
  107. const TV = "tv";
  108.  
  109. //Animals
  110. const ZEBRA = "zebra";
  111. const CAT = "cat";
  112.  
  113. // Vertical River
  114. const VALLEY = "valley";
  115. const VERTICAL_RIVER = "vertical river";
  116.  
  117. // Skippables for now
  118. const SMILING_DOG = "smiling dog";
  119. const DOG_EYES_CLOSED = "dog with closed eyes";
  120. const GIRAFFE = "giraffe";
  121. const LION_MANE = "lion with mane on its nеck";
  122. const LION_EYES_CLOSED = "lion with closed еyes";
  123. const LION_FEMALE = "female lion";
  124.  
  125. const KNOWN_WORDS = [
  126. AIRPLANE, BICYCLE, BOAT, BUS, CAR, MOTORBUS, MOTORCYCLE, SEAPLANE, SPEEDBOAT, SURFBOARD, TRAIN, TRIMARAN, TRUCK,
  127. COUCH, BRIDGE,
  128. BEDROOM, LIVING_ROOM, CONFERENCE_ROOM,
  129. HORSE, LION, DOMESTIC_CAT, DOG, ELEPHANT,
  130. HORSE_CLOUDS, ELEPHANT_CLOUDS,
  131. PARROT, BIRD, CANINE
  132. ];
  133.  
  134. const SKIPPABLE_WORDS = [
  135. SMILING_DOG, DOG_EYES_CLOSED, GIRAFFE, LION_MANE, LION_EYES_CLOSED
  136. ];
  137.  
  138. const LIVING_ROOM_TYPES = [BED, BOOK, CHAIR, CLOCK, COUCH, DINING_TABLE, POTTED_PLANT, TV];
  139. const TRANSPORT_TYPES = [AIRPLANE, BICYCLE, BOAT, BUS, CAR, MOTORBUS, MOTORCYCLE, SEAPLANE, SPEEDBOAT, SURFBOARD, TRAIN, TRIMARAN, TRUCK];
  140. const ANIMAL_TYPES = [ZEBRA];
  141.  
  142. const SENTENCE_TEXT_A = "Please click each image containing a ";
  143. const SENTENCE_TEXT_AN = "Please click each image containing an ";
  144. const SENTENCE_TEXT_B = "Please select all images with a ";
  145. const LANGUAGE_FOR_OCR = "eng";
  146.  
  147. // Option to override the default image matching
  148. // Enabled by default
  149. const ENABLE_TENSORFLOW = true;
  150.  
  151. // Max Skips that can be done while solving the captcha
  152. // This is unlikely to happen; if it does, retry with new images
  153. const MAX_SKIPS = 10;
  154. var skipCount = 0;
  155.  
  156. var USE_MOBILE_NET = false;
  157. var USE_COLOUR_PATTERN = false;
  158. var NEW_WORD_IDENTIFIED = false;
  159. var FILTER_CLOUDS = false;
  160.  
  161. //Probability thresholds for objects
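//Classes not listed here fall back to the default threshold of 0.077 used by the MobileNet matchers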
  162. var probabilityForObject = new Map();
  163. probabilityForObject.set("speedboat", 0.14);
  164. probabilityForObject.set("fireboat", 0.4);
  165. probabilityForObject.set("boathouse", 0.4);
  166. probabilityForObject.set("submarine", 0.5);
  167. probabilityForObject.set("printer", 0.05);
  168. probabilityForObject.set("stretcher", 0.05);
  169. probabilityForObject.set("rotisserie", 0.02);
  170. probabilityForObject.set("spatula", 0.05);
  171.  
  172.  
  173. String.prototype.includesOneOf = function(arrayOfStrings) {
  174.  
  175. //If this is not an Array, compare it as a String
  176. if (!Array.isArray(arrayOfStrings)) {
  177. return this.toLowerCase().includes(arrayOfStrings.toLowerCase());
  178. }
  179.  
  180. for (var i = 0; i < arrayOfStrings.length; i++) {
  181. if ((arrayOfStrings[i].substr(0, 1) == "=" && this.toLowerCase() == arrayOfStrings[i].substr(1).toLowerCase()) ||
  182. (this.toLowerCase().includes(arrayOfStrings[i].toLowerCase()))) {
  183. return true;
  184. }
  185. }
  186. return false;
  187. }
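// e.g. "motorbus".includesOneOf(["bus", "car"]) returns true; a leading "=" in an entry (such as "=car") requires an exact match instead of a substring match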
  188.  
  189. String.prototype.transliterate = function transliterate() {
  190. const a = {"ԁ": "d", "ο":"o","ѕ": "s","і":"i","Ё":"E","Й":"I","Ц":"TS","У":"y","К":"K","Е":"E","Н":"H","Г":"G","Ш":"SH","Щ":"SCH",
  191. "З":"Z","Х":"H","Ъ":"'","ё":"yo","й":"i","ц":"ts","у":"y","к":"k","е":"e","н":"n","г":"r","ш":"sh","щ":"sch","з":"z",
  192. "х":"x","ъ":"'","Ф":"F","Ы":"I","В":"V","А":"a","П":"P","Р":"P","О":"O","Л":"L","Д":"D","Ж":"ZH","Э":"E","ф":"f","ы":"i",
  193. "в":"v","а":"a","п":"n","р":"p","о":"o","л":"l","д":"d","ж":"zh","э":"e","Я":"Ya","Ч":"CH","С":"C","М":"M","И":"I","Т":"T",
  194. "Ь":"'","Б":"B","Ю":"YU","я":"ya","ч":"ch","с":"c","м":"m","и":"i","т":"t","ь":"'","б":"b","ю":"yu",
  195. ",":" ",".":" ",":":" "};
  196. return this.split('').map(function (char) {
  197. return a[char] || char;
  198. }).join("").trim();
  199. }
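// Maps Cyrillic homoglyphs to ASCII, e.g. a prompt written with the Cyrillic "е" in "vеrtical rivеr" becomes the plain "vertical river"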
  200.  
  201. String.prototype.equalsOneOf = function(arrayOfStrings) {
  202.  
  203. //If this is not an Array, compare it as a String
  204. if (!Array.isArray(arrayOfStrings)) {
  205. return this.toLowerCase() == arrayOfStrings.toLowerCase();
  206. }
  207.  
  208. for (var i = 0; i < arrayOfStrings.length; i++) {
  209. if ((arrayOfStrings[i].substr(0, 1) == "=" && this.toLowerCase() == arrayOfStrings[i].substr(1).toLowerCase()) ||
  210. (this.toLowerCase() == arrayOfStrings[i].toLowerCase())) {
  211. return true;
  212. }
  213. }
  214. return false;
  215. }
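// e.g. "bus".equalsOneOf(["bus", "motorbus"]) returns true, while "minibus".equalsOneOf(["bus"]) returns false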
  216.  
  217.  
  218.  
  219. // This script uses the imageidentify API (Wolfram). You may also use TensorFlow.js or the latest version of YOLO to recognize common objects.
  220. // (When a cloud service is available for YOLO, we can switch the API endpoint.) Accuracy varies between Wolfram, TensorFlow and YOLO.
  221. // Use this as a reference to solve reCAPTCHA/other captchas using in-browser scripts. This is intended for learning purposes.
  222. // TensorFlow is used as a fallback, but it requires a good CPU in order to solve quickly.
  223. // CPU and memory utilization may go high when using TensorFlow.js.
  224. function matchImages(imageUrl, word, i) {
  225.  
  226. GM_xmlhttpRequest({
  227. method: "POST",
  228. url: "https://www.imageidentify.com/objects/user-26a7681f-4b48-4f71-8f9f-93030898d70d/prd/urlapi/",
  229. headers: {
  230. "Content-Type": "application/x-www-form-urlencoded"
  231. },
  232. data: "image=" + encodeURIComponent(imageUrl),
  233. timeout: 8000,
  234. onload: function(response) {
  235. clickImages(response, imageUrl, word, i)
  236. },
  237. onerror: function(e) {
  238. //Using Fallback TensorFlow
  239. if (e && e.status && e.status != 0) {
  240. console.log(e);
  241. console.log("Using Fallback");
  242. }
  243. matchImagesUsingTensorFlow(imageUrl, word, i);
  244.  
  245. },
  246. ontimeout: function() {
  247. //console.log("Timed out. Using Fallback");
  248. matchImagesUsingTensorFlow(imageUrl, word, i);
  249. },
  250. });
  251.  
  252. }
  253.  
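// Detects objects in a tile with the coco-ssd model and clicks the tile when a detected class matches the target word and the tile is not already selected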
  254. function matchImagesUsingTensorFlow(imageUrl, word, i) {
  255. try {
  256. let img = new Image();
  257. img.crossOrigin = "Anonymous";
  258. img.src = imageUrl;
  259. img.onload = () => {
  260. initializeTensorFlowModel().then(model => model.detect(img))
  261. .then(function(predictions) {
  262. var predictionslen = predictions.length;
  263. for (var j = 0; j < predictionslen; j++) {
  264. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  265. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 &&
  266. predictions[j].class.includesOneOf(word)) {
  267. qSelectorAll(TASK_IMAGE)[i].click();
  268. break;
  269. }
  270. }
  271. img.removeAttribute("src");
  272. selectedImageCount = selectedImageCount + 1;
  273. });
  274. }
  275. } catch (err) {
  276. console.log(err.message);
  277. }
  278. }
  279.  
  280.  
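// Classifies a tile with MobileNet and clicks it when a predicted class matches the target word with a probability above the per-class threshold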
  281. function matchImagesUsingTensorFlowMobileNet(imageUrl, word, i) {
  282.  
  283. try {
  284. let img = new Image();
  285. img.crossOrigin = "Anonymous";
  286. img.src = imageUrl;
  287. img.onload = () => {
  288. initializeTensorFlowMobilenetModel().then(model => model.classify(img))
  289. .then(function(predictions) {
  290. var predictionslen = predictions.length;
  291. for (var j = 0; j < predictionslen; j++) {
  292. var probability = 0.077;
  293. if (probabilityForObject.get(predictions[j].className)) {
  294. probability = probabilityForObject.get(predictions[j].className);
  295. }
  296.  
  297. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  298. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 &&
  299. predictions[j].className.includesOneOf(word) && predictions[j].probability > probability) {
  300. qSelectorAll(TASK_IMAGE)[i].click();
  301. break;
  302. }
  303. }
  304. img.removeAttribute("src");
  305. selectedImageCount = selectedImageCount + 1;
  306. });
  307. }
  308. } catch (err) {
  309. console.log(err.message);
  310. }
  311. }
  312.  
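// Variant of the MobileNet matcher with an optional preprocess hook that can skip a tile before classification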
  313. function filterClouds(imageUrl, word, i) {
  314. try {
  315. let img = new Image();
  316. img.crossOrigin = "Anonymous";
  317. img.src = imageUrl;
  318. img.onload = () => {
  319. if (typeof preprocess === "function") {
  320. let ppResult = preprocess(img);
  321. if (ppResult && ppResult.skipImage) {
  322. return;
  323. }
  324. }
  325. initializeTensorFlowMobilenetModel().then(model => model.classify(img))
  326. .then(function(predictions) {
  327. var predictionslen = predictions.length;
  328. for (var j = 0; j < predictionslen; j++) {
  329. var probability = 0.077;
  330. if (probabilityForObject.get(predictions[j].className)) {
  331. probability = probabilityForObject.get(predictions[j].className);
  332. }
  333.  
  334. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  335. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 &&
  336. predictions[j].className.includesOneOf(word) && predictions[j].probability > probability) {
  337. qSelectorAll(TASK_IMAGE)[i].click();
  338. break;
  339. }
  340. }
  341. img.removeAttribute("src");
  342. selectedImageCount = selectedImageCount + 1;
  343. });
  344. }
  345. } catch (err) {
  346. console.log(err.message);
  347. }
  348. }
  349.  
  350. // TODO: Generalize this logic
  351. // This identification is based on observation of the images seen so far.
  352. // The proper approach would be to scan the entire image to find the lake.
  353. // The MobileNet model in browser JS identifies the lake but does not provide coordinates
  354. // to determine whether it is horizontal or vertical.
  355. function matchImageForVerticalRiver(imageUrl, word, i) {
  356.  
  357. Jimp.read(imageUrl).then(function (data) {
  358.  
  359. data.getBase64(Jimp.AUTO, async function (err, src) {
  360. var img = document.createElement("img");
  361. img.setAttribute("src", src);
  362. await img.decode();
  363. var imageHeight = img.height;
  364. var imageWidth = img.width;
  365. var cropHeight = imageHeight - 0.03*imageHeight;
  366. let url = src.replace(/^data:image\/\w+;base64,/, "");
  367. let buffer = Buffer.from(url, 'base64');
  368.  
  369. Jimp.read(buffer).then(function (data) {
  370. data.crop(0, cropHeight, imageWidth, imageHeight)
  371. .getBase64(Jimp.AUTO, async function (err, src) {
  372.  
  373. var img = document.createElement("img");
  374. img.src = src;
  375. await img.decode();
  376.  
  377. var c = document.createElement("canvas")
  378. c.width = img.width;
  379. c.height = img.height;
  380. var ctx = c.getContext("2d");
  381. ctx.drawImage(img, 0, 0);
  382.  
  383. var imageData = ctx.getImageData(0, 0, c.width, c.height);
  384. var data = imageData.data;
  385. var count = 0;
  386.  
  387. //Multiple combinations and distances are required for accuracy
  388. for (let i = 0; i < data.length; i+= 4) {
  389. if( (data[i] < 140 && data[i+1] < 110 && data[i+2] > 80 && data[i+3] == 255) ||
  390. (data[i] < 200 && data[i+1] < 200 && data[i+2] > 140 && data[i+3] == 255)){
  391. count++;
  392. }
  393. }
  394.  
  395. if(count > 0.001*(data.length/4) && count < data.length/8) {
  396. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  397. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0) {
  398. qSelectorAll(TASK_IMAGE)[i].click();
  399. }
  400. }
  401.  
  402. img.removeAttribute("src");
  403. selectedImageCount = selectedImageCount + 1;
  404.  
  405. });
  406. });
  407. img.removeAttribute("src");
  408. });
  409. });
  410. }
  411.  
  412.  
  413. // This is a naive approach that stores the images and retrieves them later.
  414. // The accuracy is 100% as long as the selected images have been stored.
  415. // Browser storage is used to hold the images and is cleared if you delete the browser cache and cookies.
  416. // You may also store the images in a remote location and retrieve them for quick access.
  417. // This approach is only used in urgent scenarios before the images have been trained.
  418. // An image difference against the stored images can also identify a new image when the two are nearly equal.
  419. function matchImagesUsingTrainer(imageUrl, word, i) {
  420.  
  421. Jimp.read(imageUrl).then(function (data) {
  422.  
  423. data.getBase64(Jimp.AUTO, async function (err, src) {
  424. var trainerInterval = setInterval(function(){
  425.  
  426. if (!qSelectorAll(IMAGE)[i] || !(qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) ){
  427. clearInterval(trainerInterval);
  428. return;
  429. }
  430.  
  431. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  432. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 && GM_getValue(src) && GM_getValue(src) == word) {
  433. console.log("Retrieved image from trainer");
  434. selectedImageCount = selectedImageCount + 1;
  435. qSelectorAll(TASK_IMAGE)[i].click();
  436. clearInterval(trainerInterval);
  437. return;
  438. }
  439.  
  440. // Overriding Previously Stored values
  441. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  442. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 1 && GM_getValue(src) && GM_getValue(src) != word) {
  443. console.log("Overriding image in the trainer");
  444. selectedImageCount = selectedImageCount + 1;
  445. GM_setValue(src,word);
  446. console.log("Image Stored into database");
  447. clearInterval(trainerInterval);
  448. return;
  449. }
  450.  
  451. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  452. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 1 && !GM_getValue(src)) {
  453. selectedImageCount = selectedImageCount + 1;
  454. GM_setValue(src,word);
  455. console.log("Image Stored into database");
  456. clearInterval(trainerInterval);
  457. return;
  458.  
  459. }
  460.  
  461. },5000);
  462.  
  463. });
  464. });
  465. }
  466.  
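// Returns the average RGB colour of the image data, sampling every 5th pixel for speed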
  467. function getAvgColor(imgData) {
  468. let blockSize = 5, i = -4, length, rgb = {r:0,g:0,b:0}, count = 0;
  469.  
  470. if(!imgData) {
  471. return rgb;
  472. }
  473.  
  474. length = imgData.data.length;
  475.  
  476. while ( (i += blockSize * 4) < length ) {
  477. ++count;
  478. rgb.r += imgData.data[i];
  479. rgb.g += imgData.data[i+1];
  480. rgb.b += imgData.data[i+2];
  481. }
  482.  
  483. rgb.r = ~~(rgb.r/count);
  484. rgb.g = ~~(rgb.g/count);
  485. rgb.b = ~~(rgb.b/count);
  486.  
  487. return rgb;
  488. }
  489.  
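// Converts both colours from RGB through XYZ to CIELAB and returns true when their delta-E distance is below the tolerance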
  490. function isInColorRange(rgbColor, baseColor, tolerance) {
  491. var eDelta = function (rgbColor, baseColor) {
  492. var a = toLAB(toXYZ(rgbColor));
  493. var b = toLAB(toXYZ(baseColor));
  494. return Math.sqrt(Math.pow(a.l - b.l, 2) + Math.pow(a.a - b.a, 2) + Math.pow(a.b - b.b, 2));
  495. };
  496.  
  497. var toXYZ = function (c) {
  498. var xR = c.r / 255.0;
  499. var xG = c.g / 255.0;
  500. var xB = c.b / 255.0;
  501.  
  502. xR = xR > 0.04045 ? Math.pow((xR + 0.055) / 1.055, 2.4) : (xR / 12.92);
  503. xG = xG > 0.04045 ? Math.pow((xG + 0.055) / 1.055, 2.4) : (xG / 12.92);
  504. xB = xB > 0.04045 ? Math.pow((xB + 0.055) / 1.055, 2.4) : (xB / 12.92);
  505.  
  506. xR = xR * 100;
  507. xG = xG * 100;
  508. xB = xB * 100;
  509.  
  510. return {
  511. x: xR * 0.4124 + xG * 0.3576 + xB * 0.1805,
  512. y: xR * 0.2126 + xG * 0.7152 + xB * 0.0722,
  513. z: xR * 0.0193 + xG * 0.1192 + xB * 0.9505
  514. };
  515. };
  516.  
  517. var toLAB = function (c) {
  518. var xX = c.x / 95.047;
  519. var xY = c.y / 100.000;
  520. var xZ = c.z / 108.883;
  521.  
  522. xX = xX > 0.008856 ? Math.pow(xX, 1.0 / 3) : (7.787 * xX) + (16.0 / 116);
  523. xY = xY > 0.008856 ? Math.pow(xY, 1.0 / 3) : (7.787 * xY) + (16.0 / 116);
  524. xZ = xZ > 0.008856 ? Math.pow(xZ, 1.0 / 3) : (7.787 * xZ) + (16.0 / 116);
  525.  
  526. return {
  527. l: (116 * xY) - 16,
  528. a: 500 * (xX - xY),
  529. b: 200 * (xY - xZ)
  530. };
  531. };
  532.  
  533. return eDelta(rgbColor, baseColor) < tolerance;
  534. };
  535.  
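// Scales down blue-channel values above 173 on sampled pixels to tone down sky-coloured areas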
  536. function lowerBlue(imgData) {
  537. let blockSize = 5, i = -4, length, rgb = {r:0,g:0,b:0}, count = 0;
  538.  
  539. length = imgData.data.length;
  540.  
  541. while ( (i += blockSize * 4) < length ) {
  542. ++count;
  543. imgData.data[i+2] = imgData.data[i+2] > 173 ? Math.floor(imgData.data[i+2] * 0.65) : imgData.data[i+2];
  544. }
  545.  
  546. return imgData;
  547. }
  548.  
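// Converts the image data to grayscale in place using a weighted sum of the RGB channels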
  549. function grayscale(ctx, imgData, width, height) {
  550. for (var x = 0; x < width; x++) {
  551. for (var y = 0; y < height; y++) {
  552. var i = x * 4 + y * 4 * width;
  553. var brightness = 0.34 * imgData.data[i] + 0.5 * imgData.data[i + 1] + 0.16 * imgData.data[i + 2];
  554. imgData.data[i] = brightness;
  555. imgData.data[i + 1] = brightness;
  556. imgData.data[i + 2] = brightness;
  557. imgData.data[i + 3] = 255;
  558. }
  559. }
  560. // ctx.putImageData(imgData, 0, 0);
  561. return imgData;
  562. }
  563.  
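// For each of the nine tiles: checks the average colour against a sky blue, and if it looks like clouds, lowers the blue channel and classifies the result with MobileNet before clicking matches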
  564. async function matchWorkaroundPrefiltered(imageList, word) {
  565.  
  566. for (let i = 0; i < 9; i++) {
  567. try {
  568. let img = new Image();
  570. img.crossOrigin = "Anonymous";
  571. img.setAttribute("src", imageList[i]);
  572.  
  573. img.onload = () => {
  574. let canvas = new OffscreenCanvas(0, 0);
  575. let ctx = canvas.getContext('2d');
  576.  
  577. canvas.width = img.width;
  578. canvas.height = img.height;
  579. ctx.drawImage(img, 0, 0);
  580.  
  581. const imageData = ctx.getImageData(0, 0, img.width, img.height);
  582.  
  583. let rgbColor = getAvgColor(imageData);
  584.  
  585. let hasClouds = isInColorRange(rgbColor, {r:95,g:132,b:173}, 10);
  586.  
  587. if(hasClouds) {
  588. let grayImg = lowerBlue(imageData);
  589. try {
  590. initializeTensorFlowMobilenetModel().then(model => model.classify(grayImg))
  591. .then(function(predictions) {
  592. var predictionslen = predictions.length;
  593. for (var j = 0; j < predictionslen; j++) {
  594. var probability = 0.077;
  595. if (probabilityForObject.get(predictions[j].className)) {
  596. probability = probabilityForObject.get(predictions[j].className);
  597. }
  598.  
  599. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageList[i]) &&
  600. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 &&
  601. predictions[j].className.includesOneOf(word) && predictions[j].probability > probability) {
  602. qSelectorAll(TASK_IMAGE)[i].click();
  603. break;
  604. }
  605. }
  606. img.removeAttribute("src");
  607. selectedImageCount = selectedImageCount + 1;
  608. });
  609. } catch (err) {
  610. console.log(err);
  611. }
  612. } else {
  613. selectedImageCount = selectedImageCount + 1;
  614. }
  615.  
  616. img.removeAttribute("src");
  617. }
  618.  
  619. } catch (err) {
  620. console.log(err.message);
  621. }
  622. }
  623. }
  624.  
  625.  
  626. //Function to sleep or delay
  627. async function delay(ms) {
  628. return new Promise(resolve => setTimeout(resolve, ms))
  629. }
  630.  
  631. //Different models can be set later based on the use case
  632. //Ref Models: https://github.com/tensorflow/tfjs-models
  633. async function initializeTensorFlowModel() {
  634. if (!tensorFlowModel) {
  635. tensorFlowModel = await cocoSsd.load();
  636. }
  637. return tensorFlowModel;
  638. }
  639.  
  640. //MobileNet image classification model
  641. async function initializeTensorFlowMobilenetModel() {
  642. if (!tensorFlowMobileNetModel) {
  643. tensorFlowMobileNetModel = await mobilenet.load();
  644. }
  645. return tensorFlowMobileNetModel;
  646. }
  647.  
  648.  
  649. //Initialize TesseractWorker
  650. function initializeTesseractWorker() {
  651. if (!worker) {
  652. worker = new Tesseract.TesseractWorker();
  653. }
  654. }
  655.  
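// Parses the imageidentify response and clicks the tile when the title, entity or one of the alternatives matches the target word; falls back to TensorFlow on errors or empty responses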
  656. function clickImages(response, imageUrl, word, i) {
  657.  
  658. try {
  659. if (response && response.responseText && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  660. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0) {
  661. var responseJson = JSON.parse(response.responseText);
  662. if (responseJson.identify && responseJson.identify.title && responseJson.identify.title.includesOneOf(word)) {
  663. qSelectorAll(TASK_IMAGE)[i].click();
  664. } else if (responseJson.identify && responseJson.identify.entity && responseJson.identify.entity.includesOneOf(word)) {
  665. qSelectorAll(TASK_IMAGE)[i].click();
  666. } else if (responseJson.identify && responseJson.identify.alternatives) {
  667. var alternatives = JSON.stringify(responseJson.identify.alternatives);
  668. var alternativesJson = JSON.parse(alternatives);
  669.  
  670. for (var key in alternativesJson) {
  671. if (alternativesJson.hasOwnProperty(key)) {
  672. if ((alternativesJson[key].includesOneOf(word) || key.includesOneOf(word))) {
  673. qSelectorAll(TASK_IMAGE)[i].click();
  674. break;
  675. }
  676. }
  677. }
  678. } else {
  679. //No Match found
  680. }
  681.  
  682. selectedImageCount = selectedImageCount + 1;
  683.  
  684. } else {
  685. //console.log("Using Fallback TensorFlow");
  686. matchImagesUsingTensorFlow(imageUrl, word, i);
  687. }
  688.  
  689. } catch (err) {
  690. //Using Fallback TensorFlow
  691. //console.log(err.message);
  692. //console.log("Using Fallback TensorFlow");
  693. matchImagesUsingTensorFlow(imageUrl, word, i);
  694. }
  695. }
  696.  
  697. function qSelectorAll(selector) {
  698. return document.querySelectorAll(selector);
  699. }
  700.  
  701. function qSelector(selector) {
  702. return document.querySelector(selector);
  703. }
  704.  
  705.  
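// Maps the prompt word to a list of model class names and sets the flags (USE_MOBILE_NET, USE_COLOUR_PATTERN, FILTER_CLOUDS, NEW_WORD_IDENTIFIED) that pick the matching strategy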
  706. async function getSynonyms(word) {
  707.  
  708. USE_MOBILE_NET = false;
  709. FILTER_CLOUDS = false;
  710. USE_COLOUR_PATTERN = false;
  711. NEW_WORD_IDENTIFIED = false;
  712.  
  713. if(word.equalsOneOf(KNOWN_WORDS)) {
  714. USE_MOBILE_NET = true;
  715. switch (word) {
  716. case MOTORBUS:
  717. case BUS:
  718. word = ['bus', 'motorbus'];
  719. break;
  720. case CAR:
  721. word = ['=car', 'coupe', 'jeep', 'limo', 'sport utility vehicle', 'station wagon', 'hatchback', 'bumper car', 'modelT', 'electric battery', 'cruiser', 'limousine', 'convertible', 'taxi', 'sport car', 'screen, CRT screen', 'recreational vehicle', 'television system'];
  722. break;
  723. case AIRPLANE:
  724. word = ['airplane', 'plane', 'aircraft', 'aeroplane', 'hangar', 'Airdock', 'JumboJet', 'jetliner', 'stealth fighter', 'field artillery', 'airliner', 'warplane', 'military plane']
  725. break;
  726. case TRAIN:
  727. word = ['train', 'rail', 'cable car', 'locomotive', 'subway station', 'passenger car']
  728. break;
  729. case BOAT:
  730. case SURFBOARD:
  731. word = ['=boat', '=barge', 'houseboat', 'boathouse', 'speedboat', '=submarine', 'bobsled', 'catamaran', 'schooner', 'ocean liner', 'lifeboat', 'fireboat', 'yawl', 'pontoon', 'small boat', 'SnowBlower', 'Sea-coast', 'paddlewheel', 'paddle wheel', 'PaddleSteamer', 'Freighter', 'Sternwheeler', 'kayak', 'canoe', 'deck', 'DockingFacility', 'surfboard', '=ship', '=cruise', 'watercraft', 'sail', 'canvas', '=raft']
  732. break;
  733. case BICYCLE:
  734. word = ['bicycle-built-for-two', 'tandem bicycle', 'bicycle', 'tricycle', 'mountain bike', 'AcceleratorPedal', 'macaw', 'knot']
  735. break;
  736. case MOTORCYCLE:
  737. word = ['moped', 'motor scooter', 'scooter', 'motorcycle', 'windshield', 'dashboard']
  738. break;
  739. case TRUCK:
  740. word = ['truck', 'cargocontainer', 'bazooka']
  741. break;
  742. case TRIMARAN:
  743. case SPEEDBOAT:
  744. case SEAPLANE:
  745. word = ['spatula', 'can opener', 'tin opener', 'monitor', 'screen', 'stretcher', 'printer', 'nail', 'mousetrap', 'TRIMARAN', 'space shuttle', 'ski', 'rotisserie', 'geyser', 'plate rack']
  746. break;
  747. case COUCH:
  748. word = ['couch', 'studio couch', 'dining table', 'home theater']
  749. break;
  750. case BRIDGE:
  751. word = ['steel arch bridge', 'pier', 'suspension bridge', 'viaduct']
  752. break;
  753. case BEDROOM:
  754. word = ['day bed', 'four-poster', 'quilt']
  755. break;
  756. case LIVING_ROOM:
  757. word = ['fire screen', 'window shade', 'entertainment center']
  758. break;
  759. case CONFERENCE_ROOM:
  760. word = ['restaurant', 'cinema', 'library', 'dining table', 'bannister', 'loudspeaker', 'maze', 'theater curtain']
  761. break;
  762. case HORSE_CLOUDS:
  763. word = ['sorrel', 'Great Dane', 'Mexican hairless']
  764. FILTER_CLOUDS = true
  765. break;
  766. case HORSE:
  767. word = ['sorrel', 'Great Dane', 'Mexican hairless']
  768. break;
  769. case LION:
  770. word = ['Panthera leo']
  771. break;
  772. case DOMESTIC_CAT:
  773. case CAT:
  774. word = ['Egyptian cat', 'Siamese', 'tabby cat', 'Japanese spaniel', 'Persian cat']
  775. break;
  776. case DOG:
  777. case CANINE:
  778. word = ['pit bull', 'Border collie', 'Cardigan Welsh corgi', 'golden retriever', 'Japanese spaniel', 'Chihuahua', 'Dandie Dinmont', 'English foxhound', 'EntleBucher', 'Eskimo dog', 'German shepherd',
  779. 'German short-haired pointer', 'Labrador retriever', 'Maltese dog', 'Pomeranian', 'Rhodesian ridgeback', 'Saint Bernard', 'bullterrier', 'Walker hound', 'beagle', 'boxer', 'black-and-tan coonhound',
  780. 'Canis dingo', 'toy terrier']
  781. break;
  782. case ELEPHANT_CLOUDS:
  783. word = ['African elephant', 'Indian elephant', 'tusker']
  784. FILTER_CLOUDS = true
  785. break;
  786. case ELEPHANT:
  787. word = ['African elephant', 'Indian elephant', 'tusker']
  788. break;
  789. case PARROT:
  790. case BIRD:
  791. word = ['macaw', 'African grey', 'lorikeet', 'cockatoo', 'coucal', 'partridge', 'hummingbird', 'bee eater', 'toucan']
  792. break;
  793. case GIRAFFE: //TODO: select by discard
  794. word = ['gazelle', 'deerhound']
  795. break;
  796. default:
  797. break;
  798. }
  799. return word;
  800. } else if (word.includesOneOf(LIVING_ROOM_TYPES)) {
  801. word = ['bed', 'couch', 'chair', 'potted plant', 'dining table', 'clock', 'tv', 'book']
  802. return word;
  803. } else if (word == VALLEY || word == VERTICAL_RIVER){
  804. word = ['alp','volcano']
  805. USE_COLOUR_PATTERN = true;
  806. return word;
  807. }
  808.  
  809. NEW_WORD_IDENTIFIED = true;
  810. console.log("Word does not match. New type identified::" + word);
  811. return word;
  812.  
  813. // console.log('@leaving getSynonyms', word);
  814. }
  815.  
  816. function isHidden(el) {
  817. return (el.offsetParent === null)
  818. }
  819.  
  820. if (window.location.href.includes("checkbox")) {
  821. var checkboxInterval = setInterval(function() {
  822. if (!qSelector(CHECK_BOX)) {
  823. //Wait until the checkbox element is visible
  824. } else if (qSelector(CHECK_BOX).getAttribute(ARIA_CHECKED) == "true") {
  825. clearInterval(checkboxInterval);
  826. } else if (!isHidden(qSelector(CHECK_BOX)) && qSelector(CHECK_BOX).getAttribute(ARIA_CHECKED) == "false") {
  827. qSelector(CHECK_BOX).click();
  828. } else {
  829. return;
  830. }
  831.  
  832. }, 5000);
  833. } else {
  834.  
  835. // try { await initializeTesseractWorker(); } catch (err) { console.log(err); console.log("Tesseract could not be initialized"); }
  836.  
  837. // try { await initializeTensorFlowModel(); } catch (err) { console.log(err); console.log("TF could not be initialized"); }
  838.  
  839. try { await initializeTensorFlowMobilenetModel(); } catch (err) { console.log(err); console.log("MobileNet could not be initialized"); }
  840.  
  841. try {
  842. selectImages();
  843. } catch (err) {
  844. console.log(err);
  845. console.log("selectImages error");
  846. }
  847. }
  848.  
  849. function selectImagesAfterDelay(delay) {
  850. setTimeout(function() {
  851. selectImages();
  852. }, delay * 1000);
  853. }
  854.  
  855. function triggerEvent(el, type) {
  856. var e = document.createEvent('HTMLEvents');
  857. e.initEvent(type, false, true);
  858. el.dispatchEvent(e);
  859. }
  860.  
  861. function triggerMouseEvent(el, type) {
  862. var e = document.createEvent('MouseEvent');
  863. e.initEvent(type, false, true);
  864. el.dispatchEvent(e);
  865. }
  866.  
  867. // Small hack to select the nodes
  868. function unsure(targetNodeText) {
  869. var targetNode = Array.from(qSelectorAll('div'))
  870. .find(el => el.textContent === targetNodeText);
  871. //Works for now
  872. //TODO: Select clothing
  873. //TODO: Draw boxes around images
  874. if (targetNode) {
  875. triggerMouseEvent(targetNode, 'mousedown');
  876. triggerMouseEvent(targetNode, 'mouseup');
  877. if (qSelector(SUBMIT_BUTTON)) {
  878. qSelector(SUBMIT_BUTTON).click();
  879. }
  880. }
  881. return selectImagesAfterDelay(1);
  882. }
  883.  
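// Extracts the quoted URL from a CSS background value; returns 0 when no https URL is present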
  884. function getUrlFromString(urlString) {
  885.  
  886. var imageUrl = urlString.substring(
  887. urlString.indexOf('"') + 1,
  888. urlString.lastIndexOf('"')
  889. );
  890.  
  891. if (!imageUrl || !imageUrl.includes("https")) {
  892. return 0;
  893. }
  894.  
  895. return imageUrl;
  896. }
  897.  
  898.  
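// Collects the background-image URLs of the nine task tiles; returns early with a shorter list if any URL is missing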
  899. function getImageList() {
  900. var imageList = [];
  901. if (qSelectorAll(IMAGE).length > 0) {
  902. for (var i = 0; i < 9; i++) {
  903. var urlString = qSelectorAll(IMAGE)[i].style.background;
  904. var imageUrl = getUrlFromString(urlString);
  905. if (imageUrl == 0) {
  906. //console.log("Image url is empty");
  907. return imageList;
  908. }
  909. imageList[i] = imageUrl;
  910. }
  911. }
  912. return imageList;
  913. }
  914.  
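// Polls every 3 seconds until all nine tiles have been processed (or a retry limit is reached), then submits and schedules the next round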
  915. function waitUntilImageSelection() {
  916. var imageIntervalCount = 0;
  917. var imageInterval = setInterval(function() {
  918. imageIntervalCount = imageIntervalCount + 1;
  919. if (selectedImageCount == 9) {
  920. clearInterval(imageInterval);
  921. //TODO: check if none was selected to trigger a refresh of the images
  922.  
  923. if (qSelector(SUBMIT_BUTTON)) {
  924. qSelector(SUBMIT_BUTTON).click();
  925. }
  926. return selectImagesAfterDelay(5);
  927. } else if (imageIntervalCount > 8) {
  928. clearInterval(imageInterval);
  929. return selectImages();
  930. } else if(selectedImageCount > 2 && MATCH_IMAGES_USING_TRAINER && NEW_WORD_IDENTIFIED && imageIntervalCount > 4){
  931. clearInterval(imageInterval);
  932. if (qSelector(SUBMIT_BUTTON)) {
  933. qSelector(SUBMIT_BUTTON).click();
  934. }
  935. return selectImagesAfterDelay(5);
  936. } else if(MATCH_IMAGES_USING_TRAINER && NEW_WORD_IDENTIFIED && imageIntervalCount > 6){
  937. clearInterval(imageInterval);
  938. if (qSelector(SUBMIT_BUTTON)) {
  939. qSelector(SUBMIT_BUTTON).click();
  940. }
  941. return selectImagesAfterDelay(5);
  942. }else{
  943.  
  944. }
  945. }, 3000);
  946. }
  947.  
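// Waits for the nine task tiles to appear; routes text challenges to OCR and yes/no style prompts to unsure()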
  948. function waitForImagesToAppear() {
  949. var checkImagesSelectedCount = 0;
  950. var waitForImagesInterval = setInterval(function() {
  951. checkImagesSelectedCount = checkImagesSelectedCount + 1;
  952. if (qSelectorAll(IMAGE) && qSelectorAll(IMAGE).length == 9) {
  953. clearInterval(waitForImagesInterval);
  954. return selectImages();
  955. } else if (checkImagesSelectedCount > 60) {
  956. clearInterval(waitForImagesInterval);
  957. } else if (qSelector(CHALLENGE_INPUT_FIELD) && qSelector(NO_SELECTION).getAttribute(ARIA_HIDDEN) != "true") {
  958. clearInterval(waitForImagesInterval);
  959. return imageUsingOCR();
  960. } else {
  961. //TODO: Identify Objects for the following (Ex: bed,chair,table etc)
  962. //Ref for clothing: https://www.youtube.com/watch?v=yWwzFnAnrLM, https://www.youtube.com/watch?v=FiNglI1wRNk,https://www.youtube.com/watch?v=oHAkK_9UCQ8
  963. var targetNodeList = ["Yes", "3 or more items of furniture", "Equipped space or room", "Photo is clean, no watermarks, logos or text overlays", "An interior photo of room", "Unsure", "Photo is sharp"];
  964. for (var j = 0; j < targetNodeList.length; j++) {
  965. var targetNode = Array.from(qSelectorAll('div'))
  966. .find(el => el.textContent === targetNodeList[j]);
  967. if (targetNode) {
  968. //console.log("Target Node Found");
  969. clearInterval(waitForImagesInterval);
  970. return unsure(targetNodeList[j]);
  971. }
  972. }
  973. }
  974. }, 5000);
  975. }
  976.  
  977. //TODO: Convert Image to base64 to avoid multiple calls
  978. function preProcessImage(base64Image, imageUrl) {
  979.  
  980. //Darken and Brighten
  981. Jimp.read(base64Image).then(function(data) {
  982. data.color([
  983.  
  984. {
  985. apply: 'darken',
  986. params: [20]
  987. }
  988.  
  989. ]).color([
  990.  
  991. {
  992. apply: 'brighten',
  993. params: [20]
  994. }
  995.  
  996. ])
  997. .greyscale()
  998. .getBase64(Jimp.AUTO, function(err, src) {
  999. var img = document.createElement("img");
  1000. img.setAttribute("src", src);
  1001.  
  1002. worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  1003. //Remove Image After recognizing
  1004. img.removeAttribute("src");
  1005. //If null change to other methods
  1006. if (data && data.text && data.text.length > 0) {
  1007. inputChallenge(postProcessImage(data), imageUrl);
  1008. return selectImages();
  1009. } else {
  1010. preProcessImageMethod2(base64Image, imageUrl);
  1011. }
  1012. });
  1013.  
  1014. });
  1015. });
  1016.  
  1017. }
  1018.  
  1019.  
  1020. function preProcessImageMethod2(base64Image, imageUrl) {
  1021.  
  1022. //Multi Contrast darken and brighten
  1023. Jimp.read(base64Image).then(function(data) {
  1024. data.color([
  1025.  
  1026. {
  1027. apply: 'darken',
  1028. params: [20]
  1029. }
  1030.  
  1031. ]).contrast(1).color([
  1032.  
  1033. {
  1034. apply: 'brighten',
  1035. params: [20]
  1036. }
  1037.  
  1038. ]).contrast(1).greyscale().getBase64(Jimp.AUTO, function(err, src) {
  1039. var img = document.createElement("img");
  1040. img.setAttribute("src", src);
  1041.  
  1042. worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  1043. //Remove Image After recognizing
  1044. img.removeAttribute("src");
  1045. if (data && data.text && data.text.length > 0) {
  1046. inputChallenge(postProcessImage(data), imageUrl);
  1047. return selectImages();
  1048. } else {
  1049. preProcessImageMethod3(base64Image, imageUrl);
  1050. }
  1051. });
  1052. });
  1053. });
  1054.  
  1055. }
  1056.  
  1057. function preProcessImageMethod3(base64Image, imageUrl) {
  1058. //Multi Contrast only brighten
  1059. Jimp.read(base64Image).then(function(data) {
  1060. data.contrast(1).color([{
  1061. apply: 'brighten',
  1062. params: [20]
  1063. }
  1064.  
  1065. ])
  1066. .contrast(1)
  1067. .greyscale()
  1068. .getBase64(Jimp.AUTO, function(err, src) {
  1069. var img = document.createElement("img");
  1070. img.setAttribute("src", src);
  1071.  
  1072. worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  1073. //Remove Image After recognizing
  1074. img.removeAttribute("src");
  1075. if (data && data.text && data.text.length > 0) {
  1076. inputChallenge(postProcessImage(data), imageUrl);
  1077. return selectImages();
  1078. } else {
  1079. preProcessImageMethod4(base64Image, imageUrl);
  1080. }
  1081. });
  1082. });
  1083. });
  1084. }
  1085.  
  1086. function preProcessImageMethod4(base64Image, imageUrl) {
  1087. //Resize the image
  1088. Jimp.read(base64Image).then(function(data) {
  1089. data.resize(256, Jimp.AUTO)
  1090. .quality(60) // set JPEG quality
  1091. .greyscale() // set greyscale
  1092. .getBase64(Jimp.AUTO, function(err, src) {
  1093. var img = document.createElement("img");
  1094. img.setAttribute("src", src);
  1095.  
  1096. worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  1097. //Remove Image After recognizing
  1098. img.removeAttribute("src");
  1099. inputChallenge(postProcessImage(data), imageUrl);
  1100. return selectImages();
  1101. });
  1102. });
  1103. });
  1104.  
  1105. }
  1106.  
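// Removes newlines and bracket characters from the OCR output before it is typed into the challenge input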
  1107. function postProcessImage(data) {
  1108. var filterValues = ['\n', '{', '}', '[', ']'];
  1109. for (var i = 0; i < filterValues.length; i++) {
  1110. data.text = data.text.replaceAll(filterValues[i], "");
  1111. }
  1112. return data;
  1113. }
  1114.  
  1115. // Using Tesseract to recognize images
  1116. function imageUsingOCR() {
  1117. try {
  1118. //console.log("Image using OCR");
  1119. var urlString = qSelector(IMAGE_FOR_OCR).style.background;
  1120. var imageUrl = getUrlFromString(urlString);
  1121. if (imageUrl == 0) {
  1122. return selectImagesAfterDelay(1);
  1123. }
  1124.  
  1125. Jimp.read(imageUrl).then(function(data) {
  1126.  
  1127. data.getBase64(Jimp.AUTO, function(err, src) {
  1128.  
  1129. var img = document.createElement("img");
  1130. img.setAttribute("src", src);
  1131. var base64Image = img.src;
  1132.  
  1133. preProcessImage(base64Image, imageUrl);
  1134.  
  1135. })});
  1136.  
  1137. } catch (err) {
  1138. console.log(err.message);
  1139. return selectImagesAfterDelay(1);
  1140. }
  1141. }
  1142.  
  1143.  
  1144. async function convertTextToImage(text) {
  1145.  
  1146. //Convert Text to image
  1147. var canvas = document.createElement("canvas");
  1148. var textLength = text.length;
  1149. canvas.width = 60 * textLength;
  1150. canvas.height = 80;
  1151. var ctx = canvas.getContext('2d');
  1152. ctx.font = "30px Arial";
  1153. ctx.fillText(text, 10, 50);
  1154. var img = document.createElement("img");
  1155. img.src = canvas.toDataURL();
  1156.  
  1157. return img;
  1158. }
  1159.  
  1160. async function convertImageToText(img) {
  1161.  
  1162. await initializeTesseractWorker();
  1163.  
  1164. //Convert Image to Text
  1165. var text = "";
  1166. await worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  1167. text = data.text;
  1168. // console.log("Recognized Text::" + text);
  1169. });
  1170. return text.trim();
  1171. }
  1172.  
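// Compares the current example image URLs with the previously stored ones to detect a new challenge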
  1173. function areExampleImageUrlsChanged() {
  1174.  
  1175. var prevExampleUrls = exampleImageList;
  1176. currentExampleUrls = [];
  1177.  
  1178. if (qSelectorAll(CHALLENGE_IMAGE).length > 0) {
  1179. for (let i = 0; i < qSelectorAll(CHALLENGE_IMAGE).length; i++) {
  1180. var urlString = qSelectorAll(CHALLENGE_IMAGE)[i].style.background;
  1181. var imageUrl = getUrlFromString(urlString);
  1182. if (imageUrl == 0) {
  1183. console.log("Image url is empty, Retrying...");
  1184. return true;
  1185. }
  1186. currentExampleUrls[i] = imageUrl;
  1187. }
  1188. }
  1189.  
  1190. if (prevExampleUrls.length != currentExampleUrls.length) {
  1191. return true;
  1192. }
  1193.  
  1194. for (let i = 0; i < currentExampleUrls.length; i++) {
  1195.  
  1196. if (prevExampleUrls[i] != currentExampleUrls[i]) {
  1197. return true;
  1198. }
  1199. }
  1200.  
  1201. return false;
  1202. }
  1203.  
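// Runs coco-ssd over the example images and collects the distinct detected classes into identifiedObjectsList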
  1204. async function identifyObjectsFromImages(imageUrlList) {
  1205. identifiedObjectsList = [];
  1206.  
  1207. for (let i = 0; i < imageUrlList.length; i++) {
  1208. try {
  1209. let img = new Image();
  1210. img.crossOrigin = "Anonymous";
  1211. img.src = imageUrlList[i];
  1212. img.onload = () => {
  1213. initializeTensorFlowModel().then(model => model.detect(img))
  1214. .then(function(predictions) {
  1215. let predictionslen = predictions.length;
  1216. let hashSet = new Set();
  1217. for (let j = 0; j < predictionslen; j++) {
  1218. hashSet.add(predictions[j].class);
  1219. }
  1220.  
  1221. hashSet.forEach((key) => {
  1222. identifiedObjectsList.push(key);
  1223. });
  1224.  
  1225. img.removeAttribute("src");
  1226.  
  1227. if (i == imageUrlList.length - 1) {
  1228. identifyObjectsFromImagesCompleted = true;
  1229. }
  1230.  
  1231. })
  1232. }
  1233. } catch (e) {
  1234. console.log(e);
  1235. }
  1236.  
  1237. }
  1238.  
  1239. }
  1240.  
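// Same collection step using the MobileNet classifier; comma-separated class names are split into separate entries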
  1241. async function identifyObjectsFromImagesUsingMobileNet(imageUrlList) {
  1242. identifiedObjectsList = [];
  1243.  
  1244. for (let i = 0; i < imageUrlList.length; i++) {
  1245. try {
  1246. let img = new Image();
  1247. img.crossOrigin = "Anonymous";
  1248. img.src = imageUrlList[i];
  1249. img.onload = () => {
  1250. initializeTensorFlowMobilenetModel().then(model => model.classify(img))
  1251. .then(function(predictions) {
  1252.  
  1253. let predictionslen = predictions.length;
  1254. let hashSet = new Set();
  1255. for (let j = 0; j < predictionslen; j++) {
  1256. if(predictions[j].className.includes(",")){
  1257. var multiPredictions = predictions[j].className.split(',');
  1258. for(let k=0; k< multiPredictions.length;k++){
  1259. hashSet.add(multiPredictions[k].trim());
  1260. }
  1261. }else{
  1262. hashSet.add(predictions[j].className);
  1263. }
  1264. }
  1265.  
  1266. hashSet.forEach((key) => {
  1267. identifiedObjectsList.push(key);
  1268. });
  1269.  
  1270. img.removeAttribute("src");
  1271.  
  1272. if (i == imageUrlList.length - 1) {
  1273. identifyObjectsFromImagesCompleted = true;
  1274. }
  1275.  
  1276. })
  1277. }
  1278. } catch (e) {
  1279. console.log(e);
  1280. }
  1281.  
  1282. }
  1283.  
  1284. }
  1285.  
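// Returns the most frequently identified object that belongs to a known category (transport, living room, animal or valley), or -1 if none qualifies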
  1286. async function getWordFromIdentifiedObjects(identifiedObjectsList) {
  1287.  
  1288. var hashMap = new Map();
  1289. for (var i = 0; i < identifiedObjectsList.length; i++) {
  1290. if (hashMap.has(identifiedObjectsList[i])) {
  1291. hashMap.set(identifiedObjectsList[i], hashMap.get(identifiedObjectsList[i]) + 1)
  1292. } else {
  1293. hashMap.set(identifiedObjectsList[i], 1)
  1294. }
  1295. }
  1296. var maxCount = 0,
  1297. objectKey = -1;
  1298. hashMap.forEach((value, key) => {
  1299. if (maxCount < value && (key.equalsOneOf(TRANSPORT_TYPES) ||
  1300. key.equalsOneOf(LIVING_ROOM_TYPES) ||
  1301. key.equalsOneOf(ANIMAL_TYPES)|| key == VALLEY)) {
  1302. objectKey = key;
  1303. maxCount = value;
  1304. }
  1305.  
  1306. });
  1307.  
  1308. return objectKey;
  1309. }
  1310.  
  1311.  
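// Types the recognized text into the challenge input, fires an input event and clicks submit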
  1312. function inputChallenge(data, imageUrl) {
  1313. try {
  1314. if ((qSelector(IMAGE_FOR_OCR).style.background).includes(imageUrl)) {
  1315. console.log(data.text);
  1316. var targetNode = qSelector(CHALLENGE_INPUT_FIELD);
  1317. targetNode.value = data.text.replaceAll("\n", "");
  1318. var challengeInput = qSelector(CHALLENGE_INPUT);
  1319. triggerEvent(challengeInput, 'input');
  1320. // Set a timeout if you want to see the text
  1321. qSelector(SUBMIT_BUTTON).click();
  1322. }
  1323.  
  1324. } catch (err) {
  1325. console.log(err.message);
  1326. }
  1327. }
  1328.  
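// Determines the target word from the example images, first with coco-ssd and then with MobileNet if nothing was recognized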
  1329. async function identifyWordFromExamples() {
  1330.  
  1331. var word = -1;
  1332. if (areExampleImageUrlsChanged()) {
  1333. exampleImageList = currentExampleUrls;
  1334. if (exampleImageList.length == 0) {
  1335. return -1;
  1336. }
  1337. identifyObjectsFromImages(exampleImageList);
  1338. while (!identifyObjectsFromImagesCompleted) {
  1339. await delay(2000)
  1340. }
  1341. identifyObjectsFromImagesCompleted = false;
  1342. word = await getWordFromIdentifiedObjects(identifiedObjectsList);
  1343.  
  1344. //Word has not been identified yet, use mobile net to recognize images
  1345. if (word == -1) {
  1346. //Initialize MobileNet Model
  1347. await initializeTensorFlowMobilenetModel();
  1348. identifyObjectsFromImagesUsingMobileNet(exampleImageList);
  1349. while (!identifyObjectsFromImagesCompleted) {
  1350. await delay(2000)
  1351. }
  1352. identifyObjectsFromImagesCompleted = false;
  1353.  
  1354. word = await getWordFromIdentifiedObjects(identifiedObjectsList);
  1355. }
  1356. return word;
  1357.  
  1358. } else {
  1359. return getWordFromIdentifiedObjects(identifiedObjectsList);
  1360. }
  1361.  
  1362. return word;
  1363. }
  1364.  
  1365. var prevObject = "";
  1366.  
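// Returns true when the prompt text differs from the one seen in the previous round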
  1367. function isObjectChanged() {
  1368. if (!prevObject && qSelector(PROMPT_TEXT)) {
  1369. prevObject = qSelector(PROMPT_TEXT).innerText;
  1370. return true;
  1371. }
  1372.  
  1373. if (prevObject && qSelector(PROMPT_TEXT) &&
  1374. prevObject == qSelector(PROMPT_TEXT).innerText) {
  1375. return false;
  1376. }
  1377.  
  1378. return true;
  1379.  
  1380. }
  1381.  
  1382.  
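// Reads and transliterates the prompt text, strips the sentence prefix and returns the target word, 'SKIPPABLE_WORD', or a word guessed from the example images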
  1383. async function identifyWord() {
  1384. var word = -1;
  1385. try {
  1386. if (window.location.href.includes('&hl=en') || (ENABLE_DEFAULT_LANGUAGE && DEFAULT_LANGUAGE == LANG_ENGLISH)) {
  1387. console.log('pre-transliterate', word)
  1388. word = qSelector(PROMPT_TEXT) ? qSelector(PROMPT_TEXT).innerText.transliterate() : word;
  1389. console.log('post-transliterate', word)
  1390. // console.log('@identifyWord', word)
  1391. if (word && (word.includes(SENTENCE_TEXT_A) || word.includes(SENTENCE_TEXT_AN) || word.includes(SENTENCE_TEXT_B))) {
  1392. word = word.replace(SENTENCE_TEXT_A, '');
  1393. word = word.replace(SENTENCE_TEXT_AN, '');
  1394. word = word.replace(SENTENCE_TEXT_B, '');
  1395. word = word.trim();
  1396. }
  1397. // console.log('@posttrim', word)
  1398.  
  1399. if (word.equalsOneOf(TRANSPORT_TYPES) || word == VERTICAL_RIVER || word.equalsOneOf(KNOWN_WORDS)) {
  1400. return word;
  1401. } else if (word.equalsOneOf(SKIPPABLE_WORDS)) {
  1402. return 'SKIPPABLE_WORD';
  1403. } else {
  1404. // Unknown/New word
  1405. word = await identifyWordFromExamples();
  1406. }
  1407. } else {
  1408. //If word is not english
  1409. //Identify Images from Example
  1410. word = await identifyWordFromExamples();
  1411. }
  1412.  
  1413. } catch (e) {
  1414. console.log(e);
  1415. }
  1416.  
  1417. return word;
  1418. }
  1419.  
  1420. var prevWord = "";
  1421.  
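// Main loop: applies the default language, identifies the target word, dispatches each tile to the appropriate matcher and waits for the selection to finish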
  1422. async function selectImages() {
  1423.  
  1424. if (ENABLE_DEFAULT_LANGUAGE) {
  1425. for (let i = 0; i < qSelectorAll(LANGUAGE_SELECTOR).length; i++) {
  1426. if (qSelectorAll(LANGUAGE_SELECTOR)[i].innerText == DEFAULT_LANGUAGE) {
  1427. document.querySelectorAll(LANGUAGE_SELECTOR)[i].click();
  1428. await delay(1000);
  1429. }
  1430. }
  1431. }
  1432.  
  1433. if (qSelectorAll(IMAGE) && qSelectorAll(IMAGE).length == 9 && qSelector(NO_SELECTION).getAttribute(ARIA_HIDDEN) != "true") {
  1434. selectedImageCount = 0;
  1435. try {
  1436.  
  1437. if (isObjectChanged()) {
  1438. prevWord = await identifyWord();
  1439. }
  1440.  
  1441. var word = prevWord;
  1442.  
  1443. if (word == 'SKIPPABLE_WORD') {
  1444. if (skipCount >= MAX_SKIPS) {
  1445. console.log("Max Retries Attempted. Captcha cannot be solved");
  1446. return;
  1447. } else {
  1448. console.log("Skipping word");
  1449. if (qSelector(REFRESH_CHALLENGE_BUTTON)) {
  1450. qSelector(REFRESH_CHALLENGE_BUTTON).click();
  1451. }
  1452. return selectImagesAfterDelay(5);
  1453. }
  1454. } else if (word == -1 && skipCount >= MAX_SKIPS) {
  1455. console.log("Max Retries Attempted. Captcha cannot be solved");
  1456. return;
  1457. } else if (word == -1 && skipCount < MAX_SKIPS) {
  1458. skipCount++;
  1459. if (qSelector(SUBMIT_BUTTON)) {
  1460. qSelector(SUBMIT_BUTTON).click();
  1461. }
  1462. return selectImagesAfterDelay(5);
  1463. } else {
  1464. //Get Synonyms for the word
  1465. word = await getSynonyms(word);
  1466. //console.log("words are::" + word);
  1467. }
  1468.  
  1469.  
  1470. } catch (err) {
  1471. console.log(err.message);
  1472. return selectImagesAfterDelay(5);
  1473. }
  1474.  
  1475. var imageList = [];
  1476. try {
  1477. imageList = getImageList();
  1478. if (imageList.length != 9) {
  1479. //console.log("Waiting");
  1480. // Image containers are visible but there are no urls in the image
  1481. // Skip the image
  1482. if (qSelector(SUBMIT_BUTTON)) {
  1483. qSelector(SUBMIT_BUTTON).click();
  1484. }
  1485. return selectImagesAfterDelay(5);
  1486. }
  1487. } catch (err) {
  1488. console.log(err.message);
  1489. return selectImagesAfterDelay(5);
  1490. }
  1491.  
  1492. //Identifying word for seaplane and matching images
  1493. //TODO: Refactor Code to combine different models or use only one model based on accuracy
  1494. if(word && word != -1 && MATCH_IMAGES_USING_TRAINER && NEW_WORD_IDENTIFIED){
  1495. for (let i = 0; i < 9; i++) {
  1496. matchImagesUsingTrainer(imageList[i], word, i);
  1497. }
  1498. }else if(word && word != -1 && USE_COLOUR_PATTERN){
  1499. for (let i = 0; i < 9; i++) {
  1500. matchImageForVerticalRiver(imageList[i], word, i);
  1501. }
  1502. }else if (word && word != -1 && USE_MOBILE_NET) {
  1503. if (FILTER_CLOUDS) {
  1504. matchWorkaroundPrefiltered(imageList, word);
  1505. } else {
  1506. // matchWorkaroundPrefiltered(imageList, word);
  1507. for (let i = 0; i < 9; i++) {
  1508. matchImagesUsingTensorFlowMobileNet(imageList[i], word, i);
  1509. }
  1510. }
  1511. } else if (word && word != -1) {
  1512. for (var i = 0; i < 9; i++) {
  1513. if (ENABLE_TENSORFLOW) {
  1514. matchImagesUsingTensorFlow(imageList[i], word, i);
  1515. } else {
  1516. matchImages(imageList[i], word, i);
  1517. }
  1518. }
  1519. }
  1520. waitUntilImageSelection();
  1521.  
  1522. } else {
  1523. waitForImagesToAppear();
  1524. }
  1525. }
  1526.  
  1527.  
  1528. })();