From 404bc59e22c19b1e786d320df63bb636f5b24d12 Mon Sep 17 00:00:00 2001
From: Alex Crocker
Date: Sat, 20 Oct 2018 16:06:42 -0400
Subject: [PATCH] Final Update

---
 CHANGELOG           | 15 +++++++-
 OGCheckr.py         | 23 ------------
 README.md           |  4 +--
 config.ini          | 11 +++---
 lib/ConfigHelper.py | 71 ++++++++++++++++++++++++++++++++++++
 lib/ProxyHelper.py  | 16 ++++-----
 lib/configure.py    | 88 ---------------------------------------------
 lib/cookie.py       |  8 +++--
 lib/get.py          | 31 +++++++---------
 lib/headers.py      |  6 ++--
 lib/log.py          | 19 +++++-----
 lib/parse.py        | 28 +++++++--------
 lib/payload.py      |  7 ++--
 lib/post.py         | 27 +++++++-------
 lib/replace.py      | 12 +++----
 lib/statuses.py     |  6 ++--
 og.py               | 25 +++++++++++++
 17 files changed, 197 insertions(+), 200 deletions(-)
 delete mode 100755 OGCheckr.py
 create mode 100644 lib/ConfigHelper.py
 delete mode 100644 lib/configure.py
 create mode 100755 og.py

diff --git a/CHANGELOG b/CHANGELOG
index 641f647..6a33897 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -2,6 +2,19 @@
          CURRENT RELEASE
 ================================
 
+Farewell OGCheckr, Hello Penguin!
+v1.9 (October 20, 2018)
+
+This is the final release of OGCheckr CLI in its current state. My future efforts will be put towards the Penguin API platform and client applications.
+
+- Re-wrote the configuration helper class to recognize non-standard config values such as yes/no instead of true/false.
+- Configuration values are no longer case sensitive.
+- Shortened the filename of the main script to og.py.
+
+================================
+          LAST RELEASE
+================================
+
 The Bug Expedition
 v1.8 (April 1, 2018)
 
@@ -12,7 +25,7 @@ v1.8 (April 1, 2018)
 - Removed and consolidated some of the files in the libs/ directory
 
 ================================
-          LAST RELEASE
+         OLDER RELEASE
 ================================
 
 The Glorious Update
diff --git a/OGCheckr.py b/OGCheckr.py
deleted file mode 100755
index 2962867..0000000
--- a/OGCheckr.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python3
-# Python Standard Modules
-import sys
-import os
-from lib.configure import getSite as SITE
-from lib.configure import enableProxy as PROXY
-from lib.configure import proxyFiltering as PFILTER
-from lib.ProxyHelper import ProxyHelper
-
-def main():
-    if PROXY() == "True":
-        if PFILTER() == "True":
-            ProxyHelper().checkProxies()
-
-    if (SITE() == 5) or (SITE() == 6): # Steam
-        import lib.parse
-    elif SITE() == 4:
-        import lib.post
-    else:
-        import lib.get
-
-if __name__ == "__main__":
-    main()
diff --git a/README.md b/README.md
index 90f7d5f..3768ed0 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-![OGCheckr CLI username availability checker application](http://d.pr/i/M94CyF+ "OGCheckr CLI")
+![OGCheckr CLI username availability checker application](https://i.imgur.com/77zuFqp.png "OGCheckr CLI")
 
 For additional information and installation instructions, view the wiki.
-https://github.com/ogplus/cli-checker/wiki/
+https://github.com/crock/cli-checker/wiki/
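
A side note on the yes/no change called out in the CHANGELOG above: Python's standard configparser already understands yes/no and on/off, case-insensitively, through its BOOLEAN_STATES table, and the getboolean() calls in the new helper (shown further down) lean on exactly that. A minimal standalone sketch, separate from the patch itself:

import configparser

# BOOLEAN_STATES maps '1'/'yes'/'true'/'on' to True and
# '0'/'no'/'false'/'off' to False; getboolean() lowercases the raw
# value before the lookup, so 'Yes' and 'NO' are accepted as well.
cfg = configparser.ConfigParser()
cfg.read_string("""
[proxy]
enableProxy = no
proxyFiltering = YES
""")

print(cfg.getboolean('proxy', 'enableProxy'))     # False
print(cfg.getboolean('proxy', 'proxyFiltering'))  # True
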
diff --git a/config.ini b/config.ini
index 409cf34..a4818ac 100644
--- a/config.ini
+++ b/config.ini
@@ -24,7 +24,7 @@
 ; If your target site is not listed, put "1" for CUSTOM (without the quotes).
 siteNum = 5
 ; Fill in the option below with the profile URL of the service you want to check available names for.
-; Use %%word%% as the placeholder for the username to check.
+; Use %%word%% or %%name%% as the placeholder for the username to check.
 ; customSite is only for sites not specifically listed in the chart above, but please be aware
 ; that not every site will work this way. If there is a service you would like to see support for, please
 ; don't hesitate to let Croc know.
@@ -38,11 +38,10 @@
 output = AVAILABLE.txt
 wordList = EXAMPLE-50.txt
 [proxy]
-; To enable proxy support, put True. To disable, put False
-enableProxy = False
-; To enable proxy filtering (sorting out the bad proxies that no longer work), put True.
-; To disable, put False. It has no effect if enableProxy is False
-proxyFiltering = True
+; Should the checker use proxies for checking?
+enableProxy = no
+; Should the checker filter out the bad proxies before going through the word list?
+proxyFiltering = yes
 ; If proxy support is enabled, you must specify the path to the proxy list you want to use here
 ; Place all proxy lists in the proxy_lists directory
 ; Place one proxy per line in this format --> ip:port
diff --git a/lib/ConfigHelper.py b/lib/ConfigHelper.py
new file mode 100644
index 0000000..510eec9
--- /dev/null
+++ b/lib/ConfigHelper.py
@@ -0,0 +1,71 @@
+import configparser
+import os
+import re
+
+# Regex Patterns
+PLACEHOLDER = r"%%(name|word)%%"
+URLPATT = r"(^https?:\/\/[-.a-zA-Z0-9]+)"
+DOMAIN = r"(?:https:\/\/)?(?:\w+\.)?(\w+)\.\w+\/?"
+
+config = configparser.ConfigParser()
+config.read('config.ini')
+
+class ConfigHelper:
+
+    def getSite(self):
+        return config.getint('site', 'siteNum', fallback=5)
+
+
+    def getCustomUrl(self):
+        url = config.get('site', 'customSite')
+        if re.match(PLACEHOLDER, url):
+            return url
+
+
+    def enableProxy(self):
+        return config.getboolean('proxy', 'enableProxy', fallback=False)
+
+
+    def proxyFiltering(self):
+        return config.getboolean('proxy', 'proxyFiltering', fallback=False)
+
+
+    def getProxies(self, filename_only=False):
+        if filename_only is True:
+            return config.get('proxy', 'proxyList')
+        proxies = []
+        path = os.path.join("proxy_lists", config.get('proxy', 'proxyList'))
+        if path is not None:
+            fx = open(path, 'r')
+            proxies = fx.read().split('\n')
+            fx.close()
+            return proxies
+        else:
+            if not self.enableProxy():
+                print("Proxy support is disabled. Please enable it in the config.")
+                exit()
+            elif proxies is None:
+                print("Specified proxy list is empty. Please add some proxies.")
+                exit()
+            else:
+                print("Unknown error.")
+                exit()
+
+    def getWords(self):
+        words = []
+        path = os.path.join("word_lists", config.get('lists', 'wordList'))
+        if path is not None:
+            fx = open(path, 'r')
+            words = fx.read().split('\n')
+            fx.close()
+            return words
+        else:
+            print("Word list not found.\n[DEBUG] %s" % path)
+
+
+    def getOutputList(self):
+        return config.get('lists', 'output', fallback="AVAILABLE.txt")
+
+
+    def numThreads(self):
+        return config.getint('multithreading', 'threadCount')
\ No newline at end of file
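
One caveat in the new helper as committed: os.path.join() always returns a string, so the "if path is not None" guards in getProxies() and getWords() always take the true branch, and the error handling beneath them is unreachable — a missing list file raises FileNotFoundError instead. A sketch of a check that would actually fire, assuming the same directory layout; read_lines and the sample filename are illustrative, not part of the codebase:

import os

def read_lines(directory, filename):
    # os.path.join never returns None, so test for the file itself.
    path = os.path.join(directory, filename)
    if not os.path.isfile(path):
        print("List not found: %s" % path)
        raise SystemExit(1)
    with open(path, 'r') as fx:
        # Drop blank lines so a trailing newline doesn't yield '' entries.
        return [line.strip() for line in fx if line.strip()]

proxies = read_lines("proxy_lists", "proxies.txt")  # hypothetical filename
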
Please add some proxies.") + exit() + else: + print("Unknown error.") + exit() + + def getWords(self): + words = [] + path = os.path.join("word_lists", config.get('lists', 'wordList')) + if path is not None: + fx = open(path, 'r') + words = fx.read().split('\n') + fx.close() + return words + else: + print("Word list not found.\n[DEBUG] %s" % path) + + + def getOutputList(self): + return config.get('lists', 'output', fallback="AVAILABLE.txt") + + + def numThreads(self): + return config.getint('multithreading', 'threadCount') \ No newline at end of file diff --git a/lib/ProxyHelper.py b/lib/ProxyHelper.py index 92fe49d..97ac038 100644 --- a/lib/ProxyHelper.py +++ b/lib/ProxyHelper.py @@ -3,16 +3,16 @@ from queue import Queue import time import requests -from lib.configure import getProxyList as PROXYLIST -from lib.configure import numThreads as THREADCOUNT -from lib.configure import config +from lib.ConfigHelper import ConfigHelper -class ProxyHelper(): +ch = ConfigHelper() + +class ProxyHelper(): def __init__(self): self.session = requests.Session() - self.proxies = PROXYLIST() - self.numProxies = len(PROXYLIST()) + self.proxies = ch.getProxies() + self.numProxies = len(ch.getProxies()) self.print_lock = threading.Lock() self.queue = Queue() self.good = [] @@ -59,7 +59,7 @@ def checkProxies(self): start = time.time() print("Starting up threads...") - for x in range(THREADCOUNT()): + for x in range(ch.numThreads()): t = threading.Thread(target = self.threader) t.daemon = True t.start() @@ -85,6 +85,6 @@ def checkProxies(self): numBad = len(self.bad) print("\nSearched %s proxies and filtered out %s bad proxies in %s seconds" % (self.numProxies, numBad, total)) - path = "proxy_lists/%s" % config["proxy"]["proxyList"] + path = "proxy_lists/%s" % ch.getProxies(filename_only=True) os.remove(path) os.rename('proxy_lists/good_proxies.txt', path) diff --git a/lib/configure.py b/lib/configure.py deleted file mode 100644 index c2b7a1c..0000000 --- a/lib/configure.py +++ /dev/null @@ -1,88 +0,0 @@ -import configparser - -# Regex Patterns -PLACEHOLDER = r"%word%" -URLPATT = r"(^https?:\/\/[-.a-zA-Z0-9]+)" -DOMAIN = r"(?:https:\/\/)?(?:\w+\.)?(\w+)\.\w+\/?" - -# Reads configuration file -config = configparser.ConfigParser() -config.read('config.ini') - - -def getSite(): - x = config['site']['siteNum'] - if x == "": - print("A site must be specified in the config file or via CLI arguments.") - exit() - else: - return int(x) - -def getCustomUrl(): - x = config['site']['customSite'] - if x == "": - print("A url must be specified in the config file for the customSite option. Ex: https://twitter.com/test") - exit() - else: - return str(x) - -def enableProxy(): - x = config['proxy']['enableProxy'] - if x == "": - print("Either True or False must be specified for enableProxy in the config file.") - exit() - else: - return x - -def proxyFiltering(): - x = config['proxy']['proxyFiltering'] - if x == "": - print("Either True or False must be specified for proxyFiltering in the config file.") - exit() - else: - return x - -def getProxyList(): - proxies = [] - path = "proxy_lists/" + config['proxy']['proxyList'] - if path is not None: - fx = open(path, 'r') - proxies = fx.read().split('\n') - fx.close() - return proxies - else: - if not enableProxy(): - print("Proxy support is disabled. Please enable it in the config.") - exit() - elif proxies is None: - print("Specified proxy list is empty. 
Please add some proxies.") - exit() - else: - print("Unkown error.") - exit() - -def getWordList(): - x = "word_lists/" + config['lists']['wordList'] - if x == "": - print("Place just the filename of the list for wordList in the config file.\nAll word lists go in the word_lists directory.") - exit() - else: - return str(x) - - -def getOutputList(): - x = config['lists']['output'] - if x == "": - print("Enter the filename for the file you want the available names to be logged to.") - exit() - else: - return str(x) - - -def numThreads(): - x = config['multithreading']['threadCount'] - if x == "": - print("Enter the number of processor threads you want to use.") - exit() - else: - return int(x) diff --git a/lib/cookie.py b/lib/cookie.py index 487d531..7a6156b 100644 --- a/lib/cookie.py +++ b/lib/cookie.py @@ -1,9 +1,13 @@ import requests from lib.replace import * -from lib.configure import getSite as SITE +from lib.ConfigHelper import ConfigHelper +from lib.ProxyHelper import ProxyHelper + +ch = ConfigHelper() +ph = ProxyHelper() s = requests.Session() def get_cookie(): - r = s.get(URLS[SITE()]) + r = s.get(URLS[ch.getSite()]) return r.cookies \ No newline at end of file diff --git a/lib/get.py b/lib/get.py index 2369eff..42e1732 100644 --- a/lib/get.py +++ b/lib/get.py @@ -3,42 +3,37 @@ import threading from queue import Queue import time -from lib.ProxyHelper import ProxyHelper from lib.log import log_result from lib.replace import replace -from lib.configure import enableProxy as PROXY -from lib.configure import getProxyList as PROXYLIST -from lib.configure import getSite as SITE -from lib.configure import numThreads as THREADS -from lib.configure import getWordList as WORD_LIST +from lib.ConfigHelper import ConfigHelper +from lib.ProxyHelper import ProxyHelper -print_lock = threading.Lock() +ch = ConfigHelper() +ph = ProxyHelper() -# Reads word list from file and adds each name to array words[] -fx = open(WORD_LIST(), 'r') -words = fx.read().split('\n') -fx.close() +print_lock = threading.Lock() +words = ch.getWords() def requestJob(item): word = words[item] - if SITE()==3 and not 4