Final Update
crock committed Oct 20, 2018
1 parent d412c1f commit 404bc59
Showing 17 changed files with 197 additions and 200 deletions.
15 changes: 14 additions & 1 deletion CHANGELOG
@@ -2,6 +2,19 @@
CURRENT RELEASE
================================

Farewell OGCheckr, Hello Penguin!
v1.9 (October 20, 2018)

This is the final release of OGCheckr CLI in its current state. Future efforts will go toward the Penguin API platform and its client applications.

- Rewrote the configuration helper class to recognize non-standard config values such as yes/no instead of true/false (see the sketch below)
- Configuration values are no longer case-sensitive
- Shortened the main script's filename to og.py
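
For context, Python's configparser resolves these relaxed boolean forms natively through getboolean(), which is what the rewritten helper leans on. A minimal illustrative sketch (the section and key names mirror config.ini, but the snippet itself is not code from this commit):

import configparser

config = configparser.ConfigParser()
# getboolean() accepts 1/yes/true/on and 0/no/false/off, case-insensitively,
# which is what lets config.ini say "yes"/"no" instead of "True"/"False".
config.read_string("""
[proxy]
enableProxy = No
proxyFiltering = YES
""")

print(config.getboolean('proxy', 'enableProxy'))     # False
print(config.getboolean('proxy', 'proxyFiltering'))  # True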

================================
LAST RELEASE
================================

The Bug Expedition
v1.8 (April 1, 2018)

@@ -12,7 +12,7 @@ v1.8 (April 1, 2018)
- Removed and consolidated some of the files in the libs/ directory

================================
-LAST RELEASE
+OLDER RELEASE
================================

The Glorious Update
23 changes: 0 additions & 23 deletions OGCheckr.py

This file was deleted.

4 changes: 2 additions & 2 deletions README.md
@@ -1,4 +1,4 @@
-![OGCheckr CLI username availability checker application](http://d.pr/i/M94CyF+ "OGCheckr CLI")
+![OGCheckr CLI username availability checker application](https://i.imgur.com/77zuFqp.png "OGCheckr CLI")

For additional information and installation instructions, view the wiki.
-https://github.com/ogplus/cli-checker/wiki/
+https://github.com/crock/cli-checker/wiki/
11 changes: 5 additions & 6 deletions config.ini
@@ -24,7 +24,7 @@
; If your target site is not listed, put "1" for CUSTOM (without the quotes).
siteNum = 5
; Fill in the option below with the profile URL of the service you want to check available names for.
-; Use %%word%% as the placeholder for the username to check.
+; Use %%word%% or %%name%% as the placeholder for the username to check.
; customSite is only for sites not specifically listed in the chart above, but please be aware
; that not every site will work this way. If there is a service you would like to see support for, please
; don't hesitate to let Croc know.
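
As an aside, expanding that placeholder amounts to a single regex substitution. The real logic lives in lib/replace.py, which this diff does not show; fill_placeholder below is a hypothetical sketch, not the project's replace():

import re

PLACEHOLDER = r"%%(name|word)%%"  # same pattern the new ConfigHelper uses

def fill_placeholder(template, username):
    # Substitute every %%word%% / %%name%% occurrence with the username.
    return re.sub(PLACEHOLDER, username, template)

print(fill_placeholder("https://example.com/user/%%word%%", "croc"))
# -> https://example.com/user/croc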
@@ -38,11 +38,10 @@ output = AVAILABLE.txt
wordList = EXAMPLE-50.txt

[proxy]
-; To enable proxy support, put True. To disable, put False
-enableProxy = False
-; To enable proxy filtering (sorting out the bad proxies that no longer work), put True.
-; To disable, put False. It has no effect if enableProxy is False
-proxyFiltering = True
+; Should the checker use proxies for checking?
+enableProxy = no
+; Should the checker filter out the bad proxies before going through the word list?
+proxyFiltering = yes
; If proxy support is enabled, you must specify the path to the proxy list you want to use here
; Place all proxy lists in the proxy_lists directory
; Place one proxy per line in this format --> ip:port
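
For reference, an ip:port line maps onto a requests proxy configuration roughly as follows. The project's actual logic is in ProxyHelper.setProxy, which this diff does not show; set_proxy here is a hypothetical sketch:

import requests

def set_proxy(session, proxy):
    # proxy is one "ip:port" line from the proxy list
    session.proxies = {
        "http": "http://%s" % proxy,
        "https": "http://%s" % proxy,
    }
    return session

session = set_proxy(requests.Session(), "127.0.0.1:8080")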
71 changes: 71 additions & 0 deletions lib/ConfigHelper.py
@@ -0,0 +1,71 @@
import configparser
import os
import re

# Regex Patterns
PLACEHOLDER = r"%%(name|word)%%"
URLPATT = r"(^https?:\/\/[-.a-zA-Z0-9]+)"
DOMAIN = r"(?:https:\/\/)?(?:\w+\.)?(\w+)\.\w+\/?"

config = configparser.ConfigParser()
config.read('config.ini')

class ConfigHelper:

    def getSite(self):
        return config.getint('site', 'siteNum', fallback=5)

    def getCustomUrl(self):
        url = config.get('site', 'customSite')
        # search() rather than match(): the %%word%%/%%name%% placeholder
        # sits mid-URL, and match() only anchors at the start of the string.
        if re.search(PLACEHOLDER, url):
            return url

    def enableProxy(self):
        return config.getboolean('proxy', 'enableProxy', fallback=False)

    def proxyFiltering(self):
        return config.getboolean('proxy', 'proxyFiltering', fallback=False)

    def getProxies(self, filename_only=False):
        if filename_only:
            return config.get('proxy', 'proxyList')
        # os.path.join() always returns a string, so check that the file
        # actually exists instead of comparing the path against None.
        path = os.path.join("proxy_lists", config.get('proxy', 'proxyList'))
        if not os.path.isfile(path):
            print("Proxy list not found.\n[DEBUG] %s" % path)
            exit()
        with open(path, 'r') as fx:
            proxies = [line for line in fx.read().split('\n') if line]
        if not proxies:
            print("Specified proxy list is empty. Please add some proxies.")
            exit()
        return proxies

    def getWords(self):
        path = os.path.join("word_lists", config.get('lists', 'wordList'))
        if not os.path.isfile(path):
            print("Word list not found.\n[DEBUG] %s" % path)
            exit()
        with open(path, 'r') as fx:
            # Drop blank lines so trailing newlines don't produce empty names.
            words = [line for line in fx.read().split('\n') if line]
        return words

    def getOutputList(self):
        return config.get('lists', 'output', fallback="AVAILABLE.txt")

    def numThreads(self):
        return config.getint('multithreading', 'threadCount')
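
A quick usage sketch of the new helper, assuming it runs from the repository root so the module-level config.read('config.ini') call finds the file:

from lib.ConfigHelper import ConfigHelper

ch = ConfigHelper()
print(ch.getSite())                           # e.g. 5, per the siteNum key
if ch.enableProxy():
    proxies = ch.getProxies()                 # list of "ip:port" strings
    print(ch.getProxies(filename_only=True))  # just the configured filename
print(ch.getOutputList())                     # e.g. "AVAILABLE.txt"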
16 changes: 8 additions & 8 deletions lib/ProxyHelper.py
@@ -3,16 +3,16 @@
from queue import Queue
import time
import requests
-from lib.configure import getProxyList as PROXYLIST
-from lib.configure import numThreads as THREADCOUNT
-from lib.configure import config
+from lib.ConfigHelper import ConfigHelper

-class ProxyHelper():
+ch = ConfigHelper()
+
+class ProxyHelper():

    def __init__(self):
        self.session = requests.Session()
-        self.proxies = PROXYLIST()
-        self.numProxies = len(PROXYLIST())
+        self.proxies = ch.getProxies()
+        self.numProxies = len(ch.getProxies())
        self.print_lock = threading.Lock()
        self.queue = Queue()
        self.good = []
@@ -59,7 +59,7 @@ def checkProxies(self):
        start = time.time()

        print("Starting up threads...")
-        for x in range(THREADCOUNT()):
+        for x in range(ch.numThreads()):
            t = threading.Thread(target=self.threader)
            t.daemon = True
            t.start()
@@ -85,6 +85,6 @@ def checkProxies(self):
        numBad = len(self.bad)
        print("\nSearched %s proxies and filtered out %s bad proxies in %s seconds" % (self.numProxies, numBad, total))

-        path = "proxy_lists/%s" % config["proxy"]["proxyList"]
+        path = "proxy_lists/%s" % ch.getProxies(filename_only=True)
        os.remove(path)
        os.rename('proxy_lists/good_proxies.txt', path)
88 changes: 0 additions & 88 deletions lib/configure.py

This file was deleted.

8 changes: 6 additions & 2 deletions lib/cookie.py
@@ -1,9 +1,13 @@
import requests
from lib.replace import *
-from lib.configure import getSite as SITE
+from lib.ConfigHelper import ConfigHelper
+from lib.ProxyHelper import ProxyHelper
+
+ch = ConfigHelper()
+ph = ProxyHelper()

s = requests.Session()

def get_cookie():
-    r = s.get(URLS[SITE()])
+    r = s.get(URLS[ch.getSite()])
    return r.cookies
31 changes: 13 additions & 18 deletions lib/get.py
@@ -3,42 +3,37 @@
import threading
from queue import Queue
import time
-from lib.ProxyHelper import ProxyHelper
from lib.log import log_result
from lib.replace import replace
-from lib.configure import enableProxy as PROXY
-from lib.configure import getProxyList as PROXYLIST
-from lib.configure import getSite as SITE
-from lib.configure import numThreads as THREADS
-from lib.configure import getWordList as WORD_LIST
+from lib.ConfigHelper import ConfigHelper
+from lib.ProxyHelper import ProxyHelper

-print_lock = threading.Lock()
+ch = ConfigHelper()
+ph = ProxyHelper()

-# Reads word list from file and adds each name to array words[]
-fx = open(WORD_LIST(), 'r')
-words = fx.read().split('\n')
-fx.close()
+print_lock = threading.Lock()
+words = ch.getWords()

def requestJob(item):
    word = words[item]

-    if SITE()==3 and not 4<len(word)<16:
+    if ch.getSite()==3 and not 4<len(word)<16:
        with print_lock:
            print("["+threading.current_thread().name+"] "+word+" is UNAVAILABLE on twitter because it has illegal length.")
-    elif SITE()==10 and not len(word)<40:
+    elif ch.getSite()==10 and not len(word)<40:
        with print_lock:
            print("["+threading.current_thread().name+"] "+word+" is UNAVAILABLE on github because it has illegal length.")
-    elif SITE()==13 and not 2<len(word)<21:
+    elif ch.getSite()==13 and not 2<len(word)<21:
        with print_lock:
            print("["+threading.current_thread().name+"] "+word+" is UNAVAILABLE on pastebin because it has illegal length.")
    else:
        link = replace(word)
        s = requests.Session()
-        if PROXY() == "True":
-            plist = PROXYLIST()
+        if ch.enableProxy():
+            plist = ch.getProxies()
            i = random.randrange(len(plist))
-            sess = ProxyHelper().setProxy(s, plist[i])
+            sess = ph.setProxy(s, plist[i])
            r = sess.get(link)
        else:
            r = s.get(link)
@@ -54,7 +49,7 @@ def threader():
start = time.time()

q = Queue()
-for x in range(THREADS()):
+for x in range(ch.numThreads()):
    t = threading.Thread(target=threader)
    t.daemon = True
    t.start()
6 changes: 4 additions & 2 deletions lib/headers.py
@@ -1,7 +1,9 @@
-from lib.configure import getSite as SITE
+from lib.ConfigHelper import ConfigHelper
+
+ch = ConfigHelper()

def prepare_headers(cookie):
-    if SITE() == 4:
+    if ch.getSite() == 4:
        return {
            "referer": "https://www.instagram.com",
            "x-csrftoken": cookie['csrftoken']
