Skip to content

Commit 404bc59

Browse files
committed
Final Update
1 parent d412c1f commit 404bc59

File tree

17 files changed

+197
-200
lines changed

17 files changed

+197
-200
lines changed

CHANGELOG

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,19 @@
22
CURRENT RELEASE
33
================================
44

5+
Farewell OGCheckr, Hello Penguin!
6+
v1.9 (October 20, 2018)
7+
8+
This is the final release of OGCheckr CLI in its current state. My future efforts will be put towards the Penguin API platform and client applications.
9+
10+
- Re-wrote the configuration helper class to recognize non-standard config values such as yes/no instead of true/false.
11+
- Configuration values are no longer case sensitive
12+
- Shortened filename of main script to og.py
13+
14+
================================
15+
LAST RELEASE
16+
================================
17+
518
The Bug Expedition
619
v1.8 (April 1, 2018)
720

@@ -12,7 +25,7 @@ v1.8 (April 1, 2018)
1225
- Removed and consolidated some of the files in the libs/ directory
1326

1427
================================
15-
LAST RELEASE
28+
OLDER RELEASE
1629
================================
1730

1831
The Glorious Update

OGCheckr.py

Lines changed: 0 additions & 23 deletions
This file was deleted.

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
![OGCheckr CLI username availability checker application](http://d.pr/i/M94CyF+ "OGCheckr CLI")
1+
![OGCheckr CLI username availability checker application](https://i.imgur.com/77zuFqp.png "OGCheckr CLI")
22

33
For additional information and installation instructions, view the wiki.
4-
https://github.com/ogplus/cli-checker/wiki/
4+
https://github.com/crock/cli-checker/wiki/

config.ini

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
; If your target site is not listed, put "1" for CUSTOM (without the quotes).
2525
siteNum = 5
2626
; Fill in the option below with the profile URL of the service you want to check available names for.
27-
; Use %%word%% as the placeholder for the username to check.
27+
; Use %%word%% or %%name%% as the placeholder for the username to check.
2828
; customSite is only for sites not specifically listed in the chart above, but please be aware
2929
; that not every site will work this way. If there is a service you would like to see support for, please
3030
; don't hesitate to let Croc know.
@@ -38,11 +38,10 @@ output = AVAILABLE.txt
3838
wordList = EXAMPLE-50.txt
3939

4040
[proxy]
41-
; To enable proxy support, put True. To disable, put False
42-
enableProxy = False
43-
; To enable proxy filtering (sorting out the bad proxies that no longer work), put True.
44-
; To disable, put False. It has no effect if enableProxy is False
45-
proxyFiltering = True
41+
; Should the checker use proxies for checking?
42+
enableProxy = no
43+
; Should the checker filter out the bad proxies before going through the word list?
44+
proxyFiltering = yes
4645
; If proxy support is enabled, you must specify the path to the proxy list you want to use here
4746
; Place all proxy lists in the proxy_lists directory
4847
; Place one proxy per line in this format --> ip:port

lib/ConfigHelper.py

Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,71 @@
import configparser
import os
import re

# Regex patterns
PLACEHOLDER = r"%%(name|word)%%"           # username placeholder inside a custom URL
URLPATT = r"(^https?:\/\/[-.a-zA-Z0-9]+)"  # scheme + host prefix of a URL
DOMAIN = r"(?:https:\/\/)?(?:\w+\.)?(\w+)\.\w+\/?"  # captures the bare domain name

# Parsed once at import time. A missing config.ini is silently ignored by
# configparser.read(), so every getter below supplies a fallback where one
# makes sense.
config = configparser.ConfigParser()
config.read('config.ini')


class ConfigHelper:
    """Typed accessors over config.ini for the checker scripts."""

    def getSite(self):
        # Site number selects which service's profile-URL template to use.
        return config.getint('site', 'siteNum', fallback=5)

    def getCustomUrl(self):
        """Return the custom profile URL from [site] customSite, or None
        when it lacks a %%word%% / %%name%% placeholder."""
        url = config.get('site', 'customSite')
        # BUGFIX: re.match() only matches at the START of the string, so a
        # real URL such as https://site.com/%%word%% was never accepted.
        # re.search() finds the placeholder anywhere in the URL.
        if re.search(PLACEHOLDER, url):
            return url

    def enableProxy(self):
        # getboolean() already accepts yes/no, on/off, true/false, 1/0.
        return config.getboolean('proxy', 'enableProxy', fallback=False)

    def proxyFiltering(self):
        return config.getboolean('proxy', 'proxyFiltering', fallback=False)

    def getProxies(self, filename_only=False):
        """Return the proxies from the configured list (one per line), or
        just the list's filename when filename_only is True.

        Exits the process (matching the CLI's original intent) when the
        list file is missing/empty or proxy support is disabled.
        """
        if filename_only is True:
            return config.get('proxy', 'proxyList')
        path = os.path.join("proxy_lists", config.get('proxy', 'proxyList'))
        # BUGFIX: the original tested `path is not None`, which is always
        # true for an os.path.join() result, so the error branches below
        # were dead code and a missing file crashed with FileNotFoundError.
        if os.path.isfile(path):
            # `with` guarantees the handle is closed even if read() fails.
            with open(path, 'r') as fx:
                proxies = fx.read().split('\n')
            if any(p.strip() for p in proxies):
                return proxies
            # BUGFIX: the original checked `proxies is None`, which could
            # never fire (proxies was initialized to []).
            print("Specified proxy list is empty. Please add some proxies.")
            exit()
        elif not self.enableProxy():
            print("Proxy support is disabled. Please enable it in the config.")
            exit()
        else:
            print("Unknown error.")
            exit()

    def getWords(self):
        """Return the words from the configured word list, one per line,
        or None (after printing a debug message) when the file is missing."""
        path = os.path.join("word_lists", config.get('lists', 'wordList'))
        # BUGFIX: same always-true `path is not None` test as getProxies();
        # check that the file actually exists instead.
        if os.path.isfile(path):
            with open(path, 'r') as fx:
                return fx.read().split('\n')
        print("Word list not found.\n[DEBUG] %s" % path)

    def getOutputList(self):
        # File that available names are written to.
        return config.get('lists', 'output', fallback="AVAILABLE.txt")

    def numThreads(self):
        # No fallback on purpose: a missing [multithreading] section
        # should fail loudly rather than silently pick a thread count.
        return config.getint('multithreading', 'threadCount')

lib/ProxyHelper.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -3,16 +3,16 @@
33
from queue import Queue
44
import time
55
import requests
6-
from lib.configure import getProxyList as PROXYLIST
7-
from lib.configure import numThreads as THREADCOUNT
8-
from lib.configure import config
6+
from lib.ConfigHelper import ConfigHelper
97

10-
class ProxyHelper():
8+
ch = ConfigHelper()
9+
10+
class ProxyHelper():
1111

1212
def __init__(self):
1313
self.session = requests.Session()
14-
self.proxies = PROXYLIST()
15-
self.numProxies = len(PROXYLIST())
14+
self.proxies = ch.getProxies()
15+
self.numProxies = len(ch.getProxies())
1616
self.print_lock = threading.Lock()
1717
self.queue = Queue()
1818
self.good = []
@@ -59,7 +59,7 @@ def checkProxies(self):
5959
start = time.time()
6060

6161
print("Starting up threads...")
62-
for x in range(THREADCOUNT()):
62+
for x in range(ch.numThreads()):
6363
t = threading.Thread(target = self.threader)
6464
t.daemon = True
6565
t.start()
@@ -85,6 +85,6 @@ def checkProxies(self):
8585
numBad = len(self.bad)
8686
print("\nSearched %s proxies and filtered out %s bad proxies in %s seconds" % (self.numProxies, numBad, total))
8787

88-
path = "proxy_lists/%s" % config["proxy"]["proxyList"]
88+
path = "proxy_lists/%s" % ch.getProxies(filename_only=True)
8989
os.remove(path)
9090
os.rename('proxy_lists/good_proxies.txt', path)

lib/configure.py

Lines changed: 0 additions & 88 deletions
This file was deleted.

lib/cookie.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,13 @@
11
import requests
22
from lib.replace import *
3-
from lib.configure import getSite as SITE
3+
from lib.ConfigHelper import ConfigHelper
4+
from lib.ProxyHelper import ProxyHelper
5+
6+
ch = ConfigHelper()
7+
ph = ProxyHelper()
48

59
s = requests.Session()
610

711
def get_cookie():
8-
r = s.get(URLS[SITE()])
12+
r = s.get(URLS[ch.getSite()])
913
return r.cookies

lib/get.py

Lines changed: 13 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -3,42 +3,37 @@
33
import threading
44
from queue import Queue
55
import time
6-
from lib.ProxyHelper import ProxyHelper
76
from lib.log import log_result
87
from lib.replace import replace
9-
from lib.configure import enableProxy as PROXY
10-
from lib.configure import getProxyList as PROXYLIST
11-
from lib.configure import getSite as SITE
12-
from lib.configure import numThreads as THREADS
13-
from lib.configure import getWordList as WORD_LIST
8+
from lib.ConfigHelper import ConfigHelper
9+
from lib.ProxyHelper import ProxyHelper
1410

15-
print_lock = threading.Lock()
11+
ch = ConfigHelper()
12+
ph = ProxyHelper()
1613

17-
# Reads word list from file and adds each name to array words[]
18-
fx = open(WORD_LIST(), 'r')
19-
words = fx.read().split('\n')
20-
fx.close()
14+
print_lock = threading.Lock()
15+
words = ch.getWords()
2116

2217
def requestJob(item):
2318
word = words[item]
2419

25-
if SITE()==3 and not 4<len(word)<16:
20+
if ch.getSite()==3 and not 4<len(word)<16:
2621
with print_lock:
2722
print("["+threading.current_thread().name+"] "+word+" is UNAVAILABLE on twitter because it has illegal length.")
28-
elif SITE()==10 and not len(word)<40:
23+
elif ch.getSite()==10 and not len(word)<40:
2924
with print_lock:
3025
print("["+threading.current_thread().name+"] "+word+" is UNAVAILABLE on github because it has illegal length.")
31-
elif SITE()==13 and not 2<len(word)<21:
26+
elif ch.getSite()==13 and not 2<len(word)<21:
3227
with print_lock:
3328
print("["+threading.current_thread().name+"] "+word+" is UNAVAILABLE on pastebin because it has illegal length.")
3429
else:
3530

3631
link = replace(word)
3732
s = requests.Session()
38-
if PROXY() == "True":
39-
plist = PROXYLIST()
33+
if ch.enableProxy():
34+
plist = ch.getProxies()
4035
i = random.randrange(0, plist.__len__())
41-
sess = ProxyHelper().setProxy(s, plist[i])
36+
sess = ph.setProxy(s, plist[i])
4237
r = sess.get(link)
4338
else:
4439
r = s.get(link)
@@ -54,7 +49,7 @@ def threader():
5449
start = time.time()
5550

5651
q = Queue()
57-
for x in range(THREADS()):
52+
for x in range(ch.numThreads()):
5853
t = threading.Thread(target = threader)
5954
t.daemon = True
6055
t.start()

lib/headers.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
1-
from lib.configure import getSite as SITE
1+
from lib.ConfigHelper import ConfigHelper
2+
3+
ch = ConfigHelper()
24

35
def prepare_headers(cookie):
4-
if SITE() == 4:
6+
if ch.getSite() == 4:
57
return {
68
"referer":"https://www.instagram.com",
79
"x-csrftoken": cookie['csrftoken']

0 commit comments

Comments
 (0)