Browse Source

Exercises for the SPSE exam.

master
sıx 5 years ago committed by GitHub
parent
commit
a002d96104
  1. 13
      spse_excercises/exc_mod1_p9_user_defined_exception.py
  2. 24
      spse_excercises/exc_mod2_p4_syn_scan_threaded.py
  3. 21
      spse_excercises/exc_mod2_p5_syn_scan_multiprocessing.py
  4. 36
      spse_excercises/exc_mod2_p5_thread_queue_ftp.py
  5. 25
      spse_excercises/exc_mod3_p3_cgi.py
  6. 26
      spse_excercises/exc_mod3_p5_raw_socket_arp.py
  7. 18
      spse_excercises/exc_mod3_p6_sniff_http_with_scapy.py
  8. 17
      spse_excercises/exc_mod3_p6_wifi_ssid_sniff.py
  9. 13
      spse_excercises/exc_mod3_p8_scapy_fuzz.py
  10. 9
      spse_excercises/exc_mod3_raw_packet_as_user.txt
  11. 119
      spse_excercises/exc_mod4_p7_multithread_web_spider_mysql.py

13
spse_excercises/exc_mod1_p9_user_defined_exception.py

@ -0,0 +1,13 @@ @@ -0,0 +1,13 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Derive from Exception class (inheritance)
class MyError(Exception):
    """User-defined exception that carries the value it was raised with."""

    def __init__(self, value):
        # Store the caller's value. The original hard-coded "...oooops!",
        # silently discarding the argument passed to MyError(...).
        self.value = value


# Demonstrate raising and catching the user-defined exception.
try:
    raise MyError(3 * 2)
except MyError as error:
    # Single-argument print works identically under Python 2 and 3
    # (the original two-argument form printed a tuple under Python 2).
    print('User defined exception says: %s' % error.value)

24
spse_excercises/exc_mod2_p4_syn_scan_threaded.py

@ -0,0 +1,24 @@ @@ -0,0 +1,24 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Python multi-thread syn scanner
#import thread
from threading import Thread
from scapy.all import *
import time
# Scan target (loopback by default) and a list of commonly open TCP ports.
ip = "127.0.0.1"
ports = [20,21,22,23,53,80,139,443,445,5222,8080]
# NOTE(review): `result` is never read or written below -- leftover scaffolding?
result=""
def scan(ip, cur_port):
    """Send one TCP SYN to ip:cur_port and print the reply's summary.

    sr1() is used because only a single reply packet is expected; it
    returns None when nothing comes back within the timeout (filtered or
    dropped probe), so the result must be checked before use.
    """
    syn = IP(dst=ip) / TCP(dport=cur_port, flags="S")
    answer = sr1(syn, verbose=0, timeout=2)
    # The original crashed with AttributeError on answer.summary() when
    # the probe timed out and sr1() returned None.
    if answer is None:
        print("No response from %s:%d" % (ip, cur_port))
    else:
        print(answer.summary())
# For return values and communication between threads a Queue would be the
# right tool; these workers only print, so we just start one thread per port.
threads = []
for port in ports:
    th = Thread(target=scan, args=(ip, port))
    th.start()
    threads.append(th)
# Join every worker so the script does not exit while probes are in flight.
for th in threads:
    th.join()

21
spse_excercises/exc_mod2_p5_syn_scan_multiprocessing.py

@ -0,0 +1,21 @@ @@ -0,0 +1,21 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import multiprocessing
from scapy.all import *
# Scan target (loopback by default) and a list of commonly open TCP ports.
ip = "127.0.0.1"
ports = [20,21,22,23,53,80,139,443,445,5222,8080]
def worker(ip, cur_port):
    """Send one TCP SYN to ip:cur_port and print the reply's summary.

    sr1() is used because a single reply packet is expected; it returns
    None when nothing arrives within the timeout, so guard before using it.
    """
    syn = IP(dst=ip) / TCP(dport=cur_port, flags="S")
    answer = sr1(syn, verbose=0, timeout=2)
    # The original crashed on answer.summary() when the port was filtered.
    if answer is None:
        print("No response from %s:%d" % (ip, cur_port))
    else:
        print(answer.summary())
    return
if __name__ == '__main__':
    # One process per port; join them all so the parent waits for every
    # probe instead of racing its children to exit.
    jobs = []
    for port in ports:
        p = multiprocessing.Process(target=worker, args=(ip, port))
        jobs.append(p)
        p.start()
    for p in jobs:
        p.join()

36
spse_excercises/exc_mod2_p5_thread_queue_ftp.py

@ -0,0 +1,36 @@ @@ -0,0 +1,36 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from Queue import Queue
from threading import Thread
import ftplib
# Random list from random hosts
# For the complete exercise, 10 ftp sites... but it's working already at least.
ftp_list = ["210.222.148.126","81.177.6.66", "69.168.79.145", "79.24.166.214", "103.209.143.86"]


def do_work(q):
    """Worker loop: take an FTP host off the queue and list its root directory.

    Runs until the (daemonic) thread is killed at interpreter exit.  Every
    q.get() is paired with exactly one q.task_done() so q.join() can account
    for outstanding work.
    """
    while True:
        cur_ftp = q.get()
        try:
            ftp = ftplib.FTP(cur_ftp)
            ftp.login("anonymous", "")
            data = []
            ftp.dir(data.append)
            ftp.quit()
            for line in data:
                print("- " + line)
        except Exception as exc:
            # Best-effort: report the failure and keep the worker alive
            # for the remaining hosts.
            print("! %s: %s" % (cur_ftp, exc))
        finally:
            # Always acknowledge the item -- even on failure -- so the
            # q.join() below cannot hang forever.
            q.task_done()


q = Queue(maxsize=0)
num_threads = 5
# Fixed-size worker pool pulling hosts from the queue.  The original paired
# threads 1:1 with hosts, queued 50 dummy integers nobody consumed, and
# called task_done() without a matching get() -- a Queue protocol error.
for _ in range(num_threads):
    worker = Thread(target=do_work, args=(q,))
    worker.setDaemon(True)
    worker.start()
# Feed the hosts and wait until every queued host has been processed.
for cur_ftp in ftp_list:
    q.put(cur_ftp)
q.join()

25
spse_excercises/exc_mod3_p3_cgi.py

@ -0,0 +1,25 @@ @@ -0,0 +1,25 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# - Python can be used as CGI script
# - There is cgi module
# - Flask can handle cgi scripts too
# https://docs.python.org/2/library/cgi.html
# http://cgi.tutorial.codepoint.net/hellow-world
# http://flask.pocoo.org/docs/0.12/deploying/cgi/
import cgitb

# Show tracebacks in the browser while developing the CGI script.
cgitb.enable(display=1)

# A CGI response is plain stdout: header lines, one blank line, then the body.
# Single-argument print(...) calls behave identically under Python 2 and 3
# (the original bare `print` statements are Python-2-only syntax).
print("Content-Type: text/html")
print("")
print("""\
<html>
<body>
<h2>Hello World!</h2>
</body>
</html>
""")

26
spse_excercises/exc_mod3_p5_raw_socket_arp.py

@ -0,0 +1,26 @@ @@ -0,0 +1,26 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import socket
import struct
# PF_PACKET raw socket (Linux only, needs CAP_NET_RAW); 0x0800 = ETH_P_IP.
rawSocket = socket.socket(socket.PF_PACKET, socket.SOCK_RAW, socket.htons(0x0800))
rawSocket.bind(("eth0", socket.htons(0x0800)))
# Hand-built ARP request, field by field (see netometer.com/qa/arp.html):
# Ethernet: dst MAC / src MAC / ethertype 0x0806 (ARP)
# ARP: hw type / proto type / hw len / proto len / opcode / sender MAC /
#      sender IP / target MAC / target IP.  Padding and CRC are optional.
# Bytes literals pack identically under Python 2 (where b'' is str) and 3.
arpaket = struct.pack(
    "!6s6s2s2s2s1s1s2s6s4s6s4s",
    b'\xaa\xaa\xaa\xaa\xaa\xaa',  # destination MAC
    b'\xbb\xbb\xbb\xbb\xbb\xbb',  # source MAC
    b'\x08\x06',                  # ethertype: ARP
    b'\x00\x01',                  # hardware type: Ethernet
    b'\x08\x00',                  # protocol type: IPv4
    b'\x06',                      # hardware address length
    b'\x04',                      # protocol address length
    b'\x00\x01',                  # opcode: 1 = ARP request (2 = reply)
    b'\xcc\xcc\xcc\xcc\xcc\xcc',  # sender MAC
    b'\xc0\xa8\x06\x06',          # sender IP: 192.168.6.6
    b'\xdd\xdd\xdd\xdd\xdd\xdd',  # target MAC
    b'\xc0\xa8\x06\x07',          # target IP: 192.168.6.7
)
try:
    rawSocket.send(arpaket)
finally:
    # The original leaked the raw socket; release it explicitly.
    rawSocket.close()

18
spse_excercises/exc_mod3_p6_sniff_http_with_scapy.py

@ -0,0 +1,18 @@ @@ -0,0 +1,18 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from scapy.all import sniff
def http_header(packet):
    """Return the rendered HTTP payload if *packet* looks like a request.

    The original tested `str.find(...)` for truthiness, which is wrong in
    both directions: find() returns -1 (truthy) on a miss and 0 (falsy)
    for a match at position 0.  A membership test expresses the intent.
    """
    http_packet = str(packet)
    if 'GET' in http_packet or 'POST' in http_packet:
        return GET_print(packet)
def GET_print(packet1):
    """Render packet1's Raw payload with literal '\\r\\n' markers as newlines."""
    # sprintf pulls the Raw layer's load; escape sequences in it appear
    # as the two-character text "\r\n", hence the raw-string split key.
    raw_payload = packet1.sprintf("{Raw:%Raw.load%}\n")
    request_lines = raw_payload.split(r"\r\n")
    http_out = "\n".join(request_lines) + "\n"
    return http_out
# Capture 99 packets on TCP port 80 of eth0 (requires root / CAP_NET_RAW)
# and run http_header on each captured packet.
sniff(iface="eth0", prn=http_header, filter="tcp port 80", count=99)
# Another solution: pip install scapy-http

17
spse_excercises/exc_mod3_p6_wifi_ssid_sniff.py

@ -0,0 +1,17 @@ @@ -0,0 +1,17 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Note: general solution posted multiple times.
from scapy.all import sniff, Dot11
# MAC addresses (addr2) of access points whose beacons we have already seen.
aps = []


def PacketHandler(pkt):
    """Print the SSID of every previously unseen 802.11 beacon frame."""
    if pkt.haslayer(Dot11):
        # type 0 / subtype 8 identifies a management beacon frame.
        if pkt.type == 0 and pkt.subtype == 8:
            if pkt.addr2 not in aps:
                aps.append(pkt.addr2)
                # Single-argument print works under both Python 2 and 3.
                print("SSID found: %s " % (pkt.info,))


# The interface must already be in monitor mode (e.g. set up via airmon-ng).
sniff(iface="wlan0mon", prn=PacketHandler)

13
spse_excercises/exc_mod3_p8_scapy_fuzz.py

@ -0,0 +1,13 @@ @@ -0,0 +1,13 @@
#!/usr/bin/python
#-*- coding: utf-8 -*-
# Docs for interactive use
# https://scapy.readthedocs.io/en/latest/usage.html?highlight=fuzz#fuzzing
# Interactive example: >>> send(IP(dst="127.0.0.1")/fuzz(UDP()/NTP(version=4)),loop=1)
from scapy.all import sr1,IP,fuzz,UDP,NTP
# Fuzzing target.  The original first assigned "127.0.0.1" and immediately
# overwrote it, so only the LAN host below was ever used; the dead
# assignment is kept as a comment for local loopback testing.
# target = "127.0.0.1"
target = "192.168.49.39"
# Send one fuzzed NTPv4-over-UDP packet per iteration; sr1() waits up to
# 1 second for a reply before the loop fires the next mutation.
while True:
    sr1(IP(dst=target)/fuzz(UDP()/NTP(version=4)),inter=4,timeout=1)

9
spse_excercises/exc_mod3_raw_packet_as_user.txt

@ -0,0 +1,9 @@ @@ -0,0 +1,9 @@
#!/bin/sh
# cp /usr/bin/python2.7 python
# setcap cap_net_raw+ep python
# chmod +x python
# ./python 29_raw_socket_struct_binascii.py
# or a restricted sudo privilege which is probably better, not allowing priv esc :)

119
spse_excercises/exc_mod4_p7_multithread_web_spider_mysql.py

@ -0,0 +1,119 @@ @@ -0,0 +1,119 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# A project for parsing a website, then inserting the data into mysql and serving the mysql data through socket http server.
# All is managed through multithreading.
#
# If the mysql table does not exist, this script will create it, but the database and user should be created in advance!
import multiprocessing
import MySQLdb as mdb
from lxml import html
import requests
import time
from time import gmtime, strftime
import SocketServer
import SimpleHTTPServer
# Proxy
# Route all spider traffic through a local SOCKS5 proxy
# (port 9050 is Tor's default SOCKS listener).
proxies = {
'http': "socks5://127.0.0.1:9050",
'https': "socks5://127.0.0.1:9050"}
# Spidering
def spidering_l(proc):
    """Spider loop: fetch the site once a minute and store its links in MySQL.

    proc: unused worker index, kept only for the multiprocessing.Process
    call signature used in __main__.
    """
    while True:
        # Fetch the page through the SOCKS proxy.
        # NOTE(review): the original line had a stray ")" after the URL --
        # a syntax error -- which also left proxies= outside the call.
        page = requests.get('https://riskdiscovery.com/', proxies=proxies)
        tree = html.fromstring(page.content)
        # One timestamp per crawl so rows from the same pass share a state id.
        state_time = strftime("%Y%m%d%H%M%S", gmtime())
        spider_list_link = []
        spider_list_text = []
        # Collect href + anchor text for every <a> element on the page.
        for link in tree.xpath('//a'):
            spider_list_link.append(link.get('href'))
            spider_list_text.append(link.text)
        # Persist into MySQL.  con is pre-bound so `finally` is always safe.
        con = None
        try:
            con = mdb.connect('localhost', 'someuser', 'somepass', 'news_parse')
            cur = con.cursor()
            # Create the table once per pass -- the original re-ran this
            # DDL (and committed) for every single row.
            cur.execute("CREATE TABLE IF NOT EXISTS news_data ( id INT(6) UNSIGNED AUTO_INCREMENT PRIMARY KEY, state_time VARCHAR(30) NOT NULL, link VARCHAR(512) NOT NULL, text VARCHAR(1024))")
            con.commit()
            for link_c, text_c in zip(spider_list_link, spider_list_text):
                # Parameterized INSERT -- values are never string-concatenated
                # into the SQL text.
                cur.execute("INSERT INTO news_data (id, state_time, link, text) VALUES (NULL, %s, %s, %s)", (state_time, link_c, text_c))
            con.commit()
        except mdb.Error as e:
            print(e)
        finally:
            if con:
                con.close()
        time.sleep(60)
# Serve HTTP
class HttpRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    """Serves "/" from the news_data MySQL table; anything else falls back
    to the default static-file handler."""

    def do_GET(self):
        last_list = ""
        if self.path == "/":
            # Pre-bind con: in the original, a failing mdb.connect() left
            # `con` undefined and the finally block raised NameError.
            con = None
            try:
                con = mdb.connect('localhost', 'someuser', 'somepass', 'news_parse')
                cur = con.cursor()
                cur.execute("SELECT * FROM news_data")
                data = cur.fetchall()
                # Deduplicate on the link column (index 2) via a set.
                sset = set()
                for x in data:
                    sset.add(x[2])
                for xx in sset:
                    last_list = last_list + xx + '\n'
            except mdb.Error as e:
                print(e)
            finally:
                if con:
                    con.close()
            self.wfile.write('The latest news from the past 72 hours: \n' + str(last_list))
        else:
            # Delegate everything that is not "/" to the stock handler.
            SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
def serve_http(proc):
    """Serve HttpRequestHandler forever on 127.0.0.1:8080 (blocks).

    proc: unused worker index, kept only for the multiprocessing.Process
    call signature used in __main__.
    """
    # allow_reuse_address avoids "Address already in use" on quick restarts.
    SocketServer.TCPServer.allow_reuse_address = True
    httpServer = SocketServer.TCPServer(("127.0.0.1", 8080), HttpRequestHandler)
    print("It should listen on 8080.")
    httpServer.serve_forever()
if __name__ == '__main__':
    jobs = []
    # Start the spider in its own process.
    mp = multiprocessing.Process(target=spidering_l, args=(0,))
    jobs.append(mp)
    mp.start()
    # Serve the stored results over HTTP in a second process.
    hp = multiprocessing.Process(target=serve_http, args=(0,))
    jobs.append(hp)
    hp.start()
    # Both children loop forever; joining keeps the parent explicitly
    # alive alongside them instead of relying on implicit exit behavior.
    for job in jobs:
        job.join()
Loading…
Cancel
Save