diff --git a/spse_excercises/exc_mod1_p9_user_defined_exception.py b/spse_excercises/exc_mod1_p9_user_defined_exception.py
new file mode 100644
index 0000000..a57baf8
--- /dev/null
+++ b/spse_excercises/exc_mod1_p9_user_defined_exception.py
@@ -0,0 +1,13 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Derive from Exception class (inheritance)
+class MyError(Exception):
+    def __init__(self, value):
+        self.value = value  # keep the caller's value (was silently overwritten with a constant)
+
+try:
+    # parentheses around the raised instance are redundant: raise X(...) suffices
+    raise MyError(3 * 2)
+except MyError as error:
+    print('User defined exception says: ', error.value)
diff --git a/spse_excercises/exc_mod2_p4_syn_scan_threaded.py b/spse_excercises/exc_mod2_p4_syn_scan_threaded.py
new file mode 100644
index 0000000..e3fb80a
--- /dev/null
+++ b/spse_excercises/exc_mod2_p4_syn_scan_threaded.py
@@ -0,0 +1,24 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Python multi-thread syn scanner
+
+#import thread
+from threading import Thread
+from scapy.all import *
+import time
+
+ip = "127.0.0.1"  # scan target
+ports = [20,21,22,23,53,80,139,443,445,5222,8080]  # commonly interesting TCP service ports
+
+result=""  # NOTE(review): unused — threads print directly; a Queue would be needed to collect results
+
+def scan(ip, cur_port):
+    # SYN-probe one port; sr1 waits for a single reply and returns None on timeout.
+    syn = IP(dst=ip)/TCP(dport=cur_port, flags="S")
+    answer = sr1(syn, verbose=0, timeout=2)
+    if answer is not None: print answer.summary()  # guard: closed/filtered ports yield None, .summary() would crash
+
+# Fan out one non-daemon thread per port; the interpreter exits after all finish.
+for cur_port in ports:
+    scanner = Thread(target=scan, args=(ip, cur_port))
+    scanner.start()
diff --git a/spse_excercises/exc_mod2_p5_syn_scan_multiprocessing.py b/spse_excercises/exc_mod2_p5_syn_scan_multiprocessing.py
new file mode 100644
index 0000000..2bb6bcb
--- /dev/null
+++ b/spse_excercises/exc_mod2_p5_syn_scan_multiprocessing.py
@@ -0,0 +1,21 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import multiprocessing
+from scapy.all import *
+
+ip = "127.0.0.1"  # scan target
+ports = [20,21,22,23,53,80,139,443,445,5222,8080]  # commonly interesting TCP service ports
+
+def worker(ip, cur_port):
+    syn = IP(dst=ip)/TCP(dport=cur_port, flags="S")  # craft the SYN probe
+    answer = sr1(syn, verbose=0, timeout=2)  # sr1 returns None when nothing answers in time
+    if answer is not None: print answer.summary()  # guard against the None-on-timeout crash
+    return
+
+if __name__ == '__main__':
+    procs = []  # keep the Process handles referenced
+    for cur in ports:
+        proc = multiprocessing.Process(target=worker, args=(ip, cur))
+        procs.append(proc)
+        proc.start()
diff --git a/spse_excercises/exc_mod2_p5_thread_queue_ftp.py b/spse_excercises/exc_mod2_p5_thread_queue_ftp.py
new file mode 100644
index 0000000..e087d25
--- /dev/null
+++ b/spse_excercises/exc_mod2_p5_thread_queue_ftp.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+from Queue import Queue
+from threading import Thread
+import ftplib
+
+# Random list from random hosts
+# For the complete exercise, use 10 FTP sites... but it already works, at least.
+ftp_list = ["210.222.148.126","81.177.6.66", "69.168.79.145", "79.24.166.214", "103.209.143.86"]
+
+
+def do_work(q, cur_ftp):
+    while True:
+        q.get()  # block for a work token; the original called task_done() without ever get(), raising ValueError
+        ftp = ftplib.FTP(cur_ftp)
+        ftp.login("anonymous", "")
+        data = []
+        ftp.dir(data.append)  # collect one directory listing line per callback
+        ftp.quit()
+        for line in data:
+            print "-", line
+        q.task_done()  # pairs with the get() above so q.join() can complete
+
+q = Queue(maxsize=0)
+num_threads = 5  # NOTE(review): unused — one daemon thread is started per host below
+
+for cur_ftp in ftp_list:
+    worker = Thread(target=do_work, args=(q,cur_ftp))
+    worker.setDaemon(True)
+    worker.start()
+
+# One work token per host (was a magic 50); q.join() returns once each is task_done().
+for x in range(len(ftp_list)):
+    q.put(x)
+q.join()
diff --git a/spse_excercises/exc_mod3_p3_cgi.py b/spse_excercises/exc_mod3_p3_cgi.py
new file mode 100644
index 0000000..ec7f60d
--- /dev/null
+++ b/spse_excercises/exc_mod3_p3_cgi.py
@@ -0,0 +1,25 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# - Python can be used as CGI script
+# - There is cgi module
+# - Flask can handle cgi scripts too
+
+# https://docs.python.org/2/library/cgi.html
+# http://cgi.tutorial.codepoint.net/hellow-world
+# http://flask.pocoo.org/docs/0.12/deploying/cgi/
+
+import cgitb
+cgitb.enable(display=1)  # display=1 renders tracebacks in the browser — CGI debugging aid
+# ..and cgi scripting
+
+# Or cgi called directly by other app:
+print "Content-Type: text/html"  # minimal CGI response header
+print  # blank line terminates the HTTP header block
+print """\
+
+
+Hello World!
+
+
+"""
diff --git a/spse_excercises/exc_mod3_p5_raw_socket_arp.py b/spse_excercises/exc_mod3_p5_raw_socket_arp.py
new file mode 100644
index 0000000..e59e0ad
--- /dev/null
+++ b/spse_excercises/exc_mod3_p5_raw_socket_arp.py
@@ -0,0 +1,26 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import socket
+import struct
+
+rawSocket = socket.socket(socket.PF_PACKET, socket.SOCK_RAW, socket.htons(0x0800))  # raw layer-2 socket; 0x0800 = ETH_P_IP (needs CAP_NET_RAW)
+
+rawSocket.bind(("eth0", socket.htons(0x0800)))  # NOTE(review): interface name hard-coded — confirm eth0 exists
+
+# layer 2 message, then data
+# src mac / dst mac / eth type
+# arpaket = struct.pack("!6s6s2s", '\xaa\xaa\xaa\xaa\xaa\xaa', '\xbb\xbb\xbb\xbb\xbb\xbb','\x08\x60') # 14 bytes
+
+# ARP --> dst mac / src mac / type 8006 / |ARP ~ hw type / proto type / hw addr / proto addr / operation code /
+# / src hw addr / src proto addr / target hw addr / target proto addr | / Padding / CRC - note here is that padding and crc is not mandatory :)
+# https://www.netometer.com/qa/arp.html
+
+arpaket = struct.pack("!6s6s2s2s2s1s1s2s6s4s6s4s", '\xaa\xaa\xaa\xaa\xaa\xaa', '\xbb\xbb\xbb\xbb\xbb\xbb', '\x08\x06','\x00\x01','\x08\x00','\x06','\x04','\x00\x01','\xcc\xcc\xcc\xcc\xcc\xcc','\xc0\xa8\x06\x06','\xdd\xdd\xdd\xdd\xdd\xdd','\xc0\xa8\x06\x07')  # ARP request (op 0x0001): who-has 192.168.6.7, tell 192.168.6.6
+
+rawSocket.send(arpaket)  # frame goes out verbatim; PF_PACKET/SOCK_RAW adds no headers
+
+#print "Length of the ARP packet sent: " + str(len(arpaket))
+
+#arpreply = struct.pack("!6s6s2s2s2s1s1s2s6s4s6s4s", '\xaa\xaa\xaa\xaa\xaa\xaa', '\xbb\xbb\xbb\xbb\xbb\xbb', '\x08\x06','\x00\x01','\x08\x00','\x06','\x04','\x00\x02','\xcc\xcc\xcc\xcc\xcc\xcc','\xc0\xa8\x06\x06','\xdd\xdd\xdd\xdd\xdd\xdd','\xc0\xa8\x06\x07')
+#rawSocket.send(arpreply)
diff --git a/spse_excercises/exc_mod3_p6_sniff_http_with_scapy.py b/spse_excercises/exc_mod3_p6_sniff_http_with_scapy.py
new file mode 100644
index 0000000..40fff27
--- /dev/null
+++ b/spse_excercises/exc_mod3_p6_sniff_http_with_scapy.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+from scapy.all import sniff
+
+def http_header(packet):
+    http_packet = str(packet)
+    # "in" fixes the old str.find() bug: find returns -1 (truthy) on a miss, matching nearly every packet
+    if 'GET' in http_packet or 'POST' in http_packet: return GET_print(packet)
+
+def GET_print(packet1):
+    # Render the raw TCP payload with literal "\r\n" sequences expanded to newlines.
+    return "\n".join(packet1.sprintf("{Raw:%Raw.load%}\n").split(r"\r\n")) + "\n"
+
+sniff(iface="eth0", prn=http_header, filter="tcp port 80", count=99)  # capture 99 port-80 packets on eth0, printing matches via http_header
+
+
+# Another solution: pip install scapy-http
diff --git a/spse_excercises/exc_mod3_p6_wifi_ssid_sniff.py b/spse_excercises/exc_mod3_p6_wifi_ssid_sniff.py
new file mode 100644
index 0000000..e9e8f07
--- /dev/null
+++ b/spse_excercises/exc_mod3_p6_wifi_ssid_sniff.py
@@ -0,0 +1,17 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Note: general solution posted multiple times.
+
+from scapy.all import sniff, Dot11
+
+aps = []  # transmitter MACs (addr2) already reported, to suppress duplicates
+
+def PacketHandler(pkt):
+    # Report each previously-unseen beacon transmitter (mgmt frame type 0, subtype 8).
+    is_beacon = pkt.haslayer(Dot11) and pkt.type == 0 and pkt.subtype == 8
+    if is_beacon and pkt.addr2 not in aps:
+        aps.append(pkt.addr2)
+        print "SSID found: %s " %(pkt.info)
+
+sniff(iface="wlan0mon", prn=PacketHandler)  # iface name suggests a monitor-mode interface — confirm wlan0mon exists
diff --git a/spse_excercises/exc_mod3_p8_scapy_fuzz.py b/spse_excercises/exc_mod3_p8_scapy_fuzz.py
new file mode 100644
index 0000000..a105a0d
--- /dev/null
+++ b/spse_excercises/exc_mod3_p8_scapy_fuzz.py
@@ -0,0 +1,13 @@
+#!/usr/bin/python
+#-*- coding: utf-8 -*-
+
+# Docs for interactive use
+# https://scapy.readthedocs.io/en/latest/usage.html?highlight=fuzz#fuzzing
+# Interactive example: >>> send(IP(dst="127.0.0.1")/fuzz(UDP()/NTP(version=4)),loop=1)
+
+from scapy.all import sr1,IP,fuzz,UDP,NTP
+
+# The first target assignment ("127.0.0.1") was dead code, immediately overwritten; keep one.
+target = "192.168.49.39"
+while True:
+    sr1(IP(dst=target)/fuzz(UDP()/NTP(version=4)),inter=4,timeout=1)
diff --git a/spse_excercises/exc_mod3_raw_packet_as_user.txt b/spse_excercises/exc_mod3_raw_packet_as_user.txt
new file mode 100644
index 0000000..af88331
--- /dev/null
+++ b/spse_excercises/exc_mod3_raw_packet_as_user.txt
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+
+# cp /usr/bin/python2.7 python
+# setcap cap_net_raw+ep python
+# chmod +x python
+# ./python 29_raw_socket_struct_binascii.py
+
+# or a restricted sudo privilege which is probably better, not allowing priv esc :)
diff --git a/spse_excercises/exc_mod4_p7_multithread_web_spider_mysql.py b/spse_excercises/exc_mod4_p7_multithread_web_spider_mysql.py
new file mode 100644
index 0000000..271894c
--- /dev/null
+++ b/spse_excercises/exc_mod4_p7_multithread_web_spider_mysql.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# A project for parsing a website, then inserting the data into mysql and serving the mysql data through socket http server.
+# All is managed through multiprocessing (one process per task).
+#
+# If the mysql table does not exist, this script will create it, but the database and user should be created in advance!
+
+import multiprocessing
+import MySQLdb as mdb
+from lxml import html
+import requests
+import time
+from time import gmtime, strftime
+import SocketServer
+import SimpleHTTPServer
+
+# Proxy
+proxies = {
+ 'http': "socks5://127.0.0.1:9050",
+ 'https': "socks5://127.0.0.1:9050"}  # presumably a local Tor SOCKS endpoint (9050 is Tor's default) — confirm
+
+# Spidering
+def spidering_l(proc):
+    # Periodically scrape all <a> links from the site and persist them to MySQL.
+    # 'proc' is just the Process slot id; unused but kept for the caller's API.
+    while True:
+        # Fetch the page through the SOCKS proxy.
+        # Fixed: a stray ')' after the URL made this line a syntax error.
+        page = requests.get('https://riskdiscovery.com/', proxies=proxies)
+        tree = html.fromstring(page.content)
+        state_time = strftime("%Y%m%d%H%M%S", gmtime())  # crawl timestamp
+        spider_list_link = []
+        spider_list_text = []
+        # Collect href/text for every anchor on the page.
+        for link in tree.xpath('//a'):
+            spider_list_link.append(link.get('href'))
+            spider_list_text.append(link.text)
+        # Persist this crawl's rows.
+        con = None  # pre-bind so the finally clause cannot NameError if connect() raises
+        try:
+            con = mdb.connect('localhost', 'someuser', 'somepass', 'news_parse')
+            cur = con.cursor()
+            # Create the table once per crawl, not once per row as before.
+            cur.execute("CREATE TABLE IF NOT EXISTS news_data ( id INT(6) UNSIGNED AUTO_INCREMENT PRIMARY KEY, state_time VARCHAR(30) NOT NULL, link VARCHAR(512) NOT NULL, text VARCHAR(1024))")
+            con.commit()
+            for link_c, text_c in zip(spider_list_link, spider_list_text):
+                # Parameterized INSERT: page content never touches the SQL string.
+                cur.execute("INSERT INTO news_data (id, state_time, link, text) VALUES (NULL, %s, %s, %s)",(state_time,link_c,text_c))
+                con.commit()
+        except mdb.Error, e:
+            print e
+        finally:
+            if con:
+                con.close()
+        # Re-crawl once a minute.
+        time.sleep(60)
+    return
+
+
+# Serve HTTP
+class HttpRequestHandler (SimpleHTTPServer.SimpleHTTPRequestHandler) :
+    # Serves the deduplicated scraped-link list at "/"; any other path falls
+    # through to the stock static-file handler.
+    def do_GET(self) :
+        last_list = ""
+        if self.path == "/":
+            # Pre-bind con so the finally clause cannot raise NameError when
+            # mdb.connect() itself fails (the original left con unbound here).
+            con = None
+            try:
+                con = mdb.connect('localhost', 'someuser', 'somepass', 'news_parse')
+                cur = con.cursor()
+                cur.execute("SELECT * FROM news_data")
+                data = cur.fetchall()
+                # Deduplicate on the link column (index 2) with a set.
+                sset = set()
+                for x in data:
+                    sset.add(x[2])
+                for xx in sset:
+                    last_list = last_list + xx + '\n'
+            except mdb.Error, e:
+                # Best-effort: log the DB error and respond with whatever was
+                # accumulated so far (possibly the empty string).
+                print e
+            finally:
+                if con:
+                    con.close()
+            # NOTE(review): no status line/headers are written before the body;
+            # lenient clients accept this, strict ones may not. Left unchanged
+            # to keep observable behavior identical apart from the fixes above.
+            self.wfile.write('The latest news from the past 72 hours: \n' + str(last_list))
+
+        else:
+            SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
+
+def serve_http(proc):
+    # Stand up a blocking HTTP server on localhost:8080 ('proc' is unused).
+    # allow_reuse_address avoids bind errors on quick restarts (TIME_WAIT).
+    SocketServer.TCPServer.allow_reuse_address = True
+    httpServer = SocketServer.TCPServer(("127.0.0.1", 8080), HttpRequestHandler)
+    print "It should listen on 8080."
+    httpServer.serve_forever()
+
+
+if __name__ == '__main__':
+
+    # Two long-running children: the spider and the HTTP front-end.
+    jobs = []
+
+    # Child 1: scrape the site into MySQL once a minute.
+    spider_proc = multiprocessing.Process(target=spidering_l, args=(0,))
+    jobs.append(spider_proc)
+    spider_proc.start()
+
+    # Child 2: serve the collected rows on 127.0.0.1:8080.
+    http_proc = multiprocessing.Process(target=serve_http, args=(0,))
+    jobs.append(http_proc)
+    http_proc.start()