author    Cody Hiar <cody@hiar.ca>    2019-10-16 10:36:14 -0600
committer Cody Hiar <cody@hiar.ca>    2019-10-16 10:36:14 -0600
commit    81f10379aa6de8da03ea9553d54252435a48dbea (patch)
tree      c023c7f11d846bd6ab87c5946764dcf13f135e6c
Initial commit
-rw-r--r--   .flake8                           2
-rw-r--r--   .gitignore                        2
-rw-r--r--   constants.py                     27
-rw-r--r--   examples/example1/library1.py     1
-rw-r--r--   examples/example1/library2.py     2
-rw-r--r--   examples/example1/main.py         4
-rw-r--r--   examples/example2/library1.py    12
-rw-r--r--   examples/example2/main.py        13
-rw-r--r--   examples/example3/README.md      26
-rw-r--r--   examples/example3/foo.py          3
-rw-r--r--   original.py                      97
-rw-r--r--   refactor.py                     133
-rw-r--r--   slides.md                       306
13 files changed, 628 insertions, 0 deletions
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..2bcd70e
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,2 @@
+[flake8]
+max-line-length = 88
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..0c4323f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+.mypy_cache
+__pycache__
diff --git a/constants.py b/constants.py
new file mode 100644
index 0000000..4489d98
--- /dev/null
+++ b/constants.py
@@ -0,0 +1,27 @@
+"""Constants for refactored script."""
+PASSLIST = (
+    "password=",
+    "Password=",
+    "pass=",
+    "Pass=",
+    "pwd=",
+    "PWD=",
+    "secret=",
+)
+USERLIST = (
+    "user=",
+    "User=",
+    "username=",
+    "Username=",
+    "usr=",
+    "login=",
+    "Login=",
+    "name=",
+    "Name=",
+    "email=",
+    "Email=",
+    "auth",
+    "Auth",
+    "log=",
+    "Log=",
+)
diff --git a/examples/example1/library1.py b/examples/example1/library1.py
new file mode 100644
index 0000000..dfcc0c1
--- /dev/null
+++ b/examples/example1/library1.py
@@ -0,0 +1 @@
+print('Library 1 Loading')
diff --git a/examples/example1/library2.py b/examples/example1/library2.py
new file mode 100644
index 0000000..fb7cc63
--- /dev/null
+++ b/examples/example1/library2.py
@@ -0,0 +1,2 @@
+if __name__ == "__main__":
+    print("Library 2 loading")
diff --git a/examples/example1/main.py b/examples/example1/main.py
new file mode 100644
index 0000000..2ef6802
--- /dev/null
+++ b/examples/example1/main.py
@@ -0,0 +1,4 @@
+import library1
+import library2
+
+print("main complete")
diff --git a/examples/example2/library1.py b/examples/example2/library1.py
new file mode 100644
index 0000000..a47a6da
--- /dev/null
+++ b/examples/example2/library1.py
@@ -0,0 +1,12 @@
+import logging
+import sys
+
+logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+logger = logging.getLogger(__name__)
+
+
+def foo():
+    logger.debug("Library1")
+
+if __name__ == "__main__":
+    foo()
diff --git a/examples/example2/main.py b/examples/example2/main.py
new file mode 100644
index 0000000..12b8343
--- /dev/null
+++ b/examples/example2/main.py
@@ -0,0 +1,13 @@
+import logging
+import sys
+
+import library1
+
+logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+logger = logging.getLogger(__name__)
+
+# Uncomment below to mute Library1's debug
+# logging.getLogger("library1").setLevel(logging.INFO)
+library1.foo()
+
+logger.debug("Main File")
diff --git a/examples/example3/README.md b/examples/example3/README.md
new file mode 100644
index 0000000..d7363e5
--- /dev/null
+++ b/examples/example3/README.md
@@ -0,0 +1,26 @@
+Launch ipython
+
+```
+ipython
+```
+
+Turn on autoreloading
+
+```
+%load_ext autoreload
+%autoreload 2
+```
+
+Import the bar function from foo
+
+```
+from foo import bar
+```
+
+Run bar
+
+```
+bar()
+```
+
+Make changes to what bar prints, save, and run again
diff --git a/examples/example3/foo.py b/examples/example3/foo.py
new file mode 100644
index 0000000..0768844
--- /dev/null
+++ b/examples/example3/foo.py
@@ -0,0 +1,3 @@
+
+def bar():
+    print("bar")
diff --git a/original.py b/original.py
new file mode 100644
index 0000000..17c4c27
--- /dev/null
+++ b/original.py
@@ -0,0 +1,97 @@
+#pylint: disable=C0103, C0301, C0325
+import json
+import urllib
+import logging
+import requests #http request library, api access
+from requests.exceptions import ConnectionError #has to be explicit, not auto-included?
+from OpenSSL import crypto #If you're going to engineer it, overengineer it
+import base64
+from scapy.all import *
+try:
+    import scapy_http.http
+except ImportError:
+    from scapy.layers import http #complicated because this layer was originally a third-party addin, but now included
+#Change the following line to change log verbosity
+# ** DO NOT deploy with logging.DEBUG or passwords will be logged to the configured destination! This is a bad thing!
+logging.basicConfig(filename='sniffer.log', filemode='w', level=logging.INFO)
+post_headers = {'Content-Type': 'application/json', 'Connection' : 'close'}
+passlist = {
+    "password=", "Password=", "pass=", "Pass=", "pwd=", "PWD=", "secret="
+    } #add more keys here to scrape out of POSTS
+userlist = {
+    "user=", "User=", "username=", "Username=", "usr=", "login=",
+    "Login=", "name=", "Name=", "email=", "Email=", "auth", "Auth",
+    "log=", "Log="
+    } #add more keys here to scrape out of POSTS
+
+APIURI = "https://lol.nope/redacted"
+infra_client_ID = "ALSO-REDACT" #ID provided by infra apps
+key_file = open("private.pem", "r") #don't send this file to anyone mkay
+shakey = key_file.read()
+
+key_file.close()
+if shakey.startswith('-----BEGIN '):
+    pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, shakey)
+else:
+    pkey = crypto.load_pkcs12(shakey).get_privatekey()
+
+def packet_handler(pkt): #Called as a handler for scapy below
+    try:
+        if pkt[2][1].Method == "POST":
+            #logging.debug(pkt[2][1].Host + pkt[2][1].Path)
+            #(getattr(pkt[2][1], "Content-Type")) How you have to access this parm because - are not valid in var names?!?
+            if getattr(pkt[2][1], "Content-Type") == "application/x-www-form-urlencoded":
+                #print bytes(pkt.payload)
+                raw_form = bytes(pkt[2][2])
+                splitified = raw_form.split("\r\n\r\n") #split on double crlf as per http spec, this will be content
+                for passw in passlist:
+                    if passw in splitified[1]:
+                        logging.debug(splitified)
+                        for userstr in userlist:
+                            if userstr in splitified[1]:
+                                user_rip = urllib.unquote(splitified[1].split(userstr)[1].split("&")[0]).encode('utf-8').strip()
+                        try:
+                            print(user_rip)
+                        except UnboundLocalError: #username not found in blob; bail
+                            logging.debug("username bail")
+                            break
+                        pass_rip = urllib.unquote(splitified[1].split(passw)[1].split("&")[0]).encode('utf-8').strip()
+
+                        try:
+                            print(pass_rip)
+                        except UnboundLocalError: #password not found in blob; bail
+                            logging.debug("password bail")
+                            break
+
+                        json_data = \
+                        {
+                            'username': user_rip,
+                            'password': pass_rip,
+                            'destination_ip': pkt[IP].dst,
+                            'destination_port': pkt[TCP].dport,
+                            'service': "http",
+                            'url': pkt[2][1].Host + pkt[2][1].Path
+                        }
+                        json_parsed_results = json.dumps(json_data)
+                        logging.debug(json_parsed_results)
+                        #sign the json
+                        signature = "SHA256 Credential=" + infra_client_ID + ",Signature=" + \
+                            base64.b64encode(crypto.sign(pkey, json_parsed_results, "sha256"))
+                        logging.debug(signature)
+                        post_headers['Authorization'] = signature
+                        logging.info("Password sniffed; Source: " + pkt[IP].src + \
+                                     ", u: " + json_data['username'] + \
+                                     ", dest: " + json_data['destination_ip'] + \
+                                     ", url: " + json_data['url'])
+                        res = requests.post(APIURI, data=json_parsed_results, headers=post_headers)
+                        logging.info("API response was: " + str(res))
+    except IndexError:
+        pass #for debug, remove or handle me
+    except AttributeError:
+        pass #for debug, remove or handle me
+    except ConnectionError:
+        logging.error("Failed to connect to infra apps endpoint; Continuing, but previous request has been discarded!")
+#scapy filters uses BPF filters on an intel nic which is compiled to bytecode on the nic and as a result is very fast but behaviour on other platforms may be "undefined"
+results = sniff(iface="enp10s0", prn=packet_handler, filter="tcp port 80 and (src net xxx.xxx.xxx.xxx mask xxx.xxx.0.0 or src net xxx.xxx.0.0 mask xxx.xxx.0.0)", store=0)
+
+print(results) #just to see how many packets are dropping
diff --git a/refactor.py b/refactor.py
new file mode 100644
index 0000000..46566a6
--- /dev/null
+++ b/refactor.py
@@ -0,0 +1,133 @@
+# pylint: disable=C0103, C0301, C0325
+import base64
+import itertools
+import json
+import logging
+import urllib
+
+import requests
+from OpenSSL import crypto
+from requests.exceptions import ConnectionError
+
+from scapy.all import * # NOQA: F403
+
+try:
+    import scapy_http.http  # NOQA: F401
+except ImportError:
+    # complicated because this layer was originally a third-party addin, but now
+    # included.
+    from scapy.layers import http  # NOQA: F401
+
+import constants
+
+"""
+DO NOT deploy with logging.DEBUG or passwords will be logged to the configured
+destination! This is a bad thing! Change the following line to change log verbosity
+"""
+logging.basicConfig(filename="sniffer.log", filemode="w", level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+APIURI = "https://lol.nope/redacted"
+INFRA_CLIENT_ID = "ALSO-REDACT"
+
+
+def load_private_key():
+    """Load the private key."""
+    key_file = open("private.pem", "r")
+    shakey = key_file.read()
+    key_file.close()
+    if shakey.startswith("-----BEGIN "):
+        return crypto.load_privatekey(crypto.FILETYPE_PEM, shakey)
+    else:
+        return crypto.load_pkcs12(shakey).get_privatekey()
+
+
+def _parse_password_and_user(passw, userstr, splitified_str):
+    """Attempt to parse password and user from string."""
+    user_rip = None
+    pass_rip = None
+    if passw in splitified_str:
+        pass_rip = (
+            urllib.unquote(splitified_str.split(passw)[1].split("&")[0])
+            .encode("utf-8")
+            .strip()
+        )
+    if userstr in splitified_str:
+        user_rip = (
+            urllib.unquote(splitified_str.split(userstr)[1].split("&")[0])
+            .encode("utf-8")
+            .strip()
+        )
+    return pass_rip, user_rip
+
+
+def _packet_handler(pkt):
+    """Handle the packet."""
+    if pkt[2][1].Method != "POST":
+        return
+    if getattr(pkt[2][1], "Content-Type") != "application/x-www-form-urlencoded":
+        return
+    raw_form = bytes(pkt[2][2])
+    pkey = load_private_key()
+    splitified = raw_form.split("\r\n\r\n")
+    password_and_users = itertools.product(constants.PASSLIST, constants.USERLIST)
+    for passw, userstr in password_and_users:
+        pass_rip, user_rip = _parse_password_and_user(passw, userstr, splitified[1])
+        if pass_rip and user_rip:
+            src_ip = pkt[IP].src  # NOQA: F405
+            dest_ip = pkt[IP].dst  # NOQA: F405
+            url = pkt[2][1].Host + pkt[2][1].Path
+            json_data = {
+                "username": user_rip,
+                "password": pass_rip,
+                "destination_ip": dest_ip,
+                "destination_port": pkt[TCP].dport,  # NOQA: F405
+                "service": "http",
+                "url": url,
+            }
+            json_parsed_results = json.dumps(json_data)
+            signature_base64 = base64.b64encode(
+                crypto.sign(pkey, json_parsed_results, "sha256")
+            )
+            signature = (
+                f"SHA256 Credential={INFRA_CLIENT_ID},Signature={signature_base64}"
+            )
+            post_headers = {
+                "Content-Type": "application/json",
+                "Connection": "close",
+                "Authorization": signature,
+            }
+            msg = f"Password sniffed; Source: {src_ip}, "
+            msg += f"u: {user_rip}, dest: {dest_ip}, url: {url}"
+            logger.info(msg)
+            res = requests.post(APIURI, data=json_parsed_results, headers=post_headers)
+            logger.info("API response was: " + str(res))
+
+
+def packet_handler(pkt):
+    """Process the packet."""
+    try:
+        _packet_handler(pkt)
+    except (IndexError, AttributeError) as e:
+        logger.error(e)
+    except ConnectionError:
+        msg = "Failed to connect to infra apps endpoint; Continuing, but previous request has been discarded!"  # NOQA: E501
+        logger.error(msg)
+
+
+def main():
+    """Execute main function of file.
+
+    scapy's filters use BPF filters on an intel nic which are compiled to bytecode
+    on the nic and as a result are very fast, but behaviour on other platforms may
+    be "undefined"
+    """
+    packet_filter = "tcp port 80 and (src net xxx.xxx.xxx.xxx mask xxx.xxx.0.0 or src net xxx.xxx.0.0 mask xxx.xxx.0.0)"  # NOQA: E501
+    results = sniff(  # NOQA: F405
+        iface="enp10s0", prn=packet_handler, filter=packet_filter, store=0
+    )
+    print(results)  # just to see how many packets are dropping
+
+
+if __name__ == "__main__":
+    main()
diff --git a/slides.md b/slides.md
new file mode 100644
index 0000000..f01762a
--- /dev/null
+++ b/slides.md
@@ -0,0 +1,306 @@
+%title: Python Script Refactor
+%author: Cody Hiar
+%date: 2019-10-15
+
+-> Python Script Refactor <-
+============================
+
+-------------------------------------------------
+
+# About Me
+
+* Graduated Computer Engineering at U of A
+* Now working remotely @ Blendable
+* Vim/Tmux Diehard, also cli in general
+* Interests: Python, Automation, DevOps, Linux
+
+# Where I be
+
+* www.codyhiar.com
+* www.github.com/thornycrackers
+
+# Past Presentations (available on GitHub)
+
+* Docker for Homo Troglodytes (YEGSEC)
+* Scraping with scrapy (YEGSEC)
+* Python Daemons (Edmonton.py)
+* Setting Django up on a VPS (Edmonton.py)
+
+-------------------------------------------------
+
+-> My Goals <-
+==============
+
+* Give a quick rundown on tools
+* Show some "cool" tricks
+* Give a couple of tips on modularizing code to move past the single script.
+
+-------------------------------------------------
+
+-> Stop Calling it Bad Code <-
+==============================
+
+-> https://blog.pragmaticengineer.com/bad-code/ <-
+
+-> Props to the brave soul who supplied the code <-
+
+-------------------------------------------------
+
+-> Automatic Formatting <-
+==========================
+
+```
+pip3 install --user isort
+pip3 install --user black
+isort myscript.py
+black myscript.py
+```
+
+Automatic formatters in ANY language are always a great step towards learning
+how to write clean code. They also require little to no investment on your
+behalf. Almost all modern languages will have some sort of tool available.
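+
+As a rough before/after illustration (a hypothetical snippet, not from this
+repo): isort groups standard-library imports apart from third-party ones, and
+black normalizes quoting and spacing.
+
+```
+# Before
+import requests
+import json
+opts = {'retries':3,'timeout':10}
+
+# After running isort and black
+import json
+
+import requests
+
+opts = {"retries": 3, "timeout": 10}
+```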
+
+-------------------------------------------------
+
+-> Manual Formatting <-
+=======================
+
+```
+pip3 install --user flake8
+pip3 install --user flake8-bugbear
+pip3 install --user flake8-docstrings
+pip3 install --user flake8-isort
+pip3 install --user pep8-naming
+pip3 install --user pydocstyle
+```
+
+Flake8 existed before black and has LOTS of plugins to finely tune the rules
+to your liking. Flake8 requires you to make manual changes but will help you
+keep your scripts consistent during your refactors. `NOTE:` flake8 defaults to
+a 79-character line length whereas black allows up to 88. To make sure they
+play nicely together you can create a `.flake8` file and update
+`max-line-length`. This repo has an example.
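+
+For reference, the `.flake8` file included in this repo is simply:
+
+```
+[flake8]
+max-line-length = 88
+```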
+
+-------------------------------------------------
+
+-> Global Constants use Capitals and Tuples instead of list/set <-
+==================================================================
+
+```
+APIURI = "https://lol.nope/redacted"
+infra_client_ID = "ALSO-REDACT"
+userlist = { ... }
+```
+
+vs
+
+```
+APIURI = "https://lol.nope/redacted"
+INFRA_CLIENT_ID = "ALSO-REDACT"
+USERLIST = ( ... )
+```
+
+Capital letters are a typical pattern in Python to signify global static vars
+(e.g. `crypto.FILETYPE_PEM`). As your script grows, other files can reference
+global constants instead of magic numbers (crypto.FILETYPE_PEM == 1). Using
+tuples instead of lists means that code cannot modify your values. I like to
+put the tuples into a separate file called `constants.py` so the top of my
+file is cleaner.
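+
+The refactored script then just imports that module and references the tuples
+by name (taken from `refactor.py`):
+
+```
+import itertools
+
+import constants
+
+password_and_users = itertools.product(constants.PASSLIST, constants.USERLIST)
+```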
+
+-------------------------------------------------
+
+-> Loading the key file into its own function <-
+=============================================
+
+Instead of the file being loaded into the global namespace it can be loaded
+when it is needed. Other scripts can also reuse this function if they need to
+access that file. This builds toward a larger idea of writing for modularity.
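+
+In the refactored script this becomes a small function (taken from
+`refactor.py`):
+
+```
+def load_private_key():
+    """Load the private key."""
+    key_file = open("private.pem", "r")
+    shakey = key_file.read()
+    key_file.close()
+    if shakey.startswith("-----BEGIN "):
+        return crypto.load_privatekey(crypto.FILETYPE_PEM, shakey)
+    else:
+        return crypto.load_pkcs12(shakey).get_privatekey()
+```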
+
+-------------------------------------------------
+
+-> Long Strings into Variables Before Function Calls <-
+=======================================================
+
+```
+results = sniff(iface="enp10s0", prn=packet_handler, filter="...", store=0)
+```
+
+vs
+
+```
+packet_filter = "..."
+results = sniff(iface="enp10s0", prn=packet_handler, filter=packet_filter, store=0)
+```
+
+In the first example the end of the function call is pushed off the screen,
+whereas in the second we can see the entire call and most of the filter
+string. Other devs will appreciate not having to scroll just to read the
+function being called.
+
+-------------------------------------------------
+
+-> Consider "not" logic to save on indents <-
+=============================================
+
+```
+if pkt[2][1].Method == "POST":
+    if getattr(pkt[2][1], "Content-Type") == "application/x-www-form-urlencoded":
+        {code block}
+```
+
+vs
+
+
+```
+if pkt[2][1].Method != "POST":
+    return
+if getattr(pkt[2][1], "Content-Type") != "application/x-www-form-urlencoded":
+    return
+{code block}
+```
+
+If you have deeply nested code it can sometimes be more valuable to test for
+the opposite of what you are looking for and return early, which saves on deep
+indentation and gives your code more room to breathe.
+
+-------------------------------------------------
+
+-> Use if/else vs except UnboundLocalError <-
+=============================================
+
+```
+try:
+    print(pass_rip)
+except UnboundLocalError: #password not found in blob; bail
+    logging.debug("password bail")
+    break
+```
+
+vs
+
+```
+if not pass_rip:
+    logging.debug("password bail")
+    break
+print(pass_rip)
+```
+
+We can use an if statement to check whether the variable was set instead of
+relying on try/except.
+
+-------------------------------------------------
+
+-> itertools.product vs Double for loop <-
+==========================================
+
+```
+for passw in passlist:
+    if passw in splitified[1]:
+        for userstr in userlist:
+            if userstr in splitified[1]:
+                {code block}
+```
+
+vs
+
+```
+password_and_users = itertools.product(passlist, userlist)
+for passw, userstr in password_and_users:
+    if passw in splitified[1] and userstr in splitified[1]:
+        {code block}
+```
+
+`itertools.product` will compute the Cartesian product of two iterables so
+we can avoid nested for loops and checks.
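+
+A toy example of what the pairs look like (illustrative values, not the full
+tuples from `constants.py`):
+
+```
+>>> import itertools
+>>> list(itertools.product(("pwd=", "pass="), ("user=", "login=")))
+[('pwd=', 'user='), ('pwd=', 'login='), ('pass=', 'user='), ('pass=', 'login=')]
+```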
+
+-------------------------------------------------
+
+-> Handling Multiple Exceptions <-
+==================================
+
+```
+except IndexError:
+    pass #for debug, remove or handle me
+except AttributeError:
+    pass #for debug, remove or handle me
+except ConnectionError:
+    logging.error("...")
+```
+
+vs
+
+```
+except (IndexError, AttributeError):
+    pass
+except ConnectionError:
+    logging.error("...")
+```
+
+If multiple exceptions are handled the same way, we can catch them all in a
+single except clause.
+
+-------------------------------------------------
+
+-> Using __name__ == '__main__' <-
+==================================
+
+```
+mystr = "Hello"
+print(mystr)
+```
+
+vs
+
+```
+def main():
+    mystr = "Hello"
+    print(mystr)
+
+if __name__ == "__main__":
+    main()
+```
+
+^
+
+Using this little trick our `main` function is only run when the file is
+called directly from the command line; if another Python script imports our
+file, that code will not be run.
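+
+`examples/example1` in this repo shows the difference: `library1.py` prints at
+module level while `library2.py` guards its print, so running `main.py` shows
+library1's import-time output but not library2's. It should print something
+like:
+
+```
+$ python main.py
+Library 1 Loading
+main complete
+```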
+
+-------------------------------------------------
+
+-> logger = logging.getLogger(__name__) <-
+==========================================
+
+```
+logging.basicConfig(...)
+logging.info(...)
+```
+
+vs
+
+```
+logging.basicConfig(...)
+logger = logging.getLogger(__name__)
+logger.info(...)
+```
+
+^
+
+In this example we are letting the runtime name the logger for our file. This
+helps prevent namespace collisions and also makes the logger in each file
+unique, so we can fine-tune each logger to our specific needs, e.g. mute
+noisy files, set the log level higher or lower, send to different sources, or
+adjust formatting.
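+
+`examples/example2` in this repo demonstrates this: `main.py` can mute
+`library1`'s debug output without touching its code:
+
+```
+# from examples/example2/main.py
+logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+logger = logging.getLogger(__name__)
+
+# Uncomment below to mute Library1's debug
+# logging.getLogger("library1").setLevel(logging.INFO)
+library1.foo()
+```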
+
+-------------------------------------------------
+
+-> Using a REPL for developing code <-
+======================================
+
+This is by far the best way to get hands-on experience with code and it will
+help you decide how to organize it. It will help expose weak points and also
+aid in developing more modular code. If you do not want to use a REPL you can
+also just write a small script that imports and uses your code. Some good
+REPLs: bpython, ipython, ptpython.
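+
+`examples/example3` in this repo walks through this with ipython's autoreload
+extension; a typical session looks something like:
+
+```
+$ ipython
+In [1]: %load_ext autoreload
+In [2]: %autoreload 2
+In [3]: from foo import bar
+In [4]: bar()
+bar
+```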
+
+-------------------------------------------------
+
+-> Questions, comments, concerns? <-
+====================================