JSON_KEY_IP = "ip"
JSON_KEY_IP_SRC = "ip.src"
JSON_KEY_IP_DST = "ip.dst"
+# Checked protocols
JSON_KEY_UDP = "udp"
JSON_KEY_TCP = "tcp"
-JSON_KEY_MDNS = "mdns"
-JSON_KEY_BOOTP = "bootp"
-JSON_KEY_SSDP = "ssdp"
-JSON_KEY_DHCPV6 = "dhcpv6"
-JSON_KEY_LLMNR = "llmnr"
+# List of checked protocols
+listchkprot = [ "bootp",
+ "dhcpv6",
+ "dns",
+ "llmnr",
+ "mdns",
+ "ssdp" ]
def parse_json(file_path):
# First parse the file once, constructing a map that contains information about individual devices' DNS resolutions.
device_dns_mappings = parser.parse_dns.parse_json_dns(file_path) # "./json/eth1.dump.json"
-
+
# Init empty graph
G = nx.DiGraph()
# Parse file again, this time constructing a graph of device<->server and device<->device communication.
# Read JSON.
# data becomes reference to root JSON object (or in our case json array)
data = json.load(jf)
+
# Loop through json objects (packets) in data
for p in data:
# p is a JSON object, not an index
# Drill down to object containing data from the different layers
layers = p[JSON_KEY_SOURCE][JSON_KEY_LAYERS]
- # Skip all MDNS traffic.
- if JSON_KEY_MDNS in layers:
- continue
-
- # Skip all LLMNR traffic.
- if JSON_KEY_LLMNR in layers:
- continue
-
- # Skip all SSDP traffic - we don't care about disovery, only the actual communication.
- if JSON_KEY_SSDP in layers:
- continue
-
- # Skip all bootp traffic (DHCP related)
- if JSON_KEY_BOOTP in layers:
- continue
-
- # Skip DHCPv6 for now.
- if JSON_KEY_DHCPV6 in layers:
- continue
+ iscontinue = False
+ for prot in listchkprot:
+ if prot in layers:
+ iscontinue = True
+ if iscontinue:
+ continue
# Skip any non udp/non tcp traffic
if JSON_KEY_UDP not in layers and JSON_KEY_TCP not in layers:
ip_src = layers[JSON_KEY_IP][JSON_KEY_IP_SRC]
ip_dst = layers[JSON_KEY_IP][JSON_KEY_IP_DST]
- # ipre = re.compile(r'\b192.168.[0-9.]+')
- # src_is_local = ipre.search(ip_src)
- # dst_is_local = ipre.search(ip_dst)
+ # Categorize source and destination IP addresses: local vs. non-local
+ ipre = re.compile(r'\b192.168.[0-9.]+')
+ src_is_local = ipre.search(ip_src)
+ dst_is_local = ipre.search(ip_dst)
print "ip.src =", ip_src, "ip.dst =", ip_dst
- src_is_local = ip_src.startswith("192.168.")
- dst_is_local = ip_dst.startswith("192.168.")
src_node = None
dst_node = None
-
if src_is_local:
G.add_node(eth_src, Name=devlist[eth_src])
src_node = eth_src
else:
- # If the source is not local, then it's inbound traffic, and hence the eth_dst is the MAC of the IoT device.
- hostname = device_dns_mappings[eth_dst].hostname_for_ip_at_time(ip_src, packet_timestamp)
+ hostname = None
+ # Check first if the key (eth_dst) exists in the dictionary
+ if eth_dst in device_dns_mappings:
+ # If the source is not local, then it's inbound traffic, and hence the eth_dst is the MAC of the IoT device.
+ hostname = device_dns_mappings[eth_dst].hostname_for_ip_at_time(ip_src, packet_timestamp)
if hostname is None:
# Use IP if no hostname mapping
hostname = ip_src
G.add_node(eth_dst, Name=devlist[eth_src])
dst_node = eth_dst
else:
- # If the destination is not local, then it's outbound traffic, and hence the eth_src is the MAC of the IoT device.
- hostname = device_dns_mappings[eth_src].hostname_for_ip_at_time(ip_dst, packet_timestamp)
+ hostname = None
+                                       # Check first if the key (eth_src) exists in the dictionary
+ if eth_src in device_dns_mappings:
+ # If the destination is not local, then it's outbound traffic, and hence the eth_src is the MAC of the IoT device.
+ hostname = device_dns_mappings[eth_src].hostname_for_ip_at_time(ip_dst, packet_timestamp)
if hostname is None:
# Use IP if no hostname mapping
hostname = ip_dst
dst_node = hostname
G.add_edge(src_node, dst_node)
-# # Traffic can be both outbound and inbound.
-# # Determine which one of the two by looking up device MAC in DNS map.
-# iot_device = None
-# if eth_src in device_dns_mappings:
-# iot_device = eth_src
-# elif eth_dst in device_dns_mappings:
-# iot_device = eth_dst
-# else:
-# # print "[ WARNING: DNS mapping not found for device with MAC", eth_src, "OR", eth_dst, "]"
-# # This must be local communication between two IoT devices OR an IoT device talking to a hardcoded IP.
-# # For now let's assume local communication.
-# # Add a node for each device and an edge between them.
-# G.add_node(eth_src, Name=devlist[eth_src])
-# G.add_node(eth_dst, Name=devlist[eth_src])
-# G.add_edge(eth_src, eth_dst)
-# # TODO add regex check on src+dst IP to figure out if hardcoded server IP (e.g. check if one of the two are NOT a 192.168.x.y IP)
-# continue
-# # It is outbound traffic if iot_device matches src, otherwise it must be inbound traffic.
-# outbound_traffic = iot_device == eth_src
-
-
-
-# ''' Graph construction '''
-# # No need to check if the Nodes and/or Edges we add already exist:
-# # NetworkX won't add already existing nodes/edges (except in the case of a MultiGraph or MultiDiGraph (see NetworkX doc)).
-
-# # Add a node for each host.
-# # First add node for IoT device.
-# G.add_node(iot_device, Name=devlist[eth_src])
-# # Then add node for the server.
-# # For this we need to distinguish between outbound and inbound traffic so that we look up the proper IP in our DNS map.
-# # For outbound traffic, the server's IP is the destination IP.
-# # For inbound traffic, the server's IP is the source IP.
-
-# server_ip = ip_dst if outbound_traffic else ip_src
-# hostname = device_dns_mappings[iot_device].hostname_for_ip_at_time(server_ip, packet_timestamp)
-# if hostname is None:
-# # TODO this can occur when two local devices communicate OR if IoT device has hardcoded server IP.
-# # However, we only get here for the DNS that have not performed any DNS lookups
-# # We should use a regex check early in the loop to see if it is two local devices communicating.
-# # This way we would not have to consider these corner cases later on.
-# # print "[ WARNING: no ip-hostname mapping found for ip", server_ip, " -- adding eth.src->eth.dst edge, but note that this may be incorrect if IoT device has hardcoded server IP ]"
-# G.add_node(eth_src, Name=devlist[eth_src])
-# G.add_node(eth_dst, Name=devlist[eth_src])
-# G.add_edge(eth_src, eth_dst)
-# continue
-# G.add_node(hostname)
-# # Connect the two nodes we just added.
-# if outbound_traffic:
-# G.add_edge(iot_device, hostname)
-# else:
-# G.add_edge(hostname, iot_device)
+ # Print DNS mapping for reference
+ for mac in device_dns_mappings:
+ ddm = device_dns_mappings[mac]
+ ddm.print_mappings()
+
return G
# ------------------------------------------------------
b0:b9:8a:73:69:8f, RouterPort_ETH1
b0:b9:8a:73:69:90, RouterPort_WLAN0
b0:b9:8a:73:69:91, RouterPort_WLAN1
+74:da:38:0d:05:55, RaspberryPi_Controller
+++ /dev/null
-#!/usr/bin/python\r
-\r
-"""\r
-Script used to extract only the needed information from JSON packet traces generated by\r
-tshark from PCAPNG format\r
-"""\r
-\r
-import os, sys\r
-import json\r
-import uuid\r
-\r
-from collections import OrderedDict\r
-\r
-json_key_source = "_source"\r
-json_key_layers = "layers"\r
-\r
-json_key_ip = "ip"\r
-json_key_tcp = "tcp"\r
-\r
-json_key_http = "http"\r
-json_key_method = "method"\r
-json_key_uri = "uri"\r
-json_key_headers = "headers"\r
-json_key_host = "host"\r
-\r
-json_key_http_req = json_key_http + ".request."\r
-json_key_http_req_method = json_key_http_req + json_key_method\r
-json_key_http_req_uri = json_key_http_req + json_key_uri\r
-json_key_http_req_line = json_key_http_req + "line"\r
-\r
-json_key_pkt_comment = "pkt_comment"\r
-\r
-json_key_frame = "frame"\r
-json_key_frame_num = json_key_frame + ".number"\r
-json_key_frame_comment = json_key_frame + ".comment"\r
-json_key_frame_ts = json_key_frame + ".time_epoch"\r
-\r
-\r
-JSON_KEY_ETH = "eth"\r
-JSON_KEY_ETH_SRC = "eth.src"\r
-JSON_KEY_ETH_DST = "eth.dst"\r
-\r
-\r
-def make_unique(key, dct):\r
- counter = 0\r
- unique_key = key\r
-\r
- while unique_key in dct:\r
- counter += 1\r
- unique_key = '{}_{}'.format(key, counter)\r
- return unique_key\r
-\r
-\r
-def parse_object_pairs(pairs):\r
- dct = OrderedDict()\r
- for key, value in pairs:\r
- if key in dct:\r
- key = make_unique(key, dct)\r
- dct[key] = value\r
-\r
- return dct\r
-\r
-def change_file(fpath):\r
- for fn in os.listdir(fpath):\r
- full_path = fpath + '/' + fn\r
-\r
- # Recursively go through all directories\r
- if os.path.isdir(full_path):\r
- change_file(full_path)\r
- continue\r
-\r
- print full_path\r
- with open(full_path, "r+") as jf:\r
- # Since certain json 'keys' appear multiple times in our data, we have to make them\r
- # unique first (we can't use regular json.load() or we lose some data points). From:\r
- # https://stackoverflow.com/questions/29321677/python-json-parser-allow-duplicate-keys\r
- decoder = json.JSONDecoder(object_pairs_hook=parse_object_pairs)\r
- pcap_data = decoder.decode(jf.read())\r
-\r
- # Prepare new data structure for re-formatted JSON storage\r
- data = {}\r
- for packet in pcap_data:\r
- layers = packet[json_key_source][json_key_layers]\r
-\r
- # All captured traffic should have a frame + frame number, but check anyway\r
- frame_num = " Frame: "\r
- if json_key_frame not in layers or json_key_frame_num not in layers[json_key_frame]:\r
- print "WARNING: could not find frame number! Using -1..."\r
- frame_num = frame_num + "-1"\r
- else:\r
- # Save frame number for error-reporting\r
- frame_num = frame_num + layers[json_key_frame][json_key_frame_num]\r
-\r
- # All captured traffic should be IP, but check anyway\r
- if not json_key_ip in layers:\r
- print "WARNING: Non-IP traffic detected!" + frame_num\r
- continue\r
-\r
- # For now, focus on HTTP only\r
- if json_key_tcp not in layers or json_key_http not in layers:\r
- continue\r
-\r
- # Fill our new JSON packet with TCP/IP info\r
- new_packet = {}\r
- new_packet["dst_ip"] = layers[json_key_ip][json_key_ip + ".dst"]\r
- new_packet["dst_port"] = int(layers[json_key_tcp][json_key_tcp + ".dstport"])\r
-\r
- # JV: Also include src so we can see what device initiates the traffic\r
- new_packet["src_ip"] = layers[json_key_ip][json_key_ip + ".src"]\r
- new_packet["src_port"] = int(layers[json_key_tcp][json_key_tcp + ".srcport"])\r
- #JV: Also include eth soure/destination info so that we can map traffic to physical device using MAC\r
- new_packet[JSON_KEY_ETH_SRC] = layers[JSON_KEY_ETH][JSON_KEY_ETH_SRC]\r
- new_packet[JSON_KEY_ETH_DST] = layers[JSON_KEY_ETH][JSON_KEY_ETH_DST]\r
-\r
- # Go through all HTTP fields and extract the ones that are needed\r
- http_data = layers[json_key_http]\r
- for http_key in http_data:\r
- http_value = http_data[http_key]\r
-\r
- if http_key.startswith(json_key_http_req_line):\r
- header_line = http_value.split(":", 1)\r
- if len(header_line) != 2:\r
- print ("WARNING: could not parse header '" + str(header_line) + "'"\r
- + frame_num)\r
- continue\r
-\r
- # Prepare container for HTTP headers\r
- if json_key_headers not in new_packet:\r
- new_packet[json_key_headers] = {}\r
-\r
- # Use lower case for header keys to stay consistent with our other data\r
- header_key = header_line[0].lower()\r
-\r
- # Remove the trailing carriage return\r
- header_val = header_line[1].strip()\r
-\r
- # Save the header key-value pair\r
- new_packet[json_key_headers][header_key] = header_val\r
-\r
- # If this is the host header, we also save it to the main object\r
- if header_key == json_key_host:\r
- new_packet[json_key_host] = header_val\r
-\r
- if json_key_http_req_method in http_value:\r
- new_packet[json_key_method] = http_value[json_key_http_req_method]\r
- if json_key_http_req_uri in http_value:\r
- new_packet[json_key_uri] = http_value[json_key_http_req_uri]\r
-\r
- # End of HTTP parsing\r
-\r
- # Check that we found the minimum needed HTTP headers\r
- if (json_key_uri not in new_packet or json_key_method not in new_packet or\r
- json_key_host not in new_packet):\r
- print "Missing some HTTP Headers!" + frame_num\r
- continue\r
-\r
- # Extract timestamp\r
- if json_key_frame_ts not in layers[json_key_frame]:\r
- print "WARNING: could not find timestamp!" + frame_num\r
- continue\r
-\r
- new_packet["ts"] = layers[json_key_frame][json_key_frame_ts]\r
-\r
- # Create a unique key for each packet to keep consistent with ReCon\r
- # Also good in case packets end up in different files\r
- data[str(uuid.uuid4())] = new_packet\r
-\r
- # Write the new data\r
- #print json.dumps(data, sort_keys=True, indent=4)\r
- jf.seek(0)\r
- jf.write(json.dumps(data, sort_keys=True, indent=4))\r
- jf.truncate()\r
-\r
-if __name__ == '__main__':\r
- # Needed to re-use some JSON keys\r
- change_file(sys.argv[1])
\ No newline at end of file
--- /dev/null
+#!/usr/bin/python\r
+\r
+"""\r
+Script used to extract only the needed information from JSON packet traces generated by\r
+tshark from PCAPNG format\r
+"""\r
+\r
+import os, sys\r
+import json\r
+import uuid\r
+\r
+from collections import OrderedDict\r
+\r
+json_key_source = "_source"\r
+json_key_layers = "layers"\r
+\r
+json_key_ip = "ip"\r
+json_key_tcp = "tcp"\r
+\r
+json_key_http = "http"\r
+json_key_method = "method"\r
+json_key_uri = "uri"\r
+json_key_headers = "headers"\r
+json_key_host = "host"\r
+\r
+json_key_http_req = json_key_http + ".request."\r
+json_key_http_req_method = json_key_http_req + json_key_method\r
+json_key_http_req_uri = json_key_http_req + json_key_uri\r
+json_key_http_req_line = json_key_http_req + "line"\r
+\r
+json_key_pkt_comment = "pkt_comment"\r
+\r
+json_key_frame = "frame"\r
+json_key_frame_num = json_key_frame + ".number"\r
+json_key_frame_comment = json_key_frame + ".comment"\r
+json_key_frame_ts = json_key_frame + ".time_epoch"\r
+\r
+\r
+JSON_KEY_ETH = "eth"\r
+JSON_KEY_ETH_SRC = "eth.src"\r
+JSON_KEY_ETH_DST = "eth.dst"\r
+\r
+\r
+def make_unique(key, dct):\r
+ counter = 0\r
+ unique_key = key\r
+\r
+ while unique_key in dct:\r
+ counter += 1\r
+ unique_key = '{}_{}'.format(key, counter)\r
+ return unique_key\r
+\r
+\r
+def parse_object_pairs(pairs):\r
+ dct = OrderedDict()\r
+ for key, value in pairs:\r
+ if key in dct:\r
+ key = make_unique(key, dct)\r
+ dct[key] = value\r
+\r
+ return dct\r
+\r
+def change_file(fpath):\r
+ for fn in os.listdir(fpath):\r
+ full_path = fpath + '/' + fn\r
+\r
+ # Recursively go through all directories\r
+ if os.path.isdir(full_path):\r
+ change_file(full_path)\r
+ continue\r
+\r
+ print full_path\r
+ with open(full_path, "r+") as jf:\r
+ # Since certain json 'keys' appear multiple times in our data, we have to make them\r
+ # unique first (we can't use regular json.load() or we lose some data points). From:\r
+ # https://stackoverflow.com/questions/29321677/python-json-parser-allow-duplicate-keys\r
+ decoder = json.JSONDecoder(object_pairs_hook=parse_object_pairs)\r
+ pcap_data = decoder.decode(jf.read())\r
+\r
+ # Prepare new data structure for re-formatted JSON storage\r
+ data = {}\r
+ for packet in pcap_data:\r
+ layers = packet[json_key_source][json_key_layers]\r
+\r
+ # All captured traffic should have a frame + frame number, but check anyway\r
+ frame_num = " Frame: "\r
+ if json_key_frame not in layers or json_key_frame_num not in layers[json_key_frame]:\r
+ print "WARNING: could not find frame number! Using -1..."\r
+ frame_num = frame_num + "-1"\r
+ else:\r
+ # Save frame number for error-reporting\r
+ frame_num = frame_num + layers[json_key_frame][json_key_frame_num]\r
+\r
+ # All captured traffic should be IP, but check anyway\r
+ if not json_key_ip in layers:\r
+ print "WARNING: Non-IP traffic detected!" + frame_num\r
+ continue\r
+\r
+ # For now, focus on HTTP only\r
+ if json_key_tcp not in layers or json_key_http not in layers:\r
+ continue\r
+\r
+ # Fill our new JSON packet with TCP/IP info\r
+ new_packet = {}\r
+ new_packet["dst_ip"] = layers[json_key_ip][json_key_ip + ".dst"]\r
+ new_packet["dst_port"] = int(layers[json_key_tcp][json_key_tcp + ".dstport"])\r
+\r
+ # JV: Also include src so we can see what device initiates the traffic\r
+ new_packet["src_ip"] = layers[json_key_ip][json_key_ip + ".src"]\r
+ new_packet["src_port"] = int(layers[json_key_tcp][json_key_tcp + ".srcport"])\r
+                #JV: Also include eth source/destination info so that we can map traffic to physical device using MAC\r
+ new_packet[JSON_KEY_ETH_SRC] = layers[JSON_KEY_ETH][JSON_KEY_ETH_SRC]\r
+ new_packet[JSON_KEY_ETH_DST] = layers[JSON_KEY_ETH][JSON_KEY_ETH_DST]\r
+\r
+ # Go through all HTTP fields and extract the ones that are needed\r
+ http_data = layers[json_key_http]\r
+ for http_key in http_data:\r
+ http_value = http_data[http_key]\r
+\r
+ if http_key.startswith(json_key_http_req_line):\r
+ header_line = http_value.split(":", 1)\r
+ if len(header_line) != 2:\r
+ print ("WARNING: could not parse header '" + str(header_line) + "'"\r
+ + frame_num)\r
+ continue\r
+\r
+ # Prepare container for HTTP headers\r
+ if json_key_headers not in new_packet:\r
+ new_packet[json_key_headers] = {}\r
+\r
+ # Use lower case for header keys to stay consistent with our other data\r
+ header_key = header_line[0].lower()\r
+\r
+ # Remove the trailing carriage return\r
+ header_val = header_line[1].strip()\r
+\r
+ # Save the header key-value pair\r
+ new_packet[json_key_headers][header_key] = header_val\r
+\r
+ # If this is the host header, we also save it to the main object\r
+ if header_key == json_key_host:\r
+ new_packet[json_key_host] = header_val\r
+\r
+ if json_key_http_req_method in http_value:\r
+ new_packet[json_key_method] = http_value[json_key_http_req_method]\r
+ if json_key_http_req_uri in http_value:\r
+ new_packet[json_key_uri] = http_value[json_key_http_req_uri]\r
+\r
+ # End of HTTP parsing\r
+\r
+ # Check that we found the minimum needed HTTP headers\r
+ if (json_key_uri not in new_packet or json_key_method not in new_packet or\r
+ json_key_host not in new_packet):\r
+ print "Missing some HTTP Headers!" + frame_num\r
+ continue\r
+\r
+ # Extract timestamp\r
+ if json_key_frame_ts not in layers[json_key_frame]:\r
+ print "WARNING: could not find timestamp!" + frame_num\r
+ continue\r
+\r
+ new_packet["ts"] = layers[json_key_frame][json_key_frame_ts]\r
+\r
+ # Create a unique key for each packet to keep consistent with ReCon\r
+ # Also good in case packets end up in different files\r
+ data[str(uuid.uuid4())] = new_packet\r
+\r
+ # Write the new data\r
+ #print json.dumps(data, sort_keys=True, indent=4)\r
+ jf.seek(0)\r
+ jf.write(json.dumps(data, sort_keys=True, indent=4))\r
+ jf.truncate()\r
+\r
+if __name__ == '__main__':\r
+ # Needed to re-use some JSON keys\r
+ change_file(sys.argv[1])
\ No newline at end of file