Merge pull request #40 from OxfordHCC/bre-staging
Bre staging
mcnutty26 authored Jan 24, 2019
2 parents 5c6cd94 + 58f818a commit a8dee7e
Showing 33 changed files with 3,246 additions and 2,520 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -5,11 +5,12 @@ tests/testPredictEcho.py
!tests/testData/AlexaTime1

db/__pycache__/
scripts/__pycache__/
+categorisation/__pycache__/
tests/__pycache__/

*node_modules*

-scripts/config.cfg
+config/config.cfg
service/daemon.sh
service/iotrefine.service
4 changes: 3 additions & 1 deletion README.md
@@ -10,7 +10,7 @@ A static version of IoT Refine is hosted at: https://dkarandikar.github.io/Stati
## Install
1. Ensure Postgres is installed, then create a database named `testdb` accessible by user `postgres` with password `password` (one way to do this is sketched just after this list)

-2. Install python3 dependencies: `pip3 install psycopg2 scapy pandas sklearn ipdata Pyshark`
+2. Install python3 dependencies: `pip3 install psycopg2-binary scapy pandas sklearn ipdata Pyshark`

3. Install angular (for Refine web interface): `cd ui/ && npm install && npm install @angular/[email protected]`

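For step 1, a hedged example on a stock Postgres install (it assumes the default `postgres` superuser role and peer authentication for local connections): `sudo -u postgres psql -c "ALTER USER postgres WITH PASSWORD 'password';"` followed by `sudo -u postgres createdb testdb`.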
@@ -35,6 +35,8 @@ A static version of IoT Refine is hosted at: https://dkarandikar.github.io/Stati

3. Enable or disable IoT Refine on boot by running `sudo systemctl {enable|disable} iotrefine`

4. To have Chromium open IoT Refine on login, copy and fill out `logintask-sample.desktop`, then move it to `~/.config/autostart/`

## Configure Device Names

Device names will initially display as MAC addresses. To assign a 'friendly' name to a device, use the `SetDevice` API endpoint:
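The concrete request is truncated in this view of the diff, so the following is a hypothetical sketch only: the endpoint name comes from the README, but the URL shape and the `mac`/`name` parameters are assumptions, not taken from the repository.

```python
# Hypothetical sketch: parameter names and request shape are assumptions.
# Assumes the API URL format from config-sample.cfg (http://localhost:4201/api).
import requests  # listed in requirements.txt

resp = requests.get(
    "http://localhost:4201/api/SetDevice",
    params={"mac": "aa:bb:cc:dd:ee:ff",   # device MAC shown in the UI
            "name": "LivingRoomEcho"},    # desired friendly name
)
print(resp.status_code)
```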
6 changes: 2 additions & 4 deletions categorisation/predictions.py
@@ -241,11 +241,9 @@ def predictOther(self, rows, printing=False):
        for externalIP in ext.keys():
            if ext[externalIP]*1.0 / total*1.0 > percentCutoff:
                c_name = DB_MANAGER.execute("SELECT c_name FROM geodata WHERE ip=%s LIMIT 1", (externalIP,), False)
-               try:
+               if c_name and len(c_name) > 0:
                    if ext[externalIP] == total:
                        return "Exclusively " + c_name[0]
-                   else :
+                   else:
                        return "Mostly " + c_name[0]
-               except:
-                   return "Unknown"
        return "Unknown"
19 changes: 17 additions & 2 deletions config/config-sample.cfg
@@ -1,10 +1,25 @@
[api]
-;insert the value of port using %(port)s
+;port the api will run on
port=4201
+;url of the api, insert the value of port using %(port)s
url=http://localhost:%(port)s/api

[macvendors]
-key=insert_your_api_key_here
+;API key used to retrieve device manufacturers from a MAC address
+key=key_goes_here

+[loop]
+;whether to automatically name new devices with a random fruit and number (e.g. Orange#123)
+autogen-device-names=True
+;frequency at which the processing loop is run in seconds (float)
+interval=1
+;whether packets will be grouped into bursts
+burstify=False
+;whether content prediction will be run on bursts
+predict=False

[capture]
+;interface to capture packets on
interface=eth0
+;frequency at which new packets are committed to the database in seconds (float)
+interval=5
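Not part of the diff, but as a reading aid: the `%(port)s` reference above is standard configparser basic interpolation, which a sketch like this would resolve:

```python
# Reading aid (not from the repo): parse the config with Python's configparser.
# The default BasicInterpolation expands %(port)s within the same section.
import configparser

config = configparser.ConfigParser()
config.read("config/config.cfg")

print(config.getint("api", "port"))           # 4201
print(config.get("api", "url"))               # http://localhost:4201/api
print(config.getboolean("loop", "burstify"))  # False
```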
10 changes: 0 additions & 10 deletions config/config.cfg

This file was deleted.

36 changes: 34 additions & 2 deletions db/databaseBursts.py
@@ -2,15 +2,47 @@
Handles all interaction with the database for burstification and categorisation
"""
import psycopg2
+import psycopg2.extensions
+import select
+import threading

class dbManager():

-    def __init__(self):
+    def __init__(self, dbname='testdb', username='postgres', password='password'):
        try:
-            self.connection = psycopg2.connect("dbname=testdb user=postgres password=password")
+            print("connection string ", "dbname=%(dbname)s user=%(username)s password=%(password)s" % {'dbname':dbname,'username':username,'password':password })
+            self.connection = psycopg2.connect("dbname=%(dbname)s user=%(username)s password=%(password)s" % {'dbname':dbname,'username':username,'password':password })
        except:
            print("Connection error")


+    def listen(self, channel, cb=None):
+        try:
+            conn = self.connection
+            conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
+            curs = conn.cursor()
+            curs.execute("LISTEN %s ;" % channel)
+            stop = [False]
+            def stopme():
+                stop[0] = True
+            def subp():
+                while not stop[0]: # kill me with a sharp stick.
+                    if select.select([conn],[],[],5) == ([],[],[]):
+                        # print("Timeout")
+                        pass
+                    else:
+                        conn.poll()
+                        while not stop[0] and conn.notifies:
+                            notify = conn.notifies.pop(0)
+                            # print("Got NOTIFY:", notify.pid, notify.channel, notify.payload)
+                            if cb is not None:
+                                cb(notify.payload)
+            thread = threading.Thread(target=subp)
+            thread.start()
+            return stopme
+        except:
+            print("listen error")
+            return lambda: None


    def execute(self, query, data, all=True):
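A minimal usage sketch for the new `listen` method (not from the repository; the `db_notifications` channel name comes from the triggers in `schema.sql` below):

```python
# Usage sketch (not from the repo): subscribe to database change notifications.
from db.databaseBursts import dbManager

db = dbManager()  # defaults match the README: testdb/postgres/password

# listen() starts a background polling thread and returns a stopper function.
stop = db.listen("db_notifications", cb=lambda payload: print("change:", payload))

# ... later, to end the background thread:
stop()
```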
87 changes: 82 additions & 5 deletions db/schema.sql
@@ -1,19 +1,19 @@
--categories table matches readable descriptions to bursts of traffic ("this burst is a weather info request")
-drop table if exists categories cascade ;
+drop table if exists categories cascade;
create table categories (
    id SERIAL primary key,
    name varchar(40) not null -- e.g. "Alexa-time" or "Alexa-joke"
);

--collates bursts of traffic and optionally assigns them a category
-drop table if exists bursts cascade ;
+drop table if exists bursts cascade;
create table bursts (
    id SERIAL primary key,
    category integer references categories --primary key assumed when no column given
);

--store core packet info, and optionally which burst it is part of, and which company it represents
-drop table if exists packets cascade ;
+drop table if exists packets cascade;
create table packets (
    id SERIAL primary key,
    time timestamp not null,
@@ -26,11 +26,16 @@ create table packets (
    --company integer references companies --optional, assumes table of companies (stolen from refine)
);

+-- create indexes on src, dst, and time to speed up lookups on these columns by loop.py
+create index on packets (src);
+create index on packets (dst);
+create index on packets (time);

drop table if exists devices cascade;
create table devices(
    mac varchar(17) primary key,
    manufacturer varchar(40),
-    name varchar(40) DEFAULT 'unknown'
+    name varchar(255) DEFAULT 'unknown'
);

drop table if exists geodata cascade;
@@ -43,7 +48,7 @@ create table geodata(
);

--store simplified profiles of devices: Name, time, destination company, traffic
-drop table if exists models cascade ;
+drop table if exists models cascade;
create table models (
    id SERIAL primary key,
    device varchar(17) not null, --device mac address
@@ -52,3 +57,75 @@ create table models (
    location varchar(2) not null, --country the company is based in
    impact real not null --amount of traffic in mb
);

drop function if exists notify_trigger();
CREATE FUNCTION notify_trigger() RETURNS trigger AS $trigger$
DECLARE
    rec RECORD;
    payload TEXT;
    column_name TEXT;
    column_value TEXT;
    payload_items JSONB;
BEGIN
    -- Set record row depending on operation
    CASE TG_OP
        WHEN 'INSERT', 'UPDATE' THEN
            rec := NEW;
        WHEN 'DELETE' THEN
            rec := OLD;
        ELSE
            RAISE EXCEPTION 'Unknown TG_OP: "%". Should not occur!', TG_OP;
    END CASE;

    -- Get required fields
    FOREACH column_name IN ARRAY TG_ARGV LOOP
        EXECUTE format('SELECT $1.%I::TEXT', column_name)
        INTO column_value
        USING rec;
        payload_items := coalesce(payload_items,'{}')::jsonb || json_build_object(column_name,column_value)::jsonb;
    END LOOP;

    -- Build the payload
    payload := json_build_object(
        'timestamp',CURRENT_TIMESTAMP,
        'operation',TG_OP,
        'schema',TG_TABLE_SCHEMA,
        'table',TG_TABLE_NAME,
        'data',payload_items
    );

    -- Notify the channel
    PERFORM pg_notify('db_notifications', payload);

    RETURN rec;
END;
$trigger$ LANGUAGE plpgsql;

drop trigger if exists packets_notify on packets;
CREATE TRIGGER packets_notify AFTER INSERT OR UPDATE OR DELETE ON packets
FOR EACH ROW EXECUTE PROCEDURE notify_trigger(
    'id',
    'mac',
    'src',
    'dst',
    'len',
    'burst'
);

drop trigger if exists device_notify on devices;
CREATE TRIGGER device_notify AFTER INSERT OR UPDATE OR DELETE ON devices
FOR EACH ROW EXECUTE PROCEDURE notify_trigger(
    'mac',
    'manufacturer',
    'name'
);

drop trigger if exists geodata_notify on geodata;
CREATE TRIGGER geodata_notify AFTER INSERT OR UPDATE OR DELETE ON geodata
FOR EACH ROW EXECUTE PROCEDURE notify_trigger(
    'ip',
    'lat',
    'lon',
    'c_code',
    'c_name'
);
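For illustration (not captured output), an INSERT on `devices` would cause `notify_trigger` to publish a JSON payload on `db_notifications` shaped roughly like this:

```python
# Illustrative payload shape only; the field values here are made up.
payload = {
    "timestamp": "2019-01-24T12:00:00+00:00",
    "operation": "INSERT",
    "schema": "public",
    "table": "devices",
    "data": {
        "mac": "aa:bb:cc:dd:ee:ff",
        "manufacturer": "Acme",
        "name": "unknown",
    },
}
```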
12 changes: 12 additions & 0 deletions requirements.txt
@@ -0,0 +1,12 @@
psycopg2-binary
scapy
pandas
sklearn
ipdata
Pyshark
libxml
requests
ipwhois
flask
flask_restful

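With this file in place, the README's dependency step can be done in one command (standard pip behaviour): `pip3 install -r requirements.txt`.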