#-*- mode: Python;-*-

import sys
import os
import uuid
import random
import threading
import sqlite3

import numpy
# Don't trust numpy's seeding
numpy.random.seed(random.SystemRandom().randint(0,2**32-1))

def _newid():
    return uuid.uuid4().hex


class db(threading.local):
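    """Thread-local handle on a sqlite3 results database.

    Deriving from threading.local gives each thread its own connection.
    The schema (created when a new database file is opened) stores probes,
    their captured packets, per-probe RTT analyses, and classifier results.
    """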
    conn = None
    cursor = None
    _population_sizes = None
    _population_cache = None
    _offset_cache = None
    _cur_offsets = None

    def __init__(self, path):
        exists = os.path.exists(path)
        self.conn = sqlite3.connect(path)
        self.conn.execute("PRAGMA foreign_keys = ON;")
        self.conn.row_factory = sqlite3.Row
        self._population_sizes = {}
        self._population_cache = {}
        self._offset_cache = {}
        self._cur_offsets = {}

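        # Fresh database file: create the schema.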
        if not exists:
            self.conn.execute(
                """CREATE TABLE meta (id BLOB PRIMARY KEY,
                                      tcpts_mean REAL,
                                      tcpts_stddev REAL,
                                      tcpts_slopes TEXT)
                """)

            self.conn.execute(
                """CREATE TABLE probes (id BLOB PRIMARY KEY,
                                        sample INTEGER,
                                        test_case TEXT,
                                        type TEXT,
                                        tc_order INTEGER,
                                        time_of_day INTEGER,
                                        local_port INTEGER,
                                        reported INTEGER,
                                        userspace_rtt INTEGER,
                                        UNIQUE (sample, test_case))
                """)

            self.conn.execute(
                """CREATE TABLE packets (id BLOB PRIMARY KEY,
                                         probe_id REFERENCES probes(id) ON DELETE CASCADE,
                                         sent INTEGER,
                                         observed INTEGER,
                                         tsval INTEGER,
                                         payload_len INTEGER,
                                         tcpseq INTEGER,
                                         tcpack INTEGER)
                """)

            self.conn.execute(
                """CREATE TABLE analysis (id BLOB PRIMARY KEY,
                                          probe_id UNIQUE REFERENCES probes(id) ON DELETE CASCADE,
                                          suspect TEXT,
                                          packet_rtt INTEGER,
                                          tsval_rtt INTEGER)
                """)

            self.conn.execute(
                """CREATE TABLE trim_analysis (id BLOB PRIMARY KEY,
                                               probe_id REFERENCES probes(id) ON DELETE CASCADE,
                                               suspect TEXT,
                                               packet_rtt INTEGER,
                                               tsval_rtt INTEGER,
                                               sent_trimmed INTEGER,
                                               rcvd_trimmed INTEGER)
                """)

            self.conn.execute(
                """CREATE TABLE classifier_results (id BLOB PRIMARY KEY,
                                                    classifier TEXT,
                                                    trial_type TEXT,
                                                    num_observations INTEGER,
                                                    num_trials INTEGER,
                                                    params TEXT,
                                                    false_positives REAL,
                                                    false_negatives REAL)
                """)

    def __del__(self):
        if self.conn:
            self.conn.commit()
            self.conn.close()


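    # Largest number of samples recorded for any single test case of the given
    # probe type, cached after the first lookup.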
    def populationSize(self, probe_type):
        if probe_type in self._population_sizes:
            return self._population_sizes[probe_type]

        try:
            cursor = self.conn.cursor()
            cursor.execute("SELECT max(c) FROM (SELECT count(sample) c FROM probes WHERE type=? GROUP BY test_case)", (probe_type,))
            self._population_sizes[probe_type] = cursor.fetchone()[0]
            return self._population_sizes[probe_type]
        except Exception as e:
            print(e)
            return 0


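    # For every sample of the "unusual" test case, the query below pairs that
    # sample's value of `field` with the average of the same field over all
    # other test cases in the same sample.  Results are cached per
    # (probe_type, unusual_case, field), together with a tuple of random
    # starting offsets that successive calls cycle through.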
    def subseries(self, probe_type, unusual_case, size=None, offset=None, field='packet_rtt'):
        cache_key = (probe_type,unusual_case,field)

        if cache_key not in self._population_cache:
            query="""
            SELECT %(field)s AS unusual_case,
                   (SELECT avg(%(field)s) FROM probes,analysis
                    WHERE analysis.probe_id=probes.id AND probes.test_case!=:unusual_case AND probes.type=:probe_type AND sample=u.sample) AS other_cases
            FROM   (SELECT probes.sample,%(field)s FROM probes,analysis
                    WHERE analysis.probe_id=probes.id AND probes.test_case =:unusual_case AND probes.type=:probe_type) u
            """ % {"field":field}

            params = {"probe_type":probe_type, "unusual_case":unusual_case}
            cursor = self.conn.cursor()
            cursor.execute(query, params)
            p = [dict(row) for row in cursor.fetchall()]
            self._population_cache[cache_key] = p
            # Pre-pick a pool of random starting offsets (~1/5 of the
            # population, at least one); randint's upper bound is exclusive.
            self._offset_cache[cache_key] = tuple(numpy.random.randint(0, len(p), max(1, len(p)//5)))
            self._cur_offsets[cache_key] = 0

        population = self._population_cache[cache_key]

        if size is None or size > len(population):
            size = len(population)
        if offset is None or offset >= len(population) or offset < 0:
            offset = self._offset_cache[cache_key][self._cur_offsets[cache_key]]
            # Step to the next precomputed offset, wrapping around.
            self._cur_offsets[cache_key] = (self._cur_offsets[cache_key] + 1) % len(self._offset_cache[cache_key])

        try:
            offset = int(offset)
            size = int(size)
        except Exception as e:
            print(e, offset, size)
            return None

        ret_val = population[offset:offset+size]
        # Wrap around the end of the population so the result is always
        # `size` entries long.
        if len(ret_val) < size:
            ret_val += population[0:size-len(ret_val)]

        return ret_val


    def resetOffsets(self):
        for k in self._cur_offsets.keys():
            self._cur_offsets[k] = 0


    def clearCache(self):
        self._population_cache = {}
        self._offset_cache = {}
        self._cur_offsets = {}


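    # Insert `row` (a mapping of column name to value) into `table` under a
    # fresh uuid4 hex id.  The table name, column list, and locally generated
    # id are formatted into the SQL string; the row values are bound as named
    # parameters.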
    def _insert(self, table, row):
        rid = _newid()
        keys = row.keys()
        columns = ','.join(keys)
        placeholders = ':'+', :'.join(keys)
        query = "INSERT INTO %s (id,%s) VALUES ('%s',%s)" % (table, columns, rid, placeholders)
        #print(row)
        self.conn.execute(query, row)
        return rid

    def addMeta(self, meta):
        ret_val = self._insert('meta', meta)
        self.conn.commit()
        return ret_val

    def addProbes(self, p):
        return [self._insert('probes', row) for row in p]

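    # Bulk-insert captured packets.  Each packet is attached to the probe whose
    # local_port matches and whose observed time falls between time_of_day and
    # time_of_day+userspace_rtt+window_size.  Foreign-key enforcement is
    # switched off for the duration of the load and re-enabled afterwards.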
    def addPackets(self, pkts, window_size):
        query = ("INSERT INTO packets (id,probe_id,sent,observed,tsval,payload_len,tcpseq,tcpack)"
                 " VALUES(randomblob(16),"
                 "(SELECT id FROM probes WHERE local_port=:local_port AND :observed>time_of_day"
                 " AND :observed<time_of_day+userspace_rtt+%d"
                 " ORDER BY time_of_day ASC LIMIT 1),"
                 ":sent,:observed,:tsval,:payload_len,:tcpseq,:tcpack)") % window_size
        self.conn.execute("PRAGMA foreign_keys = OFF;")
        self.conn.execute("CREATE INDEX IF NOT EXISTS probes_port ON probes (local_port)")
        cursor = self.conn.cursor()
        #print(query, list(pkts)[0:3])
        cursor.executemany(query, pkts)
        self.conn.commit()
        self.conn.execute("PRAGMA foreign_keys = ON;")

    def addAnalyses(self, analyses):
        return [self._insert('analysis', row) for row in analyses]

    def addTrimAnalyses(self, analyses):
        return [self._insert('trim_analysis', row) for row in analyses]

    def addClassifierResults(self, results):
        ret_val = self._insert('classifier_results', results)
        self.conn.commit()
        return ret_val

    def fetchClassifierResult(self, classifier, trial_type, num_observations):
        query = """
          SELECT * FROM classifier_results
          WHERE classifier=? AND trial_type=? AND num_observations=?
          ORDER BY false_positives+false_negatives
          LIMIT 1;
        """
        cursor = self.conn.cursor()
        cursor.execute(query, (classifier, trial_type, num_observations))
        ret_val = cursor.fetchone()

        if ret_val is not None:
            ret_val = dict(ret_val)
        return ret_val

    def deleteClassifierResults(self, classifier, trial_type, num_observations=None):
        params = {"classifier":classifier,"trial_type":trial_type,"num_observations":num_observations}
        query = """
          DELETE FROM classifier_results
          WHERE classifier=:classifier AND trial_type=:trial_type
        """
        if num_observations is not None:
            query += " AND num_observations=:num_observations"

        self.conn.execute(query, params)
        self.conn.commit()
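
# Minimal usage sketch (illustrative only; the column names mirror the schemas
# defined above, but the path and values are made up):
#
#   d = db('/tmp/example.db')
#   d.addMeta({'tcpts_mean': 0.0, 'tcpts_stddev': 0.0, 'tcpts_slopes': '{}'})
#   d.addProbes([{'sample': 0, 'test_case': 'train', 'type': 'train',
#                 'tc_order': 0, 'time_of_day': 1234, 'local_port': 40000,
#                 'reported': 0, 'userspace_rtt': 0}])
#   d.conn.commit()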