Commit ebce69dd authored by Edvard Rejthar

server development

Signed-off-by: Edvard Rejthar <edvard.rejthar@nic.cz>
parent f48ea5fb
......@@ -10,7 +10,7 @@ apt install software-properties-common
add-apt-repository "deb http://archive.ubuntu.com/ubuntu $(lsb_release -sc) main universe restricted multiverse"
apt update
apt install firefox python3 mariadb-server xvfb
pip3 install xvfbwrapper pymysql peewee flask wtforms pyyaml bs4 pygments pillow requests
pip3 install xvfbwrapper pymysql peewee flask wtforms pyyaml bs4 pygments pillow requests humanize filelock
# current dir
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
......
......@@ -33,7 +33,7 @@ browser.runtime.onMessage.addListener(function (message) {
/**
* traffic listener
*/
let trafficBlacklist = ["http://www.google.com/adsense/", "https://fbstatic-a.akamaihd.net/rsrc.php", "http://clients1.google.com/ocsp", "https://safebrowsing-cache.google.com/safebrowsing/", "https://safebrowsing.google.com/safebrowsing/", "https://tiles.services.mozilla.com/", "http://localhost/redirect/"];
// let trafficBlacklist = ["http://www.google.com/adsense/", "https://fbstatic-a.akamaihd.net/rsrc.php", "http://clients1.google.com/ocsp", "https://safebrowsing-cache.google.com/safebrowsing/", "https://safebrowsing.google.com/safebrowsing/", "https://tiles.services.mozilla.com/", "http://localhost/redirect/"];
browser.webRequest.onBeforeRequest.addListener(
function (details) {
// log resources that we want
......
......@@ -104,7 +104,7 @@ function screenshot() {
} else {
screenshotted = true;
}
console.warn("Starting screenshot countdown...");
console.warn("MDMAUG: Starting screenshot countdown...");
setTimeout(function () {
// total scroll height of tab
let height = Math.max(document.body.scrollHeight, document.body.offsetHeight, document.documentElement.clientHeight, document.documentElement.scrollHeight, document.documentElement.offsetHeight);
......
......@@ -2,32 +2,34 @@
import logging
import os
from flask import Flask, request
#logging.basicConfig(level=logging.DEBUG, format="%(message)s") # init before flask so that logging is shown in the Terminal
from flask import Flask
from xvfbwrapper import Xvfb
from .lib.config import Config
from .lib.controller.api import Api
# import ipdb; ipdb.set_trace()
logging.basicConfig(level=logging.DEBUG, format="%(message)s")
# assure the logging dir
if not os.path.exists(Config.LOG_DIR):
os.makedirs(Config.LOG_DIR)
logger = logging.getLogger("mdmaug")
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
# server setup
Api.reset()
# address = '0.0.0.0'
app = Flask(__name__, static_url_path="/static")
app.secret_key = b'as8!r"afERaa5'
app.config["preferences"] = {
"safebrowsing": True,
"pdns": True,
"geoip": False
}
# app.config["preferences"] = {
# "safebrowsing": True,
# "pdns": True,
# "geoip": False
# }
"""
httpd = HTTPServer((address, Config.APP_PORT), Server)
......@@ -45,8 +47,9 @@ try:
from .lib.controller.server import app as server
app.register_blueprint(server)
app.run(host='0.0.0.0', ssl_context=(Config.DIR + 'cert-mdmaug.pem', Config.DIR + 'key-mdmaug.pem'), threaded=True)
# for _ in range(Config.profileCount):
if __name__ == "__main__":
app.run(host=Config.APP_IP, port=Config.APP_PORT, ssl_context=(Config.DIR + 'cert-mdmaug.pem', Config.DIR + 'key-mdmaug.pem'), threaded=True)
# for _ in range(Config.profile_count):
# threading.Thread(target=httpd.serve_forever).start()
except (KeyboardInterrupt, SystemExit):
display.stop()
#!/usr/bin/env python3
# This native-messaging host lets the browser extension write to a file.
import json
import os
import struct
import sys
# Read a message from stdin and decode it.
profile = os.environ["PROFILE"] if "PROFILE" in os.environ else "unknown-profile"
#log_dir = "/tmp/mdmaug/.cache/mdmaug-scans/_tmp/" # /tmp/ is small (200 MB) and takes precious RAM
cache_dir = os.environ["CACHE_DIR"] if "CACHE_DIR" in os.environ else "/tmp/"
files_encountered = set()
#cache_dir = log_dir # // default dir to store the analysis is log_dir if we fail to identify a better storage point
#with open(log_dir+"cache.dir", "r") as f:
# cache_dir = f.read().strip()
def get_message():
raw_length = sys.stdin.buffer.read(4)
if len(raw_length) == 0:
sys.exit(0)
message_length = struct.unpack('@I', raw_length)[0]
message = sys.stdin.buffer.read(message_length).decode('utf-8')
return json.loads(message)
return json.loads(sys.stdin.buffer.read(message_length).decode('utf-8'))
while True:
message = get_message()
file = cache_dir+message["filename"]
file = cache_dir + message["filename"]
if file in files_encountered:
method = "a"
method = "a"
else:
method = "w"
files_encountered.add(file)
with open(file, method) as f:
f.write(message["text"])
with open("/tmp/ram/zde.txt", "a") as f:
f.write(f"{file} {method}\n")
......@@ -2,15 +2,27 @@ import logging
import os
import threading
from flask import g
from peewee import MySQLDatabase
logger = logging.getLogger("mdmaug")
class Pref:
safebrowsing = True
pdns = True
geoip = False
@classmethod
def val2html(cls, param):
return "1" if getattr(cls, param) in [True, "true", "1"] else "0"
class Config:
profileCount = 21 # number of Firefox profiles; their names are just numbers – 0, 1, ...
profile_count = 2 # number of Firefox profiles; their names are just numbers – 0, 1, ...
browser = 'firefox' # iceweasel, firefox. What browser gets launched.
configFile = '/opt/mdmaug/.cache/mdmaug-scans/_tmp/queue.cache' # RAM disk was too small: '/tmp/mdm/queue.cache'
APP_PORT = 8000
config_file = '/opt/mdmaug/.cache/mdmaug-scans/_tmp/queue.cache' # RAM disk was too small: '/tmp/mdm/queue.cache'
APP_PORT = 5000
APP_IP = "127.0.0.1"
APP_DOMAIN = 'https://217.31.202.41:' + str(APP_PORT) # csirt.csirt.office.nic.cz
LOG_DIR = "/opt/mdmaug/.cache/mdmaug-scans/_tmp/"
CACHE_DIR = "/opt/mdmaug/.cache/mdmaug-scans/"
......@@ -22,10 +34,11 @@ class Config:
MAX_BROWSER_RUN_TIME = 25 # maximum time for a browser to run
MAX_BROWSER_EXPIRATION = 15 # seconds that we wait before killing the browser (waiting for the files to be written)
@staticmethod
def connect():
# XX dealing with peewee.OperationalError: (2006, "MySQL server has gone away (BrokenPipeError(32, 'Broken pipe'))") after a 7-hour timeout
# XX surprisingly the connection cannot live in dbp DBModel.connect. Even though the type is then correct (MySQLDatabase), nothing works and select attempts end with NoneType.
logging.debug("Connecting to DB.")
logger.debug("Connecting to DB.")
Config.myDB = MySQLDatabase("mdmaug", host='localhost', port=3306, user="mdmaug",
passwd="fidFDSs676") # XX dal jsem pryc: , threadlocals=False
......
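A quick, hypothetical illustration of how the new Pref toggles and val2html() are meant to be consumed (the real consumer is the WTForms form in server.py below; the import path is an assumption):

# Hypothetical usage sketch for the Pref class above.
from mdmaug.lib.config import Pref  # assumed import path

Pref.safebrowsing = True              # toggles are plain class attributes
Pref.geoip = False
print(Pref.val2html("safebrowsing"))  # "1" – string form for an HTML attribute
print(Pref.val2html("geoip"))         # "0"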
......@@ -5,13 +5,14 @@ from urllib.parse import urlparse
from peewee import IntegrityError
from ..domains import Domains
from ..domains import domain2dir
from .scan_controller import ScanController
from ..config import Config
from ..model.dbp import Turris, Whitelist
from ..parser.traffic_log_parser import TrafficLogParser
from ...templates.crawl_view import CrawlView
logger = logging.getLogger("mdmaug")
class Api:
website = "" # http://site.cz
......@@ -19,7 +20,7 @@ class Api:
def run(self, request):
""" Accept command
:type path: dict from URL request. /api/analyze=cache/http://example.com → {"api": True, "analyze": cache, "page": "http://example.com"}
:type request: dict from URL request. /api/analyze=cache/http://example.com → {"api": True, "analyze": cache, "page": "http://example.com"}
"""
crawl = None
......@@ -34,13 +35,13 @@ class Api:
elif "decide" in request: # XX deprecated?
return self.get_undecided()
elif "nicify" in request:
return TrafficLogParser.getStylesheet() + TrafficLogParser.nicifyFile(request["page"])
return TrafficLogParser.getStylesheet() + TrafficLogParser.nicify_file(request["page"])
elif "vote" in request: # /api/vote/block/example.org/10.0.0.1
logging.debug("vote cmd")
logger.debug("vote cmd")
return Turris.vote(request["vote"], request["page"])
elif "scans" in request:
if "url" in request: # /api/scans/url/http://example.com
domain = Domains.domain2dir(request["page"])
if "url" in request: # /api/scans/url/http://example.com
domain = domain2dir(request["page"])
if not domain:
return "Wrong domain"
return ScanController().get_domain_snapdirs(domain, full_dirs=False)
......@@ -50,21 +51,23 @@ class Api:
"""url = path.split("/", 3)
if len(url) > 3:
self._setWebsite(url[2]) # make sure self.website is a URL and not shell input
logging.debug("XXX not sure whether the url is really at url[2]") # XXX
logging.debug(url) # XXX
logger.debug("XXX not sure whether the url is really at url[2]") # XXX
logger.debug(url) # XXX
quit() # XXX
logging.debug(self.website)
logging.debug(self.websiteDomain)
logger.debug(self.website)
logger.debug(self.websiteDomain)
return self.whitelist()"""
return "Implement first if needed."
elif "reset" in request:
self.reset()
return "reset"
else:
return "Unknown method."
return "Unknown API method."
if crawl:
if request["api"] == "json":
if type(crawl) is str: # probably an error
return crawl
elif request["api"] == "json":
return CrawlView.output_json(crawl)
elif request["api"] == "mdmaug":
return CrawlView.output_mdmaug(crawl)
......@@ -73,16 +76,16 @@ class Api:
@staticmethod
def reset():
logging.debug("resetting running browsers")
with open(Config.configFile, 'w') as f: # clear the queue
logger.debug("resetting running browsers")
with open(Config.config_file, 'w') as f: # clear the queue
json.dump({}, f)
subprocess.call(["pkill", Config.browser]) # kill frozen browsers
# adds the 2nd-level domain to the whitelist
def whitelist(self):
logging.debug("whitelistuju")
logger.debug("whitelistuju")
# Db.cur = Db.connection.cursor()
# self._logging.debug(Db.cur.execute("""REPLACE INTO whitelist set domain = %s""", (self.websiteDomain, )))
# self._logger.debug(Db.cur.execute("""REPLACE INTO whitelist set domain = %s""", (self.websiteDomain, )))
# Db.connection.commit()
# Db.cur.close()
try:
......@@ -92,5 +95,5 @@ class Api:
@staticmethod
def get_undecided():
logging.debug("XXX jeste jsem neudelal - ma vylezt tabulka vsech nerozhodlych domen od posledniho exportu")
logger.debug("XXX jeste jsem neudelal - ma vylezt tabulka vsech nerozhodlych domen od posledniho exportu")
pass
import json
import logging
from collections import defaultdict
import re
from flask import Blueprint, send_from_directory, render_template, request, make_response, current_app
from flask import Blueprint, send_from_directory, render_template, request, make_response
from jinja2 import Environment, FileSystemLoader
from wtforms import Form
from wtforms.fields import BooleanField
from ..config import Config
from ..config import Config, Pref
from ..controller.api import Api
from ..model.dbp import DbModel
from ..model.dbp import Export
env = Environment()
env.loader = FileSystemLoader(Config.DIR + "templates/")
# env = Environment()
# env.loader = FileSystemLoader(Config.DIR + "templates/")
logger = logging.getLogger("mdmaug")
app = Blueprint('app', __name__, template_folder='templates')
def update_preferences():
""" cookies → config """
for k, v in request.cookies.items():
if v is "0":
if v in ["0", ""]:
v = False
elif v is "1":
v = True
current_app.config["preferences"][k] = v
setattr(Pref, k, v)
# current_app.config["preferences"][k] = v
@app.route('/test')
......@@ -41,10 +43,11 @@ def homepage():
update_preferences()
class OptionsForm(Form):
pref = defaultdict(bool, current_app.config["preferences"])
safebrowsing = BooleanField('Google Safebrowsing', default=pref["safebrowsing"])
pdns = BooleanField('PDNS', default=pref["pdns"])
geoip = BooleanField('geoip', default=pref["geoip"])
# pref = defaultdict(bool, current_app.config["preferences"])
safebrowsing = BooleanField('Google Safebrowsing', default=Pref.safebrowsing,
render_kw={"value": Pref.val2html("safebrowsing")})
pdns = BooleanField('PDNS', default=Pref.pdns, render_kw={"value": Pref.val2html("pdns")})
geoip = BooleanField('geoip', default=Pref.geoip, render_kw={"value": Pref.val2html("geoip")})
if request.method == 'POST':
name = request.form['name']
......@@ -77,13 +80,14 @@ def controller(request_url):
DbModel.assureConnection()
# parse the request url into a friendly dictionary
keywords = {"safebrowsing", "geoip", "api", "destination", "analyze", "pdns", "export"}
request = {"page": ""}
page = False
for l in request_url.split("/"):
if not page:
c, *d = l.split("=", 1)
if c in ["http:", "https:"]:
page = True
if re.search("[a-zA-Z]+:|\.", c): # page def begins when we have a transport protocol ':' or a domain dot in param
page = True # Xin ["http:", "https:"]:
else:
request[c] = d[0] if len(d) else True
if c == "nicify":
......@@ -95,9 +99,14 @@ def controller(request_url):
if request["page"]: # strip last slash
request["page"] = request["page"][:-1]
logging.debug("Request: {}".format(request))
logger.debug("Request: {}".format(request))
if "api" in request: # /api/analyze/web
# set api-preferences from request, non-set are default
for s in Pref.__dict__:
if not s.startswith("_") and s in request:
setattr(Pref, s, request[s] in ["y", "1", True])
output = Api().run(request)
if request["api"] == "json":
......
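To make the request-URL convention concrete, here is a simplified, hypothetical re-implementation of the parsing loop in controller() above (the real code also special-cases nicify and consults the keywords set):

# Simplified sketch of the URL-to-dict parsing shown above; for illustration only.
import re

def parse_request_url(request_url: str) -> dict:
    request = {"page": ""}
    page = False
    for part in request_url.split("/"):
        if not page:
            key, *value = part.split("=", 1)
            # the page begins at a transport protocol ':' or a dotted domain
            if re.search(r"[a-zA-Z]+:|\.", key):
                page = True
            else:
                request[key] = value[0] if value else True
        if page:
            request["page"] += part + "/"
    if request["page"]:
        request["page"] = request["page"][:-1]  # strip the trailing slash
    return request

print(parse_request_url("api/analyze=cache/http://example.com"))
# {'page': 'http://example.com', 'api': True, 'analyze': 'cache'}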
import socket
import logging
import re
import socket
import urllib.request
class Domains:
""" webove nastroje """
def get_ips_for_host(host):
try:
ips = socket.getaddrinfo(host, 80, 0, 0, socket.IPPROTO_TCP) # XXX what if a hostile site responds only on ports 80, 81, 8080
except socket.gaierror:
ips = []
return ips
def url2domain(url):
""" http://www.seznam.cz/url -> www.seznam.cz """
try:
return re.search('(([\w\-_]+(?:(?:\.[\w\-_]+)+)))', url).group(0)
except TypeError:
logging.debug("Domains/url2domain type error")
logging.debug(url)
raise # return ""
def url2path(url):
""" http://seznam.cz/url -> /url """
url = re.sub('^(http://|https://|ftp://)', '', url) # strip the protocol
url = re.sub('^([^/])*', '', url) # we want only the path, not the domain
return url
def assureUrl(url): # make sure this is a URL and not, say, shell input
# XX what about other protocols? smb, sftp? Is there a danger?
return re.search(
'(((((http|https|ftp)://)?[\w\-_]+(?:(?:\.[\w\-_]+)+)))([\w\-\.,@?^=%&:/~\+#]*[\w\-\@?^=%&/~\+#])?)',
url).group(0)
def domain2dir(url) -> str:
"""
Friendly dir name from the domain contained in a URL
XX may be replaced by urlparse(url).netloc?
"""
url = url.lower()
url = re.sub('^(http://|https://|ftp://)', '', url) # strip the protocol
url = re.sub('(/.*)', '', url) # we don't want the path, only the domain
url = re.sub('[^a-z0-9\.]', '', url) # keep only friendly characters
return url
def getPdnsLink(ip):
return 'http://pdns.cert.at/p/dns?qry=' + ip
def ip2pdnsDomains(ip):
"""
try:
# XX could sort by the 2nd-level domain. Possibly cut the 3rd-level domain and keep only the 2nd, but then problematic double TLDs - co.uk, gov.ua... - have to be handled
pdns = urllib.request.urlopen(Domains.getPdnsLink(ip)).read().decode("utf-8")
items = re.findall("<div class='x[BA]'>(.*)</div>", pdns)
return items
except Exception as e:
logging.debug("chyba pri kontaktu s PDNS: " + str(e))
from urllib.parse import urlparse
from .config import Pref
logger = logging.getLogger("mdmaug")
re_url = re.compile('(((((http|https|ftp)://)?[\w\-_]+(?:(?:\.[\w\-_]+)+)))([\w\-\.,@?^=%&:/~\+#]*[\w\-\@?^=%&/~\+#])?)$')
def url2domain(url):
""" http://www.seznam.cz/url -> www.seznam.cz """
try:
return re.search('(([\w\-_]+(?:(?:\.[\w\-_]+)+)))', url).group(0)
except TypeError:
logger.debug("Domains/url2domain type error")
logger.debug(url)
raise # return ""
def url2path(url):
""" http://seznam.cz/url -> /url """
url = re.sub('^(http://|https://|ftp://)', '', url) # strip the protocol
url = re.sub('^([^/])*', '', url) # we want only the path, not the domain
return url
def assure_url(url):
""" Assure this is URL and not a shell.
# XX what about other protocols? smb, sftp? Is there a danger?
:param url: any URL
:return: url, prepended with "http" if no scheme found, or false
"""
if not urlparse(url).scheme:
url = "http://" + url
s = re_url.match(url)
if s:
return s.group(0)
return False
def domain2dir(url) -> str:
"""
Friendly dir name from the domain contained in a URL
XX may be replaced by urlparse(url).netloc?
"""
url = url.lower()
url = re.sub('^(http://|https://|ftp://)', '', url) # strip the protocol
url = re.sub('(/.*)', '', url) # we don't want the path, only the domain
url = re.sub('[^a-z0-9\.]', '', url) # keep only friendly characters
return url
def ip2pdns_domains(ip):
if not Pref.pdns:
return None
"""
return None # #24 doesn't work
def ip2countryAndCity(ip):
"""
try:
hostipApi = urllib.request.urlopen('http://api.hostip.info/get_html.php?ip=' + ip + '&position=true').read().decode(
"utf-8").split("\n")
# ['Country: CZECH REPUBLIC (CZ)', 'City: Prague', '', 'Latitude: 50.0833', 'Longitude: 14.4333', 'IP: 109.123.209.188', '']
return hostipApi[0].split(":")[1], hostipApi[1].split(":")[1]
except UnicodeDecodeError: # as e
logging.debug("neumim dekodovat")
except Exception as e:
logging.debug("hostip.info down: " + str(e))
"""
try:
# XX could sort by the 2nd-level domain. Possibly cut the 3rd-level domain and keep only the 2nd, but then problematic double TLDs - co.uk, gov.ua... - have to be handled
pdns = urllib.request.urlopen(Domains.get_pdns_link(ip)).read().decode("utf-8")
items = re.findall("<div class='x[BA]'>(.*)</div>", pdns)
return items
except Exception as e:
logger.debug("chyba pri kontaktu s PDNS: " + str(e))
return None
"""
return None # #24 doesn't work
def ip2country_and_city(ip):
if not Pref.geoip:
return None, None
"""
return None, None # #23 service down
@staticmethod
def is_suspicious(domain, output='bool'):
"""
Scrape Safebrowsing service webpages and try to read out if the site is considered dangerous to visit.
:param domain:
:param output: 'bool' → True/False/None or 'attr' → int "1", "0", "" for an HTML attribute
:return:
"""
# contents = urllib.request.urlopen('http://www.google.com/safebrowsing/diagnostic?site=' + domain).read().decode("utf-8")
# with open("debugsf.tmp","a") as f:
# f.write(contents + "\n\n")
# if "Site is listed as suspicious" in contents:
# elif "This site is not currently listed as suspicious." in contents:
import requests
r = requests.get("http://www.google.com/safebrowsing/diagnostic?output=jsonp&site=" + domain, timeout=5)
if '"listed"' in r.text:
return True if output == 'bool' else "1"
if '"unlisted"' in r.text: # vratilo to alespon neco rozumneho
return False if output == 'bool' else "0"
else:
return None if output == 'bool' else ""
"""
try:
hostipApi = urllib.request.urlopen('http://api.hostip.info/get_html.php?ip=' + ip + '&position=true').read().decode(
"utf-8").split("\n")
# ['Country: CZECH REPUBLIC (CZ)', 'City: Prague', '', 'Latitude: 50.0833', 'Longitude: 14.4333', 'IP: 109.123.209.188', '']
return hostipApi[0].split(":")[1], hostipApi[1].split(":")[1]
except UnicodeDecodeError: # as e
logger.debug("neumim dekodovat")
except Exception as e:
logger.debug("hostip.info down: " + str(e))
return None, None
"""
return None, None # #23 service down
def get_pdns_link(ip):
return 'http://pdns.cert.at/p/dns?qry=' + ip
def is_suspicious(domain, output='bool'):
"""
Scrape Safebrowsing service webpages and try to read out if the site is considered dangerous to visit.
:param domain:
:param output: 'bool' → True/False/None or 'attr' → int "1", "0", "" for an HTML attribute
:return:
"""
if not Pref.safebrowsing:
return None
# contents = urllib.request.urlopen('http://www.google.com/safebrowsing/diagnostic?site=' + domain).read().decode("utf-8")
# with open("debugsf.tmp","a") as f:
# f.write(contents + "\n\n")
# if "Site is listed as suspicious" in contents:
# elif "This site is not currently listed as suspicious." in contents:
import requests
logger.debug("Safebrowsing %s", domain)
r = requests.get("http://www.google.com/safebrowsing/diagnostic?output=jsonp&site=" + domain, timeout=5)
if '"listed"' in r.text:
return True if output == 'bool' else "1"
if '"unlisted"' in r.text: # vratilo to alespon neco rozumneho
return False if output == 'bool' else "0"
else:
return None if output == 'bool' else ""
def get_ips_for_host(host):
try:
ips = socket.getaddrinfo(host, 80, 0, 0, socket.IPPROTO_TCP) # XXX what if a hostile site responds only on ports 80, 81, 8080
except socket.gaierror:
ips = []
return ips
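As a sanity check on the module-level URL helpers above, a few illustrative calls (hypothetical sketch; the import path is an assumption and the expected values follow from the regexes as written):

# Hypothetical usage sketch for the helpers in domains.py.
from mdmaug.lib.domains import url2domain, url2path, assure_url, domain2dir  # assumed path

print(url2domain("http://www.seznam.cz/url"))    # www.seznam.cz
print(url2path("http://seznam.cz/some/path"))    # /some/path
print(assure_url("example.com/a?b=1"))           # http://example.com/a?b=1
print(assure_url("rm -rf /"))                    # False – not recognised as a URL
print(domain2dir("https://www.Seznam.CZ/path"))  # www.seznam.cz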
......@@ -28,7 +28,7 @@ class Crawl(defaultdict):
with open(filename, 'r') as f:
return Crawl(state=load(f.read(), Loader=Loader))
def __init__(self, host=None, state=None, log_dir=None, cache_dir=None):
def __init__(self, host=None, state=None, log_dir=None, cache_dir=None, profile=None):
""" State muze obsahovat vystup __getstate__() (serializace YAMLem) """
self.default_factory = _Domain
self.screenfile = None # HTML output XXX
......@@ -39,6 +39,7 @@ class Crawl(defaultdict):
self.logDir = log_dir
# if cache_dir:
self.cacheDir = cache_dir
self.profile = profile
if state:
self.__setstate__(state)
......@@ -141,6 +142,7 @@ class _Address(set):
"""
Example (and yaml-serialization check):
logger = logging.getLogger("mdmaug")
c = Crawl()
c["seznam.cz"].urls["/aurl"].spyfile = "/soubor-spyfil"
c["seznam.cz"].urls["/aurl"].sourcefiles.append("/1.source")
......@@ -153,15 +155,15 @@ c["seznam.cz"].addresses["8.8.8.8"].country = "preague"
e = Crawl()
e.__setstate__( c.__getstate__() )
#e = dill.loads(dill.dumps(c))
logging.debug(str(c) == str(e))
logger.debug(str(c) == str(e))
logging.debug(c)
logger.debug(c)
output = dump(c.__getstate__(), Dumper=Dumper)
e = Crawl()
e.__setstate__(load(output, Loader=Loader))
logging.debug(e)
logging.debug(str(c) == str(e))
logger.debug(e)
logger.debug(str(c) == str(e))
"""
"""
......
......@@ -6,51 +6,50 @@ from peewee import Model, DateTimeField, IntegerField, CharField, JOIN, BigAutoF
from peewee import RawQuery
from ..config import Config
from ..domains import Domains
from ..domains import url2domain
# Do not print all queries to stderr.
logger = logging.getLogger('peewee')
logger.setLevel(logging.WARNING)
logging.getLogger('peewee').setLevel(logging.WARNING)
logger = logging.getLogger("mdmaug")
class DbModel(Model):
@staticmethod
def assureConnection():
logging.debug("Assure connection.")
logger.debug("Assure connection.")
try: # try running some command
# logging.debug("1")
# logger.debug("1")
Whitelist.select().count()
# logging.debug("2")
# logger.debug("2")
except: # OperationalError:
# logging.debug("3")
# logger.debug("3")
Config.connect()
# logging.debug("4")
# logger.debug("4")
try: # try running some command
# logging.debug("5")
# logger.debug("5")
Whitelist.select().count()
# logging.debug("6")
# logger.debug("6")
except:
logging.debug("7 - failed")
logger.debug("7 - failed")
raise
logging.debug("Connection to DB assured.")
logger.debug("Connection to DB assured.")
"""A base model that will use our MySQL database"""
@staticmethod
def connect():
logging.debug("connecting db....")
logger.debug("connecting db....")
# DbModel.Meta.myDb = Config.myDB
# Config.myDB.connect() # XX kupodivu toto neni potreba
# logging.debug(Whitelist.select().count())
# logging.debug("Connecting to DB.")
# logging.debug("Start test.")
# logging.debug(Whitelist.matches("www.mozilla.org"))
# logging.debug("End test.")
# logger.debug(Whitelist.select().count())
# logger.debug("Connecting to DB.")
# logger.debug("Start test.")
# logger.debug(Whitelist.matches("www.mozilla.org"))
# logger.debug("End test.")
@staticmethod
def disconnect():
logging.debug("... db ends.")
logger.debug("... db ends.")
Config.myDB.close()
class Meta:
......@@ -58,9 +57,9 @@ class DbModel(Model):
pass
# logging.debug("DVA")
# logger.debug("DVA")
# DbModel.connect()
# logging.debug(Config.myDB)
# logger.debug(Config.myDB)
class Status(DbModel):
id = BigAutoField()
......@@ -111,7 +110,7 @@ class Export(DbModel):
q += "(select case when MAX(timestamp IS NULL)=0 THEN max(timestamp) ELSE 0 END from export)"
q += " GROUP BY concat(`ip`,`port`) " # group by concat may be a performance issue
q += " ORDER BY `timestamp` DESC"
logging.debug(q)
logger.debug(q)
rq = RawQuery(Turris, q).execute()
print(rq)
for r in rq: # for r in rows:
......@@ -132,7 +131,7 @@ class Export(DbModel):
def export_confirm():
# Db.cur = Db.connection.cursor()
Export.insert(id=None).execute()
# logging.debug(Db.cur.execute("""INSERT into export (`id`) VALUES (NULL)"""))
# logger.debug(Db.cur.execute("""INSERT into export (`id`) VALUES (NULL)"""))
# Db.connection.commit()