import json
import logging
import subprocess
from glob import escape

from peewee import IntegrityError

from mdmaug.lib.model.crawl import Crawl
from .scan_controller import ScanController
from ..config import Config
from ..model.dbp import Encounter, Whitelist
from ..parser.traffic_log_parser import TrafficLogParser
from ...templates.crawl_view import CrawlView

logger = logging.getLogger("mdmaug")


class Api:

    def run(self, request):
        """ Accept command
        :type request: dict from URL request.
            /api/analyze=cached/http://example.com → {"api": True, "analyze": "cached", "page": "http://example.com"}
        """
        crawl = None
        if "analyze" in request:
            map_ = {"fresh": None, "cached": 1, "weekcache": 7, "oldcache": True, True: None}
            if request["analyze"] in map_:
                days = map_[request["analyze"]]
            else:
                days = int(request["analyze"])
            crawl = ScanController().launch(request["page"], days,
                                            request.get("autoprune") in ["y", "1", True],
                                            request.get("creation_spree") in ["y", "1", True])
        elif "aggregate" in request:
            crawl = self.aggregate(request)
        elif "scan" in request:
            if "date" not in request:
                request["date"] = ""
            crawl = ScanController().get_scan(escape(request["scan"]), scan=escape(request["date"]))
        elif "prune" in request:
            return ScanController.prune()
        elif "nicify" in request:
            return TrafficLogParser.getStylesheet() + TrafficLogParser.nicify_file(request["page"])
        elif "vote" in request:  # /api/vote/block/example.org/10.0.0.1
            return Encounter.vote(request["vote"], request["page"])
        elif "reset" in request:
            self.reset()
            return "reset"
        else:
            return "Unknown API method."

        if crawl:
            if isinstance(crawl, str):
                return crawl  # containing an error message
            elif request["api"] == "json":
                return CrawlView.output_json(crawl)
            else:
                return CrawlView.output_html(crawl)

    @staticmethod
    def aggregate(request):
        date_from = int(request["from"])
        date_to = int(request["to"])
        crawl = Crawl()
        scan_count = set()
        domain_count = set()
        for domain, scan in ScanController.get_all_scans():
            if date_from < int(scan) < date_to:
                try:
                    scan_count.add("/".join([domain, scan]))
                    domain_count.add(domain)
                    crawl += Crawl.load_from_scan(domain, scan)
                except FileNotFoundError:
                    logger.warning("Wrong analysis stored at %s/%s", domain, scan)
        crawl.title = f"Merged {len(scan_count)} scans from {len(domain_count)} domains"
        if not crawl:
            crawl = "No scan between these dates."
        return crawl

    @staticmethod
    def reset():
        logger.debug("resetting running browsers")
        with open(Config.config_file, 'w') as f:  # clear the queue
            json.dump({}, f)
        subprocess.call(["pkill", Config.browser])  # kill frozen browsers

    # adds the 2nd-level domain to the whitelist
    def whitelist(self):
        logger.debug("whitelisting")
        # Db.cur = Db.connection.cursor()
        # self._logger.debug(Db.cur.execute("""REPLACE INTO whitelist set domain = %s""", (self.origin_domain, )))
        # Db.connection.commit()
        # Db.cur.close()
        return  # not yet implemented
        try:
            Whitelist.insert(domain=self.origin_domain).execute()
        except IntegrityError:
            pass  # already inserted

    @staticmethod
    def get_undecided():
        logger.debug("XXX not implemented yet - table of undecided domains since last export")

    # elif "inspect" in request:
    #     # XXX → migrate to dbp
    #     output = []
    #     for row in Config.db.execute_sql("SELECT url from encounter where host = %s", (request["inspect"],)):
    #         output.append(row[0])
    #     return "<br>".join(output)
".join(output) # elif "decide" in request: # XX deprecated? # return self.get_undecided() # elif "whitelist" in request: # XXX not implemented yet # """url = path.split("/", 3) # if len(url) > 3: # self._setWebsite(url[2]) # osetrit self.website, ze je URL, a nikoli shell # logger.debug("XXX nejsem si jist, zda url je spravne na url[2]") # XXX # logger.debug(url) # XXX # quit() # XXX # logger.debug(self.website) # logger.debug(self.origin_domain) # return self.whitelist()""" # return "Implement first if needed."