# -*- coding: utf-8 -*-

import sys
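
# Make the parent directory importable so that the core.* and db_layer.*
# imports below resolve when this module is run directly as a script.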
sys.path.append("..")

import uuid
import datetime
from time import sleep
from bs4 import BeautifulSoup
import re
from random import randint, choice
import mysql.connector

from core.mysql_wrapper import get_anunciosdb, get_tasksdb
from core.config import monthly_new_ads_target, working_hours
from core.scrapping_utils import UrlAttack
from core.alerts import alert_master
from db_layer.capturing_tasks_interface import capturing_interface
from core import my_logger
import logging


class Explorer:
    """
    Daemon with the full flow of execution: generating a listing page URL,
    requesting the page, scraping the ad references and storing logs in the
    task database.
    """

    sleep_time_no_work = 60
    sleep_time_no_service = 600
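    # Maps the randint(1, 2) draw in _compose_listing_url to idealista's two
    # listing types: "alquiler" (rental) and "venta" (sale).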
    ad_types = {"1": "alquiler", "2": "venta"}

    def __init__(self) -> None:
        """
        Connect to the databases and set up initial parameters.
        """
        try:
            self.anunciosdb = get_anunciosdb()
            self.tasksdb = get_tasksdb()
        except Exception:
            print("Could not connect to the anuncios/tasks databases")

        self.max_db_retries = 3
        self.db_retries = 0
        self.max_queue_retries = 3
        self.queue_retries = 0

    def start(self) -> None:
        """
        Full flow of execution. Checks whether it should capture a URL, tries
        to do so and stores the result if successful.

        :return: None
        """
        logging.info("Starting explorer")
        while True:
            if not self._is_there_work():
                logging.info("Waiting")
                print("{}: Waiting. No work".format(datetime.datetime.now()))
                sleep(Explorer.sleep_time_no_work)
                continue

            if not self._database_is_up():
                alert_master(
                    "SQL DOWN",
                    "El explorer informa de que SQL esta caida. Actividad detenida",
                )
                raise ConnectionError("Unable to connect to database")

            current_task = ExploringTask(self._compose_listing_url())
            current_task.explore()
            logging.info("Exploring task done...")

            if current_task.status == "Referencias ready":
                referencias = current_task.get_referencias()
                for referencia in referencias:
                    capturing_interface.create_capturing_task(
                        referencia, current_task.id
                    )
                current_task._update_status("Sent to queue")
                logging.info("The task was successful.")

    def _is_there_work(self) -> bool:
        """
        Checks whether it should try to scrape a listing page according to
        limits and cooldowns.

        :return: True if it should work, False otherwise
        """
        if any(
            [
                self._check_if_recent_task(),
                not self._in_working_hours(),
                (
                    self._get_referencias_acquired_today()
                    >= self._get_max_referencias_for_today()
                ),
                (self._get_tasks_created_today() >= self._get_max_tasks_today()),
            ]
        ):
            return False

        return True

    def _database_is_up(self) -> bool:
        """
        Checks whether the db is reachable, with some retries.

        :return: True if the db is reachable, False if not
        """
        while self.db_retries <= self.max_db_retries:
            try:
                self.anunciosdb.ping()
                self.db_retries = 0
                return True
            except Exception:
                sleep(Explorer.sleep_time_no_service)
                self.db_retries += 1

        return False

    @staticmethod
    def _in_working_hours() -> bool:
        """
        Checks whether now is within the working hours of the daemon.

        :return: True if so, False if not
        """
        return (
            working_hours["start"]
            <= datetime.datetime.now().time()
            <= working_hours["end"]
        )

    def _get_referencias_acquired_today(self) -> int:
        """
        Queries the database to obtain the count of scraped ads in the last 24h.

        :return: the resulting count
        """
        query_statement = """ SELECT count(referencia)
                              FROM primera_captura_full
                              WHERE fecha_captura >= now() - INTERVAL 1 DAY;
                          """
        cursor_result = self.anunciosdb.query(query_statement)

        return cursor_result.fetchone()[0]

    def _get_max_referencias_for_today(self) -> float:
        """
        Queries the database for the number of captured ads in the last 30 days
        and computes the max number of ad references to obtain today.

        :return: the max number of references
        """
        query_statement = """ SELECT count(referencia)
                              FROM primera_captura_full
                              WHERE fecha_captura >= now() - INTERVAL 30 DAY;
                          """
        cursor_result = self.anunciosdb.query(query_statement)
        new_referencias_last_30 = cursor_result.fetchone()[0]

        deviation = (
            monthly_new_ads_target - new_referencias_last_30
        ) / monthly_new_ads_target
        max_referencias = (monthly_new_ads_target / 30) * (1 + deviation)
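        # Worked example (hypothetical numbers): with a monthly target of
        # 3000 ads and 2400 captured in the last 30 days, deviation is 0.2
        # and the daily cap becomes (3000 / 30) * 1.2 = 120 references, so
        # the daemon speeds up when behind target and slows down when ahead.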

        return max_referencias

    def _get_tasks_created_today(self) -> int:
        """
        Queries the database for the number of exploring tasks created in the
        last 24h.

        :return: number of exploring tasks created
        """
        query_statement = """ SELECT count(uuid)
                              FROM exploring_tasks_logs
                              WHERE status = 'Attacked'
                              AND write_time >= now() - INTERVAL 1 DAY;
                          """
        cursor_result = self.tasksdb.query(query_statement)
        tasks_created_today = cursor_result.fetchone()[0]

        return tasks_created_today

    def _get_max_tasks_today(self) -> float:
        """
        Computes the current task goal.

        :return: max current tasks target
        """
        return (self._get_max_referencias_for_today() / 30) * 6

    def _check_if_recent_task(self) -> int:
        """
        Queries the db for the number of tasks created in the last 10 minutes.

        :return: the number of recently created tasks
        """
        query_statement = """ SELECT count(uuid)
                              FROM exploring_tasks_logs
                              WHERE status = 'Attacked'
                              AND write_time >= now() - INTERVAL 10 MINUTE
                          """
        cursor_result = self.tasksdb.query(query_statement)

        return cursor_result.fetchone()[0]

    @staticmethod
    def _compose_listing_url() -> str:
        """
        Generates a listing page URL randomly.

        :return: the listing page URL
        """
        root = "https://www.idealista.com/"
        ad_type = Explorer.ad_types[str(randint(1, 2))]
        city = "barcelona"
        page_number = str(randint(1, 45))
        order_string = choice(
            [
                "?ordenado-por=fecha-publicacion-asc",
                "?ordenado-por=fecha-publicacion-desc",
            ]
        )
        url = (
            root
            + ad_type
            + "-garajes/"
            + city
            + "-"
            + city
            + "/"
            + "pagina-"
            + page_number
            + ".htm"
            + order_string
        )
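        # Example of a generated URL (random type, page and ordering):
        # https://www.idealista.com/alquiler-garajes/barcelona-barcelona/pagina-7.htm?ordenado-por=fecha-publicacion-asc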

        return url


class ExploringTask:
    """
    Task object wrapping the process of attempting to capture a listing page,
    parsing the ad references and sending them to the db.
    """

    def __init__(self, url: str) -> None:
        """
        Initialize with task parameters and mark the task as being worked on
        in the task queue.

        :param url: string with the listing page url to be captured
        """
        self.anunciosdb = get_anunciosdb()
        self.tasksdb = get_tasksdb()
        self.target_url = url
        self.id = str(uuid.uuid4())
        self._update_status("Pending")

    def _update_status(self, new_status: str) -> None:
        """
        Updates the task status and persists it in the task queue.

        :param new_status: string describing the new status
        :return: None
        """
        self.status = new_status
        self._log_in_tasksdb()

    def explore(self) -> None:
        """
        Main flow of work.

        :return: None
        """
        attack = UrlAttack(self.target_url)
        attack.attack()
        self._update_status("Attacked")

        if not attack.success:
            self._update_status("Failure - Bad request")
            return

        self._validate_referencias(attack.get_text())
        self._extract_referencias(attack.get_text())

        if self.referencias:
            self._update_status("Referencias ready")
        elif self.there_are_referencias:
            self._update_status("Failure - No new referencias in HTML")
        else:
            self._update_status("Failure - HTML with no referencias")

    def _log_in_tasksdb(self) -> None:
        """
        Logs status in the task db.

        :return: None
        """
        query_statement = """INSERT INTO exploring_tasks_logs
                             (uuid, write_time, status)
                             VALUES (%(uuid)s, NOW(), %(status)s)"""
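        # %(uuid)s and %(status)s are pyformat placeholders; the wrapper is
        # assumed to bind query_parameters through the MySQL connector
        # rather than through Python string formatting.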

        query_parameters = {"uuid": self.id, "status": self.status}

        self.tasksdb.query(query_statement, query_parameters)

    def _validate_referencias(self, html: str) -> None:
        """
        Checks that the ad references found in the HTML code have a valid
        format, alerting the master otherwise.

        :param html: string with HTML code of the listing page
        :return: None
        """
        soup = BeautifulSoup(html, "html5lib")
        ads = soup.find_all(class_="item")
        pattern = "^[0-9]{3,20}$"
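        # A valid reference is a bare number of 3 to 20 digits: "87654321"
        # passes, while "ad-87654321" or an empty data-adid raises the alert.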

        for ad in ads:
            if not re.match(pattern, ad["data-adid"]):
                alert_master(
                    "Alerta - Referencias no válidas",
                    """Una tarea de exploración ha considerado inválida
                    una referencia. El texto de la referencia era : {}
                    """.format(
                        ad["data-adid"]
                    ),
                )
                break

    def _extract_referencias(self, html: str) -> None:
        """
        Scrapes the ad references out of the HTML code and stores them.

        :param html: string with HTML code of the listing page
        :return: None
        """
        soup = BeautifulSoup(html, "html5lib")
        ads = soup.find_all(class_="item")
        self.there_are_referencias = bool(ads)
        self.referencias = []
        for ad in ads:
            if self._is_new_listing(ad["data-adid"]):
                self.referencias.append(ad["data-adid"])

    def _is_new_listing(self, referencia: str) -> bool:
        """
        Checks whether an ad reference already exists in the db.

        :param referencia: the ad reference to look up
        :return: True if it is new, False if not
        """
        query_statement = """SELECT count(referencia)
                             FROM capturas
                             WHERE referencia = %s"""
        query_params = (referencia,)
        cursor_result = self.anunciosdb.query(query_statement, query_params)

        result = cursor_result.fetchone()
        return result[0] == 0

    def get_referencias(self) -> list:
        """
        Gets the references.

        :return: list of ad references
        """
        return self.referencias


if __name__ == "__main__":
    explorer = Explorer()
    explorer.start()