# NOTE: removed duplicated extraction artifacts ("198 lines / 5.8 KiB / Python")
# that were not part of the original source.
# -*- coding: utf-8 -*-

import sys

# Make the sibling `core` package importable when running from this directory.
# NOTE(review): a proper package install would be preferable to a path hack.
sys.path.append('..')

import datetime
import uuid
from time import sleep

from bs4 import BeautifulSoup

from core.mysql_wrapper import get_anunciosdb
from core.scrapping_utils import UrlAttack


class Explorer:
    """Service that polls for exploration work, composes listing URLs,
    runs :class:`ExploringTask` jobs and pushes the resulting referencias
    onto the task queue.
    """

    # Seconds to wait before re-checking when there is no pending work.
    sleep_time_no_work = 60
    # Seconds to wait between retries when a backing service is down.
    sleep_time_no_service = 600

    def __init__(self):
        """Connect to the backing stores and initialise retry counters."""
        try:
            self.anunciosdb = get_anunciosdb()
        except Exception:
            # Fixed: set the attribute on failure so later use raises a
            # clear error instead of AttributeError.
            self.anunciosdb = None
            print("Could not connect to anuncios DB")

        # TODO: pending MongoDB wrapper implementation (get_task_log_db).
        # Fixed: the original `self.task_log_db = #get_task_log_db()` was a
        # syntax error.
        self.task_log_db = None

        self.max_db_retries = 3
        self.db_retries = 0
        self.max_queue_retries = 3
        self.queue_retries = 0

    def start(self):
        """Run the main service loop until a backing service goes down or a
        task cannot be prepared, then stop the service."""
        while True:
            if not self.there_is_work():
                # Fixed: sleep_time_no_work is a class attribute; the
                # original referenced it as a bare name (NameError).
                sleep(self.sleep_time_no_work)
                continue

            if not self.database_is_up():
                break

            if not self.queue_is_up():
                break

            # Fixed: pass the composed URL, not the bound method object.
            current_task = ExploringTask(self.compose_listing_url())
            # Fixed: is_ready_to_explore is a method; the original tested
            # the bound-method object, which is always truthy.
            if current_task.is_ready_to_explore():
                current_task.explore()
            else:
                break

            if current_task.status == 'referencias ready':
                current_referencias = current_task.get_referencias()

                for referencia in current_referencias:
                    self.post_task_to_queue(referencia)
                current_task.update_status('Sent to queue')

        self.stop()

    def stop(self):
        # TODO: stop the service.
        pass

    def there_is_work(self):
        # TODO: check the task DB (recent / monthly counts, etc.) to decide
        # whether there is work to do. Placeholder: always report work so the
        # loop proceeds; the original body was empty (returned None).
        return True

    def database_is_up(self):
        """Ping the anuncios DB, retrying up to max_db_retries with a pause
        between attempts. Returns True when reachable, False otherwise."""
        while self.db_retries <= self.max_db_retries:
            try:
                self.anunciosdb.ping()
                self.db_retries = 0
                return True
            except Exception:
                # Fixed: class attribute referenced via self (was NameError).
                sleep(self.sleep_time_no_service)
                self.db_retries += 1

        return False

    def queue_is_up(self):
        """Check the task queue, retrying like database_is_up. Returns bool."""
        while self.queue_retries <= self.max_queue_retries:
            try:
                # TODO: code that tests whether redis is alive.
                self.queue_retries = 0
                return True
            except Exception:
                # Fixed: class attribute referenced via self (was NameError).
                sleep(self.sleep_time_no_service)
                self.queue_retries += 1

        return False

    def compose_listing_url(self):
        """Decide which listing URL to compose and return it.

        Fixed: the original had comment-only assignments for tipo/numero
        (syntax errors), a broken line continuation in the concatenation,
        and concatenated numero without str().
        """
        raiz = 'https://www.idealista.com/'
        tipo = 'venta'  # TODO: random logic
        ciudad = 'barcelona'
        numero = 1  # TODO: random logic

        url = (raiz + tipo + '-garajes/' + ciudad + '-' + ciudad + '/'
               + 'pagina-' + str(numero) + '.htm')

        return url

    def post_task_to_queue(self, referencia):
        # TODO: send the task to the redis queue.
        pass
class ExploringTask:
    """One exploration of a single listing URL: fetch the page, extract the
    ad referencias that are not yet in the capturas table, and track the
    task's status along the way."""

    def __init__(self, url):
        """Store the target URL, assign an id and connect to the stores.

        url -- listing page URL to explore.
        """
        self.target_url = url
        self.id = str(uuid.uuid4())
        self.update_status('Pending')

        try:
            self.anunciosdb = get_anunciosdb()
        except Exception:
            self.anunciosdb = None
            self.update_status('Unable to connect to anuncios DB')

        # TODO: pending MongoDB wrapper implementation.
        # Fixed: the original try block had a comment-only body (syntax
        # error) and never assigned self.task_log_db, so
        # is_ready_to_explore raised AttributeError.
        self.task_log_db = None

    def update_status(self, new_status):
        """Set the task status and persist it to the task log DB."""
        self.status = new_status
        self._log_in_taskdb()

    def is_ready_to_explore(self):
        """Return True when both backing stores are connected."""
        return self.anunciosdb is not None and self.task_log_db is not None

    def explore(self):
        """Fetch the target URL and extract referencias, updating status."""
        # Fixed: the URL attribute is self.target_url (self.url never existed).
        attack = UrlAttack(self.target_url)
        attack.attack()

        if attack.success:
            self._extract_referencias(attack.get_text())
            if self.new_listings:
                self.update_status('referencias ready')
            else:
                self.update_status('Failure - No listings in HTML')
        else:
            self.update_status('Failure - Bad request')

    def get_referencias(self):
        """Return the referencias gathered by the last explore()."""
        return self.referencias

    def _log_in_taskdb(self):
        # TODO: record status and other task data in a MongoDB or similar.
        pass

    def _extract_referencias(self, html):
        """Extract referencias from *html*, discard the ones that already
        exist in the capturas database, and record whether any listings
        appeared and whether any of them are new.
        """
        # Fixed: parse the html argument (self.html never existed) and use
        # the local name (the original referenced an undefined 'sopa').
        soup = BeautifulSoup(html, 'html5lib')
        ads = soup.find_all(class_="item")
        self.referencias = [ad["data-adid"] for ad in ads
                            if self._is_new_listing(ad["data-adid"])]
        self.new_listings = bool(self.referencias)

    def _is_new_listing(self, referencia):
        """Return True when *referencia* is not yet in the capturas table."""
        query_statement = """SELECT count(referencia)
                             FROM capturas
                             WHERE referencia = %s"""
        query_params = (referencia,)
        cursor_result = self.anunciosdb.query(query_statement, query_params)

        result = cursor_result.fetchone()
        return result[0] == 0