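"""Scrape upcoming events from the Magers & Quinn (Minneapolis) events
calendar and store them through the shared events/digitools pipeline."""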
import os
import sys

from datetime import datetime

from dateutil import relativedelta

sys.path.append('/var/www/digisnaxx.ado/scrapers')

import dtss

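# Project-specific bootstrap; the events app imports below rely on it having run.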
dtss.getReady()

from time import sleep
from pprint import pprint as ppr

import pytz

from events.models import Organization, Scraper
import events.digitools as digitools

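# Venue record for Magers & Quinn in Minneapolis (created on the first run).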
venue, created = Organization.objects.get_or_create(
    name="Magers & Quinn",
    city="Minneapolis",
    website="https://www.magersandquinn.com/events",
    is_venue=False
)

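# Scraper bookkeeping record and its starting item count for this source ('msp').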
scraper, item_count_start, virtcal = digitools.getScraper(venue, venue.website, 'msp')

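# Example strings these formats parse:
#   DATETIME_FORMAT:   "Tuesday, January 09 , 2024 07:00 PM"
#   DATETIME_FORMAT_2: "Tuesday, January 09 @ 07PM 2024"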
DATETIME_FORMAT = '%A, %B %d , %Y %I:%M %p'
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'


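# Parse each calendar day cell that carries an event and hand the parsed
# fields to digitools.createBasicEvent.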
def get_events(ps, event_type):
    contents = ps.xpath('.//*/div[@class="day has-event"]')
    for c in contents:
        try:
            event = {}
            day = c.xpath('.//*/div[@class="dd"]/text()')[0]
            month = c.xpath('.//*/div[@class="month"]/text()')[0]
            year = c.xpath('.//*/div[@class="year"]/text()')[0]
            event['scraper'] = scraper
            event['calendars'] = [scraper.calendar]
            event['title'] = c.xpath('.//*/h3/text()')[0]
            event['date'] = [month, day, year, c.xpath('.//*/p[@class="time"]/text()')[0]]
            event['date'] = " ".join(event['date'])
            event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT)
            event['link'] = "https://www.magersandquinn.com" + c.xpath('.//a[@class="event in-store"]/@href')[0]
            digitools.createBasicEvent(event, "Ed", venue)
            scraper.items += 1
        except Exception as e:
            # Likely an off-site event with no "in-store" link; fall back to the
            # off-site link and still create the event.
            event['link'] = "https://www.magersandquinn.com" + c.xpath('.//a[@class="event off-site"]/@href')[0]
            print("Error: ", e)
            ppr(event)
            digitools.createBasicEvent(event, "Ed", venue)
            print("\n\n+++\n\n")


links = digitools.createBasicURL("https://www.magersandquinn.com/events/")

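# The first command-line argument is the run_env handed to digitools.getBrowser.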
if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    sys.exit(1)

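# Fetch each events page, parse it, and pause briefly between requests.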
for link in links:
    ps = digitools.getSource(br, link)
    get_events(ps, "Ed")
    sleep(3)

# ppr(events)
br.close()

digitools.updateScraper(scraper, item_count_start)