74 lines
2.3 KiB
Python
74 lines
2.3 KiB
Python
|
|
import os, sys

from datetime import datetime

from dateutil import relativedelta

# Make the shared scraper package importable before `import dtss` below.
sys.path.append('/var/www/digisnaxx.ado/scrapers')

import dtss

# Environment bootstrap — presumably configures Django settings so the
# `events.models` import below works.  TODO(review): confirm what
# dtss.getReady() actually initializes.
dtss.getReady()

from time import sleep

from pprint import pprint as ppr

import pytz

from events.models import Organization, Scraper, Calendar, Event

import events.digitools as digitools
|
||
|
|
|
||
|
|
# This venue's listings are published in US Central time.
tz = pytz.timezone("US/Central")

# Fixed offset/zone suffix appended to scraped date strings so strptime's
# %z/%Z directives can parse them.  NOTE(review): hard-codes CST (-0600);
# during daylight saving time (CDT, -0500) parsed times will be an hour
# off — confirm whether that is acceptable.
tz_str = "-0600 UTC"

# Matches strings like "Fri Mar 01 2024 07:00 PM -0600 UTC".
DATETIME_FORMAT = '%a %b %d %Y %I:%M %p %z %Z'

# DATETIME_FORMAT_2 = '%a, %b %d, %Y %I %p %z %Z'
|
||
|
|
|
||
|
|
# Look up (or create on first run) the Organization row for this venue.
venue, created = Organization.objects.get_or_create(
    name="Subterranean",
    city="Chicago",
    website="https://subt.net/",
    is_venue = True
)

# Fetch this venue's Scraper record plus its pre-run item count and
# virtual calendar ('chi' presumably selects the Chicago region — TODO
# confirm), then reset the per-run item counter before scraping.
scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'chi')
scraper.items = 0
scraper.save()
|
||
|
|
|
||
|
|
def get_events(ps, event_type):
    """Scrape every event block from the parsed venue page and persist it.

    Parameters:
        ps: parsed page source (an lxml-style element with ``.xpath``) as
            returned by ``digitools.getSource``.
        event_type: category code forwarded to
            ``digitools.createBasicEvent`` (the caller passes "Mu").

    Side effects: creates events via ``digitools.createBasicEvent`` and
    increments the module-level ``scraper.items`` counter.  Blocks that
    fail to parse are logged and skipped.
    """
    contents = ps.xpath('.//*/div[@class="event-info-block"]')
    # The page omits the year, so assume the current one.  Hoisted out of
    # the loop — it is invariant for the whole run.
    # NOTE(review): listings that roll over into January of next year will
    # be stamped with the wrong year — confirm whether that matters.
    year = datetime.now().year
    for c in contents:
        try:
            event = {}
            date = c.xpath('.//p[@class="fs-18 bold mt-1r event-date"]/text()')[0]
            time = c.xpath('.//*/span[@class="door-time"]/text()')[0].replace("Doors: ", "").replace("/", "").strip()
            event['date'] = (' ').join([date, str(year), time, tz_str])
            event['scraper'] = scraper
            event['calendars'] = [scraper.calendar]
            event['title'] = c.xpath('.//p[@class="fs-18 bold mb-12 event-title"]/a/text()')[0]
            # Supporting acts are optional on the page.  Default to '' so
            # the len() check below never raises.  (Bug fix: previously a
            # missing 'support' key made len(event['support']) throw
            # KeyError, and the outer handler silently dropped the event.)
            event['support'] = ''
            try:
                event['support'] = c.xpath('.//p[@class="fs-12 supporting-talent"]/text()')[0]
            except IndexError:
                pass
            event['venue'] = c.xpath('.//p[@class="fs-12 venue"]/text()')[0]
            event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT)
            event['link'] = venue.website
            if len(event['support']) > 0:
                event['new_title'] = event['title'] + " w/ " + event['support']
            # Use the caller-supplied category instead of a hard-coded
            # "Mu" — the only current caller passes "Mu", so behavior is
            # unchanged while the dead parameter becomes meaningful.
            digitools.createBasicEvent(event, event_type, venue)
            scraper.items += 1
        except Exception as e:
            # Skip malformed event blocks but leave a trace for the operator.
            print("\nError: ", e)
            # ppr(event)
|
||
|
|
|
||
|
|
# --- entry point -----------------------------------------------------------
# Usage: script.py <run_env>   (run_env selects the browser configuration)
if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    # Bug fix: the original called br.close() on this path, but `br` is
    # never created when no argument is given, so the script died with a
    # NameError instead of exiting cleanly.
    print("No run_env")
    quit()

# Fetch the venue's listings page and scrape it.
ps = digitools.getSource(br, venue.website)
get_events(ps, "Mu")
sleep(3)

# Record run statistics (items added since item_count_start) and shut the
# browser down.
digitools.updateScraper(scraper, item_count_start)
br.close()
|