more scrapers

This commit is contained in:
2026-01-24 19:01:47 -05:00
parent 7013d8327a
commit 3c4a41ae2c
58 changed files with 1614 additions and 2988 deletions

View File

@@ -0,0 +1,79 @@
# Scraper for Guthrie Theater (Minneapolis): loads/creates the venue's
# Organization record and scraper bookkeeping, then scrapes its site below.
# NOTE: statement order here matters — the sys.path entry must precede
# `import dtss`, and dtss.getReady() (presumably Django setup — TODO confirm)
# must run before the `events.models` import will resolve.
import os, sys
from datetime import datetime
from dateutil import relativedelta
sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()
from time import sleep
from pprint import pprint as ppr
import pytz
from events.models import Organization, Scraper
import events.digitools as digitools
# Current calendar year as a string; appears unused in this chunk
# (get_events computes its own `year`).
current_year = str(datetime.now().year)
# Fetch-or-create the venue row; `created` flags whether it was newly inserted.
venue, created = Organization.objects.get_or_create(
    name="Guthrie Theater",
    city="Minneapolis",
    website="https://www.guthrietheater.org",
    is_venue = True
)
# Scraper bookkeeping: scraper record, starting item count, virtual calendar.
scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')
event_type = "Mu"
# Fixed UTC-offset suffix appended to scraped date text; "-0600" is US Central
# standard time — NOTE(review): does not track daylight saving; confirm intent.
tz_str = "-0600 UTC"
# Matches strings like "Jan 05 2026 07:30 PM -0600 UTC" built in get_events.
DATETIME_FORMAT = '%b %d %Y %I:%M %p %z %Z'
def get_events(ps, event_type):
    """Scrape Guthrie Theater event listings and create event records.

    Args:
        ps: parsed listing page (lxml-style object supporting .xpath).
        event_type: event-type code from the caller. NOTE(review): currently
            ignored — the createBasicEvent call below hard-codes 'Th' while
            the module sets event_type = "Mu"; confirm which is intended.

    Side effects:
        Creates events via digitools.createBasicEvent, increments the
        module-level scraper.items counter, and fetches each event's detail
        page through the module-level browser `br` (built in the entry
        section of this script).
    """
    contents = ps.xpath('.//*/div[@class="image-callouts-item-inner"]')
    events = []
    year = datetime.today().year
    # Pass 1: collect title + absolute link for each listing card.
    for card in contents:
        try:
            event = {}
            event['title'] = card.xpath('.//figcaption/p/text()')[0]
            event['link'] = card.xpath('.//figcaption/a/@href')[0]
            event['link'] = venue.website + event['link']
            event['scraper'] = scraper
            event['calendars'] = scraper.calendar
            # ppr(event)
            events.append(event)
        except Exception as exc:  # card missing caption/link nodes -> skip it
            print(exc)
    # Pass 2: fetch each event's detail page for dates/times.
    # NOTE(review): [:-3] drops the last three cards — presumably non-event
    # promo tiles at the end of the listing; confirm against the live page.
    # (Renamed loop/exception variables: the original reused `e` for both the
    # loop variable and `except ... as e`, which Python deletes after the
    # except clause — a shadowing hazard.)
    for ev in events[:-3]:
        try:
            # Use a distinct name so the `ps` parameter isn't clobbered.
            detail = digitools.getSource(br, ev['link'])
            nc = detail.xpath('.//div[@class="modal-row-inner buytix"]')
            for n in nc:
                # e.g. h4 text "Thursday, Jan 05" -> keep "Jan 05".
                date = n.xpath('.//div[@class="buytix-info"]/h4/text()')[0].split(',')[1].strip()
                time = n.xpath('.//div[@class="buytix-time"]/p/text()')[0].strip()
                ev['datetime'] = "{0} {1} {2} {3}".format(date, year, time, tz_str)
                ev['dateStamp'] = datetime.strptime(ev['datetime'], DATETIME_FORMAT)
                digitools.createBasicEvent(ev, 'Th', venue)
                scraper.items += 1
        except Exception as exc:  # detail page failed to load/parse -> skip
            print("Error: ", exc)
# --- entry point -----------------------------------------------------------
# Usage: python <script> <run_env>
# The single CLI argument selects the browser/run environment.
if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    # Exit non-zero so cron/shell callers can detect the misuse.
    # (Was quit(), which depends on the site module and exits with status 0.)
    sys.exit(1)
shows = "/shows-and-tickets/"
ps = digitools.getSource(br, venue.website+shows)
get_events(ps, event_type)
sleep(3)
br.close()
# Persist the final item count / run status for this scraper.
digitools.updateScraper(scraper, item_count_start)