Files
scrapers/Working/venues/mn.mpls.cabooze.py

68 lines
2.0 KiB
Python
Raw Normal View History

2025-11-30 16:29:50 -05:00
import os, sys
from datetime import datetime
from dateutil import relativedelta

# Django must be configured before events.models can be imported below.
sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from time import sleep
from pprint import pprint as ppr
import pytz
from events.models import Organization, Scraper
import events.digitools as digitools

# The site shows neither a year nor a timezone, so both are appended to the
# scraped date string before parsing with DATETIME_FORMAT.
# NOTE(review): the fixed -0600 offset is CST; it ignores daylight saving
# (CDT is -0500) and the "UTC" label is just filler for %Z — confirm intended.
tz_str = " -0600 UTC"
DATETIME_FORMAT = '%b %d %I:%M %p %Y %z %Z'
# Venue record for the Cabooze (Minneapolis); created on first run, fetched
# thereafter.  `website` doubles as the scrape URL below.
venue, created = Organization.objects.get_or_create(
    name="Cabooze",
    city="Minneapolis",
    website="https://www.cabooze.com/#/events",
    is_venue=True
)

# Scraper bookkeeping: per-run item counter baseline and the virtual calendar
# for the 'msp' (Minneapolis/St. Paul) market.
scraper, item_count_start, virtcal = digitools.getScraper(venue, venue.website, 'msp')
def get_events(ps, event_type):
    """Extract event rows from the parsed Cabooze events page and store them.

    ps         -- parsed page source supporting .xpath() (lxml-style), as
                  returned by digitools.getSource
    event_type -- category code passed through to digitools.createBasicEvent
                  (the caller passes "Mu")

    Increments scraper.items once per stored event.  A failure on a single
    row is printed and skipped so one bad row doesn't abort the whole scrape.
    """
    print("Getting events ...")
    contents = ps.xpath('.//*/div[@class="vp-event-row vp-widget-reset vp-venue-thecabooze"]')
    for c in contents:
        try:
            event = {}
            date = c.xpath('.//*/span[@class="vp-month-n-day"]/text()')[0]
            print(date)
            month = date.split(" ")[0]
            # renamed from `time` to avoid shadowing the stdlib module name
            start_time = c.xpath('.//*/span[@class="vp-time"]/text()')[0]
            # The site omits the year; assume January listings seen now belong
            # to next year.  NOTE(review): this only covers a Dec->Jan
            # rollover — a December scrape showing February would be wrong.
            year = datetime.now().year
            if month == "Jan":
                year = int(year) + 1

            event['scraper'] = scraper
            event['calendars'] = [scraper.calendar]

            event['title'] = c.xpath('.//*/div[@class="vp-event-name"]/text()')[0]
            event['date'] = " ".join([date, start_time, str(year)])

            # tz_str supplies the %z %Z fields the site doesn't provide.
            event['dateStamp'] = datetime.strptime(event['date'] + tz_str, DATETIME_FORMAT)

            event['link'] = "https://www.cabooze.com/" + c.xpath('.//a[@class="vp-event-link"]/@href')[0]
            digitools.createBasicEvent(event, event_type, venue)
            scraper.items += 1
        except Exception as e:
            # Best-effort scraping: report the failing row and keep going.
            print(e)
            ppr(event)
2026-01-24 19:01:47 -05:00
2025-11-30 16:29:50 -05:00
# --- entry point -------------------------------------------------------------
# Usage: python mn.mpls.cabooze.py <run_env>
if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    # sys.exit instead of quit(): quit() is an interactive-session helper
    # and is not guaranteed to exist in all script environments.
    sys.exit(1)

try:
    ps = digitools.getSource(br, venue.website)
    get_events(ps, "Mu")
    sleep(3)
    digitools.updateScraper(scraper, item_count_start)
finally:
    # Always release the browser, even if the scrape or DB update raised.
    br.close()