import os, sys
from datetime import datetime
from dateutil import relativedelta

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
import events.digitools as digitools

# Venue row for the Cedar Cultural Center; created on the first run,
# fetched on every run after that.
venue, created = Organization.objects.get_or_create(
    name="Cedar Cultural Center",
    city="Minneapolis",
    website="https://www.thecedar.org",
    is_venue=True,
)
scraper, item_count_start, virtcal = digitools.getScraper(venue, venue.website, 'msp')

# Date/time layouts observed on Cedar event pages. Pages do not use a single
# consistent format, so parsing tries each of these in order.
DATETIME_FORMAT = '%A, %B %d, %Y %I:%M %p'
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'
DATETIME_FORMAT_3 = '%A, %B %d at %I:%M%p %Y'
DATETIME_FORMAT_4 = '%A, %B %d at %I%p %Y'
DATETIME_FORMAT_5 = '%A, %B %d @%I%p %Y'
ALL_DATETIME_FORMATS = (
    DATETIME_FORMAT,
    DATETIME_FORMAT_2,
    DATETIME_FORMAT_3,
    DATETIME_FORMAT_4,
    DATETIME_FORMAT_5,
)


def get_events(ps):
    """Scrape every event detail page linked from the listing page.

    Parameters
    ----------
    ps : lxml element tree of the listing page (as returned by
        ``digitools.getSource``).

    Side effects
    ------------
    Fetches each event page through the module-level browser ``br``,
    creates events via ``digitools.createBasicEvent``, and increments
    ``scraper.items`` once per successfully parsed event.

    Events whose title/date/time cannot be extracted, or whose timestamp
    matches none of the known formats, are logged and skipped instead of
    aborting the whole run.
    """
    links = ps.xpath('.//*/div[@class="summary-title"]/a/@href')
    print("Length of Links: ", len(links))
    # The listing page repeats links; dedupe before fetching.
    links = list(set(links))
    print("New Length of Links: ", len(links))

    for l in links:
        # Blog posts share the listing markup but are not events.
        if "cedar-news-blog" in l:
            continue

        pse = digitools.getSource(br, "https://www.thecedar.org" + l)
        event = {}
        event['scraper'] = scraper
        event['calendars'] = [scraper.calendar]
        event['link'] = "https://www.thecedar.org" + l

        # Squarespace renders the start time under one of two class names;
        # try the "-start" variant first, then the plain one.
        # IndexError is what `[0]` raises when the xpath matches nothing.
        try:
            time = pse.xpath('.//*/time[@class="event-time-localized-start"]/text()')[0]
            date = pse.xpath('.//*/time[@class="event-date"]/text()')[0]
            event['title'] = pse.xpath('.//*/h1[@class="eventitem-title"]/text()')[0]
        except IndexError:
            try:
                time = pse.xpath('.//*/time[@class="event-time-localized"]/text()')[0]
                date = pse.xpath('.//*/time[@class="event-date"]/text()')[0]
                event['title'] = pse.xpath('.//*/h1[@class="eventitem-title"]/text()')[0]
            except Exception as e:
                print(e)
                print("failed event: ", event)
                # BUG FIX: previously fell through with `date`/`time`
                # unbound, crashing the run with a NameError.
                continue

        dateStamp = date + " " + time
        # Try every known layout; pages are not consistent about which
        # one they use (previously only DATETIME_FORMAT was attempted,
        # so any other layout raised an unhandled ValueError).
        parsed = None
        for fmt in ALL_DATETIME_FORMATS:
            try:
                parsed = datetime.strptime(dateStamp, fmt)
                break
            except ValueError:
                continue
        if parsed is None:
            print("failed event: ", event)
            continue

        event['dateStamp'] = parsed
        digitools.createBasicEvent(event, "Mu", venue)
        # Count only events that actually made it through parsing.
        scraper.items += 1


# --- driver ---------------------------------------------------------------
# The run environment (e.g. which browser/profile to use) must be supplied
# as the first CLI argument; bail out early otherwise.
if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

calendar_url = 'https://www.thecedar.org/listing'
ps = digitools.getSource(br, calendar_url)
get_events(ps)
br.close()
digitools.updateScraper(scraper, item_count_start)