more scrapers
This commit is contained in:
75
Working/venues/wa.pdx.mississippi.py
Normal file
75
Working/venues/wa.pdx.mississippi.py
Normal file
@@ -0,0 +1,75 @@
|
||||
import os, sys
|
||||
from datetime import datetime
|
||||
from dateutil import relativedelta
|
||||
|
||||
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||
import dtss
|
||||
dtss.getReady()
|
||||
|
||||
from time import sleep
|
||||
from pprint import pprint as ppr
|
||||
import pytz
|
||||
|
||||
from events.models import Organization, Scraper, Calendar, Event
|
||||
import events.digitools as digitools
|
||||
|
||||
# --- scraper-local constants --------------------------------------------
# Fixed Pacific offset appended to every scraped date string so strptime
# can parse an aware datetime via the trailing "%z %Z".
# NOTE(review): hard-coded -0800 ignores PDT (DST) — confirm intended.
tz_str = "-0800 UTC"
# e.g. "Friday, March 01, 2024 08:00 PM -0800 UTC" (time includes minutes)
DATETIME_FORMAT = '%A, %B %d, %Y %I:%M %p %z %Z'
# Fallback format when the site omits minutes, e.g. "... 8PM -0800 UTC"
DATETIME_FORMAT_2 = '%A, %B %d, %Y %I%p %z %Z'

# Fetch (or create on the first run) the Organization row for this venue.
venue, created = Organization.objects.get_or_create(
    name="Mississippi Studios",
    city="Portland",
    website="https://mississippistudios.com/full-view/",
    is_venue = True
)

# Look up the Scraper record for this venue/region ('pdx'), remember the
# item count at the start of the run, and reset the per-run counter.
scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'pdx')
scraper.items = 0
scraper.save()
|
||||
|
||||
def get_events(contents, event_type):
    """Parse event cards out of a scraped calendar page and store them.

    Walks each ``div.event`` card in *contents* (a parsed page tree from
    ``digitools.getSource``), builds an event dict (date, title, link,
    timestamp), and persists it with ``digitools.createBasicEvent``.
    Increments the module-level ``scraper.items`` counter per stored event.

    contents   -- parsed page source to scrape.  BUG FIX: the original
                  reassigned this from the global ``ps``, silently ignoring
                  the argument; it now actually uses the parameter.
    event_type -- short type code forwarded to createBasicEvent.  BUG FIX:
                  previously unused ("Mu" was hard-coded); callers all pass
                  "Mu", so behaviour is unchanged.
    """
    cards = contents.xpath('.//*/div[@class="event"]/div/div[@class="event__inner"]')
    i = 0
    for c in cards:
        i += 1
        try:
            event = {}
            # Date text lives in the card's <h5>; strip layout whitespace.
            date = c.xpath('.//*/h5/text()')[0].replace("\n", "").replace("\t", "")
            # NOTE(review): this xpath is absolute (matches the whole page,
            # not just this card) and is indexed with ``i``, which is 1 on
            # the first card — presumably skipping a leading header span;
            # confirm against the live markup.
            time = c.xpath('//div/div/div[4]/p[1]/span/text()')[i].split("/")[0].replace("Doors: ", "").strip()
            # (The original assigned event['date'] twice with the same
            # value; the duplicate line was removed.)
            event['date'] = (' ').join([date, time, tz_str])
            event['scraper'] = scraper
            event['calendars'] = [scraper.calendar]
            event['title'] = c.xpath('.//*/h2/a/text()')[0]
            try:
                event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT)
            except ValueError:
                # Narrowed from a bare except: only a format mismatch should
                # trigger the minutes-less fallback format.
                event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT_2)
            event['link'] = c.xpath('.//*/h2/a/@href')[0]
            digitools.createBasicEvent(event, event_type, venue)
            scraper.items += 1
        except Exception as e:
            # Best-effort scraping: report and skip a malformed card rather
            # than aborting the whole run.
            print("\nError: ", e)
|
||||
|
||||
# --- entry point ---------------------------------------------------------
# Requires one CLI argument (the run environment) used to pick a browser.
if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    # BUG FIX: the original called br.close() here, but ``br`` is never
    # bound when no argument is supplied — that path raised NameError
    # instead of exiting cleanly with the "No run_env" message.
    print("No run_env")
    quit()

# Page 1 of the venue calendar.
ps = digitools.getSource(br, venue.website)
get_events(ps, "Mu")
sleep(3)  # be polite between page loads

# Page 2: the site paginates by appending a page number to the URL.
link = venue.website + "2"
print(link)
ps = digitools.getSource(br, link)
get_events(ps, "Mu")
sleep(3)

# Persist the final item counts and release the browser.
digitools.updateScraper(scraper, item_count_start)
br.close()
|
||||
Reference in New Issue
Block a user