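# Scraper: Boston City Council public notices (https://www.boston.gov/public-notices).
# Walks the listing pages and stores each notice as a basic event record.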
import os, sys
from datetime import datetime
from dateutil import relativedelta

# Make the local scraper helpers importable, then initialise the environment.
sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()
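# Assumption: dtss.getReady() bootstraps the Django settings / database
# connection, which is why it must run before the events.models import below.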
from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
import events.digitools as digitools
# Look up (or create on first run) the venue this scraper reports against.
venue, created = Organization.objects.get_or_create(
    name="Boston City Council",
    city="Boston",
    website="https://www.boston.gov",
    is_venue=True
)
scraper, item_count_start, virtcal = digitools.getScraper(venue, 'bos')
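# Assumption: getScraper() returns the Scraper record keyed 'bos', the item
# count at the start of the run (used by updateScraper() at the end), and a
# calendar value that this script does not use directly.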
event_type = "Gv"  # event-type code passed through to createBasicEvent()

# Time Signatures
tz = pytz.timezone("US/Eastern")  # defined but not referenced below
DATETIME_FORMAT = '%B %d, %Y %I:%M%p'
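# Example (illustrative): "January 25, 2023 10:00AM" matches DATETIME_FORMAT;
# listing dates that deviate from this shape will raise in get_events() and
# the offending notice is skipped by its except block.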
def get_events(ps, event_type):
    print("Getting events ...")
    # Each notice row on the listing page.
    contents = ps.xpath('.//*/div[@class="g g--m0 n-li"]')
    ppr(contents)
    for c in contents:
        try:
            event = {}
            event['scraper'] = scraper
            event['calendars'] = [scraper.calendar]
            event['title'] = c.xpath('.//*/div[@class="n-li-t"]/a/text()')[0]
            event['link'] = venue.website + c.xpath('.//*/div[@class="n-li-t"]/a/@href')[0]
            # Keep the text before any '-' (e.g. the start of a date range), then parse it.
            event['date'] = c.xpath('.//*/li[@class="dl-i"]/span[@class="dl-d"]/text()')[0].replace('\n', '').split("-")[0].strip()
            event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT)
            digitools.createBasicEvent(event, event_type, venue)
            ppr(event)
            scraper.items += 1
        except Exception as e:
            # A malformed listing should not abort the whole run; log it and continue.
            print(e)
            ppr(event)
            print("\n\n+++\n\n")
# The first CLI argument is the run_env handed to digitools.getBrowser().
if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    sys.exit(1)
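# Walk the first six listing pages (?page=0 through ?page=5); the page count
# is fixed here rather than read from the site's pagination controls.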
for n in range(6):
    link = venue.website + "/public-notices?page=" + str(n)
    ps = digitools.getSource(br, link)
    get_events(ps, event_type)
    sleep(2)  # pause between page fetches
# Persist the scraper's updated counters, then shut the browser down.
digitools.updateScraper(scraper, item_count_start)
br.close()