updated naming conventions
This commit is contained in:
66
Working/govt/ma.boston.citycouncil.py
Normal file
66
Working/govt/ma.boston.citycouncil.py
Normal file
@@ -0,0 +1,66 @@
|
||||
import os, sys
|
||||
from datetime import datetime
|
||||
from dateutil import relativedelta
|
||||
|
||||
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||
import dtss
|
||||
dtss.getReady()
|
||||
|
||||
from time import sleep
|
||||
from pprint import pprint as ppr
|
||||
import pytz
|
||||
|
||||
from events.models import Organization, Scraper
|
||||
import events.digitools as digitools
|
||||
|
||||
# --- Venue / scraper setup -------------------------------------------------
# get_or_create matches on EVERY keyword argument below, so changing any of
# these values would create a new Organization row rather than update this one.
venue, created = Organization.objects.get_or_create(
    name="Boston City Council",
    city="Boston",
    website="https://www.boston.gov",
    is_venue=True
)

# Scraper bookkeeping handle, starting item count, and virtual calendar.
scraper, item_count_start, virtcal = digitools.getScraper(venue, 'bos')

# Event-type code passed through to digitools ("Gv" = government).
event_type = "Gv"

# Time Signatures
# NOTE(review): tz is defined but the dateStamps parsed below are naive —
# confirm whether createBasicEvent localizes them.
tz = pytz.timezone("US/Eastern")
DATETIME_FORMAT = '%B %d, %Y %I:%M%p'
|
||||
|
||||
def get_events(ps, event_type):
    """Collect public-notice events from a parsed Boston listing page.

    For each notice container, builds an event dict (title, link, date,
    parsed dateStamp), stores it via digitools.createBasicEvent, and
    increments the module-level scraper's item counter.  A failure on a
    single notice is printed and the loop continues with the next one.
    """
    print("Getting events ...")
    notices = ps.xpath('.//*/div[@class="g g--m0 n-li"]')
    ppr(notices)
    for notice in notices:
        try:
            event = {}
            event['scraper'] = scraper
            event['calendars'] = [scraper.calendar]
            event['title'] = notice.xpath('.//*/div[@class="n-li-t"]/a/text()')[0]
            event['link'] = venue.website + notice.xpath('.//*/div[@class="n-li-t"]/a/@href')[0]
            # Keep only the text before the first dash and drop embedded
            # newlines, then parse with the module-level format string.
            raw_date = notice.xpath('.//*/li[@class="dl-i"]/span[@class="dl-d"]/text()')[0]
            event['date'] = raw_date.replace('\n', '').split("-")[0].strip()
            event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT)
            digitools.createBasicEvent(event, event_type, venue)
            ppr(event)
            scraper.items += 1
        except Exception as e:
            # Best-effort scraping: report the failure and move on.
            print(e)
            ppr(event)
            print("\n\n+++\n\n")
|
||||
|
||||
# --- Entry point -----------------------------------------------------------
# The first CLI argument selects the run environment / browser profile.
if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

# The public-notices listing is paginated; walk the first six pages.
for page in range(6):
    page_link = venue.website + "/public-notices?page=" + str(page)
    page_source = digitools.getSource(br, page_link)
    get_events(page_source, "Gv")
    sleep(2)  # pause between page fetches

digitools.updateScraper(scraper, item_count_start)
br.close()
|
||||
@@ -5,10 +5,9 @@ import os, sys
|
||||
from datetime import datetime
|
||||
from dateutil import relativedelta
|
||||
|
||||
import django
|
||||
sys.path.append('../../../../')
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
||||
django.setup()
|
||||
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||
import dtss
|
||||
dtss.getReady()
|
||||
|
||||
from time import sleep
|
||||
from pprint import pprint as ppr
|
||||
@@ -7,10 +7,9 @@ td = relativedelta.relativedelta(hours=5)
|
||||
from pprint import pprint as ppr
|
||||
import pytz
|
||||
|
||||
import django
|
||||
sys.path.append('../../../../')
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
||||
django.setup()
|
||||
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||
import dtss
|
||||
dtss.getReady()
|
||||
|
||||
from events.models import Event as DSEvent, Organization, Scraper, Calendar
|
||||
|
||||
@@ -3,10 +3,9 @@ from datetime import datetime, timedelta
|
||||
import requests
|
||||
import json
|
||||
|
||||
import django
|
||||
sys.path.append('../../../../')
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
||||
django.setup()
|
||||
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||
import dtss
|
||||
dtss.getReady()
|
||||
|
||||
from events.models import Event, Organization, Scraper, Calendar
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import re, os, sys
|
||||
from datetime import datetime
|
||||
|
||||
import django
|
||||
sys.path.append('../../../../')
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
||||
django.setup()
|
||||
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||
import dtss
|
||||
dtss.getReady()
|
||||
|
||||
from events.models import Event, Organization, Scraper, Calendar
|
||||
|
||||
74
Working/govt/or.portland.py
Normal file
74
Working/govt/or.portland.py
Normal file
@@ -0,0 +1,74 @@
|
||||
import os, sys
|
||||
from datetime import datetime
|
||||
from dateutil import relativedelta
|
||||
|
||||
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||
import dtss
|
||||
dtss.getReady()
|
||||
|
||||
from time import sleep
|
||||
from pprint import pprint as ppr
|
||||
import pytz
|
||||
|
||||
from events.models import Organization, Scraper
|
||||
import events.digitools as digitools
|
||||
|
||||
# --- Venue / scraper setup -------------------------------------------------
# get_or_create matches on EVERY keyword argument below, so fixing the
# misspelled city ("Portand" -> "Portland") may require a one-off cleanup of
# any Organization row previously created under the old value.
venue, created = Organization.objects.get_or_create(
    name="Portland City Council",
    city="Portland",  # fixed typo: was "Portand"
    website="https://www.portland.gov",
    is_venue=True
)

# Scraper bookkeeping handle, starting item count, and virtual calendar.
scraper, item_count_start, virtcal = digitools.getScraper(venue, 'pdx')

# Event-type code passed through to digitools ("Gv" = government).
event_type = "Gv"

# Time Signatures
# NOTE(review): Portland is in US/Pacific but tz is set to US/Eastern here,
# and tz is not used below — confirm the intended zone before localizing.
tz = pytz.timezone("US/Eastern")
DATETIME_FORMAT = '%B %d, %Y %I:%M %p'
DATETIME_FORMAT_2 = '%B %d, %Y'
|
||||
|
||||
def get_events(ps, event_type):
    """Collect council events from a parsed Portland events page.

    For each event row, extracts title, link, date, and time, parses a
    datetime (falling back to a date-only format when the combined parse
    fails), creates the event via digitools.createBasicEvent, and
    increments the module-level scraper's item counter.  A failure on a
    single row is printed and the loop continues.
    """
    print("Getting events ...")
    contents = ps.xpath('.//*/div[@class="row position-relative"]')
    ppr(contents)
    for c in contents:
        try:
            event = {}
            event['scraper'] = scraper
            event['calendars'] = [scraper.calendar]
            event['title'] = c.xpath('.//*/h2[@class="h4"]/a/span/text()')[0]
            event['link'] = venue.website + c.xpath('.//*/h2[@class="h4"]/a/@href')[0]
            event['date'] = c.xpath('.//*/time/text()')[0]
            event['time'] = c.xpath('.//*/span[@class="pe-4"]/text()')[0].replace("\n", "").strip()
            try:
                event['dateStamp'] = datetime.strptime(event['date']+" "+event['time'], DATETIME_FORMAT)
            except ValueError:
                # Was a bare `except:` — narrowed to ValueError (strptime's
                # failure mode) so SystemExit/KeyboardInterrupt propagate.
                # Some listings have no usable time; parse the date alone.
                event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT_2)

            digitools.createBasicEvent(event, event_type, venue)
            scraper.items += 1
        except Exception as e:
            # Best-effort scraping: report the failure and move on.
            print(e)
            ppr(event)
            print("\n\n+++\n\n")
|
||||
|
||||
# --- Entry point -----------------------------------------------------------
# The first CLI argument selects the run environment / browser profile.
if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

# The council events listing is paginated; walk the first three pages.
for page in range(3):
    page_link = venue.website + "/auditor/council-clerk/events?page=" + str(page)
    page_source = digitools.getSource(br, page_link)
    get_events(page_source, "Gv")
    sleep(2)  # pause between page fetches

digitools.updateScraper(scraper, item_count_start)
br.close()
|
||||
66
Working/govt/pa.philadelpha.py
Normal file
66
Working/govt/pa.philadelpha.py
Normal file
@@ -0,0 +1,66 @@
|
||||
import os, sys
|
||||
from datetime import datetime
|
||||
from dateutil import relativedelta
|
||||
|
||||
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||
import dtss
|
||||
dtss.getReady()
|
||||
|
||||
from time import sleep
|
||||
from pprint import pprint as ppr
|
||||
import pytz
|
||||
|
||||
from events.models import Organization, Scraper
|
||||
import events.digitools as digitools
|
||||
|
||||
# --- Venue / scraper setup -------------------------------------------------
# For Philadelphia the Legistar calendar URL doubles as the org website and
# (in get_events) as every event's link.  get_or_create matches on EVERY
# keyword argument below.
venue, created = Organization.objects.get_or_create(
    name="Philadelphia City Council",
    city="Philadelphia",
    website="https://phila.legistar.com/Calendar.aspx",
    is_venue=True
)

# Scraper bookkeeping handle, starting item count, and virtual calendar.
scraper, item_count_start, virtcal = digitools.getScraper(venue, 'phl')

# Event-type code passed through to digitools ("Gv" = government).
event_type = "Gv"

# Time Signatures
tz = pytz.timezone("US/Eastern")
DATETIME_FORMAT = '%m/%d/%Y %I:%M %p'
DATETIME_FORMAT_2 = '%B %d, %Y'
|
||||
|
||||
def get_events(ps, event_type):
    """Collect meeting rows from the parsed Legistar calendar page.

    Builds an event dict per table row (title, time, sorted date column,
    combined dateStamp), stores it via digitools.createBasicEvent, and
    increments the module-level scraper's item counter.  A failure on a
    single row is printed and the loop continues.
    """
    print("Getting events ...")
    # NOTE(review): only 'rgRow' rows are matched; Legistar grids often
    # alternate 'rgRow'/'rgAltRow' — confirm alternate rows aren't needed.
    rows = ps.xpath('.//*/tr[@class="rgRow"]')
    for row in rows:
        try:
            event = {}
            event['scraper'] = scraper
            event['link'] = venue.website
            event['calendars'] = [scraper.calendar]
            event['title'] = row.xpath('.//*/a/text()')[0]
            event['time'] = row.xpath('.//*/span/text()')[2]
            event['date'] = row.xpath('.//td[@class="rgSorted"]/text()')[0]
            combined = event['date'] + " " + event['time']
            event['dateStamp'] = datetime.strptime(combined, DATETIME_FORMAT)
            digitools.createBasicEvent(event, event_type, venue)
            scraper.items += 1
        except Exception as e:
            # Best-effort scraping: report the failure and move on.
            print(e)
            ppr(event)
            print("\n\n+++\n\n")
|
||||
|
||||
# --- Entry point -----------------------------------------------------------
# The first CLI argument selects the run environment / browser profile.
if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

# Single-page calendar: fetch the venue website once and scrape it.
page_source = digitools.getSource(br, venue.website)
sleep(1)
get_events(page_source, "Gv")
sleep(2)

digitools.updateScraper(scraper, item_count_start)
br.close()
|
||||
Reference in New Issue
Block a user