updated naming conventions
Working/govt/ma.boston.citycouncil.py (Normal file, 66 lines)
@@ -0,0 +1,66 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Boston City Council",
    city="Boston",
    website="https://www.boston.gov",
    is_venue=True
)

scraper, item_count_start, virtcal = digitools.getScraper(venue, 'bos')

event_type = "Gv"

# Time Signatures
tz = pytz.timezone("US/Eastern")
DATETIME_FORMAT = '%B %d, %Y %I:%M%p'

def get_events(ps, event_type):
    print("Getting events ...")
    contents = ps.xpath('.//*/div[@class="g g--m0 n-li"]')
    ppr(contents)
    for c in contents:
        try:
            event = {}
            event['scraper'] = scraper
            event['calendars'] = [scraper.calendar]
            event['title'] = c.xpath('.//*/div[@class="n-li-t"]/a/text()')[0]
            event['link'] = venue.website + c.xpath('.//*/div[@class="n-li-t"]/a/@href')[0]
            event['date'] = c.xpath('.//*/li[@class="dl-i"]/span[@class="dl-d"]/text()')[0].replace('\n', '').split("-")[0].strip()
            event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT)
            digitools.createBasicEvent(event, event_type, venue)
            ppr(event)
            scraper.items += 1
        except Exception as e:
            print(e)
            ppr(event)
            print("\n\n+++\n\n")

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

for n in range(6):
    link = venue.website + "/public-notices?page=" + str(n)
    ps = digitools.getSource(br, link)
    get_events(ps, "Gv")
    sleep(2)

digitools.updateScraper(scraper, item_count_start)
br.close()
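The Boston page appears to render each notice's date cell as a range, so the scraper keeps only the text before the first "-" and parses it with DATETIME_FORMAT. A minimal illustration of that normalization, using a made-up sample string rather than real scraped markup:

    from datetime import datetime

    DATETIME_FORMAT = '%B %d, %Y %I:%M%p'

    # Hypothetical value of the "dl-d" cell; the real markup may differ.
    raw = "\nDecember 4, 2025 10:00AM - 12:00PM\n"

    # Same cleanup the scraper applies: drop newlines, keep the start of the range.
    start = raw.replace('\n', '').split("-")[0].strip()
    print(datetime.strptime(start, DATETIME_FORMAT))  # 2025-12-04 10:00:00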
@@ -5,10 +5,9 @@ import os, sys
 from datetime import datetime
 from dateutil import relativedelta
 
-import django
-sys.path.append('../../../../')
-os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
-django.setup()
+sys.path.append('/var/www/digisnaxx.ado/scrapers')
+import dtss
+dtss.getReady()
 
 from time import sleep
 from pprint import pprint as ppr
@@ -7,10 +7,9 @@ td = relativedelta.relativedelta(hours=5)
 from pprint import pprint as ppr
 import pytz
 
-import django
-sys.path.append('../../../../')
-os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
-django.setup()
+sys.path.append('/var/www/digisnaxx.ado/scrapers')
+import dtss
+dtss.getReady()
 
 from events.models import Event as DSEvent, Organization, Scraper, Calendar
 
@@ -3,10 +3,9 @@ from datetime import datetime, timedelta
 import requests
 import json
 
-import django
-sys.path.append('../../../../')
-os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
-django.setup()
+sys.path.append('/var/www/digisnaxx.ado/scrapers')
+import dtss
+dtss.getReady()
 
 from events.models import Event, Organization, Scraper, Calendar
 
@@ -1,10 +1,9 @@
 import re, os, sys
 from datetime import datetime
 
-import django
-sys.path.append('../../../../')
-os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
-django.setup()
+sys.path.append('/var/www/digisnaxx.ado/scrapers')
+import dtss
+dtss.getReady()
 
 from events.models import Event, Organization, Scraper, Calendar
 
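Each of the hunks above swaps the per-script Django bootstrap (import django, a relative sys.path.append('../../../../'), setting DJANGO_SETTINGS_MODULE, django.setup()) for the shared dtss module. dtss itself is not part of this diff, so the following is only a sketch of what a getReady() helper along those lines might look like, assuming it wraps the same setup the removed lines did inline; the settings module name is carried over from the old code and everything else is hypothetical:

    # dtss.py -- hypothetical bootstrap helper, not taken from the real module.
    import os
    import sys

    import django

    # Assumed project root; the old scripts reached it with a relative path append.
    PROJECT_ROOT = '/var/www/digisnaxx.ado'

    def getReady(settings_module='config.django.local'):
        """Configure Django once so scraper scripts can import the events models."""
        if PROJECT_ROOT not in sys.path:
            sys.path.append(PROJECT_ROOT)
        os.environ.setdefault('DJANGO_SETTINGS_MODULE', settings_module)
        django.setup()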
Working/govt/or.portland.py (Normal file, 74 lines)
@@ -0,0 +1,74 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Portland City Council",
    city="Portland",
    website="https://www.portland.gov",
    is_venue=True
)

scraper, item_count_start, virtcal = digitools.getScraper(venue, 'pdx')

event_type = "Gv"

# Time Signatures
tz = pytz.timezone("US/Eastern")
DATETIME_FORMAT = '%B %d, %Y %I:%M %p'
DATETIME_FORMAT_2 = '%B %d, %Y'

def get_events(ps, event_type):
    print("Getting events ...")
    contents = ps.xpath('.//*/div[@class="row position-relative"]')
    ppr(contents)
    for c in contents:
        try:
            event = {}
            event['scraper'] = scraper
            event['calendars'] = [scraper.calendar]
            event['title'] = c.xpath('.//*/h2[@class="h4"]/a/span/text()')[0]
            event['link'] = venue.website + c.xpath('.//*/h2[@class="h4"]/a/@href')[0]
            event['date'] = c.xpath('.//*/time/text()')[0]
            event['time'] = c.xpath('.//*/span[@class="pe-4"]/text()')[0].replace("\n", "").strip()
            # event['time2'] = c.xpath('.//*/span[@class="pe-4"]/text()')
            try:
                event['dateStamp'] = datetime.strptime(event['date']+" "+event['time'], DATETIME_FORMAT)
            except:
                event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT_2)

            # ppr(event)
            digitools.createBasicEvent(event, event_type, venue)
            scraper.items += 1
        except Exception as e:
            print(e)
            ppr(event)
            print("\n\n+++\n\n")

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

for n in range(3):
    link = venue.website + "/auditor/council-clerk/events?page=" + str(n)
    ps = digitools.getSource(br, link)
    get_events(ps, "Gv")
    sleep(2)

digitools.updateScraper(scraper, item_count_start)
br.close()
Working/govt/pa.philadelpha.py (Normal file, 66 lines)
@@ -0,0 +1,66 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Philadelphia City Council",
    city="Philadelphia",
    website="https://phila.legistar.com/Calendar.aspx",
    is_venue=True
)

scraper, item_count_start, virtcal = digitools.getScraper(venue, 'phl')

event_type = "Gv"

# Time Signatures
tz = pytz.timezone("US/Eastern")
DATETIME_FORMAT = '%m/%d/%Y %I:%M %p'
DATETIME_FORMAT_2 = '%B %d, %Y'

def get_events(ps, event_type):
    print("Getting events ...")
    contents = ps.xpath('.//*/tr[@class="rgRow"]')
    for c in contents:
        try:
            event = {}
            event['scraper'] = scraper
            event['link'] = venue.website
            event['calendars'] = [scraper.calendar]
            event['title'] = c.xpath('.//*/a/text()')[0]
            event['time'] = c.xpath('.//*/span/text()')[2]
            event['date'] = c.xpath('.//td[@class="rgSorted"]/text()')[0]
            event['dateStamp'] = datetime.strptime(event['date']+" "+event['time'], DATETIME_FORMAT)
            # ppr(event)
            digitools.createBasicEvent(event, event_type, venue)
            scraper.items += 1
        except Exception as e:
            print(e)
            ppr(event)
            print("\n\n+++\n\n")

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

ps = digitools.getSource(br, venue.website)
sleep(1)
get_events(ps, "Gv")
sleep(2)

digitools.updateScraper(scraper, item_count_start)
br.close()
Working/iCal/col.medellin.py (Normal file, 55 lines)
@@ -0,0 +1,55 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime, timedelta
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="idioki",
    city="Medellin",
    website="https://idioki.com/",
)

try:
    scraper, created = Scraper.objects.get_or_create(
        name="idioki",
        website="https://calendar.google.com/calendar/ical/46ae0446724b1b3ee83cbd7dbc0db6a235bf97509ad860ca91eada3c267b5e41%40group.calendar.google.com/public/basic.ics",
        calendar = Calendar.objects.get(shortcode='mde'),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

event_type = "Mu"
item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)

counter = 0
digitools.getiCalRepeateEvents(gcal, scraper, venue, "Ed", cal)
digitools.updateScraper(scraper, item_count_start)

new_time = datetime.now() - timedelta(days=1)
right_bound_time = datetime.now() + timedelta(days=45)
events = DSEvent.objects.filter(show_date__lte=new_time)
events1 = DSEvent.objects.filter(show_date__gte=right_bound_time)

for e in events:
    e.delete()

for e in events1:
    e.delete()
Working/iCal/df.online_events.py (Normal file, 56 lines)
@@ -0,0 +1,56 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime, timedelta
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Online Events",
    city="Online",
    website="https://dreamfreely.org/",
)
event_type = "Mu"

try:
    scraper, created = Scraper.objects.get_or_create(
        name="Online Events",
        website="https://calendar.google.com/calendar/ical/p1a4r9glkjpu4u6iv3fkmu8qtc%40group.calendar.google.com/public/basic.ics",
        calendar = Calendar.objects.get(shortcode='000'),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)

counter = 0

digitools.getiCalRepeateEvents(gcal, scraper, venue, "Ed", cal)
digitools.updateScraper(scraper, item_count_start)

new_time = datetime.now() - timedelta(days=1)
right_bound_time = datetime.now() + timedelta(days=45)
events = DSEvent.objects.filter(show_date__lte=new_time)
events1 = DSEvent.objects.filter(show_date__gte=right_bound_time)

for e in events:
    e.delete()

for e in events1:
    e.delete()
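Both new iCal scripts end by pruning events outside a window of roughly yesterday to 45 days out, looping over two querysets and deleting one row at a time. If that loop ever becomes a bottleneck, a single bulk delete() per queryset does the same pruning; a sketch with the same window (note that QuerySet.delete() bypasses any per-object delete() overrides, which matters only if the Event model defines one):

    from datetime import datetime, timedelta

    from events.models import Event as DSEvent

    lower = datetime.now() - timedelta(days=1)    # already in the past
    upper = datetime.now() + timedelta(days=45)   # too far in the future

    # One DELETE statement per queryset instead of one per row.
    DSEvent.objects.filter(show_date__lte=lower).delete()
    DSEvent.objects.filter(show_date__gte=upper).delete()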
@@ -1,68 +0,0 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

import django
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

venue, created = Organization.objects.get_or_create(
    name="Chicago Ave Fire Arts Center",
    city="Minneapolis",
    website="https://www.cafac.org/classes",
)
event_type = "Ed"

try:
    scraper, created = Scraper.objects.get_or_create(
        name="Chicago Ave Fire Arts Center",
        website="https://calendar.google.com/calendar/ical/9qj2426rukra3jv933nslsf3r8%40group.calendar.google.com/public/basic.ics",
        calendar = Calendar.objects.get(id=1),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

item_count_start = scraper.items

event_type = "Ed"

objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
tz = pytz.timezone("US/Central")

events = digitools.getiCalEvents(gcal, scraper, venue, "Ed")

for event in events:
    # ppr(event)
    e = {}
    e['calendars'] = event['calendars']
    try:
        e['dateStamp'] = event['dateStart'][0]
    except:
        e['dateStamp'] = event['dateStart']
    e['title'] = event['strSummary']
    e['scraper'] = scraper
    e['link'] = venue.website
    try:
        digitools.createBasicEvent(e, 'Ed', venue)
    except Exception as e:
        print("Error: ", e)

digitools.updateScraper(scraper, item_count_start)
@@ -1,146 +0,0 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime, timedelta
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

import django

sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from config.env import env

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

td = relativedelta.relativedelta(hours=5)


venue, created = Organization.objects.get_or_create(
    name="idioki",
    city="Medellin",
    website="https://idioki.com/",
)
event_type = "Mu"

try:
    scraper, created = Scraper.objects.get_or_create(
        name="idioki",
        website="https://calendar.google.com/calendar/ical/46ae0446724b1b3ee83cbd7dbc0db6a235bf97509ad860ca91eada3c267b5e41%40group.calendar.google.com/public/basic.ics",
        calendar = Calendar.objects.get(shortcode='mde'),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
tz = pytz.timezone("US/Central")

def createEvent(day, date, event, scraper, venue, event_type):
    ppr(event)
    print("NAME: ", venue.name)
    print('\n\n')

    if venue.name == "DANCEFREE":
        venue.website = "https://www.instagram.com/dancefreeco"
    if venue.name == "Vintrash":
        venue.website = "https://www.instagram.com/vintrashbar"
    if venue.name == "The Wandering Paisa":
        venue.website = "https://wanderingpaisahostel.com"
    if venue.name == "Dulce Posion":
        venue.website = "https://www.instagram.com/dulceposionr"
    if venue.name == "Blood Dance Company":
        venue.website = "https://www.instagram.com/blooddancecompany"
    if venue.name == "OLSA Certified Spanish School":
        venue.website = "https://www.olsafoundation.org/"
    if event['strSummary'] == "Merli Rooftop Language Exchange":
        venue.website = "https://calendar.google.com/calendar/embed?src=46ae0446724b1b3ee83cbd7dbc0db6a235bf97509ad860ca91eada3c267b5e41%40group.calendar.google.com&ctz=America%2FBogota"
    if "Concious Warrior" in event['strSummary']:
        venue.website = "https://www.consciouscolombia.com/"
    # if venue.name == "":
    #     venue.website = "https://www.consciouscolombia.com/"
    # if venue.name == "":
    #     venue.website = "https://www.consciouscolombia.com/"
    # if venue.name == "":
    #     venue.website = "https://www.consciouscolombia.com/"
    venue.save()

    days = [day-1, day+6, day+13]
    for day in days:
        event['dateStamp'] = date + timedelta(days=day)
        event['dateStart'] = event['dateStamp']
        print("sending")
        digitools.createCleanIcalEvent(event, scraper, venue, event_type)
    return

def splitLocation(event):
    loc_split = event['strLocation'].split(',')
    venue_name = loc_split[0]
    venue, created = Organization.objects.get_or_create(
        name=venue_name,
        city="Medellin",
    )
    event['venue'] = venue
    return event

counter = 0

for component in gcal.walk():
    event = {}
    event['scraper'] = scraper
    event['calendars'] = [scraper.calendar]
    event['strSummary'] = f"{(component.get('SUMMARY'))}"
    event['strDesc'] = component.get('DESCRIPTION')
    event['strLocation'] = str(component.get('LOCATION'))
    # startDate = component.get('DTSTART')
    # startTime = startDate.time()
    event['dateStart'] = component.get('DTSTART')
    event['dateStamp'] = component.get('DTSTAMP')

    if event['strSummary'] != 'None':
        event['details'] = {
            "description" : event['strDesc'],
            "Location" : event['strLocation'],
        }

    if event['dateStamp'] != None:
        event['dateStart'] = event['dateStart'].dt
        event['dateStart'] = datetime.strptime(str(event['dateStart'])[:-6], '%Y-%m-%d %H:%M:%S')
        rules = component.get('RRule')
        try:
            if rules['FREQ'][0] == 'WEEKLY':
                if datetime.today().weekday() != 0:
                    event = splitLocation(event)
                    date = datetime.today().date() - timedelta(days=datetime.today().weekday())
                    date = datetime.combine(date, event['dateStart'].time())
                    days = ["SU", "MO", "TU", "WE", "TH", "FR", "SA"]
                    for day in rules['BYDAY']:
                        day = days.index(day)
                        createEvent(day, date, event, scraper, event['venue'], "Ed")
        except Exception as e:
            print("Error: ", e, "\n\n\n\n")
            pass

digitools.updateScraper(scraper, item_count_start)

new_time = datetime.now() - timedelta(days=1)
right_bound_time = datetime.now() + timedelta(days=45)
events = DSEvent.objects.filter(show_date__lte=new_time)
events1 = DSEvent.objects.filter(show_date__gte=right_bound_time)

for e in events:
    e.delete()

for e in events1:
    e.delete()
@@ -1,118 +0,0 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime, timedelta
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

import django

sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from config.env import env

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

td = relativedelta.relativedelta(hours=5)


venue, created = Organization.objects.get_or_create(
    name="Online Events",
    city="Online",
    website="https://dreamfreely.org/",
)
event_type = "Mu"

try:
    scraper, created = Scraper.objects.get_or_create(
        name="Online Events",
        website="https://calendar.google.com/calendar/ical/p1a4r9glkjpu4u6iv3fkmu8qtc%40group.calendar.google.com/public/basic.ics",
        calendar = Calendar.objects.get(shortcode='000'),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
tz = pytz.timezone("US/Central")

def createEvent(day, date, event, scraper, venue, event_type):
    days = [day-1, day+6, day+13]
    for day in days:
        event['dateStamp'] = date + timedelta(days=day)
        event['dateStart'] = event['dateStamp']
        digitools.createCleanIcalEvent(event, scraper, venue, event_type)
    return

def splitLocation(event):
    loc_split = event['strLocation'].split(',')
    venue_name = loc_split[0]
    venue, created = Organization.objects.get_or_create(
        name=venue_name,
    )
    event['venue'] = venue
    return event

counter = 0

for component in gcal.walk():
    event = {}
    event['scraper'] = scraper
    event['calendars'] = [scraper.calendar]
    event['strSummary'] = f"{(component.get('SUMMARY'))}"
    event['strDesc'] = component.get('DESCRIPTION')
    event['strLocation'] = str(component.get('LOCATION'))
    # startDate = component.get('DTSTART')
    # startTime = startDate.time()
    event['dateStart'] = component.get('DTSTART')
    event['dateStamp'] = component.get('DTSTAMP')

    if event['strSummary'] != 'None':
        event['details'] = {
            "description" : event['strDesc'],
            "Location" : event['strLocation'],
        }
    if 'Mikel' in event['strSummary']:
        print('JELLOO \n\n\n JELOOO')
        pass
    elif event['dateStamp'] != None:
        event['dateStart'] = event['dateStart'].dt
        event['dateStart'] = datetime.strptime(str(event['dateStart'])[:-6], '%Y-%m-%d %H:%M:%S')
        rules = component.get('RRule')
        try:
            if rules['FREQ'][0] == 'WEEKLY':
                if datetime.today().weekday() != 0:
                    event = splitLocation(event)
                    date = datetime.today().date() - timedelta(days=datetime.today().weekday())
                    date = datetime.combine(date, event['dateStart'].time())
                    days = ["SU", "MO", "TU", "WE", "TH", "FR", "SA"]
                    for day in rules['BYDAY']:
                        day = days.index(day)
                        createEvent(day, date, event, scraper, event['venue'], "Ed")
        except Exception as e:
            print("Error (no repeat): ", e)
            pass

digitools.updateScraper(scraper, item_count_start)

new_time = datetime.now() - timedelta(days=1)
right_bound_time = datetime.now() + timedelta(days=45)
events = DSEvent.objects.filter(show_date__lte=new_time)
events1 = DSEvent.objects.filter(show_date__gte=right_bound_time)

for e in events:
    e.delete()

for e in events1:
    e.delete()
@@ -1,65 +0,0 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

import django
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)


venue, created = Organization.objects.get_or_create(
    name="Sociable Ciderwerks",
    city="Minneapolis",
    website="https://sociablecider.com/events",
)
event_type = "Mu"

try:
    scraper, created = Scraper.objects.get_or_create(
        name="Sociable Ciderwerks",
        website="https://calendar.google.com/calendar/ical/c_oa7uitvkn871o1ojl5e1os4ve8%40group.calendar.google.com/public/basic.ics",
        calendar = Calendar.objects.get(id=1),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

item_count_start = scraper.items

objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
tz = pytz.timezone("US/Central")
events = digitools.getiCalEvents(gcal, scraper, venue, "Ed")

for event in events:
    # ppr(event)
    e = {}
    e['calendars'] = event['calendars']
    try:
        e['dateStamp'] = event['dateStart'][0]
    except:
        e['dateStamp'] = event['dateStart']
    e['title'] = event['strSummary']
    e['scraper'] = scraper
    e['link'] = venue.website
    try:
        digitools.createBasicEvent(e, 'Mu', venue)
    except Exception as e:
        print("Error: ", e)

digitools.updateScraper(scraper, item_count_start)
@@ -1,66 +0,0 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from pprint import pprint as ppr
import pytz

import django
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from events.models import Event as DSEvent, Organization, Scraper, Calendar

import events.digitools as digitools

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

venue, created = Organization.objects.get_or_create(
    name="Bunkers",
    city="Minneapolis",
    website="https://bunkersmusic.com/calendar/",
    is_venue = True
)

try:
    scraper, created = Scraper.objects.get_or_create(
        name="Bunkers",
        website="https://calendar.google.com/calendar/ical/js94epu90r2et31aopons1ifm8%40group.calendar.google.com/public/basic.ics",
        calendar = Calendar.objects.get(id=1),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

item_count_start = scraper.items

event_type = "Mu"

objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
tz = pytz.timezone("US/Central")
events = digitools.getiCalEvents(gcal, scraper, venue, "Mu")

for event in events:
    # ppr(event)
    e = {}
    e['calendars'] = event['calendars']
    try:
        e['dateStamp'] = event['dateStart'][0]
    except:
        e['dateStamp'] = event['dateStart']
    e['title'] = event['strSummary']
    e['scraper'] = scraper
    e['link'] = venue.website
    try:
        digitools.createBasicEvent(e, 'Mu', venue)
    except Exception as e:
        print("Error: ", e)

# digitools.returniCalEvents(gcal, scraper, venue, "Mu")
digitools.updateScraper(scraper, item_count_start)
@@ -1,93 +0,0 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta

from pprint import pprint as ppr
import pytz

import django
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

tz = pytz.timezone("US/Central")
td = relativedelta.relativedelta(hours=5)

venue, created = Organization.objects.get_or_create(
    name="Center for Performing Arts",
    city="Minneapolis",
    website="https://www.cfpampls.com/events",
)

try:
    scraper, created = Scraper.objects.get_or_create(
        name="Center for Performing Arts",
        website="https://calendar.google.com/calendar/ical/6rpooudjg01vc8bjek1snu2ro0%40group.calendar.google.com/public/basic.ics",
        calendar = Calendar.objects.get(id=1),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

item_count_start = scraper.items

event_type = "Ed"

objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
tz = pytz.timezone("US/Central")

events = digitools.getiCalEvents(gcal, scraper, venue, "Ed")

for event in events:
    ppr(event)
    e = {}
    e['calendars'] = event['calendars']
    try:
        e['dateStamp'] = event['dateStart'][0]
    except:
        e['dateStamp'] = event['dateStart']
    e['title'] = event['strSummary']
    e['scraper'] = scraper
    e['link'] = venue.website
    try:
        digitools.createBasicEvent(e, 'Ed', venue)
    except Exception as e:
        print("Error: ", e)


    # now_now = datetime.today().date()
    # try:
    #     print("1Event: ", event['dateStart'])
    #     if event['dateStart'] > now_now:
    #         print("Check Check: ", event['dateStart'])
    #         ppr(event)
    #         # createIcalEvent(event, scraper, venue, event_type)
    #     else:
    #         print("WHAT?")
    #         ppr(event)
    # except Exception as e:
    #     try:
    #         event['dateStart'] = event['dateStart'].date()
    #         print("2Event: ", event['dateStart'])
    #         if event['dateStart'] > now_now:
    #             print("Check Check: ", event['dateStart'])
    #             ppr(event)
    #             # createIcalEvent(event, scraper, venue, event_type)
    #         else:
    #             print("WHAT?")
    #             ppr(event)
    #     except Exception as e:
    #         print("The Error: ", e)
    #         pass


digitools.updateScraper(scraper, item_count_start)
@@ -1,63 +0,0 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event
from datetime import datetime

from pprint import pprint as ppr
import pytz

import django
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

venue, created = Organization.objects.get_or_create(
    name="Eagles #34",
    city="Minneapolis",
    website="https://www.minneapoliseagles34.org/events-entertainment.html",
)

try:
    scraper, created = Scraper.objects.get_or_create(
        name="Eagles #34",
        website="https://calendar.google.com/calendar/ical/teflgutelllvla7r6vfcmjdjjo%40group.calendar.google.com/public/basic.ics",
        calendar = Calendar.objects.get(id=1),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

item_count_start = scraper.items

event_type = "Mu"

objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
tz = pytz.timezone("US/Central")
events = digitools.getiCalEvents(gcal, scraper, venue, "Ed")

for event in events:
    ppr(event)
    e = {}
    e['calendars'] = event['calendars']
    try:
        e['dateStamp'] = event['dateStart'][0]
    except:
        e['dateStamp'] = event['dateStart']
    e['title'] = event['strSummary']
    e['scraper'] = scraper
    e['link'] = venue.website
    try:
        digitools.createBasicEvent(e, 'Mu', venue)
    except Exception as e:
        print("Error: ", e)

digitools.updateScraper(scraper, item_count_start)
@@ -1,67 +0,0 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event
from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

import django
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from events.models import Event as DSEvent, Organization, Scraper, Calendar

import events.digitools as digitools

td = relativedelta.relativedelta(hours=5)
odt = datetime.now() + td

venue, created = Organization.objects.get_or_create(
    name="Terminal Bar",
    city="Minneapolis",
    website="https://terminalbarmn.com",
)
event_type = "Mu"

try:
    scraper, created = Scraper.objects.get_or_create(
        name="Terminal Bar",
        website="https://calendar.google.com/calendar/ical/terminalbar32%40gmail.com/public/basic.ics",
        calendar = Calendar.objects.get(id=1),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

item_count_start = scraper.items

event_type = "Mu"

objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
tz = pytz.timezone("US/Central")
events = digitools.getiCalEvents(gcal, scraper, venue, "Ed")

for event in events:
    # ppr(event)
    e = {}
    e['calendars'] = event['calendars']
    try:
        e['dateStamp'] = event['dateStart'][0]
    except:
        e['dateStamp'] = event['dateStart']
    e['title'] = event['strSummary']
    e['scraper'] = scraper
    e['link'] = venue.website
    try:
        digitools.createBasicEvent(e, 'Mu', venue)
    except Exception as e:
        print("Error: ", e)

digitools.updateScraper(scraper, item_count_start)
@@ -1,69 +0,0 @@
import requests, os, sys
# from icalendar import Calendar as iCalendar, Event
from icalendar import Calendar as iCalendar
from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

import django
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%Y-%m-%d %I:%M %p'

venue, created = Organization.objects.get_or_create(
    name="White Squirrel",
    city="St. Paul",
    website="https://whitesquirrelbar.com",
    is_venue = True
)

try:
    scraper, created = Scraper.objects.get_or_create(
        name="White Squirrel",
        website="https://calendar.google.com/calendar/ical/vh5sr9h59nmrs2op5lmptu2fsa344cig%40import.calendar.google.com/public/basic.ics",
        calendar = Calendar.objects.get(shortcode='msp'),
        items = 0,
        new_items = 0,
        last_ran = datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)

item_count_start = scraper.items
event_type = "Mu"

objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.content)
tz = pytz.timezone("US/Central")
events = digitools.getiCalEvents(gcal, scraper, venue, event_type)

for event in events:
    ppr(event)
    e = {}
    e['calendars'] = event['calendars']
    try:
        e['dateStamp'] = event['dateStart'][0]
    except:
        e['dateStamp'] = event['dateStart']
    e['title'] = event['strSummary']
    e['scraper'] = scraper
    e['link'] = venue.website
    try:
        digitools.createBasicEvent(e, event_type, venue)
    except Exception as e:
        print("Error: ", e)

digitools.updateScraper(scraper, item_count_start)
Working/iCal/il.chicago.citycouncil.py (Normal file, 33 lines)
@@ -0,0 +1,33 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Chicago City Hall",
    city="Chicago",
    website="https://chicityclerkelms.chicago.gov/Meetings/",
)

website = "https://calendar.google.com/calendar/ical/chicagolegislativereference%40gmail.com/public/basic.ics"
scraper, item_count_start, virtcal = digitools.getScraper(venue, website, 'chi')

event_type = "Gv"
item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
events = digitools.getiCalEvents(gcal, scraper, venue, event_type)
digitools.buildiCalEvents(events, event_type)
digitools.updateScraper(scraper, item_count_start)
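digitools.getScraper() is the other helper these renamed scripts now lean on, and it is likewise not shown in the diff. Judging only from the call sites (the govt scrapers pass just a venue and a calendar shortcode, while the iCal scrapers also pass a feed URL, and all of them unpack a scraper, a starting item count, and a virtual calendar), a compatible sketch could look like the following; every detail is inferred from usage rather than taken from the real events.digitools module, and the real signature may well differ:

    # Hypothetical sketch of events/digitools.getScraper(), inferred from call sites.
    from datetime import datetime

    from events.models import Scraper, Calendar

    def getScraper(venue, website=None, shortcode=None):
        """Fetch or create the Scraper row for a venue and return
        (scraper, starting item count, virtual calendar)."""
        virtcal = Calendar.objects.get(shortcode=shortcode)
        scraper, created = Scraper.objects.get_or_create(
            name=venue.name,
            defaults={
                'website': website or venue.website,
                'calendar': virtcal,
                'items': 0,
                'new_items': 0,
                'last_ran': datetime.now(),
            },
        )
        return scraper, scraper.items, virtcal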
File diff suppressed because one or more lines are too long
Working/iCal/mn.mpls.bunkers.py (Normal file, 36 lines)
@@ -0,0 +1,36 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Bunkers",
    city="Minneapolis",
    website="https://bunkersmusic.com/calendar/",
    is_venue = True
)

event_type = "Mu"
website = "https://calendar.google.com/calendar/ical/js94epu90r2et31aopons1ifm8%40group.calendar.google.com/public/basic.ics"
scraper, item_count_start, virtcal = digitools.getScraper(venue, website, 'msp')
scraper.items = 0
scraper.save()

objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)

events = digitools.getiCalEvents(gcal, scraper, venue, event_type)
digitools.buildiCalEvents(events, event_type)
digitools.updateScraper(scraper, item_count_start)
Working/iCal/mn.mpls.cafac.py (Normal file, 32 lines)
@@ -0,0 +1,32 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Chicago Ave Fire Arts Center",
    city="Minneapolis",
    website="https://www.cafac.org/classes",
)

event_type = "Ed"
website = "https://calendar.google.com/calendar/ical/9qj2426rukra3jv933nslsf3r8%40group.calendar.google.com/public/basic.ics"
scraper, item_count_start, virtcal = digitools.getScraper(venue, website, 'msp')
item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
events = digitools.getiCalEvents(gcal, scraper, venue, event_type)
digitools.buildiCalEvents(events, event_type)
digitools.updateScraper(scraper, item_count_start)
Working/iCal/mn.mpls.eagles.py (Normal file, 32 lines)
@@ -0,0 +1,32 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Eagles #34",
    city="Minneapolis",
    website="https://www.minneapoliseagles34.org/events-entertainment.html",
)

event_type = "Mu"
website = "https://calendar.google.com/calendar/ical/teflgutelllvla7r6vfcmjdjjo%40group.calendar.google.com/public/basic.ics"
scraper, item_count_start, virtcal = digitools.getScraper(venue, website, 'msp')
item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
events = digitools.getiCalEvents(gcal, scraper, venue, event_type)
digitools.buildiCalEvents(events, event_type)
digitools.updateScraper(scraper, item_count_start)
Working/iCal/mn.mpls.socialablecider.py (Normal file, 32 lines)
@@ -0,0 +1,32 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Sociable Ciderwerks",
    city="Minneapolis",
    website="https://sociablecider.com/events",
)

event_type = "Mu"
website = "https://calendar.google.com/calendar/ical/c_oa7uitvkn871o1ojl5e1os4ve8%40group.calendar.google.com/public/basic.ics"
scraper, item_count_start, virtcal = digitools.getScraper(venue, website, 'msp')
item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
events = digitools.getiCalEvents(gcal, scraper, venue, "Mu")
digitools.buildiCalEvents(events, event_type)
digitools.updateScraper(scraper, item_count_start)
Working/iCal/mn.mpls.terminalbar.py (Normal file, 33 lines)
@@ -0,0 +1,33 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)
odt = datetime.now() + td

from pprint import pprint as ppr
import pytz

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Terminal Bar",
    city="Minneapolis",
    website="https://terminalbarmn.com",
)

event_type = "Mu"
website = "https://calendar.google.com/calendar/ical/terminalbar32%40gmail.com/public/basic.ics"
scraper, item_count_start, virtcal = digitools.getScraper(venue, website, 'msp')
item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
events = digitools.getiCalEvents(gcal, scraper, venue, event_type)
digitools.buildiCalEvents(events, event_type)
digitools.updateScraper(scraper, item_count_start)
Working/iCal/mn.stp.cfpa.py (Normal file, 32 lines)
@@ -0,0 +1,32 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Center for Performing Arts",
    city="Minneapolis",
    website="https://www.cfpampls.com/events",
)

event_type = "Ed"
website = "https://calendar.google.com/calendar/ical/6rpooudjg01vc8bjek1snu2ro0%40group.calendar.google.com/public/basic.ics"
scraper, item_count_start, virtcal = digitools.getScraper(venue, website, 'msp')
item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.text)
events = digitools.getiCalEvents(gcal, scraper, venue, event_type)
digitools.buildiCalEvents(events, event_type)
digitools.updateScraper(scraper, item_count_start)
Working/iCal/mn.stp.whitesquirrel.py (Normal file, 33 lines)
@@ -0,0 +1,33 @@
import requests, os, sys
from icalendar import Calendar as iCalendar, Event

from datetime import datetime
from dateutil import relativedelta
td = relativedelta.relativedelta(hours=5)

from pprint import pprint as ppr
import pytz

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from events.models import Event as DSEvent, Organization, Scraper, Calendar
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="White Squirrel",
    city="St. Paul",
    website="https://whitesquirrelbar.com",
    is_venue = True
)

event_type = "Mu"
website = "https://calendar.google.com/calendar/ical/vh5sr9h59nmrs2op5lmptu2fsa344cig%40import.calendar.google.com/public/basic.ics"
scraper, item_count_start, virtcal = digitools.getScraper(venue, website, 'msp')
item_count_start = scraper.items
objIcalData = requests.get(scraper.website)
gcal = iCalendar.from_ical(objIcalData.content)
events = digitools.getiCalEvents(gcal, scraper, venue, event_type)
digitools.buildiCalEvents(events, event_type)
digitools.updateScraper(scraper, item_count_start)
@@ -1,498 +0,0 @@
BEGIN:VCALENDAR
PRODID:-//Google Inc//Google Calendar 70.9054//EN
VERSION:2.0
CALSCALE:GREGORIAN
METHOD:PUBLISH
X-WR-CALNAME:The White Squirrel Bar
X-WR-TIMEZONE:UTC
X-WR-CALDESC:Events for The White Squirrel Bar
BEGIN:VEVENT
DTSTART:20251118T030000Z
DTEND:20251118T060000Z
DTSTAMP:20251116T033816Z
UID:10806-1763413200-1763424000@whitesquirrelbar.com
URL:https://whitesquirrelbar.com/event/the-deeper-kind-w-23-watts/
CREATED:20251104T233617Z
LAST-MODIFIED:20251105T053618Z
SEQUENCE:0
STATUS:CONFIRMED
SUMMARY:The Deeper Kind w. 23 Watts
TRANSP:OPAQUE
END:VEVENT
BEGIN:VEVENT
DTSTART:20251129T000000Z
DTEND:20251129T020000Z
DTSTAMP:20251116T033816Z
UID:10852-1764352800-1764360000@whitesquirrelbar.com
URL:https://whitesquirrelbar.com/event/the-mary-cutrufello-band-3/
CREATED:20251115T173617Z
DESCRIPTION:A rock and roll band! Killer harmonies\, rippin’ solos\, cool s
 ongs\, funny\nshit in between. \nHailed as a country guitarist\, Mary Cutru
 fello grew up on East Coast\nclassic rock. (Yes\, think Springsteen!) This
 tight\, taut trio explores\nher songs old and new with the fearless guitars
 \, stories\, and\nbetween-song banter she’s known for.
LAST-MODIFIED:20251115T233617Z
SEQUENCE:0
STATUS:CONFIRMED
SUMMARY:The Mary Cutrufello Band
TRANSP:OPAQUE
END:VEVENT
BEGIN:VEVENT
DTSTART:20251117T030000Z
DTEND:20251117T060000Z
DTSTAMP:20251116T033816Z
UID:10802-1763326800-1763337600@whitesquirrelbar.com
URL:https://whitesquirrelbar.com/event/karaoke-with-ally/
CREATED:20251103T163617Z
LAST-MODIFIED:20251103T223619Z
SEQUENCE:0
STATUS:CONFIRMED
SUMMARY:Karaoke With Ally!
TRANSP:OPAQUE
END:VEVENT
BEGIN:VEVENT
DTSTART:20251127T000000Z
DTEND:20251127T020000Z
DTSTAMP:20251116T033816Z
UID:10846-1764180000-1764187200@whitesquirrelbar.com
URL:https://whitesquirrelbar.com/event/family-dinner-w-doug-sarah-4/
CREATED:20251115T013617Z
LAST-MODIFIED:20251115T073617Z
SEQUENCE:0
STATUS:CONFIRMED
SUMMARY:Family Dinner w. Doug & Sarah
TRANSP:OPAQUE
END:VEVENT
BEGIN:VEVENT
DTSTART:20251116T190000Z
DTEND:20251116T220000Z
DTSTAMP:20251116T033816Z
UID:10147-1763298000-1763308800@whitesquirrelbar.com
URL:https://whitesquirrelbar.com/event/the-friend-ship/
CREATED:20251103T003616Z
DESCRIPTION:These popular singer/songwriters first got together to perform
 on Joel Sax’s debut\nalbum\, ‘Billions of Stars.’ They had so much fun and
 got such a great response at their\nsold-out release show that they have jo
 ined together to form a new band\, The Friend\nShip! They will bring their
 best songs\, sweetest harmonies\, and funniest stories to the\nWhite Squirr
 el for a night of beautiful music. \nNikki Lemire is an incredibly in deman
 d artist\, having earned accolades from fans and critics\nalike. Natalia To
 ledo\, from The Current describes ‘Mend’ It as “a dreamy\, simple\, and\nbe
 autiful song led by the sounds of her harp.”\nJoel Sax writes heartfelt son
 gs about love\, regret\, and hope for a bigger\, brighter future.\nHis debu
 t album title track\, Billions of Stars\, is “… a wonderful mix for a conte
 mporary\nfolk song” – Minnesota Sound and ‘A Lovely New Album\,’ by Mostly
 Minnesota\nMusic. \nMarc Severin has been a mainstay of the music scene for
 many years. His music is\ntrue Americana and his storytelling is midwester
 n country at its best. The Friend Ship\nwill be previewing songs from his w
 ell anticipated new album coming out this Winter. \nDave Mehling is a highl
 y sought-after musician\, producer and songwriter. He has\nproduced albums
|
|
||||||
for many artists\, including Sarah Morris\, Haley E Rydell\, Joel Sax and\n
|
|
||||||
Emily Haavik. “You are drawn in with his lyrics. They speak of very univers
|
|
||||||
al issues of\nlove which we all have\, but few of us have the talent to giv
|
|
||||||
e them a voice. Dave Mehling\npossesses this rare gift.” – Becca Martin
|
|
||||||
LAST-MODIFIED:20251103T063617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:The Friend Ship
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251123T000000Z
|
|
||||||
DTEND:20251123T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10827-1763834400-1763841600@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/giant-valley-string-band/
|
|
||||||
CREATED:20251109T183617Z
|
|
||||||
DESCRIPTION:Giant Valley String Band is a Twin Cities acoustic group playin
|
|
||||||
g an\neclectic mix of covers and originals in Bluegrass and Americana. The\
|
|
||||||
nband members are Noelle Haland on guitar\, Ted Haland on dobro and\nbass\,
|
|
||||||
Nate Hess on fiddle and guitar\, Erika Janik on banjo\, Matt\nJensen on ma
|
|
||||||
ndolin\, and Debra Pflipsen on guitar\, harmonica\,\naccordion\, and bass.
|
|
||||||
More info at giantvalleystringband.com. \n\n\n\n\nBlue Groove is a multiple
|
|
||||||
award-winning bluegrass band based in the\nTwin Cities that plays both con
|
|
||||||
temporary and classic bluegrass tunes\nwith a creative approach that drives
|
|
||||||
while still maintaining a sense\nof laid-back\, Midwestern chill. That’s w
|
|
||||||
hat they mean by “groove”. The\nbands inviting sound combined with their op
|
|
||||||
enness warmth on and\noff-stage have made them crowd favorites at festivals
|
|
||||||
and concert\nvenues all around Minnesota and the Upper Midwest. \nThe band
|
|
||||||
covers a number of past and current contemporary bluegrass\nartists. A Blu
|
|
||||||
e Groove show may feature songs from Alison Krauss or\nRhonda Vincent as we
|
|
||||||
ll as traditional bluegrass (Bill Monroe\, Flatt &\nScruggs) with all of it
|
|
||||||
s drive\, improvisation and harmony singing!\nBlue Groove also likes to put
|
|
||||||
a bluegrass spin on more\ncontemporary/pop artists such as U2\, Passenger\
|
|
||||||
, and the Doobie\nBrothers! \nMembers of Blue Groove bluegrass band include
|
|
||||||
Adelle Hyrkas\, lead\nvocals and rhythm guitar\; David Smith\, banjo and v
|
|
||||||
ocals\; Bob Doe\, lead\nguitar\, dobro\, and vocals\; Pete Mathison bass an
|
|
||||||
d vocals\, Tom\nWadzinski mandolin and vocals. Ocassionally we may have a g
|
|
||||||
uest\nfiddler- past guests have included fiddlers Richard Kriehn\, Michael\
|
|
||||||
nPrewitt\, and AJ Srubas. \nYou can find out more about Blue Groove at www.
|
|
||||||
bluegroovebluegrass.com
|
|
||||||
LAST-MODIFIED:20251110T003617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Giant Valley String Band w. Blue Groove
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251121T220000Z
|
|
||||||
DTEND:20251122T040000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:11255-1763740800-1763762400@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/the-angry-line-cook-4/
|
|
||||||
CREATED:20251114T133616Z
|
|
||||||
DESCRIPTION:The Best Smash Burgers in town!
|
|
||||||
LAST-MODIFIED:20251114T193617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:The Angry Line Cook
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251127T030000Z
|
|
||||||
DTEND:20251127T060000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10848-1764190800-1764201600@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/nights-with-tim-2/
|
|
||||||
CREATED:20251115T013617Z
|
|
||||||
DESCRIPTION:Dig into the deeper\, progressive side of disco\, funk\, and R&
|
|
||||||
B from the ’70s and ’80s\, with the occasional turn into house and left-fie
|
|
||||||
ld gems. An all vinyl night courtesy of local selector Tim. He knows how to
|
|
||||||
keep the vibes at White Squirrel curious and moving.
|
|
||||||
LAST-MODIFIED:20251115T073617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Nights with Tim
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251122T190000Z
|
|
||||||
DTEND:20251122T220000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10825-1763816400-1763827200@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/caitlin-robertson/
|
|
||||||
CREATED:20251109T023617Z
|
|
||||||
LAST-MODIFIED:20251109T083618Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Caitlin Robertson
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251128T220000Z
|
|
||||||
DTEND:20251129T040000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:11257-1764345600-1764367200@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/the-angry-line-cook-5/
|
|
||||||
CREATED:20251115T013617Z
|
|
||||||
DESCRIPTION:The Best Smash Burgers in town!
|
|
||||||
LAST-MODIFIED:20251115T073617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:The Angry Line Cook
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251122T000000Z
|
|
||||||
DTEND:20251122T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10821-1763748000-1763755200@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/chris-holm-friends-2/
|
|
||||||
CREATED:20251108T203618Z
|
|
||||||
LAST-MODIFIED:20251109T023619Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Chris Holm & Friends
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251123T030000Z
|
|
||||||
DTEND:20251123T060000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10829-1763845200-1763856000@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/13-howell/
|
|
||||||
CREATED:20251110T003617Z
|
|
||||||
LAST-MODIFIED:20251110T063617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:13 Howell w. Steph Was & The Secret Izz\, TH3
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251120T000000Z
|
|
||||||
DTEND:20251120T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10813-1763575200-1763582400@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/st-paul-mudsteppers-4/
|
|
||||||
CREATED:20251107T033617Z
|
|
||||||
LAST-MODIFIED:20251107T093617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:St Paul Mudsteppers
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251116T000000Z
|
|
||||||
DTEND:20251116T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10819-1763229600-1763236800@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/in-circles-sunny-day-real-estate-tri
|
|
||||||
bute-w-120-minutes/
|
|
||||||
CREATED:20251102T013616Z
|
|
||||||
DESCRIPTION:It’s the 30th anniversary of Sunny Day Real Estates LP2 record!
|
|
||||||
In Circles with be performing the album in its entirety. \n
|
|
||||||
LAST-MODIFIED:20251102T063617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:In Circles (Sunny Day Real Estate Tribute) w. 120 Minutes
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251124T000000Z
|
|
||||||
DTEND:20251124T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10833-1763920800-1763928000@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/the-jeff-becker-band/
|
|
||||||
CREATED:20251110T223617Z
|
|
||||||
LAST-MODIFIED:20251111T043617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:The Jeff Becker Band
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251122T030000Z
|
|
||||||
DTEND:20251122T060000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10823-1763758800-1763769600@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/ancient-waves-2/
|
|
||||||
CREATED:20251108T203618Z
|
|
||||||
LAST-MODIFIED:20251109T023619Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Ancient Waves w. Muun Batos & Jarad Miles
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251121T000000Z
|
|
||||||
DTEND:20251121T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10815-1763661600-1763668800@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/devaney-friends-2/
|
|
||||||
CREATED:20251107T223617Z
|
|
||||||
LAST-MODIFIED:20251108T043617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Devaney & Friends w. Aaron James
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251121T030000Z
|
|
||||||
DTEND:20251121T060000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10817-1763672400-1763683200@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/country-oke-3/
|
|
||||||
CREATED:20251108T043618Z
|
|
||||||
LAST-MODIFIED:20251108T103619Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Country-oke
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251124T030000Z
|
|
||||||
DTEND:20251124T060000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10835-1763931600-1763942400@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/bryan-the-haggards/
|
|
||||||
CREATED:20251111T043617Z
|
|
||||||
LAST-MODIFIED:20251111T103617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Bryan & The Haggards w. Jump Loop\, The American Songbook
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251125T030000Z
|
|
||||||
DTEND:20251125T060000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10839-1764018000-1764028800@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/seven-seasons/
|
|
||||||
CREATED:20251112T013616Z
|
|
||||||
LAST-MODIFIED:20251112T073617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Slasher Film
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251126T030000Z
|
|
||||||
DTEND:20251126T060000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10841-1764104400-1764115200@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/third-date-tuesday-night-residency-2
|
|
||||||
/
|
|
||||||
CREATED:20251113T053616Z
|
|
||||||
LAST-MODIFIED:20251113T113617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Third Date Tuesday Night Residency
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251117T000000Z
|
|
||||||
DTEND:20251117T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10800-1763316000-1763323200@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/bingo-with-pete-3/
|
|
||||||
CREATED:20251103T003616Z
|
|
||||||
LAST-MODIFIED:20251103T063617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Bingo With Pete!
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251125T000000Z
|
|
||||||
DTEND:20251125T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10837-1764007200-1764014400@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/devil-dodger/
|
|
||||||
CREATED:20251111T203617Z
|
|
||||||
LAST-MODIFIED:20251112T023617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Devil Dodger
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251126T000000Z
|
|
||||||
DTEND:20251126T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10843-1764093600-1764100800@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/ali-grays-country-jamboree-2/
|
|
||||||
CREATED:20251112T233616Z
|
|
||||||
DESCRIPTION:Come on out for a tribute to the Darlings of Country music\, fe
|
|
||||||
aturing Ali Gray’s Country Jamboree! Hear songs from the catalogs of Lorett
|
|
||||||
a Lynn\, Tammy Wynette\, Tanya Tucker\, Dolly Parton\, Linda Ronstadt\, Pam
|
|
||||||
Tillis\, Patty Loveless and many more. We won’t leave out the Kings of Cou
|
|
||||||
ntry\, sung by guitar legend Dan Neale and drummer extraordinaire Scott Wen
|
|
||||||
um\, including favorites like Roger Miller\, Hank Williams\, Johnny Cash\,
|
|
||||||
Buck Owens\, Waylon Jennings and more! Also featuring Dan Lowinger on bass
|
|
||||||
! 6-8.
|
|
||||||
LAST-MODIFIED:20251113T053616Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Ali Gray's Country Jamboree
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251116T030000Z
|
|
||||||
DTEND:20251116T060000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10798-1763240400-1763251200@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/lost-island-society-w-woolly-mack-po
|
|
||||||
ison-ivy-the-people/
|
|
||||||
CREATED:20251102T183617Z
|
|
||||||
LAST-MODIFIED:20251103T003617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Lost Island Society w. Woolly Mack\, Poison Ivy & The People
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251123T190000Z
|
|
||||||
DTEND:20251123T220000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10831-1763902800-1763913600@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/the-stress-of-her-regard/
|
|
||||||
CREATED:20251110T003617Z
|
|
||||||
LAST-MODIFIED:20251110T063617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:The Stress Of Her Regard
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251119T030000Z
|
|
||||||
DTEND:20251119T060000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10811-1763499600-1763510400@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/third-date-tuesday-night-residency/
|
|
||||||
CREATED:20251106T213618Z
|
|
||||||
LAST-MODIFIED:20251107T033618Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Third Date Tuesday Night Residency
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251119T000000Z
|
|
||||||
DTEND:20251119T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10809-1763488800-1763496000@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/the-new-havoline-supremes/
|
|
||||||
CREATED:20251105T053616Z
|
|
||||||
DESCRIPTION:The New Havoline Supremes are a Twin Cities based band\, featu
|
|
||||||
ring the legendary Mary Cutrufello on guitar and vocals\, and Dan Lowinger
|
|
||||||
on guitar. Formed in October of 2023\, the band plays a mix of classic cou
|
|
||||||
ntry and Mary’s original tunes\, and features Greg Schutte (Ryan Bingham\,
|
|
||||||
Mickey Hart\, Chasity Brown) on drums\, and Erik Lillestol (Cafe Accordion)
|
|
||||||
on bass.\n\nMary Cutrufello\n\nWith 30 years of experience in the music bu
|
|
||||||
siness\, Mary Cutrufello is everything from Texas honky-tonk heroine and fi
|
|
||||||
ery Midwestern roots-rocker to a powerhouse acoustic performer. Connecticut
|
|
||||||
-raised and Yale-educated\, Mary’s musical journey has taken her from the E
|
|
||||||
ast Coast to Houston and now to Minnesota. Career highlights include: the
|
|
||||||
major label release of ”When the Night is Through\,” on Mercury Records
|
|
||||||
in 1998\, appearances on national tv shows such as Tonight Show with Jay Le
|
|
||||||
no\, appearances at major festivals including Farm Aid\, and tours with Jim
|
|
||||||
mie Dale Gilmore and Tish Hinojosa. Mary has seven self-released albums\,
|
|
||||||
and has played guitar locally for Annie Mack\, Michael Perry and the Long B
|
|
||||||
eds\, and many others.\n\n\n\nDan Lowinger\, a transplant from Portland\, O
|
|
||||||
R\, is a guitarist steeped in the tradition of Western swing and classic co
|
|
||||||
untry. Since moving to the Twin Cities in 2017\, Dan has become a fixture i
|
|
||||||
n the local country and Americana scene\, playing on a regular basis with a
|
|
||||||
ll star Western swing band Honky Tonk Jump\, and country rockers Cole Diamo
|
|
||||||
nd\, while also sharing the stage with local greats\, Martin Zellar (Gear D
|
|
||||||
addies)\, Tony Andreason (The Trashmen)\, Pat Donohue (Prairie Home Compani
|
|
||||||
on) and many others! He has performed regionally and toured nationally with
|
|
||||||
many different country and swing bands from the Pacific Northwest includin
|
|
||||||
g: The Barn Door Slammers\, Ralph Carney’s Pepper Grinders\, Vince Mira\, a
|
|
||||||
nd Western Centuries.
|
|
||||||
LAST-MODIFIED:20251105T113617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:The New Havoline Supremes
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251120T030000Z
|
|
||||||
DTEND:20251120T060000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10782-1763586000-1763596800@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/venturer-w-peeler-mild-manner/
|
|
||||||
CREATED:20251107T223617Z
|
|
||||||
DESCRIPTION:Venturer is a rock based band with many other genre influences
|
|
||||||
such as funk\, folk\, prog\, punk\, and jazz. Venturer has played around th
|
|
||||||
e United States\, mainly in Milwaukee\, Madison\, Chicago\, and Minneapolis
|
|
||||||
\, but also on the East Coast and greater Midwest.
|
|
||||||
LAST-MODIFIED:20251108T043617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Venturer w. Peeler & Mild Manner
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20251118T000000Z
|
|
||||||
DTEND:20251118T020000Z
|
|
||||||
DTSTAMP:20251116T033816Z
|
|
||||||
UID:10804-1763402400-1763409600@whitesquirrelbar.com
|
|
||||||
URL:https://whitesquirrelbar.com/event/clovers-daughter/
|
|
||||||
CREATED:20251103T223616Z
|
|
||||||
LAST-MODIFIED:20251104T043617Z
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Clovers Daughter
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
@@ -22,7 +22,7 @@ import events.digitools as digitools
|
|||||||
from events.models import Organization, Scraper, Calendar, Event
|
from events.models import Organization, Scraper, Calendar, Event
|
||||||
|
|
||||||
venue, created = Organization.objects.get_or_create(
|
venue, created = Organization.objects.get_or_create(
|
||||||
name="Acme Comedy Club",
|
name="Comuniful",
|
||||||
city="Minneapolis",
|
city="Minneapolis",
|
||||||
website="https://acmecomedycompany.com/the-club/calendar/",
|
website="https://acmecomedycompany.com/the-club/calendar/",
|
||||||
is_venue = True
|
is_venue = True
|
||||||
@@ -6,10 +6,9 @@ import json
|
|||||||
from selenium.webdriver.common.by import By
|
from selenium.webdriver.common.by import By
|
||||||
from lxml import html
|
from lxml import html
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../../')
|
import dtss
|
||||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
dtss.getReady()
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from pprint import pprint as ppr
|
from pprint import pprint as ppr
|
||||||
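Note: the same swap appears in every script this commit touches -- the inline Django bootstrap is replaced by dtss.getReady(). dtss itself is not included in the diff, so this is only a plausible reading of what that helper wraps; the settings module is taken from the removed lines, and the project path is an assumption:

# dtss.py -- hypothetical sketch, not the real module on the server.
import os, sys
import django

def getReady():
    # Equivalent of the boilerplate each scraper used to carry inline.
    sys.path.append('/var/www/digisnaxx.ado')                      # assumed project root
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.django.local')
    django.setup()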
@@ -25,7 +24,7 @@ venue, created = Organization.objects.get_or_create(
|
|||||||
is_venue=True
|
is_venue=True
|
||||||
)
|
)
|
||||||
|
|
||||||
scraper,item_count_start,virtcal = digitools.getScraper(venue, 'mde')
|
scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'mde')
|
||||||
|
|
||||||
DATETIME_FORMAT = '%B %d %Y %I:%M%p'
|
DATETIME_FORMAT = '%B %d %Y %I:%M%p'
|
||||||
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'
|
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'
|
||||||
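Note: alongside the bootstrap change, every call site now passes the URL explicitly -- getScraper(venue, 'mde') becomes getScraper(venue, venue.website, 'mde') and returns a third value. The helper is not in the diff; this is only a guess at its new shape, and every model field beyond scraper.website, scraper.items and scraper.calendar is assumed:

# Hypothetical sketch of the updated digitools.getScraper -- not the real code.
def getScraper(venue, website, calendar_code):
    scraper, _ = Scraper.objects.get_or_create(organization=venue)   # assumed relation
    scraper.website = website            # the URL each call site now supplies
    scraper.save()
    virtcal = Calendar.objects.get(short_name=calendar_code)         # assumed lookup
    return scraper, scraper.items, virtcal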
|
|||||||
70
Working/venues/mn.mpls.AcmeComedy.py
Normal file
@@ -0,0 +1,70 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper, Calendar, Event
import events.digitools as digitools

tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%b %d %Y %I:%M %p'
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'

venue, created = Organization.objects.get_or_create(
    name="Acme Comedy Club",
    city="Minneapolis",
    website="https://acmecomedycompany.com/the-club/calendar/",
    is_venue=True
)

scraper, item_count_start, virtcal = digitools.getScraper(venue, venue.website, 'msp')
scraper.items = 0
scraper.save()

def get_events(ps, event_type):
    contents = ps.xpath('.//*/li[@class="event"]')
    for c in contents:
        try:
            event = {}
            day = c.xpath('.//*/span[@class="day"]/text()')[0]
            month = c.xpath('.//*/span[@class="mth"]/text()')[0]
            year = datetime.now().year
            if month == "Jan":
                year = int(year) + 1
            event['scraper'] = scraper
            event['calendars'] = [scraper.calendar]
            event['title'] = c.xpath('.//*/span[@class="event_title"]/a/text()')[0]
            event['date'] = [month, day, str(year), c.xpath('.//*/span[@class="event_time"]/text()')[0].strip()]
            event['date'] = " ".join(event['date'])
            event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT)
            event['link'] = c.xpath('.//*/span[@class="event_title"]/a/@href')[0]
            digitools.createBasicEvent(event, "Co", venue)
            scraper.items += 1
        except Exception as e:
            print(e)
            ppr(event)
            print("\n\n+++\n\n")

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

links = digitools.createURL("https://acmecomedycompany.com/the-club/calendar/")

for link in links:
    ps = digitools.getSource(br, link)
    get_events(ps, "Co")

digitools.updateScraper(scraper, item_count_start)
br.close()
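For reference, the pieces pulled from each calendar list item join into a string like the one below before strptime; the values here are made up, only the format string comes from the script:

from datetime import datetime

DATETIME_FORMAT = '%b %d %Y %I:%M %p'
date = " ".join(["Jan", "09", "2026", "8:00 PM"])        # month, day, year, event_time (illustrative)
print(datetime.strptime(date, DATETIME_FORMAT))          # 2026-01-09 20:00:00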
@@ -2,10 +2,9 @@ import os, sys
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from dateutil import relativedelta
|
from dateutil import relativedelta
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../../')
|
import dtss
|
||||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
dtss.getReady()
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from pprint import pprint as ppr
|
from pprint import pprint as ppr
|
||||||
@@ -22,7 +21,7 @@ venue, created = Organization.objects.get_or_create(
|
|||||||
is_venue = True
|
is_venue = True
|
||||||
)
|
)
|
||||||
|
|
||||||
scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')
|
scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')
|
||||||
|
|
||||||
event_type = "Mu"
|
event_type = "Mu"
|
||||||
|
|
||||||
@@ -2,10 +2,9 @@ import os, sys
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from dateutil import relativedelta
|
from dateutil import relativedelta
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../../')
|
import dtss
|
||||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
dtss.getReady()
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from pprint import pprint as ppr
|
from pprint import pprint as ppr
|
||||||
@@ -13,8 +12,6 @@ import pytz
|
|||||||
|
|
||||||
from events.models import Organization, Scraper, Event
|
from events.models import Organization, Scraper, Event
|
||||||
import events.digitools as digitools
|
import events.digitools as digitools
|
||||||
|
|
||||||
|
|
||||||
from lxml import html
|
from lxml import html
|
||||||
|
|
||||||
count = 0
|
count = 0
|
||||||
@@ -26,7 +23,7 @@ venue, created = Organization.objects.get_or_create(
|
|||||||
is_venue=True,
|
is_venue=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')
|
scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')
|
||||||
ppr(scraper)
|
ppr(scraper)
|
||||||
|
|
||||||
tz = pytz.timezone("US/Central")
|
tz = pytz.timezone("US/Central")
|
||||||
@@ -2,10 +2,9 @@ import os, sys
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from dateutil import relativedelta
|
from dateutil import relativedelta
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../../')
|
import dtss
|
||||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
dtss.getReady()
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from pprint import pprint as ppr
|
from pprint import pprint as ppr
|
||||||
@@ -21,7 +20,7 @@ venue, created = Organization.objects.get_or_create(
|
|||||||
is_venue=False
|
is_venue=False
|
||||||
)
|
)
|
||||||
|
|
||||||
scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')
|
scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')
|
||||||
|
|
||||||
DATETIME_FORMAT = '%A, %B %d , %Y %I:%M %p'
|
DATETIME_FORMAT = '%A, %B %d , %Y %I:%M %p'
|
||||||
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'
|
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'
|
||||||
@@ -2,10 +2,9 @@ import os, sys
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from dateutil import relativedelta
|
from dateutil import relativedelta
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../../')
|
import dtss
|
||||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
dtss.getReady()
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from pprint import pprint as ppr
|
from pprint import pprint as ppr
|
||||||
@@ -14,7 +13,6 @@ import pytz
|
|||||||
from events.models import Organization, Scraper, Event as DSEvent
|
from events.models import Organization, Scraper, Event as DSEvent
|
||||||
import events.digitools as digitools
|
import events.digitools as digitools
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
venue, created = Organization.objects.get_or_create(
|
venue, created = Organization.objects.get_or_create(
|
||||||
name="Parkway Theater",
|
name="Parkway Theater",
|
||||||
@@ -25,7 +23,7 @@ try:
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
venue = Organization.objects.get(name="Parkway Theater")
|
venue = Organization.objects.get(name="Parkway Theater")
|
||||||
|
|
||||||
scraper,item_count_start, virtcal = digitools.getScraper(venue, 'msp')
|
scraper,item_count_start, virtcal = digitools.getScraper(venue, venue.website, 'msp')
|
||||||
|
|
||||||
tz = pytz.timezone("US/Central")
|
tz = pytz.timezone("US/Central")
|
||||||
|
|
||||||
@@ -2,10 +2,9 @@ import os, sys
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from dateutil import relativedelta
|
from dateutil import relativedelta
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../../')
|
import dtss
|
||||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
dtss.getReady()
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from pprint import pprint as ppr
|
from pprint import pprint as ppr
|
||||||
@@ -25,13 +24,14 @@ venue, created = Organization.objects.get_or_create(
|
|||||||
is_venue = True
|
is_venue = True
|
||||||
)
|
)
|
||||||
|
|
||||||
scraper,item_count_start, virtcal = digitools.getScraper(venue, 'msp')
|
scraper,item_count_start, virtcal = digitools.getScraper(venue, venue.website, 'msp')
|
||||||
|
|
||||||
tz = pytz.timezone("US/Central")
|
tz = pytz.timezone("US/Central")
|
||||||
|
|
||||||
DATETIME_FORMAT = '%a %B %d @ %I:%M %p %Y'
|
DATETIME_FORMAT = '%a %B %d @ %I:%M %p %Y'
|
||||||
DATETIME_FORMAT_2 = '%b %d %I:%M%p %Y'
|
DATETIME_FORMAT_2 = '%a %B %d, %Y @ %I:%M %p'
|
||||||
DATETIME_FORMAT_3 = '%b %d %Y'
|
# DATETIME_FORMAT_2 = '%b %d %I:%M%p %Y'
|
||||||
|
# DATETIME_FORMAT_3 = '%b %d %Y'
|
||||||
# Set initial variables for City, etc
|
# Set initial variables for City, etc
|
||||||
calendar_url = 'https://noboolpresents.com/venues/uptown-vfw/'
|
calendar_url = 'https://noboolpresents.com/venues/uptown-vfw/'
|
||||||
current_year = str(datetime.now().year)
|
current_year = str(datetime.now().year)
|
||||||
@@ -69,7 +69,21 @@ def getEvents(br):
|
|||||||
digitools.add_calendar(new_event, 'msp')
|
digitools.add_calendar(new_event, 'msp')
|
||||||
scraper.items+=1
|
scraper.items+=1
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("oops", e)
|
try:
|
||||||
|
new_event, created = Event.objects.update_or_create(
|
||||||
|
scraper = scraper,
|
||||||
|
event_type = 'Mu',
|
||||||
|
show_title = title,
|
||||||
|
show_link = link,
|
||||||
|
show_date = datetime.strptime(dateTime[:-4].strip(), DATETIME_FORMAT_2),
|
||||||
|
show_day = datetime.strptime(dateTime[:-4].strip(), DATETIME_FORMAT_2),
|
||||||
|
# more_details = deets["tickets"],
|
||||||
|
venue = venue
|
||||||
|
)
|
||||||
|
digitools.add_calendar(new_event, 'msp')
|
||||||
|
scraper.items+=1
|
||||||
|
except Exception as e:
|
||||||
|
print("oops", e, dateTime[:-4])
|
||||||
|
|
||||||
getEvents(br)
|
getEvents(br)
|
||||||
br.find_element(By.XPATH, './/*/li[@class="tribe-events-c-nav__list-item tribe-events-c-nav__list-item--next"]/a').click()
|
br.find_element(By.XPATH, './/*/li[@class="tribe-events-c-nav__list-item tribe-events-c-nav__list-item--next"]/a').click()
|
||||||
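Note: the new fallback branch in the Uptown VFW scraper retries the same string with DATETIME_FORMAT_2 after dropping the last four characters. The diff does not show the raw value; assuming those trailing characters are a timezone suffix, the parse works out roughly like this:

from datetime import datetime

DATETIME_FORMAT_2 = '%a %B %d, %Y @ %I:%M %p'
dateTime = "Fri November 28, 2025 @ 7:00 PM CST"          # illustrative raw value
show_date = datetime.strptime(dateTime[:-4].strip(), DATETIME_FORMAT_2)
print(show_date)                                           # 2025-11-28 19:00:00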
@@ -2,10 +2,9 @@ import os, sys
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from dateutil import relativedelta
|
from dateutil import relativedelta
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../')
|
import dtss
|
||||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
|
dtss.getReady()
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from pprint import pprint as ppr
|
from pprint import pprint as ppr
|
||||||
@@ -14,6 +13,10 @@ import pytz
|
|||||||
from events.models import Organization, Scraper
|
from events.models import Organization, Scraper
|
||||||
import events.digitools as digitools
|
import events.digitools as digitools
|
||||||
|
|
||||||
|
tz = pytz.timezone("US/Central")
|
||||||
|
DATETIME_FORMAT = '%b %d %I:%M %p %Y'
|
||||||
|
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'
|
||||||
|
|
||||||
venue, created = Organization.objects.get_or_create(
|
venue, created = Organization.objects.get_or_create(
|
||||||
name="Cabooze",
|
name="Cabooze",
|
||||||
city="Minneapolis",
|
city="Minneapolis",
|
||||||
@@ -21,14 +24,7 @@ venue, created = Organization.objects.get_or_create(
|
|||||||
is_venue=True
|
is_venue=True
|
||||||
)
|
)
|
||||||
|
|
||||||
scraper,item_count_start = digitools.getScraper(venue)
|
scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')
|
||||||
|
|
||||||
event_type = ""
|
|
||||||
|
|
||||||
# Time Signatures
|
|
||||||
tz = pytz.timezone("US/Central")
|
|
||||||
DATETIME_FORMAT = '%b %d %I:%M %p %Y'
|
|
||||||
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'
|
|
||||||
|
|
||||||
def get_events(ps, event_type):
|
def get_events(ps, event_type):
|
||||||
print("Getting events ...")
|
print("Getting events ...")
|
||||||
@@ -44,14 +40,15 @@ def get_events(ps, event_type):
|
|||||||
year = datetime.now().year
|
year = datetime.now().year
|
||||||
if month == "Jan":
|
if month == "Jan":
|
||||||
year = int(year) + 1
|
year = int(year) + 1
|
||||||
event['calendar'] = scraper.calendar
|
event['scraper'] = scraper
|
||||||
|
event['calendars'] = [scraper.calendar]
|
||||||
event['title'] = c.xpath('.//*/div[@class="vp-event-name"]/text()')[0]
|
event['title'] = c.xpath('.//*/div[@class="vp-event-name"]/text()')[0]
|
||||||
event['date'] = [date, time, str(year)]
|
event['date'] = [date, time, str(year)]
|
||||||
event['date'] = " ".join(event['date'])
|
event['date'] = " ".join(event['date'])
|
||||||
event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT)
|
event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT)
|
||||||
event['link'] = "https://www.cabooze.com/" + c.xpath('.//a[@class="vp-event-link"]/@href')[0]
|
event['link'] = "https://www.cabooze.com/" + c.xpath('.//a[@class="vp-event-link"]/@href')[0]
|
||||||
print("Event Dict Created")
|
# print("Event Dict Created")
|
||||||
ppr(event)
|
# ppr(event)
|
||||||
digitools.createBasicEvent(event, event_type, venue)
|
digitools.createBasicEvent(event, event_type, venue)
|
||||||
scraper.items+=1
|
scraper.items+=1
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -2,10 +2,9 @@ import os, sys
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from dateutil import relativedelta
|
from dateutil import relativedelta
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../../')
|
import dtss
|
||||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
dtss.getReady()
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from pprint import pprint as ppr
|
from pprint import pprint as ppr
|
||||||
@@ -21,9 +20,7 @@ venue, created = Organization.objects.get_or_create(
|
|||||||
is_venue=True
|
is_venue=True
|
||||||
)
|
)
|
||||||
|
|
||||||
scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')
|
scraper,item_count_start,virtcal = digitools.getScraper(venue,venue.website, 'msp')
|
||||||
|
|
||||||
tz = pytz.timezone("US/Central")
|
|
||||||
|
|
||||||
DATETIME_FORMAT = '%A, %B %d, %Y %I:%M %p'
|
DATETIME_FORMAT = '%A, %B %d, %Y %I:%M %p'
|
||||||
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'
|
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'
|
||||||
@@ -33,6 +30,9 @@ DATETIME_FORMAT_5 = '%A, %B %d @%I%p %Y'
|
|||||||
|
|
||||||
def get_events(ps):
|
def get_events(ps):
|
||||||
links = ps.xpath('.//*/div[@class="summary-title"]/a/@href')
|
links = ps.xpath('.//*/div[@class="summary-title"]/a/@href')
|
||||||
|
print("Length of Links: ", len(links))
|
||||||
|
links = list(set(links))
|
||||||
|
print("New Length of Links: ", len(links))
|
||||||
for l in links:
|
for l in links:
|
||||||
if "cedar-news-blog" in l:
|
if "cedar-news-blog" in l:
|
||||||
continue
|
continue
|
||||||
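Note: the Cedar change dedupes the scraped links with set(), which also scrambles their page order. If order ever matters there, an order-preserving equivalent is a one-liner:

links = list(dict.fromkeys(links))   # drops duplicates, keeps first-seen order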
@@ -2,10 +2,9 @@ import os, sys
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from dateutil import relativedelta
|
from dateutil import relativedelta
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../../')
|
import dtss
|
||||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
dtss.getReady()
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from pprint import pprint as ppr
|
from pprint import pprint as ppr
|
||||||
@@ -17,6 +16,16 @@ from lxml import html
|
|||||||
from events.models import Organization, Scraper, Event
|
from events.models import Organization, Scraper, Event
|
||||||
import events.digitools as digitools
|
import events.digitools as digitools
|
||||||
|
|
||||||
|
def process_times(times):
|
||||||
|
time = []
|
||||||
|
for t in times:
|
||||||
|
t = t.replace("\n", "").replace("TBA", "")
|
||||||
|
if len(t) > 0 and t.endswith("pm"):
|
||||||
|
if "-" in t:
|
||||||
|
t = t.split("-")[0] + "pm"
|
||||||
|
time.append(t)
|
||||||
|
return time
|
||||||
|
|
||||||
venue, created = Organization.objects.get_or_create(
|
venue, created = Organization.objects.get_or_create(
|
||||||
name="Club 331",
|
name="Club 331",
|
||||||
city="Minneapolis",
|
city="Minneapolis",
|
||||||
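The process_times helper moved to the top of the 331 Club scraper keeps only entries that end in "pm" and trims ranges down to their start time. With illustrative inputs:

print(process_times(["\n7pm", "TBA", "9-11pm"]))   # ['7pm', '9pm']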
@@ -24,13 +33,12 @@ venue, created = Organization.objects.get_or_create(
|
|||||||
is_venue=True,
|
is_venue=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')
|
scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')
|
||||||
|
|
||||||
tz = pytz.timezone("US/Central")
|
|
||||||
|
|
||||||
DATETIME_FORMAT = '%b %d %I%p %Y'
|
DATETIME_FORMAT = '%b %d %I%p %Y'
|
||||||
DATETIME_FORMAT_2 = '%b %d %I:%M%p %Y'
|
DATETIME_FORMAT_2 = '%b %d %I:%M%p %Y'
|
||||||
DATETIME_FORMAT_3 = '%b %d %Y'
|
DATETIME_FORMAT_3 = '%b %d %Y'
|
||||||
|
|
||||||
# Set initial variables for City, etc
|
# Set initial variables for City, etc
|
||||||
calendar_url = 'https://331club.com/#calendar'
|
calendar_url = 'https://331club.com/#calendar'
|
||||||
current_year = str(datetime.now().year)
|
current_year = str(datetime.now().year)
|
||||||
@@ -53,19 +61,7 @@ sleep(3)
|
|||||||
dates = ps.xpath('.//*/div[@class="event"]')
|
dates = ps.xpath('.//*/div[@class="event"]')
|
||||||
dates = dates + ps.xpath('.//*/div[@class="event hidden"]')
|
dates = dates + ps.xpath('.//*/div[@class="event hidden"]')
|
||||||
|
|
||||||
def process_times(times):
|
|
||||||
# print("Times: ", times)
|
|
||||||
time = []
|
|
||||||
for t in times:
|
|
||||||
t = t.replace("\n", "").replace("TBA", "")
|
|
||||||
if len(t) > 0 and t.endswith("pm"):
|
|
||||||
if "-" in t:
|
|
||||||
t = t.split("-")[0] + "pm"
|
|
||||||
time.append(t)
|
|
||||||
return time
|
|
||||||
|
|
||||||
events = []
|
events = []
|
||||||
|
|
||||||
for d in dates:
|
for d in dates:
|
||||||
event_date = d.xpath('.//div[@class="event-date"]/span/text()')[:2]
|
event_date = d.xpath('.//div[@class="event-date"]/span/text()')[:2]
|
||||||
cols = d.xpath('.//div[@class="column"]')
|
cols = d.xpath('.//div[@class="column"]')
|
||||||
@@ -101,5 +97,4 @@ for d in dates:
|
|||||||
events.append(event)
|
events.append(event)
|
||||||
|
|
||||||
br.close()
|
br.close()
|
||||||
|
|
||||||
digitools.updateScraper(scraper, item_count_start)
|
digitools.updateScraper(scraper, item_count_start)
|
||||||
@@ -2,10 +2,9 @@ import os, sys
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from dateutil import relativedelta
|
from dateutil import relativedelta
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../../')
|
import dtss
|
||||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
|
dtss.getReady()
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from pprint import pprint as ppr
|
from pprint import pprint as ppr
|
||||||
@@ -16,6 +15,59 @@ import pytz
|
|||||||
from events.models import Organization, Scraper, Event
|
from events.models import Organization, Scraper, Event
|
||||||
import events.digitools as digitools
|
import events.digitools as digitools
|
||||||
|
|
||||||
|
def get_info(pse):
|
||||||
|
event = {}
|
||||||
|
event['scraper'] = scraper
|
||||||
|
event['calendars'] = [scraper.calendar]
|
||||||
|
event["venue"] = pse.xpath('.//*/div[@class="content"]/div/div[@class="venue_name"]/text()')[0].replace('\t', '').replace('\n', '').strip()
|
||||||
|
event["show_title"] = pse.xpath('.//*/span[@class="show_title"]/text()')[0].replace('\t', '').replace('\n', '')
|
||||||
|
if event["show_title"] == "":
|
||||||
|
event["show_title"] = pse.xpath('.//*/span[@class="show_title"]/text()')[2].replace('\t', '').replace('\n', '')
|
||||||
|
event["guests"] = pse.xpath('.//*/div[@class="feature_details_main d-flex align-items-center"]/div/h4/text()')
|
||||||
|
event["flyer"] = pse.xpath('.//*/img[@class="gig_poster lazy loaded"]/@src')
|
||||||
|
try:
|
||||||
|
event = get_date(pse, event)
|
||||||
|
except Exception as e:
|
||||||
|
print("date issue: ", e)
|
||||||
|
try:
|
||||||
|
event = get_details(pse, event)
|
||||||
|
except Exception as e:
|
||||||
|
print("details issue: ", e)
|
||||||
|
try:
|
||||||
|
event["date_time"] = datetime.strptime(" ".join(event["date"]) + " " + event["details"]["Doors Open"], DATETIME_FORMAT)
|
||||||
|
except Exception as e:
|
||||||
|
print("Using alt date format 2: ", e)
|
||||||
|
try:
|
||||||
|
event["date_time"] = datetime.strptime(" ".join(event["date"]) + " " + event["details"]["Doors Open"], DATETIME_FORMAT_2)
|
||||||
|
ppr(event)
|
||||||
|
except Exception as e:
|
||||||
|
print("Using alt date format 3: ", e)
|
||||||
|
print(event['date'])
|
||||||
|
event["date_time"] = datetime.strptime(" ".join(event["date"]), DATETIME_FORMAT_3)
|
||||||
|
print("The Event:")
|
||||||
|
ppr(event)
|
||||||
|
return event
|
||||||
|
|
||||||
|
def get_date(pse, event):
|
||||||
|
month = pse.xpath('.//*/div[@class="date_container"]/div/div[@class="month"]/text()')[0].replace('\t', '').replace('\n', '')
|
||||||
|
day = pse.xpath('.//*/div[@class="date_container"]/div/div[@class="day"]/text()')[0].replace('\t', '').replace('\n', '')
|
||||||
|
year = pse.xpath('.//*/div[@class="date_container"]/div/div[@class="year"]/text()')[0].replace('\t', '').replace('\n', '')
|
||||||
|
event["date"] = [month, day, year]
|
||||||
|
return event
|
||||||
|
|
||||||
|
def get_details(pse, event):
|
||||||
|
try:
|
||||||
|
details = pse.xpath('.//*/div[@class="show_details text-center"]/div/div/h6/text()')
|
||||||
|
info = pse.xpath('.//*/div[@class="show_details text-center"]/div/div/h2/text()')
|
||||||
|
di = zip(details, info)
|
||||||
|
details = {}
|
||||||
|
for d,i in di:
|
||||||
|
details[d] = i
|
||||||
|
event["details"] = details
|
||||||
|
return event
|
||||||
|
except Exception as e:
|
||||||
|
print("details issue: ", e)
|
||||||
|
|
||||||
venue, created = Organization.objects.get_or_create(
|
venue, created = Organization.objects.get_or_create(
|
||||||
name="First Avenue",
|
name="First Avenue",
|
||||||
city="Minneapolis",
|
city="Minneapolis",
|
||||||
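In the First Avenue scraper, get_details (now defined near the top of the file) zips the show page's h6 labels against its h2 values; the resulting dict is what get_info later reads "Doors Open" from. With made-up values:

labels = ["Doors Open", "Show Starts"]                    # h6 text (illustrative)
values = ["7:00 pm", "8:00 pm"]                           # h2 text (illustrative)
details = {d: i for d, i in zip(labels, values)}
# details["Doors Open"] is then joined to the month/day/year pieces in get_info()
# before the strptime fallbacks run.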
@@ -23,7 +75,7 @@ venue, created = Organization.objects.get_or_create(
|
|||||||
is_venue = True
|
is_venue = True
|
||||||
)
|
)
|
||||||
|
|
||||||
scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')
|
scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')
|
||||||
|
|
||||||
tz = pytz.timezone("US/Central")
|
tz = pytz.timezone("US/Central")
|
||||||
|
|
||||||
@@ -60,7 +112,7 @@ else:
|
|||||||
calendar_url_2 = 'https://first-avenue.com/shows/?start_date=' + str(year) + next_month_string
|
calendar_url_2 = 'https://first-avenue.com/shows/?start_date=' + str(year) + next_month_string
|
||||||
|
|
||||||
|
|
||||||
print("\n\n", calendar_url, calendar_url_2, "\n\n")
|
# print("\n\n", calendar_url, calendar_url_2, "\n\n")
|
||||||
|
|
||||||
if len(sys.argv) >= 2:
|
if len(sys.argv) >= 2:
|
||||||
arg1 = sys.argv[1]
|
arg1 = sys.argv[1]
|
||||||
@@ -69,7 +121,7 @@ else:
|
|||||||
print("No run_env")
|
print("No run_env")
|
||||||
quit()
|
quit()
|
||||||
|
|
||||||
|
# Get Events based on date of month
|
||||||
if datetime.now().day < 8:
|
if datetime.now().day < 8:
|
||||||
ps = digitools.getSource(br, calendar_url)
|
ps = digitools.getSource(br, calendar_url)
|
||||||
shows = ps.xpath('.//*/div[@class="show_name content flex-fill"]/div/div/h4/a/@href')[:63]
|
shows = ps.xpath('.//*/div[@class="show_name content flex-fill"]/div/div/h4/a/@href')[:63]
|
||||||
@@ -84,63 +136,11 @@ elif 14 < datetime.now().day < 21:
|
|||||||
else:
|
else:
|
||||||
ps = digitools.getSource(br, calendar_url)
|
ps = digitools.getSource(br, calendar_url)
|
||||||
shows = ps.xpath('.//*/div[@class="show_name content flex-fill"]/div/div/h4/a/@href')
|
shows = ps.xpath('.//*/div[@class="show_name content flex-fill"]/div/div/h4/a/@href')
|
||||||
|
|
||||||
ps = digitools.getSource(br, calendar_url_2)
|
ps = digitools.getSource(br, calendar_url_2)
|
||||||
shows = shows + ps.xpath('.//*/div[@class="show_name content flex-fill"]/div/div/h4/a/@href')[:63]
|
shows = shows + ps.xpath('.//*/div[@class="show_name content flex-fill"]/div/div/h4/a/@href')[:63]
|
||||||
|
|
||||||
events = []
|
events = []
|
||||||
|
|
||||||
def get_info(pse):
|
|
||||||
event = {}
|
|
||||||
event['scraper'] = scraper
|
|
||||||
event['calendars'] = [scraper.calendar]
|
|
||||||
event["venue"] = pse.xpath('.//*/div[@class="content"]/div/div[@class="venue_name"]/text()')[0].replace('\t', '').replace('\n', '').strip()
|
|
||||||
event["show_title"] = pse.xpath('.//*/span[@class="show_title"]/text()')[0].replace('\t', '').replace('\n', '')
|
|
||||||
if event["show_title"] == "":
|
|
||||||
event["show_title"] = pse.xpath('.//*/span[@class="show_title"]/text()')[2].replace('\t', '').replace('\n', '')
|
|
||||||
event["guests"] = pse.xpath('.//*/div[@class="feature_details_main d-flex align-items-center"]/div/h4/text()')
|
|
||||||
event["flyer"] = pse.xpath('.//*/img[@class="gig_poster lazy loaded"]/@src')
|
|
||||||
try:
|
|
||||||
event = get_date(pse, event)
|
|
||||||
except Exception as e:
|
|
||||||
print("date issue: ", e)
|
|
||||||
try:
|
|
||||||
event = get_details(pse, event)
|
|
||||||
except Exception as e:
|
|
||||||
print("details issue: ", e)
|
|
||||||
try:
|
|
||||||
event["date_time"] = datetime.strptime(" ".join(event["date"]) + " " + event["details"]["Doors Open"], DATETIME_FORMAT)
|
|
||||||
except Exception as e:
|
|
||||||
print("Using alt date format 2: ", e)
|
|
||||||
try:
|
|
||||||
event["date_time"] = datetime.strptime(" ".join(event["date"]) + " " + event["details"]["Doors Open"], DATETIME_FORMAT_2)
|
|
||||||
ppr(event)
|
|
||||||
except Exception as e:
|
|
||||||
print("Using alt date format 3: ", e)
|
|
||||||
print(event['date'])
|
|
||||||
event["date_time"] = datetime.strptime(" ".join(event["date"]), DATETIME_FORMAT_3)
|
|
||||||
return event
|
|
||||||
|
|
||||||
def get_date(pse, event):
|
|
||||||
month = pse.xpath('.//*/div[@class="date_container"]/div/div[@class="month"]/text()')[0].replace('\t', '').replace('\n', '')
|
|
||||||
day = pse.xpath('.//*/div[@class="date_container"]/div/div[@class="day"]/text()')[0].replace('\t', '').replace('\n', '')
|
|
||||||
year = pse.xpath('.//*/div[@class="date_container"]/div/div[@class="year"]/text()')[0].replace('\t', '').replace('\n', '')
|
|
||||||
event["date"] = [month, day, year]
|
|
||||||
return event
|
|
||||||
|
|
||||||
def get_details(pse, event):
|
|
||||||
try:
|
|
||||||
details = pse.xpath('.//*/div[@class="show_details text-center"]/div/div/h6/text()')
|
|
||||||
info = pse.xpath('.//*/div[@class="show_details text-center"]/div/div/h2/text()')
|
|
||||||
di = zip(details, info)
|
|
||||||
details = {}
|
|
||||||
for d,i in di:
|
|
||||||
details[d] = i
|
|
||||||
event["details"] = details
|
|
||||||
return event
|
|
||||||
except Exception as e:
|
|
||||||
print("details issue: ", e)
|
|
||||||
|
|
||||||
for show in shows:
|
for show in shows:
|
||||||
br.get(show)
|
br.get(show)
|
||||||
sleep(2)
|
sleep(2)
|
||||||
@@ -153,6 +153,7 @@ for show in shows:
|
|||||||
event = get_info(pse)
|
event = get_info(pse)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("get_info error: ", e)
|
print("get_info error: ", e)
|
||||||
|
pass
|
||||||
try:
|
try:
|
||||||
event["link"] = show
|
event["link"] = show
|
||||||
if event["venue"] in ["Palace Theater", "Turf Club", "The Fitzgerald Theater", "Amsterdam Bar & Hall"]:
|
if event["venue"] in ["Palace Theater", "Turf Club", "The Fitzgerald Theater", "Amsterdam Bar & Hall"]:
|
||||||
@@ -168,7 +169,7 @@ for show in shows:
|
|||||||
scraper.items+=1
|
scraper.items+=1
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("event creation error: ", e, "\n\n", event, "\n\n", created)
|
print("event creation error: ", e, "\n\n", event, "\n\n", created)
|
||||||
quit()
|
# quit()
|
||||||
|
|
||||||
ppr(events)
|
ppr(events)
|
||||||
br.close()
|
br.close()
|
||||||
@@ -2,10 +2,9 @@ import os, sys
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from dateutil import relativedelta
|
from dateutil import relativedelta
|
||||||
|
|
||||||
import django
|
sys.path.append('/var/www/digisnaxx.ado/scrapers')
|
||||||
sys.path.append('../../../')
|
import dtss
|
||||||
-os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
-django.setup()
+dtss.getReady()
 
 from time import sleep
 from pprint import pprint as ppr
@@ -16,7 +15,7 @@ import events.digitools as digitools
 
 count = 0
 tz = pytz.timezone("US/Central")
-DATETIME_FORMAT = '%a, %b %d %Y %I:%M %p SHOW'
+DATETIME_FORMAT = '%a, %b %d %Y %I%p'
 DATETIME_FORMAT_2 = '%a, %b %d %Y %I:%M %p SHOW'
 
 venue, created = Organization.objects.get_or_create(
@@ -26,7 +25,7 @@ venue, created = Organization.objects.get_or_create(
     is_venue = True
 )
 
-scraper,item_count_start = digitools.getScraper(venue)
+scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')
 
 def get_events(ps, event_type):
     contents = ps.xpath('.//*/div[@class="performances whitespace-pre-line w-full md:w-3/4"]')
@@ -34,34 +33,26 @@ def get_events(ps, event_type):
         try:
             event = {}
             event['scraper'] = scraper
-            event['calendar'] = scraper.calendar
+            event['calendars'] = scraper.calendar
             event['title'] = c.xpath('.//*/h3[@class="text-3xl font-semibold font-heading mr-auto"]/text()')[0]
             event['link'] = venue.website + c.xpath('.//*/a[@class="show-link"]/@href')[0]
             event['date'] = c.xpath('.//*/h4[@class="day-of-week"]/text()')[0]
-            year = datetime.now().year
-            if "Brunch" in event['title']:
-                event['time'] = "11:00 AM SHOW"
-            else:
-                event['time'] = c.xpath('.//*/div[@class="performance-btn"]/button/text()')[0]
-
-            event['datetime'] = event['date'] + " " + str(year) + " " + event['time']
-            try:
-                event['dateStamp'] = datetime.strptime(event['datetime'], DATETIME_FORMAT)
-            except:
-                event['datetime'] = event['date'] + " " + str(year) + " " + "07:00 PM SHOW"
-                event['dateStamp'] = datetime.strptime(event['datetime'], DATETIME_FORMAT)
-                event['title'] = event['title'] + " (Time Estimated)"
-            try:
-                digitools.createBasicEvent(event, event_type, venue)
-                scraper.items+=1
-            except Exception as e:
-                print(e)
-                quit()
+            month = event['date'].split(' ')[1].strip()
+            print("MONTH: ", month)
+            year = int(datetime.today().year)
+            if month in ['Jan', 'Feb', 'Mar']:
+                year = year + 1
+            time = c.xpath('.//p/span/text()')[0][:4].strip()
+            if time[-1:] == 'm':
+                event['dateStamp'] = event['date'] + ' ' + str(year) + ' ' + time
+                event['dateStamp'] = datetime.strptime(event['dateStamp'], DATETIME_FORMAT)
+                digitools.createBasicEvent(event, event_type, venue)
+            else:
+                ppr(event)
+                print("MONTH: ", month)
 
         except Exception as e:
-            ppr(event)
-            print(e)
-            quit()
+            print("What?",e,"\n\n+++")
 
 if len(sys.argv) >= 2:
     arg1 = sys.argv[1]
@@ -71,6 +62,7 @@ else:
     quit()
 
 ps = digitools.getSource(br, venue.website)
+br.execute_script("window.scrollTo(0, window.scrollY + 5000)")
 get_events(ps, "Mu")
 
 # ppr(events)
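A note on the bootstrap change that repeats through this commit: the per-script Django setup (the sys.path hack, DJANGO_SETTINGS_MODULE, django.setup()) is replaced everywhere by "import dtss; dtss.getReady()". The dtss module itself is not part of the diff, so the following is only a minimal sketch of what getReady() presumably wraps, reconstructed from the lines it replaces; the default settings-module name is an assumption.

# dtss.py -- hypothetical sketch, not the actual module from this repo.
import os
import sys

import django

def getReady(settings_module='ds_events.settings'):
    # Assumption: the default mirrors one of the values the old scripts used
    # ('ds_events.settings' / 'config.django.local'); adjust to the real project settings.
    sys.path.append('/var/www/digisnaxx.ado/scrapers')
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', settings_module)
    django.setup()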
78
Working/venues/mn.mpls.pillarforum.py
Normal file
@@ -0,0 +1,78 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
import events.digitools as digitools


current_year = str(datetime.now().year)

venue, created = Organization.objects.get_or_create(
    name="Piller Forum",
    city="Minneapolis",
    website="https://www.pilllar.com/pages/events",
    is_venue = True
)

scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')

event_type = "Mu"

# Time Signatures
tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%b. %d %Y %I:%M%p'
DATETIME_FORMAT_night = '%b. %d %Y %I:%M %p'
DATETIME_FORMAT_2 = '%b. %d %Y %I:%Mam'

def get_events(ps, event_type):
    contents = ps.xpath('.//*/div[@class="sse-row sse-clearfix"]')
    for c in contents:
        year = datetime.today().year

        try:
            date = c.xpath('.//h1[@class="sse-size-64"]/text()')[0]
            month = date.split(".")[0]
            ppr(date)
            if month in ['JAN', 'FEB', 'MAR']:
                year = int(datetime.today().year) + 1
            event = {}
            event['scraper'] = scraper
            event['calendars'] = scraper.calendar
            event['link'] = venue.website

            event['title'] = c.xpath('.//p/span/b/text()')[0]
            event['deets'] = c.xpath('.//p/span/text()')[0]
            event['title'] = event['title'] + ' ' + event['deets']
            paras = c.xpath('.//p/text()')
            times = paras[1].split(" ")[1]
            event['datetime'] = "{0} {1} {2}".format(date, year, times)
            event['dateStamp'] = datetime.strptime(event['datetime'], DATETIME_FORMAT)
            ppr(event)
            digitools.createBasicEvent(event, event_type, venue)
        except Exception as e:
            # print(e)
            pass

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

ps = digitools.getSource(br, venue.website)
get_events(ps, event_type)
sleep(3)

br.close()

digitools.updateScraper(scraper, item_count_start)
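For the datetime assembly in the new pillarforum scraper above: the heading text, the current (possibly bumped) year, and the second word of the second paragraph line are joined and parsed with DATETIME_FORMAT. A worked example, where the sample strings are assumptions about what the pilllar.com markup yields:

# Illustration only; 'DEC. 31' and '7:30pm' are assumed sample values.
from datetime import datetime

DATETIME_FORMAT = '%b. %d %Y %I:%M%p'

date = 'DEC. 31'      # h1.sse-size-64 text (assumed shape)
times = '7:30pm'      # second word of the second <p> line (assumed shape)
year = 2025           # bumped by one when the month is JAN/FEB/MAR

stamp = "{0} {1} {2}".format(date, year, times)   # 'DEC. 31 2025 7:30pm'
print(datetime.strptime(stamp, DATETIME_FORMAT))  # 2025-12-31 19:30:00

strptime matches %b and %p case-insensitively, so the upper-case month abbreviations from the page parse without extra normalisation.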
@@ -2,10 +2,9 @@ import os, sys
 from datetime import datetime
 from dateutil import relativedelta
 
-import django
-sys.path.append('../../../../')
-os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
-django.setup()
+sys.path.append('/var/www/digisnaxx.ado/scrapers')
+import dtss
+dtss.getReady()
 
 from time import sleep
 from pprint import pprint as ppr
@@ -21,7 +20,7 @@ venue, created = Organization.objects.get_or_create(
     is_venue=True
 )
 
-scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')
+scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')
 
 DATETIME_FORMAT = '%B %d %Y %I:%M%p'
 DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'
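The hunk above shows the other recurring change in this commit: digitools.getScraper now returns a third value (virtcal) and takes extra arguments. The updated call sites are not uniform, though; some pass getScraper(venue, 'msp') and others getScraper(venue, venue.website, 'msp'), so either the helper accepts an optional website argument or one set of call sites is out of date. The stub below is only a guess at a signature that would tolerate both forms; none of its internals come from the repo.

# Hypothetical stub for events.digitools.getScraper, inferred from call sites only.
def getScraper(venue, website=None, calendar_code=None):
    if calendar_code is None:
        # Two-argument form: the second positional argument is the calendar code.
        website, calendar_code = venue.website, website
    scraper = None           # placeholder: the real helper presumably loads/creates a Scraper row
    item_count_start = 0     # placeholder: the real helper presumably records the current item count
    virtcal = calendar_code  # placeholder: the real helper presumably resolves a virtual calendar
    return scraper, item_count_start, virtcal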
@@ -2,10 +2,9 @@ import os, sys
 from datetime import datetime
 from dateutil import relativedelta
 
-import django
-sys.path.append('../../../../')
-os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
-django.setup()
+sys.path.append('/var/www/digisnaxx.ado/scrapers')
+import dtss
+dtss.getReady()
 
 from time import sleep
 from pprint import pprint as ppr
@@ -21,7 +20,7 @@ venue, created = Organization.objects.get_or_create(
     is_venue = True
 )
 
-scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')
+scraper,item_count_start,virtcal = digitools.getScraper(venue, venue.website, 'msp')
 
 event_type = ""
 
@@ -2,10 +2,10 @@ import os, sys
 from datetime import datetime
 from dateutil import relativedelta
 
-import django
-sys.path.append('../../../../')
-os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
-django.setup()
+sys.path.append('/var/www/digisnaxx.ado/scrapers')
+import dtss
+dtss.getReady()
+
 from time import sleep
 from pprint import pprint as ppr
 import pytz
@@ -21,7 +21,7 @@ venue, created = Organization.objects.get_or_create(
     is_venue = False
 )
 
-scraper,item_count_start, virtcal = digitools.getScraper(venue, 'msp')
+scraper,item_count_start, virtcal = digitools.getScraper(venue, venue.website, 'msp')
 
 # Time Signatures
 tz = pytz.timezone("US/Central")
@@ -1,64 +0,0 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr

from events.models import Event as DSEvent, Organization
from digitools import getBrowser, createDashURL, createBasicEvent, getSource

venue, created = Organization.objects.get_or_create(
    name="Comedy Corner",
    city="Minneapolis",
    website="https://comedycornerunderground.com/calendar",
)

calendar_url = "https://comedycornerunderground.com/calendar"

DATETIME_FORMAT = '%m %d %I%p %Y'

def get_events(ps, link):
    contents = ps.xpath('.//*/td')
    for c in contents:
        try:
            day_num = c.xpath('.//*/div[@class="marker-daynum"]/text()')[0]
            events = c.xpath('.//*/li')
            # print(events)
            for e in events:
                event = {}
                print(link)
                month = link.split("month=")[1].split("-")[0]
                year = link.split("month=")[1].split("-")[1]
                event['title'] = e.xpath('.//*/span[@class="item-title"]/text()')[0]
                event['time'] = e.xpath('.//*/span[@class="item-time item-time--12hr"]/text()')[0].replace("\xa0", "")
                event['link'] = "https://comedycornerunderground.com" + e.xpath('.//a/@href')[0]
                event['date'] = str(day_num) + ' ' + event['time'] + 'm'
                dateStamp = month + ' ' + event['date'] + ' ' + year
                event['dateStamp'] = datetime.strptime(dateStamp, DATETIME_FORMAT)
                createBasicEvent(event, 'Co')
                print('\n\n++\n\n')
        except Exception as e:
            continue

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = getBrowser(arg1)
else:
    print("No run_env")
    quit()

links = createDashURL("https://comedycornerunderground.com/calendar?view=calendar&month=")

for link in links:
    ps = getSource(br, link)
    get_events(ps, link)
    sleep(5)

# ppr(events)
br.close()
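The deleted calendar scraper above derived month and year from the ?month=MM-YYYY query string and glued them to the day cell and 12-hour time before parsing. A worked example of that assembly, with sample values that are assumptions about the old markup:

# Illustration of the removed date handling; '04-2025', '17' and '8p' are assumed samples.
from datetime import datetime

DATETIME_FORMAT = '%m %d %I%p %Y'

link = 'https://comedycornerunderground.com/calendar?view=calendar&month=04-2025'
month = link.split("month=")[1].split("-")[0]        # '04'
year = link.split("month=")[1].split("-")[1]         # '2025'
day_num, item_time = '17', '8p'                      # marker-daynum and item-time--12hr (assumed)

date = str(day_num) + ' ' + item_time + 'm'          # '17 8pm'
dateStamp = month + ' ' + date + ' ' + year          # '04 17 8pm 2025'
print(datetime.strptime(dateStamp, DATETIME_FORMAT)) # 2025-04-17 20:00:00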
76
Working/workshop/mn.mpls.ComedyUnderground.py
Normal file
@@ -0,0 +1,76 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from time import sleep
from pprint import pprint as ppr

from events.models import Organization, Scraper
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Comedy Corner",
    city="Minneapolis",
    website="https://comedycornerunderground.com/",
)

scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')

DATETIME_FORMAT = '%m %d %Y %I:%M %p'

def get_events(ps):
    sleep(3)
    contents = ps.xpath('.//*/div[@class="calendar-day"]')
    month = []
    year = []
    print("Contents: ", contents)
    for c in contents:
        try:
            day_num = c.xpath('.//*/div[@class="day-number"]/text()')[0]
            events = c.xpath('.//div[@class="event-item"]')
            # print(events)
            for e in events:
                event = {}
                event['scraper'] = scraper
                event['calendars'] = [scraper.calendar]
                event['title'] = e.xpath('.//*/div[@class="event-name"]/text()')[0]
                time = e.xpath('.//*/div[@class="event-name"]/text()')[0]
                event['link'] = e.xpath('.//a/@href')[0]
                event['date'] = str(day_num) + ' ' + event['time'] + 'm'
                dateStamp = month + ' ' + day_num + ' ' + year + time
                event['dateStamp'] = datetime.strptime(dateStamp, DATETIME_FORMAT)
                ppr(event)
                # digitools.createBasicEvent(event, 'Co')
                print('\n\n++\n\n')
        except Exception as e:
            print("\n\n",e,"\n\n")
            continue

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()


ps = digitools.getSource(br, venue.website)
br.execute_script("window.scrollTo(0, window.scrollY + 2500)")
sleep(8)
cal = ps.xpath('.//*/div[@id="ccu-calendar"]')
print(cal)
calendar = ps.xpath('.//*/div[@id="ccu-calendar"]/*')
print(calendar)
contents = ps.xpath('.//*/div[@id="ccu-calendar"]/*/div[@class="calendar-grid"]')
print(contents)
br.close()
quit()
get_events(ps)
sleep(5)

# ppr(events)
br.close()
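Two things worth flagging in the new ComedyUnderground scraper as committed: the main block closes the browser and calls quit() before get_events(ps) is ever reached, and inside get_events the month and year placeholders are still empty lists while event['time'] is read before it is set, so the dateStamp concatenation cannot succeed yet. The snippet below only illustrates the string shape that DATETIME_FORMAT expects once real month/year/time strings are scraped; the sample values are assumptions.

# Illustration only; the scraper above does not yet produce these strings.
from datetime import datetime

DATETIME_FORMAT = '%m %d %Y %I:%M %p'

month, year = '04', '2025'          # would need to be scraped as strings, not []
day_num, time = '17', ' 8:00 PM'    # leading space matters: the code concatenates year + time
dateStamp = month + ' ' + day_num + ' ' + year + time   # '04 17 2025 8:00 PM'
print(datetime.strptime(dateStamp, DATETIME_FORMAT))    # 2025-04-17 20:00:00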
@@ -2,10 +2,9 @@ import os, sys
 from datetime import datetime
 from dateutil import relativedelta
 
-import django
-sys.path.append('../../../')
-os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
-django.setup()
+sys.path.append('/var/www/digisnaxx.ado/scrapers')
+import dtss
+dtss.getReady()
 
 from time import sleep
 from pprint import pprint as ppr
@@ -23,9 +22,7 @@ venue, created = Organization.objects.get_or_create(
     is_venue=True
 )
 
-scraper,item_count_start = digitools.getScraper(venue)
+scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')
 
-event_type = ""
-
 # Time Signatures
 tz = pytz.timezone("US/Central")
@@ -72,9 +69,10 @@ else:
     quit()
 
 
-ps = digitools.getSource(br, venue.website + '?wicked_month=04&wicked_year=2025&wicked_view=month')
+ps = digitools.getSource(br, "https://www.dakotacooks.com/events/?__cf_chl_tk=pnk3Xrc76yMA_SU9ADPYcnvIvjODoxJnhbnM7YcePaM-1764701728-1.0.1.1-1SD9BF5cmpUbVJVna52BCyFUZkVtXdExNBglo9nxRU8")
+sleep(15)
 get_events(ps, "Mu")
-sleep(1)
+sleep(15)
 
 
 digitools.updateScraper(scraper, item_count_start)
@@ -1,86 +0,0 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
import events.digitools as digitools


current_year = str(datetime.now().year)

venue, created = Organization.objects.get_or_create(
    name="Piller Forum",
    city="Minneapolis",
    website="https://www.pilllar.com/pages/events",
    is_venue = True
)

scraper,item_count_start = digitools.getScraper(venue)

event_type = "Mu"

# Time Signatures
tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%b. %d %Y %I:%M %p'
DATETIME_FORMAT_night = '%b. %d %Y %I:%M %p'
DATETIME_FORMAT_2 = '%b. %d %Y %I:%Mam'

def get_events(ps, event_type):
    contents = ps.xpath('.//*/div[@class="sse-column sse-half sse-center"]')
    for c in contents:
        try:
            event = {}
            event['scraper'] = scraper
            event['calendar'] = scraper.calendar
            event['link'] = venue.website
            # time = c.xpath('.//*/span[@class="vp-time"]/text()')[0].strip()
            date = c.xpath('.//h1[@class="sse-size-64"]/text()')[0]
            if len(date) > 1:
                print(date)
                year = datetime.now().year
                event_date = date + " " + str(year)
                event['title'] = c.xpath('.//p/span/b/text()')[0]
                details = c.xpath('.//p/text()')
                if 'Music' in details[-1]:
                    event_time = c.xpath('.//p/text()')[-1].split("Music")[1].strip()
                    event_type = "Mu"
                    event_dt = event_date + " " + event_time + " PM"
                    event['dateStamp'] = datetime.strptime(event_dt, DATETIME_FORMAT_night)
                elif len(details) == 1:
                    try:
                        event_time = details[0].split("-")[0].strip()
                        event_dt = event_date + " " + event_time + ' PM'
                        event['dateStamp'] = datetime.strptime(event_dt, DATETIME_FORMAT_night)
                        event_type = "Ot"
                    except Exception as e:
                        event_time = details[0].split("-")[0].strip()
                        event_dt = event_date + " " + event_time
                        event['dateStamp'] = datetime.strptime(event_dt, DATETIME_FORMAT_2)
                        event_type = "Ot"
                digitools.createBasicEvent(event, event_type, venue)
        except Exception as e:
            print(e)

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

ps = digitools.getSource(br, venue.website)
get_events(ps, event_type)
sleep(3)

br.close()

digitools.updateScraper(scraper, item_count_start)
@@ -3,10 +3,9 @@ from datetime import datetime, timedelta
 
 from django.db.models import Count
 
-import django
-sys.path.append('../../../')
-os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
-django.setup()
+sys.path.append('/var/www/digisnaxx.ado/scrapers')
+import dtss
+dtss.getReady()
 
 from events.models import Event, Organization
 
@@ -23,8 +22,8 @@ for e in events1:
 
 org_sin_events = Organization.objects.annotate(num_events = Count('event')).filter(num_events__lt=1)
 
-for o in org_sin_events:
-    print(o)
+# for o in org_sin_events:
+# print(o)
 
 # for org in org_sin_events:
 # print(org)
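The maintenance script above uses a Count annotation to find organizations with no events before the (now commented-out) print loop. A usage sketch of the same query, assuming the default reverse relation name 'event' that the query itself relies on:

# Usage sketch for the zero-event query; assumes events.models is importable after dtss.getReady().
from django.db.models import Count
from events.models import Organization

org_sin_events = (
    Organization.objects
    .annotate(num_events=Count('event'))
    .filter(num_events__lt=1)       # equivalently: .filter(event__isnull=True)
)
for o in org_sin_events:
    print(o.name, o.num_events)     # every row here has num_events == 0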