updated naming conventions
@@ -1,64 +0,0 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr

from events.models import Event as DSEvent, Organization
from digitools import getBrowser, createDashURL, createBasicEvent, getSource

venue, created = Organization.objects.get_or_create(
    name="Comedy Corner",
    city="Minneapolis",
    website="https://comedycornerunderground.com/calendar",
)

calendar_url = "https://comedycornerunderground.com/calendar"

DATETIME_FORMAT = '%m %d %I%p %Y'

def get_events(ps, link):
    contents = ps.xpath('.//*/td')
    for c in contents:
        try:
            day_num = c.xpath('.//*/div[@class="marker-daynum"]/text()')[0]
            events = c.xpath('.//*/li')
            # print(events)
            for e in events:
                event = {}
                print(link)
                month = link.split("month=")[1].split("-")[0]
                year = link.split("month=")[1].split("-")[1]
                event['title'] = e.xpath('.//*/span[@class="item-title"]/text()')[0]
                event['time'] = e.xpath('.//*/span[@class="item-time item-time--12hr"]/text()')[0].replace("\xa0", "")
                event['link'] = "https://comedycornerunderground.com" + e.xpath('.//a/@href')[0]
                event['date'] = str(day_num) + ' ' + event['time'] + 'm'
                dateStamp = month + ' ' + event['date'] + ' ' + year
                event['dateStamp'] = datetime.strptime(dateStamp, DATETIME_FORMAT)
                createBasicEvent(event, 'Co')
                print('\n\n++\n\n')
        except Exception as e:
            continue

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = getBrowser(arg1)
else:
    print("No run_env")
    quit()

links = createDashURL("https://comedycornerunderground.com/calendar?view=calendar&month=")

for link in links:
    ps = getSource(br, link)
    get_events(ps, link)
    sleep(5)

# ppr(events)
br.close()
@@ -1,74 +0,0 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Cabooze",
    city="Minneapolis",
    website="https://www.cabooze.com/#/events",
    is_venue=True
)

scraper, item_count_start = digitools.getScraper(venue)

event_type = ""

# Time Signatures
tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%b %d %I:%M %p %Y'
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'

def get_events(ps, event_type):
    print("Getting events ...")
    contents = ps.xpath('.//*/div[@class="vp-event-row vp-widget-reset vp-venue-thecabooze"]')
    ppr(contents)
    for c in contents:
        try:
            event = {}
            date = c.xpath('.//*/span[@class="vp-month-n-day"]/text()')[0]
            print(date)
            month = date.split(" ")[0]
            time = c.xpath('.//*/span[@class="vp-time"]/text()')[0]
            year = datetime.now().year
            if month == "Jan":
                year = int(year) + 1
            event['calendar'] = scraper.calendar
            event['title'] = c.xpath('.//*/div[@class="vp-event-name"]/text()')[0]
            event['date'] = [date, time, str(year)]
            event['date'] = " ".join(event['date'])
            event['dateStamp'] = datetime.strptime(event['date'], DATETIME_FORMAT)
            event['link'] = "https://www.cabooze.com/" + c.xpath('.//a[@class="vp-event-link"]/@href')[0]
            print("Event Dict Created")
            ppr(event)
            digitools.createBasicEvent(event, event_type, venue)
            scraper.items += 1
        except Exception as e:
            print(e)
            ppr(event)
            print("\n\n+++\n\n")

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

ps = digitools.getSource(br, venue.website)
get_events(ps, "Mu")
sleep(3)

digitools.updateScraper(scraper, item_count_start)
br.close()
Working/workshop/cals/MplStpMag.mn.py (new file, 102 lines)
@@ -0,0 +1,102 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization
from events.digitools import getBrowser, createURL, createBasicEvent, getSource

venue, created = Organization.objects.get_or_create(
    name="Mpls Stp Mag",
    city="Minneapolis",
    website="https://calendar.mspmag.com/calendars/all-events/",
)

event_type = ""

# Time Signatures
tz = pytz.timezone("US/Central")
td = relativedelta.relativedelta(days=1)
fortnight = relativedelta.relativedelta(days=14)
odt = datetime.now() + fortnight

# DATETIME_FORMAT = '%b %d %Y %I:%M %p'
DATETIME_FORMAT = '%A, %B %d %Y %I:%M%p'
DATETIME_FORMAT_ALT = '%A, %B %d %Y'

def get_events(ps, event_type):
    contents = ps.xpath('.//*/div[@class="css-card js-card day-card type-smad expandable"]')
    for c in contents:
        try:
            event = {}
            event['calendar'] = venue.calendar
            event_block = c.xpath('.//*/li[@class="card-listings-item event-element"]')
            date = c.xpath('.//div[@class="day-card__header day-card__header--daily"]/text()')[0].replace("\n", "").strip()
            if date == "Today":
                date = datetime.today()
            elif date == "Tomorrow":
                date = datetime.today() + td
            # month = c.xpath('.//*/span[@class="mth"]/text()')[0]
            year = datetime.now().year
            # if month == "Jan":
            #     year = int(year) + 1
            dateTime = datetime.strptime(date + " " + str(year), DATETIME_FORMAT_ALT)
            if dateTime > odt:
                print("DATE TIME ", dateTime)
                break
            for ev in event_block:
                time = ev.xpath('.//*/span[@class="card-listing-item-time"]/text()')[0].replace("@", "").strip()
                if time == "All day":
                    time = "12:00pm"
                    event['title'] = ev.xpath('.//*/div[@class="card-listing-item-title"]/text()')[0] + " (Check link for times.)"
                elif "-" in time:
                    time = time.split("-")[0]
                    event['title'] = ev.xpath('.//*/div[@class="card-listing-item-title"]/text()')[0]
                else:
                    event['title'] = ev.xpath('.//*/div[@class="card-listing-item-title"]/text()')[0]

                event['location'] = ev.xpath('.//*/span[@class="card-listing-item-location"]/text()')[0]
                if event['location'] == '7th St. Entry':
                    event['location'] = '7th St Entry'
                elif event['location'] == '7th Street Entry':
                    event['location'] = '7th St Entry'
                elif event['location'] == 'Amsterdam Bar and Hall':
                    event['location'] = 'Amsterdam Bar & Hall'
                new_venue, created = Organization.objects.get_or_create(name=event['location'])
                print("V: ", new_venue, created)

                event['dateTime'] = date + " " + str(year) + " " + time
                event['link'] = venue.website + c.xpath('.//@data-event')[0]
                event['dateStamp'] = datetime.strptime(event['dateTime'], DATETIME_FORMAT)

                createBasicEvent(event, event_type, new_venue)
        except Exception as e:
            print(e)
            ppr(event)
            print("\n\n+++\n\n")

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = getBrowser(arg1)
else:
    print("No run_env")
    quit()

# Get Event Page Link(s)
# links = createURL("https://acmecomedycompany.com/the-club/calendar/")

ps = getSource(br, venue.website)
get_events(ps, "Ed")
sleep(3)

br.close()
Working/workshop/cals/minnestar.py (new file, 105 lines)
@@ -0,0 +1,105 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
from events.digitools import getBrowser, createURL, createBasicEvent, getSource

count = 0

venue, created = Organization.objects.get_or_create(
    name="Minnestar",
    city="Minneapolis",
    website="https://minnestar.org/community/calendar",
)

try:
    scraper, created = Scraper.objects.get_or_create(
        name=venue.name,
        website=venue.website,
        items=0,
        last_ran=datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=venue.name)
    print("Scraper: ", scraper)

event_type = ""

# Time Signatures
tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%B %d, %Y %I:%M %p'
DATETIME_FORMAT_2 = '%B %d %Y'

def get_events(ps, event_type):
    links = ps.xpath('.//*/div[@id="community-calendar-list-view-container"]/a/@href')
    ppr(links)
    for l in links:
        pse = getSource(br, l)
        sleep(1)
        event = {}
        event['calendar'] = venue.calendar
        event['link'] = l
        try:
            starttime = pse.xpath('.//*/time/text()')[0]
            endtime = pse.xpath('.//*/time/@datetime')[1]
            event['dateStamp'] = datetime.strptime(starttime, DATETIME_FORMAT)
            event['title'] = pse.xpath('.//*/h1[@class="heading-2"]/text()')[0]
            # event['detail-headers'] = pse.xpath('.//*/ul[@class="eo-event-meta"]/li/strong/text()')
            # event['details'] = pse.xpath('.//*/ul[@class="eo-event-meta"]/li/text()')

        except:
            try:
                event['title'] = pse.xpath('.//*/h1[@class="heading-2"]/text()')[0]
                starttime = pse.xpath('.//*/time/text()')[0]
                event['dateStamp'] = datetime.strptime(starttime, DATETIME_FORMAT)
            except Exception as e:
                try:
                    print(e)
                    print('failed event: ', event)
                    starttime = pse.xpath('.//*/time/text()')[0]
                    event['dateStamp'] = datetime.strptime(starttime + ' 2025', DATETIME_FORMAT_2)
                except Exception as e:
                    print(e)
                    print("failed event: ", event)
        ppr(event)
        try:
            createBasicEvent(event, "Ot", venue)
            scraper.items += 1
        except Exception as e:
            print(e)
            print('failed to create: ', event)

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = getBrowser(arg1)
else:
    print("No run_env")
    quit()

ps = getSource(br, venue.website)
get_events(ps, "Ot")
sleep(3)

scraper.save()

# Get Event Page Link(s)
# links = createURL("https://acmecomedycompany.com/the-club/calendar/")

# for link in links:
#     ps = getSource(br, link)
#     get_events(ps, "Ed")
#     sleep(3)

br.close()
@@ -1,78 +0,0 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
import events.digitools as digitools

count = 0
tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%a, %b %d %Y %I:%M %p SHOW'
DATETIME_FORMAT_2 = '%a, %b %d %Y %I:%M %p SHOW'

venue, created = Organization.objects.get_or_create(
    name="Icehouse",
    city="Minneapolis",
    website="https://icehouse.turntabletickets.com",
    is_venue=True
)

scraper, item_count_start = digitools.getScraper(venue)

def get_events(ps, event_type):
    contents = ps.xpath('.//*/div[@class="performances whitespace-pre-line w-full md:w-3/4"]')
    for c in contents:
        try:
            event = {}
            event['scraper'] = scraper
            event['calendar'] = scraper.calendar
            event['title'] = c.xpath('.//*/h3[@class="text-3xl font-semibold font-heading mr-auto"]/text()')[0]
            event['link'] = venue.website + c.xpath('.//*/a[@class="show-link"]/@href')[0]
            event['date'] = c.xpath('.//*/h4[@class="day-of-week"]/text()')[0]
            year = datetime.now().year
            if "Brunch" in event['title']:
                event['time'] = "11:00 AM SHOW"
            else:
                event['time'] = c.xpath('.//*/div[@class="performance-btn"]/button/text()')[0]

            event['datetime'] = event['date'] + " " + str(year) + " " + event['time']
            try:
                event['dateStamp'] = datetime.strptime(event['datetime'], DATETIME_FORMAT)
            except:
                event['datetime'] = event['date'] + " " + str(year) + " " + "07:00 PM SHOW"
                event['dateStamp'] = datetime.strptime(event['datetime'], DATETIME_FORMAT)
                event['title'] = event['title'] + " (Time Estimated)"
            try:
                digitools.createBasicEvent(event, event_type, venue)
                scraper.items += 1
            except Exception as e:
                print(e)
                quit()

        except Exception as e:
            ppr(event)
            print(e)
            quit()

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

ps = digitools.getSource(br, venue.website)
get_events(ps, "Mu")

# ppr(events)
br.close()
digitools.updateScraper(scraper, item_count_start)
Working/workshop/mn.mpls.ComedyUnderground.py (new file, 76 lines)
@@ -0,0 +1,76 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

sys.path.append('/var/www/digisnaxx.ado/scrapers')
import dtss
dtss.getReady()

from time import sleep
from pprint import pprint as ppr

from events.models import Organization, Scraper
import events.digitools as digitools

venue, created = Organization.objects.get_or_create(
    name="Comedy Corner",
    city="Minneapolis",
    website="https://comedycornerunderground.com/",
)

scraper, item_count_start, virtcal = digitools.getScraper(venue, 'msp')

DATETIME_FORMAT = '%m %d %Y %I:%M %p'

def get_events(ps):
    sleep(3)
    contents = ps.xpath('.//*/div[@class="calendar-day"]')
    month = []
    year = []
    print("Contents: ", contents)
    for c in contents:
        try:
            day_num = c.xpath('.//*/div[@class="day-number"]/text()')[0]
            events = c.xpath('.//div[@class="event-item"]')
            # print(events)
            for e in events:
                event = {}
                event['scraper'] = scraper
                event['calendars'] = [scraper.calendar]
                event['title'] = e.xpath('.//*/div[@class="event-name"]/text()')[0]
                time = e.xpath('.//*/div[@class="event-name"]/text()')[0]
                event['link'] = e.xpath('.//a/@href')[0]
                event['date'] = str(day_num) + ' ' + event['time'] + 'm'
                dateStamp = month + ' ' + day_num + ' ' + year + time
                event['dateStamp'] = datetime.strptime(dateStamp, DATETIME_FORMAT)
                ppr(event)
                # digitools.createBasicEvent(event, 'Co')
                print('\n\n++\n\n')
        except Exception as e:
            print("\n\n", e, "\n\n")
            continue

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()


ps = digitools.getSource(br, venue.website)
br.execute_script("window.scrollTo(0, window.scrollY + 2500)")
sleep(8)
cal = ps.xpath('.//*/div[@id="ccu-calendar"]')
print(cal)
calendar = ps.xpath('.//*/div[@id="ccu-calendar"]/*')
print(calendar)
contents = ps.xpath('.//*/div[@id="ccu-calendar"]/*/div[@class="calendar-grid"]')
print(contents)
br.close()
quit()
get_events(ps)
sleep(5)

# ppr(events)
br.close()
@@ -2,10 +2,9 @@ import os, sys
 from datetime import datetime
 from dateutil import relativedelta
 
-import django
-sys.path.append('../../../')
-os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
-django.setup()
+sys.path.append('/var/www/digisnaxx.ado/scrapers')
+import dtss
+dtss.getReady()
 
 from time import sleep
 from pprint import pprint as ppr
@@ -23,9 +22,7 @@ venue, created = Organization.objects.get_or_create(
     is_venue=True
 )
 
-scraper,item_count_start = digitools.getScraper(venue)
-
-event_type = ""
+scraper,item_count_start,virtcal = digitools.getScraper(venue, 'msp')
 
 # Time Signatures
 tz = pytz.timezone("US/Central")
@@ -72,9 +69,10 @@ else:
     quit()
 
 
-ps = digitools.getSource(br, venue.website + '?wicked_month=04&wicked_year=2025&wicked_view=month')
+ps = digitools.getSource(br, "https://www.dakotacooks.com/events/?__cf_chl_tk=pnk3Xrc76yMA_SU9ADPYcnvIvjODoxJnhbnM7YcePaM-1764701728-1.0.1.1-1SD9BF5cmpUbVJVna52BCyFUZkVtXdExNBglo9nxRU8")
+sleep(15)
 get_events(ps, "Mu")
-sleep(1)
+sleep(15)
 
 
 digitools.updateScraper(scraper, item_count_start)
Working/workshop/news/minnpost.mn.py (new file, 78 lines)
@@ -0,0 +1,78 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
from events.digitools import getBrowser, createURL, createBasicArticle, getSource


org, created = Organization.objects.get_or_create(
    name="MinnPost",
    city="Minneapolis",
    website="https://www.minnpost.com/",
    is_venue=False,
)

try:
    scraper, created = Scraper.objects.get_or_create(
        name=org.name,
        website=org.website,
        items=0,
        last_ran=datetime.now(),
    )
except Exception as e:
    print(e)
    scraper = Scraper.objects.get(name=org.name)
    print("Scraper: ", scraper)

event_type = "Ja"

# Time Signatures
tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%b %d %Y %I:%M %p'
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'

def get_events(ps, event_type):
    contents = ps.xpath('.//*/article')
    count = 0
    ppr(contents)
    for c in contents:
        try:
            if count > 10:
                br.close()
                quit()
            article = {}
            article['title'] = c.xpath('.//*/h2[@class="entry-title"]/a/text()')[0]
            article['link'] = c.xpath('.//*/h2[@class="entry-title"]/a/@href')[0]
            createBasicArticle(article, event_type, org)
            ppr(article)
            print("Success")
            count += 1
        except Exception as e:
            print(e)
            ppr(article)
            print("\n\n+++\n\n")

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = getBrowser(arg1)
else:
    print("No run_env")
    quit()


ps = getSource(br, org.website)
get_events(ps, "Ed")
sleep(3)

br.close()
scraper.save()
Working/workshop/news/racket.mn.py (new file, 68 lines)
@@ -0,0 +1,68 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
from events.digitools import getBrowser, createURL, createBasicArticle, getSource

scraper, created = Scraper.objects.get_or_create(
    name="Racket MN",
    website="https://racketmn.com",
    last_ran=datetime.now(),
)

org, created = Organization.objects.get_or_create(
    name="Racket MN",
    city="Minneapolis",
    website="https://racketmn.com",
    is_venue=False,
)

event_type = "Ja"

# Time Signatures
tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%b %d %Y %I:%M %p'
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'

def get_events(ps, event_type):
    count = 0
    contents = ps.xpath('.//*/div[@class="PostCard_stackedWrapper__S21Fy"]') + ps.xpath('.//*/div[@class="PostCard_wrapper__uteO3"]')
    for c in contents:
        if count > 10:
            br.close()
            quit()
        try:
            article = {}
            article['title'] = c.xpath('.//div/a/h3/text()')[0]
            article['link'] = org.website + c.xpath('.//div/a/@href')[1]
            createBasicArticle(article, event_type, org)
            count += 1
        except Exception as e:
            print(e)
            ppr(article)
            print("\n+++\n")

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = getBrowser(arg1)
else:
    print("No run_env")
    quit()


ps = getSource(br, org.website)
get_events(ps, "Ed")
sleep(3)

br.close()
scraper.save()
Working/workshop/news/sahan.mn.py (new file, 68 lines)
@@ -0,0 +1,68 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
from events.digitools import getBrowser, createURL, createBasicArticle, getSource

scraper, created = Scraper.objects.get_or_create(
    name="Sahan Journal",
    website="https://sahanjournal.com/",
    last_ran=datetime.now(),
)

org, created = Organization.objects.get_or_create(
    name="Sahan Journal",
    city="Minneapolis",
    website="https://sahanjournal.com/",
    is_venue=False,
)

event_type = "Ja"

# Time Signatures
tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%b %d %Y %I:%M %p'
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'

def get_events(ps, event_type):
    contents = ps.xpath('.//*/article')
    count = 0
    for c in contents:
        try:
            if count > 10:
                br.close()
                quit()
            article = {}
            article['title'] = c.xpath('.//*/h2[@class="entry-title"]/a/text()')[0]
            article['link'] = c.xpath('.//*/h2[@class="entry-title"]/a/@href')[0]
            createBasicArticle(article, event_type, org)
            count += 1
        except Exception as e:
            print(e)
            ppr(article)
            print("\n+++\n")

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = getBrowser(arg1)
else:
    print("No run_env")
    quit()


ps = getSource(br, org.website)
get_events(ps, "Ed")
sleep(3)

br.close()
scraper.save()
Working/workshop/news/unicornriot.py (new file, 63 lines)
@@ -0,0 +1,63 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
from events.digitools import getBrowser, createURL, createBasicArticle, getSource

scraper, created = Scraper.objects.get_or_create(
    name="Unicorn Riot",
    website="https://unicornriot.ninja/",
    last_ran=datetime.now(),
)

org, created = Organization.objects.get_or_create(
    name="Unicorn Riot",
    city="Minneapolis",
    website="https://unicornriot.ninja/",
    is_venue=False,
)

event_type = "Ja"

# Time Signatures
tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%b %d %Y %I:%M %p'
DATETIME_FORMAT_2 = '%A, %B %d @ %I%p %Y'

def get_events(ps, event_type):
    contents = ps.xpath('.//*/article')
    for c in contents[:10]:
        try:
            article = {}
            article['title'] = c.xpath('.//*/h3[@class="title entry-title is-3"]/a/text()')[0]
            article['link'] = c.xpath('.//*/h3[@class="title entry-title is-3"]/a/@href')[0]
            createBasicArticle(article, event_type, org)
        except Exception as e:
            print(e)
            ppr(article)
            print("\n+++\n")

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = getBrowser(arg1)
else:
    print("No run_env")
    quit()


ps = getSource(br, org.website)
get_events(ps, "Ed")
sleep(3)

br.close()
scraper.save()
@@ -1,86 +0,0 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'ds_events.settings'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from events.models import Organization, Scraper
import events.digitools as digitools


current_year = str(datetime.now().year)

venue, created = Organization.objects.get_or_create(
    name="Piller Forum",
    city="Minneapolis",
    website="https://www.pilllar.com/pages/events",
    is_venue=True
)

scraper, item_count_start = digitools.getScraper(venue)

event_type = "Mu"

# Time Signatures
tz = pytz.timezone("US/Central")
DATETIME_FORMAT = '%b. %d %Y %I:%M %p'
DATETIME_FORMAT_night = '%b. %d %Y %I:%M %p'
DATETIME_FORMAT_2 = '%b. %d %Y %I:%Mam'

def get_events(ps, event_type):
    contents = ps.xpath('.//*/div[@class="sse-column sse-half sse-center"]')
    for c in contents:
        try:
            event = {}
            event['scraper'] = scraper
            event['calendar'] = scraper.calendar
            event['link'] = venue.website
            # time = c.xpath('.//*/span[@class="vp-time"]/text()')[0].strip()
            date = c.xpath('.//h1[@class="sse-size-64"]/text()')[0]
            if len(date) > 1:
                print(date)
                year = datetime.now().year
                event_date = date + " " + str(year)
                event['title'] = c.xpath('.//p/span/b/text()')[0]
                details = c.xpath('.//p/text()')
                if 'Music' in details[-1]:
                    event_time = c.xpath('.//p/text()')[-1].split("Music")[1].strip()
                    event_type = "Mu"
                    event_dt = event_date + " " + event_time + " PM"
                    event['dateStamp'] = datetime.strptime(event_dt, DATETIME_FORMAT_night)
                elif len(details) == 1:
                    try:
                        event_time = details[0].split("-")[0].strip()
                        event_dt = event_date + " " + event_time + ' PM'
                        event['dateStamp'] = datetime.strptime(event_dt, DATETIME_FORMAT_night)
                        event_type = "Ot"
                    except Exception as e:
                        event_time = details[0].split("-")[0].strip()
                        event_dt = event_date + " " + event_time
                        event['dateStamp'] = datetime.strptime(event_dt, DATETIME_FORMAT_2)
                        event_type = "Ot"
                digitools.createBasicEvent(event, event_type, venue)
        except Exception as e:
            print(e)

if len(sys.argv) >= 2:
    arg1 = sys.argv[1]
    br = digitools.getBrowser(arg1)
else:
    print("No run_env")
    quit()

ps = digitools.getSource(br, venue.website)
get_events(ps, event_type)
sleep(3)

br.close()

digitools.updateScraper(scraper, item_count_start)
Working/workshop/smedia/bluesky.py (new file, 132 lines)
@@ -0,0 +1,132 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

from atproto import Client

import django
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from socials.models import SocialLink, SocialPost
# from digitools import getBrowser, createURL, createBasicEvent, getSource

tz = pytz.timezone("US/Central")

USERNAME = "dreamfreely.org"
PASSWORD = "<redacted>"

client = Client()
client.login(USERNAME, PASSWORD)
feed = client.get_author_feed(USERNAME, limit=100)

def createSocialLink(post):
    new_post, created = SocialLink.objects.update_or_create(
        uri=post['uri'],
        text=post['text'],
        link=post['link'],
        handle=post['handle'],
        likes=post['likes'],
        reposts=post['reposts'],
        quotes=post['quotes'],
        replies=post['replies'],
        created_at=post['created_at'],
        platform='bluesky',
        rt_uri=post['rt_uri'],
        rt_text=post['rt_text'],
        rt_link=post['rt_link'],
        rt_handle=post['rt_handle'],
    )
    # print(created, new_post)
    print("completed write")

tweets = []

print(len(feed.feed))

for post in feed.feed:
    post = post.post
    print("\n\nNEW POST\n\n")
    # try:
    #     ppr(post.embed.record.record.author.handle)
    #     ppr(post.embed.record.record.value.text.split("\n")[:2])
    #     ppr(post.embed.record.record.value.embed.external.uri.split("?")[0])
    #     ppr(post.embed.record.record.uri.split("feed.post/")[1])
    # except:
    #     pass

    if hasattr(post.record.embed, 'external'):
        p = {}
        try:
            p['link'] = post.record.embed.external.uri.split("?")[0]
        except:
            pass
        p['text'] = " ".join(post.record.text.split("\n")[:2])
        p['handle'] = post.author.handle
        p['uri'] = post.uri.split("feed.post/")[1]
        p['likes'] = post.like_count
        p['quotes'] = post.quote_count
        p['replies'] = post.reply_count
        p['reposts'] = post.repost_count
        p['created_at'] = post.record.created_at

        p['rt_handle'] = "blank"
        p['rt_text'] = "blank"
        p['rt_uri'] = "blank"
        p['rt_link'] = "blank"

    elif hasattr(post.embed, 'record'):
        p = {}
        p['text'] = " ".join(post.record.text.split("\n")[:2])
        p['handle'] = post.author.handle
        p['uri'] = post.uri.split("feed.post/")[1]
        p['likes'] = post.like_count
        p['quotes'] = post.quote_count
        p['replies'] = post.reply_count
        p['reposts'] = post.repost_count
        p['created_at'] = post.record.created_at
        p['link'] = "blank"

        try:
            p['rt_handle'] = post.embed.record.record.author.handle
            p['rt_text'] = " ".join(post.embed.record.record.value.text.split("\n")[:2])
            p['rt_uri'] = post.embed.record.record.uri.split("feed.post/")[1]
            p['rt_link'] = post.embed.record.record.value.embed.external.uri.split("?")[0]
        except:
            p['rt_handle'] = "blank"
            p['rt_text'] = "blank"
            p['rt_uri'] = "blank"
            p['rt_link'] = "blank"

    else:
        p = {}
        p['text'] = " ".join(post.record.text.split("\n")[:2])
        p['handle'] = post.author.handle
        p['uri'] = post.uri.split("feed.post/")[1]
        p['likes'] = post.like_count
        p['quotes'] = post.quote_count
        p['replies'] = post.reply_count
        p['reposts'] = post.repost_count
        p['created_at'] = post.record.created_at

        p['rt_handle'] = "blank"
        p['rt_text'] = "blank"
        p['rt_uri'] = "blank"
        p['rt_link'] = "blank"
        p['link'] = "blank"

    # ppr(p)
    # tweets.append(p)

    try:
        print('writing file')
        createSocialLink(p)
    except Exception as e:
        ppr(post.record.embed)
        print(e, "\nthis\n\n")
Working/workshop/smedia/bluesky_media.py (new file, 72 lines)
@@ -0,0 +1,72 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

from atproto import Client

import django
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

from socials.models import SocialImg
# from digitools import getBrowser, createURL, createBasicEvent, getSource

tz = pytz.timezone("US/Central")

USERNAME = "dreamfreely.org"
PASSWORD = "<redacted>"

client = Client()
client.login(USERNAME, PASSWORD)
feed = client.get_author_feed(USERNAME, limit=100)

def createSocialImg(post):
    new_post, created = SocialImg.objects.update_or_create(
        uri=post['uri'],
        text=post['text'],
        img_link=post['img_link'],
        handle=post['handle'],
        created_at=post['created_at'],
        platform='bluesky',
    )
    print(created, new_post)

tweets = []

print(len(feed.feed))

for post in feed.feed:
    post = post.post

    # print(post, "\n\n")

    # try:
    #     ppr(post.embed.images[0].fullsize)
    #     # ppr(post.embed.record.record.value.text.split("\n")[:2])
    #     # ppr(post.embed.record.record.value.embed.external.uri.split("?")[0])
    #     # ppr(post.embed.record.record.uri.split("feed.post/")[1])
    # except Exception as e:
    #     print("failed:", e)

    if hasattr(post.embed, 'images'):
        p = {}
        p['img_link'] = post.embed.images[0].fullsize
        p['text'] = " ".join(post.record.text.split("\n")[:2])
        p['handle'] = post.author.handle
        p['uri'] = post.uri.split("feed.post/")[1]
        p['created_at'] = post.record.created_at

        # ppr(p)
        tweets.append(p)

        try:
            print('writing file')
            createSocialImg(p)
        except Exception as e:
            ppr(post.embed)
            print(e, "\nthis\n\n")
Working/workshop/smedia/redsky.py (new file, 72 lines)
@@ -0,0 +1,72 @@
import os, sys
from datetime import datetime
from dateutil import relativedelta

import django
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.django.local'
django.setup()

from time import sleep
from pprint import pprint as ppr
import pytz

import praw

from socials.models import SocialLink, SocialPost
# from digitools import getBrowser, createURL, createBasicEvent, getSource

tz = pytz.timezone("US/Central")


# timestamp = 1729322547223
# dt_object = datetime.datetime.fromtimestamp(timestamp)
# print(dt_object)

reddit = praw.Reddit(
    client_id="rxW3Ywqke6FZDP7pIhYYuw",
    client_secret="<redacted>",
    password="<redacted>",
    user_agent="CultureClap",
    username="cultureclap",
)


def createSocialLink(post):
    new_post, created = SocialLink.objects.update_or_create(
        text=post['text'],
        link=post['link'],
        handle=post['handle'],
        likes=post['likes'],
        replies=post['replies'],
        platform=post['platform'],
        created_at=post['created_at'],
        rt_uri='blank',
        rt_text='blank',
        rt_link='blank',
        rt_handle='blank',
    )
    print(created, new_post)


count = 0

for item in reddit.user.me().upvoted():
    rdt = {}
    rdt['text'] = item.title + " | " + item.selftext
    rdt['handle'] = item.author.name
    rdt['link'] = item.url
    rdt['likes'] = item.ups
    rdt['replies'] = len(item.comments.list())
    rdt['created_at'] = datetime.fromtimestamp(item.created_utc)
    rdt['platform'] = 'reddit'

    try:
        print('writing file')
        createSocialLink(rdt)
        count += 1
        if count > 50:
            quit()
    except Exception as e:
        ppr(item)
        print(e, "\nthis\n\n")
    # ppr(item)