init
commit e678081a77
@@ -0,0 +1 @@
venv
@@ -0,0 +1,2 @@
bs4
requests
@@ -0,0 +1,46 @@
import requests
from bs4 import BeautifulSoup

# Fetch the calendar overview page and parse it.
overview_url = 'https://gruene-hohenlohe.de/kalender'
overview_html = requests.get(overview_url, timeout=60).text
overview_soup = BeautifulSoup(overview_html, 'html.parser')

# Each event is linked from a heading in the overview list.
for link in overview_soup.select('.media-body h2 a'):
    event_url = 'https://gruene-hohenlohe.de/' + link.attrs['href']
    event_html = requests.get(event_url, timeout=60).text
    event_soup = BeautifulSoup(event_html, 'html.parser')

    # The event detail page lists date, time, and place in a definition list.
    data = event_soup.select('.calendarize dl dd')

    output = ''

    # date
    output += data[0].text.strip()
    output += ' '

    # time: keep only the first token of the time field (the start time)
    timespan = data[1].text.strip()
    start_time = timespan.split(' ')[0]

    output += start_time
    output += ' Uhr'
    output += ', '

    # place
    output += data[2].text.strip()
    output += ', '

    # title
    #output += '<b>'
    #output += event_soup.select('.calendarize h1')[0].text.strip()
    #output += '</b>'

    # description (first paragraph of the event text)
    #try:
    description = event_soup.select('.calendarize .text p')[0].text.strip()
    #output += ' '
    output += description
    #except IndexError:
    #    pass

    print(output)
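One note on the commented-out try/except near the end of the script: an event page without a description paragraph makes the `.calendarize .text p` lookup raise an IndexError, which stops the whole run. A minimal sketch of that guard re-enabled around the two active lines, shown at loop-body indentation (skipping the description on such pages is an assumption taken from the commented-out `pass`, not part of this commit):

    # Guard the optional description so events without one still get printed.
    try:
        description = event_soup.select('.calendarize .text p')[0].text.strip()
        output += description
    except IndexError:
        # This event page has no description paragraph; keep the rest of the line.
        pass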