Requires python3 and BeautifulSoup:
sudo apt-get install python3-bs4
or…
pip3 install beautifulsoup4
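A quick sanity check that the install worked (the import name is bs4 either way); this tiny, hypothetical snippet just parses an inline string and should print "ok":

#!/usr/bin/env python3
# Verify that BeautifulSoup is importable and the built-in parser works
from bs4 import BeautifulSoup

soup = BeautifulSoup('<div class="test">ok</div>', 'html.parser')
print(soup.find('div', class_='test').get_text())   # expected output: ok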
python3 /path/to/saunastatsgrabber.py
nohup python3 /path/to/saunastatsgrabber.py > /path/to/saunastatsgrabber.log 2>&1 &
sudo ps -ef | grep python
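Besides checking for the process, you can peek at the end of the log to see whether the grabber is producing output. A minimal sketch, assuming the same placeholder log path as in the nohup command above:

# Print the last few lines of the grabber's log (illustrative helper, not part of the script)
log_path = '/path/to/saunastatsgrabber.log'

with open(log_path) as f:
    lines = f.readlines()

for line in lines[-5:]:          # five most recent entries
    print(line.rstrip())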
The script itself (saunastatsgrabber.py):

#!/usr/bin/env python3
import datetime
import os
import sys
import time

from bs4 import BeautifulSoup

webpage = 'https://www.seebadenge.ch/wp/sauna'
file_out = '/home/inex/SCRIPTS/saunastats/saunastats.txt'
data_container = 'ase_crowdmonitor'   # CSS class of the crowd-monitor div on the page
repeat = 300                          # seconds between fetches


def getFreiePlaetze():
    now = datetime.datetime.now()
    # Open the output file for appending and send print() there
    sys.stdout = fs = open(file_out, "a")
    # Fetch the page with curl; merge stderr so curl errors show up in html_doc
    html_doc = os.popen('curl -sSL ' + webpage + ' 2>&1').read()
    if 'curl: (' in html_doc:
        # curl reported an error: log it with a timestamp
        print(str(now) + html_doc)
    else:
        # Parse the page and pull out the crowd-monitor element
        try:
            soup = BeautifulSoup(html_doc, 'html.parser')
            fly = soup.find('div', class_=data_container)
            print(fly.contents[0])
        except Exception:
            print(str(now) + ': ' + data_container + ' not found (website down?)')
    fs.close()


# Poll forever, once every `repeat` seconds
while True:
    getFreiePlaetze()
    time.sleep(repeat)
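The script shells out to curl and detects failures by looking for 'curl: (' in the output; the same fetch-and-parse step can also be done entirely with the standard library. A sketch of that alternative, using urllib instead of curl (an assumption, not what the script above does):

# Alternative fetch with urllib.request instead of curl; the parsing logic is unchanged
import urllib.request
from bs4 import BeautifulSoup

webpage = 'https://www.seebadenge.ch/wp/sauna'

try:
    with urllib.request.urlopen(webpage, timeout=30) as resp:
        html_doc = resp.read()
    soup = BeautifulSoup(html_doc, 'html.parser')
    fly = soup.find('div', class_='ase_crowdmonitor')
    print(fly.contents[0] if fly else 'ase_crowdmonitor not found')
except OSError as err:
    # URLError is a subclass of OSError, so network failures land here
    print('fetch failed:', err)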