#!/usr/bin/env python3
"""Scrape Cyprus air-quality readings and cycle them on a 16x2 character LCD.

Fetches the first station panel from the government air-quality page,
formats each pollutant as a two-line screen, rotates the screens on the
LCD, and re-fetches the data roughly every 10 minutes.
"""
import requests
from bs4 import BeautifulSoup
import time
import Adafruit_CharLCD as LCD
from unidecode import unidecode
import logging
from datetime import datetime

# Log to a file with timestamps so unattended runs can be audited.
logging.basicConfig(
    filename='console.log',
    level=logging.INFO,
    format='%(asctime)s [%(levelname)s] %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)

# Raspberry Pi GPIO pin and LCD configuration (BCM numbering).
lcd_rs = 25
lcd_en = 24
lcd_d4 = 23
lcd_d5 = 17
lcd_d6 = 18
lcd_d7 = 22
lcd_backlight = 4
lcd_columns = 16
lcd_rows = 2

lcd = LCD.Adafruit_CharLCD(lcd_rs, lcd_en, lcd_d4, lcd_d5, lcd_d6, lcd_d7,
                           lcd_columns, lcd_rows, lcd_backlight)

URL = "https://www.airquality.dli.mlsi.gov.cy/"
REFRESH_SECONDS = 600        # re-fetch the page every 10 minutes
FIRST_SCREEN_SECONDS = 15    # dwell longer on the first screen
SCREEN_SECONDS = 7           # dwell time for every other screen
TIMESTAMP_POSITION = 8       # preferred slot for the "Updated" screen


def build_screens(div_element):
    """Build the list of two-line LCD screens from a station's div.

    Pairs each pollutant label with its value and inserts an "Updated"
    timestamp screen.  Returns (screens, timestamp_index) so callers can
    tell the timestamp apart from real pollutant data.
    """
    labels = div_element.find_all(class_="pollutant-label")
    values = div_element.find_all(class_="pollutant-value")

    screens = []
    for label, value in zip(labels, values):
        name = label.text.strip().rstrip(":")   # drop trailing colon
        reading = value.text.strip()
        if reading == "Not Measured":           # keep it short for 16 columns
            reading = "N/A"
        screens.append(f"Pollutant: {name}\nValue: {reading}")

    # Insert the refresh timestamp.  The original hard-coded insert(8, ...)
    # and a matching "skip index 8" in the logger; clamping to the list
    # length and returning the real index keeps both consistent even when
    # fewer than 8 pollutants are reported.
    stamp = f"Updated:\n{datetime.now().strftime('%d.%m.%Y %H:%M:%S')}"
    ts_index = min(TIMESTAMP_POSITION, len(screens))
    screens.insert(ts_index, stamp)
    return screens, ts_index


while True:
    screens = None
    try:
        # Timeout so a dead network cannot hang the loop forever; without
        # it (and without this except) an outage killed the process.
        response = requests.get(URL, timeout=30)
    except requests.RequestException as exc:
        response = None
        lcd.clear()
        lcd.message("Error fetching data")
        logging.error("Request failed: %s", exc)

    if response is not None and response.status_code == 200:
        # Parse the HTML content using BeautifulSoup.
        soup = BeautifulSoup(response.content, "html.parser")

        # First station panel on the page (presumably Limassol — the class
        # alone does not pin the city; TODO confirm against the live page).
        div_element = soup.find(
            "div", class_="col col-xs-12 col-sm-6 col-md-3 col-lg-2 col-3")

        if div_element:
            # The h4 header carries the station city name.
            h4_header = div_element.find(
                "h4", class_="field-content stations-overview-title")

            if h4_header:
                station_name = h4_header.text.strip()
                screens, ts_index = build_screens(div_element)

                # Print/log the data ONCE per fetch.  The original did this
                # inside the display loop, duplicating every line into
                # console.log every 7 seconds.
                logging.info("Station: %s", station_name)
                for i, text in enumerate(screens):
                    print(text)
                    if i != ts_index:  # logger already stamps the time
                        logging.info(text)
            else:
                print("Could not find table for Limassol, webpage updated or unavailable")
                logging.error("Could not find table for Limassol, webpage updated or unavailable")
        else:
            print("Could not find the table, webpage updated or unavailable")
            logging.error("Could not find the table, webpage updated or unavailable")
    elif response is not None:
        lcd.clear()
        lcd.message("Error fetching data")
        logging.error("Error fetching data")

    if screens:
        # Rotate the screens for one refresh period, then fall through and
        # re-fetch.  (The original inner `while True` never terminated, so
        # the data was fetched exactly once and the 10-minute update below
        # was unreachable.)
        deadline = time.monotonic() + REFRESH_SECONDS
        index = 0
        while time.monotonic() < deadline:
            lcd.clear()
            # Normalize unicode text since the LCD can't render unicode
            # symbols (e.g. the micro sign in ug/m3).
            lcd.message(unidecode(screens[index]))
            time.sleep(FIRST_SCREEN_SECONDS if index == 0 else SCREEN_SECONDS)
            index = (index + 1) % len(screens)
    else:
        # Nothing to show; wait a full refresh period before retrying.
        time.sleep(REFRESH_SECONDS)