because no more files can be uploaded
crawler.py  Normal file  (99 lines added)
@@ -0,0 +1,99 @@
import requests
from bs4 import BeautifulSoup  # imported but not currently used
import json
import os
import time
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By

# CONFIGURATION
NATIONER_URL = "https://www.nationsguiden.se/"  # Example URL, replace with actual
DISCORD_WEBHOOK_URL = "https://discord.com/api/webhooks/1437888900747104317/TI2RfDGC5dzoi5JGz6UO2aD23teYNwa6pLQOskhaDnsSVe3cr8_rly0L3K0VyIYARgeR"


def fetch_nationer_open_times(url):
    """Scrape today's nation events from the rendered page using a headless browser."""
    print(f"DEBUG: Current working directory is {os.getcwd()}")

    # Use a headless browser to get the rendered DOM and extract event data
    chrome_options = Options()
    chrome_options.add_argument('--headless')
    chrome_options.add_argument('--disable-gpu')
    chrome_options.add_argument('--no-sandbox')
    driver = webdriver.Chrome(options=chrome_options)
    driver.get(url)
    time.sleep(5)  # Wait for JS to load events

    nationer_data = []
    # Find all event blocks (adjust selector as needed)
    event_blocks = driver.find_elements(By.CSS_SELECTOR, "div.flex.flex-col.justify-evenly")
    print(f"DEBUG: Found {len(event_blocks)} event blocks in DOM.")

    for block in event_blocks:
        # Each field is optional in the markup, so fall back to an empty string
        try:
            event_title = block.find_element(By.CSS_SELECTOR, "h4 a").text.strip()
        except Exception:
            event_title = ""
        try:
            organiser = block.find_element(By.CSS_SELECTOR, "a.text-primary, p.text-primary").text.strip()
        except Exception:
            organiser = ""
        try:
            open_time = block.find_element(By.CSS_SELECTOR, "time").text.strip()
        except Exception:
            open_time = ""
        try:
            permalink = block.find_element(By.CSS_SELECTOR, "h4 a").get_attribute("href")
        except Exception:
            permalink = ""

        if event_title:
            print(f"DEBUG: Event: {event_title}, Organiser: {organiser}, Time: {open_time}")
            nationer_data.append({
                "event": event_title,
                "nation": organiser,
                "open_time": open_time,
                "permalink": permalink
            })

    driver.quit()

    # Write debug file for extracted events
    debug_path = os.path.abspath("/mnt/serverdata/html/crawlernation/events_debug.json")
    try:
        with open(debug_path, "w", encoding="utf-8") as f:
            json.dump(nationer_data, f, ensure_ascii=False, indent=2)
        print(f"Wrote debug event data to {debug_path}")
    except Exception as e:
        print(f"Error writing debug file: {e}")

    return nationer_data


def send_to_discord_webhook(data, webhook_url):
    """Post the collected events to the Discord webhook as a single message."""
    if not webhook_url:
        raise ValueError("DISCORD_WEBHOOK_URL not set")

    content = "Nationer Öppna Idag:\n"
    for item in data:
        content += f"**{item['nation']}**\nEvent: {item['event']}\nTid: {item['open_time']}\n---\n"

    # Discord message limit is 2000 characters
    if len(content) > 1900:
        content = content[:1900] + "... (truncated)"

    payload = {"content": content}
    try:
        response = requests.post(webhook_url, json=payload)
        response.raise_for_status()
        return response.status_code
    except requests.exceptions.HTTPError as e:
        print(f"Discord webhook error: {e}\nPayload: {payload}")
        return None


def main():
    nationer_data = fetch_nationer_open_times(NATIONER_URL)
    if nationer_data:
        send_to_discord_webhook(nationer_data, DISCORD_WEBHOOK_URL)
    else:
        send_to_discord_webhook([
            {"nation": "Inga aktiviteter hittades.", "event": "", "open_time": ""}
        ], DISCORD_WEBHOOK_URL)


if __name__ == "__main__":
    main()