basic API integration with frontend
mahathu committed Feb 25, 2022
1 parent d143d98 commit 3026919
Showing 7 changed files with 109 additions and 66 deletions.
44 changes: 19 additions & 25 deletions client/src/App.svelte
@@ -1,30 +1,24 @@
 <script>
-    export let name;
+    import { onMount } from 'svelte';
+    import Event from "./components/Event.svelte";
+
+    let events = [];
+    onMount(async () => {
+        const res = await fetch("./get_bath_info");
+        events = await res.json();
+    });
+
+    // function getBaths() {
+    //     fetch("./get_bath_info")
+    //         .then(d => d.text())
+    //         .then(d => (events = JSON.parse(d)));
+    // }
 </script>
 
 <main>
-    <h1>Hello {name}!</h1>
-    <p>Visit the <a href="https://svelte.dev/tutorial">Svelte tutorial</a> to learn how to build Svelte apps.</p>
-</main>
+    <h1>Hello!</h1>
 
-<style>
-    main {
-        text-align: center;
-        padding: 1em;
-        max-width: 240px;
-        margin: 0 auto;
-    }
-
-    h1 {
-        color: #ff3e00;
-        text-transform: uppercase;
-        font-size: 4em;
-        font-weight: 100;
-    }
-
-    @media (min-width: 640px) {
-        main {
-            max-width: none;
-        }
-    }
-</style>
+    {#each events as event}
+        <Event {event}/>
+    {/each}
+</main>
5 changes: 5 additions & 0 deletions client/src/components/Event.svelte
@@ -0,0 +1,5 @@
<script>
export let event;
</script>

<div>{event.name}: {event.state}</div>
5 changes: 1 addition & 4 deletions client/src/main.js
@@ -1,10 +1,7 @@
 import App from './App.svelte';
 
 const app = new App({
-    target: document.body,
-    props: {
-        name: 'wordle'
-    }
+    target: document.body
 });
 
 export default app;
76 changes: 76 additions & 0 deletions scraping.py
@@ -0,0 +1,76 @@
from typing import List
import requests
from datetime import datetime
from bs4 import BeautifulSoup

baths = {
1: "Stadtbad Mitte",
# 2: "Schwimmhalle Fischerinsel",
# 7: "Sommerbad Humboldthain",
# 9: "Kombibad Seestraße (Halle)",
# 11: "Schwimmhalle Thomas-Mann-Straße",
# 15: "Wellenbad am Spreewaldplatz",
# 17: "Sommerbad Kreuzberg",
# 18: "Stadtbad Schöneberg",
# 19: "Sport - und Lehrschwimmhalle Schöneberg",
# 21: "Stadtbad Charlottenburg \"Alte Halle\"",
# 24: "Sommerbad Olympiastadion",
# 26: "Stadtbad Spandau Nord",
# 27: "Sommerbad Staaken",
# 28: "Kombibad Spandau Süd (Halle)",
# 29: "Stadtbad Wilmersdorf 1",
# 30: "Stadtbad Wilmersdorf 2",
# 31: "Sommerbad Wilmersdorf",
# 34: "Schwimmhalle Hüttenweg",
# 38: "Stadtbad Märkisches Viertel",
# 42: "Stadtbad Lankwitz",
# 43: "Schwimmhalle Finckensteinallee",
# 45: "Sommerbad Insulaner",
# 46: "Stadtbad Tempelhof",
# 47: "Kombibad Mariendorf (Halle)",
# 48: "Sommerbad Mariendorf (Rixdorfer Straße)",
# 49: "Stadtbad Neukölln",
# 51: "Sommerbad Neukölln",
# 52: "Kombibad Gropiusstadt (Halle)",
# 54: "Schwimmhalle Baumschulenweg",
# 60: "Kleine Schwimmhalle Wuhlheide",
# 61: "Schwimmhalle Allendeviertel",
# 62: "Sommerbad Wuhlheide",
# 64: "Schwimmhalle Sewanstraße",
# 68: "Schwimmhalle Buch",
# 70: "Sommerbad Pankow",
# 71: "Schwimmhalle Helene-Weigel-Platz",
# 74: "Schwimmhalle Zingster Straße",
# 76: "Schwimmhalle Kaulsdorf",
# 79: "Schwimm- und Sprunghalle im Europasportpark",
# 81: "Schwimmhalle Kreuzberg",
}

def get_slot_info(bath_id: int, year: int, month: int) -> List:
    url = f'https://pretix.eu/Baeder/{bath_id}/?date={year}-{month:02}'
    r = requests.get(url)
    soup = BeautifulSoup(r.content, 'html.parser')

    events = []
    for event in soup.find_all('a', class_='event'):
        state = event['class'][-1]

        if state == 'over':
            continue

        time_obj = event.find('span', class_='event-time')
        date = datetime.fromisoformat(time_obj['data-time'])
        times = [t.text for t in time_obj.find_all('time')]
        from_date = (
            event.find('span', class_='event-status').text.strip().split()[-1]
            if state == 'soon' else ''
        )

        events.append({
            'bath_id': bath_id,
            'date': date.date(),  # only date
            'start_time': times[0],
            'end_time': times[1],
            'state': state,
            'from_date': from_date
        })
    return events
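
A note on the new scraper (not part of the diff): get_slot_info returns one dict per bookable slot, so it can be exercised on its own. A minimal usage sketch, assuming the pretix.eu page for the given bath is reachable and structured as the parser expects:

# Hypothetical usage sketch, not in the commit: fetch the February 2022 slots
# for bath id 1 ("Stadtbad Mitte") and print the fields assembled above.
from scraping import baths, get_slot_info

for slot in get_slot_info(1, 2022, 2):
    print(baths[1], slot['date'], f"{slot['start_time']}-{slot['end_time']}", slot['state'])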
Empty file removed scraping/__init__.py
Empty file.
33 changes: 0 additions & 33 deletions scraping/lib.py

This file was deleted.

12 changes: 8 additions & 4 deletions server.py
@@ -1,5 +1,7 @@
-from flask import Flask, send_from_directory
-from scraping.lib import get_slot_info
+import os
+import csv
+from flask import Flask, send_from_directory, jsonify
+import scraping
 app = Flask(__name__)
 
 @app.route("/")
@@ -13,8 +15,10 @@ def home(path):
 
 @app.route("/get_bath_info")
 def get_bath_info():
-    events = get_slot_info(2, 2022, 2)
-    return str(events)
+    events = [dict(event, name=scraping.baths[bath_id])
+              for bath_id in scraping.baths.keys()
+              for event in scraping.get_slot_info(bath_id, 2022, 2)]
+    return jsonify(events)
 
 if __name__ == "__main__":
     app.run(debug=True)
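
For a quick end-to-end check of the new endpoint (a hypothetical snippet, not in the commit): with the Flask dev server started via python server.py and listening on its default port 5000, the JSON from /get_bath_info can be fetched and inspected like this.

# Hypothetical smoke test; assumes the dev server is running on localhost:5000.
import requests

events = requests.get("http://localhost:5000/get_bath_info").json()
for e in events:
    print(e["name"], e["date"], e["start_time"], "-", e["end_time"], e["state"])

Each entry carries the bath name injected in get_bath_info plus the fields built by scraping.get_slot_info.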
