chore: depend on safe_traverse from lewy_globals, add scraper cron job

2025-06-11 00:35:48 +02:00
parent 5d238cb6b8
commit bc82fcd6b8
3 changed files with 7 additions and 18 deletions

fs_scraper.py

@@ -1,23 +1,13 @@
 from flask import session
 from lewy_db import baza as ldb
 from lewy_globals import colors as c
+from lewy_globals import safe_traverse
 import json
 import lewy_globals
 import requests
 import time
 from sqlalchemy import func
-
-def safe_traverse(obj: dict, path: list, default=None):
-    result = obj
-    try:
-        for x in path:
-            result = result[x]
-    except (KeyError, TypeError):
-        result = default
-        print(f"safe_traverse: error reading {' -> '.join(path)} - returning: {default}")
-    finally:
-        return result
 
 
 class scraper:
     headers = {
(main application module; filename not shown)

@@ -1,7 +1,7 @@
 from argparse import ArgumentParser
 from flask import Flask, Response, render_template
 from flask_apscheduler import APScheduler
-from fs_scraper import scraper as scr
+from fs_scraper import scraper
 from lewy_globals import colors as c
 import lewy_api
 import lewy_db
@@ -13,7 +13,7 @@ import time
 app = Flask(__name__)
 app_host = "None"
 app_port = "None"
-scrape = None
+scr = None
 
 def setup():
     # sanity check: make sure config is set
@@ -79,7 +79,7 @@ def setup():
     app.add_url_rule('/api/<path:received_request>', view_func=lewy_api.api_global_catchall)
     db = lewy_globals.setupDb(app, config)
-    scraper = scr()
+    scr = scraper()
 
     with app.app_context():
         db.create_all()
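
One caveat this hunk does not show: unless setup() declares the name global, the assignment scr = scraper() creates a function-local variable and the module-level scr stays None for the scheduler job below. A minimal sketch of the presumably intended pattern; the global statement is an assumption, not visible in this diff:

scr = None  # module-level placeholder, as in the earlier hunk

def setup():
    global scr       # assumed; without it the next line binds a setup()-local name
    scr = scraper()  # shared instance later used by every2hours()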
@@ -97,8 +97,7 @@ def every5seconds():
 def every2hours():
     # update the database by scraping FS
-    # ...
-    # scraper.aktualizuj_dane()
+    scr.aktualizuj_dane()
     return
 
 
 @app.route('/<string:val>', methods=['GET'])
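
The decorators that schedule every5seconds and every2hours fall outside the hunks shown. Given the flask_apscheduler import in the first hunk, a typical wiring consistent with these names looks like the sketch below; the trigger values and job ids are assumptions inferred from the function names:

from flask import Flask
from flask_apscheduler import APScheduler

app = Flask(__name__)
scheduler = APScheduler()
scheduler.init_app(app)
scheduler.start()

@scheduler.task("interval", id="every5seconds", seconds=5)
def every5seconds():
    pass  # lightweight periodic work

@scheduler.task("interval", id="every2hours", hours=2)
def every2hours():
    pass  # in the diff above: scr.aktualizuj_dane()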

lewy_globals.py

@@ -23,9 +23,9 @@ def safeTraverse(obj: dict, path: list, default=None):
     result = obj
     try:
         for x in path:
             result = result[x]
-    except KeyError:
+    except (KeyError, TypeError):
         result = default
-        # print(f"error reading: {' -> '.join(path)} - returning: {default}")
+        print(f"error reading: {' -> '.join(path)} - returning: {default}")
     finally:
         return result
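
The widened except clause matters whenever traversal lands on a non-dict partway through the path: subscripting None or an int raises TypeError, not KeyError. Before this commit the return inside finally silently suppressed that TypeError and handed back the partially traversed value instead of the default. A short illustration with hypothetical data:

data = {"a": {"b": 1}, "c": None}

safeTraverse(data, ["a", "b"])          # -> 1
safeTraverse(data, ["a", "x"], 0)       # KeyError -> 0
safeTraverse(data, ["c", "x"], 0)       # None["x"] raises TypeError -> 0 (previously returned None)
safeTraverse(data, ["a", "b", "c"], 0)  # 1["c"] raises TypeError -> 0 (previously returned 1)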