Skip to content
This repository was archived by the owner on Feb 22, 2025. It is now read-only.

Commit 0e2daaa

Browse files
authored
Merge pull request #791 from rix1337/dev
v.20.0.2
2 parents 0ed87bd + 500a505 commit 0e2daaa

File tree

9 files changed

+341
-38
lines changed

9 files changed

+341
-38
lines changed
Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ jobs:
88
steps:
99
- uses: actions/checkout@v4
1010
- name: Setup python
11-
uses: actions/setup-python@v4
11+
uses: actions/setup-python@v5
1212
with:
1313
python-version: 3.12
1414
architecture: x64
@@ -24,7 +24,7 @@ jobs:
2424
steps:
2525
- uses: actions/checkout@v4
2626
- name: Setup python
27-
uses: actions/setup-python@v4
27+
uses: actions/setup-python@v5
2828
with:
2929
python-version: ${{ matrix.python-version }}
3030
architecture: x64
@@ -45,7 +45,7 @@ jobs:
4545
- name: Setup Node.js
4646
uses: actions/setup-node@v4
4747
with:
48-
node-version: 16.x
48+
node-version: 20.x
4949
- name: Install dependencies
5050
run: npm ci
5151
working-directory: feedcrawler/web_interface/vuejs_frontend

.github/workflows/CreateRelease.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ jobs:
2929
- name: Setup Node.js
3030
uses: actions/setup-node@v4
3131
with:
32-
node-version: 16.x
32+
node-version: 20.x
3333
- name: Install Node dependencies
3434
run: npm ci
3535
working-directory: feedcrawler/web_interface/vuejs_frontend

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -103,8 +103,8 @@ Beispiel:
103103

104104
## Hostnamen
105105

106-
FeedCrawler kann zum Durchsuchen beliebiger Webseiten verwendet werden. Welche das sind, entscheiden Anwender durch das
107-
Befüllen der _FeedCrawler.ini_ in der Kategorie _[Hostnames]_. Eingetragen werden dort Hostnamen, bspw. _ab = xyz.com_.
106+
FeedCrawler kann zum Durchsuchen beliebiger Webseiten verwendet werden.
107+
Welche das sind, entscheiden Anwender selbständig bei der Einrichtung. Es gilt dabei:
108108

109109
* Welche Hostnamen aufgerufen werden, entscheidet allein der Anwender.
110110
* Ist nicht mindestens ein Hostname gesetzt, wird der FeedCrawler nicht starten.

feedcrawler/providers/common_functions.py

Lines changed: 21 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,8 @@
1010
import socket
1111
import sys
1212
from urllib.parse import urlparse
13+
import signal
14+
import platform
1315

1416
from feedcrawler.providers import gui
1517
from feedcrawler.providers import shared_state
@@ -557,7 +559,7 @@ def rreplace(s, old, new, occurrence):
557559

558560
def configpath(configpath):
559561
pathfile = "FeedCrawler.conf"
560-
current_path = os.path.dirname(sys.argv[0])
562+
current_path = os.path.dirname(os.path.abspath(sys.argv[0]))
561563
if configpath:
562564
f = open(pathfile, "w")
563565
f.write(configpath)
@@ -569,8 +571,24 @@ def configpath(configpath):
569571
if shared_state.values["gui"]:
570572
configpath = gui.configpath_gui(current_path)
571573
else:
572-
print("Wo sollen Einstellungen und Logs abgelegt werden? Leer lassen, um den aktuellen Pfad zu nutzen.")
573-
configpath = input("Pfad angeben:")
574+
def handler(signum, frame):
575+
raise Exception("Timeout!")
576+
577+
signal.signal(signal.SIGALRM, handler)
578+
if platform.system() != "Windows":
579+
print("Wo sollen Einstellungen und Logs abgelegt werden?")
580+
print("30 Sekunden warten oder leer lassen, um den aktuellen Pfad zu nutzen.")
581+
signal.alarm(30)
582+
583+
try:
584+
configpath = input("Pfad angeben:")
585+
signal.alarm(0)
586+
except Exception:
587+
print(" ... 30 Sekunden verstrichen! Nutze aktuellen Pfad.")
588+
configpath = ""
589+
else:
590+
print("Wo sollen Einstellungen und Logs abgelegt werden? Leer lassen, um den aktuellen Pfad zu nutzen.")
591+
configpath = input("Pfad angeben:")
574592
if len(configpath) > 0:
575593
f = open(pathfile, "w")
576594
f.write(configpath)

feedcrawler/providers/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99

1010
def get_version():
11-
return "20.0.1"
11+
return "20.0.2"
1212

1313

1414
def create_version_file():

feedcrawler/run.py

Lines changed: 27 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
from feedcrawler.providers.config import CrawlerConfig
2222
from feedcrawler.providers.myjd_connection import set_device_from_config, get_if_one_device, myjd_input
2323
from feedcrawler.providers.sqlite_database import FeedDb, remove_redundant_db_tables
24-
from feedcrawler.web_interface.web_server import web_server
24+
from feedcrawler.web_interface.web_server import hostnames_config, myjd_config, web_server
2525

2626
version = "v." + version.get_version()
2727

@@ -82,6 +82,14 @@ def main():
8282
shared_state.update("log_level", log_level)
8383
shared_state.set_logger()
8484

85+
local_address = 'http://' + check_ip()
86+
port = int('9090')
87+
docker = False
88+
if shared_state.values["docker"]:
89+
docker = True
90+
elif arguments.port:
91+
port = int(arguments.port)
92+
8593
hostnames = CrawlerConfig('Hostnames')
8694

8795
def clean_up_hostname(host, string):
@@ -106,18 +114,21 @@ def clean_up_hostname(host, string):
106114
if not shared_state.values["test_run"] and not set_hostnames:
107115
if shared_state.values["gui"]:
108116
gui.no_hostnames_gui(shared_state.values["configfile"])
117+
sys.exit(1)
109118
else:
110-
print('Keine Hostnamen in der FeedCrawler.ini gefunden! Beende FeedCrawler!')
111-
time.sleep(10)
112-
sys.exit(1)
119+
if not hostnames_config(port, local_address):
120+
print('Keine Hostnamen in der FeedCrawler.ini gefunden! Beende FeedCrawler!')
121+
time.sleep(10)
122+
sys.exit(1)
113123

114124
if not shared_state.values["test_run"]:
125+
myjd_input_required = False
115126
if not os.path.exists(shared_state.values["configfile"]):
116127
if shared_state.values["docker"]:
117128
if arguments.jd_user and arguments.jd_pass:
118-
myjd_input(arguments.port, arguments.jd_user, arguments.jd_pass, arguments.jd_device)
129+
myjd_input_required = True
119130
else:
120-
myjd_input(arguments.port, arguments.jd_user, arguments.jd_pass, arguments.jd_device)
131+
myjd_input_required = True
121132
else:
122133
feedcrawler = CrawlerConfig('FeedCrawler')
123134
user = feedcrawler.get('myjd_user')
@@ -132,8 +143,11 @@ def clean_up_hostname(host, string):
132143
feedcrawler.save('myjd_device', one_device)
133144
set_device_from_config()
134145
else:
135-
myjd_input(arguments.port, arguments.jd_user, arguments.jd_pass,
136-
arguments.jd_device)
146+
myjd_input_required = True
147+
148+
if myjd_input_required:
149+
if not myjd_config(port, local_address):
150+
myjd_input(arguments.port, arguments.jd_user, arguments.jd_pass, arguments.jd_device)
137151

138152
if not shared_state.values["test_run"]:
139153
if shared_state.get_device() and shared_state.get_device().name:
@@ -171,21 +185,16 @@ def clean_up_hostname(host, string):
171185
sys.exit(1)
172186

173187
feedcrawler = CrawlerConfig('FeedCrawler')
174-
port = int(feedcrawler.get("port"))
175-
docker = False
176-
if shared_state.values["docker"]:
177-
port = int('9090')
178-
docker = True
179-
elif arguments.port:
180-
port = int(arguments.port)
188+
if not arguments.docker and not arguments.port:
189+
port = int(feedcrawler.get("port"))
181190

182191
if feedcrawler.get("prefix"):
183192
prefix = '/' + feedcrawler.get("prefix")
184193
else:
185194
prefix = ''
186-
local_address = 'http://' + check_ip() + ':' + str(port) + prefix
195+
187196
if not shared_state.values["docker"]:
188-
print('Der Webserver ist erreichbar unter "' + local_address + '"')
197+
print(f'Der Webserver ist erreichbar unter "{local_address}:{port}{prefix}"')
189198

190199
shared_state.set_connection_info(local_address, port, prefix, docker)
191200

@@ -202,7 +211,7 @@ def clean_up_hostname(host, string):
202211

203212
if arguments.delay:
204213
delay = int(arguments.delay)
205-
print("Verzögere den ersten Suchlauf um " + str(delay) + " Sekunden")
214+
print(f"Verzögere den ersten Suchlauf um {delay} Sekunden")
206215
time.sleep(delay)
207216

208217
if not shared_state.values["test_run"]:

feedcrawler/web_interface/vuejs_frontend/package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

feedcrawler/web_interface/vuejs_frontend/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "feedcrawler-web",
3-
"version": "20.0.1",
3+
"version": "20.0.2",
44
"type": "module",
55
"scripts": {
66
"dev": "vite",

0 commit comments

Comments
 (0)