Merge v2.5.4 to master

This commit is contained in:
coulisse 2024-03-31 19:14:26 +02:00
commit 8cf0969dad
25 changed files with 384 additions and 8764 deletions

View File

@ -24,5 +24,5 @@ keywords:
- dxcluster
- spiderweb
license: GPL-3.0
version: v2.5.3
date-released: 2024-03-10
version: v2.5.4
date-released: 2024-03-31

View File

@ -9,11 +9,11 @@
[![CodeFactor](https://www.codefactor.io/repository/github/coulisse/spiderweb/badge)](https://www.codefactor.io/repository/github/coulisse/spiderweb)
- **Release:** v2.5.3
- **Release:** v2.5.4
- **Author:** Corrado Gerbaldo - [IU1BOW](https://www.qrz.com/db/IU1BOW)
- **Mail:** <corrado.gerbaldo@gmail.com>
- **Licensing:** Gpl V3.0 see [LICENSE](LICENSE) file.
- **Languages:** This application is written in Python 3.11/flask,Javascript and HTML
- **Languages:** This application is written in Python 3.12/flask,Javascript and HTML
___
**DXSpider** is a great DX Cluster software that has useful telnet interface.
@ -52,6 +52,10 @@ $dbpass = "your-password";
If you would change some MariaDB parameters, then you can find them in `/etc/mysql/my.cnf` or `/etc/my.cnf`, depending on your distro.
If the database will not be created automatically, please see ["DB_ISSUES.md"](docs/DB_ISSUES.md)
TODO: if an error occurs while installing the python modules, run:
sudo apt-get install libmariadb3 libmariadb-dev
TODO: check number ADXO
**3) Python modules**
You could install python modules using automatic or manual way.
@ -73,12 +77,6 @@ foo@bar:~$ pip install flask
foo@bar:~$ pip install Flask-minify
foo@bar:~$ pip install flask_wtf
foo@bar:~$ pip install pandas
```
Then you have to install mysql libraries:
```console
foo@bar:~$ pip install mysql-connector-python
foo@bar:~$ pip install --upgrade mysql-connector-python==8.0.12
```
### Configuration

View File

@ -1 +1 @@
v2.5.3
v2.5.4

2
data/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
*
!.gitignore

View File

@ -1,11 +1,23 @@
### Change log
Date: 31/03/2024
Release: v2.5.4
- tested with Python 3.12
- replaced mysql driver with mariadb driver
- upgraded Echarts lib from 5.3 to 5.5
- upgraded flag-icon-css lib from 6.15 to 7.2
- upgraded bootstrap to 5.3.3
- issue [#51](https://github.com/coulisse/spiderweb/issues/51): added total number of users & nodes connected
- issue [#56](https://github.com/coulisse/spiderweb/issues/56): added a simple counter
- issue [#58](https://github.com/coulisse/spiderweb/issues/58)
___
Date: 10/03/2024
Release: v2.5.3
- adapted card size and text for mobile
- removed monitor
- removed cookie consent banner, since this application uses only technical cookies
- issue [#51] (https://github.com/coulisse/spiderweb/issues/51) -- just for caching
- security [#22] (https://github.com/coulisse/spiderweb/security/dependabot/22)
- issue [#51](https://github.com/coulisse/spiderweb/issues/51) -- just for caching
- security issue [#22](https://github.com/coulisse/spiderweb/security/dependabot/22)
___
Date: 03/12/2023

View File

@ -1,86 +1,90 @@
# ***********************************************************************************
# Module used to get Announced DX Operation from NG3K website via .ICS (Calendar)
# Module used to get Announced DX Operation from NG3K website via rss feed
# file, parse it and return a dictionary with these events
# ***********************************************************************************
__author__ = "IU1BOW - Corrado"
import requests
import logging
from datetime import datetime
import tempfile
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s [%(levelname)s]: %(message)s",
datefmt="%m/%d/%Y %I:%M:%S",
)
# format single line
def format_line(prop):
prop_out = dict()
try:
dtstart = datetime.strptime(prop["DTSTART;VALUE=DATE"], "%Y%m%d")
dtend = datetime.strptime(prop["DTEND;VALUE=DATE"], "%Y%m%d")
now = datetime.now()
if dtstart <= now and dtend >= now:
prop_out["start"] = dtstart.strftime("%Y-%m-%dT%H:%M:%S%z")
prop_out["end"] = dtend.strftime("%Y-%m-%dT%H:%M:%S%z")
prop_out["summary"] = prop["SUMMARY"].split("(")[0].strip()
prop_out["callsign"] = prop["SUMMARY"].split("(", 1)[1].split(")", 1)[0]
prop_out["description"] = prop["DESCRIPTION"].replace("\\", "")
except KeyError:
pass
return prop_out
# TODO: url from conf parameter
import requests
import feedparser
import re
import unicodedata
def remove_control_characters(s):
return "".join(ch for ch in s if unicodedata.category(ch)[0]!="C")
def get_adxo_events():
url = "http://dxcal.kj4z.com/dxcal"
line_num = 0
event_num = 0
# URL of the RSS XML feed
rss_url = "https://www.ng3k.com/adxo.xml"
try:
logging.info("connection to: " + url)
req = requests.get(url)
# download XML
response = requests.get(rss_url)
xml_content = response.content
# parse XML
feed = feedparser.parse(xml_content)
events = []
prop = dict()
prop_name = ""
with tempfile.TemporaryFile() as temp:
temp.write(req.content)
temp.seek(0)
lines = temp.readlines()
for line_bytes in lines:
line = line_bytes.decode()
line_num += 1
current_line_array = line.strip().split(":", 1)
if current_line_array[0] == "BEGIN":
if current_line_array[1] == "VCALENDAR":
prop = {}
if current_line_array[1] == "VEVENT":
event_num += 1
prop = {}
else:
if current_line_array[0] == "END":
if current_line_array[1] == "VCALENDAR":
pass
if current_line_array[1] == "VEVENT":
prop = format_line(prop)
if prop:
events.append(prop)
else:
if len(current_line_array) > 1:
prop_name = current_line_array[0]
prop[prop_name] = current_line_array[1]
else:
if len(prop_name) > 0:
prop[prop_name] = (
prop[prop_name] + current_line_array[0]
)
now = datetime.now()
# extract elements
for item in feed.entries:
prop = {}
title = item.title
#title = "Sint Maarten: Dec 2 2023 - Jan 20 2024 -- PJ7AA -- QSL via: LoTW "
logging.debug(title)
#callsign
start_callsign_idx = title.find("--")
end_callsign_idx = title.find("--", start_callsign_idx + 2)
prop["callsign"] = title[start_callsign_idx + 2:end_callsign_idx].strip()
#period
period = title[title.find(":")+1: start_callsign_idx]
comma_year_idx = period.find(",")
#start date - end date
if comma_year_idx > 0:
#Mar 23-Apr 1, 2024 or Mar 23-30, 2024
year = period[comma_year_idx+1:].strip()
date_start = period[:period.find("-")]+" "+year
date_end = period[period.find("-")+1:comma_year_idx]+" "+year
match = re.search(r"^([A-Za-z]{3}) \d{1,2} \d{4}$", date_end)
if match:
#Mar 23-Apr 1, 2024
pass
else:
#Mar 23-30, 2024
date_end=date_start[:5]+date_end
else:
#Mar 23 2023-Apr 1 2024
date_start = period[:period.find("-")]
date_end = period[period.find("-")+1:]
prop["start"] = datetime.strptime(date_start.strip(), "%b %d %Y")
prop["end"] = datetime.strptime(date_end.strip(), "%b %d %Y")
prop["summary"] = remove_control_characters(title)
prop["description"] = remove_control_characters(item.description)
logging.debug("date start: "+ str(prop["start"]) )
logging.debug("date end: "+ str(prop["end"]) )
#append only valid (currently in-date) events
if prop["start"] <= now and prop["end"] >= now:
events.append(prop)
logging.debug(events)
if len(events) > 0:
logging.info("number ADXO events: " + str(len(events)))
else:
logging.warn("No ADXO events founds")
logging.debug("number of line reads: " + str(line_num))
logging.info("number ADXO events: " + str(event_num))
return events
except Exception as e1:
logging.error(e1)
return

View File

@ -17,7 +17,7 @@ logging.basicConfig(
)
# TODO: url from conf parameter
url = "https://www.country-files.com/cty/cty_wt_mod.dat"
cty_local = os.path.dirname(__file__) + "/../static/data/cty_wt_mod.dat"
cty_local = os.path.dirname(__file__) + "/../data/cty_wt_mod.dat"
country_file = os.path.dirname(__file__) + "/../cfg/country.json"
# -------------------------------------------------------------------------------------
# download country files cty.dat
@ -166,6 +166,7 @@ def parse_alias(alias, master):
# load file from configuration, containing all world country, with related ISO codes
# -------------------------------------------------------------------------------------
def load_country():
logging.info('loading:' +country_file)
with open(country_file) as json_country:
return json.load(json_country)

View File

@ -9,36 +9,48 @@ import logging
def parse_who(lines):
# print(lines.decode('ascii'))
# create a list o lines and define the structure
lines = lines.splitlines()
fmtstring = "2x 9s 10s 18s 9s 8s 15s"
fieldstruct = struct.Struct(fmtstring)
row_headers = ("callsign", "type", "started", "name", "average_rtt", "link")
# skip first lines and last line
payload = []
for i in range(3, len(lines) - 1):
line = lines[i]
ln = len(line)
padding = bytes(" " * (struct.calcsize(fmtstring) - ln), "utf-8")
line = line + padding
line = lines[i].lstrip().decode("utf-8")
logging.debug(line)
if ln > 10:
parse = fieldstruct.unpack_from
fields = list(parse(line))
line_splitted_by_first_space = line.split(" ", 1)
first_part = line_splitted_by_first_space[0]
second_part = line_splitted_by_first_space[1]
ln = len(second_part)
try:
if ln > 32:
fields = [first_part.encode()] #adding callsign
for j, item_field in enumerate(fields):
try:
fields[j] = item_field.decode("utf-8").strip()
except AttributeError:
print(item_field)
payload.append(dict(zip(row_headers, fields)))
if ln > 45:
fieldstruct = struct.Struct("10s 18s 9s 2x 5s")
else:
fieldstruct = struct.Struct("10s 18s 9s")
# payload = json.dumps(payload)
parse = fieldstruct.unpack_from
logging.debug(second_part)
fields += list(parse(second_part.encode())) #adding rest of informations
for j, item_field in enumerate(fields):
try:
fields[j] = item_field.decode("utf-8").strip()
except AttributeError:
logging.error(item_field)
payload.append(dict(zip(row_headers, fields)))
except Exception as e1:
logging.error(e1)
return payload

View File

@ -1,10 +1,9 @@
# *****************************************************************************************
# module used to make query to mysql
# module used to make query to mariadb
# TODO: manage polymorfism and use only one qry sign
# *****************************************************************************************
# import MySQLdb as my
import mysql.connector as my
from mysql.connector import pooling
import mariadb as my
import logging
import json
import pandas as pd
@ -34,18 +33,19 @@ class query_manager:
return
logging.info("config file loaded")
self.__cnxpool = pooling.MySQLConnectionPool(
self.__cnxpool = my.ConnectionPool(
host=cfg["mysql"]["host"],
user=cfg["mysql"]["user"],
passwd=cfg["mysql"]["passwd"],
db=cfg["mysql"]["db"],
charset="latin1",
# charset='utf8mb4',
# collation = 'utf8mb4_general_ci',
pool_name="spider_pool",
use_pure=True,
pool_size=3,
pool_size=5,
pool_validation_interval=250
)
logging.info("db connection pool created")
# normal query

View File

@ -186,6 +186,4 @@ def query_build(logger,parameters,band_frequencies,modes_frequencies,continents_
return query_string
def query_build_callsing_list():
query_string = "SELECT spotcall AS dx FROM (select spotcall from spot order by rowid desc limit 50000) s1 GROUP BY spotcall ORDER BY count(spotcall) DESC, spotcall LIMIT 100;"
return query_string
query_build_callsing_list = lambda: 'SELECT spotcall AS dx FROM (select spotcall from spot order by rowid desc limit 50000) s1 GROUP BY spotcall ORDER BY count(spotcall) DESC, spotcall LIMIT 100;'

View File

@ -1,41 +1,33 @@
astroid==2.12.14
blinker==1.6.2
charset-normalizer==2.1.1
click==8.1.3
dill==0.3.6
docopt-ng==0.8.1
easywatch==0.0.5
Flask==2.3.3
Flask-Consent==0.0.3
Flask-Minify==0.41
Flask-WTF==1.1.1
blinker==1.7.0
charset-normalizer==3.3.2
click==8.1.7
feedparser==6.0.11
Flask==3.0.2
Flask-Minify==0.42
Flask-WTF==1.2.1
htmlmin==0.1.12
idna==3.4
isort==5.11.4
idna==3.6
itsdangerous==2.1.2
Jinja2==3.1.3
jsmin==3.0.1
lazy-object-proxy==1.9.0
lesscpy==0.15.1
markup==0.2
MarkupSafe==2.1.1
mccabe==0.7.0
mysql-connector-python>=8.2.0
numpy==1.24.1
pandas==1.5.2
platformdirs==2.6.2
mariadb==1.1.10
MarkupSafe==2.1.5
numpy==1.26.4
packaging==24.0
pandas==2.2.1
ply==3.11
protobuf==4.21.12
python-dateutil==2.8.2
pytz==2022.7
rcssmin==1.1.1
python-dateutil==2.9.0.post0
pytz==2024.1
rcssmin==1.1.2
requests==2.31.0
setuptools==68.2.2
sgmllib3k==1.0.0
six==1.16.0
tomlkit==0.11.6
urllib3==2.0.7
watchdog==3.0.0
Werkzeug==2.3.8
wrapt==1.14.1
WTForms==3.0.1
tzdata==2024.1
urllib3==2.2.1
Werkzeug==3.0.1
wheel==0.41.2
WTForms==3.1.2
xmltodict==0.13.0
xxhash==3.1.0
xxhash==3.4.1

View File

@ -134,9 +134,9 @@ if [ "$1" == "-r" ]; then
sed -i '/staticjinja==/d' ../requirements.txt
sed -i '/lighthouse==/d' ../requirements.txt
echo 'force some requirements...'
sed -i 's/mysql-connector-python==8.0.31/mysql-connector-python>=8.0.31/' ../requirements.txt
sed -i 's/mysql-connector-python==8.2.0/mysql-connector-python>=8.2.0/' ../requirements.txt
#echo 'force some requirements...'
#sed -i 's/mysql-connector-python==8.0.31/mysql-connector-python>=8.0.31/' ../requirements.txt
#sed -i 's/mysql-connector-python==8.2.0/mysql-connector-python>=8.2.0/' ../requirements.txt
if ! sed -i '7,25s/level=DEBUG/level=INFO/g' ${app_ini}; then
echo 'ERROR settimg loglevel=INFO '
@ -277,7 +277,7 @@ if [ "$2" == "-c" ]; then
head -10 ../docs/CHANGELOG.md
read -p "Do you want to proceed to commit version ${ver} (yes/no) " yn
read -r -p "Do you want to proceed to commit version ${ver} (yes/no) " yn
case $yn in
yes ) echo ok, we will proceed;;
@ -297,7 +297,7 @@ if [ "$2" == "-c" ]; then
fi
echo 'Please, add comment for commit on tag ' ${ver}
read comm_tag_msg
read -r comm_tag_msg
if ! git commit -m "${comm_tag_msg}"; then
echo 'Error on commit'
exit 9

0
scripts/dxcluster.db Normal file
View File

View File

@ -9,7 +9,7 @@ chr() {
}
db_insert () {
n=10000
n=2000000
for (( i=1; i<=${n}; i++ ))
do
freq=$(shuf -i 100-50000 -n 1)
@ -25,8 +25,8 @@ db_insert () {
#timestamp=$(shuf -i 1673759569-1673763169 -n 1)
#epoch_start=$((${curr_epoch_time}-3600*24*365*2))
epoch_start=$((${curr_epoch_time}-3600))
echo ${curr_epoch_time}
echo ${epoch_start}
#echo ${curr_epoch_time}
#echo ${epoch_start}
timestamp=$(shuf -i ${epoch_start}-${curr_epoch_time} -n 1)
cs_letter_1=$(chr $(shuf -i 65-90 -n1))
@ -43,10 +43,10 @@ db_insert () {
#sudo mysql -uroot dxcluster -e "INSERT INTO spot VALUES (${i},${freq},'${callsign}',UNIX_TIMESTAMP(),'DUMMY TEST','IU1BOW',${spotdxcc},${spotterdxcc},'IU1BOW-2',${spotitu},${spotcq},${spotteritu},${spottercq},NULL,NULL,'5.198.229.129');"
sleep 3
p=$(( ${i}*100/${n} ))
echo -ne ${p}'% \r'
# echo -ne ${p}'% \r'
done
echo -ne '\n'
# echo -ne '\n'
}

126
scripts/mysql2sqlite.sh Executable file
View File

@ -0,0 +1,126 @@
#!/bin/bash
echo this script will convert your mysql db to sqllite databases
#TODO:
#
# read dxvars
# check sqllite perl
# dump mysql
# create table in sqlite
# import in sqlite
# create indexes
# change dxvars.pm
#
sqlite_db=dxcluster.db
mysql_dump_db=$(mktemp)
mysql_dump_db="mysql.sql" #TODO: remove
progress_bar() {
local width=50
local percent="$1"
local filled_width=$((width * percent / 100))
local dots="$(printf '%*s' "$filled_width" | tr ' ' '=')"
local spaces="$(printf '%*s' "$((width - filled_width))" | tr ' ' ' ')"
echo -ne "[$dots$spaces] ($percent%)\r"
}
#Empty database
if ! > ${sqlite_db};
then
echo 'Error empting sqlite db: ' ${sqlite_db}
exit 1
else
echo 'sqlite db created: ' ${sqlite_db}
fi
#dump mysql data
#TODO: remove comments
#read -p 'MySQL User: ' user
#
#if ! mysqldump -u ${user} -p --skip-create-options --compatible=ansi --skip-extended-insert --compact --single-transaction --databases dxcluster \
# | grep "INSERT INTO" \
# | sed -e ':a' -e 'N' -e '$!ba' -e 's/,\n)/\n)/'\
# | sed -e 's/\\'\''/'\'''\''/g'\
# > ${mysql_dump_db};
# then
# echo 'Error on dumping mysql data'
# exit 1
# else
# echo 'dump created: ' ${mysql_dump_db}
#fi
#create table spot
if ! sqlite3 ${sqlite_db} <<EOF
CREATE TABLE "spot" (
"rowid" INTEGER PRIMARY KEY,
"freq" REAL NOT NULL,
"spotcall" TEXT NOT NULL,
"time" INTEGER NOT NULL,
"comment" TEXT DEFAULT NULL,
"spotter" TEXT NOT NULL,
"spotdxcc" INTEGER DEFAULT NULL,
"spotterdxcc" INTEGER DEFAULT NULL,
"origin" TEXT DEFAULT NULL,
"spotitu" INTEGER DEFAULT NULL,
"spotcq" INTEGER DEFAULT NULL,
"spotteritu" INTEGER DEFAULT NULL,
"spottercq" INTEGER DEFAULT NULL,
"spotstate" TEXT DEFAULT NULL,
"spotterstate" TEXT DEFAULT NULL,
"ipaddr" TEXT DEFAULT NULL
);
EOF
then
echo 'Error on creating table spot in Sqlite'
exit 1
else
echo 'Table spot created in sqlite db: ' ${sqlite_db}
fi
#import spot in sqlite
max_insert=$(wc -l ${mysql_dump_db}|cut -d ' ' -f1)
echo 'Importing dump into Sqlite' ${max_insert} 'rows: '
counter=0
sv_perc=-1
while IFS= read -r line; do
let "counter++"
if ! sqlite3 ${sqlite_db} "${line}";
then
echo '...at line: ' ${counter} ' | ' ${line}
fi
perc=$(( ${counter} * 100 / ${max_insert} ))
if [ ${perc} -ne ${sv_perc} ]; then
sv_perc=${perc}
progress_bar ${perc}
fi
done < ${mysql_dump_db}
echo 'Sqlite db imported: ' ${sqlite_db}
#create index
echo 'Creating indexes...'
if ! sqlite3 ${sqlite_db} <<EOF
CREATE INDEX idx_spot_spotcall ON spot (spotcall);
CREATE INDEX idx_spot_spotter ON spot (spotter);
EOF
then
echo 'Error on creating indexes on spot in Sqlite'
exit 1
else
echo 'Indexes created in sqlite db: ' ${sqlite_db}
fi
exit #TODO: remove exit
#remove dump file
rm ${mysql_dump_db};
echo done

View File

@ -170,7 +170,7 @@ span.search-callsign {
.kpi-card {
overflow: hidden;
position: relative;
box-shadow: 1px 1px 3px rgba(0,0,0,0.75);;
box-shadow: 1px 1px 3px rgba(0,0,0,0.75);
display: inline-block;
padding: 1em;
border-radius: 0;

File diff suppressed because it is too large Load Diff

View File

@ -6,7 +6,7 @@
//var my_adxo_events=jQuery.parseJSON(my_adxo_events_json.replaceAll("\t",""));
var my_adxo_events = JSON.parse(my_adxo_events_json.replaceAll('\t', ''));
//var my_adxo_events = JSON.parse(my_adxo_events_json)
refresh_timer(); //run first data fetch
var myRefresh = setInterval(refresh_timer, timer_interval_json);
window.onload = () => {

View File

@ -320,9 +320,8 @@ function compose_filter(id, len, qry_json) {
}
}
catch (err) {
if (err.name == 'TypeError') {
console.error(err.name);
/* error managed: it is ok: probabilly ther is no filter on cq region */
if (err instanceof TypeError) {
console.log(err.name + ' managed - it is ok: probabilly ther is no filter on cq region');
} else {
throw err;
}

File diff suppressed because one or more lines are too long

View File

@ -1,5 +1,5 @@
{
"name": "IU1BOW Spiderweb v2.5.3",
"name": "IU1BOW Spiderweb v2.5.4",
"description": "DXCluser for ham radio by IU1BOW",
"short_name": "Spiderweb",
"theme_color": "#f3b221",

View File

@ -1,5 +1,5 @@
// Declaration of the constant for the cache name
const CACHE_NAME = 'pwa-spiderweb_v2.5.3'
const CACHE_NAME = 'pwa-spiderweb_v2.5.4'
// Declaration of the constant for the URLs to cache
const URLS_TO_CACHE = [

View File

@ -16,10 +16,10 @@
<link rel="manifest" href="/static/pwa/manifest.webmanifest">
<link rel="stylesheet" href="/static/css/rel/style.min.css">
<!-- Bootstrap CSS -->
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/css/bootstrap.min.css"
integrity="sha384-rbsA2VBKQhggwzxH7pPCaAqO46MgnOM80zW1RWuH61DGLwZJEdK2Kadq2F9CUG65" crossorigin="anonymous">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/bootstrap/5.3.3/css/bootstrap.min.css"
integrity="sha512-jnSuA4Ss2PkkikSOLtYs8BlYIeeIK1h99ty4YfvRPAlzr377vr3CXDb7sb7eEEBYjDtcYj+AjBH3FLv5uSJuXg==" crossorigin="anonymous">
<!-- Flag Icon CSS -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/flag-icon-css/6.15.0/css/flag-icons.min.css"
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/flag-icon-css/7.2.0/css/flag-icons.min.css"
integrity="sha512-bZBu2H0+FGFz/stDN/L0k8J0G8qVsAL0ht1qg5kTwtAheiXwiRKyCq1frwfbSFSJN3jooR5kauE0YjtPzhZtJQ=="
crossorigin="anonymous" referrerpolicy="no-referrer" />
<!-- Tom-Select CSS -->
@ -91,13 +91,23 @@
{% block contents %}
{% endblock contents %}
</div>
<footer class="page-footer font-small blue">
<hr class="hr" />
<div class="text-center ">
<span class="bi-person-up" role="button" aria-label="funnel-fill"></span>
Website unique visits: <strong>{{ visits }}</strong>
</div>
<div class="footer-copyright text-center py-3">
<span class="copyleft">&copy;</span> Copyleft:
<span id="copyDate"></span>
&nbsp;
<a href="https://github.com/coulisse/spiderweb/" target="blank" rel="noopener">IU1BOW - Spiderweb</a>
<span id="version">v2.5.3</span>
</div>
&nbsp;
<span id="version">v2.5.4</span>
</div>
</footer>
<script async src="static/js/rel/load-sw.min.js"></script>
<script nonce="{{ inline_script_nonce }}">
@ -108,15 +118,14 @@
<script defer src="static/js/rel/common.min.js"></script>
<!-- Bootstrap -->
<script defer src="https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/js/bootstrap.bundle.min.js"
integrity="sha384-kenU1KFdBIe4zVF0s0G1M5b4hcpxyD9F7jL+jjXkk+Q2h455rYXK/7HAuoJl+0I4"
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/bootstrap/5.3.3/js/bootstrap.bundle.min.js"
integrity="sha512-7Pi/otdlbbCR+LnW+F7PwFcSDJOuUJB3OxtEHbg4vSMvzvJjde4Po1v4BR9Gdc9aXNUNFVUY+SK51wWT8WF0Gg=="
crossorigin="anonymous"></script>
<!-- Tom-select library -->
<script defer src="https://cdn.jsdelivr.net/npm/tom-select@2.3.1/dist/js/tom-select.complete.min.js"
integrity="sha384-cnROoUgVILyibe3J0zhzWoJ9p2WmdnK7j/BOTSWqVDbC1pVw2d+i6Q/1ESKJKCYf"
crossorigin="anonymous"></script>
</body>
{% block app_scripts %}
<script async src="static/js/rel/callsign_search.min.js"></script>

View File

@ -49,7 +49,11 @@
<div class="container-fluid">
<div class="shadow-lg mb-5 bg-body rounded">
<strong>Physically connected callsigns to {{ mycallsign }}</strong>
<strong>{{ mycallsign }} telnet nodes & users online. </strong>
<br>
Nodes: <strong>{{ who|selectattr('type', 'equalto', 'NODE DXSP')|map(attribute='type')|map('upper')|list|length }}</strong>
<br>
Users: <strong> {{ who|selectattr('type', 'equalto', 'USER EXT')|map(attribute='type')|map('upper')|list|length }}</strong>
<hr>
<table class="table table-striped table-borderless table-sm text-responsive table-hover">
<thead id="telnet-thead">
@ -70,7 +74,7 @@
<td class="d-none d-lg-table-cell d-xl-table-cell">{{dict_item["name"]}}</td>
<td>{{dict_item["average_rtt"]}}</td>
</tr>
{% endfor %}
{% endfor %}
</tbody>
</table>
</div>
@ -86,8 +90,8 @@ var band_frequencies={{bands["bands"]|tojson|safe}};
{% block app_scripts %}
{{ super() }}
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/echarts/5.4.3/echarts.min.js"
integrity="sha512-EmNxF3E6bM0Xg1zvmkeYD3HDBeGxtsG92IxFt1myNZhXdCav9MzvuH/zNMBU1DmIPN6njrhX1VTbqdJxQ2wHDg=="
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/echarts/5.5.0/echarts.min.js"
integrity="sha512-k37wQcV4v2h6jgYf5IUz1MoSKPpDs630XGSmCaCCOXxy2awgAWKHGZWr9nMyGgk3IOxA1NxdkN8r1JHgkUtMoQ=="
crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<script defer src="static/js/rel/plot.min.js"></script>
{% endblock app_scripts %}

View File

@ -17,6 +17,7 @@ import requests
import xmltodict
from lib.qry_builder import query_build, query_build_callsign, query_build_callsing_list
logging.config.fileConfig("cfg/webapp_log_config.ini", disable_existing_loggers=True)
logger = logging.getLogger(__name__)
logger.info("Starting SPIDERWEB")
@ -48,6 +49,7 @@ else:
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
# load config file
with open("cfg/config.json") as json_data_file:
cfg = json.load(json_data_file)
@ -66,6 +68,31 @@ with open("cfg/modes.json") as json_modes:
with open("cfg/continents.json") as json_continents:
continents_cq = json.load(json_continents)
#load visitour counter
visits_file_path = "data/visits.json"
try:
# Load the visits data from the file
with open(visits_file_path) as json_visitors:
visits = json.load(json_visitors)
except FileNotFoundError:
# If the file does not exist, create an empty visits dictionary
visits = {}
#save visits
def save_visits():
with open(visits_file_path, "w") as json_file:
json.dump(visits, json_file)
logging.info('visit saved on: '+ visits_file_path)
# saving scheduled
def schedule_save():
save_visits()
threading.Timer(1000, schedule_save).start()
# Start scheduling
schedule_save()
# read and set default for enabling cq filter
if cfg.get("enable_cq_filter"):
enable_cq_filter = cfg["enable_cq_filter"].upper()
@ -90,7 +117,6 @@ def spotquery(parameters):
else:
logging.debug('search eith other filters')
query_string = query_build(logger,parameters,band_frequencies,modes_frequencies,continents_cq,enable_cq_filter)
qm.qry(query_string)
data = qm.get_data()
row_headers = qm.get_headers()
@ -125,9 +151,11 @@ def get_adxo():
adxo_events = get_adxo_events()
threading.Timer(12 * 3600, get_adxo).start()
get_adxo()
# create data provider for charts
heatmap_cbp = ContinentsBandsProvider(logger, qm, continents_cq, band_frequencies)
bar_graph_spm = SpotsPerMounthProvider(logger, qm)
@ -160,9 +188,21 @@ def get_nonce():
inline_script_nonce = secrets.token_hex()
return inline_script_nonce
#check if it is a unique visitor
def visitor_count():
user_ip =request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
if user_ip not in visits:
visits[user_ip] = 1
else:
visits[user_ip] += 1
@app.route("/", methods=["GET"])
@app.route("/index.html", methods=["GET"])
def spots():
visitor_count();
response = flask.Response(
render_template(
"index.html",
@ -171,6 +211,7 @@ def spots():
telnet=cfg["telnet"]["host"]+":"+cfg["telnet"]["port"],
mail=cfg["mail"],
menu_list=cfg["menu"]["menu_list"],
visits=len(visits),
enable_cq_filter=enable_cq_filter,
timer_interval=cfg["timer"]["interval"],
adxo_events=adxo_events,
@ -184,8 +225,9 @@ def spots():
#Show all dx spot callsigns
def get_dx_calls():
try:
query_string = query_build_callsing_list
query_string = query_build_callsing_list()
qm.qry(query_string)
data = qm.get_data()
row_headers = qm.get_headers()
@ -211,7 +253,8 @@ def sw():
def root():
return app.send_static_file("html/offline.html")
@app.route("/world.json")
#used for plots
@app.route("/world.json")
def world_data():
return app.send_static_file("data/world.json")
@ -226,6 +269,7 @@ def plots():
telnet=cfg["telnet"]["host"]+":"+cfg["telnet"]["port"],
mail=cfg["mail"],
menu_list=cfg["menu"]["menu_list"],
visits=len(visits),
who=whoj,
continents=continents_cq,
bands=band_frequencies,
@ -257,6 +301,7 @@ def propagation():
telnet=cfg["telnet"]["host"]+":"+cfg["telnet"]["port"],
mail=cfg["mail"],
menu_list=cfg["menu"]["menu_list"],
visits=len(visits),
solar_data=solar_data
)
)
@ -274,6 +319,7 @@ def cookies():
telnet=cfg["telnet"]["host"]+":"+cfg["telnet"]["port"],
mail=cfg["mail"],
menu_list=cfg["menu"]["menu_list"],
visits=len(visits),
)
)
return response
@ -288,6 +334,7 @@ def privacy():
telnet=cfg["telnet"]["host"]+":"+cfg["telnet"]["port"],
mail=cfg["mail"],
menu_list=cfg["menu"]["menu_list"],
visits=len(visits),
)
)
return response
@ -309,6 +356,7 @@ def callsign():
telnet=cfg["telnet"]["host"]+":"+cfg["telnet"]["port"],
mail=cfg["mail"],
menu_list=cfg["menu"]["menu_list"],
visits=len(visits),
timer_interval=cfg["timer"]["interval"],
callsign=callsign,
adxo_events=adxo_events,
@ -334,7 +382,7 @@ def find_callsign():
def get_heatmap_data():
#continent = request.args.get("continent")
continent = request.json['continent']
logger.debug(request.get_json());
logger.debug(request.get_json())
response = flask.Response(json.dumps(heatmap_cbp.get_data(continent)))
logger.debug(response)
if response is None: