Commit 61cc5d99 authored by matt24smith

improved error handling for websocket client when setting query range

parent 6e561028
Pipeline #5655 passed with stages in 6 minutes and 35 seconds
Showing with 50 additions and 32 deletions
[package]
version = "1.3.143"
version = "1.3.144"
name = "aisdb"
authors = [ "MERIDIAN // Matt Smith matthew.smith@dal.ca",]
edition = "2021"
......
@@ -172,7 +172,6 @@ class DBQuery(UserDict):
def gen_qry(self,
fcn=sqlfcn.crawl_dynamic,
printqry=False,
-force_reaggregate_static=False,
verbose=False):
''' queries the database using the supplied SQL function and dbpath.
@@ -237,7 +236,7 @@ class DBQuery(UserDict):
'''
qry = fcn(dbpath=dbpath, **self.data)
-if printqry:
+if verbose:
print(qry)
# get 500k rows at a time, yield sets of rows for each unique MMSI
@@ -247,7 +246,7 @@ class DBQuery(UserDict):
_ = cur.execute(qry)
res = cur.fetchmany(10**5)
delta = datetime.now() - dt
-if printqry:
+if verbose:
print(
f'query time: {delta.total_seconds():.2f}s\nfetching rows...'
)
@@ -293,7 +292,6 @@ class DBQuery_async(DBQuery):
async def gen_qry(self,
fcn=sqlfcn.crawl_dynamic,
printqry=False,
-force_reaggregate_static=False,
verbose=False):
@@ -318,7 +316,7 @@ class DBQuery_async(DBQuery):
aggregate_static_msgs(syncdb, [month], verbose=verbose)
qry = fcn(dbpath='main', **self)
-if printqry:
+if verbose:
print(qry)
cursor = await self.dbconn.execute(qry)
mmsi_rows = []
......
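A minimal usage sketch of the renamed flag (import paths and constructor keywords are assumptions, following the tests updated below): verbose=True now controls the query/timing printout that printqry used to gate.

    # hedged sketch -- import paths and constructor kwargs are assumptions
    from datetime import datetime
    from aisdb import DBQuery
    from aisdb.database import sqlfcn, sqlfcn_callbacks

    qry = DBQuery(dbpath='ais.db',
                  start=datetime(2021, 1, 1),
                  end=datetime(2021, 1, 2),
                  callback=sqlfcn_callbacks.in_timerange_validmmsi)
    # verbose=True prints the generated SQL and the fetch timing
    rowgen = qry.gen_qry(fcn=sqlfcn.crawl_dynamic, verbose=True)
    for rows in rowgen:
        print(len(rows))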
@@ -39,7 +39,7 @@ def interp_time(tracks, step=timedelta(minutes=10)):
step=int(step.total_seconds()),
).astype(int)
-assert len(intervals) >= 2
+assert len(intervals) >= 1
itr = dict(
**{k: track[k]
......
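The relaxed assertion admits tracks whose time span is shorter than one interpolation step. A small sketch of the arithmetic, assuming the intervals are built with numpy.arange as the surrounding lines suggest:

    import numpy as np
    from datetime import timedelta

    step = timedelta(minutes=10)
    track_time = [1000, 1000]   # degenerate track: start == end (epoch seconds)
    intervals = np.arange(start=track_time[0],
                          stop=track_time[-1] + int(step.total_seconds()),
                          step=int(step.total_seconds()),
                          ).astype(int)
    print(intervals)            # [1000] -> a single interpolation timestamp
    assert len(intervals) >= 1  # the old check (>= 2) rejected this track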
@@ -289,7 +289,7 @@ def _aggregate_output(outputfile, tmp_dir, filters=[lambda row: False]):
def pipeline_callback(tracks, *, domain, tmp_dir, trafficDBpath, maxdelta,
distance_threshold, speed_threshold, minscore,
-interp_delta, shoredist_raster):
+interp_delta, shoredist_raster, **kw):
# pipeline configuration from arguments
serialize_CSV = partial(_serialize_network_edge,
domain=domain,
@@ -354,7 +354,9 @@ def graph(qry,
distance_threshold=250000,
interp_delta=timedelta(hours=1),
minscore=0,
-pipeline_callback=pipeline_callback):
+pipeline_callback=pipeline_callback,
+qryfcn=sqlfcn.crawl_dynamic_static,
+verbose=False):
''' Compute network graph of vessel movements within domain zones.
Zone polygons will be used as network nodes, with graph edges
represented by movements between zones.
@@ -484,7 +486,7 @@ def graph(qry,
print(f'network graph {tmp_dir = }')
print(f'\n{domain.name=} {domain.boundary=}')
-rowgen = qry.gen_qry(fcn=sqlfcn.crawl_dynamic_static)
+rowgen = qry.gen_qry(fcn=qryfcn, verbose=verbose)
tracks = serialize_tracks(
bathy.merge_tracks(pdist.get_distance(TrackGen(rowgen))))
fcn = partial(
......
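The **kw catch-all on pipeline_callback lets graph() forward its whole keyword bundle, now including the new qryfcn and verbose options, without the callback having to name every key. A generic sketch of the pattern (not the actual callback body):

    def pipeline_callback(tracks, *, domain, maxdelta, **kw):
        # extra keys such as verbose or qryfcn land in kw and are simply ignored
        return list(tracks)

    config = dict(domain='example', maxdelta=3600, verbose=True, qryfcn=None)
    pipeline_callback([], **config)   # no TypeError for the unrecognized keys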
@@ -82,6 +82,9 @@ def test_shiftcoord():
xshift = shiftcoord(x)
assert sum(xshift == np.array([0, 90, 180, -90, 0, 90, -180, -90, 0])) == 9
+x2 = np.array([-200, -190, -181, -180, -179, -170, -160])
+xshift2 = shiftcoord(x2)
def test_distance3D():
x1, y1 = -45, 50
......
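The added x2 case exercises longitudes just past the antimeridian. A sketch of the wrapping shiftcoord is expected to perform on such values (the sign convention at exactly ±180 may differ):

    import numpy as np

    x2 = np.array([-200, -190, -181, -180, -179, -170, -160])
    wrapped = ((x2 + 180) % 360) - 180
    print(wrapped)   # [ 160  170  179 -180 -179 -170 -160]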
@@ -31,7 +31,7 @@ def test_TrackGen(tmpdir):
end=end,
callback=sqlfcn_callbacks.valid_mmsi,
)
-rowgen = qry.gen_qry(printqry=True)
+rowgen = qry.gen_qry(verbose=True)
tracks = track_gen.TrackGen(rowgen)
for track in tracks:
@@ -59,7 +59,7 @@ def test_min_speed_filter(tmpdir):
end=end,
callback=sqlfcn_callbacks.in_timerange_validmmsi,
)
-rowgen = qry.gen_qry(printqry=True)
+rowgen = qry.gen_qry(verbose=True)
tracks = vesseltrack_3D_dist(
mask_in_radius_2D(min_speed_filter(encode_greatcircledistance(
track_gen.TrackGen(rowgen),
......
@@ -43,7 +43,7 @@ def test_write_csv_rows(tmpdir):
end=end,
callback=sqlfcn_callbacks.in_timerange_validmmsi,
)
-rowgen = qry.gen_qry(printqry=True)
+rowgen = qry.gen_qry(verbose=True)
aisdb.proc_util.write_csv_rows(
rowgen,
pathname=os.path.join(
@@ -68,7 +68,7 @@ def test_write_csv_fromdict(tmpdir):
callback=sqlfcn_callbacks.in_timerange_validmmsi,
)
-rowgen = qry.gen_qry(fcn=sqlfcn.crawl_dynamic, printqry=True)
+rowgen = qry.gen_qry(fcn=sqlfcn.crawl_dynamic, verbose=True)
tracks = track_gen.TrackGen(rowgen)
aisdb.proc_util.write_csv(tracks,
fpath=os.path.join(tmpdir,
@@ -98,7 +98,7 @@ def test_write_csv_fromdict_marinetraffic(tmpdir):
'ymax': 50,
})
-rowgen = qry.gen_qry(fcn=sqlfcn.crawl_dynamic_static, printqry=True)
+rowgen = qry.gen_qry(fcn=sqlfcn.crawl_dynamic_static, verbose=True)
tracks = vessel_info(track_gen.TrackGen(rowgen), trafficDBpath)
aisdb.proc_util.write_csv(tracks,
fpath=os.path.join(tmpdir,
......
@@ -26,7 +26,7 @@ def test_interp(tmpdir):
end=end,
callback=sqlfcn_callbacks.in_timerange_validmmsi,
)
-rowgen = qry.gen_qry(printqry=True)
+rowgen = qry.gen_qry(verbose=True)
tracks = interp_time(
track_gen.TrackGen(rowgen),
step=timedelta(hours=0.5),
......
@@ -49,7 +49,7 @@ def test_retrieve_marinetraffic_data(tmpdir):
trafficDBpath=trafficDBpath,
boundary=domain.boundary,
retry_404=False)
-rowgen = qry.gen_qry(printqry=True)
+rowgen = qry.gen_qry(verbose=True)
tracks = track_gen.TrackGen(rowgen)
try:
......
@@ -43,7 +43,7 @@ def test_wetted_surface_area_regression_marinetraffic(tmpdir):
end=end,
callback=sqlfcn_callbacks.in_timerange_validmmsi,
)
-rowgen = qry.gen_qry(fcn=sqlfcn.crawl_dynamic_static, printqry=True)
+rowgen = qry.gen_qry(fcn=sqlfcn.crawl_dynamic_static, verbose=True)
tracks = vessel_info(
encode_greatcircledistance(
track_gen.TrackGen(rowgen),
......
@@ -94,8 +94,7 @@ class Gebco():
for lon, lat in zip(track['lon'], track['lat']):
if not (-180 <= lon <= 180) or not (-90 <= lat <=
90): # pragma: no cover
-warnings.warn('coordinates out of range! '
-f'{lon=},{lat=}\t{track["mmsi"]=}')
+warnings.warn('coordinates out of range!')
lon = shiftcoord([lon])[0]
lat = shiftcoord([lat], rng=90)[0]
......
import asyncio
import calendar
import os
import ssl
import warnings
import websockets
import websockets.exceptions
......
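These imports support the commit's stated purpose: the websocket client can catch connection errors raised while the query range is being set. A hedged sketch of that kind of handler, not the client's actual code:

    import warnings
    import websockets.exceptions

    async def send_query_range(socket, msg):
        try:
            await socket.send(msg)
            return await socket.recv()
        except websockets.exceptions.ConnectionClosed as err:
            warnings.warn(f'connection closed while setting query range: {err}')
            return None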
[package]
name = "client"
version = "1.3.143"
version = "1.3.144"
edition = "2021"
authors = [ "MERIDIAN // Matt Smith matthew.smith@dal.ca",]
readme = "../readme.rst"
......
@@ -35,6 +35,8 @@ let newTrackFeature = null;
* of currently selected polygons
*/
async function setSearchAreaFromSelected() {
+let alt_xmin = 180;
+let alt_xmax = -180;
for (let ft of polySource.getFeatures()) {
if (ft.get('selected') === true) {
if (window.searcharea === null) {
@@ -43,12 +45,18 @@ async function setSearchAreaFromSelected() {
let coords = ft.getGeometry().clone()
.transform('EPSG:3857', 'EPSG:4326').getCoordinates()[0];
for (let point of coords) {
-if (point[0] < window.searcharea.minX) {
+if (ft.get('meta_str').includes('_b') && point[0] < alt_xmin) {
+alt_xmin = point[0];
+} else if (!ft.get('meta_str').includes('_c') && point[0] < window.searcharea.minX) {
window.searcharea.minX = point[0];
}
-if (point[0] > window.searcharea.maxX) {
+if (ft.get('meta_str').includes('_c') && point[0] > alt_xmax) {
+alt_xmax = point[0];
+} else if (!ft.get('meta_str').includes('_b') && point[0] > window.searcharea.maxX) {
window.searcharea.maxX = point[0];
}
if (point[1] < window.searcharea.minY) {
window.searcharea.minY = point[1];
}
@@ -58,6 +66,12 @@ async function setSearchAreaFromSelected() {
}
}
}
+if (alt_xmin !== 180) {
+window.searcharea.minX = alt_xmin;
+}
+if (alt_xmax !== -180) {
+window.searcharea.maxX = alt_xmax;
+}
}
/** initialize map layer and associated imports dynamically */
......
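The selection logic above treats zones tagged '_b' and '_c' in their meta_str (assumed here to be the two halves of a polygon split at the antimeridian) as sources of alternate extremes that override the naive min/max when such zones are selected. A Python sketch of the same bookkeeping:

    def search_area_bounds(features, minx=180.0, maxx=-180.0):
        # features: iterable of (meta_str, longitudes) pairs; bounds mirror
        # window.searcharea.minX / maxX from the JavaScript above
        alt_xmin, alt_xmax = 180.0, -180.0
        for meta_str, lons in features:
            for x in lons:
                if '_b' in meta_str and x < alt_xmin:
                    alt_xmin = x
                elif '_c' not in meta_str and x < minx:
                    minx = x
                if '_c' in meta_str and x > alt_xmax:
                    alt_xmax = x
                elif '_b' not in meta_str and x > maxx:
                    maxx = x
        if alt_xmin != 180.0:    # at least one '_b' zone was selected
            minx = alt_xmin
        if alt_xmax != -180.0:   # at least one '_c' zone was selected
            maxx = alt_xmax
        return minx, maxx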
{"name": "AISDB", "version": "1.3.143", "description": "AIS database and processing utils", "main": "server.js", "type": "module", "author": "MERIDIAN / Matt Smith", "scripts": {"dev": "vite map", "build": "vite build", "preview": "vite preview", "doc": "node docserver.js"}, "dependencies": {"express": ">=4.18.1", "flatpickr": "^4.6.13", "html2canvas": ">=1.4.1", "ol": ">=6.15.1", "vite": ">=2.9.15"}, "devDependencies": {"jsdoc": "^3.6.10"}}
\ No newline at end of file
{"name": "AISDB", "version": "1.3.144", "description": "AIS database and processing utils", "main": "server.js", "type": "module", "author": "MERIDIAN / Matt Smith", "scripts": {"dev": "vite map", "build": "vite build", "preview": "vite preview", "doc": "node docserver.js"}, "dependencies": {"express": ">=4.18.1", "flatpickr": "^4.6.13", "html2canvas": ">=1.4.1", "ol": ">=6.15.1", "vite": ">=2.9.15"}, "devDependencies": {"jsdoc": "^3.6.10"}}
\ No newline at end of file
@@ -114,7 +114,6 @@ services:
ports:
- "80:80"
- "443:443"
- "8082:8082"
- "${AISDBPORT:-9924}:${AISDBPORT:-9924}"
networks:
ipv6_private:
......
@@ -53,5 +53,6 @@ COPY --chown=ais_env aisdb_web/build_docs.sh aisdb_web/build_docs.sh
COPY --chown=ais_env aisdb_web/server_module.js aisdb_web/server_module.js
COPY --chown=ais_env aisdb_web/docserver.js aisdb_web/docserver.js
COPY --chown=ais_env docs/ docs/
+COPY --chown=ais_env aisdb_web/dist_coverage aisdb_web/dist_coverage
#COPY --chown=ais_env aisdb_web/package.json aisdb_web/package.json
RUN /bin/bash ./aisdb_web/build_docs.sh
@@ -2,6 +2,12 @@
Changelog
=========
+v1.3.144
+--------
+improved error handling for websocket client when setting query range
v1.3.143
--------
......
@@ -13,12 +13,9 @@ zones_dir = environ.get('AISDBZONES', '/home/ais_env/ais/zones/')
trafficDBpath = environ.get('AISDBMARINETRAFFIC',
'/home/ais_env/ais/marinetraffic.db')
-print(f'starting websocket\n{dbpath = }\n{zones_dir = }\n{trafficDBpath = }\n')
-domain = DomainFromTxts(domainName='example', folder=zones_dir)
+domain = DomainFromTxts(domainName='example',
+                        folder=zones_dir,
+                        correct_coordinate_range=False)
+print(f'starting websocket\n{dbpath = }\n{zones_dir = }\n{trafficDBpath = }\n')
serv = SocketServ(dbpath=dbpath, domain=domain, trafficDBpath=trafficDBpath)
asyncio.run(serv.main())
@@ -9,7 +9,7 @@ requires-python = ">=3.8"
requires-dist = [ "maturin>=0.12", "numpy", "wheel",]
dependencies = [ "aiosqlite", "orjson", "packaging", "pillow", "pysqlite3", "requests", "selenium", "shapely", "tqdm", "websockets", "numpy", "rasterio", "webdriver-manager",]
zip-safe = false
version = "1.3.143"
version = "1.3.144"
readme = "readme.rst"
description = "AIS Database and Processing Utils"
classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Science/Research", "License :: OSI Approved :: MIT License", "Operating System :: POSIX :: Linux", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Rust", "Programming Language :: SQL", "Programming Language :: JavaScript", "Topic :: Communications :: Ham Radio", "Topic :: Database :: Database Engines/Servers", "Topic :: Database :: Front-Ends", "Topic :: Scientific/Engineering :: GIS", "Topic :: Scientific/Engineering :: Information Analysis", "Topic :: Utilities",]
......