Add ability to retrieve data from a user or site directory
.. thus ending the requirement to make releases to use new data

parent 968a8188d7
commit 6153b923b5

8 changed files with 129 additions and 71 deletions

.github/workflows/cron.yml (4 changes, vendored)

@@ -31,7 +31,7 @@ jobs:
         run: pip install -r requirements-dev.txt

       - name: Update DUT1 data
-        run: python tools/iers2py.py
+        run: python -m wwvb.updateiers --dist

       - name: Test (python 3.9)
         run: python -munittest

@@ -41,4 +41,4 @@ jobs:
          git config user.name "${GITHUB_ACTOR} (github actions cron)"
          git config user.email "${GITHUB_ACTOR}@users.noreply.github.com"
          git remote set-url --push origin https://${GITHUB_ACTOR}:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}
-         if git commit -m"update iersdata" wwvb/iersdata.py; then git push origin HEAD:main; fi
+         if git commit -m"update iersdata" wwvb/iersdata_dist.py; then git push origin HEAD:main; fi
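
Note: the nightly job previously regenerated wwvb/iersdata.py via
tools/iers2py.py. It now runs the packaged updater with --dist and commits only
the shipped copy, wwvb/iersdata_dist.py; users who want fresher data can run
`python -m wwvb.updateiers --user` locally instead of waiting for a release
(see the click options added in wwvb/updateiers.py below).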

pyproject.toml

@@ -4,9 +4,14 @@

 [build-system]
 requires = [
+    "beautifulsoup4",
+    "click",
+    "requests",
+    "platformdirs",
+    "python-dateutil",
     "setuptools>=45",
+    "setuptools_scm[toml]>=6.0",
     "wheel",
-    "setuptools_scm[toml]>=6.0"
 ]
 build-backend = "setuptools.build_meta"
 [tool.setuptools_scm]
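
Note: the added build requirements mirror the imports of the reworked updater
(requests and beautifulsoup4 to fetch the source data, click for the command
line, platformdirs for the user/site directories), presumably so the shipped
data module can be regenerated at build time.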

setup.cfg

@@ -23,12 +23,16 @@ python_requires = >=3.7
 py_modules = uwwvb
 install_requires =
     adafruit-circuitpython-datetime
+    beautifulsoup4
+    click
+    platformdirs
+    requests
     python-dateutil

 [options.entry_points]
 console_scripts =
     wwvbgen = wwvb.gen:main
     dut1table = wwvb.dut1table:main
+    updateiers = wwvb.updateiers:main
 gui_scripts =
     wwvbtk = wwvb.wwvbtk:main

wwvb/__init__.py

@@ -10,6 +10,7 @@ import collections
 import datetime
 import enum
 import math
+import warnings
 from typing import List, Tuple

 from . import iersdata

@@ -25,12 +26,26 @@ def _date(dt):
     return dt


+def maybe_warn_update(dt):  # pragma no cover
+    """Maybe print a notice to run updateiers, if it seems useful to do so."""
+    # We already know this date is not covered.
+    # If the date is less than 330 days after today, there should be (possibly prospective) data available now.
+    today = datetime.date.today()
+    if dt < today + datetime.timedelta(days=330):
+        warnings.warn(
+            "Note: Running `updateiers` may provide better DUT1 and LS information"
+        )
+
+
 def get_dut1(dt):
     """Return the DUT1 number for the given timestamp"""
-    i = (_date(dt) - iersdata.DUT1_DATA_START).days
+    dt = _date(dt)
+    i = (dt - iersdata.DUT1_DATA_START).days
     if i < 0:
         v = iersdata.DUT1_OFFSETS[0]
     elif i >= len(iersdata.DUT1_OFFSETS):
+        maybe_warn_update(dt)
         v = iersdata.DUT1_OFFSETS[-1]
     else:
         v = iersdata.DUT1_OFFSETS[i]
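
Note: the lookup stays a plain day index into the run-length table, clamped at
both ends; only dates past the end of the table trigger the new warning. A
minimal sketch of the indexing (the date is arbitrary, names as in the diff):

    import datetime
    from wwvb import iersdata

    dt = datetime.date(2020, 1, 1)
    i = (dt - iersdata.DUT1_DATA_START).days  # days since the table start
    ch = iersdata.DUT1_OFFSETS[i]             # one letter 'd'..'s' per day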

wwvb/iersdata.py

@@ -1,37 +1,23 @@
-# -*- python3 -*-
-"""File generated from public data - not subject to copyright"""
-# SPDX-FileCopyrightText: Public domain
-# SPDX-License-Identifier: CC0-1.0
-# fmt: off
-# pylint: disable=invalid-name
-import datetime
-__all__ = ['DUT1_DATA_START', 'DUT1_OFFSETS']
-DUT1_DATA_START = datetime.date(1972, 6, 1)
-d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s = 'defghijklmnopqrs'
-DUT1_OFFSETS = str( # 19720601
-    i*30+s*203+r*31+q*29+p*28+o*30+n*36+m*40+l*39+k*33+j*31+i*31 # 19731231
-    +h*18+r*19+q*38+p*32+o*31+n*33+m*48+l*45+k*37+j*33+i*34+h*15 # 19750122
-    +r*22+q*34+p*33+o*34+n*37+m*49+l*45+k*36+j*32+i*36+h*7+r*28 # 19760301
-    +q*33+p*32+o*30+n*33+m*42+l*42+k*34+j*29+i*33+h*30+r*6+q*36 # 19770317
-    +p*34+o*31+n*32+m*42+l*51+k*37+j*32+i*33+h*31+q*32+p*29+o*29 # 19780430
-    +n*30+m*32+l*47+k*47+j*36+i*33+h*32+g*18+q*16+p*35+o*33+n*32 # 19790531
-    +m*35+l*45+k*51+j*39+i*39+h*38+g*2+q*40+p*39+o*38+n*43+m*57 # 19800923
-    +l*50+k*39+j*42+i*41+h*43+g*37+f*39+e*39+o*19+n*62+m*43+l*45 # 19820202
-    +k*48+j*44+i*39+h*44+g*21+q*44+p*48+o*43+n*41+m*36+l*34+k*34 # 19830514
-    +j*38+i*47+s+r*64+q*50+p*42+o*56+n*57+m*52+l*100+k*61+j*62 # 19850302
-    +i*66+h*52+g*67+f+p*103+o*56+n*68+m*69+l*107+k*82+j*72+i*67 # 19870518
-    +h*63+g*113+f*63+e*51+o*11+n*60+m*59+l*121+k*71+j*71+i*67 # 19890531
-    +h*57+g*93+f*61+e*48+d*12+n*41+m*44+l*46+k*61+j*66+i*47+h*45 # 19901231
-    +g*15+q*32+p*44+o*41+n*48+m*74+l*49+k*45+j*44+i*40+h*37+g*38 # 19920625
-    +f*50+e*5+o*60+n*49+m*40+l*40+k*38+j*38+i*36+h*39+g*25+q*31 # 19930919
-    +p*50+o*41+n*41+m*43+l*41+k*39+j*40+i*39+s*24+r*57+q*43+p*41 # 19950120
-    +o*39+n*38+m*35+l*37+k*43+j*69+i*44+h*42+g*37+q*4+p*51+o*45 # 19960523
-    +n*44+m*69+l*70+k*50+j*54+i*53+h*40+g*49+f*18+p*59+o*53+n*52 # 19980206
-    +m*57+l*48+k*53+j*127+i*70+h*30+r*62+q*79+p*152+o*82+n*106 # 20001026
-    +m*184+l*125+k*217+j*133+i*252+h*161+g*392+f*322+e*290+n*116 # 20060927
-    +m*154+l*85+k*83+j*91+i*168+h*105+g*147+f*105+e*42+o*70+n*91 # 20091111
-    +m*154+l*119+k*84+j*217+i*126+h*176+g*97+f*91+e*52+o*116 # 20130130
-    +n*98+m*70+l*133+k*91+j*91+i*77+h*140+g*91+f*84+e*70+d*34 # 20150910
-    +n*72+m*76+l*66+k*53+j*56+i*105+h*77+g*45+q*25+p*63+o*91 # 20171129
-    +n*154+m*105+l*190+k*118+j*105+i*853+j*353+k*15 # 20220903
-)
+"""Retrieve iers data, possibly from user or site data or from the wwvbpy distribution"""
+
+# Copyright (C) 2021 Jeff Epler <jepler@gmail.com>
+# SPDX-FileCopyrightText: 2021 Jeff Epler
+#
+# SPDX-License-Identifier: GPL-3.0-only
+
+import os
+import platformdirs
+
+__all__ = ["DUT1_DATA_START", "DUT1_OFFSETS"]
+from .iersdata_dist import DUT1_DATA_START, DUT1_OFFSETS
+
+for location in [
+    platformdirs.user_data_dir("wwvbpy", "unpythonic.net"),
+    platformdirs.site_data_dir("wwvbpy", "unpythonic.net"),
+]:  # pragma no cover
+    filename = os.path.join(location, "wwvbpy_iersdata.py")
+    if os.path.exists(filename):
+        with open(filename, encoding="utf-8") as f:
+            exec(f.read(), globals(), globals())  # pylint: disable=exec-used
+        break
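
Note: the loader imports the shipped table from iersdata_dist, then lets a
wwvbpy_iersdata.py found in the platformdirs user or site data directory
override it via exec. A hypothetical override file as `updateiers --user`
would produce it; the path and the truncated data below are illustrative
assumptions, only the variable names match the diff:

    # e.g. ~/.local/share/wwvbpy/wwvbpy_iersdata.py (assumed Linux path)
    import datetime
    DUT1_DATA_START = datetime.date(1972, 6, 1)
    DUT1_OFFSETS = 'i' * 30 + 's' * 203  # truncated; one letter per day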

wwvb/iersdata_dist.py (new file, 37 lines)

@@ -0,0 +1,37 @@
+# -*- python3 -*-
+"""File generated from public data - not subject to copyright"""
+# SPDX-FileCopyrightText: Public domain
+# SPDX-License-Identifier: CC0-1.0
+# fmt: off
+# pylint: disable=invalid-name
+import datetime
+__all__ = ['DUT1_DATA_START', 'DUT1_OFFSETS']
+DUT1_DATA_START = datetime.date(1972, 6, 1)
+d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s = 'defghijklmnopqrs'
+DUT1_OFFSETS = str( # 19720601
+    i*30+s*203+r*31+q*29+p*28+o*30+n*36+m*40+l*39+k*33+j*31+i*31 # 19731231
+    +h*18+r*19+q*38+p*32+o*31+n*33+m*48+l*45+k*37+j*33+i*34+h*15 # 19750122
+    +r*22+q*34+p*33+o*34+n*37+m*49+l*45+k*36+j*32+i*36+h*7+r*28 # 19760301
+    +q*33+p*32+o*30+n*33+m*42+l*42+k*34+j*29+i*33+h*30+r*6+q*36 # 19770317
+    +p*34+o*31+n*32+m*42+l*51+k*37+j*32+i*33+h*31+q*32+p*29+o*29 # 19780430
+    +n*30+m*32+l*47+k*47+j*36+i*33+h*32+g*18+q*16+p*35+o*33+n*32 # 19790531
+    +m*35+l*45+k*51+j*39+i*39+h*38+g*2+q*40+p*39+o*38+n*43+m*57 # 19800923
+    +l*50+k*39+j*42+i*41+h*43+g*37+f*39+e*39+o*19+n*62+m*43+l*45 # 19820202
+    +k*48+j*44+i*39+h*44+g*21+q*44+p*48+o*43+n*41+m*36+l*34+k*34 # 19830514
+    +j*38+i*47+s+r*64+q*50+p*42+o*56+n*57+m*52+l*100+k*61+j*62 # 19850302
+    +i*66+h*52+g*67+f+p*103+o*56+n*68+m*69+l*107+k*82+j*72+i*67 # 19870518
+    +h*63+g*113+f*63+e*51+o*11+n*60+m*59+l*121+k*71+j*71+i*67 # 19890531
+    +h*57+g*93+f*61+e*48+d*12+n*41+m*44+l*46+k*61+j*66+i*47+h*45 # 19901231
+    +g*15+q*32+p*44+o*41+n*48+m*74+l*49+k*45+j*44+i*40+h*37+g*38 # 19920625
+    +f*50+e*5+o*60+n*49+m*40+l*40+k*38+j*38+i*36+h*39+g*25+q*31 # 19930919
+    +p*50+o*41+n*41+m*43+l*41+k*39+j*40+i*39+s*24+r*57+q*43+p*41 # 19950120
+    +o*39+n*38+m*35+l*37+k*43+j*69+i*44+h*42+g*37+q*4+p*51+o*45 # 19960523
+    +n*44+m*69+l*70+k*50+j*54+i*53+h*40+g*49+f*18+p*59+o*53+n*52 # 19980206
+    +m*57+l*48+k*53+j*127+i*70+h*30+r*62+q*79+p*152+o*82+n*106 # 20001026
+    +m*184+l*125+k*217+j*133+i*252+h*161+g*392+f*322+e*290+n*116 # 20060927
+    +m*154+l*85+k*83+j*91+i*168+h*105+g*147+f*105+e*42+o*70+n*91 # 20091111
+    +m*154+l*119+k*84+j*217+i*126+h*176+g*97+f*91+e*52+o*116 # 20130130
+    +n*98+m*70+l*133+k*91+j*91+i*77+h*140+g*91+f*84+e*70+d*34 # 20150910
+    +n*72+m*76+l*66+k*53+j*56+i*105+h*77+g*45+q*25+p*63+o*91 # 20171129
+    +n*154+m*105+l*190+k*118+j*105+i*853+j*353+k*15 # 20220903
+)
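
Note: the table is run-length encoded by string repetition; each letter is one
day's DUT1 code, so i*30 contributes 30 consecutive days of 'i', and the
trailing comments date each row. The decoding arithmetic, in plain Python:

    i, s = "is"
    prefix = i * 30 + s * 203  # the first two runs of the table
    assert len(prefix) == 233  # covers 233 days starting 1972-06-01
    assert prefix[0] == "i" and prefix[30] == "s"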

@@ -28,7 +28,7 @@ class WWVBTestCase(unittest.TestCase):
     """Generate a test case for each expected output in tests/"""
     for test in glob.glob("tests/*"):
         with self.subTest(test=test):
-            with open(test) as f:
+            with open(test, "rt", encoding="utf-8") as f:
                 text = f.read()
             lines = [line for line in text.split("\n") if not line.startswith("#")]
             while not lines[0]:

wwvb/updateiers.py

@@ -4,25 +4,27 @@
 #
 # SPDX-License-Identifier: GPL-3.0-only

-"""Update the content of 'iersdata.py' based on online sources"""
+"""Update the DUT1 and LS data based on online sources"""

 import csv
 import datetime
+import io
 import itertools
 import os
-import sys
 import pathlib
 import bs4
+import click
+import platformdirs
 import requests

-sys.path.insert(0, str(pathlib.Path(__file__).parent.parent))
-try:
-    import wwvb.iersdata
+DIST_PATH = str(pathlib.Path(__file__) / "iersdata_dist.py")

-    print(wwvb.iersdata.__file__)
-    OLD_TABLE_START = wwvb.iersdata.DUT1_DATA_START
+try:
+    import wwvb.iersdata_dist
+
+    OLD_TABLE_START = wwvb.iersdata_dist.DUT1_DATA_START
     OLD_TABLE_END = OLD_TABLE_START + datetime.timedelta(
-        days=len(wwvb.iersdata.DUT1_OFFSETS) - 1
+        days=len(wwvb.iersdata_dist.DUT1_OFFSETS) - 1
     )
 except (ImportError, NameError) as e:
     OLD_TABLE_START = OLD_TABLE_END = None

@@ -31,28 +33,14 @@ IERS_URL = "https://datacenter.iers.org/data/csv/finals2000A.all.csv"
 NIST_URL = "https://www.nist.gov/pml/time-and-frequency-division/atomic-standards/leap-second-and-ut1-utc-information"


-def open_url_with_cache(url, cache):
-    """Fetch the content of a URL, storing it in a cache, returning it as a file"""
-    if not os.path.exists(cache):
-        with requests.get(url) as f:
-            text = f.text
-        with open(cache, "w") as f:
-            f.write(text)
-    return open(cache, "r")  # pylint: disable=consider-using-with
-
-
-def read_url_with_cache(url, cache):
-    """Read the content of a URL, returning it as a string"""
-    with open_url_with_cache(url, cache) as f:
-        return f.read()
-
-
-def main():  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
+def update_iersdata(
+    target_file,
+):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
     """Update iersdata.py"""

     offsets = []
-    with open_url_with_cache(IERS_URL, "iersdata.csv") as iers_data:
-        for r in csv.DictReader(iers_data, delimiter=";"):
+    with requests.get(IERS_URL) as iers_data:
+        for r in csv.DictReader(io.StringIO(iers_data.text), delimiter=";"):
             jd = float(r["MJD"])
             offs_str = r["UT1-UTC"]
             if not offs_str:

@@ -71,7 +59,7 @@ def main():  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
     table_start = datetime.date(1972, 6, 1)
     offsets.append(offs)

-    wwvb_text = read_url_with_cache(NIST_URL, "wwvbdata.html")
+    wwvb_text = requests.get(NIST_URL).text
     wwvb_data = bs4.BeautifulSoup(wwvb_text, features="html.parser")
     wwvb_dut1_table = wwvb_data.findAll("table")[2]
     assert wwvb_dut1_table

@@ -110,7 +98,7 @@ def main():  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
     # modified timestamp of the NIST data.
     patch(wwvb_start, wwvb_data_stamp + datetime.timedelta(days=1), wwvb_dut1)

-    with open("wwvb/iersdata.py", "wt") as output:
+    with open(target_file, "wt", encoding="utf-8") as output:

         def code(*args):
             """Print to the output file"""

@@ -161,5 +149,28 @@ def main():  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
     print(f"iersdata covers {table_start} .. {table_end}")


+def iersdata_path(callback):
+    """Find out the path for this directory"""
+    return callback("wwvbpy", "unpythonic.net")
+
+
+@click.command()
+@click.option(
+    "--user",
+    "location",
+    flag_value=iersdata_path(platformdirs.user_data_dir),
+    default=iersdata_path(platformdirs.user_data_dir),
+)
+@click.option("--dist", "location", flag_value=DIST_PATH)
+@click.option(
+    "--site", "location", flag_value=iersdata_path(platformdirs.site_data_dir)
+)
+def main(location):
+    """Update DUT1 data"""
+    print("will write to", location)
+    os.makedirs(location, exist_ok=True)
+    update_iersdata(os.path.join(location, "wwvbpy_iersdata.py"))
+
+
 if __name__ == "__main__":
-    main()
+    main()  # pylint: disable=no-value-for-parameter
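
Note: the refactor splits mechanism from policy: update_iersdata(target_file)
fetches the IERS and NIST data and rewrites one data file, while the
click-decorated main() only selects the destination (--user by default,
--site for a machine-wide location, --dist for the copy inside the installed
package) and writes wwvbpy_iersdata.py there, the same file name that
wwvb/iersdata.py searches for at import time. With the new console_scripts
entry this is also reachable as `updateiers --user`.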