
Commit
read ephs in both
volodymyrss committed May 9, 2023
1 parent 13de385 commit b9debf5
Showing 3 changed files with 43 additions and 21 deletions.
@@ -22,6 +22,7 @@

from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)
+import re

__author__ = "Volodymyr Savchenko, Andrea Tramacere"

@@ -77,7 +78,7 @@ def __init__(self, message='Spiacs unknown exception', debug_message=''):
super(SpiacsUnknownException, self).__init__(message, debug_message)


-class SpiacsDispatcher(object):
+class SpiacsDispatcher:

def __init__(self, config=None, param_dict=None, instrument=None):
logger.info('--> building class SpiacsDispatcher instrument: %s config: %s', instrument, config)
@@ -179,14 +180,19 @@ def _run(self, data_server_url, param_dict):
dt_s=param_dict['dt_s'],
)

+url_ephs = data_server_url.replace("genlc/ACS", "ephs").rsplit('/', 1)[0].format(
+t0_isot=param_dict['t0_isot'],
+)

if param_dict['data_level'] == 'realtime':
url = url.replace("genlc/ACS", "rtlc") + "?json&prophecy"

logger.info("calling data server %s with %s", data_server_url, param_dict)
logger.info('calling GET on %s', url)

res = requests.get(url, params=param_dict)

+res_ephs = requests.get(url_ephs)

if len(res.content) < 8000: # typical length to avoid searching in long strings, which can not be errors of this kind
if 'this service are limited' in res.text or 'Over revolution' in res.text:
raise SpiacsAnalysisException(f"SPI-ACS backend refuses to process this request, due to resource constrain: {res.text}")
@@ -199,7 +205,7 @@ def _run(self, data_server_url, param_dict):
f'Spiacs Analysis error: {e}')


-return res
+return res, res_ephs

def run_query(self, call_back_url=None, run_asynch=False, logger=None, param_dict=None,):

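Note on the change above: the ephemeris URL is derived from the light-curve endpoint by swapping the genlc/ACS path component for ephs and dropping the trailing duration segment, leaving only the t0_isot placeholder to fill. A minimal sketch of the string manipulation, using a hypothetical base URL (the real address comes from the dispatcher configuration, not shown here):

# Hypothetical endpoint shape, for illustration only.
data_server_url = "https://dataserver.example/api/genlc/ACS/{t0_isot}/{dt_s}"

# genlc/ACS -> ephs, then drop the last path segment ({dt_s}).
url_ephs = data_server_url.replace("genlc/ACS", "ephs").rsplit('/', 1)[0].format(
    t0_isot="2023-05-09T00:00:00",
)
print(url_ephs)  # https://dataserver.example/api/ephs/2023-05-09T00:00:00

The ephemeris response is then fetched alongside the light curve and handed to the caller via the new (res, res_ephs) return tuple.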
dispatcher_plugin_integral_all_sky/spiacs_lightcurve_query.py (18 changes: 12 additions & 6 deletions)
@@ -20,6 +20,7 @@

from __future__ import absolute_import, division, print_function
import json
+import re
from typing import List

from builtins import (bytes, str, open, super, range,
@@ -108,6 +109,8 @@ def build_from_res(cls,
out_dir=None,
delta_t=None):

+(res, res_ephs) = res

lc_list = []

if out_dir is None:
@@ -122,6 +125,7 @@
meta_data['src_name'] = src_name

res_text_stripped = res.text.replace(r"\n", "\n").strip('" \n\\n')
+res_ephs_text_stripped = re.sub(r"[\'\" \n\r]+", " ", res_ephs.text).strip('" \n\\n')

for keyword in 'ZeroData', 'NoData':
if keyword in res_text_stripped:
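For reference, a sketch of what the res_ephs_text_stripped sanitisation above does: runs of quotes, spaces, and newlines in the raw ephemeris response are collapsed to single spaces, yielding a single-line string. The sample body here is hypothetical; only the transformation mirrors the code:

import re

raw = '"166.134  81.107\n109932.3 0.016 0.016 30.0"\r\n'
clean = re.sub(r"[\'\" \n\r]+", " ", raw).strip('" \n\\n')
print(clean)  # 166.134 81.107 109932.3 0.016 0.016 30.0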
@@ -179,7 +183,9 @@
header['TIMEZERO'] = (
t_ref.value*u.d-integral_mjdref*u.d).to('s').value
header['TIMEUNIT'] = 's '
-header['COMMENT'] = comment

+header['PROPHECY'] = comment
+header['EPHS'] = res_ephs_text_stripped
units_dict = {}

units_dict['RATE'] = 'count/s'
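A practical note on the two new header cards: FITS headers store values on fixed 80-character cards, which is why the ephemeris text is first normalised to a single line. Assuming the header here is an astropy.io.fits.Header (consistent with the astropy usage elsewhere in this file), a value that still exceeds one card is split across CONTINUE cards automatically, as this standalone illustration shows:

from astropy.io import fits

header = fits.Header()
# A long string value is transparently spread over CONTINUE cards.
header['EPHS'] = "166.134 81.107 109932.3 0.016 0.016 30.0 " * 4
print(str(header['EPHS']))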
@@ -222,8 +228,7 @@ def parse_ordinary_data(cls, res_text_stripped):
assert len(data['TIME_IJD']) > 100

return data




@classmethod
def parse_realtime_data(cls, res_text_stripped):
@@ -234,7 +239,9 @@

data = np.zeros(cd.shape[0], dtype=[('TIME_IJD', '<f8'), ('COUNTS', '<f8')])

-data['TIME_IJD'] = cd[:, cn.index('ijd')]
+# TODO: add tracked deviation from the accurate time from the past
+data['TIME_IJD'] = cd[:, cn.index('ijd')]
+print("\033[32mdata['TIME_IJD']", data['TIME_IJD'], "\033[0m")
data['COUNTS'] = cd[:, cn.index('counts')]

assert len(data['TIME_IJD']) > 100
@@ -258,8 +265,7 @@ def reformat_and_rebin(cls, data, delta_t):
instr_t_bin, unique_dt_s_counts[i]/len(dt_s))

t_ref = time.Time(
-(data['TIME_IJD'][0] + data['TIME_IJD'][-1]) /
-2 + integral_mjdref,
+(data['TIME_IJD'][0] + data['TIME_IJD'][-1]) / 2 + integral_mjdref,
format='mjd')

# IJD offset from MJD, https://heasarc.gsfc.nasa.gov/W3Browse/integral/intscw.html
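On the t_ref computation above: TIME_IJD is in INTEGRAL Julian Date, which (per the HEASARC page cited in the comment) counts days from 2000-01-01, so adding integral_mjdref converts the midpoint to MJD. A worked example, assuming integral_mjdref = 51544.0 (the module's actual constant is defined elsewhere):

from astropy import time

integral_mjdref = 51544.0  # assumed: MJD of 2000-01-01, the IJD epoch
ijd_mid = 8529.5           # hypothetical light-curve midpoint, in IJD
t_ref = time.Time(ijd_mid + integral_mjdref, format='mjd')
print(t_ref.isot)          # 2023-05-09T12:00:00.000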
tests/test_plugin.py (34 changes: 22 additions & 12 deletions)
@@ -144,27 +144,37 @@ def test_odaapi_data_coherence(dispatcher_live_fixture, dispatcher_test_conf):
url=dispatcher_live_fixture).get_product(**{**params, 'data_level': "realtime"})

data = np.array(product_spiacs.spi_acs_lc_0_query.data_unit[1].data)
+t_ref = product_spiacs.spi_acs_lc_0_query.data_unit[1].header['TIMEZERO']
+t = data['TIME'] + t_ref

data_rt = np.array(product_spiacs_rt.spi_acs_lc_0_query.data_unit[1].data)
+t_ref_rt = product_spiacs_rt.spi_acs_lc_0_query.data_unit[1].header['TIMEZERO']
+t_rt = data_rt['TIME'] + t_ref_rt

assert len(data) > 100
assert len(data_rt) > 100

-dt_s = (data['TIME'] - data_rt['TIME']) * 24 * 3600
+dt_s = (t - t_rt)
print("dt min, max, mean, std", dt_s.min(), dt_s.max(), dt_s.mean(), dt_s.std())

+cc = np.correlate(data['RATE'], data_rt['RATE'], mode='valid')
+cc_offset = cc.argmax() - cc.shape[0]//2

+assert np.abs(cc_offset) < 1
+assert "next break in data in 46 hr" in product_spiacs_rt.spi_acs_lc_0_query.data_unit[1].header['PROPHECY']
+assert "166.134 81.107 109932.3 0.016 0.016 30.0" in product_spiacs_rt.spi_acs_lc_0_query.data_unit[1].header['EPHS']
+assert "166.134 81.107 109932.3 0.016 0.016 30.0" in product_spiacs.spi_acs_lc_0_query.data_unit[1].header['EPHS']

-from matplotlib import pyplot as plt
-plt.plot(data['RATE'])
-plt.plot(data_rt['RATE'])
-plt.xlim(0, 10)
-plt.savefig("test_odaapi_data_coherence.png")
-# TODO: this verification will have to be completed
-# for offset in np.arange(-10, 10):
-#     if data['RATE'][offset:] - data_rt['RATE'][:-offset]:
-#         pass


-assert (data['RATE'] - data_rt['RATE']).max() < 1e-5
+# from matplotlib import pyplot as plt
+# t0 = t.min()
+# plt.plot((t - t0), data['RATE'])
+# plt.plot((t_rt - t0), data_rt['RATE'])
+# plt.grid()
+# plt.xlim(0, 1)
+# plt.savefig("test_odaapi_data_coherence.png")

+# assert np.abs(data['RATE'] - data_rt['RATE']).max() < 1e-5


def test_request_too_large(dispatcher_live_fixture):
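One caveat on the alignment check introduced in test_odaapi_data_coherence: if the two rate arrays happen to have equal length, np.correlate with mode='valid' returns a single sample, making cc_offset zero by construction. A mode='full' scan over synthetic data, sketched below, is the variant that would genuinely recover a non-zero lag (illustrative only, not the committed test):

import numpy as np

rng = np.random.default_rng(0)
a = rng.normal(size=500)
b = np.roll(a, 3)  # b is a delayed copy of a

# Mean-subtract and scan all lags; argmax minus (N - 1) gives the offset.
cc = np.correlate(a - a.mean(), b - b.mean(), mode='full')
lag = cc.argmax() - (len(b) - 1)
print(lag)  # expected: -3 under numpy's correlation convention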

