This repository has been archived by the owner on Sep 19, 2024. It is now read-only.

WIP: port to work on Ska3 #26

Closed · wants to merge 7 commits
69 changes: 33 additions & 36 deletions fix_load_segments.py
@@ -1,9 +1,5 @@
 #!/usr/bin/env python
 
-import Ska.DBI
-from Chandra.Time import DateTime
-
-
 def repair(ifot_loads):
     """
     Make any known edits to a recarray of load segments. This is called
@@ -12,78 +8,79 @@ def repair(ifot_loads):
 
     :param ifot_loads: numpy.recarray from the (Ska.Table) parsed rdb of load segments from arc/iFOT
     :rtype: numpy.recarray
 
     """
 
     import numpy as np
 
     # delete a load that was never run
-    match = ((ifot_loads.load_segment == 'CL304:0504') &
-             (ifot_loads.year == 2003))
+    match = ((ifot_loads.load_segment == 'CL304:0504')
+             & (ifot_loads.year == 2003))
     if any(match):
         # slice to get the ones that don't match
-        ifot_loads = ifot_loads[match == False]
+        ifot_loads = ifot_loads[~match]
 
     # repair a couple of loads
     # 352 ended early at...
-    match = ((ifot_loads.load_segment == 'CL352:1208') &
-             (ifot_loads.year == 2008))
+    match = ((ifot_loads.load_segment == 'CL352:1208')
+             & (ifot_loads.year == 2008))
     if any(match):
         ifot_loads.datestop[match] = '2008:353:05:00:00.000'
         ifot_loads.fixed_by_hand[match] = 1
 
     # CL110:1404 was run, not CL110:1409
-    match = ((ifot_loads.load_segment == 'CL110:1409') &
-             (ifot_loads.year == 2003))
+    match = ((ifot_loads.load_segment == 'CL110:1409')
+             & (ifot_loads.year == 2003))
     if any(match):
-        rec_tuple = ( 'CL110:1404', 2003,
-                      '2003:110:14:07:09.439', '2003:112:00:00:31.542', 130, 1)
+        rec_tuple = ('CL110:1404', 2003,
+                     '2003:110:14:07:09.439', '2003:112:00:00:31.542', 130, 1)
         # ifot_list except the bad one
-        ifot_list = ifot_loads[match == False].tolist()
+        ifot_list = ifot_loads[~match].tolist()
         ifot_list.append(rec_tuple)
-        # sort by datestart in third field
-        ifot_list.sort(lambda x,y: cmp(x[2],y[2]))
-        new_ifot = np.rec.fromrecords( ifot_list,
-                                       dtype=ifot_loads.dtype,
-                                       )
-        ifot_loads = new_ifot
+        # sort by datestart in third field
+
+        def cmp(a, b):
+            return (a > b) - (a < b)
+        ifot_list.sort(lambda x, y: cmp(x[2], y[2]))
+        new_ifot = np.rec.fromrecords(ifot_list,
+                                      dtype=ifot_loads.dtype,
+                                      )
+        ifot_loads = new_ifot
 
     # CL188:0402 is missing
     cmd_date = '2009:188:04:00:00.000'
 
     if ((ifot_loads[0].datestart <= cmd_date) and (ifot_loads[-1].datestart > cmd_date)):
-        if np.flatnonzero((ifot_loads.load_segment == 'CL188:0402') &
-                          (ifot_loads.year == 2009)):
+        if np.flatnonzero((ifot_loads.load_segment == 'CL188:0402')
+                          & (ifot_loads.year == 2009)):
             pass
         else:
-            rec_tuple = ( 'CL188:0402', 2009,
+            rec_tuple = ('CL188:0402', 2009,
                          '2009:188:04:00:00.000', '2009:188:20:57:33.571', 129, 1)
             ifot_list = ifot_loads.tolist()
             ifot_list.append(rec_tuple)
-            ifot_list.sort(lambda x,y: cmp(x[2],y[2]))
-            new_ifot = np.rec.fromrecords( ifot_list,
-                                           dtype=ifot_loads.dtype,
-                                           )
+            ifot_list.sort(lambda x, y: cmp(x[2], y[2]))
+            new_ifot = np.rec.fromrecords(ifot_list,
+                                          dtype=ifot_loads.dtype,
+                                          )
             ifot_loads = new_ifot
 
     # 2011 CL103:2002 is missing, interrupted by the APR1311 replan
     cmd_date = '2011:103:20:40:00.000'
 
     if ((ifot_loads[0].datestart <= cmd_date) and (ifot_loads[-1].datestart > cmd_date)):
-        if np.flatnonzero((ifot_loads.load_segment == 'CL103:2002') &
-                          (ifot_loads.year == 2011)):
+        if np.flatnonzero((ifot_loads.load_segment == 'CL103:2002')
+                          & (ifot_loads.year == 2011)):
             pass
         else:
-            rec_tuple = ( 'CL103:2002', 2011,
+            rec_tuple = ('CL103:2002', 2011,
                          '2011:103:20:40:00.000', '2011:103:22:57:00.000', 129, 1)
             ifot_list = ifot_loads.tolist()
             ifot_list.append(rec_tuple)
-            ifot_list.sort(lambda x,y: cmp(x[2],y[2]))
-            new_ifot = np.rec.fromrecords( ifot_list,
-                                           dtype=ifot_loads.dtype,
-                                           )
+            ifot_list.sort(lambda x, y: cmp(x[2], y[2]))
+            new_ifot = np.rec.fromrecords(ifot_list,
+                                          dtype=ifot_loads.dtype,
+                                          )
             ifot_loads = new_ifot
 
 
     return ifot_loads
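Note on the port above: `ifot_loads[~match]` is the idiomatic NumPy negation of a boolean mask, but Python 3's `list.sort()` no longer accepts a comparison function, so the `ifot_list.sort(lambda x, y: cmp(x[2], y[2]))` calls in the new code would raise TypeError as written. A minimal sketch of the Python 3 idiom (the sample rows are hypothetical stand-ins for iFOT load segment records):

import numpy as np
from functools import cmp_to_key
from operator import itemgetter

# Boolean-mask negation: ~match replaces the Python-2-era `match == False`
years = np.array([2003, 2003, 2008])
match = years == 2008
print(years[~match])  # -> [2003 2003]

# Hypothetical stand-in rows: (load_segment, year, datestart)
ifot_list = [('CL188:0402', 2009, '2009:188:04:00:00.000'),
             ('CL110:1404', 2003, '2003:110:14:07:09.439')]

def cmp(a, b):
    # Python 3 replacement for the removed builtin, as in the diff
    return (a > b) - (a < b)

# The diff's call form, made legal in Python 3 with cmp_to_key:
ifot_list.sort(key=cmp_to_key(lambda x, y: cmp(x[2], y[2])))

# Simpler equivalent: sort directly on the datestart field
ifot_list.sort(key=itemgetter(2))

Sorting on the field directly also sidesteps re-defining cmp(), which the diff only defines inside the CL110 block even though the later blocks call it too.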
19 changes: 10 additions & 9 deletions fix_tl_processing.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 import Ska.DBI
-from Chandra.Time import DateTime
 
 
 def get_options():
@@ -17,27 +17,28 @@ def get_options():
     return opt, args
 
 
-def repair( dbh ):
+def repair(dbh):
     """
     Make any necessary edits to the tl_processing table. This is called in
     update_load_seg_db.py before loads are inserted and timelines are determined.
 
     It is not expected that any edits will need to be created, but this mechanism
     is scripted into the suite to allow the possibility.
 
     If, at some point, we need to manually override mapping of replan commands
     to a specific directory instead of the one determined by parse_cmd_load_gen.pl,
     we could use something like the commented-out code in this routine.
     """
     dbh.verbose = True
-    #dbh.execute("delete from tl_processing where dir = '/2008/FEB1808/oflsb/'")
-    #dbh.execute("""insert into tl_processing
-    #( year, dir, file )
-    #values
-    #(2008, '/2008/FEB1808/oflsb/', 'C048_0802.sum')
-    #""")
+    # dbh.execute("delete from tl_processing where dir = '/2008/FEB1808/oflsb/'")
+    # dbh.execute("""insert into tl_processing
+    # ( year, dir, file )
+    # values
+    # (2008, '/2008/FEB1808/oflsb/', 'C048_0802.sum')
+    # """)
     dbh.verbose = False
 
 
 def main():
     opt, args = get_options()
     dbh = Ska.DBI.DBI(dbi=opt.dbi, server=opt.server)
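For reference, applying a one-off override like the commented-out block would look roughly like this, reusing only the calls the script itself shows (Ska.DBI.DBI, dbh.execute, dbh.verbose); the sqlite path is a hypothetical stand-in for the --server option:

import Ska.DBI

# Hypothetical database path; the real one comes from --server in get_options()
dbh = Ska.DBI.DBI(dbi='sqlite', server='cmd_states.db3')
dbh.verbose = True
dbh.execute("delete from tl_processing where dir = '/2008/FEB1808/oflsb/'")
dbh.execute("""insert into tl_processing
               ( year, dir, file )
               values
               (2008, '/2008/FEB1808/oflsb/', 'C048_0802.sum')
               """)
dbh.verbose = False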
46 changes: 17 additions & 29 deletions parse_cmd_load_gen.pl
@@ -57,29 +57,17 @@
     print "Updating clgps tables with files newer than ", $opt{touch_file}, "\n";
 }
 
-if (-e $opt{touch_file}){
+if (-e $opt{touch_file}){
     $touch_stat = stat($opt{touch_file});
 }
 
 
-my $load_arg;
-if ($opt{dbi} eq 'sybase'){
-    my $user = defined $opt{user} ? $opt{user}
-             : $opt{dryrun} ? 'aca_read'
-             : 'aca_ops';
-    my $database = defined $opt{database} ? $opt{database}
-                 : defined $ENV{SKA_DATABASE} ? $ENV{SKA_DATABASE}
-                 : 'aca';
-    $load_arg = sprintf("%s-%s-%s", 'sybase', $database, $user);
-}
-else{
-    $load_arg = { database => $opt{server},
-                  type => 'array',
-                  raise_error => 1,
-                  print_error => 1,
-                  DBI_module => 'dbi:SQLite',
-                  };
-}
+my $load_arg = { database => $opt{server},
+                 type => 'array',
+                 raise_error => 1,
+                 print_error => 1,
+                 DBI_module => 'dbi:SQLite',
+                 };
 
 my $load_handle = sql_connect($load_arg);
 
@@ -111,7 +99,7 @@
     if ($max_touch_time < $mtime){
         $max_touch_time = $mtime;
         $max_touch_file = $file;
-	}
+        }
     }
 }
 
@@ -133,9 +121,9 @@ sub update_for_file{
     my $file = shift;
 
     print "Parsing $file" if $opt{verbose};
-
+
     my $file_stat = stat("$file");
 
     my ( $week, $loads ) = parse_clgps( $file );
     my ( $dir, $filename);
     if ($file =~ /${mp_dir}(\/\d{4}\/\w{3}\d{4}\/ofls\w?\/)mps\/(C.*\.sum)/){
@@ -152,23 +140,23 @@ sub update_for_file{
     if (defined $dir and defined $filename){
         $week->{dir} = $dir;
         $week->{file} = $filename;
-        $week->{sumfile_modtime} = $file_stat->mtime;
+        $week->{sumfile_modtime} = $file_stat->mtime;
         for my $load_ref (@{$loads}){
             $load_ref->{file} = $filename;
             $load_ref->{sumfile_modtime} = $week->{sumfile_modtime};
-            my $delete = qq( delete from tl_built_loads where year = $load_ref->{year}
-                             and load_segment = "$load_ref->{load_segment}"
+            my $delete = qq( delete from tl_built_loads where year = $load_ref->{year}
+                             and load_segment = "$load_ref->{load_segment}"
                              and file = "$filename"
                              and load_scs = $load_ref->{load_scs}
                              and sumfile_modtime = $week->{sumfile_modtime} );
             sql_do( $load_handle, $delete);
             sql_insert_hash( $load_handle, 'tl_built_loads', $load_ref );
         }
 
         # only bother to store if it has loads
         sql_do( $load_handle, qq( delete from tl_processing where dir = "$dir" and file = "$filename"));
         sql_insert_hash( $load_handle, 'tl_processing', $week );
 
         my $obsids = get_obsids("${mp_dir}/${dir}", $loads);
         for my $obs_load (keys %{$obsids}){
             for my $obs_entry (@{$obsids->{$obs_load}}){
@@ -179,7 +167,7 @@ sub update_for_file{
                 load_segment => $ids[1],
                 obsid => $obs_entry->{obsid},
                 date => $obs_entry->{date},
-                );
+                );
             my $obs_delete = qq( delete from tl_obsids where dir = "$dir"
                                  and year = $ids[0]
                                  and load_segment = "$ids[1]"
@@ -190,7 +178,7 @@ sub update_for_file{
             }
         }
     }
-
+
     print " ... Done \n" if $opt{verbose};
     return $file_stat->mtime;
 
@@ -205,7 +193,7 @@ sub update_for_file{
 sub get_obsids{
     my $dir = shift;
     my $loads = shift;
-
+
     my @bs_list = glob("${dir}/*.backstop");
     my $backstop = $bs_list[0];
 
@@ -235,7 +223,7 @@ sub parse_clgps {
 
     # assume this isn't a replan
     my %week = ( replan => 0 );
-
+
     my @rawloads;
     my $lines = io($gps)->slurp;
 
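The path-matching step in update_for_file() is the piece most worth understanding when relocating summary files. A rough Python translation of the Perl regex, for illustration only (the value of $mp_dir is a hypothetical stand-in; the sample path reuses the FEB1808 directory and C048_0802.sum file named elsewhere in this PR):

import re

mp_dir = '/data/mpcrit1/mplogs'  # hypothetical stand-in for $mp_dir
pattern = re.compile(mp_dir + r'(/\d{4}/\w{3}\d{4}/ofls\w?/)mps/(C.*\.sum)')

match = pattern.search('/data/mpcrit1/mplogs/2008/FEB1808/oflsb/mps/C048_0802.sum')
if match:
    dir_, filename = match.groups()
    print(dir_, filename)  # -> /2008/FEB1808/oflsb/ C048_0802.sum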
9 changes: 3 additions & 6 deletions task_schedule.cfg
@@ -40,12 +40,9 @@ alert [email protected]
 <task timelines_cmd_states>
       cron */10 * * * *
       check_cron 15 7 * * *
-      exec 1: parse_cmd_load_gen.pl --dbi 'sybase' --server 'sybase' --verbose
-      exec 1: update_load_seg_db.py --dbi 'sybase' --server 'sybase' --verbose
-      exec 6: $ENV{SKA_SHARE}/cmd_states/update_cmd_states.py --dbi 'sybase' --server 'sybase' --h5file $ENV{SKA_DATA}/cmd_states/cmd_states.h5
-      exec 1: parse_cmd_load_gen.pl --dbi 'sqlite' --server $ENV{SKA_DATA}/cmd_states/cmd_states.db3 --touch_file $ENV{SKA_DATA}/timelines/sum_files_sqlite3.touch --verbose
-      exec 1: update_load_seg_db.py --dbi 'sqlite' --server $ENV{SKA_DATA}/cmd_states/cmd_states.db3 --verbose
-      exec 6: $ENV{SKA_SHARE}/cmd_states/update_cmd_states.py --dbi 'sqlite' --server $ENV{SKA_DATA}/cmd_states/cmd_states.db3 --h5file ''
+      exec 1: parse_cmd_load_gen.pl --dbi 'sqlite' --server $ENV{SKA_DATA}/cmd_states3/cmd_states.db3 --touch_file $ENV{SKA_DATA}/timelines/sum_files_sqlite3.touch --verbose
+      exec 1: update_load_seg_db.py --dbi 'sqlite' --server $ENV{SKA_DATA}/cmd_states3/cmd_states.db3 --verbose
+      exec 6: $ENV{SKA_SHARE}/cmd_states/update_cmd_states.py --dbi 'sqlite' --server $ENV{SKA_DATA}/cmd_states3/cmd_states.db3 --h5file ''
       <check>
         <error>
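As a sanity check on the new database location, something like the following can confirm the timelines tables exist in the cmd_states3 copy (a sketch using only stdlib sqlite3; it assumes $SKA_DATA is set as in the config above):

import os
import sqlite3

# Same path the exec lines above point at
db_path = os.path.join(os.environ['SKA_DATA'], 'cmd_states3', 'cmd_states.db3')
with sqlite3.connect(db_path) as conn:
    tables = [row[0] for row in conn.execute(
        "select name from sqlite_master where type = 'table'")]
    print(tables)  # expect tl_processing, tl_built_loads, tl_obsids among these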
Conversation on task_schedule.cfg:

Contributor:
So, do we want to retire the "states" part of cmd_states universally? I suggested "let's retire sybase first", but maybe if we are there they should just all go?

If we want to keep the 'states' product h5 file we'd need to move that updating to the sqlite job. Right now it is a "side-effect" of the sybase job.

Member Author:
This is a fine time to drop the 'states' product. It has been deprecated for what feels like years.

Contributor:
Sounds good. For that I think we'd want to reach out to MTA too? Not sure if there might have been any other customers... and we'll need to do a bit more of a survey with some updates if there are some pieces that aren't converted to kadi command states.

Member Author:
See email "Package deprecations: Chandra.cmd_states and Ska.ParseCM" from June 10 2020. I can re-send that same email if you are worried.

Contributor:
Given resources during these past 20 months it would probably be a good idea to re-send.

Member Author:
Done (re-send)

Contributor:
Thanks! To enforce "deprecation", I was thinking rename the sybase tables and remove cmd_states.h5. Wasn't sure if we also wanted to strip out the cmd_states table from the cmd_states.db3.

Member Author (@taldcroft, Nov 1, 2021):
Yes on all three (in a week).
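For concreteness, the two local steps of that cleanup might look like the sketch below; the paths follow the old exec lines and the thread, the script itself is hypothetical, and the sybase-side table rename would happen on the server rather than here:

import os
import sqlite3

ska_data = os.environ['SKA_DATA']

# Remove the deprecated 'states' HDF5 product (path from the old exec line)
os.remove(os.path.join(ska_data, 'cmd_states', 'cmd_states.h5'))

# Strip the cmd_states table out of the sqlite database, per the thread
db_path = os.path.join(ska_data, 'cmd_states', 'cmd_states.db3')
with sqlite3.connect(db_path) as conn:
    conn.execute("drop table cmd_states")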
