Документ взят из кэша поисковой машины. Адрес оригинального документа : http://xmm.vilspa.esa.es/calibration/ept/perlscripts/monitoring.pl
Дата изменения: Wed Aug 30 19:10:16 2006
Дата индексирования: Sun Apr 10 14:15:20 2016
Кодировка:

Поисковые слова: apod
#!/usr/bin/perl -w

# ----------------------------------------------------------------------------------------------- #
#
# monitor.pl - script to search for calclosed data (observation + slews)
#
# Author: Daniel Harbarth (august 2005)
#
# ----------------------------------------------------------------------------------------------- #
#
# Usage: start_monitoring.pl [OPTION] [FILE] [OBSID(s)]
# Description:
#
# This script provides two tasks:
# 1. search for CALCLOSED/CLOSED exposures in observations
# INPUT:
# - /home/xsaops/obslist/obslist_D_Month.txt - checks these new OBSID's (can be changed by the -f or -n options)
# - /home/epicmon/monitoring/bin/checked_obs.txt - already checked OBSID's
# - /home/epicmon/monitoring/bin/special_obs.txt - OBSID's will be checked in any case (even if already checked)
# OUTPUT:
# - /home/epicmon/monitoring/bin/calclosed.log - list of all observations and (if available) their calclosed exposures
# - /home/epicmon/monitoring/bin/new_calclosed.log - same, but only the new ones
# - /home/epicmon/monitoring/bin/closed.log - list of all observations and (if available) their closed exposures
# - /home/epicmon/monitoring/bin/new_closed.log - same, but only the new ones
# 2. search and merge CALCLOSED MOS slews
# INPUT:
# - /odfdata/sdfs/ - directory in which the slews are (can be changed by the -f or -n options)
# - /home/epicmon/monitoring/bin/slews_checked.log - already checked slew OBSID's
# OUTPUT:
# - /home/epicmon/monitoring/bin/slews_calclosed.log - list of all calclosed and already merged slews
# - /home/epicmon/monitoring/bin/slews_new_calclosed.log - same, but only the new ones
#
# Options:
# Notes !!!
# - either the -onlysearch or the -onlyslews option can be used
# - only if one of the -only* options is used then the -f and the -n options are available
#
# -onlysearch [-fn] only exposures will be checked
# -f file-mode: FILE will be taken as input list for the obsids (each line one obsid)
# -n manual-mode: space separated obsids numbers as input
# -onlyslew [-fn] only slews will be checked
# -f file-mode: FILE will be taken as input list for the obsids (each line one obsid)
# -n manual-mode: space separated obsids numbers as input
#
# -clean clean all the following directories:
# /home/epicmon/monitoring/tmp/in/
# /home/epicmon/monitoring/tmp/calclosed_tmp/
# /home/epicmon/monitoring/tmp/closed_tmp/
# /home/epicmon/monitoring/tmp/working/
# /home/epicmon/monitoring/tmp/out/calclosed/
# /home/epicmon/monitoring/tmp/out/closed/
# /home/epicmon/monitoring/tmp/out/MOS_OV/
# /home/epicmon/monitoring/tmp/out/MOS_DI/
# /home/epicmon/monitoring/tmp/out/PN_DL/
# /home/epicmon/monitoring/tmp/out/PN_NO/
# /home/epicmon/monitoring/tmp/out/PN_OD_OBS/
# /home/epicmon/monitoring/tmp/out/new_week/calclosed/
# /home/epicmon/monitoring/tmp/out/new_week/closed/
# /home/epicmon/monitoring/testing/
# /home/epicmon/monitoring/tmp/slew/evl/
#
# Examples:
# start_monitoring.pl
# start_monitoring.pl -onlyslews
# start_monitoring.pl -onlysearch -f obsids.list
# start_monitoring.pl -onlyslews -n 0109141201 0006810201
#
# ----------------------------------------------------------------------------------------------- #


use strict;
use LWP::Simple; # to get the web site (testing)

my $script_start_time = time(); # script start (epoch seconds) - used to report the total runtime
my $script_start_datetime = get_datetime(); # human readable start time, written to the logfile


# ---------------------------------------------- #
# flags for testing / debuging #
# (all 0 in production; set to 1 only while developing) #
# ---------------------------------------------- #
my $FAST = 0; # 1 = skip copy_files, tar_files (faster test runs)
my $NO_AIO = 0; # 1 = skip the aioclient archive retrieval
my $NO_RM_FILES = 0; # 1 = skip removing_files, empty_dir (keep intermediates)
my $DO_IT_FOR_ALL = 0; # 1 = get all exposures - & mark checked
my $WEB_TEST = 0; # 1 = compare results with the old (web published) results

##############################################################
# -------------------- config file --------------------------#
##############################################################


# ---------------------- #
# the directories #
# ---------------------- #

my $ROOT_DIR = '/home/epicmon/monitoring/'; # base of the monitoring installation
#my $CONFIG_DIR = $ROOT_DIR . 'config/';
my $BIN_DIR = $ROOT_DIR . 'bin/'; # scripts + state files (checked lists, logs)
my $AIO_DIR = $BIN_DIR . 'aio/'; # the aioclient (archive interface)
my $IDL_DIR = $BIN_DIR . 'idl/'; # idl scripts (slewccmerge template)
my $TMP_DIR = $ROOT_DIR . 'tmp/'; # scratch space for one run
my $SLEW_DIR = $TMP_DIR . 'slew/'; # root for slew things
my $EVL_DIR = $SLEW_DIR . 'evl/'; # for the eventlists - temporary
#my $PSEUDO_ODF_DIR = $SLEW_DIR . 'pseudo_odfs/'; # for the merged pseudo odf's
my $CALCLOSED_TMP_DIR = $TMP_DIR . 'calclosed_tmp/'; # staging area for CALCLOSED exposure files
my $CLOSED_TMP_DIR = $TMP_DIR . 'closed_tmp/'; # staging area for CLOSED exposure files
my $WORKING_DIR = $TMP_DIR. 'working/'; # working dir for the script (extracted ODFs, SAS runs)
my $IN_DIR = $TMP_DIR . 'in/'; # downloaded/copied raw input (tar/gz)
my $OUT_DIR = '/xvsoc01/ftpexport1/secure/epic_ex/'; # root for the central repository - results will be copied here

my $PSEUDO_ODF_DIR = $OUT_DIR . 'pseudo_odfs/'; # for the merged pseudo odf's
my $PSEUDO_ODF_DIR_SIMULATE = $SLEW_DIR . 'simulateresults'; # NOTE(review): no trailing slash, unlike every other *_DIR - confirm move_files copes
my $CALCLOSED_DIR = $OUT_DIR . 'calclosed/'; # repository: calclosed exposures
my $CLOSED_DIR = $OUT_DIR . 'closed/'; # repository: closed exposures
my $MOS_OV_DIR = $OUT_DIR . 'MOS_OV/'; # repository: MOS offset/variance files
my $MOS_DI_DIR = $OUT_DIR . 'MOS_DI/'; # repository: MOS diagnostic files
my $PN_DL_DIR = $OUT_DIR . 'PN_DL/'; # repository: PN discarded-lines files
my $PN_NO_DIR = $OUT_DIR . 'PN_NO/'; # repository: PN noise files
my $PN_OD_OBS_DIR = $OUT_DIR . 'PN_OD_OBS/'; # repository: PN offset data per observation
my $NEW_WEEK_DIR = $OUT_DIR . 'new_week/'; # results of the current run only
my $NEW_WEEK_CALCLOSED_DIR = $NEW_WEEK_DIR . 'calclosed/'; # this run: calclosed
my $NEW_WEEK_CLOSED_DIR = $NEW_WEEK_DIR . 'closed/'; # this run: closed
my $LOG_DIR = $ROOT_DIR . 'logs/'; # logfiles
my $MAIL_DIR = $LOG_DIR . 'send_mails/'; # backups of the mails


my $TEST_DIR = $ROOT_DIR . 'testing/'; # scratch area for debugging/testing copies


# ------------------------------ #
# input dir for the slews #
# ------------------------------ #
my $NEW_SLEWS_DIR = '/odfdata/sdfs/'; # new slews will be found here



# ---------------------- #
# the files #
# ---------------------- #

# INPUT
my $OBSLIST_MONTH = '/home/xsaops/obslist/obslist_D_Month.txt'; # the new OBS id's (default input list)
#my $OBSLIST_MONTH = '/home/xsaops/obslist/obslist_D_All.txt'; # reprocessing - but just 'not checked'
my $OBSLIST_ALL = '/home/xsaops/obslist/obslist_D_All.txt'; # list of all OBS id's -> used with -reprocess_all

my $CHECKED_OBS = $BIN_DIR . 'checked_obs.log'; # observations already checked (skip list)
my $SPECIAL_OBS = $BIN_DIR . 'special_obs.log'; # observations to check on every run, even if already checked

# OUTPUT
my $CLOSED_LOG = $OUT_DIR . 'closed.log'; # cumulative list of all closed observations
my $CALCLOSED_LOG = $OUT_DIR . 'calclosed.log'; # cumulative list of all calclosed observations
my $NEW_WEEK_CLOSED_LOG = $BIN_DIR . 'new_closed.log'; # closed observations found in the last run only
my $NEW_WEEK_CALCLOSED_LOG = $BIN_DIR . 'new_calclosed.log'; # calclosed observations found in the last run only

# for SLEW
my $SLEWS_CALCLOSED_LOG = $OUT_DIR . 'slews_calclosed.log'; # cumulative list of all calclosed (merged) slews
my $SLEWS_NEW_WEEK_CALCLOSED_LOG = $BIN_DIR .'slews_new_calclosed.log'; # calclosed slews found in the last run only
my $PROCESSED_SLEW_OBS = $BIN_DIR . 'slews_checked.log'; # slews already checked for cc + emproc + EVL created
my $PROCESSED_SLEW_OBS_SIMULATE = $BIN_DIR . 'slews_checked_simulation.log'; # same, but used in -simulate mode
my $SLEWS_IN_ONE_CYCLE = 10; # slews which will be processed in one cycle (--> for saving diskspace)

# ---------------------- #
# env.vars to export #
# ---------------------- #
# NOTE(review): plaintext archive credentials in source - consider moving
# them to a protected config file.
my $EXPORT_VARS = {
JDK_HOME => '/usr/local/j2sdk1.4.0_03',
AIOUSER => 'epicmon',
AIOPWD => 'ep1cpass',
SAS_CCF => 'ccf.cif',
SAS_ODF => "$WORKING_DIR",
};


# ---------------------------------------------- #
# people who'll get the status report via email #
# ---------------------------------------------- #

#my $MAIL_ADRESSES = 'daniel.harbarth@sciops.esa.int';
#my $MAIL_ADRESSES = 'mkirsch@sciops.esa.int' ;
my $MAIL_ADRESSES = 'mkirsch@sciops.esa.int mstuhlin@sciops.esa.int kod@mpe.mpg.de mjf@mpe.mpg.de dbl@star.le.ac.uk sfs5@star.le.ac.uk mehle@xmm.vilspa.esa.es msmith@xmm.vilspa.esa.es snowden@milkyway.gsfc.nasa.gov';

# recipients when running with -simulate (reduced list)
my $MAIL_ADRESSES_SIMULATE = 'mkirsch@sciops.esa.int';


##############################################################
# ------------------ end config file ------------------------#
##############################################################




##############################################################
# -------------------- start --------------------------------#
##############################################################


# --------------------------------------------------------------
# initialize the global variables (shared by all subs below)
# --------------------------------------------------------------
my %result; # per "rev_obsid" key: lists of calclosed/closed/science exposures
my $obsid_mode = ''; # commandline parameter: '' | 'filemode' | 'manual' | 'done'
my @observations_with_aio_errro = (); # failed observations (aioclient) - (sic, typo kept: name is referenced below)
my $found_closed = 0; # counter for found closed exposures
my $found_calclosed = 0; # counter for found calclosed exposures
my $found_closed_obs = 0; # counter for found closed observations
my $found_calclosed_obs = 0; # counter for found calclosed observations
my $obs_counter = 0; # counter for checked observations (correct recieved from aioclient)
my @observations = (); # observations still to check in this run
my $reprocess_all = 0; # reprocess mode for search_obs (-reprocess_all)
my $simulate = 0; # 1 = simulation: nothing copied to the repository, state files untouched

# for slew
my $slews_counter = 0; # counter for processed slews (incremented once per MOS camera)
my $slews_pseudo_odf = 0; # counter for created pseudo odfs
my @slews_pseudo_odfs = (); # names of the pseudo odfs + their source eventlists (for the logs)
my $slews_merged_counter= 0; # counter for merged slews

# ------------------------------------------------------------
# main - command line dispatch
#
#   (no args)    -> normal mode: exposure search, then slew search
#   -clean       -> interactively empty the temporary directories
#   -onlyslews   -> slew search only
#   -onlysearch  -> exposure search only
#   -simulate    -> full run, but nothing is written to the repository
# A second argument (-f / -n / -reprocess_all / -simulate) refines the mode.
# ------------------------------------------------------------

if (@ARGV > 0) { # scanning comandline paramter

    # !!!!!!!!!!!!!!!!!!!!!! i've to change the dirs
    # directories offered for removal by -clean (also listed in the usage text)
    my @dirs = (
        $IN_DIR,
        $CALCLOSED_TMP_DIR,
        $CLOSED_TMP_DIR,
        $WORKING_DIR,
        '/home/epicmon/monitoring/testing/',
    );

    # second command line argument refines the mode
    if (@ARGV > 1) {
        if ($ARGV[1] eq '-f') {
            $obsid_mode = 'filemode';   # obsids are read from the FILE given as 3rd argument
        } elsif ($ARGV[1] eq '-n') {
            $obsid_mode = 'manual';     # obsids are listed directly on the command line
        } elsif ($ARGV[1] eq '-reprocess_all') {
            $reprocess_all = 1;
        } elsif ($ARGV[1] eq '-simulate') {
            ;                           # handled below, per mode
        } else {
            printh("wrong parameter");
            exit;
        }
    }


    if ($ARGV[0] eq '-clean') {
        print "\nwill clean the folowing directories:\n\t" . join "\n\t", @dirs;
        print "\nare you sure ? (y/n)\n";

        # BUGFIX: the readline was missing (empty right-hand side, probably
        # lost when the file passed through an HTML cache) - read the
        # confirmation from STDIN.
        my $in = <STDIN>;
        chomp $in;
        if ($in eq 'y') {
            foreach (@dirs) {
                empty_dir($_);
            }
        } else {
            print "\naborted\n";
        }
        exit;


    } elsif ($ARGV[0] eq '-onlyslews') {
        # 'defined' guard avoids an uninitialized-value warning under -w when
        # no second argument was given (the comparison result is unchanged).
        if (defined $ARGV[1] and $ARGV[1] eq '-simulate') {
            printh("starting onlyslews in simulation mode ....");
            # system() returns 0 on success, hence 'and die' fires on failure
            system("cp $PROCESSED_SLEW_OBS $PROCESSED_SLEW_OBS_SIMULATE") and die "couldn't 'cp $PROCESSED_SLEW_OBS $PROCESSED_SLEW_OBS_SIMULATE'";
            $simulate = 1;
        }
        # only slews will be checked
        cc_slews();

    } elsif ($ARGV[0] eq '-onlysearch') {
        # only exposures will be checked
        if (defined $ARGV[1] and $ARGV[1] eq '-simulate') {
            printh("starting onlysearch in simulation mode ....");
            $simulate = 1;
        }
        init();
        @observations = get_new_observations();
        calclosed_closed_exposures_search();


    } elsif ($ARGV[0] eq '-simulate') {
        # like normal mode but:
        # - no files will be copied to the repository
        # - email only to $MAIL_ADRESSES_SIMULATE
        # - no update for closed.log and calclosed.log
        # - checked_obs.txt wont't be touched,
        printh("starting in simulation mode ....");
        $simulate = 1;
        # NOTE(review): get_new_observations() is called before init() here,
        # while -onlysearch calls init() first - confirm the intended order.
        @observations = get_new_observations();
        init();
        calclosed_closed_exposures_search();

        system("cp $PROCESSED_SLEW_OBS $PROCESSED_SLEW_OBS_SIMULATE") and die "couldn't 'cp $PROCESSED_SLEW_OBS $PROCESSED_SLEW_OBS_SIMULATE'";
        cc_slews();

    } else {
        # unknown option -> print the usage text and exit
        print "\n\n\nUsage: start_monitoring.pl [OPTION] [FILE] [OBSID(s)]\n";

        print "Description:\n\n";
        print "This script provides two tasks:\n";
        print "\t1. search for CALCLOSED/CLOSED exposures in observations\n";
        print "\t\tINPUT:\n";
        print "\t\t- $OBSLIST_MONTH - checks these new OBSID's (can be changed by the -f or -n options)\n";
        print "\t\t- $CHECKED_OBS - already checked OBSID's\n";
        print "\t\t- $SPECIAL_OBS - OBSID's will be checked in any case (even if already checked)\n";
        print "\t\tOUTPUT:\n";
        print "\t\t- $CALCLOSED_LOG - list of all observations and (if available) their calclosed exposures\n";
        print "\t\t- $NEW_WEEK_CALCLOSED_LOG - same, but only the new ones\n";
        print "\t\t- $CLOSED_LOG - list of all observations and (if available) their closed exposures\n";
        print "\t\t- $NEW_WEEK_CLOSED_LOG - same, but only the new ones\n";

        print "\t2. search and merge CALCLOSED MOS slews\n";
        print "\t\tINPUT:\n";
        print "\t\t- $NEW_SLEWS_DIR - directory in which the slews are (can be changed by the -f or -n options)\n";
        print "\t\t- $PROCESSED_SLEW_OBS - already checked slew OBSID's\n";
        print "\t\tOUTPUT:\n";
        print "\t\t- $SLEWS_CALCLOSED_LOG - list of all calclosed and already merged slews\n";
        print "\t\t- $SLEWS_NEW_WEEK_CALCLOSED_LOG - same, but only the new ones\n";


        print "\nOptions:\n";
        print "\t\t -simulate: just simulate --> no files will be copied to the repository , \n";
        print "\t\t\todsid's wont't be marked as checked, email only to $MAIL_ADRESSES_SIMULATE\n";
        print "\t\t\tno update for closed.log and calclosed.log \n";
        print "\t\t \n";
        print "\tNotes !!! \n";
        print "\t\t - either the -onlysearch or the -onlyslews option can be used\n";
        print "\t\t - only if one of the -only* options is used then the -f and the -n options are available\n\n";
        print "\t-onlysearch [-fn]\tonly exposures will be checked\n";
        print "\t\t-f\tfile-mode: FILE will be taken as input list for the obsids (each line one odsid)\n";
        print "\t\t-n\tmanual-mode: space separeated obsids numbers as input\n";
        print "\t\t-reprocess_all\tthe list $OBSLIST_ALL will be taken as input\n";


        print "\t-onlyslews [-fn]\t\tonly slews will be checked\n";
        print "\t\t-f\tfile-mode: FILE will be taken as input list for the obsids (each line one odsid)\n";
        print "\t\t-n\tmanual-mode: space separeated obsids numbers as input\n";


        print "\n\t-clean\t\tclean all the folowing directories:\n\t\t\t\t" . join ("\n\t\t\t\t", @dirs) . "\n";

        print "\nExamples:\n";
        print "\tstart_monitoring.pl\n";
        print "\tstart_monitoring.pl -simulate\n";
        print "\tstart_monitoring.pl -onlyslews\n";
        print "\tstart_monitoring.pl -onlyslews -simulate\n";
        print "\tstart_monitoring.pl -onlysearch -f obsids.list \n";
        print "\tstart_monitoring.pl -onlyslews -n 0109141201 0006810201\n";
        print "\tstart_monitoring.pl -onlysearch -reprocess_all \n";
        print "\tstart_monitoring.pl -onlysearch -simulate \n";
        print "\n";
        exit;
    }
} else {


    # --------------------------------------------------------------
    # normal mode
    # 1. exposures
    # 2. slews

    # --------------------------------------------------------------
    # get the new, to be checked observations
    # --------------------------------------------------------------
    @observations = get_new_observations();

    init();
    calclosed_closed_exposures_search();

    cc_slews();


}

mail(\%result); # also mail the result
end(); # clean up, etc.





##############################################################
# -------------------- slew section -----------------#
##############################################################


sub cc_slews
{
# --------------------------------------------------------------
# main routine for looking after new slew data for the MOS in order
# to merge the calclosed ones.
#
# Cycle: fetch up to $SLEWS_IN_ONE_CYCLE new slews, keep only the
# calclosed ones, run emproc on them (eventlists end up in $EVL_DIR),
# then merge per camera with exec_slewccmerge(). Repeats until
# get_new_slew_obs() returns nothing. No arguments, no return value;
# communicates via the file-level globals and log files.
# --------------------------------------------------------------

printh("looking for slews ....");

my @slews_odfs;
while ( @slews_odfs = get_new_slew_obs() ) { # get new_slews - only $SLEWS_IN_ONE_CYCLE in one cycle (disc space)
printh(scalar @slews_odfs . " slew(s) found");
# -------------- processing the slews ---------------
#
# 1. look if it's calclosed
# 2. if yes -> execute emproc to get the eventlist
# ---------------------------------------------------

for my $slew (@slews_odfs) {
# emergency stop: drop a flag file to abort a running instance
if (-e '/home/epicmon/monitoring/bin/stopit.txt') {print "aborted becouse of flag file\n"; end();}

printl("processing $slew ...");
$slews_counter++; #MOS1
$slews_counter++; #MOS2

# extract the files -> $WORKING_DIR
# NOTE(review): the 'or printerrorlog(...) and next' idiom only reaches
# 'next' if printerrorlog() returns true - confirm it always does.
gtar_odf($WORKING_DIR,$slew) or printerrorlog("couldn't extract slew $slew") and next; # from $IN_DIR
untar($WORKING_DIR, $WORKING_DIR ."*".$slew.".TAR") or printerrorlog("couldn't extract slew $slew") and next;

# check if slew is cc (house-keeping inspection); skip and mark otherwise
if (is_it_a_calclosed_slew($slew) == 0) {
printh("skipping $slew. Not a calclosed slew ...");
empty_dir($WORKING_DIR);
mark_slew_obs_as_processed($slew);
next;
}

# execute emproc - to get the eventlists (copied into $EVL_DIR)
my $emproc = emproc($slew);
if ($emproc == 0) {
printh("skipping $slew. SAS error ...");
empty_dir($WORKING_DIR);
printerrorlog("SAS error on $slew");
#print_new_log("slews_with_sas_error.txt", "SAS error on $slew\n");
mark_slew_obs_as_processed($slew);
next;
}

mark_slew_obs_as_processed($slew);

# clean up
empty_dir($WORKING_DIR);
}

# --------------------------------------------------
# merge files - all that are still in the directory
# --------------------------------------------------
exec_slewccmerge('1'); # MOS1
exec_slewccmerge('2'); # MOS2
}
# final merge pass after the last (possibly partial) cycle
exec_slewccmerge('1'); # MOS1
exec_slewccmerge('2'); # MOS2



if ($simulate == 1) {
# simulation: mail an empty result instead of touching the log files
my $result = {};
mail($result);
} else {
write_slew_result();
}

printh("OK. slew check finished.");
}



sub write_slew_result
{
# --------------------------------------------------
# Persist the slew results: append the pseudo-odf records created in
# this run to the cumulative log (kept sorted and duplicate-free) and
# overwrite the "new this run" log. Does nothing in simulation mode.
# --------------------------------------------------
return if $simulate == 1; # simulation: leave all log files untouched

my $new_records = join("\n", @slews_pseudo_odfs);

# cumulative log: append, then sort and de-duplicate in place
append_to_file("$SLEWS_CALCLOSED_LOG", $new_records);
sort_and_unique_file($SLEWS_CALCLOSED_LOG);

# log of the slews merged in this run only
write_to_file($SLEWS_NEW_WEEK_CALCLOSED_LOG, $new_records);
}

sub exec_slewccmerge
{
# --------------------------------------------------------------
# eventlists in $EVL_DIR will be merged to a pseudo odfs
# --------------------------------------------------------------

# IN: 1 or 2 ( MOS1 or MOS2)
# OUT: merged pseudo odfs moved to $PSEUDO_ODF_DIR (or the simulate dir);
#      returns 1 on success/nothing-to-do, exits the script on IDL failure.
# Side effects: writes/removes files in $EVL_DIR, updates the counters
# $slews_pseudo_odf / $slews_merged_counter and @slews_pseudo_odfs.

my $inst = shift; # 1 or 2 (MOS1 or MOS2)

printh("looking for eventlists to merge for MOS${inst} ...");
chdir($EVL_DIR);

# get start.- and endrevolution (need at least 2 eventlists to merge)
my @mos_files = get_files($EVL_DIR,"_EMOS${inst}.*ImagingEvts\.ds");
printl('nothing to merge') and return 1 if scalar @mos_files < 2;
my ($start_rev, $end_rev) = get_max_min_rev(@mos_files);

# calculate the exposure time limit (decay-corrected, see get_minexptime)
my $time = get_minexptime($end_rev);

# create the execution file "exec_slewccmerge", in order to pass arguments to the idl script
# (simple template substitution; each placeholder is replaced once)
my $text = join '', read_file_array($IDL_DIR . "exec_slewccmerge_template");
$text =~ s/{IDL_DIR}/$IDL_DIR/;
$text =~ s/{EVL_DIR}/$EVL_DIR/;
$text =~ s/{INST}/$inst/;
$text =~ s/{TIME}/$time/;
$text =~ s/{START_REV}/$start_rev/;
$text =~ s/{END_REV}/$end_rev/;
chdir($EVL_DIR);
write_to_file("exec_slewccmerge", $text);

# execute exec_slewccmerge
printh("starting 'idl exec_slewccmerge' ...");
my $idl = system('idl exec_slewccmerge'); # NOTE(review): exit status is not checked; success is detected via ok.flag below

chdir($EVL_DIR);
# check if idl finished successful (the idl script drops ok.flag on success)
if (! -e 'ok.flag') {
printerrorlog("!!! ERROR. IDL (slewccmerge.pro) not finished successful");
exit;
#return 0;
} else {
system('rm ok.flag') and die "couldn't 'rm ok.flag'";
system('rm exec_slewccmerge') and die "couldn't 'rm exec_slewccmerge'";
}

# save the generated .fits files
# + create the pseudo odf (moved to the repository, or a side dir in -simulate)
my @created_fit_files = get_files($EVL_DIR,"m${inst}_automerge.*.fits"); # .fits files
for (@created_fit_files){
if ($simulate == 0) {
move_files($EVL_DIR, $PSEUDO_ODF_DIR ,$_);
} else {
move_files($EVL_DIR, $PSEUDO_ODF_DIR_SIMULATE ,$_);
}
$slews_pseudo_odf++;
}

# get merged files (each .txt lists the .ds eventlists that went into one merge)
my @automerge_files = get_files($EVL_DIR,"m${inst}_automerge.*.txt"); # the .txt files
printh(scalar @automerge_files . " merged .fits files created.");

my @merged_files; # .ds files
for (@automerge_files) {
my @files_in_the_file = read_file_array($EVL_DIR . $_); # .ds files (with full path)
printl("files in $_:\n\t". join "\t", @files_in_the_file );

# remove the .txt file and store the files to remove later
push @merged_files , @files_in_the_file;
# remove the *automerge*.txt file
remove_files($EVL_DIR ,$_);

# for the result log
my $fit_file = $_;
$fit_file =~ s/\.txt/EVL\.fits/; # get the fit file
# strip the path; keep only "RRRR_....ds"
# NOTE(review): $1 is used without checking the match succeeded - a
# non-matching entry would push a stale capture. Confirm input format.
my @files_in_the_file_without_path;
for (@files_in_the_file) {m/(\d{4}_.*\.ds)/; push @files_in_the_file_without_path,$1;}
push @slews_pseudo_odfs, ($fit_file . " : " . join (" - ",@files_in_the_file_without_path)) ;
}



# remove the merged evl files (.ds)

for (@merged_files){
$slews_merged_counter++;

m/(\d{4}_.*)/;
my $filename = $1;
# delete files - moving for testing
if ($simulate == 0) {
move_files($EVL_DIR, $SLEW_DIR . "backup" ,$filename);
remove_files($EVL_DIR ,$filename);
}
}

# remove AttHk.ds * Badpixels.ds files (by-products of emproc)
remove_files($EVL_DIR ,'.*AttHk.ds');
remove_files($EVL_DIR ,'.*Badpixels.ds');


return 1;
}


sub get_max_min_rev
{
    # --------------------------------------------------------------
    # get the lowest and the highest revolution number
    # --------------------------------------------------------------

    # IN : filenames starting with a 4-digit revolution, e.g. "0426_*"
    # OUT: ($start_rev, $end_rev) - the lowest and the highest revolution
    #      of these files (numeric comparison; the captured strings keep
    #      their leading zeros). With no matching file the initial
    #      sentinels (999999, 0) are returned.

    my @files = @_;

    my $start_rev = 999999;
    my $end_rev = 0;

    for my $file (@files) {
        # BUGFIX: the match result was never checked, so a file without a
        # "NNNN_" prefix reused the stale (or undef) $1 from a previous
        # match. Skip such files instead.
        next unless $file =~ m/(\d{4})_/;
        my $rev = $1;
        $start_rev = $rev if $rev < $start_rev;
        $end_rev   = $rev if $rev > $end_rev;
    }
    return ($start_rev, $end_rev);
}

sub get_minexptime
{
    # --------------------------------------------------------------
    # Compute the exposure time (in ks, rounded up) needed for good
    # statistics when merging slews, as a function of the revolution.
    #
    # The on-board calibration source decays radioactively, so the count
    # rate A(t) falls off as A(t) = A(0) * exp(-l*t) with
    # l = ln(2) / T_1/2. To collect the same number of events as the
    # T_0 = 15 ks needed at the start of the mission, the required time
    # grows as T' = T_0 * exp(l * t), where t is the time since the
    # first revolution (one revolution = 48 h) and T_1/2 = 2.7 years.
    #
    # IN : revolution number
    # OUT: required exposure time in ks (integer, rounded up)
    # --------------------------------------------------------------

    my ($rev) = @_;

    my $half_life   = 2.7;                          # T_1/2 in years
    my $decay_const = log(2) / $half_life;          # l = ln2 / T_1/2   [1/year]
    my $t_zero      = 15000 / 60 / 60 / 24 / 365;   # 15 ks at mission start, in years
    my $elapsed     = ($rev * 48) / 24 / 365;       # time since mission start, in years

    # T' = T_0 * exp(l * t), converted back to seconds
    my $needed_sec = ($t_zero * exp($decay_const * $elapsed)) * 365 * 24 * 60 * 60;

    # convert to ks and round up
    my $result = int($needed_sec / 1000);
    $result++;

    printh("$result ks are needed for revolution $rev");
    return $result;
}

sub emproc
{
# --------------------------------------------------------------
# process the raw data with emproc in order to get the eventlists
# tar.gz is in $IN_DIR -> copy the *EVL* to $EVL_DIR
#
# IN : odf/observation id (used only for the log message; the SAS
#      tools read everything from $WORKING_DIR via the env vars)
# OUT: 1 on success, 0 if any of cifbuild/odfingest/emproc exited
#      non-zero. On success all produced .ds files are copied to
#      $EVL_DIR.
# --------------------------------------------------------------

my $odf = shift;

printh("preparing $odf ... ");

# set the env. variables needed by emproc
# NOTE(review): SAS_DIR normally points at the SAS installation, not the
# working directory - confirm this assignment is intentional.
$ENV{'SAS_DIR'} = "${WORKING_DIR}";
$ENV{'SAS_ODF'} = "${WORKING_DIR}";

chdir($WORKING_DIR);

# cif - build the calibration index file; its exit status is checked below
printl("executing cifbuild ... ");
my $cif = system('cifbuild');
print 'ok';
$ENV{'SAS_CCF'} = "ccf.cif";

# odfingtest
printl("executing odfingtest ... ");
my $fing = system('odfingest');
print 'ok';

# emproc
printl("executing emproc ... ");
#my $emproc = system('emproc');
#my $emproc = system('emproc selectccds=yes ccd1=true searchforbadpixels=false runevlistcomb=false');
#no CTI ect correction: taken out by MK 06.10.2005
#my $emproc = system('emproc searchforbadpixels=false correctcti=no correctgain=no');
#changed to
my $emproc = system('emproc');

#my $emproc = system('emchain instruments=m1 ccds=1 runevlistcomb=N makeflaregti=N emenergy:correctcti=N emenergy:correctgain=N badpixfindalgo=NO');

# note: 'ok' is printed before the statuses are actually checked
print 'ok';

if ( ($cif != 0) or ($fing != 0) or ($emproc != 0) ) {
printl("one or more SAS tools returned an errror !");
return 0;
}

# copy the eventlists to $EVL_DIR
copy_files("${WORKING_DIR}", $EVL_DIR,".*\.ds");

return 1;
}

sub get_new_slew_obs {

# --------------------------------------------------------------
# get new 10 slew's (not processed yet) by looking in $NEW_SLEWS_DIR
# copy them into the $IN_DIR
#
# Uses the global $obsid_mode:
#   ''         -> normal mode: scan $NEW_SLEWS_DIR, return up to
#                 $SLEWS_IN_ONE_CYCLE not-yet-processed slew ids
#   'filemode' -> read the obsid list from the file in $ARGV[2]
#   'manual'   -> take the obsids from the remaining @ARGV entries
#                 (destructively shifts @ARGV!)
#   'done'     -> file/manual list already consumed; return ()
# Returns the list of slew obsids whose files were copied to $IN_DIR.
# --------------------------------------------------------------

#return;

if ($obsid_mode ne '') {
my @slews;
if ($obsid_mode eq 'done') {
return ();
} elsif ($obsid_mode eq 'filemode') {
if (-e $ARGV[2]) {
print "\nfilemode. obslist will be taken from $ARGV[2]\n\n";
@slews = read_obsids_from_file($ARGV[2]);
} else {
print "\n\nERROR !!!: $ARGV[2] doesn't exist\n\n";
exit;
}
} elsif ($obsid_mode eq 'manual') {
shift @ARGV; # remove the -onlyslews
shift @ARGV; # remove the -n
print "\nmanual mode.\n\n";
@slews = @ARGV;
}

for (@slews) {
printh("new slew $_ found. copy it to $IN_DIR");
copy_files($NEW_SLEWS_DIR, $IN_DIR,"$_.*");
}
$obsid_mode = 'done'; # consume the explicit list: next call returns ()
return @slews;
} else { # normal mode - get slews from $NEW_SLEWS_DIR

# get the obsids from the gz's (first 10-digit number in each filename)
my $ls = `ls $NEW_SLEWS_DIR*.gz`;
my @all_slews;
while ($ls =~ /(\d{10})/g) {
push @all_slews, $1;
}

# take 10 obs-ids not retieved until now
my $counter = 1;
my @new_slews = ();
for my $slew_obs (@all_slews) {
if (slew_was_processed($slew_obs) ){
#printl("skipping $slew_obs. already processed ...");
next;
}
printh("new slew $slew_obs found. copy it to $IN_DIR");
push @new_slews, $slew_obs;
copy_files($NEW_SLEWS_DIR, $IN_DIR,"$slew_obs.*");
last if ++$counter > $SLEWS_IN_ONE_CYCLE; # cap at $SLEWS_IN_ONE_CYCLE per cycle
}
printh("following slews will be processed now:\n\t" . join "\n\t\t", @new_slews) if scalar @new_slews ;
return @new_slews;
}


}

# ----------------------------------------------------------------------------------------------- #
# ----------------------------------------------------------------------------------------------- #


sub mark_slew_obs_as_processed
{
# Record a slew observation id as processed by appending it to the
# processed-slews log ($PROCESSED_SLEW_OBS, or the separate simulation
# log when running with -simulate).
my ($obs) = @_;
my $logfile = $simulate == 1 ? $PROCESSED_SLEW_OBS_SIMULATE : $PROCESSED_SLEW_OBS;
append_to_file($logfile, $obs . "\n");
}

# ----------------------------------------------------------------------------------------------- #

sub slew_was_processed
{
# Return 1 if the slew observation id already appears in the
# processed-slews log ($PROCESSED_SLEW_OBS, or the simulation log when
# running with -simulate), 0 otherwise.
my ($obs) = @_;
my $logfile = $simulate == 1 ? $PROCESSED_SLEW_OBS_SIMULATE : $PROCESSED_SLEW_OBS;
return is_id_in_file($logfile, $obs);
}

# ----------------------------------------------------------------------------------------------- #

sub is_id_in_file
{
# Return 1 if the given observation id appears on any line of $file,
# 0 otherwise. Calls error() (which reports and presumably aborts) if
# the file cannot be opened.
my $file = shift;
my $obs = shift;

my $found = 0;
# BUGFIX: the original loop read 'while (){' - the <FILE> readline was
# lost (probably stripped by an HTML cache), so the loop never ran and
# every id looked unprocessed. Also switched to a lexical filehandle
# with 3-arg open, and \Q...\E so the id is matched literally instead
# of being interpreted as a regex.
open(my $fh, '<', $file) or error("couldn't open $file ...");
while (my $line = <$fh>) {
$found = 1 if ($line =~ m/\Q$obs\E/);
}
close $fh;
return $found;
}

# ----------------------------------------------------------------------------------------------- #

sub is_it_a_calclosed_slew
{
# checks via house-keeping file if the slew is cc (CALCLOSED)
#
# IN : slew observation id (its extracted files are in $WORKING_DIR)
# OUT: 1 if both MOS cameras have at least one calclosed exposure,
#      0 otherwise (also 0 on a search_for_calclosed_and_closed error,
#      after removing the camera's .FIT files from $WORKING_DIR).

my $obs = shift;

# get the rev. number
my $revolution = get_revolution($obs);

# log
printh("checking if slew '$obs' is calclosed --- revolution $revolution");


###########################################################################################
# MOS 1
###########################################################################################
# --------------------------------------------------------------
# get the exposures - the .fit fils
# --------------------------------------------------------------
my @mos1_files = get_files($WORKING_DIR,"${revolution}_${obs}_M1.*40IME.*\.FIT");
printl("\n (slew)--- MOS1 --- (" . scalar @mos1_files . " .fit files found) \n");

# --------------------------------------------------------------
# sort the exposures by calclosed, closed and science
# (the two expression strings are house-keeping parameter filters
# passed through to the GTI generation; E1xxx = MOS1 HK parameters)
# --------------------------------------------------------------
my ( $mos1_calclosed, $mos1_closed, $mos1_science, $return_status_m1)
= search_for_calclosed_and_closed(
$revolution,
$obs,
"${revolution}_${obs}_M1X00000PEH.FIT",
"expression='E1018==\"OFF\".and.E1019==\"OFF\".and.E1317==1580'",
"expression='E1012==\"OFF\".and.(E1254==\"IN POSITION\".or.E1254==\"IN_POSITION\").and.E1257==\"CLOSED\".and.((E1008==\"PRIME\").OR.(E1008==\"FAST\"))'",
'exec_gen_exp_gti',
'exp_name.dat',
'exec_gen_hk_gti',
@mos1_files);


if ($return_status_m1 ne '') {
printerrorlog($return_status_m1 . " skipping (slew) $obs");
remove_files($WORKING_DIR,"${revolution}_${obs}_M1.*40IME.*\.FIT");
return 0;
}
###########################################################################################
# MOS 2
###########################################################################################
# get the exposures - the .fit fils
my @mos2_files = get_files($WORKING_DIR,"${revolution}_${obs}_M2.*40IME.*\.FIT");
printl("\n (slew)--- MOS2 --- (" . scalar @mos2_files . " .fit files found) \n");

# --------------------------------------------------------------
# sort the exposures by calclosed, closed and science
# (same as above with K1xxx = MOS2 HK parameters)
# --------------------------------------------------------------
my ( $mos2_calclosed, $mos2_closed, $mos2_science, $return_status_m2)
= search_for_calclosed_and_closed(
$revolution,
$obs,
"${revolution}_${obs}_M2X00000PEH.FIT",
"expression='K1018==\"OFF\".and.K1019==\"OFF\".and.K1317==1580'",
"expression='K1012==\"OFF\".and.(K1254==\"IN POSITION\".or.K1254==\"IN_POSITION\").and.K1257==\"CLOSED\".and.((K1008==\"PRIME\").OR.(K1008==\"FAST\"))'",
'exec_gen_exp_gti',
'exp_name.dat',
'exec_gen_hk_gti',
@mos2_files);

if ($return_status_m2 ne '') {
printerrorlog($return_status_m2 . " skipping $obs");
remove_files($WORKING_DIR,"${revolution}_${obs}_M2.*40IME.*\.FIT");
return 0;
}

printl(scalar @$mos1_calclosed . " mos1 calclosed found\t" . join " ", @$mos1_calclosed);
printl(scalar @$mos1_closed . " mos1 closed found\t" . join " ", @$mos1_closed);
printl(scalar @$mos1_science . " mos1 science found\t" . join " ", @$mos1_science);
printl(scalar @$mos2_calclosed . " mos2 calclosed found\t" . join " ", @$mos2_calclosed);
printl(scalar @$mos2_closed . " mos2 closed found\t" . join " ", @$mos2_closed);
printl(scalar @$mos2_science . " mos2 science found\t" . join " ", @$mos2_science);

# look if there are slews in which just one of the most is cc
if ( (scalar @$mos1_calclosed) != (scalar @$mos2_calclosed) ) {
print_new_log("slews_mos_cam_differs.txt", "obs: $obs - mos1: @$mos1_calclosed - mos2: @$mos2_calclosed\n");
}

# return: calclosed only if BOTH cameras have calclosed exposures
if ( (scalar @$mos1_calclosed > 0) and (scalar @$mos2_calclosed > 0) ) {
#print_new_log("slews_mos_calclosed.txt", "$obs\n");
# testing
#copy_files("${WORKING_DIR}", $TEST_DIR,"${revolution}_${obs}_M.*40IME.*\.FIT");
return 1;
} else {
#print_new_log("slews_mos_not_calclosed.txt", "$obs\n");
return 0;
}
}



##############################################################
# -------------------- end slew section -----------------#
##############################################################






# --------------------------------------------------------------
# main - search for closed/calclosed
# --------------------------------------------------------------

sub calclosed_closed_exposures_search {

$ENV{'SAS_ODF'} = $EXPORT_VARS->{'SAS_ODF'};

printh(scalar @observations . " observations found\n" . join " ", @observations);




# --------------------------------------------------------------
# the main loop - analyse every observation for their exposures
# --------------------------------------------------------------
foreach my $obs (@observations) {

# just for testing - that i can abort the script by generating a flag file (stopit.txt)
if (-e '/home/epicmon/monitoring/bin/stopit.txt') {print "aborted becouse of flag file\n"; last;}

# exit if space to less
# (my $disk_free = `du -sb /home/epicmon/monitoring/tmp/out/`) =~ s/(\d+).*/$1/;
# my $gigs = ($disk_free / (2**30)); # in GB
# if ( $gigs > 20) {
# printh("space lower than 20 GB (in /out)\n");
# last;
# } else {
# printh ("$gigs GB in /out");
# }


# --------------------------------------------------------------
# # get the files from the archive + extract them
# --------------------------------------------------------------
my $aio = load_odf_via_aio($obs);
if ($aio == 0) {
# there are errors with the aioclient
printerror("aioclient Error: skipping observation $obs\n");
push @observations_with_aio_errro, $obs;
next;
}

# get the rev. number
my $revolution = get_revolution($obs);

# log
printh("checking observation $obs --- revolution $revolution (" . ++$obs_counter . " of " . scalar @observations .")" );

# initialize
$result{"${revolution}_${obs}"} =
{
'calclosed' => [],
'closed' => [],
'science' => [],
};


###########################################################################################
# MOS 1
###########################################################################################
# --------------------------------------------------------------
# get the exposures - the .fit fils
# --------------------------------------------------------------
my @mos1_files = get_files($WORKING_DIR,"${revolution}_${obs}_M1.*40IME.*\.FIT",
#"${revolution}_${obs}_M1.*20IME.*\.FIT");
#"${revolution}_${obs}_M1.*10TIE.*\.FIT"
);
printl("\n --- MOS1 --- (" . scalar @mos1_files . " .fit files found) \n");

# --------------------------------------------------------------
# sort the exposures by calclosed, closed and science
# --------------------------------------------------------------
my ( $mos1_calclosed, $mos1_closed, $mos1_science, $return_status_m1)
= search_for_calclosed_and_closed(
$revolution,
$obs,
"${revolution}_${obs}_M1X00000PEH.FIT",
"expression='E1018==\"OFF\".and.E1019==\"OFF\".and.E1317==1580'",
"expression='E1012==\"OFF\".and.E1254==\"IN POSITION\".and.E1257==\"CLOSED\".and.((E1008==\"PRIME\").OR.(E1008==\"FAST\"))'",
'exec_gen_exp_gti',
'exp_name.dat',
'exec_gen_hk_gti',
@mos1_files);

if ($return_status_m1 ne '') {
printerrorlog($return_status_m1 . " skipping $obs for MOS1");
#next;
}

# --------------------------------------------------------------
# old
# --------------------------------------------------------------
foreach my $m1 (@mos1_files) {

my $exp = get_exposure($m1);

printl("----------------------------------- old --------------------------------- ");

printl("\n ---> analyzing $m1 (exposure: $exp)");
# --------------------------------------------------------------
# fstatistic - analyze the spectra to look for Mg and Al lines
# --------------------------------------------------------------
my $val;
if ($m1 =~ /IME/) { # Imaging Mode
$val = fstatistic($m1, 'colname=energye1 rows=1-1000 minval=1760 maxval=1810');
} elsif ($m1 =~ /TIE/) { # TIMING Mode
$val = fstatistic($m1, 'colname=engye1e2 rows=1-1000 minval=1950 maxval=2050');
}
printh("just to compare: if $val >= 35 its calclosed");

printl("----------------------------------- \\old --------------------------------- ");
}
# ---------------------------- /old -------------------------------------------

# --------------------------------------------------------------
# CALCLOSED
# --------------------------------------------------------------
foreach my $exp ( @$mos1_calclosed ) {

printl(" $exp --> CALCLOSED observation found for MOS1");
push_result(\%result, "${revolution}_${obs}", 'calclosed' , $exp);

copy_files($WORKING_DIR, $CALCLOSED_TMP_DIR,"${revolution}_${obs}_${exp}.*",
"${revolution}_${obs}_M1.*OV.*",
"${revolution}_${obs}_M1.*DI.*",
"${revolution}_${obs}_M1.*X0.*" );
$found_calclosed++;
}

# NO CALCLOSED
printl("--> NO-CALCLOSED observation found for MOS1");

# --------------------------------------------------------------
# CLOSED
# --------------------------------------------------------------
foreach my $exp ( @$mos1_closed ) {

printl(" $exp ----> CLOSED observation found for MOS1");
push_result(\%result, "${revolution}_${obs}", 'closed' , $exp);

# copy all files of to $CLOSED_TMP_DIR
copy_files($WORKING_DIR, $CLOSED_TMP_DIR,"${revolution}_${obs}_${exp}.*",
"${revolution}_${obs}_M1.*OV.*",
"${revolution}_${obs}_M1.*DI.*",
"${revolution}_${obs}_M1.*X0.*" );
$found_closed++;
}

# --------------------------------------------------------------
# SCIENCE
# --------------------------------------------------------------
foreach my $exp ( @$mos1_science ) {

printl("$exp ----> SCIENCE observation found for MOS1");
push_result(\%result, "${revolution}_${obs}", 'science' , $exp);

tar_files($WORKING_DIR, $MOS_OV_DIR, "${revolution}_${obs}_M1_OV.TAR", "${revolution}_${obs}_M1.*OV.*" );
tar_files($WORKING_DIR, $MOS_DI_DIR, "${revolution}_${obs}_M1_DI.TAR", "${revolution}_${obs}_M1.*DI.*" );
}
printl('');


###########################################################################################
# MOS 2
###########################################################################################

# get the exposures - the .fit files
my @mos2_files = get_files($WORKING_DIR,"${revolution}_${obs}_M2.*40IME.*\.FIT",
#"${revolution}_${obs}_M1.*20IME.*\.FIT");
#"${revolution}_${obs}_M2.*10TIE.*\.FIT"
);
printl("\n --- MOS2 --- (" . scalar @mos2_files . " .fit files found) \n");

# --------------------------------------------------------------
# sort the exposures by calclosed, closed and science
# --------------------------------------------------------------
my ( $mos2_calclosed, $mos2_closed, $mos2_science, $return_status_m2)
= search_for_calclosed_and_closed(
$revolution,
$obs,
"${revolution}_${obs}_M2X00000PEH.FIT",
"expression='K1018==\"OFF\".and.K1019==\"OFF\".and.K1317==1580'",
"expression='K1012==\"OFF\".and.K1254==\"IN POSITION\".and.K1257==\"CLOSED\".and.((K1008==\"PRIME\").OR.(K1008==\"FAST\"))'",
'exec_gen_exp_gti',
'exp_name.dat',
'exec_gen_hk_gti',
@mos2_files);

if ($return_status_m2 ne '') {
printerrorlog($return_status_m2 . " skipping $obs for MOS2");
#next;
}

# ---------------------------- old -------------------------------------------
foreach my $m2 (@mos2_files) {
#printl("MOS2 disabled") and last;
my $exp = get_exposure($m2);
printl("----------------------------------- old --------------------------------- ");
printl("\n ---> analyzing $m2 (exposure: $exp)");
# --------------------------------------------------------------
# fstatistic - analyze the spectra to look for Mg and Al lines
# --------------------------------------------------------------
# fstatistic
my $val;
if ($m2 =~ /IME/) { # Imaging Mode
$val = fstatistic($m2, 'colname=energye1 rows=1-1000 minval=1705 maxval=1755');
} elsif ($m2 =~ /TIE/) { # TIMING Mode
$val = fstatistic($m2, 'colname=engye1e2 rows=1-1000 minval=1800 maxval=1950');
}
printh("just to compare: if $val >= 70 its calclosed");
printl("----------------------------------- \\old --------------------------------- ");
}
# ---------------------------- /old -------------------------------------------

# --------------------------------------------------------------
# CALCLOSED
# --------------------------------------------------------------
foreach my $exp ( @$mos2_calclosed ) {

printl(" $exp --> CALCLOSED observation found for MOS2");

push_result(\%result, "${revolution}_${obs}", 'calclosed' , $exp);

copy_files($WORKING_DIR, $CALCLOSED_TMP_DIR,"${revolution}_${obs}_${exp}.*",
"${revolution}_${obs}_M2.*OV.*",
"${revolution}_${obs}_M2.*DI.*",
"${revolution}_${obs}_M2.*X0.*" );
$found_calclosed++;
}
printl("--> NO-CALCLOSED observation found for MOS2");

# --------------------------------------------------------------
# CLOSED
# --------------------------------------------------------------
foreach my $exp ( @$mos2_closed ) {

printl(" $exp ----> CLOSED observation found for MOS2");

push_result(\%result, "${revolution}_${obs}", 'closed' , $exp);
# copy all files of to $CLOSED_TMP_DIR
copy_files($WORKING_DIR, $CLOSED_TMP_DIR,"${revolution}_${obs}_${exp}.*",
"${revolution}_${obs}_M2.*OV.*",
"${revolution}_${obs}_M2.*DI.*",
"${revolution}_${obs}_M2.*X0.*" );
$found_closed++;
}

# --------------------------------------------------------------
# SCIENCE
# --------------------------------------------------------------
foreach my $exp ( @$mos2_science ) {

printl(" $exp ----> SCIENCE observation found for MOS2");
push_result(\%result, "${revolution}_${obs}", 'science' , $exp);

tar_files($WORKING_DIR, $MOS_OV_DIR, "${revolution}_${obs}_M2_OV.TAR", "${revolution}_${obs}_M2.*OV.*" );
tar_files($WORKING_DIR, $MOS_DI_DIR, "${revolution}_${obs}_M2_DI.TAR", "${revolution}_${obs}_M2.*DI.*" );
}
printl('');



###########################################################################################
# PN
###########################################################################################

# copy Noise Map (if there is only noise)
tar_files($WORKING_DIR, $PN_NO_DIR, "${revolution}_${obs}_NO.TAR", "${revolution}_${obs}_PN.*NO.*" );
tar_files($WORKING_DIR, $PN_OD_OBS_DIR, "${revolution}_${obs}_OD.TAR", "${revolution}_${obs}_PN.*OD.*" );
tar_files($WORKING_DIR, $PN_DL_DIR, "${revolution}_${obs}_DL.TAR", "${revolution}_${obs}_PN.*DL.*" );

my @pn_files = get_files($WORKING_DIR, "${revolution}_${obs}_PN.*04IME\.FIT", # Imaging
"${revolution}_${obs}_PN.*04TIE\.FIT", # Timing
"${revolution}_${obs}_PN.*04BUE\.FIT" ); # Burst
printl("\n --- PN --- (" . scalar @pn_files . " .fit files found) \n");

# --------------------------------------------------------------
# sort the exposures by calclosed, closed and science
# --------------------------------------------------------------
my ( $pn_calclosed, $pn_closed, $pn_science, $return_status_pn)
= search_for_calclosed_and_closed(
$revolution,
$obs,
"${revolution}_${obs}_PNX00000PMH.FIT",
"expression='F1119==\"STOPPED\".and.((F1120==\"NOT ACTIVE\".and.F1118==\"NO STOP POS.\").or.(F1120==\"NOT_ACTIVE\".and.F1118==\"NO_STOP_POS.\")).and.F1122<51.6.and.F1008==\"OBSERVATION\"'",
"expression='F1119==\"STOPPED\".and.F1120==\"ACTIVE\".and.F1118==\"CLOSE\".and.F1008==\"OBSERVATION\"'",
'exec_gen_exp_gti',
'exp_name.dat',
'exec_gen_hk_gti',
@pn_files);

if ($return_status_pn ne '') {
printerrorlog($return_status_pn . " skipping $obs for PN");
#next;
}

foreach my $pn (@pn_files) {
my $exp = get_exposure($pn);
printl("----------------------------------- old --------------------------------- ");
printl("\n ---> analyzing $pn (exposure: $exp)");

##########################################################################################
# the old procedure
my $val1 = fstatistic($pn, 'colname=energy rows=1-5000 minval=1100 maxval=1180');
my $val2 = fstatistic($pn, 'colname=energy rows=1-5000 minval=920 maxval=1000');
my $diff = $val1 - $val2;
#if ($diff >= 35) {
printh("just to compare: if $diff >= 35 its calclosed");
##########################################################################################
printl("----------------------------------- \\ old --------------------------------- ");
}


# --------------------------------------------------------------
# CALCLOSED
# --------------------------------------------------------------
foreach my $exp ( @$pn_calclosed ) {
printl(" $exp --> CALCLOSED observation found for PN");
push_result(\%result, "${revolution}_${obs}", 'calclosed' , $exp);

copy_files($WORKING_DIR, $CALCLOSED_TMP_DIR,"${revolution}_${obs}_${exp}.*",
"${revolution}_${obs}_PN.*OD.*",
"${revolution}_${obs}_PN.*DL.*",
"${revolution}_${obs}_PN.*NO.*",
"${revolution}_${obs}_PNX.*" );
$found_calclosed++;
}

# NO CALCLOSED
printl("--> NO-CALCLOSED observation found for PN");

# --------------------------------------------------------------
# CLOSED
# --------------------------------------------------------------
foreach my $exp ( @$pn_closed ) {
printl(" $exp ----> CLOSED observation found for PN");
push_result(\%result, "${revolution}_${obs}", 'closed' , $exp);

# copy files
copy_files($WORKING_DIR, $CLOSED_TMP_DIR,"${revolution}_${obs}_${exp}.*",
"${revolution}_${obs}_PN.*OD.*",
"${revolution}_${obs}_PN.*DL.*",
"${revolution}_${obs}_PN.*NO.*",
"${revolution}_${obs}_PNX.*" );
$found_closed++;
}

# --------------------------------------------------------------
# SCIENCE
# --------------------------------------------------------------
foreach my $exp ( @$pn_science ) {

printl(" $exp ----> SCIENCE observation found for MOS1");
push_result(\%result, "${revolution}_${obs}", 'science' , $exp);
}
printl('');





##################################################################################################

# --------------------------------------------------------------
# tar, gzip and copy the required files to the ftp
# --------------------------------------------------------------

if (@{$result{"${revolution}_${obs}"}->{'calclosed'}} > 0 ) { # are there calclosed for this observation
$found_calclosed_obs++;
printl("\n ---> " .scalar @{$result{"${revolution}_${obs}"}->{'calclosed'}} . " calclosed found for ${revolution}_${obs}.");

copy_files($WORKING_DIR, $CALCLOSED_TMP_DIR,"${revolution}_${obs}.*RM.*",
"${revolution}_${obs}.*SCX.*P.*",
"${revolution}_${obs}.*SCX.*TCS.*",
"${revolution}_${obs}.*SCX.*ATS.*",
"${revolution}_${obs}.*SCX.*ROS.*",
"${revolution}_${obs}.*SCX.*SUM.*",
"MANIFEST.*");

tar_files($CALCLOSED_TMP_DIR, $CALCLOSED_TMP_DIR, "${revolution}_${obs}.TAR",
"${revolution}_${obs}.*M1.*",
"${revolution}_${obs}.*M2.*",
"${revolution}_${obs}.*PN.*",
"${revolution}_${obs}.*RM.*",
"${revolution}_${obs}.*SCX.*P.*",
);

gzip_files($CALCLOSED_TMP_DIR, $CALCLOSED_TMP_DIR, "${obs}.tar.gz",
"${revolution}_${obs}.TAR",
"${revolution}_${obs}.*SCX.*TCS.FIT",
"${revolution}_${obs}.*SCX.*ATS.FIT",
"${revolution}_${obs}.*SCX.*ROS.ASC",
"${revolution}_${obs}.*SCX.*SUM.ASC",
"MANIFEST.*",
);
if ($simulate == 0) {
copy_files($CALCLOSED_TMP_DIR, $NEW_WEEK_CALCLOSED_DIR, "${obs}.tar.gz") if ($reprocess_all == 0);
copy_files($CALCLOSED_TMP_DIR, $CALCLOSED_DIR, "${obs}.tar.gz");
empty_dir($CALCLOSED_TMP_DIR);
}
}


if (@{$result{"${revolution}_${obs}"}->{'closed'}} > 0) { # are there closed for this observation
$found_closed_obs++;
printl("\n ---> " . scalar @{$result{"${revolution}_${obs}"}->{'closed'}} . " closed found for ${revolution}_${obs}.");

copy_files($WORKING_DIR, $CLOSED_TMP_DIR,"${revolution}_${obs}.*RM.*",
"${revolution}_${obs}.*SCX.*P.*",
"${revolution}_${obs}.*SCX.*TCS.*",
"${revolution}_${obs}.*SCX.*ATS.*",
"${revolution}_${obs}.*SCX.*ROS.*",
"${revolution}_${obs}.*SCX.*SUM.*",
"MANIFEST.*");

tar_files($CLOSED_TMP_DIR, $CLOSED_TMP_DIR, "${revolution}_${obs}.TAR",
"${revolution}_${obs}.*M1.*",
"${revolution}_${obs}.*M2.*",
"${revolution}_${obs}.*PN.*",
"${revolution}_${obs}.*RM.*",
"${revolution}_${obs}.*SCX.*P.*",
);

gzip_files($CLOSED_TMP_DIR, $CLOSED_TMP_DIR, "${obs}.tar.gz",
"${revolution}_${obs}.TAR",
"${revolution}_${obs}.*SCX.*TCS.FIT",
"${revolution}_${obs}.*SCX.*ATS.FIT",
"${revolution}_${obs}.*SCX.*ROS.ASC",
"${revolution}_${obs}.*SCX.*SUM.ASC",
"MANIFEST.*",
);
if ($simulate == 0) {
copy_files($CLOSED_TMP_DIR, $NEW_WEEK_CLOSED_DIR, "${obs}.tar.gz") if ($reprocess_all == 0);
copy_files($CLOSED_TMP_DIR, $CLOSED_DIR, "${obs}.tar.gz");
empty_dir($CLOSED_TMP_DIR);
}
}

# clean up
empty_dir($WORKING_DIR);
empty_dir($IN_DIR);


mark_obs_as_checked($obs);
}


print_result(\%result); # print the %result
update_web(\%result); # update closed.log and calclosed.log

if ($WEB_TEST == 1) {
compare_exposures_with_web(\%result); # for testing only
test_doubles(\%result); # for testing only
}



}

##############################################################
# -------------------- end --------------------------------#
##############################################################





##############################################################
# -------------------- functions ----------------------------#
##############################################################


sub search_for_calclosed_and_closed
{
    # --------------------------------------------------------------
    # Using tabgtigen & IDL:
    # look whether there are (CAL)CLOSED exposure(s) in the house-
    # keeping file. If there is one or more, determine to which
    # exposure each GTI belongs by comparing their start/stop times.
    # (see documentation for details)
    #
    # OUT: (\@calclosed, \@closed, \@science, $return_status)
    #      three array refs holding the exposure ids sorted by type,
    #      plus an error string ('' on success)
    # --------------------------------------------------------------

    # IN:
    my $revolution                  = shift; # the revolution number
    my $obs                         = shift; # observation id
    my $tabgtigen_housekeeping_file = shift; # the house-keeping file
    my $tabgtigen_param_calclosed   = shift; # tabgtigen expression selecting calclosed intervals
    my $tabgtigen_param_closed      = shift; # tabgtigen expression selecting closed intervals
    my $idl_script_exp              = shift; # IDL script for finding the exposure GTI
    my $idl_script_exp_input        = shift; # file name into which the exposure file name is written
    my $idl_script_hk               = shift; # IDL script for finding the house-keeping GTI
    my @files                       = @_;    # all exposure files

    # OUT: exposures sorted by their types
    my (@closed, @calclosed, @science);

    my $return_status = '';
    my @err_arr       = ();

    # 1. get the calclosed GTIs
    printh("searching for calclosed gti's in the house-keeping-file: $tabgtigen_housekeeping_file");
    $return_status = tabgtigen($tabgtigen_housekeeping_file, $tabgtigen_param_calclosed);
    if ($return_status ne '') { # tabgtigen error ?
        return (\@err_arr, \@err_arr, \@err_arr, $return_status);
    }
    my ($cal_gti_start, $cal_gti_stop) = get_hk_times($idl_script_hk);
    # if nothing was found the helper delivers a single '0.00' entry - remove it
    if ((scalar @$cal_gti_start == 1) and ($cal_gti_start->[0] eq '0.00')) {
        printl('no cal gti found');
        @$cal_gti_start = ();
    }

    # 2. get the closed GTIs
    printh("searching for closed gti's in the house-keeping-file: $tabgtigen_housekeeping_file");
    $return_status = tabgtigen($tabgtigen_housekeeping_file, $tabgtigen_param_closed);
    if ($return_status ne '') { # tabgtigen error ?
        return (\@err_arr, \@err_arr, \@err_arr, $return_status);
    }
    my ($closed_gti_start, $closed_gti_stop) = get_hk_times($idl_script_hk);
    # if nothing was found - remove the 0.00 entry
    if ((scalar @$closed_gti_start == 1) and ($closed_gti_start->[0] eq '0.00')) {
        printl('no closed gti found');
        @$closed_gti_start = ();
    }

    # best-match bookkeeping: per GTI start time remember the exposure
    # with the smallest |start diff| + |stop diff|
    my %best_calclosed_exp;
    my %best_calclosed_exp_time;
    my %best_closed_exp;
    my %best_closed_exp_time;

    foreach my $f (@files) {

        my $exp = get_exposure($f);

        # get the exposure start/stop times
        my ($exp_start, $exp_stop) = get_exp_times($idl_script_exp, $f, $idl_script_exp_input);

        printh("SEARCHING FOR CALCLOSED in $f") if scalar @$cal_gti_start > 0;
        # 1. calclosed GTIs
        for (my $x = 0; $x < scalar @$cal_gti_start; $x++) {

            my $gti_start = $cal_gti_start->[$x];
            my $gti_stop  = $cal_gti_stop->[$x];

            # calculate the time difference between GTI and exposure
            my $start_diff_cal = $gti_start - $exp_start;
            printl("gti_start_cal - exp_start_cal ($gti_start - $exp_start) = $start_diff_cal");
            my $stop_diff_cal = $gti_stop - $exp_stop;
            printl("gti_stop_cal - exp_stop_cal ($gti_stop - $exp_stop) = $stop_diff_cal");

            # the exposure belongs to the GTI if either edge is within 800 s
            if ( (abs($start_diff_cal) <= 800) or (abs($stop_diff_cal) <= 800) ) {
                # CALCLOSED
                # keep only the exposure which matches this GTI best
                if ( (! defined $best_calclosed_exp{"$gti_start"})
                     or ($best_calclosed_exp_time{"$gti_start"} > (abs($start_diff_cal) + abs($stop_diff_cal))) )
                {
                    printl("$exp is calclosed");
                    $best_calclosed_exp{"$gti_start"}      = $exp;
                    $best_calclosed_exp_time{"$gti_start"} = abs($start_diff_cal) + abs($stop_diff_cal);
                }
            }
        }

        printh("SEARCHING FOR CLOSED in $f") if scalar @$closed_gti_start > 0;
        # 2. closed GTIs
        for (my $i = 0; $i < scalar @$closed_gti_start; $i++) {

            my $gti_start = $closed_gti_start->[$i];
            my $gti_stop  = $closed_gti_stop->[$i];

            # calculate the time difference between GTI and exposure
            my $start_diff_closed = $gti_start - $exp_start;
            printl("gti_start - exp_start ($gti_start - $exp_start) = $start_diff_closed");
            my $stop_diff_closed = $gti_stop - $exp_stop;
            printl("gti_stop - exp_stop ($gti_stop - $exp_stop) = $stop_diff_closed");

            # closed matching is asymmetric: a matching start edge is enough,
            # a matching stop edge only counts if the start is within 3000 s
            if (
                ( abs($start_diff_closed) <= 800 )
                or ( (abs($stop_diff_closed) <= 800) and (abs($start_diff_closed) <= 3000) )
            ) {
                # CLOSED
                # keep only the exposure which matches this GTI best
                if ( (! defined $best_closed_exp{"$gti_start"})
                     or ($best_closed_exp_time{"$gti_start"} > (abs($start_diff_closed) + abs($stop_diff_closed))) )
                {
                    printl("$exp is closed");
                    $best_closed_exp{"$gti_start"}      = $exp;
                    $best_closed_exp_time{"$gti_start"} = abs($start_diff_closed) + abs($stop_diff_closed);
                }
            }
        }

    }

    # 1. store the best-matching calclosed exposures in an array
    for my $k_cal (keys %best_calclosed_exp) {
        push @calclosed, $best_calclosed_exp{$k_cal};
    }
    # 2. store the best-matching closed exposures in an array
    for my $k_clo (keys %best_closed_exp) {
        push @closed, $best_closed_exp{$k_clo};
    }

    # 3. science is the rest
    my @closed_and_calclosed = (@closed, @calclosed);
    foreach my $f (@files) {
        my $exp  = get_exposure($f);
        my $flag = 0;
        foreach my $cc (@closed_and_calclosed) {
            if ($cc eq $exp) {
                $flag = 1;
            }
        }
        push @science, $exp if ($flag == 0);
    }

    # sanity check: no exposure may appear in more than one category
    for my $clo (@closed) {
        for my $cal (@calclosed) {
            for my $sci (@science) {
                if (($clo eq $cal) or ($sci eq $cal) or ($clo eq $sci)) {
                    error("\n$cal apears in more than one ($clo == $sci == $cal))!!!!");
                }
            }
        }
    }

    return (\@calclosed, \@closed, \@science, $return_status);
}


sub get_new_observations
{
    # --------------------------------------------------------------
    # returns the list of observation ids to be processed, depending
    # on the command-line mode:
    #  - $DO_IT_FOR_ALL  -> every ODF (testing only)
    #  - $reprocess_all  -> all obs-ids from $OBSLIST_ALL
    #  - 'filemode'      -> obs-ids read from the file named in $ARGV[2]
    #  - 'manual'        -> obs-ids given directly on the command line
    #  - default         -> obs-ids from $OBSLIST_MONTH not yet checked,
    #                       plus the ids listed in $SPECIAL_OBS
    # --------------------------------------------------------------

    # obs-ids used for manual testing; only returned when the
    # '#return @test_ODFS;' line below is uncommented
    my @test_ODFS = (

        # # CALCLOSED
        # # 0960 0204010101 M1U002 M2U002 PNU026
        # '0204010101',
        # # 0942 0134540901 M1S003 M2S017 PNS005
        # '0134540901',
        # # 0122 0112371601 M1S002M1S010M1S012M1S014M1S018M1S020 M2S011M2S013M2S015M2S019M2S021M2S405 PNS001
        # '0112371601',
        #
        # # 0135 0101440901 M1S001M1S012M1S014M1S016M1S018M1S020M1S022M1S024 M2S002M2S017M2S019M2S021M2S023M2S025M2S403M2S404 PNS003
        # '0101440901',
        # # 0652 0151581301 M1U002M1U003M1U004M1U005 M2U002M2U003M2U004M2U005 PNS003
        # '0151581301',
        # # 0668 0160362501 M1S003 M2S004
        # '0160362501',
        # # 0958 0210682701 PNS002
        # '0210682701',
        # # 0962 0305540601 M2S028
        # '0305540601',
        #

        # '0136750301', # 1 CLOSED 0 CALCLOSED
        # '0128720601', # 0 CLOSED 1 CALCLOSED

        # # NO CALCLOSED
        #
        # # SCIENCE
        # # 0948 0207130401
        # '0207130401',
        # # 0873 0206450301
        # '0206450301',
        #
        # '0207090201',
        # '0202860101',
        # '0203770201',
        # '0203360601',
        # '9065000002',
        # '0155557601',
        # '0150100101',
        # '0158760801',
        # '0202680101',
        #
        # # CLOSED
        #
        # # 0875 0203850201 M1S009 M2S010 PNS011
        # '0203850201',
        #
        # # 0230 0086360901 M1S005 M2S006
        # '0086360901',
        # # 0663 0156560101 M1S015 M2S016
        # '0156560101',
        # # 0240 0112971601 PNS003
        # '0112971601',
        # # 0621 0150651101 PNS001
        # '0150651101',
        # # 0914 0203240201 M1S017 M2S018 PNS016
        # '0203240201',
        # # 0961 0305540501 M1U014
        # '0305540501',

        # with errors

        # first
        # '0105470101', # TIE (without correction)
        # '0105470201', # TIE (without correction)
        # '0128120301', # TIE (without correction)
        # '0112971301', # TIE (without correction)
        # '0128120401', # TIE (without correction)
        # '0099280101', # web says SCIENCE , but new and old(!) script say CLOSED

        # second
        # '0109980601', # 2 GTI's
        # '0132530501','0125320401', # TIE

        # third
        #'0081340201', # double web

        # fourth
        #'0110012101', # web
        #'0112590101', # web
        #'0134521401', # TIE (with correction)
        #'0135741301', # TIE (with correction)

        # canceled at
        #'0014740201', # aioclient delivers no .tar

        #'0151581301', # ???

        #'0086360901',
        #'0115890701',

        # aioclient received nothing:
        # '0014740201',
        # '0045940101',
        # '0083150101',
        #
        # '0094380401',
        # '0049560201',
        # '0112450201',
        # '0135950101',
        # '0031740201',

        # different tars
        #'0153750701',
        #'0134521601',

        # fstatistic
        # '0115890701',

        # problems with new calclosed-tabgtigen-search
        #'0094170301',
        #'0086360901',
        # with the shorter one only the following make problems
        #'0112371601',
        #'0006810201',
        #'0009221301',

        #'0082340201', # good

        #'0109141201', # cal PNS001 (0265_0109141201 - not found) !!! ERROR
        #'0116340701', # cal PNU002 (0028_0116340701 - not found) !!! ERROR

        # not found on web
        #'0125911701',
        #'0125911401',

        # forgotten (found on web)
        #'0125912101',
        #'0124711801',
        #'0109461101', # no gti
        #'0125912101',

        #'0127920401',
        #'0134520501',

        #'0099280101', # too much ? closed M2 -> old script says the same
        #'0116340701', # too much ? PN cal -> spectra looks good but shifted

        # '0125920201', # too much ?

        # strange PN
        #'0109141201',
        #'0108860401',
        #'0081340201',
        #'0109461101',

        # mos - too much ???
        #'0151590101',
        #'0137150301',
        #'0144500101',
        # PN wrong ?
        #'0155560801',
        #'0155560701',

        #'0134520601',
        #'0109490201',
        #'0147330101',
        #'0151581301',

        # gti problem - one gti matches two exposures
        #'0151590101',
        #'0137150301',
        #'0144500101',

        # gti differs too much
        '0110012201',
        '0109890301',
        '0147330101',

        # -----------------------------------------------

        '0134520601',
        # - M1U002, M2U002
        #-> old script finds them as calclosed, new one not
        #-> but very strange times in ObsBrowser
        '0125110101',
        # - M2S022
        #-> CALCLOSED (ObsBrowser)
        '0122310301', #- M2U003
        # -> ?
        '0102641701', #- M1S002, M2S003, PNS001
        #-> tabgtigen NOT finished successful

        #* strange times *
        '0127920401', #- M1U002, M2U002
        '0006810201', #- M1S013, M2S014
        '0124711801', #- M1S002, M2U009

        # strange Modes
        '0097610601', #- M1S003, M2S004
        #-> CALCLOSED - Mode: Full Frame Two Nodes
        '0096210101', #, - M2S002
        #-> CALCLOSED - Mode: Free Running

        # doesn't exist in ObsBrowser
        '0110011101', #- M1U002
        '0110980701', #- M1S001,M2S401

        # -----------------------------------------------

    );
    #return @test_ODFS;

    # ------------------------------------------------------------
    # for testing only
    # ------------------------------------------------------------
    if ($DO_IT_FOR_ALL == 1) {
        return test_all_odfs();
    }

    # ------------------------------------------------------------
    # command-line parameter dependent
    # ------------------------------------------------------------

    if ($reprocess_all == 1) {
        printh("reprocess mode. obslist will be taken from $OBSLIST_ALL");
        return read_obsids_from_file($OBSLIST_ALL);
    }

    if ($obsid_mode ne '') {
        if ($obsid_mode eq 'filemode') {
            if (-e $ARGV[2]) {
                # BUG FIX: the message used to report $ARGV[1] although
                # the list is actually read from $ARGV[2]
                printh("filemode. obslist will be taken from $ARGV[2]");
                return read_obsids_from_file($ARGV[2]);
            } else {
                printerrorlog("ERROR !!!: $ARGV[2] doesn't exist");
                exit;
            }
        } elsif ($obsid_mode eq 'manual') {
            shift @ARGV; # remove the -onlysearch
            shift @ARGV; # remove the -n
            printh("manual mode.");
            return @ARGV; # the remaining arguments are the obs-ids
        }
    } else { # normal mode - get obslist from $OBSLIST_MONTH

        my @ODFS         = ();
        my @ODFS_month   = read_obsids_from_file($OBSLIST_MONTH);
        my @ODFS_checked = read_obsids_from_file($CHECKED_OBS);

        # take only obs-ids not checked yet (hash lookup instead of
        # the former nested loops)
        my %already_checked = map { $_ => 1 } @ODFS_checked;
        for my $m (@ODFS_month) {
            push @ODFS, $m unless $already_checked{$m};
        }
        # add the special obs-ids (always re-checked)
        my @ODFS_special = read_obsids_from_file($SPECIAL_OBS);
        push @ODFS, @ODFS_special;

        return @ODFS;
    }

}

# ----------------------------------------------------------------------------------------------- #


sub init
{
    # --------------------------------------------------------------
    # initialisation: change into the bin directory, report the start
    # time, clean up the work directories and export the environment
    # variables needed by the external tools
    # --------------------------------------------------------------

    chdir($BIN_DIR);

    # SAS --> !!! see start_monitoring.sh
    #system(". ${CONFIG_DIR}setsas.sh") and error("couldn't exec setsas.sh");

    # report the start of the run
    my $start_date = `date`;
    chomp $start_date;
    printh("starting monitor.pl on $start_date");

    # empty the weekly output directories (never in simulation mode)
    if (!$simulate) {
        empty_dir($NEW_WEEK_CALCLOSED_DIR);
        empty_dir($NEW_WEEK_CLOSED_DIR);
    }
    empty_dir($IN_DIR);
    empty_dir($WORKING_DIR);

    # export the variables needed by the external programs
    while (my ($name, $value) = each %$EXPORT_VARS) {
        $ENV{$name} = $value;
    }

}

# ----------------------------------------------------------------------------------------------- #

sub end
{
    # --------------------------------------------------------------
    # final clean-up, summary report and program exit
    # --------------------------------------------------------------

    # clean up all temporary directories
    empty_dir($_) for ($IN_DIR, $WORKING_DIR, $CLOSED_TMP_DIR, $CALCLOSED_TMP_DIR);

    # report - summary of the observations and slews handled
    printh("summary");
    printl("- $obs_counter observations (of " . scalar @observations . ") have been checked");
    printl("- found $found_closed CLOSED exposure(s) (in $found_closed_obs observations) and $found_calclosed CALCLOSED exposure(s) (in $found_calclosed_obs observations)");
    printl(" ");
    printl(" Slews:");
    printl("- $slews_counter slews have been checked");
    printl("- $slews_merged_counter slews have been merged");
    printl("- $slews_pseudo_odf pseudo odfs have been created");

    # report observations which failed in the aioclient
    if (@observations_with_aio_errro) {
        printl("\n\t!!! there were ERRORS !!!");
        printl("- the following observations could not be checked because of problems with the aioclient:\n\t\t" . (join "\n\t\t", @observations_with_aio_errro));
    }

    # total run time
    my $elapsed = time() - $script_start_time;
    my ($min, $sec) = get_time_by_sec($elapsed);

    # different closing message depending on whether an error log exists
    if (-e "${LOG_DIR}${script_start_datetime}_errorlog.txt") {
        printh("\nscript finished with errors on " . `date` . "Time taken was: $min minutes $sec seconds.\nFor details see ${LOG_DIR}${script_start_datetime}_log.txt\nand\n${LOG_DIR}${script_start_datetime}_errorlog.txt\n there are errors ! \n");
    }
    else {
        printh("\nscript finished successful on " . `date` . "Time taken was: $min minutes $sec seconds.\nFor details see ${LOG_DIR}${script_start_datetime}_log.txt\n OK \n");
    }

    exit 0;
}


# ----------------------------------------------------------------------------------------------- #


sub update_web
{
    # --------------------------------------------------------------
    # update closed.log and calclosed.log on the repository and
    # publish the weekly 'new' logs on the ftp export area
    # --------------------------------------------------------------

    my ($result) = @_;

    # nothing is published in simulation mode
    return if ($simulate == 1);

    my $calclosed = get_result($result, 'calclosed');
    my $closed    = get_result($result, 'closed');

    printh("starting .log update - will add following lines:\n\ncalclosed:\n$calclosed\n\nclosed:\n$closed");

    # back up the current logs before touching them
    copy_files($OUT_DIR, $LOG_DIR, "calclosed.log", "calclosed.log_${script_start_datetime}");
    copy_files($OUT_DIR, $LOG_DIR, "closed.log",    "closed.log_${script_start_datetime}");

    # append the new obs-ids, then sort and de-duplicate each log
    append_to_file($CALCLOSED_LOG, $calclosed);
    sort_and_unique_file($CALCLOSED_LOG);
    append_to_file($CLOSED_LOG, $closed);
    sort_and_unique_file($CLOSED_LOG);

    # -----------------------------------------------------
    # create new_ODF_closed.log and new_ODF_calclosed.log
    # and copy them to the ftp export area
    # -----------------------------------------------------
    write_to_file($NEW_WEEK_CALCLOSED_LOG, $calclosed);
    write_to_file($NEW_WEEK_CLOSED_LOG, $closed);

    `cp $NEW_WEEK_CALCLOSED_LOG /xvsoc01/ftpexport1/secure/epic_ex/new_week `;
    `cp $NEW_WEEK_CLOSED_LOG /xvsoc01/ftpexport1/secure/epic_ex/new_week `;
}

# ----------------------------------------------------------------------------------------------- #

sub sort_and_unique_file
{
    # -----------------------------------------------------
    # sorts a file (alphanumerically) and removes duplicate
    # lines, rewriting it in place
    # -----------------------------------------------------

    my ($file) = @_;

    # read, de-duplicate, then sort alphabetically
    my @unique = make_array_unique(read_file_array($file));
    my @sorted = sort @unique;

    # rewrite the file with the cleaned content
    write_to_file($file, join('', @sorted));
}

# ----------------------------------------------------------------------------------------------- #

sub get_result
{
    # --------------------------------------------------------------
    # returns a listing (one line per observation, tab-separated) of
    # all exposures of the requested type ('closed' or 'calclosed'),
    # for use in the mail and on the web
    # --------------------------------------------------------------

    my ($result, $type) = @_;

    my @lines;
    for my $rev_observation (sort keys %$result) {
        my ($rev, $obs) = split /_/, $rev_observation;
        my $line = "$rev\t$obs\t";
        # append every exposure of the requested type (if present),
        # each one followed by a single space
        if (exists $result->{$rev_observation}->{$type}) {
            $line .= "$_ " for @{ $result->{$rev_observation}->{$type} };
        }
        push @lines, $line;
    }
    return join '', map { "$_\n" } @lines;
}

# --------------------------------------------------------------------------- #
# ------------------ mail -------------------------------------------------- #
# --------------------------------------------------------------------------- #

sub mail
{
    # --------------------------------------------------------------
    # mail (via sendmail) the results of this run to the team
    # IN: \%result - result hash ('rev_obsid' -> type -> exposures)
    # --------------------------------------------------------------

    my $result = shift;

    my $calclosed = get_result($result, 'calclosed');
    my $closed    = get_result($result, 'closed');
    my $slews     = join "\n", @slews_pseudo_odfs;
    # reformat the slew entries: break each ':'/'-' separated part
    # onto its own indented line
    $slews =~ s/:/\n\t/g;
    $slews =~ s/-/\n\t/g;

    # NOTE(review): the heredoc opener was corrupted in the archived
    # copy ("my $text = <"); restored as an interpolating heredoc.
    my $text = <<EOF;
$script_start_datetime

Your EPIC IDT scanned again the latest archive data for calclosed and closed data.
Please find below the results.


*summary*

observations:
- $obs_counter new observations have been checked
- found $found_closed CLOSED exposure(s) (in $found_closed_obs observations) and $found_calclosed CALCLOSED exposure(s) (in $found_calclosed_obs observations)

slews:
- $slews_counter slews have been checked
- $slews_merged_counter slews have been merged
- $slews_pseudo_odf pseudo odfs have been created


*details*

List of scanned ODF and found CALCLOSED exposures:
$calclosed

List of scanned ODF and found CLOSED exposures:
$closed

List of new merged calclosed slews:
$slews


*info*

The new CALCLOSED/CLOSED are available in the subdirectories 'closed' and 'calclosed'
in $NEW_WEEK_DIR .
They will remain there for a week.

All the old files are stored permanently in the subdirectories 'closed' and 'calclosed'
in $OUT_DIR.

The new merged calclosed slews (for the MOS) are available in
$PSEUDO_ODF_DIR

Not VILSPA user can access the data via
ftp xmm.vilspa.esa.es (login: epic_ex, password: 20epic_ex02)


-----------------------------------
All the best from ESAC
Your EPIC Instrument Dedicated Team

EOF

    print "\n\nTo: $MAIL_ADRESSES\n\n";
    # interactive confirmation (disabled):
    #print "\nsend the mail now ? (y/n)";
    #my $in = <STDIN>;
    #chomp $in;
    #if ($in eq 'n') {
    #    print "\nmail not send. aborted\n";
    #    return;
    #}

    # send the mail
    sendmail(
        'epicmon@sciops.esa.int',
        $MAIL_ADRESSES,
        "New CALCLOSED & CLOSED available at VILSPA",
        $text
    );

}

# ----------------------------------------------------------------------------------------------- #

sub sendmail
{
# --------------------------------------------------------------
# send a mail with sendmail - sendmail wrapper
# --------------------------------------------------------------
# IN: $from, $to, $subject, $text
# Writes the complete message to ${MAIL_DIR}mail_<datetime>.txt and
# pipes that file into /usr/lib/sendmail -t (recipients are taken from
# the headers). In simulate mode the recipient list is replaced by
# $MAIL_ADRESSES_SIMULATE so the real distribution list is never hit.

my $from = shift;
my $to = shift;
my $subject = shift;
my $text = shift;

# simulation: redirect to the test address list
if ($simulate == 1) {
$to = $MAIL_ADRESSES_SIMULATE;
}

my $datetime = get_datetime();

# assemble the message: headers, blank line, body
my $msg = "From: $from \n";
$msg .= "To: $to \n";
$msg .= "Subject: $subject - GMT $datetime\n\n";
$msg .= $text;

# keep a copy of every outgoing mail in $MAIL_DIR
open(FILE,">${MAIL_DIR}mail_${datetime}.txt") or error("couldn't write to mail ...");
print FILE $msg;
close FILE;

# system() returns 0 on success, hence 'and' for the error branch
system("/usr/lib/sendmail -t < ${MAIL_DIR}mail_${datetime}.txt") and die "Error. Couldn't send the mail";
printh("mail was send to: $to");

}

# ----------------------------------------------------------------------------------------------- #

sub get_datetime
{
    # --------------------------------------------------------------
    # Out: a string in the format 'YYYY-MM-DD--HH:MM:SS' (local time)
    # --------------------------------------------------------------
    # The cached copy of this file had two commented print statements
    # broken across lines, leaving stray live '";' tokens - removed.

    my ($day, $month, $year) = (localtime)[3,4,5];
    $year += 1900;
    $month++;            # localtime months are 0-based

    # take the HH:MM:SS field out of the ctime-style string
    # (e.g. "Wed Aug 30 19:10:16 2006")
    my $date = localtime();
    my ($wday, $tmonth, $tday, $time, $tyear) = split /\s+/, $date;

    my $datetime = sprintf("%04d-%02d-%02d--%s", $year, $month, $day, $time);
    return $datetime;
}

# ----------------------------------------------------------------------------------------------- #

# --------------------------------------------------------------------------- #
# ------------------ specific --------------------------------------------- #
# --------------------------------------------------------------------------- #


sub push_result
{
    # --------------------------------------------------------------
    # fill the datastructure %result
    # --------------------------------------------------------------
    # %result is a hash of hashes of arrays:
    #   rev_obsid => { calclosed => [exp, ...],
    #                  closed    => [exp, ...],
    #                  science   => [exp, ...] }
    # Appends $exp to $result->{$rev_obs}{$what} unless it is already
    # recorded there (exposures are never stored twice).

    my ($result, $rev_obs, $what, $exp) = @_;

    # duplicate check against the (possibly not yet existing) list
    my @existing = @{ $result->{$rev_obs}->{$what} || [] };
    return if grep { $_ eq $exp } @existing;

    push @{$result->{$rev_obs}->{$what}}, $exp;
    return;
}



# ----------------------------------------------------------------------------------------------- #

sub get_revolution
{
    # --------------------------------------------------------------
    # return the revolution number as string. get it from the filename
    # --------------------------------------------------------------
    # Looks for the ODF tar file RRRR_<obs>.TAR in $WORKING_DIR and
    # returns the 4-digit revolution prefix. Returns undef when the
    # file is missing or has an unexpected name (the original returned
    # a stale $1 from an earlier, unrelated match in that case).

    my $obs = shift;
    my $file = get_file($WORKING_DIR, ".*$obs.TAR");
    return undef unless $file and $file =~ m/(\d\d\d\d)_$obs\.TAR/;
    return $1;
}

# ----------------------------------------------------------------------------------------------- #

sub get_exposure
{
    # --------------------------------------------------------------
    # return the exposure as string. get it from the filename
    # --------------------------------------------------------------
    # i.e. IN:  0960_0204010101_M1S00120IME.FIT
    #      OUT: M1S001
    # The exposure ID is the 6 characters after the 16-character
    # 'RRRR_OBSERVATION_' prefix. Returns undef for names that are too
    # short (the original returned a stale $1 from an earlier match).

    my $exp_file = shift;
    return undef unless defined $exp_file and $exp_file =~ m/.{16}(......)/;
    return $1;
}


# ----------------------------------------------------------------------------------------------- #


sub error
{
# --------------------------------------------------------------
# error handling. shut down the script
# --------------------------------------------------------------
# Logs $message to both the error log and the screen/main log, dumps
# the current %result, reports the elapsed run time, and terminates.
# NOTE(review): exits with status 0 even on error, so cron/callers
# cannot detect the failure from the exit code - confirm intended.

my $message = shift;

printerrorlog("!!! ERROR !!!\n\t$message");
printh("!!! ERROR !!!\n\t$message");
print_result(\%result);


# only for testing
#compare_exposures_with_web(\%result);
#test_doubles(\%result);
#mail(\%result);

# report
my $script_end_time = time();
my $diff = $script_end_time - $script_start_time;
my ($min, $sec) = get_time_by_sec($diff);
printerrorlog("\n!!!! script halts because of errors on " . `date` . "Time taken was: $min minutes $sec seconds\n\n ERROR ");

# point the user at the log files when an error log exists
if (-e "${LOG_DIR}${script_start_datetime}_errorlog.txt" ) {
printh("\nFor details see ${LOG_DIR}${script_start_datetime}_log.txt\nand\n${LOG_DIR}${script_start_datetime}_errorlog.txt\n there are errors ! \n");
}
printh("");
exit 0;

}

# ----------------------------------------------------------------------------------------------- #

# --------------------------------------------------------------------------- #
# -------------------------------- converters ------------------------------- #
# --------------------------------------------------------------------------- #

sub get_time_by_sec
{
    # Convert a duration in seconds into whole minutes plus remainder.
    # i.e. IN:  70 (seconds)
    #      OUT: (1, 10)  -> 1 minute, 10 seconds

    my ($total_seconds) = @_;

    my $whole_minutes = int($total_seconds / 60);
    my $remainder     = $total_seconds % 60;

    return ($whole_minutes, $remainder);
}

# ----------------------------------------------------------------------------------------------- #

sub convert_scientific_to_float
{
    # Render a number given in scientific notation as a fixed-point
    # string with two decimals.
    # i.e. 2.1859972e+08 is converted to "218599720.00"

    my ($raw) = @_;

    # drop the first '+' (the exponent sign, e.g. in "e+08");
    # sprintf's numification handles the rest
    (my $cleaned = $raw) =~ s/\+//;

    return sprintf('%.2f', $cleaned);
}

# --------------------------------------------------------------------------- #
# -------------------------------- SAS -------------------------------------- #
# --------------------------------------------------------------------------- #

sub fstatistic
{
# --------------------------------------------------------------
# wrapper for fstatisitc - analyse the spectra
# --------------------------------------------------------------

# IN: - .*_M1X00000PEH.FIT (looks for it in $WORKING_DIR)
# - options
# OUT: - the value returned by fstatistic (the line events)

my $file = shift;
my $options = shift;

# execute fstatistic (external FTOOLS binary); grab the 'points' line
# and cut out the fixed columns that hold the count
my $value = `fstatistic $WORKING_DIR$file $options |grep points|cut -c57-61`;

# extract the nuber
# NOTE(review): when the match fails only an error is logged and $1
# may still hold a previous match (stale) - confirm acceptable here
$value =~ m/.*?(\d+).*?/ or printerrorlog("fstatistic-error returned no number for following file: $file ($options)\n\n");
$value = $1;

# treat an empty extraction as zero line events
$value eq '' and $value = 0;

printl("Mn line events: $value");

return $value;
}

# ----------------------------------------------------------------------------------------------- #

sub tabgtigen
{
    # --------------------------------------------------------------
    # wrapper for tabgtigen - get the gti from the house keeping file
    # --------------------------------------------------------------
    # IN:  - house-keeping file pattern, e.g. .*_M1X00000PEH.FIT
    #        (looked up in $WORKING_DIR)
    #      - parameter string for tabgtigen
    # OUT: - '' on success (hk_gti.fit written for further use by idl),
    #        otherwise an error message string

    my $house_keeping_data_file = shift;
    my $expression = shift;

    # get_file() returns 0 when no (or several) files match; the
    # original tested '$file == 0', which numifies the filename and
    # warns - a plain truth test is the intended check
    my $file = get_file($WORKING_DIR, $house_keeping_data_file);
    return "Error. no house keeping file found ($house_keeping_data_file)" if !$file;
    #printh("tabgtigen table=$file gtiset=hk_gti.fit $expression");

    # execute tabgtigen (external SAS task) in the working directory
    chdir($WORKING_DIR);
    my $status = system("tabgtigen table=$file gtiset=hk_gti.fit $expression");
    return "tabgtigen NOT finished successful ($house_keeping_data_file)\n\n)" if $status != 0;

    return '';

}

# ----------------------------------------------------------------------------------------------- #
sub get_exp_times
{
# --------------------------------------------------------------
# wrapper for idl - extracts the exposure gti from the .fit file
# --------------------------------------------------------------

# IN: - idl-script (gen_exp_gti - in $IDL_DIR)
# - exp. file (created with tabgtigen befor - in $WORING_DIR)
# - file to write the exposure-name (exp_name.dat) - used by idl
# OUT: - $exp_start - exp. start time
# - $exp_stop - exp. stop time
# NOTE(review): an idl failure is only detected when its output
# contains the word 'error' - confirm this is reliable.

my $idl_file = shift; # idl script
my $exp_file = shift; # exposure (.fit) file to look for times
my $dat_file = shift; # file to write the output (the times)

create_file($dat_file, $exp_file); # writes the exp.-file-name in $dat_file (for idl)

# execute IDL (external tool, run from its own directory)
chdir($IDL_DIR);
my $ret = `idl $idl_file`;
error("idl NOT finished successful ($idl_file)") if $ret =~ m/error/i;
chdir($WORKING_DIR);

# get the times - convert it with sas-tool ttag
my $exp_start = ttag('start_exp.dat');
my $exp_stop = ttag('stop_exp.dat');

# clean up the intermediate .dat files and the gti table
remove_files($WORKING_DIR,'.*\.dat', 'hk_gti\.fit');

return ($exp_start, $exp_stop);
}

sub get_hk_times
{
# --------------------------------------------------------------
# wrapper for idl - extracts the gti's from the house-keeping files
# --------------------------------------------------------------

# IN: - readmyfits_?? file (in $IDL_DIR)
# - hk_gti.fit (created with tabgtigen befor - in $WORING_DIR)
# -
# OUT: - \@gti_start
# - \@gti_stop
# NOTE(review): as in get_exp_times, an idl failure is only detected
# when its output contains the word 'error'.

my $idl_file = shift;


# IDL (SAS tool)
# IN: - $idl_file
# - hk_gti.fit
# OUT:
# - gti_start.dat
# - gti_stop.dat

# execute IDL (external tool, run from its own directory)
chdir($IDL_DIR);
my $ret = `idl $idl_file`;
error("idl NOT finished successful ($idl_file)") if $ret =~ m/error/i;
chdir($WORKING_DIR);

# read back the .dat files IDL wrote and convert the times
my @gti_start = get_gti('gti_start.dat');
my @gti_stop = get_gti('gti_stop.dat');

# clean up the intermediate .dat files and the gti table
remove_files($WORKING_DIR,'.*\.dat', 'hk_gti\.fit');

return (\@gti_start, \@gti_stop);
}

# ----------------------------------------------------------------------------------------------- #

sub get_gti
{
    # IN:  file with GTI times, one per line (scientific notation)
    # OUT: the times cleaned up (whitespace removed, converted to
    #      fixed-point strings), each one logged via printl()

    my ($file) = @_;

    my @cleaned_times;
    for my $raw_time (read_file_array($file)) {
        chomp $raw_time;                                  # newline
        $raw_time =~ s/\s*//g;                            # spaces
        my $fixed = convert_scientific_to_float($raw_time);
        push @cleaned_times, $fixed;
        printl("GTI - ($file) --->$fixed<----");
    }
    return @cleaned_times;
}

# ----------------------------------------------------------------------------------------------- #

sub ttag
{
# IN: file with GTM time
# OUT: ttag the GTM time and return the time (in sec)
# Reads the single time value from $file and converts it with the
# external 'ttag' tool; aborts the whole script if ttag fails or
# produces no output.

my $file = shift;

my $time = read_file($file);
chomp $time;
# external converter tool
my $newtime = `ttag $time`;
error("ttag NOT finished successful ($time)") if ($newtime =~ m/error/i or $newtime eq '');
chomp $newtime;

#printl("ttag--$time-->$newtime-");

return $newtime;
}

# ----------------------------------------------------------------------------------------------- #

sub load_odf_via_aio
{
# Fetch observation $odf from the XSA archive via the AIO client and
# unpack it into $WORKING_DIR. RGS and OM files are removed since
# only the EPIC instruments are analysed.
# Returns 1 on success (or immediately when $NO_AIO is set for
# offline/debug runs), 0 when unpacking fails.

my $odf = shift;

return 1 if ($NO_AIO == 1);

# get the file from the archieve
aio($odf);

# extract odf from 'in' to 'working'
gtar_odf($WORKING_DIR,$odf) or return 0;

#extracts the rest of the odf from 'working' to 'working'
untar($WORKING_DIR, $WORKING_DIR."*".$odf.".TAR") or return 0;

# remove RGS and OM files
remove_files($WORKING_DIR,'.*R1.*','.*R2.*','.*OM.*');

return 1;
}

# ----------------------------------------------------------------------------------------------- #

sub aio
{
# --------------------------------------------------------------
# wrapper for the aioclient
# --------------------------------------------------------------

# getting the $odf.tar.gz file via aioclient
# stores it in $IN_DIR
# Aborts the script (via error) when the client exits non-zero.

my $odf = shift;

my $start_time = time();
printh ("Executing aioclient for observation $odf");

# execute aio client to get the file (must run from its own directory)
chdir($AIO_DIR);
my $status = system("./aioclient -S xsa.vilspa.esa.es -O $IN_DIR -P 2002 -L \"GET obsno=$odf level=ODF\" -prop");
error("AIO client NOT finished successful") if $status != 0;
chdir($BIN_DIR);

# report how long the download took
my $end_time = time();
my $diff = $end_time - $start_time;
printl("aioclient execution for observation $odf successful finished.\nTime taken was $diff seconds");

}

# ----------------------------------------------------------------------------------------------- #


# --------------------------------------------------------------------------- #
# ----------------------------- files / dirs -------------------------------- #
# --------------------------------------------------------------------------- #

sub append_to_file
{
    # Append $text to $file (the file is created when missing).
    my ($file, $text) = @_;

    open my $fh, '>>', $file or die("couldn't append to $file");
    print {$fh} $text;
    close $fh;
}


# ----------------------------------------------------------------------------------------------- #

sub write_to_file
{
    # Write $text to $file, truncating any previous content.
    my ($file, $text) = @_;

    open my $fh, '>', $file or die("couldn't write to $file");
    print {$fh} $text;
    close $fh;
}

# ----------------------------------------------------------------------------------------------- #

sub clean_up_all
{
# --------------------------------------------------------------
# empties all directories
# --------------------------------------------------------------
# Removes every regular file from the working/in/out/tmp directories
# (see empty_dir); honours the global $NO_RM_FILES switch there.

my @dirs = (
$IN_DIR,
$OUT_DIR,
$WORKING_DIR,
$CALCLOSED_TMP_DIR,
$CLOSED_TMP_DIR,
#$EVL_DIR,
);
foreach (@dirs) {
empty_dir($_);
}
}

# ----------------------------------------------------------------------------------------------- #

sub backup_files
{
# for gti testing
# Copies the four intermediate .dat files from $WORKING_DIR to
# $TEST_DIR, prefixing each copy with the exposure file name so
# several exposures can be compared later.
my $exp_file = shift;
chdir($WORKING_DIR);
my @files = qw (start_exp.dat stop_exp.dat gti_start.dat gti_stop.dat);
printd("backup files: start_exp.dat stop_exp.dat gti_start.dat gti_stop.dat to $TEST_DIR");
for my $f (@files) {
`cp -p $f ${TEST_DIR}${exp_file}_$f`;
}
}

# ----------------------------------------------------------------------------------------------- #

sub remove_files
{
# IN: - $dir to look in
# - patternlist @patterns
# OUT: - filnames matched by one of the pattern will be removed
# Honours the global $NO_RM_FILES switch (debugging: keep all files).

my $dir = shift;
my @patterns = @_;

return if ($NO_RM_FILES == 1);

my @files = get_files($dir,@patterns);

foreach my $file (@files) {
# system() returns 0 on success, hence 'and' for the error branch
system("rm $dir$file") and error("Couldn't remove $file - (rm $dir$file) ");
printd("rm $dir$file")
}
}

# ----------------------------------------------------------------------------------------------- #

sub copy_files
{
# copys files (matched by pattern) from $from_dir to $to_dir
# Skipped entirely in $FAST mode (debugging shortcut).

my $from_dir = shift;
my $to_dir = shift;
my @patterns = @_;

return if ($FAST == 1);

my @files = get_files($from_dir,@patterns);

foreach my $file (@files) {
# cp -p preserves timestamps/permissions; system() returns 0 on success
system("cp -p $from_dir$file $to_dir") and error("Couldn't copy $file - (cp -p $from_dir$file $to_dir) ");
printd("cp -p $from_dir$file $to_dir")
}
}

# ----------------------------------------------------------------------------------------------- #

sub move_files
{
# move files (matched by pattern) from $from_dir to $to_dir
# Skipped entirely in $FAST mode (debugging shortcut).

my $from_dir = shift;
my $to_dir = shift;
my @patterns = @_;

return if ($FAST == 1);

my @files = get_files($from_dir,@patterns);

foreach my $file (@files) {
# system() returns 0 on success, hence 'and' for the error branch
system("mv $from_dir$file $to_dir") and error("Couldn't move $file - (mv $from_dir$file $to_dir) ");
printd("mv $from_dir$file $to_dir")
}
}

# ----------------------------------------------------------------------------------------------- #

sub get_files
{
    # IN:  - $dir to look in
    #      - patternlist @patterns (treated as regexes)
    # OUT: - filenames matched by one of the patterns
    # '.' and '..' are never returned, and each file is returned at
    # most once even when several patterns match it (the original
    # pushed one copy per matching pattern, which made remove_files
    # attempt to delete the same file twice).

    my $dir = shift;
    my @patterns = @_;

    opendir(DIR, $dir) or error("Couldn't open dir: $dir");
    my @files = readdir(DIR);
    closedir(DIR);

    my @matched_files;
    foreach my $file (@files) {
        next if ($file eq '.' or $file eq '..');
        foreach my $pat (@patterns) {
            if ($file =~ m/$pat/) {
                push @matched_files, $file;
                last;    # one entry per file, no matter how many patterns hit
            }
        }
    }
    return @matched_files;
}

# ----------------------------------------------------------------------------------------------- #

sub get_file
{
    # Like get_files(), but expect exactly one match: return the single
    # matching filename, or 0 when zero or several files match.

    my @candidates = get_files(@_);
    return 0 unless scalar @candidates == 1;
    return $candidates[0];
}

# ----------------------------------------------------------------------------------------------- #

sub create_file
{
    # Create (or truncate) $file and write $data into it.
    # Uses error() - i.e. aborts the whole script - when the file
    # cannot be opened. (Fixed: the original ended with 'close FILE,'
    # - a stray trailing comma instead of a semicolon.)
    my $file = shift;
    my $data = shift;

    open(FILE,">$file") or error("couldn't open/create file ($file)");
    print FILE $data;
    close FILE;
}

# ----------------------------------------------------------------------------------------------- #

sub read_file
{
    # Slurp $file and return its lines joined with "\n".
    # Restored from the garbled cache copy: the readline '<FILE>' had
    # been stripped, and join used '\n' (a literal backslash-n) instead
    # of a newline.
    # NOTE(review): lines keep their trailing newlines, so multi-line
    # files come back with doubled separators; the only caller (ttag)
    # reads single-line files - confirm before reusing elsewhere.
    my $file = shift;

    open(FILE,"<$file") or error("couldn't open file ($file)");
    my @lines = <FILE>;
    close FILE;
    return join "\n", @lines;
}

# ----------------------------------------------------------------------------------------------- #

sub read_file_array
{
    # Read $file and return its lines (newlines preserved) as a list.
    # Restored from the garbled cache copy: the readline '<FILE>' had
    # been stripped ('my @lines = ;').
    my $file = shift;

    open(FILE,"<$file") or error("couldn't open file ($file)");
    my @lines = <FILE>;
    close FILE;

    return @lines;
}



# ----------------------------------------------------------------------------------------------- #

sub read_obsids_from_file
{
    # Read a list of 10-digit observation IDs from $file, one per line,
    # stripping surrounding whitespace/newlines.
    # Restored from the garbled cache copy: the readline '<FILE>' had
    # been stripped ('my @lines = ;').
    my $file = shift;

    open(FILE,"<$file") or error("couldn't open file ($file)");
    my @lines = <FILE>;
    close FILE;
    for (@lines) {s/\s*(\d{10})\s*/$1/;} # delete newlines and spaces
    return @lines;
}

# ----------------------------------------------------------------------------------------------- #

sub untar
{
# extract $file (.tar) into $to_dir
# Returns 0 when tar fails (printerror returns 1, so the
# 'and ... and return 0' chain fires); on success returns the true
# value of the final printl call.
my $to_dir = shift;
my $file = shift;

chdir($to_dir);
# system() returns 0 on success, hence 'and' for the error branch
system("tar -xf $file") and printerror("Error: Couldn't extract (untar) file ($file). Perhaps disk is full or file not found\n\n") and return 0;
chdir($BIN_DIR);

printl("$file sucessfully extract to $to_dir")
}

# ----------------------------------------------------------------------------------------------- #

sub tar_files
{
# creates $tar_file.tar file into $to_dir with files (within $from_dir) matches pattern
# Skipped entirely in $FAST mode; does nothing when no file matches.
# NOTE(review): does not chdir back to $BIN_DIR afterwards, unlike
# untar/gtar_odf - confirm callers do not rely on the cwd.
my $from_dir = shift;
my $to_dir = shift;
my $tar_file = shift;
my @patterns = @_;

return if ($FAST == 1);

my @files = get_files($from_dir,@patterns);

if ( scalar @files == 0 ) {
printd("no files found to create tarfile $tar_file");
return;
}

# tar from inside $from_dir so the archive holds bare filenames
chdir($from_dir);
my $files = join " ", @files;
printd("tar " . scalar @files. " files\n ---> tar -cf $to_dir$tar_file $files" );
system("tar -cf $to_dir$tar_file $files") and error("Couldn't create tar - (tar -cf $to_dir$tar_file $files)");

}

# ----------------------------------------------------------------------------------------------- #

sub gzip_files
{
# creates $tar_file.gz file into $to_dir with files (within $from_dir) matches pattern
# Same as tar_files but gzip-compressed (tar -z). Skipped in $FAST mode.
# NOTE(review): the error message below says 'tar -cf' although the
# command run is 'tar -zcf' (message left unchanged here).
my $from_dir = shift;
my $to_dir = shift;
my $tar_file = shift;
my @patterns = @_;

return if ($FAST == 1);

my @files = get_files($from_dir,@patterns);

if ( scalar @files == 0 ) {
printd("no files found to create tarfile $tar_file");
return;
}

# tar from inside $from_dir so the archive holds bare filenames
chdir($from_dir);
my $files = join " ", @files;
printd("tar " . scalar @files. " files\n ---> tar -zcf $to_dir$tar_file $files" );
system("tar -zcf $to_dir$tar_file $files") and error("Couldn't create tar - (tar -cf $to_dir$tar_file $files)");

}

# ----------------------------------------------------------------------------------------------- #

sub gtar_odf
{
# extract $odf.tar.gz in the $IN_DIR to $dir
# Returns 0 when gtar fails (printerror returns 1, so the chained
# 'and return 0' fires); on success returns printl's true value.
my $dir = shift;
my $odf = shift;

chdir($dir);
system("gtar -zxf $IN_DIR$odf.tar.gz") and printerror("Error: Couldn't extract (gtar) odf ($odf). Perhaps disk is full or file not found\n\n") and return 0;
# (! note !: syscalls return 0 if success !!!. ---> and )
chdir($BIN_DIR);
printl("$IN_DIR$odf.tar.gz sucessfully extract to $dir");
}

# ----------------------------------------------------------------------------------------------- #


sub empty_dir
{
# rm $dir/*
# Deletes all regular files below $dir (recursively, via find|xargs);
# subdirectories themselves are kept. Honours the global
# $NO_RM_FILES switch (debugging: keep all files).
my $dir = shift;

return if ($NO_RM_FILES == 1);
printd("empty dir: $dir");

if (dir_not_empty($dir)) {
#system("rm $dir*"); # !!!!! error messages for dirs !!!! # and error "Couldn't remove $dir - (rm $dir*) ";
# if you have too much files
system("find $dir -type f -print | xargs rm");

}
}

# ----------------------------------------------------------------------------------------------- #

sub dir_not_empty
{
    # IN:  directory name
    # OUT: 1 if the directory holds anything besides '.' and '..',
    #      0 otherwise

    my ($dir) = @_;

    opendir(DIR, $dir) or error("Couldn't open $dir");
    my @entries = readdir(DIR);
    closedir(DIR);

    # every directory contains at least '.' and '..'
    return scalar @entries > 2 ? 1 : 0;
}

# --------------------------------------------------------------------------- #
# ----------------------------- arrays -------------------------------------- #
# --------------------------------------------------------------------------- #


sub make_array_unique
{
    # Remove duplicate elements from the input list, keeping the first
    # occurrence of each element and preserving the original order.

    my @items = @_;

    my %already_seen;
    my @deduplicated = grep { !$already_seen{$_}++ } @items;

    return @deduplicated;
}


# --------------------------------------------------------------------------- #
# ----------------------------- output -------------------------------------- #
# --------------------------------------------------------------------------- #

sub printh
{
    # Print $message framed as a section header (surrounded by comment
    # rules) to STDOUT and append the same text to the run log.
    my ($message) = @_;

    my $rule   = "#--------------------------------------------------------------\n";
    my $framed = "\n" . $rule . "#\t" . $message . "\n" . $rule;

    print $framed;
    printlog($framed);
    return 1;
}

# ----------------------------------------------------------------------------------------------- #

sub printl
{
    # Print a single tab-indented line to STDOUT and append it to the
    # run log. (Fixed: 'my $output .= ...' concatenated onto a fresh
    # undef lexical, raising an uninitialized-value warning under -w.)
    my $message = shift;
    my $output = "\t" . $message . "\n";
    print $output;
    printlog($output);
    return 1;
}


# ----------------------------------------------------------------------------------------------- #

sub printerror
{
    # Print an error line to STDOUT and record it in the error log.
    # (Fixed: 'my $output .= ...' concatenated onto a fresh undef
    # lexical, raising an uninitialized-value warning under -w.)
    my $message = shift;
    my $output = $message . "\n";
    print $output;
    printerrorlog($output);
    return 1;
}

# ----------------------------------------------------------------------------------------------- #

sub printd
{
    # Print a debug line (prefixed with '-->') to STDOUT and append it
    # to the run log. (Fixed: 'my $output .= ...' concatenated onto a
    # fresh undef lexical, raising an uninitialized-value warning
    # under -w.)
    my $message = shift;
    my $output = "--> $message \n";
    print $output;
    printlog($output);

    return 1;

}

# ----------------------------------------------------------------------------------------------- #

sub printlog
{
    # Append $message to the per-run log file, whose name is derived
    # from the script start time. Dies when the log cannot be opened.
    # (Removed an unused local '$datetime = get_datetime()' - the log
    # file name uses $script_start_datetime, not the current time.)

    my $message = shift;

    open(LOG, ">>${LOG_DIR}${script_start_datetime}_log.txt") or die "couldn't open log";
    print LOG $message;
    close(LOG);
    return 1;

}

# ----------------------------------------------------------------------------------------------- #

sub printerrorlog
{
# write message to the error_log
# Also echoes the message via printh(), which itself writes to the
# normal run log - so every error appears in both log files.
# NOTE(review): $datetime below is computed but never used (the file
# name uses $script_start_datetime) - left unchanged here.

my $message = shift;

printh($message);

my $datetime = get_datetime();
open(LOG, ">>${LOG_DIR}${script_start_datetime}_errorlog.txt") or die "couldn't open errorlog";
print LOG $message;
print LOG "\n";
close(LOG);
return 1;

}
# ----------------------------------------------------------------------------------------------- #

sub print_result
{
    # Pretty-print the %result structure (see push_result):
    #   rev_obsid
    #       calclosed:  EXP  EXP ...
    #       closed:     EXP ...
    #       science:    EXP ...
    # Output goes through printh/printl, i.e. to screen and run log.

    my ($result) = @_;    # ref to %result

    printh("print result");

    my $output;
    for my $rev_observation (keys %$result) {
        $output .= "$rev_observation\n";
        # one line per class: calclosed, closed, science
        for my $class (keys %{$result->{$rev_observation}}) {
            $output .= "\t\t" . $class . ":\t";
            for my $exposure (@{$result->{$rev_observation}->{$class}}) {
                $output .= $exposure . "\t";
            }
            $output .= "\n";
        }
    }
    printl($output);
}

# ----------------------------------------------------------------------------------------------- #

sub print_new_log
{
    # Append $message to a named secondary log: ${LOG_DIR}${file}.txt .
    my ($file, $message) = @_;

    open(LOG2, ">>${LOG_DIR}${file}.txt") or die "couldn't open log2";
    print LOG2 $message;
    close(LOG2);
}



# ----------------------------------------------------------------------------------------------- #
# just for testing
# ----------------------------------------------------------------------------------------------- #

sub test_doubles
{
# tests if one exposure appears in two "classes" i.e. CLOSED and SCIENCE
# Debug/consistency check over %result; any exposure found in two
# different classes of the same observation is reported. The message
# goes through printerrorlog (which already echoes via printh) and
# printh again, so it is shown twice on screen.
my $result = shift; # ref to %result

my $msg = "test for doubles";
foreach my $rev_observation (keys %$result) {
foreach my $what (keys %{$result->{$rev_observation}}) {
foreach my $exposure (@{$result->{$rev_observation}->{$what}} ) {
foreach my $what2 (keys %{$result->{$rev_observation}}) {
# only compare each unordered pair of classes once
last if ($what eq $what2);
foreach my $exposure2 (@{$result->{$rev_observation}->{$what2}} ) {
if ($exposure2 eq $exposure) {
$msg .= "\n!!! ERROR in $rev_observation !!!\t-->$exposure appears in $what and in $what2";
}
}
}
}
}
}
printerrorlog($msg);
printh($msg);
}


# ----------------------------------------------------------------------------------------------- #

sub print_env
{
    # Debug aid: dump the AIO- and SAS-related entries of %ENV.
    print "\n---------------\nsearching in ENV\n---------------\n";
    for my $var_name (keys %ENV) {
        printh("$var_name --> $ENV{$var_name}")
            if $var_name =~ /aio/i or $var_name =~ /sas/i;
    }

}

# ----------------------------------------------------------------------------------------------- #

sub test_all_odfs
{
    # Return all observation IDs (the 10-digit field) listed on the web
    # page 'http://xmm.vilspa.esa.es/~xmmdoc/EPIC/closed.log'.
    # Lines have the form 'RRRR    OBSID ...'.

    my $page = get('http://xmm.vilspa.esa.es/~xmmdoc/EPIC/closed.log');
    # get() returns undef on failure; the original only tested eq ''
    # (which warned on undef before dying)
    die 'couldnt get html-page ' if (!defined $page or $page eq '');

    my @odfs = ();
    while ( $page =~ m/^(\d{4})\s{4}(\d{10}).*$/mg ) {
        push @odfs, $2;
    }
    return @odfs;
}

# ----------------------------------------------------------------------------------------------- #

sub get_doubleodfs
{
    # return list of exposures which are double on the web
    # just for testing/debuging
    # The closed.log and calclosed.log pages are handled identically,
    # so the duplicated 25-line body is factored into a helper.

    _report_double_odfs('CLOSED',
        'http://xmm.vilspa.esa.es/~xmmdoc/EPIC/closed.log');
    _report_double_odfs('CALCLOSED',
        'http://xmm.vilspa.esa.es/~xmmdoc/EPIC/calclosed.log');
}

sub _report_double_odfs
{
    # Fetch $url, collect the 10-digit observation IDs listed there,
    # and print those that occur more than once. A pair is pushed once
    # per extra occurrence per pass, so a duplicate appears twice in
    # @doubles - hence the final division by 2.
    my ($label, $url) = @_;

    my $page = get($url);
    die 'couldnt get html-page ' if (!defined $page or $page eq '');

    my @odfs = ();
    while ( $page =~ m/^(\d{4})\s{4}(\d{10}).*$/mg ) {
        push @odfs, $2;
    }

    my @doubles;
    foreach my $o (@odfs) {
        my $count = 0;
        foreach my $o2 (@odfs) {
            if ($o eq $o2) {
                $count++;
                if ($count > 1) {
                    push @doubles, $o ;
                }
            }
        }
    }
    printh("$label: following exp are on the web twice");
    print join "\n", @doubles;
    print "\ninsg." . (scalar @doubles / 2). "\n\n";
}

# ----------------------------------------------------------------------------------------------- #

sub mark_obs_as_checked
{
    # Record $obs as processed by appending it to $CHECKED_OBS
    # (checked_obs.txt). Skipped entirely in simulation mode.
    my ($obs) = @_;

    return if $simulate == 1;

    open(FILE,">>$CHECKED_OBS") or error("couldn't write to $CHECKED_OBS ...");
    print FILE "$obs\n";
    close FILE;
}



# ----------------------------------------------------------------------------------------------- #
# sub is_obs_checked
# {
# # returns 1 if obs is in checked_obs.txt
# my $obs = shift;
#
# my $found = 0;
# open(FILE,"<$CHECKED_OBS") or error("couldn't open $CHECKED_OBS ...");
# while (<FILE>){
# $found = 1 if (m/$obs/);
# }
# close FILE;
# $found == 1 ? return 1 : return 0;
# }


# --------------------------------------------------------------------------- #
# ------------------ compare with the web ---------------------------------- #
# --------------------------------------------------------------------------- #

sub get_exposure_from_web
{
# my ($m1, $m2, $pn) = get_exposure_from_web('calclosed', '0162160601' );
# Fetch the public closed/calclosed log page and return the exposure
# IDs listed there for the given 'RRRR_OBSID' string, split into
# single six-character IDs via split_exposures().

my $what = shift; # closed or calcloed
my $rev_observation = shift;

# 'RRRR_OBSID' -> OBSID (from position 5) and RRRR (first 4 chars)
my $observation = substr $rev_observation, 5,10;
my $rev = substr $rev_observation, 0,4;

#print_new_log('web_test_log.log', "no observation given\n") if $observation eq '';

my $page;
if ($what eq 'closed') {
$page = get('http://xmm.vilspa.esa.es/~xmmdoc/EPIC/closed.log');
} elsif ($what eq 'calclosed') {
$page = get('http://xmm.vilspa.esa.es/~xmmdoc/EPIC/calclosed.log');
}

die 'couldnt get html-page ' if ($page eq '');

# collect every exposure field behind 'REV OBSID' lines
my @web_exp;
while ($page =~ m/^$rev\s\s\s\s?$observation\s*?(.*)$/mg ){

push @web_exp, split /\s+/, $1;

#print_new_log('web_test_log.log', "could not find anything for obs: $observation\n") if $1 eq '';
}

@web_exp = split_exposures(@web_exp);
return @web_exp;
}

# ----------------------------------------------------------------------------------------------- #

sub split_exposures
{
    # Split concatenated exposure IDs into single six-character IDs.
    # IN:  (M1S008M1U002, M2S002M2U003)
    # OUT: (M1S008, M1U002, M2S002, M2U003)
    # For an empty input list this returns '' (a single empty string) -
    # callers rely on that sentinel value.

    my @concatenated = @_;    # M1, M2, PN

    return '' if scalar @concatenated == 0;

    my @single_ids;
    foreach my $chunk (@concatenated) {
        # per chunk: all M1 IDs first, then M2, then PN (as before)
        for my $camera (qw(M1 M2 PN)) {
            while ( $chunk =~ m/($camera....)/g ) {
                push @single_ids, $1;
            }
        }
    }

    return @single_ids;
}


# ----------------------------------------------------------------------------------------------- #

sub compare_exposures_with_web
{
# Consistency check (testing aid): for every observation in %$result,
# compare the locally found closed/calclosed exposures against the
# lists published on the EPIC web pages, in both directions:
#   1. every local exposure must appear on the web page
#   2. every web exposure must have been found locally
# Mismatches are marked with ERROR in the report, which is written to
# both the error log and the screen/main log.
my $result = shift;

my $output;
printh("compare exposures with web");
foreach my $rev_observation (keys %$result) {
$output .= "$rev_observation\n";

foreach my $what (keys %{$result->{$rev_observation}}) { # foreach closed, calclosed
# science exposures are not published on these pages
next if ($what eq 'science');
#next if (scalar @{$result->{$rev_observation}->{$what}} == 0);

$output .= "\t\t";
$output .= $what; # calclosed, closed, science
$output .= ":\n\t\t\t";

my @web_exp = get_exposure_from_web($what, $rev_observation);

# 1. control if all in %result is found on the web
$output .= "\n\t\t --> checking if not found to much\n\t\t\t";
foreach my $exposure (@{$result->{$rev_observation}->{$what}} ) {
my $ok = 0;
my $web = '';
foreach (@web_exp) {
if ($exposure eq $_) {
$ok = 1;
$web = $_;
last;
}
}

if ($ok == 1) {
$output .= $exposure . " <---- (Ok. found on web) ---> " . $web . " (web)\t\tOK";
$output .= "\n\t\t\t";
} else {
$output .= $exposure . " ($rev_observation - not found) !!! \t\tERROR" ;
$output .= "\n\t\t\t";
}
}

# 2. also to controll that nothing has been forgoten
$output .= "\n\t\t --> checking if nothing was forgoten\n";
foreach my $w (@web_exp) {

# split_exposures returns '' for an empty web list - skip it
next if ($w eq '');
$output .= "\t\t\tfound on web: $w \t---> ";
my $ok = 0;
my $found_exp = '';
foreach my $exposure (@{$result->{$rev_observation}->{$what}} ) {
if ($exposure eq $w) {
$ok = 1;
$found_exp = $exposure;
last;
}
}
if ($ok == 1) {
$output .= "OK. $found_exp was found on web \tOK\n";
} else {
$output .= "-$w- was forgoten\t\t\tERROR\n";
}
}
$output .= "\n";
}
}

printerrorlog($output);
printl($output);
}


# ----------------------------------------------------------------------------------------------- #

# Compare the local export directories against their mirrors on the FTP area.
# For every tar/gz archive found locally, list its members (via `tar -tv`) on
# both sides and check that each member exists on the other side with the same
# size.  Results are collected and printed as a summary at the end.
# Takes no arguments; reports via printh()/printl() only.
sub compare_ftp_files
{
    printh("starting comparison ...");

    # FTP-side mirror of the local export directories.  For every name in
    # @TAR_DIRS below there must be a local variable of that name (e.g.
    # $CALCLOSED_DIR, defined elsewhere in this file) and an "FTP_"-prefixed
    # lexical here; the main loop pairs them up by name via string eval.
    my $FTP_OUT_DIR                = '/xvsoc01/ftpexport1/secure/epic_ex/';
    my $FTP_CALCLOSED_DIR          = $FTP_OUT_DIR . 'calclosed/';
    my $FTP_CLOSED_DIR             = $FTP_OUT_DIR . 'closed/';
    my $FTP_MOS_OV_DIR             = $FTP_OUT_DIR . 'MOS_OV/';
    my $FTP_MOS_DI_DIR             = $FTP_OUT_DIR . 'MOS_DI/';
    my $FTP_PN_DL_DIR              = $FTP_OUT_DIR . 'PN_DL/';
    my $FTP_PN_NO_DIR              = $FTP_OUT_DIR . 'PN_NO/';
    my $FTP_PN_OD_OBS_DIR          = $FTP_OUT_DIR . 'PN_OD_OBS/';
    my $FTP_NEW_WEEK_DIR           = $FTP_OUT_DIR . 'new_week/';
    # BUGFIX: these two were previously derived from the *local* $NEW_WEEK_DIR,
    # which made the "FTP" new_week directories identical to the local ones,
    # so the comparison below compared those directories against themselves.
    my $FTP_NEW_WEEK_CALCLOSED_DIR = $FTP_NEW_WEEK_DIR . 'calclosed/';
    my $FTP_NEW_WEEK_CLOSED_DIR    = $FTP_NEW_WEEK_DIR . 'closed/';

    #######################################################

    # Directory pairs to compare (all of them).  Alternative selections are
    # kept below as comments for manual runs.
    my @TAR_DIRS = qw (CALCLOSED_DIR PN_NO_DIR CLOSED_DIR PN_DL_DIR MOS_OV_DIR MOS_DI_DIR PN_OD_OBS_DIR NEW_WEEK_CALCLOSED_DIR NEW_WEEK_CLOSED_DIR);
    # all but PN_DL PN_OD_OBS_DIR:
    # my @TAR_DIRS = qw (CALCLOSED_DIR PN_NO_DIR CLOSED_DIR MOS_OV_DIR MOS_DI_DIR NEW_WEEK_CALCLOSED_DIR NEW_WEEK_CLOSED_DIR);
    # my @TAR_DIRS = qw ( PN_OD_OBS_DIR );
    # my @TAR_DIRS = qw ( PN_DL_DIR );

    # BUGFIX: start the counter at 0 so the summary line never prints undef
    # when no archive was processed at all.
    my $tar_file_counter = 0;
    my (@error_files, @error_size);      # local members missing / mis-sized on ftp
    my (@f_error_files, @f_error_size);  # ftp members missing / mis-sized locally
    my @files_not_found;                 # archives present locally but not on ftp

    for my $dir (@TAR_DIRS) {
        # Resolve the local ($<dir>, defined elsewhere in this file) and the
        # ftp ($FTP_<dir>, lexical above) directory variables by name.
        my $var_local_dir = '$' . $dir;
        my $var_ftp_dir   = '$FTP_' . $dir;
        my $local_dir     = eval $var_local_dir;
        my $ftp_dir       = eval $var_ftp_dir;

        printh("compare: $local_dir --- $ftp_dir");

        my @local_tar_files = get_files($local_dir, ".+\.(TAR|gz)");

        printl(scalar @local_tar_files . " found in " . $local_dir);

        # foreach tar file in the dir
        for my $local_tar_file (@local_tar_files) {

            $tar_file_counter++;
            printl("\ncompare $local_tar_file ...");

            my @local_files_in_tar;
            my $ftp_tar_file;
            my @ftp_files_in_tar;

            # list the members of the tar resp. gz archive on both sides
            if ($local_tar_file =~ m/.+\.tar$/i) {
                printl("is a TAR file");
                @local_files_in_tar = split /\n/, `tar -tvf $local_dir$local_tar_file`;
                $ftp_tar_file = get_file_if_exists($ftp_dir, $local_tar_file);
                # BUGFIX: was "$ftp_tar_file == 0", which numifies the file
                # name and misclassifies names not starting with a digit.
                if (!$ftp_tar_file) {
                    push @files_not_found, $local_tar_file;
                    printh("tar file not found on ftp: $ftp_dir$local_tar_file");
                    next;
                }
                @ftp_files_in_tar = split /\n/, `tar -tvf $ftp_dir$ftp_tar_file`;

            } elsif ($local_tar_file =~ m/.+\.gz$/i) {
                printl("is a GZ file");
                @local_files_in_tar = split /\n/, `tar -ztvf $local_dir$local_tar_file`;

                $ftp_tar_file = get_file_if_exists($ftp_dir, $local_tar_file);
                if (!$ftp_tar_file) {
                    push @files_not_found, $local_tar_file;
                    printh("gz file not found on ftp: $ftp_dir$local_tar_file");
                    next;
                }
                @ftp_files_in_tar = split /\n/, `tar -ztvf $ftp_dir$ftp_tar_file`;

            } else {
                printh("Error 3985. no tar or gz file");
                # BUGFIX: nothing to compare; previously fell through and kept
                # going with empty listings and an undef $ftp_tar_file.
                next;
            }

            print scalar @local_files_in_tar;
            print " files found in $local_dir$local_tar_file:\n";
            print join "\n", @local_files_in_tar;
            print "\n----- now ftp ------\n";

            print scalar @ftp_files_in_tar;
            print " files found in $ftp_dir$ftp_tar_file:\n";
            print join "\n", @ftp_files_in_tar;
            print "\n----------------------------\n";

            # A "tar -tv" listing line looks like:
            # -rwxr-xr-x mkirsch/users 377280 2005-03-09 16:03:17 0203_0109980601_SCX00000ATS.FIT

            # 1. every local member must exist on the ftp with the same size
            foreach my $file (@local_files_in_tar) {
                my ($rights, $user, $size, $date, $time, $name) = split /\s+/, $file;
                # BUGFIX: was "printl(...) and next if $name eq ''", which only
                # skipped when printl() happened to return true, and warned on
                # undef when the line had fewer than six fields.
                if (!defined $name or $name eq '') {
                    printl("no name. so next");
                    next;
                }
                printl("looking for $name ($size)");
                my $flag = 0;
                foreach my $ftp_file (@ftp_files_in_tar) {
                    my ($f_rights, $f_user, $f_size, $f_date, $f_time, $f_name) = split /\s+/, $ftp_file;
                    next unless defined $f_name;    # short/blank listing line
                    if ($f_name eq $name) {
                        $flag = 1;
                        printl("\t$name found");
                        if ($size eq $f_size) {
                            printl("\tok. size matches");
                        } else {
                            printl(" !!!!!!!!!!!!!! Error !!!!!!!!!!!!!. size is not matching");
                            push @error_size, "$dir - $name (size local: $size --- size ftp $f_size)";
                        }
                    }
                }
                if ($flag == 0) {
                    printl("!!!! ERROR !!!. file $name not found on ftp.");
                    push @error_files, "$dir - $name";
                }
            }

            printl("\ncheck nothing was forgotten:\n");

            # 2. every ftp member must also exist locally with the same size
            foreach my $f_file (@ftp_files_in_tar) {
                my ($f_rights, $f_user, $f_size, $f_date, $f_time, $f_name) = split /\s+/, $f_file;
                if (!defined $f_name or $f_name eq '') {    # same guard as pass 1
                    printl("no name. so next");
                    next;
                }
                printl("looking for $f_name ($f_size)");
                my $flag = 0;
                foreach my $l_file (@local_files_in_tar) {
                    my ($l_rights, $l_user, $l_size, $l_date, $l_time, $l_name) = split /\s+/, $l_file;
                    next unless defined $l_name;
                    if ($f_name eq $l_name) {
                        $flag = 1;
                        printl("\t$f_name found");
                        if ($l_size eq $f_size) {
                            printl("\tok. size matches");
                        } else {
                            printl(" !!!!!!!!!!!!!! Error !!!!!!!!!!!!!. size is not matching");
                            push @f_error_size, "$dir - $f_name (size local: $l_size --- size ftp $f_size)";
                        }
                    }
                }
                if ($flag == 0) {
                    printl("!!!! ERROR !!!. file $f_name not found local.");
                    push @f_error_files, "$dir - $f_name";
                }
            }
        }
    }

    printh("result of comparison");
    printl("following files could not be found on the ftp:");
    foreach (@error_files) {
        printl($_);
    }
    printl("");
    printl("following files have diffrent sizes:");
    foreach (@error_size) {
        printl($_);
    }

    printh("result of comparison: nothing was forgotten");
    printl("following files could not be found local:");
    foreach (@f_error_files) {
        printl($_);
    }
    printl("");
    printl("following files have diffrent sizes:");
    foreach (@f_error_size) {
        printl($_);
    }

    printl("");
    printl("following tar/gz-files could not been found:");
    foreach (@files_not_found) {
        printl($_);
    }

    print "\n\nOK. compare end. $tar_file_counter tar/gz files compared\n\n";
}

# ----------------------------------------------------------------------------------------------- #

# Return the single file matching the criteria passed through to get_files()
# (directory, name pattern), or 0 when no such file exists.  More than one
# match is reported via error(); should error() return, the first match is
# used.
sub get_file_if_exists
{
    my @matches = get_files(@_);

    if (@matches > 1) {
        error("get_file_if_exists found more than one (@_). files found: " . scalar @matches);
    }

    return @matches ? $matches[0] : 0;
}

# ----------------------------------------------------------------------------------------------- #