generate_pickle_list.py 2.37 KB
Newer Older
maming's avatar
maming committed
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
#=====================================================================================
#        Author: Aobo Li
#        Contact: liaobo77@gmail.com
#        
#        Last Modified: Aug. 29, 2021
#        
#        * This code generates the .dat list of all .pickle files.
#        * After running processing_kamland_new_mc.py or processing_sparse_time.py
#          run this code to generate the pickle list. The pickle list is the input to 
#          KamNet.
#=====================================================================================
#!/usr/bin/python
import json
import time
import datetime
import sys
import argparse
import os
import re
import string
import signal
import subprocess
from settings import OUT_DIR, OUT_PICKLE_DIR, TAIL, ROWS, COLS
from tools import cd, append_file

def main():
    """Build the .dat pickle-list files consumed by KamNet.

    Scans OUT_DIR for .pickle files, groups them by the signal/background
    isotope names declared in training_combo, and writes each group's file
    paths (one per line) to <key><TAIL>.dat inside OUT_PICKLE_DIR.
    Zero-byte pickle files are skipped.
    """
    # Create the output directory if it does not exist.
    if not os.path.exists(OUT_PICKLE_DIR):
        os.mkdir(OUT_PICKLE_DIR)

    '''
    Training combo is a python dict containing types of isotopes to generate pickle list
    Each entry of the python dict takes the form of:
        map[sig] = [bkg1, bkg2, bkg3,...]
    Note that "sig" and every "bkg" string has to be part of the .pickle filename
    '''
    training_combo = {}
    training_combo['Solar'] = ['Bi214m']

    # Collect all .pickle file names; sorted() gives deterministic list order.
    inputfiles = sorted(ifile for ifile in os.listdir(OUT_DIR) if ".pickle" in ifile)

    filename_array = {}
    # Categorize .pickle file addresses into corresponding types of isotopes (sig or bkg).
    for npyfile in inputfiles:
        # .items() replaces the Python-2-only .iteritems(), which raises
        # AttributeError under Python 3.
        for sig, bkg in training_combo.items():
            if sig in npyfile:
                filename_array = append_file(sig, str(OUT_DIR + npyfile), filename_array)
            else:
                for single_bkg in bkg:
                    if single_bkg in npyfile:
                        filename_array = append_file(single_bkg, str(OUT_DIR + npyfile), filename_array)
    # Generate the .dat pickle list, one file per isotope key.
    for key in filename_array.keys():
        with cd(OUT_PICKLE_DIR):
            # Context manager guarantees the file is closed even if an
            # exception (e.g. from os.stat) occurs mid-write.
            with open(str(key + TAIL + '.dat'), "w") as writefile:
                for filename in filename_array[key]:
                    if os.stat(filename).st_size == 0:
                        # Skip file with 0 size
                        continue
                    writefile.write(filename + '\n')

# Script entry point: run only when executed directly, not on import.
if __name__ == "__main__":
    main()