-#!/usr/bin/env python2.7
# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT)
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
@@ -36,30 +35,14 @@ def __init__(self, filename, loader):
        """
        Creates a new ObjectCache

-        Caches the Python object returned by loader(filename), using
-        Python's pickle object serialization mechanism. An ObjectCache
-        is useful when loader(filename) is slow.
+        Caches the Python object returned by loader(filename).
+        An ObjectCache is useful when loader(filename) is slow.

-        The result of loader(filename) is cached to cachename, the
-        basename of filename with a '.pkl' extension.
-
-        Use the load() method to load, either via loader(filename) or
-        the pickled cache file, whichever was modified most recently.
+        Use the load() method to load the Python object.
        """
        self._loader = loader
        self._dict = None
        self._filename = filename
-        self._cachename = os.path.splitext(filename)[0] + ".pkl"
-
-    @property
-    def cachename(self):
-        """The pickled cache filename"""
-        return self._cachename
-
-    @property
-    def dirty(self):
-        """True if the pickle cache needs to be regenerated, False to use current pickle binary"""
-        return check_yaml_timestamps(self.filename, self.cachename)

    @property
    def filename(self):
@@ -70,19 +53,11 @@ def load(self):
        """
        Loads the Python object

-        Loads the Python object, either via loader(filename) or the
-        pickled cache file, whichever was modified most recently.
+        Loads the Python object via loader(filename).
        """

        if self._dict is None:
-            if self.dirty:
-                self._dict = self._loader(self.filename)
-                update_cache(self.filename, self.cachename, self._dict)
-                log.info(f"Loaded new pickle file: {self.cachename}")
-            else:
-                with open(self.cachename, "rb") as stream:
-                    self._dict = pickle.load(stream)
-                log.info(f'Current pickle file loaded: {self.cachename.split("/")[-1]}')
+            self._dict = self._loader(self.filename)

        return self._dict

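For reference, a minimal usage sketch of the simplified ObjectCache; the YAML path and the loader below are illustrative, not part of this patch, and the import path is assumed from the rest of AIT:

    import yaml

    from ait.core.util import ObjectCache  # import path assumed

    def load_yaml(filename):
        # Hypothetical slow loader: parsing a large YAML file is the kind of
        # call ObjectCache is meant to wrap.
        with open(filename, "r") as stream:
            return yaml.safe_load(stream)

    cache = ObjectCache("config/cmd.yaml", load_yaml)  # illustrative path
    obj = cache.load()  # first call invokes load_yaml(filename)
    obj = cache.load()  # later calls return the object already in memory
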
@@ -94,45 +69,46 @@ def load(self):
timer = time.time


-def check_yaml_timestamps(yaml_file_name, cache_name):
+def check_yaml_timestamps(yaml_file_name, cache_file_name):
    """
-    Checks YAML configuration file timestamp and any 'included' YAML configuration file's
-    timestamp against the pickle cache file timestamp.
-    The term 'dirty' means that a yaml config file has a more recent timestamp than the
-    pickle cache file. If a pickle cache file is found to be 'dirty' (return true) the
-    pickle cache file is not up-to-date, and a new pickle cache file must be generated.
-    If the cache file in not 'dirty' (return false) the existing pickle binary will
-    be loaded.
+    Checks the YAML configuration file timestamp and any 'included' YAML
+    configuration file's timestamp against the cache file's timestamp.
+    The term 'dirty' means that a yaml config file has a more recent
+    timestamp than the cache file. If a file is found to be 'dirty'
+    (return True) the cache file is considered not up-to-date.
+    If no file is 'dirty' (return False) the cache file can be considered
+    up-to-date.

    param: yaml_file_name: str
        Name of the yaml configuration file to be tested
-    param: cache_name: str
-        Filename with path to the cached pickle file for this config file.
+    param: cache_file_name: str
+        Filename with path to the cache file to be compared

    return: boolean
        True:
-            Indicates 'dirty' pickle cache: i.e. the file is not current, generate new binary
+            The cache file is not current, or does not exist
        False
-            Load current cache file
+            The cache file can be considered current

    """
-    # If no pickle cache exists return True to make a new one.
-    if not os.path.exists(cache_name):
-        log.debug("No pickle cache exists, make a new one")
+    # If no cache exists return True to make a new one.
+    if not os.path.exists(cache_file_name):
+        log.debug("No cache exists, make a new one")
        return True
-    # Has the yaml config file has been modified since the creation of the pickle cache
-    if os.path.getmtime(yaml_file_name) > os.path.getmtime(cache_name):
-        log.info(f"{yaml_file_name} modified - make a new binary pickle cache file.")
+    # Has the yaml config file been modified since the creation of the cache?
+    if os.path.getmtime(yaml_file_name) > os.path.getmtime(cache_file_name):
+        log.info(f"{yaml_file_name} modified - make a new cache file.")
        return True
    # Get the directory of the yaml config file to be parsed
    dir_name = os.path.dirname(yaml_file_name)
-    # Open the yaml config file to look for '!includes' to be tested on the next iteration
+    # Open the yaml config file to look for '!includes' to be tested
+    # on the next iteration
    with open(yaml_file_name, "r") as file:
        try:
            for line in file:
                if not line.strip().startswith("#") and "!include" in line:
                    check = check_yaml_timestamps(
-                        os.path.join(dir_name, line.strip().split(" ")[2]), cache_name
+                        os.path.join(dir_name, line.strip().split(" ")[2]), cache_file_name
                    )
                    if check:
                        return True
@@ -144,27 +120,6 @@ def check_yaml_timestamps(yaml_file_name, cache_name):
    return False


-def update_cache(yaml_file_name, cache_file_name, object_to_serialize):
-    """
-    Caches the result of loader(yaml_file_name) to pickle binary (cache_file_name), if
-    the yaml config file has been modified since the last pickle cache was created, i.e.
-    (the binary pickle cache is declared to be 'dirty' in 'check_yaml_timestamps()').
-
-    param: yaml_file_name: str
-        Name of the yaml configuration file to be serialized ('pickled')
-    param: cache_file_name: str
-        File name with path to the new serialized cached pickle file for this config file.:
-    param: object_to_serialize: object
-        Object to serialize ('pickle') e.g. instance of 'ait.core.cmd.CmdDict'
-
-    """
-
-    msg = f"Saving updates from more recent {yaml_file_name} to {cache_file_name}."
-    log.info(msg)
-    with open(cache_file_name, "wb") as output:
-        pickle.dump(object_to_serialize, output, -1)
-
-
def __init_extensions__(modname, modsyms):  # noqa
    """
    Initializes a module (given its name and :func:`globals()` symbol
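For orientation, a short sketch of how check_yaml_timestamps is typically called now that the pickle-specific helpers are gone; the file names are illustrative, the import path is assumed, and the cache file can be any artifact previously derived from the YAML:

    from ait.core import util  # import path assumed

    yaml_file = "config/tlm.yaml"    # illustrative; may pull in others via '!include'
    cache_file = "config/tlm.cache"  # illustrative artifact built earlier from the YAML

    if util.check_yaml_timestamps(yaml_file, cache_file):
        # The YAML (or one of its includes) is newer than the cache file, or no
        # cache file exists yet, so whatever was derived from the YAML should be rebuilt.
        print(f"{cache_file} is stale; rebuild it from {yaml_file}")
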
@@ -290,11 +245,11 @@ def setDictDefaults(d, defaults): # noqa

def getDefaultDict(modname, config_key, loader, reload=False, filename=None):  # noqa
    """
-    Returns default AIT dictonary for modname
+    Returns default AIT dictionary for modname

    This helper function encapsulates the core logic necessary to
-    (re)load, cache (via util.ObjectCache), and return the default
-    dictionary. For example, in ait.core.cmd:
+    (re)load and return the default dictionary.
+    For example, in ait.core.cmd:

        def getDefaultDict(reload=False):
            return ait.util.getDefaultDict(__name__, 'cmddict', CmdDict, reload)
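
As a usage note, a hedged sketch of how callers exercise that wrapper, following the ait.core.cmd example quoted in the docstring above:

    import ait.core.cmd as cmd

    cmddict = cmd.getDefaultDict()             # load (or reuse) the default command dictionary
    cmddict = cmd.getDefaultDict(reload=True)  # force the dictionary to be re-read from its source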