Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# -*- coding: utf-8 -*- 

2 

3# gms_preprocessing, spatial and spectral homogenization of satellite remote sensing data 

4# 

5# Copyright (C) 2020 Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz-potsdam.de) 

6# 

7# This software was developed within the context of the GeoMultiSens project funded 

8# by the German Federal Ministry of Education and Research 

9# (project grant code: 01 IS 14 010 A-C). 

10# 

11# This program is free software: you can redistribute it and/or modify it under 

12# the terms of the GNU General Public License as published by the Free Software 

13# Foundation, either version 3 of the License, or (at your option) any later version. 

14# Please note the following exception: `gms_preprocessing` depends on tqdm, which 

15# is distributed under the Mozilla Public Licence (MPL) v2.0 except for the files 

16# "tqdm/_tqdm.py", "setup.py", "README.rst", "MANIFEST.in" and ".gitignore". 

17# Details can be found here: https://github.com/tqdm/tqdm/blob/master/LICENCE. 

18# 

19# This program is distributed in the hope that it will be useful, but WITHOUT 

20# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 

21# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 

22# details. 

23# 

24# You should have received a copy of the GNU Lesser General Public License along 

25# with this program. If not, see <http://www.gnu.org/licenses/>. 

26 

27__author__ = 'Daniel Scheffler' 

28 

29import sys 

30import warnings 

31import logging 

32import os 

33 

34from ..options.config import GMS_config as CFG 

35 

36# try: 

37# # noinspection PyCompatibility 

38# from StringIO import StringIO # Python 2 

39# except ImportError: 

40# from io import StringIO # Python 3 

41 

42 

class GMS_logger(logging.Logger):
    """Logger subclass logging to a logfile, optionally to the GMS job logfile, and to stdout/stderr."""

    def __init__(self, name_logfile, fmt_suffix=None, path_logfile=None, log_level='INFO', append=True,
                 log_to_joblog=True):
        # type: (str, any, str, any, bool, bool) -> None
        """Get a logging.Logger subclass instance pointing to the given logfile path.

        :param name_logfile:    name of the logger and of the logfile
        :param fmt_suffix:      if given, it will be included into log formatter
        :param path_logfile:    if no path is given, only a StreamHandler is created
        :param log_level:       the logging level to be used (choices: 'DEBUG', 'INFO', 'WARNING', 'ERROR',
                                'CRITICAL'; default: 'INFO')
        :param append:          <bool> whether to append the log message to an existing logfile (1)
                                or to create a new logfile (0); default=1
        :param log_to_joblog:   whether to additionally log all messages to the logfile of the GMS job (default=1)
        """
        # private attributes
        self._captured_stream = ''

        # attributes that need to be present in order to unpickle the logger via __setstate__
        self.name_logfile = name_logfile
        self.fmt_suffix = fmt_suffix
        self.path_logfile = path_logfile
        self.log_level = log_level
        self.appendMode = append
        self.log_to_joblog = log_to_joblog  # FIX: stored so that __setstate__ can fully restore the logger

        super(GMS_logger, self).__init__(name_logfile)

        self.formatter_fileH = logging.Formatter('%(asctime)s' + (' [%s]' % fmt_suffix if fmt_suffix else '') +
                                                 ' %(levelname)s: %(message)s', datefmt='%Y/%m/%d %H:%M:%S')
        self.formatter_ConsoleH = logging.Formatter('%(asctime)s' + (' [%s]' % fmt_suffix if fmt_suffix else '') +
                                                    ': %(message)s', datefmt='%Y/%m/%d %H:%M:%S')

        # local import to avoid a circular import at module level
        from ..misc.helper_functions import silentmkdir

        # set fileHandler
        if path_logfile:
            # create output directory
            silentmkdir(path_logfile)

            # create FileHandler
            fileHandler = logging.FileHandler(path_logfile, mode='a' if append else 'w')
            fileHandler.setFormatter(self.formatter_fileH)
            fileHandler.setLevel(log_level)
        else:
            fileHandler = None

        # set fileHandler for job logfile
        if log_to_joblog:
            job_logfile = os.path.join(CFG.path_job_logs, '%s.log' % CFG.ID)

            # create output directory
            silentmkdir(job_logfile)

            # create FileHandler
            joblog_Handler = logging.FileHandler(job_logfile, mode='a' if append else 'w')
            joblog_Handler.setFormatter(self.formatter_fileH)
            joblog_Handler.setLevel(log_level)
        else:
            joblog_Handler = None

        # create ConsoleHandler for logging levels DEBUG and INFO -> logging to sys.stdout
        # TODO add a StringIO handler that feeds self.streamObj (see captured_stream property)
        consoleHandler_out = logging.StreamHandler(stream=sys.stdout)  # by default it would go to sys.stderr
        consoleHandler_out.setFormatter(self.formatter_ConsoleH)
        consoleHandler_out.set_name('console handler stdout')
        consoleHandler_out.setLevel(log_level)
        # let only records BELOW logging.WARNING pass to stdout
        consoleHandler_out.addFilter(LessThanFilter(logging.WARNING))

        # create ConsoleHandler for logging levels WARNING, ERROR, CRITICAL -> logging to sys.stderr
        consoleHandler_err = logging.StreamHandler(stream=sys.stderr)
        consoleHandler_err.setFormatter(self.formatter_ConsoleH)
        consoleHandler_err.setLevel(logging.WARNING)
        consoleHandler_err.set_name('console handler stderr')

        self.setLevel(log_level)

        # only attach handlers once (this logger may be re-initialized via __setstate__)
        if not self.handlers:
            if fileHandler:
                self.addHandler(fileHandler)
            if joblog_Handler:
                self.addHandler(joblog_Handler)
            self.addHandler(consoleHandler_out)
            self.addHandler(consoleHandler_err)

    def __getstate__(self):
        # close all handlers before pickling - open file handles cannot be pickled
        self.close()
        return self.__dict__

    def __setstate__(self, ObjDict):
        """Defines how the attributes of GMS_logger are unpickled."""
        self.__init__(ObjDict['name_logfile'], fmt_suffix=ObjDict['fmt_suffix'], path_logfile=ObjDict['path_logfile'],
                      log_level=ObjDict['log_level'], append=True,
                      log_to_joblog=ObjDict.get('log_to_joblog', True))  # FIX: restore log_to_joblog as well
        ObjDict = self.__dict__
        return ObjDict

    @property
    def captured_stream(self):
        # FIX: self.streamObj is only set by the (currently disabled) StringIO handler;
        # guard against an AttributeError if that handler is not active
        if not self._captured_stream and hasattr(self, 'streamObj'):
            self._captured_stream = self.streamObj.getvalue()

        return self._captured_stream

    @captured_stream.setter
    def captured_stream(self, string):
        # NOTE: kept as assert (not raise) to preserve the exception type callers may catch
        assert isinstance(string, str), "'captured_stream' can only be set to a string. Got %s." % type(string)
        self._captured_stream = string

    def close(self):
        """Flush, close and detach all handlers attached to this logger."""
        for handler in self.handlers[:]:  # iterate over a copy - otherwise the StreamHandlers are left open
            if handler:
                try:
                    self.removeHandler(handler)
                    handler.flush()
                    handler.close()
                except PermissionError:
                    warnings.warn('Could not properly close logfile due to a PermissionError: %s' % sys.exc_info()[1])

        if self.handlers[:]:
            warnings.warn('Not all logging handlers could be closed. Remaining handlers: %s' % self.handlers[:])

    def view_logfile(self):
        """Print the content of the logfile to stdout."""
        with open(self.path_logfile) as inF:
            print(inF.read())

    def __del__(self):
        try:
            self.close()
        except ValueError as e:
            # the logfile may already be closed during interpreter shutdown - ignore only that case
            if str(e) != 'I/O operation on closed file':
                raise  # FIX: previously ALL ValueErrors were silently swallowed

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        return exc_type is None  # never suppress exceptions raised inside the with-block

206 

207 

def close_logger(logger):
    """Flush, close and detach all handlers of the given logger.

    Does nothing if *logger* is falsy or has no 'handlers' attribute.
    """
    if not logger or not hasattr(logger, 'handlers'):
        return

    # iterate over a copy of the handler list - otherwise the StreamHandlers are left open
    for hdl in list(logger.handlers):
        if not hdl:
            continue
        try:
            logger.removeHandler(hdl)
            hdl.flush()
            hdl.close()
        except PermissionError:
            warnings.warn('Could not properly close logfile due to a PermissionError: %s' % sys.exc_info()[1])

    if logger.handlers[:]:
        warnings.warn('Not all logging handlers could be closed. Remaining handlers: %s' % logger.handlers[:])

221 

222 

def shutdown_loggers():
    """Shut down the logging machinery by flushing and closing all handlers of all loggers."""
    logging.shutdown()

225 

226 

class LessThanFilter(logging.Filter):
    """Filter class to filter log messages by a maximum log level.

    Only records with a level strictly BELOW the given maximum pass the filter.

    Based on http://stackoverflow.com/questions/2302315/
    how-can-info-and-debug-logging-message-be-sent-to-stdout-and-higher-level-messag
    """

    def __init__(self, exclusive_maximum, name=""):
        """Get an instance of LessThanFilter.

        :param exclusive_maximum: maximum log level (exclusive), e.g., logging.WARNING
        :param name: name of the filter (passed through to logging.Filter)
        """
        super(LessThanFilter, self).__init__(name)
        self.max_level = exclusive_maximum

    def filter(self, record):
        """Filter function.

        NOTE: Returns True (i.e., the message is logged) if the logging level of the given
              record is below the exclusive maximum log level.

        :param record: the logging.LogRecord to be checked
        :return: bool
        """
        # non-zero return means we log this message
        return record.levelno < self.max_level  # FIX: idiomatic comparison instead of 'True if ... else False'