"""Logging for Tidy3d."""importinspectfromdatetimeimportdatetimefromtypingimportUnion,List,Callablefromtyping_extensionsimportLiteralfromrich.consoleimportConsole# Note: "SUPPORT" and "USER" levels are meant for backend runs only.# Logging in frontend code should just use the standard debug/info/warning/error/critical.LogLevel=Literal["DEBUG","SUPPORT","USER","INFO","WARNING","ERROR","CRITICAL"]LogValue=Union[int,LogLevel]# Logging levels compatible with logging module_level_value={"DEBUG":10,"SUPPORT":12,"USER":15,"INFO":20,"WARNING":30,"ERROR":40,"CRITICAL":50,}_level_name={v:kfork,vin_level_value.items()}DEFAULT_LEVEL="WARNING"DEFAULT_LOG_STYLES={"DEBUG":None,"SUPPORT":None,"USER":None,"INFO":None,"WARNING":"red","ERROR":"red bold","CRITICAL":"red bold",}# Width of the console used for rich logging (in characters).CONSOLE_WIDTH=80def_default_log_level_format(level:str)->str:"""By default just return unformatted log level string."""returnleveldef_get_level_int(level:LogValue)->int:"""Get the integer corresponding to the level string."""ifisinstance(level,int):returnleveliflevelnotin_level_value:# We don't want to import ConfigError to avoid a circular dependencyraiseValueError(f"logging level {level} not supported, must be ""'DEBUG', 'SUPPORT', 'USER', 'INFO', 'WARNING', 'ERROR', or 'CRITICAL'")return_level_value[level]
class LogHandler:
    """Handle log messages depending on log level"""
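    # Constructor sketch (assumed): inferred from how ``handle`` below uses ``self.level``,
    # ``self.console`` and ``self.log_level_format``, and from the ``LogHandler(Console(...), level)``
    # calls further down in this module. The exact original signature is an assumption.
    def __init__(
        self,
        console: Console,
        level: LogValue,
        log_level_format: Callable[[str], str] = _default_log_level_format,
    ):
        self.level = _get_level_int(level)
        self.console = console
        self.log_level_format = log_level_format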
    def handle(self, level, level_name, message):
        """Output log messages depending on log level"""
        if level >= self.level:
            stack = inspect.stack()
            offset = 4
            if stack[offset - 1].filename.endswith("exceptions.py"):
                # We want the calling site for exceptions.py
                offset += 1
            self.console.log(
                self.log_level_format(level_name),
                message,
                sep=": ",
                style=DEFAULT_LOG_STYLES[level_name],
                _stack_offset=offset,
            )

class Logger:
    """Custom logger to avoid the complexities of the logging module

    The logger can be used in a context manager to avoid the emission of multiple messages. In
    this case, the first message in the context is emitted normally, but any others are
    discarded. When the context is exited, the number of discarded messages of each level is
    displayed with the highest level of the captured messages.

    Messages can also be captured for post-processing. That can be enabled through 'set_capture'
    to record all warnings emitted during model validation. A structured copy of all validation
    messages can then be recovered through 'captured_warnings'. See the usage sketch after this
    class definition.
    """

    _static_cache = set()
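    # Constructor sketch (assumed): the attribute set is inferred from how the methods below use
    # ``self.handlers``, ``self.suppression``, ``self._counts``, ``self._stack``, ``self._capture``
    # and ``self._captured_warnings``; the exact defaults are an assumption.
    def __init__(self):
        self.handlers = {}
        self.suppression = True
        self._counts = None
        self._capture = False
        self._captured_warnings = []
        self._stack = None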
    def set_capture(self, capture: bool):
        """Turn on/off tree-like capturing of log messages."""
        self._capture = capture
    def captured_warnings(self):
        """Get the formatted list of captured log messages."""
        captured_warnings = self._captured_warnings
        self._captured_warnings = []
        return captured_warnings
    def __enter__(self):
        """If suppression is enabled, enter a consolidation context (only a single message is
        emitted)."""
        if self.suppression and self._counts is None:
            self._counts = {}
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        """Exit a consolidation context (report the number of messages discarded)."""
        if self._counts is not None:
            total = sum(v for v in self._counts.values())
            if total > 0:
                max_level = max(k for k, v in self._counts.items() if v > 0)
                counts = [f"{v} {_level_name[k]}" for k, v in self._counts.items() if v > 0]
            self._counts = None
            if total > 0:
                noun = " messages." if total > 1 else " message."
                # Temporarily prevent capturing messages to emit consolidated summary
                stack = self._stack
                self._stack = None
                self.log(max_level, "Suppressed " + ", ".join(counts) + noun)
                self._stack = stack
        return False
    def begin_capture(self):
        """Start capturing log stack for consolidated validation log.

        This method is used before any model validation starts and is included in the
        initialization of 'BaseModel'. It must be followed by a corresponding 'end_capture'.
        """
        if not self._capture:
            return

        stack_item = {"messages": [], "children": {}}

        if self._stack:
            self._stack.append(stack_item)
        else:
            self._stack = [stack_item]
    def end_capture(self, model):
        """End capturing log stack for consolidated validation log.

        This method is used after all model validations and is included in the initialization of
        'BaseModel'. It must follow a corresponding 'begin_capture'.
        """
        if not self._stack:
            return

        stack_item = self._stack.pop()
        if len(self._stack) == 0:
            self._stack = None

        # Check if this stack item contains any messages or children
        if len(stack_item["messages"]) > 0 or len(stack_item["children"]) > 0:
            stack_item["type"] = model.__class__.__name__

            # Set the path for each child
            model_fields = model.get_submodels_by_hash()
            for child_hash, child_dict in stack_item["children"].items():
                child_dict["parent_fields"] = model_fields.get(child_hash, [])

            # Are we at the bottom of the stack?
            if self._stack is None:
                # Yes, we're root
                self._parse_warning_capture(current_loc=[], stack_item=stack_item)
            else:
                # No, we're someone else's child
                hash_ = hash(model)
                self._stack[-1]["children"][hash_] = stack_item
    def _parse_warning_capture(self, current_loc, stack_item):
        """Process capture tree to compile formatted captured warnings."""
        if "parent_fields" in stack_item:
            for field in stack_item["parent_fields"]:
                if isinstance(field, tuple):
                    # array field
                    new_loc = current_loc + list(field)
                else:
                    # single field
                    new_loc = current_loc + [field]

                # process current level warnings
                for level, msg, custom_loc in stack_item["messages"]:
                    if level == "WARNING":
                        self._captured_warnings.append({"loc": new_loc + custom_loc, "msg": msg})

                # initialize processing at children level
                for child_stack in stack_item["children"].values():
                    self._parse_warning_capture(current_loc=new_loc, stack_item=child_stack)
        else:
            # for root object
            # process current level warnings
            for level, msg, custom_loc in stack_item["messages"]:
                if level == "WARNING":
                    self._captured_warnings.append({"loc": current_loc + custom_loc, "msg": msg})

            # initialize processing at children level
            for child_stack in stack_item["children"].values():
                self._parse_warning_capture(current_loc=current_loc, stack_item=child_stack)

    def _log(
        self,
        level: int,
        level_name: str,
        message: str,
        *args,
        log_once: bool = False,
        custom_loc: List = None,
        capture: bool = True,
    ) -> None:
        """Distribute log messages to all handlers"""

        # Compose message
        if len(args) > 0:
            try:
                composed_message = str(message) % args
            except Exception as e:
                composed_message = f"{message} % {args}\n{e}"
        else:
            composed_message = str(message)

        # Capture all messages (even if suppressed later)
        if self._stack and capture:
            if custom_loc is None:
                custom_loc = []
            self._stack[-1]["messages"].append((level_name, composed_message, custom_loc))

        # Check global cache if requested
        if log_once:
            # Use the message body before composition as key
            if message in self._static_cache:
                return
            self._static_cache.add(message)

        # Context-local logger emits a single message and consolidates the rest
        if self._counts is not None:
            if len(self._counts) > 0:
                self._counts[level] = 1 + self._counts.get(level, 0)
                return
            self._counts[level] = 0

        # Forward message to handlers
        for handler in self.handlers.values():
            handler.handle(level, level_name, composed_message)
    def log(self, level: LogValue, message: str, *args, log_once: bool = False) -> None:
        """Log (message) % (args) with given level"""
        if isinstance(level, str):
            level_name = level
            level = _get_level_int(level)
        else:
            level_name = _level_name.get(level, "unknown")
        self._log(level, level_name, message, *args, log_once=log_once)
    def debug(self, message: str, *args, log_once: bool = False) -> None:
        """Log (message) % (args) at debug level"""
        self._log(_level_value["DEBUG"], "DEBUG", message, *args, log_once=log_once)
    def support(self, message: str, *args, log_once: bool = False) -> None:
        """Log (message) % (args) at support level"""
        self._log(_level_value["SUPPORT"], "SUPPORT", message, *args, log_once=log_once)
    def user(self, message: str, *args, log_once: bool = False) -> None:
        """Log (message) % (args) at user level"""
        self._log(_level_value["USER"], "USER", message, *args, log_once=log_once)
    def info(self, message: str, *args, log_once: bool = False) -> None:
        """Log (message) % (args) at info level"""
        self._log(_level_value["INFO"], "INFO", message, *args, log_once=log_once)
    def warning(
        self,
        message: str,
        *args,
        log_once: bool = False,
        custom_loc: List = None,
        capture: bool = True,
    ) -> None:
        """Log (message) % (args) at warning level"""
        self._log(
            _level_value["WARNING"],
            "WARNING",
            message,
            *args,
            log_once=log_once,
            custom_loc=custom_loc,
            capture=capture,
        )
    def error(self, message: str, *args, log_once: bool = False) -> None:
        """Log (message) % (args) at error level"""
        self._log(_level_value["ERROR"], "ERROR", message, *args, log_once=log_once)
    def critical(self, message: str, *args, log_once: bool = False) -> None:
        """Log (message) % (args) at critical level"""
        self._log(_level_value["CRITICAL"], "CRITICAL", message, *args, log_once=log_once)

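# Usage sketch for the consolidation and capture features described in the Logger docstring,
# written against the module-level ``log`` instance created below (illustrative only):
#
#     >>> with log:  # consolidation context: first message emitted, the rest only counted
#     ...     log.warning("emitted normally")
#     ...     log.warning("discarded; summarized as 'Suppressed 1 WARNING message.' on exit")
#
#     >>> log.set_capture(True)                # record warnings raised during model validation
#     >>> # ... construct / validate models here ...
#     >>> warnings = log.captured_warnings()   # list of {"loc": [...], "msg": "..."} entries
#     >>> log.set_capture(False)
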
def set_logging_level(level: LogValue = DEFAULT_LEVEL) -> None:
    """Set tidy3d console logging level priority.

    Parameters
    ----------
    level : str
        The lowest priority level of logging messages to display. One of ``{'DEBUG', 'SUPPORT',
        'USER', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'}`` (listed in increasing priority).
    """
    if "console" in log.handlers:
        log.handlers["console"].level = _get_level_int(level)

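# Example (sketch): show messages of INFO priority and above on the console.
#
#     >>> set_logging_level("INFO")
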
def set_log_suppression(value: bool) -> None:
    """Control log suppression for repeated messages."""
    log.suppression = value

def get_aware_datetime() -> datetime:
    """Get an aware current local datetime (with local timezone info)."""
    return datetime.now().astimezone()

def set_logging_console(stderr: bool = False) -> None:
    """Set stdout or stderr as console output

    Parameters
    ----------
    stderr : bool
        If False, logs are directed to stdout, otherwise to stderr.
    """
    if "console" in log.handlers:
        previous_level = log.handlers["console"].level
    else:
        previous_level = DEFAULT_LEVEL
    log.handlers["console"] = LogHandler(
        Console(
            stderr=stderr,
            width=CONSOLE_WIDTH,
            log_path=False,
            get_datetime=get_aware_datetime,
            log_time_format="%X %Z",
        ),
        previous_level,
    )

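# Example (sketch): route console log output to stderr, e.g. to keep it separate from data
# written to stdout.
#
#     >>> set_logging_console(stderr=True)
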
def set_logging_file(
    fname: str,
    filemode: str = "w",
    level: LogValue = DEFAULT_LEVEL,
    log_path: bool = False,
) -> None:
    """Set a file to write log to, independently from the stdout or stderr console output chosen
    using :meth:`set_logging_console`.

    Parameters
    ----------
    fname : str
        Path to file to direct the output to. If an empty string, a previously set logging file
        will be closed, if any, but nothing else happens.
    filemode : str
        'w' or 'a', defining if the file should be overwritten or appended.
    level : str
        One of ``{'DEBUG', 'SUPPORT', 'USER', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'}``. This is
        set for the file independently of the console output level set by
        :meth:`set_logging_level`.
    log_path : bool = False
        Whether to log the path to the file that issued the message.
    """
    if filemode not in "wa":
        raise ValueError("filemode must be either 'w' or 'a'")

    # Close previous handler, if any
    if "file" in log.handlers:
        try:
            log.handlers["file"].console.file.close()
        except Exception:  # TODO: catch specific exception
            log.warning("Log file could not be closed")
        finally:
            del log.handlers["file"]

    if fname == "":
        # Empty string can be passed to just stop a previously opened file handler
        return

    try:
        file = open(fname, filemode)
    except Exception:  # TODO: catch specific exception
        log.error(f"File {fname} could not be opened")
        return

    log.handlers["file"] = LogHandler(
        Console(file=file, force_jupyter=False, log_path=log_path), level
    )

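# Example (sketch): mirror messages of INFO priority and above to a file, then close it again;
# the file name is illustrative.
#
#     >>> set_logging_file("tidy3d.log", filemode="a", level="INFO")
#     >>> set_logging_file("")  # closes the previously opened log file
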
# Initialize Tidy3d's logger
log = Logger()

# Set default logging output
set_logging_console()

def get_logging_console() -> Console:
    """Get console from logging handlers."""
    if "console" not in log.handlers:
        set_logging_console()
    return log.handlers["console"].console

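# Example (sketch): reuse the logging console for rich-formatted output so it shares the same
# width and output stream as the log messages.
#
#     >>> get_logging_console().print("Solver finished.", style="green")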