import functools
import os
from argparse import Namespace
from dataclasses import dataclass
from typing import List, Optional, Union
from warnings import warn

import torch
import wandb


@dataclass
class WandBConfig:
    """Configuration for the WandbTBShim wandb/TensorBoard logging wrapper."""

    # Full experiment configuration (e.g. parsed CLI args), forwarded to wandb.init.
    config: Optional[Namespace] = None
    # Logging interval (in steps) carried over from the training arguments.
    log_interval: int = 1
    # Maximum number of pending events the TensorBoard writer may queue
    # before flushing to disk.
    queue_size: int = 1000
    # Local directory for TensorBoard event files and the wandb run directory.
    local_dir: Optional[str] = None

    # Run metadata, passed through to wandb.init.
    entity: str = "meditron"
    project: str = "meditron"
    save_code: bool = False
    tags: Optional[List[str]] = None
    name: Optional[str] = None
    notes: Optional[str] = None
    resume: str = "allow"
    run_id: Optional[str] = None
    magic: bool = False
    api_key: Optional[str] = None
    # Also mirror every scalar to a local TensorBoard SummaryWriter.
    with_tensorboard: bool = True
    # Swallow (and warn about) exceptions raised inside logging calls.
    try_catch_guard: bool = True

    @staticmethod
    def default(project: str, run_id: Optional[str] = None) -> 'WandBConfig':
        return WandBConfig(project=project, run_id=run_id)

    @staticmethod
    def from_args(args) -> 'WandBConfig':
        # wandb logging is expected to run only on the last rank of the job.
        assert args.rank == args.world_size - 1, \
            f"Only supposed to launch on rank {args.world_size - 1}, but got {args.rank}"
        return WandBConfig(local_dir=args.tensorboard_dir,
                           queue_size=args.tensorboard_queue_size,
                           log_interval=args.log_interval,
                           config=args,
                           entity=args.wandb_entity,
                           project=args.wandb_project,
                           run_id=args.wandb_id,
                           resume=args.wandb_resume,
                           api_key=args.wandb_api_key,
                           try_catch_guard=False,
                           with_tensorboard=True)
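
# A minimal sketch of the argparse-style Namespace that `from_args` expects;
# the attribute names are exactly the ones read above, while the concrete
# values here are illustrative only.
#
#     args = Namespace(rank=7, world_size=8,
#                      tensorboard_dir="/tmp/tb",
#                      tensorboard_queue_size=1000,
#                      log_interval=1,
#                      wandb_entity="meditron",
#                      wandb_project="meditron",
#                      wandb_id=None,
#                      wandb_resume="allow",
#                      wandb_api_key=None)
#     cfg = WandBConfig.from_args(args)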

def try_catch_guard(_func=None):
    """Method decorator that optionally swallows exceptions.

    The wrapped method must live on an object whose `cfg` attribute is a
    WandBConfig; when `cfg.try_catch_guard` is set, any exception raised by
    the method is reduced to a warning so logging failures never crash
    training.
    """
    def decorator_try_catch_guard(func):
        @functools.wraps(func)
        def try_catch_wrapper(*args, **kwargs):
            self = args[0]
            if self.cfg.try_catch_guard:
                try:
                    return func(*args, **kwargs)
                except BaseException as e:
                    warn(f"Ignoring error {e} in WandbTBShim")
            else:
                return func(*args, **kwargs)
        return try_catch_wrapper
    # Support both the bare @try_catch_guard and the @try_catch_guard() forms.
    if _func is None:
        return decorator_try_catch_guard
    else:
        return decorator_try_catch_guard(_func)
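
# Illustrative sketch (not part of the original module) of how the guard
# behaves on a toy object; the names below are hypothetical:
#
#     class _Toy:
#         def __init__(self, guard):
#             self.cfg = WandBConfig(try_catch_guard=guard)
#         @try_catch_guard
#         def boom(self):
#             raise RuntimeError("logging failed")
#
#     _Toy(guard=True).boom()    # warns "Ignoring error ..." and returns None
#     _Toy(guard=False).boom()   # raises RuntimeError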

class WandbTBShim:
    """
    Shim that holds the configuration we want the wandb wrapper to use
    (e.g. to control sampling, delay uploads, etc.) and translates the
    TensorBoard SummaryWriter API into wandb calls.
    """
    def __init__(self, config: WandBConfig):
        super().__init__()
        self.cfg = config
        # wandb needs an API key, either from the environment or the config.
        if os.environ.get("WANDB_API_KEY") is None:
            if self.cfg.api_key is None:
                raise ValueError("WANDB_API_KEY is not set, nor passed as an argument")
            else:
                os.environ["WANDB_API_KEY"] = self.cfg.api_key
        wandb.init(config=config.config,
                   entity=config.entity,
                   project=config.project,
                   save_code=config.save_code,
                   tags=config.tags,
                   name=config.name,
                   notes=config.notes,
                   resume=config.resume,
                   id=config.run_id,
                   dir=config.local_dir)
        # Scalars are accumulated per step and committed to wandb in a single
        # wandb.log call once a newer step arrives (see add_scalar/flush_all).
        self._last_step = None
        self._log_accum = {}
        if self.cfg.with_tensorboard:
            try:
                from torch.utils.tensorboard import SummaryWriter
                print('> setting tensorboard ...')
                self.tb_writer = SummaryWriter(
                    log_dir=config.local_dir,
                    max_queue=config.queue_size)
            except ModuleNotFoundError:
                print('WARNING: TensorBoard writing requested but is not '
                      'available (are you using PyTorch 1.1.0 or later?), '
                      'no TensorBoard logs will be written.', flush=True)
                self.tb_writer = None
        else:
            self.tb_writer = None

    @try_catch_guard
    def add_scalar(self, name: str, var: Union[float, int, torch.Tensor], step: int):
        if isinstance(var, torch.Tensor):
            var = var.item()
        if self.tb_writer is not None:
            self.tb_writer.add_scalar(name, var, global_step=step)
        # Names containing " vs " are logged to TensorBoard only and are
        # skipped on the wandb side.
        if " vs " in name:
            warn(f"Ignoring wandb log for {name}")
            return

        # A newer step means the previous step is complete: commit it.
        if self._last_step is not None and step > self._last_step:
            self.flush_all()

        self._last_step = step
        self._log_accum[name] = var

    @try_catch_guard
    def flush_all(self):
        # Commit every scalar accumulated for the current step in a single
        # wandb.log call, then reset the accumulator.
        if len(self._log_accum) > 0:
            wandb.log(self._log_accum, step=self._last_step, commit=True)
        self._log_accum = {}
        self._last_step = None
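
    # Illustrative call sequence (not from the original source) showing the
    # per-step batching: scalars logged at the same step are merged into one
    # wandb.log call, committed once a later step arrives.
    #
    #     shim.add_scalar("loss", 0.5, step=1)  # accumulated
    #     shim.add_scalar("lr", 1e-4, step=1)   # merged with "loss" at step 1
    #     shim.add_scalar("loss", 0.4, step=2)  # step advanced -> commits step 1
    #     shim.flush_all()                      # commits the step-2 values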

    @try_catch_guard
    def add_text(self, name: str, value: str):
        # Text entries only go to TensorBoard; they are not mirrored to wandb.
        if self.tb_writer is not None:
            self.tb_writer.add_text(name, value)


def toy_test(writer):
    # Log a few increasing/decreasing series, including " vs " names that
    # exercise the TensorBoard-only path.
    for i, l in zip(range(10), range(10, 20)):
        r = 10 - i
        for k in range(5):
            writer.add_scalar(f"forward{k}", l, i)
            writer.add_scalar(f"backward{k}", r, i)
            writer.add_scalar(f"forward{k} vs forward", l, i)
            writer.add_scalar(f"forward{k} vs backward", i, r)


if __name__ == "__main__":
    writer = WandbTBShim(WandBConfig.default("wandb-toy-test", run_id="meditron-wandb-test"))
    toy_test(writer)