Skip to content

Commit e4930cd

Browse files
committed
added seaborn and removed tensorboard logger
1 parent 062518c commit e4930cd

File tree

4 files changed

+54
-111
lines changed

4 files changed

+54
-111
lines changed

ddopai/_modidx.py

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -2044,13 +2044,7 @@
20442044
'ddopai/meta_learning/utils/storage_vae.py'),
20452045
'ddopai.meta_learning.utils.storage_vae.RolloutStorageVAE.ready_for_update': ( '50_meta_learning/50_utils/storage_vae.html#rolloutstoragevae.ready_for_update',
20462046
'ddopai/meta_learning/utils/storage_vae.py')},
2047-
'ddopai.meta_learning.utils.tb_logger': { 'ddopai.meta_learning.utils.tb_logger.TBLogger': ( '50_meta_learning/50_utils/tb_loger.html#tblogger',
2048-
'ddopai/meta_learning/utils/tb_logger.py'),
2049-
'ddopai.meta_learning.utils.tb_logger.TBLogger.__init__': ( '50_meta_learning/50_utils/tb_loger.html#tblogger.__init__',
2050-
'ddopai/meta_learning/utils/tb_logger.py'),
2051-
'ddopai.meta_learning.utils.tb_logger.TBLogger.add': ( '50_meta_learning/50_utils/tb_loger.html#tblogger.add',
2052-
'ddopai/meta_learning/utils/tb_logger.py'),
2053-
'ddopai.meta_learning.utils.tb_logger.WandbLogger': ( '50_meta_learning/50_utils/tb_loger.html#wandblogger',
2047+
'ddopai.meta_learning.utils.tb_logger': { 'ddopai.meta_learning.utils.tb_logger.WandbLogger': ( '50_meta_learning/50_utils/tb_loger.html#wandblogger',
20542048
'ddopai/meta_learning/utils/tb_logger.py'),
20552049
'ddopai.meta_learning.utils.tb_logger.WandbLogger.__init__': ( '50_meta_learning/50_utils/tb_loger.html#wandblogger.__init__',
20562050
'ddopai/meta_learning/utils/tb_logger.py'),

ddopai/meta_learning/utils/tb_logger.py

Lines changed: 2 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../nbs/50_meta_learning/50_utils/40_tb_loger.ipynb.
22

33
# %% auto 0
4-
__all__ = ['device', 'TBLogger', 'WandbLogger']
4+
__all__ = ['device', 'WandbLogger']
55

66
# %% ../../../nbs/50_meta_learning/50_utils/40_tb_loger.ipynb 1
77
import datetime
@@ -10,61 +10,10 @@
1010
import os
1111
import wandb
1212
import torch
13-
from torch.utils.tensorboard import SummaryWriter
13+
#from torch.utils.tensorboard import SummaryWriter
1414

1515
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
1616

17-
# %% ../../../nbs/50_meta_learning/50_utils/40_tb_loger.ipynb 2
18-
class TBLogger:
19-
def __init__(self, args, exp_label):
20-
self.output_name = exp_label + '_' + str(args.seed) + '_' + datetime.datetime.now().strftime('_%d:%m_%H:%M:%S') + '_' + hex(random.Random().randint(0,1e6)).replace("0x","")
21-
try:
22-
log_dir = args.results_log_dir
23-
except AttributeError:
24-
log_dir = args['results_log_dir']
25-
26-
if log_dir is None:
27-
dir_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
28-
dir_path = os.path.join(dir_path, 'logs')
29-
else:
30-
dir_path = log_dir
31-
32-
if not os.path.exists(dir_path):
33-
try:
34-
os.mkdir(dir_path)
35-
except FileExistsError: # This can still happen in the intervening time
36-
pass
37-
except:
38-
dir_path_head, dir_path_tail = os.path.split(dir_path)
39-
if len(dir_path_tail) == 0:
40-
dir_path_head, dir_path_tail = os.path.split(dir_path_head)
41-
os.mkdir(dir_path_head)
42-
os.mkdir(dir_path)
43-
44-
try:
45-
self.full_output_folder = os.path.join(os.path.join(dir_path, 'logs_{}'.format(args.env_name)),
46-
self.output_name)
47-
except:
48-
self.full_output_folder = os.path.join(os.path.join(dir_path, 'logs_{}'.format(args["env_name"])),
49-
self.output_name)
50-
51-
self.writer = SummaryWriter(log_dir=self.full_output_folder)
52-
53-
print('logging under', self.full_output_folder)
54-
55-
if not os.path.exists(self.full_output_folder):
56-
os.makedirs(self.full_output_folder)
57-
with open(os.path.join(self.full_output_folder, 'config.json'), 'w') as f:
58-
try:
59-
config = {k: v for (k, v) in vars(args).items() if k != 'device'}
60-
except:
61-
config = args
62-
config.update(device=device.type)
63-
json.dump(config, f, indent=2)
64-
65-
def add(self, name, value, x_pos):
66-
self.writer.add_scalar(name, value, x_pos)
67-
6817
# %% ../../../nbs/50_meta_learning/50_utils/40_tb_loger.ipynb 3
6918
class WandbLogger:
7019
def __init__(self, args, exp_label):

nbs/50_meta_learning/50_utils/40_tb_loger.ipynb

Lines changed: 50 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
"import os\n",
2424
"import wandb\n",
2525
"import torch\n",
26-
"from torch.utils.tensorboard import SummaryWriter\n",
26+
"#from torch.utils.tensorboard import SummaryWriter\n",
2727
"\n",
2828
"device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")"
2929
]
@@ -34,56 +34,56 @@
3434
"metadata": {},
3535
"outputs": [],
3636
"source": [
37-
"#| export\n",
38-
"class TBLogger:\n",
39-
" def __init__(self, args, exp_label):\n",
40-
" self.output_name = exp_label + '_' + str(args.seed) + '_' + datetime.datetime.now().strftime('_%d:%m_%H:%M:%S') + '_' + hex(random.Random().randint(0,1e6)).replace(\"0x\",\"\")\n",
41-
" try:\n",
42-
" log_dir = args.results_log_dir\n",
43-
" except AttributeError:\n",
44-
" log_dir = args['results_log_dir']\n",
45-
"\n",
46-
" if log_dir is None:\n",
47-
" dir_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))\n",
48-
" dir_path = os.path.join(dir_path, 'logs')\n",
49-
" else:\n",
50-
" dir_path = log_dir\n",
51-
"\n",
52-
" if not os.path.exists(dir_path):\n",
53-
" try:\n",
54-
" os.mkdir(dir_path)\n",
55-
" except FileExistsError: # This can still happen in the intervening time\n",
56-
" pass\n",
57-
" except:\n",
58-
" dir_path_head, dir_path_tail = os.path.split(dir_path)\n",
59-
" if len(dir_path_tail) == 0:\n",
60-
" dir_path_head, dir_path_tail = os.path.split(dir_path_head)\n",
61-
" os.mkdir(dir_path_head)\n",
62-
" os.mkdir(dir_path)\n",
63-
"\n",
64-
" try:\n",
65-
" self.full_output_folder = os.path.join(os.path.join(dir_path, 'logs_{}'.format(args.env_name)),\n",
66-
" self.output_name)\n",
67-
" except:\n",
68-
" self.full_output_folder = os.path.join(os.path.join(dir_path, 'logs_{}'.format(args[\"env_name\"])),\n",
69-
" self.output_name)\n",
70-
"\n",
71-
" self.writer = SummaryWriter(log_dir=self.full_output_folder)\n",
7237
"\n",
73-
" print('logging under', self.full_output_folder)\n",
74-
"\n",
75-
" if not os.path.exists(self.full_output_folder):\n",
76-
" os.makedirs(self.full_output_folder)\n",
77-
" with open(os.path.join(self.full_output_folder, 'config.json'), 'w') as f:\n",
78-
" try:\n",
79-
" config = {k: v for (k, v) in vars(args).items() if k != 'device'}\n",
80-
" except:\n",
81-
" config = args\n",
82-
" config.update(device=device.type)\n",
83-
" json.dump(config, f, indent=2)\n",
84-
"\n",
85-
" def add(self, name, value, x_pos):\n",
86-
" self.writer.add_scalar(name, value, x_pos)"
38+
"# class TBLogger:\n",
39+
"# def __init__(self, args, exp_label):\n",
40+
"# self.output_name = exp_label + '_' + str(args.seed) + '_' + datetime.datetime.now().strftime('_%d:%m_%H:%M:%S') + '_' + hex(random.Random().randint(0,1e6)).replace(\"0x\",\"\")\n",
41+
"# try:\n",
42+
"# log_dir = args.results_log_dir\n",
43+
"# except AttributeError:\n",
44+
"# log_dir = args['results_log_dir']\n",
45+
"\n",
46+
"# if log_dir is None:\n",
47+
"# dir_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))\n",
48+
"# dir_path = os.path.join(dir_path, 'logs')\n",
49+
"# else:\n",
50+
"# dir_path = log_dir\n",
51+
"\n",
52+
"# if not os.path.exists(dir_path):\n",
53+
"# try:\n",
54+
"# os.mkdir(dir_path)\n",
55+
"# except FileExistsError: # This can still happen in the intervening time\n",
56+
"# pass\n",
57+
"# except:\n",
58+
"# dir_path_head, dir_path_tail = os.path.split(dir_path)\n",
59+
"# if len(dir_path_tail) == 0:\n",
60+
"# dir_path_head, dir_path_tail = os.path.split(dir_path_head)\n",
61+
"# os.mkdir(dir_path_head)\n",
62+
"# os.mkdir(dir_path)\n",
63+
"\n",
64+
"# try:\n",
65+
"# self.full_output_folder = os.path.join(os.path.join(dir_path, 'logs_{}'.format(args.env_name)),\n",
66+
"# self.output_name)\n",
67+
"# except:\n",
68+
"# self.full_output_folder = os.path.join(os.path.join(dir_path, 'logs_{}'.format(args[\"env_name\"])),\n",
69+
"# self.output_name)\n",
70+
"\n",
71+
"# self.writer = SummaryWriter(log_dir=self.full_output_folder)\n",
72+
"\n",
73+
"# print('logging under', self.full_output_folder)\n",
74+
"\n",
75+
"# if not os.path.exists(self.full_output_folder):\n",
76+
"# os.makedirs(self.full_output_folder)\n",
77+
"# with open(os.path.join(self.full_output_folder, 'config.json'), 'w') as f:\n",
78+
"# try:\n",
79+
"# config = {k: v for (k, v) in vars(args).items() if k != 'device'}\n",
80+
"# except:\n",
81+
"# config = args\n",
82+
"# config.update(device=device.type)\n",
83+
"# json.dump(config, f, indent=2)\n",
84+
"\n",
85+
"# def add(self, name, value, x_pos):\n",
86+
"# self.writer.add_scalar(name, value, x_pos)"
8787
]
8888
},
8989
{

settings.ini

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ status = 3
3838
user = opimwue
3939

4040
### Optional ###
41-
requirements = fastcore pandas numpy gymnasium==0.28.1 gym==0.26.2 scikit-learn==1.5.1 requests tqdm mushroom_rl==1.10.1 torchinfo xgboost wandb statsmodels
41+
requirements = fastcore pandas numpy gymnasium==0.28.1 gym==0.26.2 scikit-learn==1.5.1 seaborn>=0.13 requests tqdm mushroom_rl==1.10.1 torchinfo xgboost wandb statsmodels
4242
# dev_requirements =
4343
# console_scripts =
4444
# conda_user =

0 commit comments

Comments
 (0)