diff --git a/config/config_hparam.json b/config/config_hparam.json
index 0ce1aaa..bb0a7ca 100644
--- a/config/config_hparam.json
+++ b/config/config_hparam.json
@@ -1,28 +1,27 @@
 {
-    "name": "biomarker_log",
+    "name": "biomarker_log",

-    "d_model_name" : "seyonec/PubChem10M_SMILES_BPE_450k",
-    "p_model_name" : "Rostlab/prot_bert_bfd",
-
-    "gpu_ids" : "4,5,6,7",
-    "model_mode" : "train",
-    "load_checkpoint" : "./checkpoint/bindingDB/epoch=33-step=13463.ckpt",
+    "d_model_name": "seyonec/PubChem10M_SMILES_BPE_450k",
+    "p_model_name": "Rostlab/prot_bert_bfd",

-    "prot_maxlength" : 545,
-    "layer_limit" : true,
+    "gpu_ids": "1",
+    "model_mode": "train",
+    "load_checkpoint": "./checkpoint/bindingDB/epoch=33-step=13463.ckpt",

-    "max_epoch": 50,
-    "batch_size": 54,
-    "num_workers": 16,
+    "prot_maxlength": 545,
+    "layer_limit": true,

-    "task_name" : "davis",
-    "lr": 5e-6,
-    "layer_features" : [768, 32, 1],
-    "dropout" : 0.1,
-    "loss_fn" : "smooth",
+    "max_epoch": 50,
+    "batch_size": 54,
+    "num_workers": 16,

-    "traindata_rate" : 1.0,
-    "pretrained": {"chem": true, "prot": true},
-    "num_seed" : 9095
-}
+    "task_name": "davis",
+    "lr": 5e-6,
+    "layer_features": [768, 32, 1],
+    "dropout": 0.1,
+    "loss_fn": "smooth",
+    "traindata_rate": 1.0,
+    "pretrained": { "chem": true, "prot": true },
+    "num_seed": 9095
+}
diff --git a/train.py b/train.py
index 31e59e8..eea2f43 100644
--- a/train.py
+++ b/train.py
@@ -21,6 +21,7 @@
 from sklearn.metrics import f1_score, roc_curve, precision_score, recall_score, auc
 from sklearn.metrics import roc_auc_score, average_precision_score
+from module.model import deleteEncodingLayers


 class BiomarkerDataset(Dataset):
     def __init__(self, list_IDs, labels, df_dti, d_tokenizer, p_tokenizer, prot_maxLength):
diff --git a/train_regression.py b/train_regression.py
index 2feeacf..9ce4abf 100644
--- a/train_regression.py
+++ b/train_regression.py
@@ -8,7 +8,7 @@
 from utils.attention_flow import *

 from utils.emetric import regression_score
-from module.model import BApredictModel
+from module.model import BApredictModel, deleteEncodingLayers
 from module.datamodule import BAPredictDataModule

 import torch