XCurve.AUROC.optimizer.ASGDA
The following describes the class `XCurve.AUROC.optimizer.ASGDA`:
CLASS ASGDA(params=None, momentum=0, dampening=0, weight_decay=0, nesterov=False, init_lr=0.01, hparams=None), derived from SGD [SOURCE]
| Parameter | Description |
| --- | --- |
| `params` | the parameters of the model to optimize (parameters or parameter groups, as in SGD). |
| `momentum` | default=`0`, similar to momentum in SGD. |
| `dampening` | default=`0`, the gradient is accumulated into the momentum buffer with coefficient (`1-dampening`), as in SGD. |
| `weight_decay` | default=`0`, similar to weight_decay in SGD. |
| `nesterov` | default=`False`, similar to nesterov in SGD. |
| `init_lr` | default=`0.01`, the initial learning rate. |
| `hparams` | hyper parameters for Partial AUC optimization; see `XCurve.AUROC.losses.PartialAUROC.UnbiasedPAUCLoss` and the `hparams` dict in the example below. |
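Unlike a plain SGD optimizer, `ASGDA.step()` takes a `pre` flag and a global step counter `t`, and is called twice per mini-batch (first with `pre=True`, then with `pre=False`), as the full example below illustrates. The following is a minimal sketch of that per-batch pattern only; the `model`, `criterion`, `optimizer`, and batch tensors are assumed to be set up as in the example:

```python
import torch

def asgda_batch_update(model, criterion, optimizer, img, lbl, t):
    """One ASGDA update on a single mini-batch: two forward/backward passes,
    calling step(pre=True) and then step(pre=False), mirroring the training
    loop in the example below."""
    sigmoid = torch.nn.Sigmoid()  # scores in (0, 1), as in the example

    optimizer.zero_grad()
    loss = criterion(sigmoid(model(img)), lbl)
    loss.backward()
    optimizer.step(pre=True, t=t)   # first of the two step() calls for this batch

    optimizer.zero_grad()
    loss = criterion(sigmoid(model(img)), lbl)
    loss.backward()
    optimizer.step(pre=False, t=t)  # second call completes the update for this batch

    return loss.item()
```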
Example:

```python
import sys
import os
import copy
import numpy as np
import torch
import torch.nn as nn
from XCurve.AUROC.losses.PartialAUROC import UnbiasedPAUCLoss
from XCurve.AUROC.optimizer.ASGDA import ASGDA
from XCurve.AUROC.dataloaders import get_datasets
from XCurve.AUROC.dataloaders import get_data_loaders
from XCurve.AUROC.models import generate_net
from XCurve.Metrics.AUROC import AUROC
from easydict import EasyDict as edict
# hyper parameters
hyper_param = {
    'mini-batch': 256,
    'alpha': 1.0,
    'beta': 0.3,
    'weight_decay': 1e-5,
    'init_lr': 0.001
}
if hyper_param['alpha'] == 1:
    metrics = 'OPAUC'  # one-way partial AUC when alpha == 1
else:
    metrics = 'TPAUC'  # two-way partial AUC otherwise
sigmoid = nn.Sigmoid() # Limit the output score between 0 and 1
# load data and dataloader
args_dataset = edict({
"data_dir": "data/cifar-10-long-tail/",
"input_size": [32, 32],
"norm_params": {
"mean": [123.675, 116.280, 103.530],
"std": [58.395, 57.120, 57.375]
},
"use_lmdb": True,
"resampler_type": "None",
"sampler": { # only used for binary classification
"rpos": 1,
"rneg": 10
},
"npy_style": True,
"aug": True,
"class2id": { # positive (minority) class idx
"1": 1, "0":0, "2":0, "3":0, "4":0, "5":0,
"6":0, "7":0, "8":0, "9":0
}
})
args_model = edict({
"model_type": "resnet18", # (support resnet18,resnet20, densenet121 and mlp)
"num_classes": 2,
"pretrained": None
})
train_set, val_set, test_set = get_datasets(args_dataset)
train_loader, val_loader, test_loader = get_data_loaders(
    train_set,
    val_set,
    test_set,
    hyper_param['mini-batch'],
    hyper_param['mini-batch']
)
device = "cuda" if torch.cuda.is_available() else "cpu"
# load model (train the model from scratch, using resnet18)
model = generate_net(args_model).to(device)
# hyper parameters for the ASGDA optimizer
hparams = {
"k": 1,
"c1": 3,
"c2": 3,
"lam": 0.02,
"nu": 0.02,
"m": 500,
"device": device,
}
# define loss and optimizer
criterion = UnbiasedPAUCLoss(hyper_param['alpha'], hyper_param['beta'], device)
####################
# create optimizer #
####################
optimizer = ASGDA([
    {'params': model.parameters(), 'name': 'net'},
    {'params': [criterion.a, criterion.b], 'clip': (0, 1), 'name': 'ab'},
    {'params': criterion.s_n, 'clip': (0, 5), 'name': 'sn'},
    {'params': criterion.s_p, 'clip': (-4, 1), 'name': 'sp'},
    {'params': criterion.lam_b, 'clip': (0, 1e9), 'name': 'lamn'},
    {'params': criterion.lam_a, 'clip': (0, 1e9), 'name': 'lamp'},
    {'params': criterion.g, 'clip': (-1, 1), 'name': 'g'}],
    weight_decay=hyper_param['weight_decay'], hparams=hparams)
best_model = model.state_dict()
best_perf = 0
all_counter = 0
# train for 50 epochs
for epoch in range(50):
    all_pauc = 0
    counter = 0
    model.train()
    for i, (img, lbl) in enumerate(train_loader):
        # ASGDA runs two forward/backward passes per mini-batch:
        # step(pre=True) first, then step(pre=False) on the same batch
        optimizer.zero_grad()
        img = img.to(device)
        lbl = lbl.to(device).float()
        out = sigmoid(model(img))
        loss = criterion(out, lbl)
        loss.backward()
        optimizer.step(pre=True, t=all_counter)

        optimizer.zero_grad()
        out = sigmoid(model(img))
        loss = criterion(out, lbl)
        loss.backward()
        optimizer.step(pre=False, t=all_counter)

        label = lbl.cpu().detach().numpy().reshape((-1, ))
        pred = out.cpu().detach().numpy().reshape((-1, ))
        all_pauc += AUROC(label, pred, hyper_param['alpha'], hyper_param['beta'])
        counter += 1
        all_counter += 1
        print(loss.item())
    # record instances' prediction and label of val set
```
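
The example stops at the validation bookkeeping. As an illustration only (not part of the library's official example), a continuation placed inside the epoch loop could reuse the names already defined above (`model`, `sigmoid`, `val_loader`, `device`, `AUROC`, `hyper_param`, `best_perf`, `best_model`), for instance:

```python
# evaluate partial AUC on the validation set (illustrative sketch; place this
# inside the epoch loop of the example above)
model.eval()
preds, labels = [], []
with torch.no_grad():
    for img, lbl in val_loader:
        out = sigmoid(model(img.to(device)))
        preds.append(out.cpu().numpy().reshape((-1, )))
        labels.append(lbl.numpy().reshape((-1, )))
val_pauc = AUROC(np.concatenate(labels), np.concatenate(preds),
                 hyper_param['alpha'], hyper_param['beta'])
if val_pauc > best_perf:
    best_perf = val_pauc
    best_model = copy.deepcopy(model.state_dict())
```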