-
Notifications
You must be signed in to change notification settings - Fork 13
/
config.py
93 lines (70 loc) · 2.7 KB
/
config.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
##############################################################################
# Config
# Config is used to set dataset path for training and testing
##############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import torch
from utils.attr_dict import AttrDict
# Global configuration object. Other modules use it via
# `from config import cfg`; `__C` is the module-private alias used here.
__C = AttrDict()
cfg = __C
# Current epoch counter (updated externally during training).
__C.EPOCH = 0
# Fraction of samples drawn with Class Uniform Sampling, so each class
# gets proper representation per epoch (0.0 disables it).
__C.CLASS_UNIFORM_PCT = 0.0
# Use class-weighted loss per batch to increase the loss contribution of
# low-pixel-count classes within each batch.
__C.BATCH_WEIGHTING = False
# Border relaxation window size (in pixels) for label relaxation.
__C.BORDER_WINDOW = 1
# Epoch after which border relaxation is turned off (-1 = never turn off).
__C.REDUCE_BORDER_EPOCH = -1
# List of class ids to relax at borders (parsed from a comma-separated
# command-line string in assert_and_infer_cfg); None = no strict classes.
__C.STRICTBORDERCLASS = None
# Attribute dictionary holding dataset directory locations.
# Set your dataset paths here.
__C.DATASET = AttrDict()
# MSD dataset directory location.
__C.DATASET.MSD_DIR = './data/MSD'
# GDD dataset directory location.
__C.DATASET.GDD_DIR = './data/GDD'
# Trans10k dataset directory location.
__C.DATASET.TRANS10K_DIR = './data/Trans10k'
# Number of cross-validation splits to support.
__C.DATASET.CV_SPLITS = 3
# Model settings: batch-norm variant name and the BN class to instantiate
# (BNFUNC is filled in by assert_and_infer_cfg).
__C.MODEL = AttrDict()
__C.MODEL.BN = 'regularnorm'
__C.MODEL.BNFUNC = None
def assert_and_infer_cfg(args, make_immutable=True, train_mode=True):
    """Finalize the global cfg from parsed command-line arguments.

    Call this in your script after you have finished setting all cfg
    values that are necessary (e.g., merging a config from a file, merging
    command line config options, etc.). It selects the batch-norm
    implementation, copies sampling/weighting/border-relaxation options
    from ``args`` into the global cfg, and by default marks the global cfg
    as immutable to prevent changing settings during script execution
    (which can lead to hard-to-debug errors or code that's harder to
    understand than is necessary).

    Args:
        args: parsed argparse Namespace. Attributes that may be absent
            (``syncbn``, ``apex``) are read defensively with ``getattr``.
        make_immutable: when True, freeze the global cfg before returning.
        train_mode: when False, skip all training-only options and freeze
            the cfg immediately.

    Raises:
        Exception: if synchronized BN is requested without Apex support.
    """
    # --- Batch-norm selection -------------------------------------------
    if getattr(args, 'syncbn', False):
        # Bugfix: use getattr for `apex` as well -- an args namespace with
        # syncbn set but no `apex` attribute previously raised a bare
        # AttributeError instead of the informative error below.
        if getattr(args, 'apex', False):
            import apex  # third-party; only required for synchronized BN
            __C.MODEL.BN = 'apex-syncnorm'
            __C.MODEL.BNFUNC = apex.parallel.SyncBatchNorm
        else:
            raise Exception('No Support for SyncBN without Apex')
    else:
        __C.MODEL.BNFUNC = torch.nn.BatchNorm2d
        print('Using regular batch norm')

    # Inference/eval: nothing else to copy; freeze and bail out early.
    if not train_mode:
        cfg.immutable(True)
        return

    # --- Training-only options ------------------------------------------
    if args.class_uniform_pct:
        cfg.CLASS_UNIFORM_PCT = args.class_uniform_pct

    if args.batch_weighting:
        __C.BATCH_WEIGHTING = True

    if args.jointwtborder:
        if args.strict_bdr_cls != '':
            # Comma-separated class ids, e.g. "1,2,3" -> [1, 2, 3].
            __C.STRICTBORDERCLASS = [int(i) for i in args.strict_bdr_cls.split(",")]
        if args.rlx_off_epoch > -1:
            __C.REDUCE_BORDER_EPOCH = args.rlx_off_epoch

    if make_immutable:
        cfg.immutable(True)