This repository has been archived by the owner on Dec 1, 2021. It is now read-only.

Replace EasyDict with SmartDict (#1220)
Solves #1092

Odoku-san implemented SmartDict as a replacement for EasyDict, so we can now replace EasyDict with SmartDict throughout the codebase.
iizukak authored Sep 28, 2020
1 parent a6a622d commit 8387f17
Showing 60 changed files with 200 additions and 202 deletions.
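The SmartDict implementation in blueoil/utils/smartdict.py is not part of this diff. Judging from how the configs below use it (attribute-style assignment on an object that also converts cleanly with dict()), a minimal sketch of the assumed behaviour might look like this:

```python
# Sketch only: the real blueoil.utils.smartdict.SmartDict is not shown in this diff.
class SmartDict(dict):
    """A dict whose items can also be read and written as attributes."""

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails, so dict methods still work.
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        # Route attribute assignment into the underlying dict.
        self[name] = value
```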
14 changes: 7 additions & 7 deletions blueoil/cmd/tune_ray.py
@@ -20,7 +20,7 @@
import click
import six
import tensorflow as tf
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict

import ray
from blueoil.datasets.base import ObjectDetectionBase
@@ -265,19 +265,19 @@ def run(config_file, tunable_id, local_dir):
    register_trainable(tunable_id, TrainTunable)
    lm_config = config_util.load(config_file)

-   def easydict_to_dict(config):
-       if isinstance(config, EasyDict):
+   def smartdict_to_dict(config):
+       if isinstance(config, SmartDict):
            config = dict(config)

        for key, value in config.items():
-           if isinstance(value, EasyDict):
+           if isinstance(value, SmartDict):
                value = dict(value)
-               easydict_to_dict(value)
+               smartdict_to_dict(value)
                config[key] = value
        return config

-   tune_space = easydict_to_dict(lm_config['TUNE_SPACE'])
-   tune_spec = easydict_to_dict(lm_config['TUNE_SPEC'])
+   tune_space = smartdict_to_dict(lm_config['TUNE_SPACE'])
+   tune_spec = smartdict_to_dict(lm_config['TUNE_SPEC'])
    tune_spec['run'] = tunable_id
    tune_spec['config'] = {'lm_config': os.path.join(os.getcwd(), config_file)}
    tune_spec['local_dir'] = local_dir
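The renamed helper recursively converts a SmartDict, and any nested SmartDict values, into plain dicts before the search space is handed to Ray Tune. A standalone sketch of the new behaviour, assuming the SmartDict outlined earlier; the tuning values here are hypothetical, not taken from the repository:

```python
# Standalone sketch, assuming the SmartDict behaviour sketched above.
def smartdict_to_dict(config):
    if isinstance(config, SmartDict):
        config = dict(config)

    for key, value in config.items():
        if isinstance(value, SmartDict):
            value = dict(value)
            smartdict_to_dict(value)  # convert nested SmartDicts in place
            config[key] = value
    return config


# Hypothetical tuning config, for illustration only.
space = SmartDict()
space.LEARNING_RATE = [0.001, 0.01]
space.OPTIMIZER = SmartDict()
space.OPTIMIZER.MOMENTUM = 0.9

plain = smartdict_to_dict(space)
assert type(plain) is dict
assert type(plain["OPTIMIZER"]) is dict  # nested values become plain dicts too
```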
6 changes: 3 additions & 3 deletions blueoil/configs/convert_weight_from_darknet/darknet19.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict

from blueoil.common import Tasks
from blueoil.networks.classification.darknet import Darknet
@@ -49,13 +49,13 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.IMAGE_SIZE = IMAGE_SIZE
NETWORK.BATCH_SIZE = BATCH_SIZE
NETWORK.DATA_FORMAT = DATA_FORMAT

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
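The same NETWORK/DATASET pattern repeats in the remaining config modules: the two namespaces become SmartDict instances and the existing attribute-style assignments are left untouched. Assuming SmartDict mirrors EasyDict's attribute and key access, as in the sketch above, the swap is transparent to config consumers. A small illustration:

```python
# Illustration only; assumes SmartDict supports both attribute and key access,
# as EasyDict did, so existing config consumers keep working.
NETWORK = SmartDict()
NETWORK.BATCH_SIZE = 8

assert NETWORK.BATCH_SIZE == 8         # attribute access, as the configs use it
assert NETWORK["BATCH_SIZE"] == 8      # key access, as generic helpers may use it
assert dict(NETWORK) == {"BATCH_SIZE": 8}
```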
6 changes: 3 additions & 3 deletions blueoil/configs/convert_weight_from_darknet/yolo_v2.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict

from blueoil.common import Tasks
from blueoil.networks.object_detection.yolo_v2 import YoloV2
@@ -69,7 +69,7 @@
NMS(iou_threshold=nms_iou_threshold, max_output_size=nms_max_output_size, classes=CLASSES,),
])

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.IMAGE_SIZE = IMAGE_SIZE
NETWORK.BATCH_SIZE = BATCH_SIZE
NETWORK.DATA_FORMAT = DATA_FORMAT
@@ -79,7 +79,7 @@
NETWORK.NMS_MAX_OUTPUT_SIZE = nms_max_output_size

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
6 changes: 3 additions & 3 deletions blueoil/configs/core/classification/darknet_cifar10.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -68,7 +68,7 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.piecewise_constant
@@ -83,7 +83,7 @@
NETWORK.WEIGHT_DECAY_RATE = 0.0005

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -71,7 +71,7 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.piecewise_constant
@@ -95,7 +95,7 @@
NETWORK.QUANTIZE_LAST_CONVOLUTION = False

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -75,7 +75,7 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.polynomial_decay
@@ -96,7 +96,7 @@
NETWORK.QUANTIZE_LAST_CONVOLUTION = False

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -75,7 +75,7 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.piecewise_constant
@@ -96,7 +96,7 @@
NETWORK.WEIGHT_QUANTIZER_KWARGS = {}

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
6 changes: 3 additions & 3 deletions blueoil/configs/core/classification/lmnet_cifar10.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -68,7 +68,7 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.piecewise_constant
@@ -83,7 +83,7 @@
NETWORK.WEIGHT_DECAY_RATE = 0.0005

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
6 changes: 3 additions & 3 deletions blueoil/configs/core/classification/lmnet_cifar100.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -68,7 +68,7 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.piecewise_constant
@@ -83,7 +83,7 @@
NETWORK.WEIGHT_DECAY_RATE = 0.0005

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
6 changes: 3 additions & 3 deletions blueoil/configs/core/classification/lmnet_openimagesv4.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -67,7 +67,7 @@
# SUMMARISE_STEPS = 2
# IS_DEBUG = True

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.piecewise_constant
@@ -81,7 +81,7 @@
NETWORK.WEIGHT_DECAY_RATE = 0.0005

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
DATASET.AUGMENTOR = Sequence([
6 changes: 3 additions & 3 deletions blueoil/configs/core/classification/lmnet_quantize_cifar10.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -72,7 +72,7 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.piecewise_constant
@@ -94,7 +94,7 @@
NETWORK.WEIGHT_QUANTIZER_KWARGS = {}

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -71,7 +71,7 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.piecewise_constant
@@ -93,7 +93,7 @@
NETWORK.WEIGHT_QUANTIZER_KWARGS = {}

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
6 changes: 3 additions & 3 deletions blueoil/configs/core/classification/lmnet_v1_cifar10.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -67,7 +67,7 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.piecewise_constant
@@ -82,7 +82,7 @@
NETWORK.WEIGHT_DECAY_RATE = 0.0005

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
-from easydict import EasyDict
+from blueoil.utils.smartdict import SmartDict
import tensorflow as tf

from blueoil.common import Tasks
@@ -71,7 +71,7 @@
])
POST_PROCESSOR = None

-NETWORK = EasyDict()
+NETWORK = SmartDict()
NETWORK.OPTIMIZER_CLASS = tf.compat.v1.train.MomentumOptimizer
NETWORK.OPTIMIZER_KWARGS = {"momentum": 0.9}
NETWORK.LEARNING_RATE_FUNC = tf.compat.v1.train.piecewise_constant
@@ -93,7 +93,7 @@
NETWORK.WEIGHT_QUANTIZER_KWARGS = {}

# dataset
-DATASET = EasyDict()
+DATASET = SmartDict()
DATASET.BATCH_SIZE = BATCH_SIZE
DATASET.DATA_FORMAT = DATA_FORMAT
DATASET.PRE_PROCESSOR = PRE_PROCESSOR
