Pylint2.10 update (#937)
* fixed all issues in examples folder for pylint 2.10 update

* fixed some pylint warnings in tests and nncf folders

* fixed pylint warnings in tools folder + setup.py file

* fixed parameter naming in synthetic.py

* forgot to pass file in json.load()

* added pylint flags

* disabled high-priority warnings in tests and fixed low-priority warnings

* fixed issues with iteration

* fixed issue with checkpoint_loading

* tried to fix pylint issues with bad option values

* fixed disable flag in examples/tensorflow/common/callbacks.py

* fixed disable flag in examples/tensorflow/common/callbacks.py
Noktyrn committed Sep 24, 2021
1 parent 5d60171 commit 24811aa
Showing 137 changed files with 300 additions and 286 deletions.
3 changes: 2 additions & 1 deletion examples/tensorflow/common/callbacks.py
@@ -83,8 +83,9 @@ def __init__(self,
         self.step = initial_step
         self._track_lr = track_lr
 
+    # pylint: disable=W0237
     def on_train_batch_begin(self,
-                             epoch: int, # pylint: disable=W0613
+                             epoch: int,
                              logs: MutableMapping[str, Any] = None) -> None:
         self.step += 1
         logs = logs or {}
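Background on the hunk above: pylint's W0237 (arguments-renamed) fires when an overriding method gives a parameter a different name than the base-class method, and W0613 (unused-argument) flags parameters the body never uses. A minimal, hypothetical sketch of the situation being suppressed (assumed class names, not the NNCF callback):

# Hypothetical illustration of W0237; the names below are made up.
class BaseCallback:
    def on_train_batch_begin(self, batch, logs=None):
        """The base signature calls the first argument 'batch'."""


class StepTracker(BaseCallback):
    # pylint: disable=W0237  # the override renames 'batch' to 'epoch'
    def on_train_batch_begin(self, epoch, logs=None):
        logs = logs or {}
        logs['last_seen'] = epoch


StepTracker().on_train_batch_begin(epoch=3)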
2 changes: 1 addition & 1 deletion examples/tensorflow/common/object_detection/losses.py
@@ -698,7 +698,7 @@ def yolo3_decode(self, feats, anchors, num_classes, input_shape, scale_x_y=None)
 
     def get_anchors(self, anchors_path):
         """loads the anchors from a file"""
-        with open(anchors_path) as f:
+        with open(anchors_path, encoding='utf8') as f:
             anchors = f.readline()
         anchors = [float(x) for x in anchors.split(',')]
         return np.array(anchors).reshape(-1, 2)
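The encoding='utf8' arguments added here (and in most open() calls throughout this commit) address W1514 (unspecified-encoding), a check introduced in pylint 2.10 that warns when a text-mode open() falls back to the locale-dependent default encoding. A small illustrative sketch, not taken from the repository:

# Illustration of W1514: only the first helper is flagged.
def read_first_line_implicit(path):
    with open(path) as f:  # W1514: encoding depends on the current locale
        return f.readline()


def read_first_line_explicit(path):
    with open(path, encoding='utf8') as f:  # explicit encoding, no warning
        return f.readline()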
@@ -13,7 +13,6 @@
 
 from abc import ABCMeta
 from abc import abstractmethod
-from abc import abstractproperty
 
 import tensorflow as tf
 
@@ -28,7 +27,8 @@ class BoxCoder:
     """Abstract base class for box coder."""
     __metaclass__ = ABCMeta
 
-    @abstractproperty
+    @property
+    @abstractmethod
    def code_size(self):
         """Return the size of each code.
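For context: abc.abstractproperty has been deprecated since Python 3.3, and stacking @property on top of @abstractmethod is the documented replacement, which is what this hunk does. A minimal sketch of the pattern with assumed names (not the repository's BoxCoder):

from abc import ABC, abstractmethod


class Coder(ABC):
    @property
    @abstractmethod
    def code_size(self):
        """Number of values produced per encoded box."""


class FourValueCoder(Coder):
    @property
    def code_size(self):
        return 4  # concrete subclasses simply override the property


print(FourValueCoder().code_size)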
2 changes: 1 addition & 1 deletion examples/tensorflow/common/sample_config.py
@@ -74,7 +74,7 @@ def parse_known_args(self, args=None, namespace=None):
 class SampleConfig(Dict):
     @classmethod
     def from_json(cls, path) -> 'SampleConfig':
-        with open(path) as f:
+        with open(path, encoding='utf8') as f:
             loaded_json = json.load(f)
         return cls(loaded_json)
 
6 changes: 3 additions & 3 deletions examples/tensorflow/common/utils.py
@@ -65,7 +65,7 @@ def get_name(config):
 def write_metrics(acc, filename):
     avg = round(acc * 100, 2)
     metrics = {"Accuracy": avg}
-    with open(filename, 'w') as outfile:
+    with open(filename, 'w', encoding='utf8') as outfile:
         json.dump(metrics, outfile)
 
 
@@ -116,7 +116,7 @@ def print_args(config, logger=default_logger):
 
 
 def serialize_config(config, log_dir):
-    with open(osp.join(log_dir, 'config.json'), 'w') as f:
+    with open(osp.join(log_dir, 'config.json'), 'w', encoding='utf8') as f:
         json.dump(config, f, indent=4)
 
 
@@ -126,7 +126,7 @@ def serialize_cli_args(argparser, argv, log_dir):
         cli_args = {k:v for k, v in vars(args).items() if k in argparser.seen_actions}
     else:
         cli_args = {k:v for k, v in vars(args).items() if v is not None}
-    with open(osp.join(log_dir, 'cli_args.json'), 'w') as f:
+    with open(osp.join(log_dir, 'cli_args.json'), 'w', encoding='utf8') as f:
         json.dump(cli_args, f, indent=4)
 
 
@@ -72,7 +72,7 @@ def build_model(self, weights=None, is_training=None):
             keras_model.load_weights(weights, by_name=True)
         return keras_model
 
-    def build_loss_fn(self, compress_model, compression_loss_fn):
+    def build_loss_fn(self, keras_model, compression_loss_fn):
         def _total_loss_fn(labels, outputs):
             loss_fn_out = self._loss_fn(labels, outputs,
                                         self._params.anchors,
@@ -510,7 +510,7 @@ def yolo_adjust_boxes(boxes, img_shape):
 
 def get_anchors(anchors_path):
     """loads the anchors from a file"""
-    with open(anchors_path) as f:
+    with open(anchors_path, encoding='utf8') as f:
         anchors = f.readline()
     anchors = [float(x) for x in anchors.split(',')]
     return np.array(anchors).reshape(-1, 2)
14 changes: 7 additions & 7 deletions examples/torch/classification/main.py
@@ -21,15 +21,15 @@
 from typing import Any
 
 import torch
-import torch.backends.cudnn as cudnn
-import torch.nn as nn
+from torch.backends import cudnn
+from torch import nn
 import torch.nn.parallel
 import torch.optim
 import torch.utils.data
 import torch.utils.data.distributed
-import torchvision.datasets as datasets
-import torchvision.models as models
-import torchvision.transforms as transforms
+from torchvision import datasets
+from torchvision import models
+from torchvision import transforms
 from torch.nn.modules.loss import _Loss
 from torch.optim.lr_scheduler import ReduceLROnPlateau
 from torchvision.datasets import CIFAR10
@@ -79,9 +79,9 @@
 from nncf.torch.utils import is_main_process
 from nncf.torch.utils import safe_thread_call
 
-model_names = sorted(name for name in models.__dict__
+model_names = sorted(name for name, val in models.__dict__.items()
                      if name.islower() and not name.startswith("__")
-                     and callable(models.__dict__[name]))
+                     and callable(val))
 
 
 def get_argument_parser():
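The import rewrites in this file (and in the files below) follow pylint's consider-using-from-import suggestion (R0402), which prefers "from package import module" over "import package.module as module"; the model_names rewrite additionally avoids re-indexing models.__dict__ inside the loop. A hedged, stdlib-only sketch of the import rewrite:

# Stdlib illustration of R0402 (consider-using-from-import); torch is not needed here.
# import os.path as path        # flagged: aliasing a submodule import
from os import path             # preferred equivalent

print(path.basename('/tmp/example.json'))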
2 changes: 1 addition & 1 deletion examples/torch/classification/models/mobilenet_v2_32x32.py
@@ -14,7 +14,7 @@
 from typing import List
 from typing import Optional
 
-import torch.nn as nn
+from torch import nn
 from torch import Tensor
 
 
4 changes: 2 additions & 2 deletions examples/torch/classification/staged_quantization_worker.py
@@ -16,8 +16,8 @@
 import time
 
 import torch
-import torch.backends.cudnn as cudnn
-import torch.nn as nn
+from torch.backends import cudnn
+from torch import nn
 import torch.nn.parallel
 import torch.optim
 import torch.utils.data
2 changes: 1 addition & 1 deletion examples/torch/common/model_loader.py
@@ -21,7 +21,7 @@
 import examples.torch.common.models as custom_models
 from examples.torch.classification.models.mobilenet_v2_32x32 import MobileNetV2For32x32
 from examples.torch.common.example_logger import logger
-import examples.torch.common.restricted_pickle_module as restricted_pickle_module
+from examples.torch.common import restricted_pickle_module
 from nncf.torch.checkpoint_loading import load_state
 from nncf.torch.utils import safe_thread_call
 
@@ -17,7 +17,7 @@
 from collections import namedtuple
 
 import torch
-import torch.nn as nn
+from torch import nn
 import torch.nn.functional as F
 from torch.utils import model_zoo
 
@@ -94,7 +94,7 @@ def __init__(self, num_classes=1000, aux_logits=True, transform_input=False):
 
         for m in self.modules():
             if isinstance(m, (nn.Conv2d, nn.Linear)):
-                import scipy.stats as stats
+                from scipy import stats
                 stddev = m.stddev if hasattr(m, 'stddev') else 0.1
                 X = stats.truncnorm(-2, 2, scale=stddev)
                 values = torch.as_tensor(X.rvs(m.weight.numel()), dtype=m.weight.dtype)
2 changes: 1 addition & 1 deletion examples/torch/common/models/classification/mobilenetv3.py
@@ -21,7 +21,7 @@
 """
 
-import torch.nn as nn
+from torch import nn
 import math
 
 __all__ = ['mobilenetv3_Large', 'mobilenetv3']
@@ -11,7 +11,7 @@
 limitations under the License.
 """
 
-import torch.nn as nn
+from torch import nn
 import torch.nn.functional as F
 
 
2 changes: 1 addition & 1 deletion examples/torch/common/models/classification/rmnet_cifar.py
@@ -1,6 +1,6 @@
 from collections import OrderedDict
 
-import torch.nn as nn
+from torch import nn
 import torch.nn.functional as F
 
 
2 changes: 1 addition & 1 deletion examples/torch/common/models/segmentation/enet.py
@@ -14,7 +14,7 @@
 # ENet implementation from:
 # https://github.com/davidtvs/PyTorch-ENet
 
-import torch.nn as nn
+from torch import nn
 import torch
 
 from examples.torch.common.example_logger import logger
2 changes: 1 addition & 1 deletion examples/torch/common/models/segmentation/icnet.py
@@ -22,7 +22,7 @@
 from pkg_resources import parse_version
 
 from numpy import lcm
-import torch.nn as nn
+from torch import nn
 import torch
 import torch.nn.functional as F
 
2 changes: 1 addition & 1 deletion examples/torch/common/models/segmentation/unet.py
@@ -16,7 +16,7 @@
 
 from pkg_resources import parse_version
 
-import torch.nn as nn
+from torch import nn
 import torch
 import torch.nn.functional as F
 
4 changes: 2 additions & 2 deletions examples/torch/common/restricted_pickle_module.py
@@ -63,8 +63,8 @@ def find_class(self, module_name, class_name):
             return getattr(builtins, class_name)
         if module_name == "collections" and class_name in Unpickler.safe_collections:
             return getattr(collections, class_name)
-        for allowed_module_name in Unpickler.allowed_classes:
-            if module_name == allowed_module_name and class_name in Unpickler.allowed_classes[allowed_module_name]:
+        for allowed_module_name, val in Unpickler.allowed_classes.items():
+            if module_name == allowed_module_name and class_name in val:
                 module = importlib.import_module(module_name)
                 return getattr(module, class_name)
 
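The loop rewrite above matches pylint's consider-using-dict-items recommendation (C0206), which flags iterating over a dict's keys and then indexing the same dict inside the loop; the same pattern drives the model_names change in examples/torch/classification/main.py earlier in this diff. A standalone sketch with assumed data (not the Unpickler allow-list):

# Assumed data; illustrates C0206 only.
ALLOWED = {"collections": {"OrderedDict"}, "builtins": {"set", "frozenset"}}


def is_allowed_flagged(module_name, class_name):
    for name in ALLOWED:  # C0206: key iteration plus ALLOWED[name] lookups
        if module_name == name and class_name in ALLOWED[name]:
            return True
    return False


def is_allowed_preferred(module_name, class_name):
    for name, classes in ALLOWED.items():  # iterate key/value pairs directly
        if module_name == name and class_name in classes:
            return True
    return False


assert is_allowed_flagged("builtins", "set") == is_allowed_preferred("builtins", "set")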
9 changes: 5 additions & 4 deletions examples/torch/common/utils.py
@@ -25,7 +25,7 @@
 from typing import Tuple
 
 from PIL import Image
-import torch.utils.data as data
+from torch.utils import data
 
 from examples.torch.common.distributed import configure_distributed
 from examples.torch.common.execution import ExecutionMode, get_device
@@ -76,7 +76,7 @@ def get_name(config):
 def write_metrics(acc, filename):
     avg = round(acc * 100, 2)
     metrics = {"Accuracy": avg}
-    with open(filename, 'w') as outfile:
+    with open(filename, 'w', encoding='utf8') as outfile:
         json.dump(metrics, outfile)
 
 
@@ -223,8 +223,9 @@ class ForkedPdb(pdb.Pdb):
     def interaction(self, *args, **kwargs):
         _stdin = sys.stdin
         try:
-            sys.stdin = open('/dev/stdin')
-            pdb.Pdb.interaction(self, *args, **kwargs)
+            with open('/dev/stdin', encoding='utf8') as file:
+                sys.stdin = file
+                pdb.Pdb.interaction(self, *args, **kwargs)
         finally:
             sys.stdin = _stdin
 
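The ForkedPdb change above also satisfies consider-using-with (R1732): a bare open() whose handle is never explicitly closed gets flagged, while a with block guarantees the file is closed even if the debugger session raises. A self-contained sketch of the same refactoring under assumed names:

import sys


def redirect_stdin_flagged(path):
    sys.stdin = open(path, encoding='utf8')  # R1732: handle is never closed explicitly
    return sys.stdin.readline()


def redirect_stdin_preferred(path):
    original = sys.stdin
    try:
        with open(path, encoding='utf8') as handle:  # closed automatically on exit
            sys.stdin = handle
            return sys.stdin.readline()
    finally:
        sys.stdin = original  # always restore the real stdin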
4 changes: 2 additions & 2 deletions examples/torch/object_detection/datasets/coco.py
@@ -19,7 +19,7 @@
 import cv2
 import numpy as np
 import torch
-import torch.utils.data as data
+from torch.utils import data
 
 from examples.torch.common.example_logger import logger
 
@@ -50,7 +50,7 @@ def _read_coco_annotation(annotation_file, images_folder):
     images_folder = Path(images_folder)
     anno_dict = OrderedDict()
 
-    with open(annotation_file) as data_file:
+    with open(annotation_file, encoding="utf8") as data_file:
         json_annotation = json.load(data_file)
     annotation = json_annotation["annotations"]
 
9 changes: 5 additions & 4 deletions examples/torch/object_detection/datasets/voc0712.py
@@ -18,7 +18,7 @@
 import cv2
 import numpy as np
 import torch
-import torch.utils.data as data
+from torch.utils import data
 
 if sys.version_info[0] == 2:
     import defusedxml.cElementTree as ET
@@ -115,12 +115,13 @@ def __init__(self, root, image_sets=(('2007', 'trainval'), ('2012', 'trainval'))
         self.return_image_info = return_image_info
         self._annopath = os.path.join('%s', 'Annotations', '%s.xml')
         self._imgpath = os.path.join('%s', 'JPEGImages', '%s.jpg')
-        self.ids = list()
+        self.ids = []
 
         for (year, name) in self.image_set:
            rootpath = os.path.join(self.root, 'VOC' + year)
-            for line in open(os.path.join(rootpath, 'ImageSets', 'Main', name + '.txt')):
-                self.ids.append((rootpath, line.strip()))
+            with open(os.path.join(rootpath, 'ImageSets', 'Main', name + '.txt'), encoding='utf8') as lines:
+                for line in lines:
+                    self.ids.append((rootpath, line.strip()))
 
     def __getitem__(self, index):
         """
4 changes: 2 additions & 2 deletions examples/torch/object_detection/eval.py
@@ -217,11 +217,11 @@ def load_detection_annotations(cachedir, dataset):
         # save
         logger.info('Saving cached annotations to {:s}'.format(cachefile))
         pathlib.Path(cachedir).mkdir(parents=True, exist_ok=True)
-        with open(cachefile, 'w') as f:
+        with open(cachefile, 'w', encoding='utf8') as f:
             json.dump(gt, f)
     if is_dist_avail_and_initialized():
         dist.barrier()
-    with open(cachefile, 'r') as f:
+    with open(cachefile, 'r', encoding='utf8') as f:
         gt = json.load(f)
     return gt, imagenames
 
8 changes: 4 additions & 4 deletions examples/torch/object_detection/layers/functions/detection.py
@@ -96,8 +96,8 @@ def forward(ctx, loc_data, conf_data, prior_data, detection_output_params):
             conf_scores = conf_preds[i].clone()
 
             total_detections_count = 0
-            all_indices = dict()  # indices of confident detections for each class
-            boxes = dict()
+            all_indices = {}  # indices of confident detections for each class
+            boxes = {}
             for cl in range(0, detection_output_params.num_classes):
                 if cl == detection_output_params.background_label_id:
                     continue
@@ -115,7 +115,7 @@ def forward(ctx, loc_data, conf_data, prior_data, detection_output_params):
                 all_indices[cl] = all_indices[cl][:count]
                 total_detections_count += count
 
-            score_index_pairs = list()  # list of tuples (score, label, idx)
+            score_index_pairs = []  # list of tuples (score, label, idx)
             for label, indices in all_indices.items():
                 indices = indices.cpu().numpy()
                 for idx in indices:
@@ -124,7 +124,7 @@ def forward(ctx, loc_data, conf_data, prior_data, detection_output_params):
             score_index_pairs.sort(key=lambda tup: tup[0], reverse=True)
             score_index_pairs = score_index_pairs[:detection_output_params.keep_top_k]
 
-            all_indices_new = dict()
+            all_indices_new = {}
             for _, label, idx in score_index_pairs:
                 if label not in all_indices_new:
                     all_indices_new[label] = [idx]
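The dict() and list() replacements in this hunk correspond to pylint's use-dict-literal (R1735) and use-list-literal (R1734) checks, which appeared around the 2.10 release; the literal forms are the idiomatic and marginally faster way to create empty containers. A quick hypothetical sketch:

# Hypothetical illustration; the names are not from the detection code.
def collect_flagged():
    scores = dict()  # R1735: prefer {}
    order = list()   # R1734: prefer []
    scores["person"] = 0.9
    order.append("person")
    return scores, order


def collect_preferred():
    scores = {}  # literal form, same behaviour
    order = []
    scores["person"] = 0.9
    order.append("person")
    return scores, order


assert collect_flagged() == collect_preferred()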
4 changes: 2 additions & 2 deletions examples/torch/object_detection/layers/modules/l2norm.py
@@ -12,8 +12,8 @@
 """
 
 import torch
-import torch.nn as nn
-import torch.nn.init as init
+from torch import nn
+from torch.nn import init
 
 from nncf.torch.utils import add_domain
 from nncf.torch import register_module
@@ -12,7 +12,7 @@
 """
 
 import torch
-import torch.nn as nn
+from torch import nn
 import torch.nn.functional as F
 
 from ..box_utils import match, log_sum_exp
1 change: 1 addition & 0 deletions examples/torch/object_detection/layers/modules/ssd_head.py
@@ -106,6 +106,7 @@ def __init__(self, outputs, modules):
         super().__init__(*modules)
         self.outputs = [str(o) for o in outputs]
 
+    #pylint:disable=W0237
     def forward(self, x):
         outputs = []
         for name, module in self._modules.items():
2 changes: 1 addition & 1 deletion examples/torch/object_detection/main.py
@@ -17,7 +17,7 @@
 from pathlib import Path
 
 import torch
-import torch.utils.data as data
+from torch.utils import data
 
 from examples.torch.common.argparser import parse_args
 from torch.optim.lr_scheduler import ReduceLROnPlateau
2 changes: 1 addition & 1 deletion examples/torch/object_detection/models/ssd_mobilenet.py
@@ -12,7 +12,7 @@
 """
 
 import torch
-import torch.nn as nn
+from torch import nn
 
 from examples.torch.common import restricted_pickle_module
 from examples.torch.common.example_logger import logger