Remove --allow-external #1198

Merged: 6 commits, merged on Sep 6, 2018
Changes from 1 commit
Fix flake8 in tests
dmitriy-serdyuk committed Sep 6, 2018
commit 7f0a123f999db5d0a45faa0efb72f4c2033e3773
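
This commit addresses flake8 complaints across the tree: added blank lines after definitions (E305), `# noqa` markers on intentional bare excepts (E722) and a documentation-only import (F401), rewrapped long continuation lines, and a removed trailing blank line. For context, a sketch of reproducing the lint run with flake8's legacy Python API; the paths and selected codes below are assumptions, not taken from this repository's config:

    # Hypothetical reproduction of the lint run this commit cleans up
    # (flake8 >= 3 public API):
    from flake8.api import legacy as flake8_api

    style_guide = flake8_api.get_style_guide(select=["E", "W", "F"])
    report = style_guide.check_files(["blocks", "tests", "doctests", "docs"])
    print("violations:", report.total_errors)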
4 changes: 2 additions & 2 deletions blocks/bricks/__init__.py
@@ -7,8 +7,8 @@
 from .interfaces import (Activation, Feedforward, Initializable, LinearLike,
                          Random)
 from .recurrent import (BaseRecurrent, SimpleRecurrent, LSTM, GatedRecurrent,
-                        Bidirectional, RecurrentStack, RECURRENTSTACK_SEPARATOR,
-                        recurrent)
+                        Bidirectional, RecurrentStack,
+                        RECURRENTSTACK_SEPARATOR, recurrent)
 from .simple import (Linear, Bias, Maxout, LinearMaxout, Identity, Tanh,
                      Logistic, Softplus, Rectifier, LeakyRectifier,
                      Softmax, NDimensionalSoftmax)
2 changes: 2 additions & 0 deletions blocks/bricks/base.py
@@ -32,6 +32,7 @@ def create_unbound_method(func, cls):
     if six.PY3:
         return func
 
+
 # Rename built-in property to avoid conflict with Application.property
 property_ = property
 
@@ -806,6 +807,7 @@ def __bool__(self):
 
     __nonzero__ = __bool__
 
+
 NoneAllocation = LazyNone('NoneAllocation')
 NoneInitialization = LazyNone('NoneInitialization')
 
5 changes: 5 additions & 0 deletions blocks/bricks/recurrent/__init__.py
@@ -1,3 +1,8 @@
 from .base import BaseRecurrent, recurrent
 from .architectures import SimpleRecurrent, LSTM, GatedRecurrent
 from .misc import Bidirectional, RecurrentStack, RECURRENTSTACK_SEPARATOR
+
+
+__all__ = ("BaseRecurrent", "recurrent", "SimpleRecurrent", "LSTM",
+           "GatedRecurrent", "Bidirectional", "RecurrentStack",
+           "RECURRENTSTACK_SEPARATOR")
1 change: 1 addition & 0 deletions blocks/bricks/recurrent/misc.py
@@ -58,6 +58,7 @@ def get_dim(self, name):
             return self.prototype.get_dim(name) * 2
         return self.prototype.get_dim(name)
 
+
 RECURRENTSTACK_SEPARATOR = '#'
 
 
3 changes: 3 additions & 0 deletions blocks/monitoring/aggregation.py
@@ -217,6 +217,7 @@ def get_aggregator(self):
         return self._build_aggregator(tensor.minimum(self.storage,
                                                      self.variable))
 
+
 minimum = partial(_simple_aggregation, Minimum)
 
 
@@ -227,6 +228,7 @@ def get_aggregator(self):
         return self._build_aggregator(tensor.maximum(self.storage,
                                                      self.variable))
 
+
 maximum = partial(_simple_aggregation, Maximum)
 
 
@@ -251,6 +253,7 @@ def get_aggregator(self):
         return self._build_aggregator(tensor.concatenate([self.storage,
                                                            self.variable]))
 
+
 concatenate = partial(_simple_aggregation, Concatenate)
 
 
25 changes: 25 additions & 0 deletions blocks/roles.py
@@ -75,146 +75,167 @@ def __repr__(self):
 class InputRole(VariableRole):
     pass
 
+
 #: The input of a :class:`~.bricks.Brick`
 INPUT = InputRole()
 
 
 class OutputRole(VariableRole):
     pass
 
+
 #: The output of a :class:`~.bricks.Brick`
 OUTPUT = OutputRole()
 
 
 class CostRole(VariableRole):
     pass
 
+
 #: A scalar cost that can be used to train or regularize
 COST = CostRole()
 
 
 class PersistentRole(VariableRole):
     pass
 
+
 # Any persistent quantity that should be saved as part of the model
 PERSISTENT = PersistentRole()
 
 
 class ParameterRole(PersistentRole):
     pass
 
+
 #: A parameter of the model
 PARAMETER = ParameterRole()
 
 
 class AuxiliaryRole(VariableRole):
     pass
 
+
 #: Variables added to the graph as annotations
 AUXILIARY = AuxiliaryRole()
 
 
 class WeightRole(ParameterRole):
     pass
 
+
 #: The weight matrices of linear transformations
 WEIGHT = WeightRole()
 
 
 class BiasRole(ParameterRole):
     pass
 
+
 #: Biases of linear transformations
 BIAS = BiasRole()
 
 
 class InitialStateRole(ParameterRole):
     pass
 
+
 #: Initial state of a recurrent network
 INITIAL_STATE = InitialStateRole()
 
 
 class FilterRole(WeightRole):
     pass
 
+
 #: The filters (kernels) of a convolution operation
 FILTER = FilterRole()
 
 
 class DropoutRole(VariableRole):
     pass
 
+
 #: Inputs with applied dropout
 DROPOUT = DropoutRole()
 
 
 class CollectedRole(VariableRole):
     pass
 
+
 #: The replacement of a variable collected into a single shared variable
 COLLECTED = CollectedRole()
 
 
 class CollectorRole(ParameterRole):
     pass
 
+
 #: A collection of parameters combined into a single shared variable
 COLLECTOR = CollectorRole()
 
 
 class AlgorithmStateRole(VariableRole):
     pass
 
+
 #: Shared variables used in algorithms updates
 ALGORITHM_STATE = AlgorithmStateRole()
 
 
 class AlgorithmHyperparameterRole(AlgorithmStateRole):
     pass
 
+
 #: hyperparameters accociated with algorithms
 ALGORITHM_HYPERPARAMETER = AlgorithmHyperparameterRole()
 
 
 class AlgorithmBufferRole(AlgorithmStateRole):
     pass
 
+
 #: buffers accociated with algorithms
 ALGORITHM_BUFFER = AlgorithmBufferRole()
 
 
 class BatchNormPopulationStatisticsRole(PersistentRole):
     pass
 
+
 #: base role for batch normalization population statistics
 BATCH_NORM_POPULATION_STATISTICS = BatchNormPopulationStatisticsRole()
 
 
 class BatchNormPopulationMeanRole(BatchNormPopulationStatisticsRole):
     pass
 
+
 #: mean activations accumulated over the dataset
 BATCH_NORM_POPULATION_MEAN = BatchNormPopulationMeanRole()
 
 
 class BatchNormPopulationStdevRole(BatchNormPopulationStatisticsRole):
     pass
 
+
 #: standard deviations of activations accumulated over the dataset
 BATCH_NORM_POPULATION_STDEV = BatchNormPopulationStdevRole()
 
 
 class BatchNormGraphVariableRole(VariableRole):
     pass
 
+
 #: base for roles used for within-graph batch normalization replacement
 BATCH_NORM_GRAPH_VARIABLE = BatchNormGraphVariableRole()
 
 
 class BatchNormOffsetRole(BatchNormGraphVariableRole):
     pass
 
+
 #: offset applied in a BatchNormalization application (or its
 # batch-normalized replacement)
 BATCH_NORM_OFFSET = BatchNormOffsetRole()
@@ -223,6 +244,7 @@ class BatchNormOffsetRole(BatchNormGraphVariableRole):
 class BatchNormDivisorRole(BatchNormGraphVariableRole):
     pass
 
+
 #: divisor applied in a BatchNormalization application (or its
 # batch-normalized replacement)
 BATCH_NORM_DIVISOR = BatchNormDivisorRole()
@@ -231,6 +253,7 @@ class BatchNormDivisorRole(BatchNormGraphVariableRole):
 class BatchNormMinibatchEstimateRole(BatchNormGraphVariableRole):
     pass
 
+
 #: role added to variables that are the result of a batch normalization
 # replacement, rather than the original population statistics variables.
 BATCH_NORM_MINIBATCH_ESTIMATE = BatchNormMinibatchEstimateRole()
@@ -239,6 +262,7 @@ class BatchNormMinibatchEstimateRole(BatchNormGraphVariableRole):
 class BatchNormScaleParameterRole(ParameterRole):
     pass
 
+
 #: role given to the scale parameter, referred to as "scale" in the
 # batch normalization manuscript, applied after normalizing.
 BATCH_NORM_SCALE_PARAMETER = BatchNormScaleParameterRole()
@@ -247,6 +271,7 @@ class BatchNormScaleParameterRole(ParameterRole):
 class BatchNormShiftParameterRole(BiasRole):
     pass
 
+
 #: role given to the shift parameter, referred to as "beta" in the
 # batch normalization manuscript, applied after normalizing and scaling.
 # Inherits from BIAS, because there really is no functional difference
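
The role classes form an inheritance hierarchy (WeightRole is a ParameterRole, which is a PersistentRole), so querying for a broad role also matches its specializations. A usage sketch from memory of the Blocks API; the exact names are worth double-checking against blocks/roles.py:

    import theano.tensor as tensor
    from blocks.roles import add_role, has_roles, WEIGHT, PARAMETER

    W = tensor.matrix('W')
    add_role(W, WEIGHT)
    # WeightRole subclasses ParameterRole, so a PARAMETER query matches too:
    assert has_roles(W, [WEIGHT])
    assert has_roles(W, [PARAMETER])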
3 changes: 2 additions & 1 deletion blocks/serialization.py
@@ -229,7 +229,7 @@ def secure_dump(object_, path, dump_function=dump, **kwargs):
         logger.debug("Moving the temporary file")
         shutil.move(temp.name, path)
         logger.debug("Dump finished")
-    except:
+    except:  # noqa: E722
         if "temp" in locals():
             os.remove(temp.name)
         raise
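
The `# noqa: E722` keeps the bare `except:` rather than narrowing it: the temporary file must be removed and the exception re-raised no matter what interrupted the dump, including `KeyboardInterrupt`. The shape of the pattern in isolation (illustrative only, not the Blocks code):

    import os
    import shutil
    import tempfile

    def careful_dump(data, path):
        temp = tempfile.NamedTemporaryFile(delete=False)
        try:
            temp.write(data)
            temp.close()
            shutil.move(temp.name, path)
        except:  # noqa: E722 -- broad on purpose: clean up, then re-raise
            temp.close()
            os.remove(temp.name)
            raise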
@@ -564,6 +564,7 @@ def _recreate_pygpu_array(context_name, content):
     context = theano.gpuarray.get_context(context_name)
     return pygpu.gpuarray.array(content, context=context)
 
+
 _ARRAY_TYPE_MAP = {numpy.ndarray: 'numpy_ndarray'}
 _INVERSE_ARRAY_TYPE_MAP = {'numpy_ndarray': _recreate_numpy_ndarray}
 if cuda_ndarray:
16 changes: 9 additions & 7 deletions blocks/utils/__init__.py
@@ -10,13 +10,15 @@
 
 
 class UtilsModule(ModuleType):
-    utils_attrs = ("pack", "unpack", "reraise_as", "dict_subset", "dict_union",
-                   "repr_attrs", "ipdb_breakpoint", "print_sum", "print_shape",
-                   "change_recursion_limit", "extract_args", "find_bricks")
-    theano_utils_attrs = ("shared_floatx_zeros_matching", "shared_floatx_zeros",
-                          "shared_floatx_nans", "shared_floatx", "shared_like",
-                          "check_theano_variable", "is_graph_input",
-                          "is_shared_variable", "put_hook")
+    utils_attrs = (
+        "pack", "unpack", "reraise_as", "dict_subset", "dict_union",
+        "repr_attrs", "ipdb_breakpoint", "print_sum", "print_shape",
+        "change_recursion_limit", "extract_args", "find_bricks")
+    theano_utils_attrs = (
+        "shared_floatx_zeros_matching", "shared_floatx_zeros",
+        "shared_floatx_nans", "shared_floatx", "shared_like",
+        "check_theano_variable", "is_graph_input",
+        "is_shared_variable", "put_hook")
     __all__ = utils_attrs + theano_utils_attrs
     __doc__ = __doc__
     __file__ = __file__
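
For context, `UtilsModule` is a `types.ModuleType` subclass that lets `blocks.utils` forward the attributes listed above to its submodules lazily. A minimal, self-contained sketch of the lazy-module idea (hypothetical and simplified; not how the Blocks class is implemented line for line):

    import sys
    from types import ModuleType

    class LazyModule(ModuleType):
        lazy_attrs = ("sqrt",)

        def __getattr__(self, name):
            if name in self.lazy_attrs:
                import math  # stand-in for the real submodule
                return getattr(math, name)
            raise AttributeError(name)

    # Swapping the module's class makes attribute access lazy (Python 3.5+):
    sys.modules[__name__].__class__ = LazyModule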
4 changes: 2 additions & 2 deletions blocks/utils/utils.py
@@ -4,7 +4,8 @@
 import contextlib
 from collections import OrderedDict, deque
 
-import numpy
+# for documentation
+import numpy  # noqa: F401
 
 
 def pack(arg):
@@ -362,4 +363,3 @@ def find_bricks(top_bricks, predicate):
                 found.append(current)
         to_visit.extend(current.children)
     return found
-
2 changes: 2 additions & 0 deletions docs/conf.py
@@ -63,11 +63,13 @@
     'python': ('http://docs.python.org/3.4', None)
 }
 
+
 class Mock(MagicMock):
     @classmethod
     def __getattr__(cls, name):
         return Mock()
 
+
 MOCK_MODULES = ['fuel']
 sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
 
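Mocking `fuel` lets the Sphinx build import `blocks` on machines where the optional dependency is absent. Sphinx also ships a built-in equivalent that could replace the hand-rolled `Mock` (an alternative, not what this PR does):

    # In docs/conf.py, with sphinx.ext.autodoc enabled (Sphinx >= 1.3):
    autodoc_mock_imports = ['fuel']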
2 changes: 1 addition & 1 deletion doctests/__init__.py
@@ -29,7 +29,7 @@ def load_tests(loader, tests, ignore):
            tests.addTests(doctest.DocTestSuite(
                module=importlib.import_module(module), setUp=setup,
                optionflags=doctest.IGNORE_EXCEPTION_DETAIL))
-        except:
+        except:  # noqa: E722
            pass
 
    # This part loads the doctests from the documentation
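
`load_tests` is unittest's discovery hook; the broad `except` (now flagged with noqa) skips modules that fail to import, for instance when an optional GPU dependency is missing. The protocol in miniature (hypothetical, using a stand-in module):

    import doctest
    import unittest

    def load_tests(loader, tests, ignore):
        try:
            tests.addTests(doctest.DocTestSuite(
                module=__import__('json'),  # stand-in for a blocks module
                optionflags=doctest.IGNORE_EXCEPTION_DETAIL))
        except:  # noqa: E722 -- e.g. import failures or no doctests found
            pass
        return tests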
4 changes: 2 additions & 2 deletions tests/algorithms/test_algorithms.py
@@ -186,8 +186,8 @@ def test_gradient_descent_spurious_sources():
     algorithm = GradientDescent(cost=cost, parameters=[W])
     algorithm.step_rule.learning_rate.set_value(0.75)
     algorithm.initialize()
-    assert_raises(lambda:
-                  algorithm.process_batch(dict(example_id='test')))
+    with assert_raises(ValueError):
+        algorithm.process_batch(dict(example_id='test'))
 
     algorithm = GradientDescent(cost=cost, parameters=[W],
                                 on_unused_sources='ignore')
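
The rewritten assertion both names the expected exception and switches to the context-manager form; the old call passed a lambda where the exception class belongs, so the batch was never actually processed and nothing was asserted. Both calling conventions side by side (`nose.tools.assert_raises` forwards to unittest's `assertRaises`, which has accepted the `with` form since Python 2.7):

    from nose.tools import assert_raises

    def boom():
        raise ValueError("spurious source")

    assert_raises(ValueError, boom)     # callable form
    with assert_raises(ValueError):     # context-manager form
        boom()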