Python源码示例:syntaxnet.util.check.In()
示例1
def get_attrs_with_defaults(parameters, defaults):
  """Populates a dictionary with run-time attributes.

  Given defaults, populates any overrides from 'parameters' with their
  corresponding converted values. 'defaults' should be typed. This is useful
  for specifying NetworkUnit-specific configuration options.

  Args:
    parameters: a <string, string> map.
    defaults: a <string, value> typed set of default values.

  Returns:
    dictionary populated with any overrides.

  Raises:
    ValueError: if a key in parameters is not present in defaults (the
      default error type raised by check.In).
  """
  # Copy so overrides never mutate the caller's dict; the previous version
  # aliased 'defaults', leaking per-call overrides into any shared mapping.
  attrs = dict(defaults)
  for key, value in parameters.items():  # items() works on Python 2 and 3.
    check.In(key, defaults, 'Unknown attribute: %s' % key)
    if isinstance(defaults[key], bool):
      # bool('false') would be True, so parse booleans by string comparison.
      attrs[key] = value.lower() == 'true'
    else:
      # Coerce the string override to the type of the typed default.
      attrs[key] = type(defaults[key])(value)
  return attrs
示例2
def get_attrs_with_defaults(parameters, defaults):
  """Populates a dictionary with run-time attributes.

  Given defaults, populates any overrides from 'parameters' with their
  corresponding converted values. 'defaults' should be typed. This is useful
  for specifying NetworkUnit-specific configuration options.

  Args:
    parameters: a <string, string> map.
    defaults: a <string, value> typed set of default values.

  Returns:
    dictionary populated with any overrides.

  Raises:
    ValueError: if a key in parameters is not present in defaults (the
      default error type raised by check.In).
  """
  # Copy so overrides never mutate the caller's dict; the previous version
  # aliased 'defaults', leaking per-call overrides into any shared mapping.
  attrs = dict(defaults)
  for key, value in parameters.items():  # items() works on Python 2 and 3.
    check.In(key, defaults, 'Unknown attribute: %s' % key)
    if isinstance(defaults[key], bool):
      # bool('false') would be True, so parse booleans by string comparison.
      attrs[key] = value.lower() == 'true'
    else:
      # Coerce the string override to the type of the typed default.
      attrs[key] = type(defaults[key])(value)
  return attrs
示例3
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(GatherNetwork, self).__init__(component)
  self._attrs = get_attrs_with_defaults(
      component.spec.network_unit.parameters, {'trainable_padding': False})

  # The 'indices' linked feature is required and must be one-dimensional.
  linked_dims = self._linked_feature_dims
  check.In('indices', linked_dims, 'Missing required linked feature')
  check.Eq(linked_dims['indices'], 1, 'Wrong dimension for "indices" feature')

  # Output width is the concatenated input minus the 'indices' column.
  self._dim = self._concatenated_input_dim - 1
  self._layers.append(Layer(component, 'outputs', self._dim))

  if not self._attrs['trainable_padding']:
    return  # No trainable padding variable requested.
  padding = tf.get_variable(
      'pre_padding', [1, 1, self._dim],
      initializer=tf.random_normal_initializer(stddev=1e-4),
      dtype=tf.float32)
  self._params.append(padding)
示例4
def get_attrs_with_defaults(parameters, defaults):
  """Populates a dictionary with run-time attributes.

  Given defaults, populates any overrides from 'parameters' with their
  corresponding converted values. 'defaults' should be typed. This is useful
  for specifying NetworkUnit-specific configuration options.

  Args:
    parameters: a <string, string> map.
    defaults: a <string, value> typed set of default values.

  Returns:
    dictionary populated with any overrides.

  Raises:
    ValueError: if a key in parameters is not present in defaults (the
      default error type raised by check.In).
  """
  # Copy so overrides never mutate the caller's dict; the previous version
  # aliased 'defaults', leaking per-call overrides into any shared mapping.
  attrs = dict(defaults)
  for key, value in parameters.items():  # items() works on Python 2 and 3.
    check.In(key, defaults, 'Unknown attribute: %s' % key)
    if isinstance(defaults[key], bool):
      # bool('false') would be True, so parse booleans by string comparison.
      attrs[key] = value.lower() == 'true'
    else:
      # Coerce the string override to the type of the typed default.
      attrs[key] = type(defaults[key])(value)
  return attrs
示例5
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(GatherNetwork, self).__init__(component)
  self._attrs = get_attrs_with_defaults(
      component.spec.network_unit.parameters, {'trainable_padding': False})

  # The 'indices' linked feature is required and must be one-dimensional.
  linked_dims = self._linked_feature_dims
  check.In('indices', linked_dims, 'Missing required linked feature')
  check.Eq(linked_dims['indices'], 1, 'Wrong dimension for "indices" feature')

  # Output width is the concatenated input minus the 'indices' column.
  self._dim = self._concatenated_input_dim - 1
  self._layers.append(Layer(component, 'outputs', self._dim))

  if not self._attrs['trainable_padding']:
    return  # No trainable padding variable requested.
  padding = tf.get_variable(
      'pre_padding', [1, 1, self._dim],
      initializer=tf.random_normal_initializer(stddev=1e-4),
      dtype=tf.float32)
  self._params.append(padding)
示例6
def get_attrs_with_defaults(parameters, defaults):
  """Populates a dictionary with run-time attributes.

  Given defaults, populates any overrides from 'parameters' with their
  corresponding converted values. 'defaults' should be typed. This is useful
  for specifying NetworkUnit-specific configuration options.

  Args:
    parameters: a <string, string> map.
    defaults: a <string, value> typed set of default values.

  Returns:
    dictionary populated with any overrides.

  Raises:
    ValueError: if a key in parameters is not present in defaults (the
      default error type raised by check.In).
  """
  # Copy so overrides never mutate the caller's dict; the previous version
  # aliased 'defaults', leaking per-call overrides into any shared mapping.
  attrs = dict(defaults)
  for key, value in parameters.items():  # items() works on Python 2 and 3.
    check.In(key, defaults, 'Unknown attribute: %s' % key)
    if isinstance(defaults[key], bool):
      # bool('false') would be True, so parse booleans by string comparison.
      attrs[key] = value.lower() == 'true'
    else:
      # Coerce the string override to the type of the typed default.
      attrs[key] = type(defaults[key])(value)
  return attrs
示例7
def get_attrs_with_defaults(parameters, defaults):
  """Populates a dictionary with run-time attributes.

  Given defaults, populates any overrides from 'parameters' with their
  corresponding converted values. 'defaults' should be typed. This is useful
  for specifying NetworkUnit-specific configuration options.

  Args:
    parameters: a <string, string> map.
    defaults: a <string, value> typed set of default values.

  Returns:
    dictionary populated with any overrides.

  Raises:
    ValueError: if a key in parameters is not present in defaults (the
      default error type raised by check.In).
  """
  # Copy so overrides never mutate the caller's dict; the previous version
  # aliased 'defaults', leaking per-call overrides into any shared mapping.
  attrs = dict(defaults)
  for key, value in parameters.items():  # items() works on Python 2 and 3.
    check.In(key, defaults, 'Unknown attribute: %s' % key)
    if isinstance(defaults[key], bool):
      # bool('false') would be True, so parse booleans by string comparison.
      attrs[key] = value.lower() == 'true'
    else:
      # Coerce the string override to the type of the typed default.
      attrs[key] = type(defaults[key])(value)
  return attrs
示例8
def get_attrs_with_defaults(parameters, defaults):
  """Populates a dictionary with run-time attributes.

  Given defaults, populates any overrides from 'parameters' with their
  corresponding converted values. 'defaults' should be typed. This is useful
  for specifying NetworkUnit-specific configuration options.

  Args:
    parameters: a <string, string> map.
    defaults: a <string, value> typed set of default values.

  Returns:
    dictionary populated with any overrides.

  Raises:
    ValueError: if a key in parameters is not present in defaults (the
      default error type raised by check.In).
  """
  # Copy so overrides never mutate the caller's dict; the previous version
  # aliased 'defaults', leaking per-call overrides into any shared mapping.
  attrs = dict(defaults)
  for key, value in parameters.items():  # items() works on Python 2 and 3.
    check.In(key, defaults, 'Unknown attribute: %s' % key)
    if isinstance(defaults[key], bool):
      # bool('false') would be True, so parse booleans by string comparison.
      attrs[key] = value.lower() == 'true'
    else:
      # Coerce the string override to the type of the typed default.
      attrs[key] = type(defaults[key])(value)
  return attrs
示例9
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(GatherNetwork, self).__init__(component)
  self._attrs = get_attrs_with_defaults(
      component.spec.network_unit.parameters, {'trainable_padding': False})

  # The 'indices' linked feature is required and must be one-dimensional.
  linked_dims = self._linked_feature_dims
  check.In('indices', linked_dims, 'Missing required linked feature')
  check.Eq(linked_dims['indices'], 1, 'Wrong dimension for "indices" feature')

  # Output width is the concatenated input minus the 'indices' column.
  self._dim = self._concatenated_input_dim - 1
  self._layers.append(Layer(component, 'outputs', self._dim))

  if not self._attrs['trainable_padding']:
    return  # No trainable padding variable requested.
  padding = tf.get_variable(
      'pre_padding', [1, 1, self._dim],
      initializer=tf.random_normal_initializer(stddev=1e-4),
      dtype=tf.float32)
  self._params.append(padding)
示例10
def get_attrs_with_defaults(parameters, defaults):
  """Populates a dictionary with run-time attributes.

  Given defaults, populates any overrides from 'parameters' with their
  corresponding converted values. 'defaults' should be typed. This is useful
  for specifying NetworkUnit-specific configuration options.

  Args:
    parameters: a <string, string> map.
    defaults: a <string, value> typed set of default values.

  Returns:
    dictionary populated with any overrides.

  Raises:
    ValueError: if a key in parameters is not present in defaults (the
      default error type raised by check.In).
  """
  # Copy so overrides never mutate the caller's dict; the previous version
  # aliased 'defaults', leaking per-call overrides into any shared mapping.
  attrs = dict(defaults)
  for key, value in parameters.items():  # items() works on Python 2 and 3.
    check.In(key, defaults, 'Unknown attribute: %s' % key)
    if isinstance(defaults[key], bool):
      # bool('false') would be True, so parse booleans by string comparison.
      attrs[key] = value.lower() == 'true'
    else:
      # Coerce the string override to the type of the typed default.
      attrs[key] = type(defaults[key])(value)
  return attrs
示例11
def get_attrs_with_defaults(parameters, defaults):
  """Populates a dictionary with run-time attributes.

  Given defaults, populates any overrides from 'parameters' with their
  corresponding converted values. 'defaults' should be typed. This is useful
  for specifying NetworkUnit-specific configuration options.

  Args:
    parameters: a <string, string> map.
    defaults: a <string, value> typed set of default values.

  Returns:
    dictionary populated with any overrides.

  Raises:
    ValueError: if a key in parameters is not present in defaults (the
      default error type raised by check.In).
  """
  # Copy so overrides never mutate the caller's dict; the previous version
  # aliased 'defaults', leaking per-call overrides into any shared mapping.
  attrs = dict(defaults)
  for key, value in parameters.items():  # items() works on Python 2 and 3.
    check.In(key, defaults, 'Unknown attribute: %s' % key)
    if isinstance(defaults[key], bool):
      # bool('false') would be True, so parse booleans by string comparison.
      attrs[key] = value.lower() == 'true'
    else:
      # Coerce the string override to the type of the typed default.
      attrs[key] = type(defaults[key])(value)
  return attrs
示例12
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(GatherNetwork, self).__init__(component)
  self._attrs = get_attrs_with_defaults(
      component.spec.network_unit.parameters, {'trainable_padding': False})

  # The 'indices' linked feature is required and must be one-dimensional.
  linked_dims = self._linked_feature_dims
  check.In('indices', linked_dims, 'Missing required linked feature')
  check.Eq(linked_dims['indices'], 1, 'Wrong dimension for "indices" feature')

  # Output width is the concatenated input minus the 'indices' column.
  self._dim = self._concatenated_input_dim - 1
  self._layers.append(Layer(component, 'outputs', self._dim))

  if not self._attrs['trainable_padding']:
    return  # No trainable padding variable requested.
  padding = tf.get_variable(
      'pre_padding', [1, 1, self._dim],
      initializer=tf.random_normal_initializer(stddev=1e-4),
      dtype=tf.float32)
  self._params.append(padding)
示例13
def get_attrs_with_defaults(parameters, defaults):
  """Populates a dictionary with run-time attributes.

  Given defaults, populates any overrides from 'parameters' with their
  corresponding converted values. 'defaults' should be typed. This is useful
  for specifying NetworkUnit-specific configuration options.

  Args:
    parameters: a <string, string> map.
    defaults: a <string, value> typed set of default values.

  Returns:
    dictionary populated with any overrides.

  Raises:
    ValueError: if a key in parameters is not present in defaults (the
      default error type raised by check.In).
  """
  # Copy so overrides never mutate the caller's dict; the previous version
  # aliased 'defaults', leaking per-call overrides into any shared mapping.
  attrs = dict(defaults)
  for key, value in parameters.items():  # items() works on Python 2 and 3.
    check.In(key, defaults, 'Unknown attribute: %s' % key)
    if isinstance(defaults[key], bool):
      # bool('false') would be True, so parse booleans by string comparison.
      attrs[key] = value.lower() == 'true'
    else:
      # Coerce the string override to the type of the typed default.
      attrs[key] = type(defaults[key])(value)
  return attrs
示例14
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(GatherNetwork, self).__init__(component)
  self._attrs = get_attrs_with_defaults(
      component.spec.network_unit.parameters, {'trainable_padding': False})

  # The 'indices' linked feature is required and must be one-dimensional.
  linked_dims = self._linked_feature_dims
  check.In('indices', linked_dims, 'Missing required linked feature')
  check.Eq(linked_dims['indices'], 1, 'Wrong dimension for "indices" feature')

  # Output width is the concatenated input minus the 'indices' column.
  self._dim = self._concatenated_input_dim - 1
  self._layers.append(Layer(component, 'outputs', self._dim))

  if not self._attrs['trainable_padding']:
    return  # No trainable padding variable requested.
  padding = tf.get_variable(
      'pre_padding', [1, 1, self._dim],
      initializer=tf.random_normal_initializer(stddev=1e-4),
      dtype=tf.float32)
  self._params.append(padding)
示例15
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # This network consumes exactly two linked features and no fixed ones.
  linked = self._linked_feature_dims
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(linked.items()), 2, 'Expected two linked features')
  check.In('sources', linked, 'Missing required linked feature')
  check.In('targets', linked, 'Missing required linked feature')
  self._source_dim = linked['sources']
  self._target_dim = linked['targets']

  # TODO(googleuser): Make parameter initialization configurable.
  self._weights = [
      tf.get_variable('weights_arc', [self._source_dim, self._target_dim],
                      tf.float32, tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('weights_source', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('root', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
  ]
  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(self, 'adjacency', -1))
示例16
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # This network consumes exactly two linked features and no fixed ones.
  linked = self._linked_feature_dims
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(linked.items()), 2, 'Expected two linked features')
  check.In('sources', linked, 'Missing required linked feature')
  check.In('targets', linked, 'Missing required linked feature')
  self._source_dim = linked['sources']
  self._target_dim = linked['targets']

  # TODO(googleuser): Make parameter initialization configurable.
  self._weights = [
      tf.get_variable('weights_arc', [self._source_dim, self._target_dim],
                      tf.float32, tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('weights_source', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('root', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
  ]
  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(component, 'adjacency', -1))
示例17
def testCheckIn(self):
  """Exercises check.In on successes, failures, and custom error types."""
  letters = ('a', 'b', 'c')
  mapping = {'a': 1, 'b': 2}
  # Successful membership checks return silently.
  check.In('a', letters, 'foo')
  check.In('b', mapping, 'bar')
  # A missing element raises ValueError by default, carrying the message.
  with self.assertRaisesRegexp(ValueError, 'bar'):
    check.In('d', letters, 'bar')
  # Dict membership is tested by key, and the error type is overridable.
  with self.assertRaisesRegexp(RuntimeError, 'baz'):
    check.In('c', mapping, 'baz', RuntimeError)
示例18
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # This network consumes exactly two linked features and no fixed ones.
  linked = self._linked_feature_dims
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(linked.items()), 2, 'Expected two linked features')
  check.In('sources', linked, 'Missing required linked feature')
  check.In('targets', linked, 'Missing required linked feature')
  self._source_dim = linked['sources']
  self._target_dim = linked['targets']

  # TODO(googleuser): Make parameter initialization configurable.
  self._weights = [
      tf.get_variable('weights_arc', [self._source_dim, self._target_dim],
                      tf.float32, tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('weights_source', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('root', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
  ]
  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(component, 'adjacency', -1))
示例19
def testCheckIn(self):
  """Exercises check.In on successes, failures, and custom error types."""
  letters = ('a', 'b', 'c')
  mapping = {'a': 1, 'b': 2}
  # Successful membership checks return silently.
  check.In('a', letters, 'foo')
  check.In('b', mapping, 'bar')
  # A missing element raises ValueError by default, carrying the message.
  with self.assertRaisesRegexp(ValueError, 'bar'):
    check.In('d', letters, 'bar')
  # Dict membership is tested by key, and the error type is overridable.
  with self.assertRaisesRegexp(RuntimeError, 'baz'):
    check.In('c', mapping, 'baz', RuntimeError)
示例20
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # This network consumes exactly two linked features and no fixed ones.
  linked = self._linked_feature_dims
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(linked.items()), 2, 'Expected two linked features')
  check.In('sources', linked, 'Missing required linked feature')
  check.In('targets', linked, 'Missing required linked feature')
  self._source_dim = linked['sources']
  self._target_dim = linked['targets']

  # TODO(googleuser): Make parameter initialization configurable.
  self._weights = [
      tf.get_variable('weights_arc', [self._source_dim, self._target_dim],
                      tf.float32, tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('weights_source', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('root', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
  ]
  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(self, 'adjacency', -1))
示例21
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # This network consumes exactly two linked features and no fixed ones.
  linked = self._linked_feature_dims
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(linked.items()), 2, 'Expected two linked features')
  check.In('sources', linked, 'Missing required linked feature')
  check.In('targets', linked, 'Missing required linked feature')
  self._source_dim = linked['sources']
  self._target_dim = linked['targets']

  # TODO(googleuser): Make parameter initialization configurable.
  self._weights = [
      tf.get_variable('weights_arc', [self._source_dim, self._target_dim],
                      tf.float32, tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('weights_source', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('root', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
  ]
  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(self, 'adjacency', -1))
示例22
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # This network consumes exactly two linked features and no fixed ones.
  linked = self._linked_feature_dims
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(linked.items()), 2, 'Expected two linked features')
  check.In('sources', linked, 'Missing required linked feature')
  check.In('targets', linked, 'Missing required linked feature')
  self._source_dim = linked['sources']
  self._target_dim = linked['targets']

  # TODO(googleuser): Make parameter initialization configurable.
  self._weights = [
      tf.get_variable('weights_arc', [self._source_dim, self._target_dim],
                      tf.float32, tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('weights_source', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('root', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
  ]
  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(component, 'adjacency', -1))
示例23
def testCheckIn(self):
  """Exercises check.In on successes, failures, and custom error types."""
  letters = ('a', 'b', 'c')
  mapping = {'a': 1, 'b': 2}
  # Successful membership checks return silently.
  check.In('a', letters, 'foo')
  check.In('b', mapping, 'bar')
  # A missing element raises ValueError by default, carrying the message.
  with self.assertRaisesRegexp(ValueError, 'bar'):
    check.In('d', letters, 'bar')
  # Dict membership is tested by key, and the error type is overridable.
  with self.assertRaisesRegexp(RuntimeError, 'baz'):
    check.In('c', mapping, 'baz', RuntimeError)
示例24
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # This network consumes exactly two linked features and no fixed ones.
  linked = self._linked_feature_dims
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(linked.items()), 2, 'Expected two linked features')
  check.In('sources', linked, 'Missing required linked feature')
  check.In('targets', linked, 'Missing required linked feature')
  self._source_dim = linked['sources']
  self._target_dim = linked['targets']

  # TODO(googleuser): Make parameter initialization configurable.
  self._weights = [
      tf.get_variable('weights_arc', [self._source_dim, self._target_dim],
                      tf.float32, tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('weights_source', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
      tf.get_variable('root', [self._source_dim], tf.float32,
                      tf.random_normal_initializer(stddev=1e-4)),
  ]
  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(self, 'adjacency', -1))
示例25
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # This network consumes exactly two linked features and no fixed ones.
  linked = self._linked_feature_dims
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(linked.items()), 2, 'Expected two linked features')
  check.In('sources', linked, 'Missing required linked feature')
  check.In('targets', linked, 'Missing required linked feature')
  self._source_dim = linked['sources']
  self._target_dim = linked['targets']

  # Arc weights start orthogonal; the source and root vectors start at zero.
  self._weights = [
      tf.get_variable('weights_arc', [self._source_dim, self._target_dim],
                      tf.float32, tf.orthogonal_initializer()),
      tf.get_variable('weights_source', [self._source_dim], tf.float32,
                      tf.zeros_initializer()),
      tf.get_variable('root', [self._source_dim], tf.float32,
                      tf.zeros_initializer()),
  ]
  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Add runtime hooks for pre-computed weights.
  self._derived_params.append(self._get_root_weights)
  self._derived_params.append(self._get_root_bias)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(component, 'adjacency', -1))
示例26
def __init__(self, component):
  """Initializes layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  # All four outputs have dynamic dimension (negative Layer.dim).
  layers = [
      network_units.Layer(self, name, -1)
      for name in ('lengths', 'scores', 'logits', 'arcs')
  ]
  super(MstSolverNetwork, self).__init__(component, init_layers=layers)

  self._attrs = network_units.get_attrs_with_defaults(
      component.spec.network_unit.parameters,
      defaults={
          'forest': False,
          'loss': 'softmax',
          'crf_max_dynamic_range': 20,
      })

  # This network consumes exactly two linked features and no fixed ones.
  check.Eq(
      len(self._fixed_feature_dims.items()), 0, 'Expected no fixed features')
  check.Eq(
      len(self._linked_feature_dims.items()), 2,
      'Expected two linked features')
  check.In('lengths', self._linked_feature_dims,
           'Missing required linked feature')
  check.In('scores', self._linked_feature_dims,
           'Missing required linked feature')
示例27
def testCheckIn(self):
  """Exercises check.In on successes, failures, and custom error types."""
  letters = ('a', 'b', 'c')
  mapping = {'a': 1, 'b': 2}
  # Successful membership checks return silently.
  check.In('a', letters, 'foo')
  check.In('b', mapping, 'bar')
  # A missing element raises ValueError by default, carrying the message.
  with self.assertRaisesRegexp(ValueError, 'bar'):
    check.In('d', letters, 'bar')
  # Dict membership is tested by key, and the error type is overridable.
  with self.assertRaisesRegexp(RuntimeError, 'baz'):
    check.In('c', mapping, 'baz', RuntimeError)
示例28
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # This network consumes exactly two linked features and no fixed ones.
  linked = self._linked_feature_dims
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(linked.items()), 2, 'Expected two linked features')
  check.In('sources', linked, 'Missing required linked feature')
  check.In('targets', linked, 'Missing required linked feature')
  self._source_dim = linked['sources']
  self._target_dim = linked['targets']

  # Arc weights start orthogonal; the source and root vectors start at zero.
  self._weights = [
      tf.get_variable('weights_arc', [self._source_dim, self._target_dim],
                      tf.float32, tf.orthogonal_initializer()),
      tf.get_variable('weights_source', [self._source_dim], tf.float32,
                      tf.zeros_initializer()),
      tf.get_variable('root', [self._source_dim], tf.float32,
                      tf.zeros_initializer()),
  ]
  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Add runtime hooks for pre-computed weights.
  self._derived_params.append(self._get_root_weights)
  self._derived_params.append(self._get_root_bias)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(component, 'adjacency', -1))
示例29
def __init__(self, component):
  """Initializes layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  # All four outputs have dynamic dimension (negative Layer.dim).
  layers = [
      network_units.Layer(self, name, -1)
      for name in ('lengths', 'scores', 'logits', 'arcs')
  ]
  super(MstSolverNetwork, self).__init__(component, init_layers=layers)

  self._attrs = network_units.get_attrs_with_defaults(
      component.spec.network_unit.parameters,
      defaults={
          'forest': False,
          'loss': 'softmax',
          'crf_max_dynamic_range': 20,
      })

  # This network consumes exactly two linked features and no fixed ones.
  check.Eq(
      len(self._fixed_feature_dims.items()), 0, 'Expected no fixed features')
  check.Eq(
      len(self._linked_feature_dims.items()), 2,
      'Expected two linked features')
  check.In('lengths', self._linked_feature_dims,
           'Missing required linked feature')
  check.In('scores', self._linked_feature_dims,
           'Missing required linked feature')
示例30
def testCheckIn(self):
  """Exercises check.In on successes, failures, and custom error types."""
  letters = ('a', 'b', 'c')
  mapping = {'a': 1, 'b': 2}
  # Successful membership checks return silently.
  check.In('a', letters, 'foo')
  check.In('b', mapping, 'bar')
  # A missing element raises ValueError by default, carrying the message.
  with self.assertRaisesRegexp(ValueError, 'bar'):
    check.In('d', letters, 'bar')
  # Dict membership is tested by key, and the error type is overridable.
  with self.assertRaisesRegexp(RuntimeError, 'baz'):
    check.In('c', mapping, 'baz', RuntimeError)