Python source code examples: syntaxnet.util.check.IsTrue()

Example 1
def testCheckIsTrue(self):
    check.IsTrue(1 == 1.0, 'foo')
    check.IsTrue(True, 'foo')
    check.IsTrue([0], 'foo')
    check.IsTrue({'x': 1}, 'foo')
    check.IsTrue(not 0, 'foo')
    check.IsTrue(not None, 'foo')
    with self.assertRaisesRegexp(ValueError, 'bar'):
      check.IsTrue(False, 'bar')
    with self.assertRaisesRegexp(ValueError, 'bar'):
      check.IsTrue(None, 'bar')
    with self.assertRaisesRegexp(ValueError, 'bar'):
      check.IsTrue(0, 'bar')
    with self.assertRaisesRegexp(ValueError, 'bar'):
      check.IsTrue([], 'bar')
    with self.assertRaisesRegexp(ValueError, 'bar'):
      check.IsTrue({}, 'bar')
    with self.assertRaisesRegexp(RuntimeError, 'baz'):
      check.IsTrue('', 'baz', RuntimeError) 
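The test above pins down the contract of check.IsTrue(): any falsy first argument raises, the second argument becomes the error message, and an optional third argument overrides the exception type, which defaults to ValueError. A minimal sketch of a helper with that behavior, inferred from the test rather than taken from the SyntaxNet source, could look like:

def IsTrue(value, message, error=ValueError):
  # Raise error(message) whenever value is falsy; otherwise return silently.
  if not value:
    raise error(message)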
Example 2
def _validate_embedded_fixed_features(comp):
  """Checks that the embedded fixed features of |comp| are set up properly."""
  for feature in comp.spec.fixed_feature:
    check.Gt(feature.embedding_dim, 0,
             'Embeddings requested for non-embedded feature: %s' % feature)
    if feature.is_constant:
      check.IsTrue(feature.HasField('pretrained_embedding_matrix'),
                   'Constant embeddings must be pretrained: %s' % feature) 
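Here check.IsTrue() guards a protobuf presence test: a fixed feature marked is_constant must also carry a pretrained_embedding_matrix. Reduced to literals, the two failure modes guarded above behave roughly as follows (a hypothetical illustration; the real arguments are DRAGNN ComponentSpec protos, 'words' is a placeholder feature name, and check.Gt is assumed to enforce a strict greater-than, as its use above suggests):

# Both calls are expected to raise ValueError with the given message.
check.Gt(0, 0, 'Embeddings requested for non-embedded feature: words')
check.IsTrue(False, 'Constant embeddings must be pretrained: words')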
Example 3
def fill_from_resources(self, resource_path, tf_master=''):
    """Fills in feature sizes and vocabularies using SyntaxNet lexicon.

    Must be called before the spec is ready to be used to build TensorFlow
    graphs. Requires a SyntaxNet lexicon built at the resource_path. Using the
    lexicon, this will call the SyntaxNet custom ops to return the number of
    features and vocabulary sizes based on the FML specifications and the
    lexicons. It will also compute the number of actions of the transition
    system.

    This will often CHECK-fail if the spec doesn't correspond to a valid
    transition system or feature setup.

    Args:
      resource_path: Path to the lexicon.
      tf_master: TensorFlow master executor (string, defaults to '' to use the
        local instance).
    """
    check.IsTrue(
        self.spec.transition_system.registered_name,
        'Set a transition system before calling fill_from_resources().')

    context = lexicon.create_lexicon_context(resource_path)
    for key, value in self.spec.transition_system.parameters.iteritems():
      context.parameter.add(name=key, value=value)

    context.parameter.add(
        name='brain_parser_embedding_dims',
        value=';'.join(
            [str(x.embedding_dim) for x in self.spec.fixed_feature]))
    context.parameter.add(
        name='brain_parser_features',
        value=';'.join([x.fml for x in self.spec.fixed_feature]))
    context.parameter.add(
        name='brain_parser_predicate_maps',
        value=';'.join(['' for x in self.spec.fixed_feature]))
    context.parameter.add(
        name='brain_parser_embedding_names',
        value=';'.join([x.name for x in self.spec.fixed_feature]))
    context.parameter.add(
        name='brain_parser_transition_system',
        value=self.spec.transition_system.registered_name)

    # Propagate information from SyntaxNet C++ backends into the DRAGNN
    # self.spec.
    with tf.Session(tf_master) as sess:
      feature_sizes, domain_sizes, _, num_actions = sess.run(
          gen_parser_ops.feature_size(task_context_str=str(context)))
      self.spec.num_actions = int(num_actions)
      for i in xrange(len(feature_sizes)):
        self.spec.fixed_feature[i].size = int(feature_sizes[i])
        self.spec.fixed_feature[i].vocabulary_size = int(domain_sizes[i])

    for i in xrange(len(self.spec.linked_feature)):
      self.spec.linked_feature[i].size = len(
          self.spec.linked_feature[i].fml.split(' '))

    for resource in context.input:
      self.spec.resource.add(name=resource.name).part.add(
          file_pattern=resource.part[0].file_pattern) 
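The docstring describes the required call order but does not show it. A hedged usage sketch follows; the ComponentSpecBuilder class, its setter methods, and the module path are assumptions about the surrounding DRAGNN spec_builder API (only fill_from_resources itself appears in this example), and the lexicon path is a placeholder:

# Hypothetical usage; builder and setter names are assumed, not shown above.
from dragnn.python import spec_builder

builder = spec_builder.ComponentSpecBuilder('parser')
# A transition system must be registered first, or the check.IsTrue() above fires.
builder.set_transition_system(name='arc-standard')
builder.fill_from_resources('/path/to/lexicon', tf_master='')
# After the call, feature sizes, vocabulary sizes, and num_actions are populated.
print(builder.spec.num_actions)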