當前位置: 首頁>>代碼示例>>Python>>正文


Python gen_parser_ops.feature_size方法代碼示例

本文整理匯總了Python中syntaxnet.ops.gen_parser_ops.feature_size方法的典型用法代碼示例。如果您正苦於以下問題:Python gen_parser_ops.feature_size方法的具體用法?Python gen_parser_ops.feature_size怎麽用?Python gen_parser_ops.feature_size使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在syntaxnet.ops.gen_parser_ops的用法示例。


在下文中一共展示了gen_parser_ops.feature_size方法的4個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。

示例1: setUp

# 需要導入模塊: from syntaxnet.ops import gen_parser_ops [as 別名]
# 或者: from syntaxnet.ops.gen_parser_ops import feature_size [as 別名]
def setUp(self):
    """Prepares a task context for testing and builds lexicon term maps.

    Copies the checked-in context.pbtxt into the test tmp dir, rewriting the
    SRCDIR/OUTPATH placeholders, then runs the lexicon builder and records the
    parser's feature dimensions on the instance.
    """
    # Source context template shipped with the test data.
    source_context = os.path.join(FLAGS.test_srcdir,
                                  'syntaxnet/'
                                  'testdata/context.pbtxt')
    self._task_context = os.path.join(FLAGS.test_tmpdir, 'context.pbtxt')
    # Rewrite placeholder paths so the context points at real test dirs.
    with open(source_context, 'r') as src, \
         open(self._task_context, 'w') as dst:
      contents = src.read()
      contents = contents.replace('SRCDIR', FLAGS.test_srcdir)
      contents = contents.replace('OUTPATH', FLAGS.test_tmpdir)
      dst.write(contents)

    # Build the term maps, then query feature sizes for the parser task.
    with self.test_session() as sess:
      gen_parser_ops.lexicon_builder(task_context=self._task_context,
                                     corpus_name='training-corpus').run()
      sizes = sess.run(
          gen_parser_ops.feature_size(task_context=self._task_context,
                                      arg_prefix='brain_parser'))
      self._num_features, self._num_feature_ids, _, self._num_actions = sizes
開發者ID:ringringyi,項目名稱:DOTA_models,代碼行數:20,代碼來源:beam_reader_ops_test.py

示例2: setUp

# 需要導入模塊: from syntaxnet.ops import gen_parser_ops [as 別名]
# 或者: from syntaxnet.ops.gen_parser_ops import feature_size [as 別名]
def setUp(self):
    """Creates the testing task context and builds the required term maps.

    The template context.pbtxt is copied into the test tmp dir with its
    SRCDIR/OUTPATH placeholders substituted; afterwards the lexicon builder
    is run and the parser feature dimensions are stored on self.
    """
    template_path = os.path.join(
        FLAGS.test_srcdir,
        'syntaxnet/'
        'testdata/context.pbtxt')
    self._task_context = os.path.join(FLAGS.test_tmpdir, 'context.pbtxt')
    # Substitute the placeholder paths for the actual test directories.
    with open(template_path, 'r') as template:
      rewritten = (template.read()
                   .replace('SRCDIR', FLAGS.test_srcdir)
                   .replace('OUTPATH', FLAGS.test_tmpdir))
    with open(self._task_context, 'w') as out:
      out.write(rewritten)

    # Generate term maps and fetch the feature sizes for 'brain_parser'.
    with self.test_session() as sess:
      gen_parser_ops.lexicon_builder(task_context=self._task_context,
                                     corpus_name='training-corpus').run()
      self._num_features, self._num_feature_ids, _, self._num_actions = (
          sess.run(gen_parser_ops.feature_size(
              task_context=self._task_context,
              arg_prefix='brain_parser')))
開發者ID:coderSkyChen,項目名稱:Action_Recognition_Zoo,代碼行數:21,代碼來源:beam_reader_ops_test.py

示例3: setUp

# 需要導入模塊: from syntaxnet.ops import gen_parser_ops [as 別名]
# 或者: from syntaxnet.ops.gen_parser_ops import feature_size [as 別名]
def setUp(self):
    """Sets up a rewritten task context and the lexicon term maps.

    Uses the test_flags helpers for source/temp dirs, substitutes the
    SRCDIR/OUTPATH placeholders in the template context, then builds the
    lexicon and records the parser feature dimensions.
    """
    src_root = test_flags.source_root()
    tmp_dir = test_flags.temp_dir()
    template_context = os.path.join(src_root,
                                    'syntaxnet/'
                                    'testdata/context.pbtxt')
    self._task_context = os.path.join(tmp_dir, 'context.pbtxt')
    # Materialize the context with placeholders resolved to real paths.
    with open(template_context, 'r') as src:
      text = src.read().replace('SRCDIR', src_root).replace('OUTPATH', tmp_dir)
    with open(self._task_context, 'w') as dst:
      dst.write(text)

    # Build term maps and retrieve the 'brain_parser' feature sizes.
    with self.test_session() as sess:
      gen_parser_ops.lexicon_builder(task_context=self._task_context,
                                     corpus_name='training-corpus').run()
      sizes = sess.run(
          gen_parser_ops.feature_size(task_context=self._task_context,
                                      arg_prefix='brain_parser'))
      self._num_features, self._num_feature_ids, _, self._num_actions = sizes
開發者ID:generalized-iou,項目名稱:g-tensorflow-models,代碼行數:20,代碼來源:beam_reader_ops_test.py

示例4: testParsingReaderOpWhileLoop

# 需要導入模塊: from syntaxnet.ops import gen_parser_ops [as 別名]
# 或者: from syntaxnet.ops.gen_parser_ops import feature_size [as 別名]
def testParsingReaderOpWhileLoop(self):
    """Drives the gold parse reader inside tf.while_loop for two epochs."""
    feature_size = 3
    batch_size = 5

    def _ReaderEndpoints():
      # One invocation of the gold parse reader over the training corpus.
      return gen_parser_ops.gold_parse_reader(self._task_context,
                                              feature_size,
                                              batch_size,
                                              corpus_name='training-corpus')

    with self.test_session() as sess:
      # tf.while_loop hands every loop variable to both functions; the
      # condition only cares about 'epoch', so the rest is swallowed by
      # *unused_rest, and the body returns the full updated variable list.
      def KeepIterating(epoch, *unused_rest):
        return tf.less(epoch, 2)

      def LoopBody(epoch, num_actions, *feature_args):
        # The control dependency on 'epoch' (itself an output of the reader
        # op) forces the reader to be re-evaluated each iteration instead of
        # being cached.
        with epoch.graph.control_dependencies([epoch]):
          features, epoch, gold_actions = _ReaderEndpoints()
        num_actions = tf.maximum(num_actions,
                                 tf.reduce_max(gold_actions, [0], False) + 1)
        feature_ids = [features[i] for i in range(len(feature_args))]
        return [epoch, num_actions] + feature_ids

      initial_epoch = _ReaderEndpoints()[-2]
      loop_vars = [initial_epoch, tf.constant(0)]

      res = sess.run(
          tf.while_loop(KeepIterating, LoopBody, loop_vars,
                        shape_invariants=[tf.TensorShape(None)] * 2,
                        parallel_iterations=1))
      logging.info('Result: %s', res)
      # After two passes over the corpus the epoch counter must read 2.
      self.assertEqual(res[0], 2)
開發者ID:rky0930,項目名稱:yolo_v2,代碼行數:43,代碼來源:reader_ops_test.py


注:本文中的syntaxnet.ops.gen_parser_ops.feature_size方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。