It looks like in the following line:
> ali_to_05[:,1:] - ali_to_05[:,:-1]

you are subtracting booleans from booleans, which Theano's elementwise ops do not support.
Maybe cast `ali_to_05` to int8 first?
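A minimal sketch of what I mean, assuming `ali_to_05` is a 2-D boolean symbolic variable (the variable name is taken from the traceback; the surrounding setup is hypothetical, just for illustration):

    import theano
    import theano.tensor as T

    # Stand-in for the boolean alignment mask from the traceback;
    # here it is a free symbolic variable for illustration only.
    ali_to_05 = T.matrix('ali_to_05', dtype='bool')

    # Subtracting bools raises "TypeError: bool output not supported",
    # so cast to a signed integer type first. int8 is enough, since the
    # slice difference only ever takes the values -1, 0, or +1.
    ali_int = T.cast(ali_to_05, 'int8')
    ali_median_pos = ali_int[:, 1:] - ali_int[:, :-1]

    f = theano.function([ali_to_05], ali_median_pos)

The same cast should work at the failing line in lvsr/bricks/attention.py.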

On 2017-11-15 09:03 AM, [email protected] wrote:
I trained the model, but at the end it produced this error. Can anyone help me? Thank you.

-------------------------------------------------------------------------------
TRAINING HAS BEEN FINISHED:
-------------------------------------------------------------------------------
[...]

Traceback (most recent call last):
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/bin/run.py", line 154, in <module>
     getattr(lvsr.main, args.pop('func'))(config, **args)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py", line 920, in train_multistage
     stage_params, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py", line 693, in train
     load_log, fast_start)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py", line 325, in initialize_all
     batch=True, prediction=prediction, prediction_mask=prediction_mask)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/recognizer.py", line 445, in get_cost_graph
     **inputs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 377, in __call__
     return self.application.apply(self, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 312, in apply
     outputs = self.application_function(brick, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/recognizer.py", line 390, in cost
     attended=encoded, attended_mask=encoded_mask)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 377, in __call__
     return self.application.apply(self, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 312, in apply
     outputs = self.application_function(brick, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/sequence_generators.py", line 326, in cost_matrix
     return self.evaluate(outputs, mask=mask, **kwargs)[0]
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 377, in __call__
     return self.application.apply(self, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 312, in apply
     outputs = self.application_function(brick, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/sequence_generators.py", line 269, in evaluate
     **dict_union(inputs, states, contexts))
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 377, in __call__
     return self.application.apply(self, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 312, in apply
     outputs = self.application_function(brick, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py", line 738, in apply
     preprocessed_attended}))
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 377, in __call__
     return self.application.apply(self, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 312, in apply
     outputs = self.application_function(brick, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py", line 231, in recurrent_apply
     brick.name, application.application_name))
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scan_module/scan.py", line 773, in scan     condition, outputs, updates = scan_utils.get_updates_and_outputs(fn(*args))   File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py", line 213, in scan_function
     outputs = application(iterate=False, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 377, in __call__
     return self.application.apply(self, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 312, in apply
     outputs = self.application_function(brick, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py", line 133, in recurrent_apply
     return application_function(brick, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py", line 703, in do_apply
     self.preprocessed_attended_name: preprocessed_attended}))
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 377, in __call__
     return self.application.apply(self, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 312, in apply
     outputs = self.application_function(brick, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py", line 616, in take_glimpses
     **dict_union(states, glimpses_needed))
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 377, in __call__
     return self.application.apply(self, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py", line 312, in apply
     outputs = self.application_function(brick, *args, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/attention.py", line 141, in take_glimpses
     ali_median_pos = ali_to_05[:,1:] - ali_to_05[:,:-1]
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/var.py", line 147, in __sub__
     return theano.tensor.basic.sub(self, other)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/gof/op.py", line 615, in __call__
     node = self.make_node(*inputs, **kwargs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/elemwise.py", line 578, in make_node
     DimShuffle, *inputs)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/elemwise.py", line 520, in get_output_info
     for i in inputs])
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py", line 1041, in make_node
     for input in inputs])]
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py", line 1049, in output_types
     variables = self.output_types_preference(*types)
  File "/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py", line 842, in upcast_out_nobool
     raise TypeError("bool output not supported")
TypeError: bool output not supported


--
Pascal Lamblin
