fix test_jit canonicalize_tensor_iterator

Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/17104

Differential Revision: D14089928

Pulled By: wanchaol

fbshipit-source-id: 8b288514ab9ee8d24a11d39b75eef95783f28f20
Author: Wanchao Liang
Date: 2019-02-14 15:37:42 -08:00
Committed by: Facebook Github Bot
Parent: 65e06df24a
Commit: f3dd5563e4


@@ -886,9 +886,9 @@ class TestJit(JitTestCase):
         traced = torch.jit.trace(f, (x,))
         f(x)
         graph = traced.graph_for(x)
-        # There should be 4 int constants for the right sides of operators, plus two
-        # for alpha arguments for add and sub
-        self.assertTrue(str(traced.graph_for(x)).count(': int = prim::Constant'), 6)
+        # There should be 4 int constants for the right sides of operators, plus one
+        # for the alpha argument for add and sub
+        self.assertTrue(str(traced.graph_for(x)).count(': int = prim::Constant') == 5)
     # TODO: adapt this test to check that GraphExecutor treats them differently
     @unittest.skip("Need to be adjusted to Graph Executor")
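Context for the change: the old assertion passed its expected count as the second positional argument of assertTrue, but that argument is the failure message, not an expected value, so the check only verified that the count was non-zero. The fix moves the comparison into the expression itself (and corrects the expected constant count to 5). Below is a minimal, self-contained sketch of that pitfall; it is not part of the PR, and the class and test names are illustrative only.

import unittest

class AssertTruePitfall(unittest.TestCase):
    def test_message_argument_is_not_compared(self):
        count = 4
        # Old pattern: passes for any non-zero count, because the 6 is treated
        # as the message shown on failure, not as an expected value.
        self.assertTrue(count, 6)

    def test_explicit_comparison(self):
        count = 5
        # Fixed pattern: the equality is evaluated first, so the assertion
        # really does check against the expected constant count.
        self.assertTrue(count == 5)

if __name__ == "__main__":
    unittest.main()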