The following issues were found:
test/cpp/jit/test_schema_matching.cpp
1 issue
Line: 53
err.find("previously matched to type") != std::string::npos);
}
TEST(SchemaMatchingTest, VarType2) {
RegisterOperators reg({
Operator(
"aten::test_vartype2(t a, t[] b) -> (t[])",
[](Stack* stack) {
// NOLINTNEXTLINE(cppcoreguidelines-init-variables)
Reported by Cppcheck.
test/cpp/jit/test_qualified_name.cpp
1 issue
Line: 24
ASSERT_EQ(nullstate.name(), "");
}
TEST(QualifiedNameTest, DottedConstruction) {
// Test dotted construction
auto foo = QualifiedName("foo.bar.baz");
ASSERT_EQ(foo.qualifiedName(), "foo.bar.baz");
ASSERT_EQ(foo.prefix(), "foo.bar");
ASSERT_EQ(foo.name(), "baz");
Reported by Cppcheck.
test/cpp/jit/test_peephole_optimize.cpp
1 issue
Line: 32
->run(*graph);
}
TEST(PeepholeOptimizeTest, IsAndIsNot2) {
auto graph = std::make_shared<Graph>();
parseIR(
R"IR(
graph(%0: int?):
%1 : None = prim::Constant()
Reported by Cppcheck.
test/cpp/jit/test_mobile_type_parser.cpp
1 issue
Line: 18
ASSERT_ANY_THROW(c10::parseType(empty_ps));
}
TEST(MobileTypeParserTest, RoundTripAnnotationStr) {
std::string int_ps("int");
auto int_tp = c10::parseType(int_ps);
std::string int_tps = int_tp->annotation_str();
ASSERT_EQ(int_ps, int_tps);
}
Reported by Cppcheck.
test/cpp/jit/test_misc.cpp
1 issue
Line: 95
return out;
}
TEST(InternedStringsTest, Basic) {
ASSERT_EQ(prim::Param, Symbol::prim("Param"));
ASSERT_EQ(prim::Return, Symbol::prim("Return"));
ASSERT_EQ(prim::Return.toUnqualString(), std::string("Return"));
ASSERT_EQ(prim::Return.toQualString(), std::string("prim::Return"));
Symbol newsym = Symbol::aten("__NEW_SYMBOL");
Reported by Cppcheck.
test/cpp/jit/test_lite_trainer.cpp
1 issue
Line: 138
}
*/
TEST(MobileTest, SaveLoadParametersEmpty) {
Module m("m");
m.define(R"(
def add_it(self, x):
b = 4
return x + b
Reported by Cppcheck.
test/cpp/jit/test_jit_type.cpp
1 issue
Line: 25
TORCH_INTERNAL_ASSERT(!tt->strides().isComplete());
}
TEST(JitTypeTest, UnifyTypes) {
auto bool_tensor = TensorType::get()->withScalarType(at::kBool);
auto opt_bool_tensor = OptionalType::create(bool_tensor);
auto unified_opt_bool = unifyTypes(bool_tensor, opt_bool_tensor);
TORCH_INTERNAL_ASSERT(opt_bool_tensor->isSubtypeOf(*unified_opt_bool));
Reported by Cppcheck.
test/cpp/jit/test_jit_logging_levels.cpp
1 issue
Line: 16
"file_to_test.cpp", JitLoggingLevels::GRAPH_DUMP));
}
TEST(JitLoggingLevelsTest, CheckSetMultipleLogLevels) {
::torch::jit::set_jit_logging_levels("f1:>f2:>>f3");
ASSERT_TRUE(::torch::jit::is_enabled("f1.cpp", JitLoggingLevels::GRAPH_DUMP));
ASSERT_TRUE(
::torch::jit::is_enabled("f2.cpp", JitLoggingLevels::GRAPH_UPDATE));
ASSERT_TRUE(
Reported by Cppcheck.
test/cpp/jit/test_ir.cpp
1 issue
Line: 37
ASSERT_EQ(attr2.f(one), 5);
}
TEST(IRTest, Blocks) {
auto g = std::make_shared<Graph>();
const auto graph_string = R"IR(
graph(%a : Tensor,
%b : Tensor,
%c : Tensor):
Reported by Cppcheck.
test/cpp/jit/test_interpreter.cpp
1 issue
Line: 41
}
};
TEST_F(TypeCheckTest, MatchingType) {
// TypeCheck yields to true! Shape, grad and device matches.
auto a = at::zeros({2, 2}, at::kFloat);
auto b = at::ones({3, 3}, at::kFloat);
a.set_requires_grad(true);
a = a.to(at::kCPU);
Reported by Cppcheck.