Compare commits
No commits in common. "df8e5c5d09ae74781bb1c2a03a3344d4008cc651" and "6ed38077c6231c2301bcd9eae5bdea514483ad65" have entirely different histories.
df8e5c5d09 ... 6ed38077c6

20  .drone.yml
@@ -7,30 +7,12 @@ steps:
   pull: never
   image: erki/llvm:latest
   commands:
-  - apt install --no-install-recommends python3-pip -y
   - pip3 install --upgrade conan
   - conan profile new default --detect
   - conan profile update settings.compiler.libcxx=libstdc++11 default
-  - mkdir -p build
+  - mkdir build
   - cd build
   - conan install .. --build=missing
   - cmake .. -G "Ninja" -DCMAKE_BUILD_TYPE=Release -DWITH_TESTS=ON
   - ninja
   - ctest . --output-on-failure
-- name: opsummarizer integration tests
-  pull: never
-  image: erki/llvm:latest
-  commands:
-  - apt install --no-install-recommends gcovr python3-pip -y
-  - pip3 install --upgrade conan
-  - conan profile new default --detect
-  - conan profile update settings.compiler.libcxx=libstdc++11 default
-  - mkdir -p build
-  - cd build
-  - conan install .. --build=missing
-  - cmake .. -G "Ninja" -DCMAKE_BUILD_TYPE=Release -DWITH_TESTS=OFF
-  - ninja
-  - cp -r ../testcases ./testcases
-  - cd ./testcases
-  - mv ../op-finder ./
-  - python3 ../../op-summarizer/tests.py
@@ -23,14 +23,12 @@ public:
     void toStream(std::ostream& stream);
     void toFile(const std::string& output_filename);

-    void pushOperation(const std::string& original_filename, OperationLog&& op) override;
+    void pushOperation(const std::string& filename, OperationLog&& op) override;
     [[nodiscard]] const std::unordered_map<std::string, std::vector<OperationLog>>& getOperations() const;
 private:
     std::unordered_map<std::string, std::vector<OperationLog>> _operations;

     bool _pretty_print = false;
-
-    std::string _convertFilepath(const std::string& original);
 };

 #endif //C_ANALYZER_OPERATIONSTORAGE_HPP
@@ -35,9 +35,8 @@ void OperationStorage::toFile(const std::string& output_filename)
 }


-void OperationStorage::pushOperation(const std::string& original_filename, OperationLog&& op)
+void OperationStorage::pushOperation(const std::string& filename, OperationLog&& op)
 {
-    const std::string filename = _convertFilepath(original_filename);
     auto it = _operations.find(filename);

     if (it == _operations.end())
@@ -50,10 +49,3 @@ const std::unordered_map<std::string, std::vector<OperationLog>>& OperationStora
 {
     return _operations;
 }
-
-std::string OperationStorage::_convertFilepath(const std::string& original)
-{
-    const std::filesystem::path path = original;
-
-    return path.filename().string();
-}
@@ -8,7 +8,7 @@ from typing import Dict, List
 class GCovLine:
     line_number: int
     count: int
-    branch_number: int = 0
+    is_fallthrough: bool = False


 class GCovFile:
@@ -28,11 +28,8 @@ class GCovFile:

         for line in file["lines"]:
             if len(line["branches"]):
-                # GCov reports branches in reverse order to our parser.
-                branch_number = len(line["branches"])
                 for branch in line["branches"]:
-                    lines.append(GCovLine(line["line_number"], branch["count"], branch_number))
-                    branch_number -= 1
+                    lines.append(GCovLine(line["line_number"], branch["count"], branch["fallthrough"]))
             else:
                 lines.append(GCovLine(line["line_number"], line["count"]))

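Aside on the change above: the reworked reader drops the reverse-ordered branch_number bookkeeping and instead keeps gcovr's per-branch fallthrough flag. Below is a minimal, self-contained sketch of that mapping, assuming a gcovr-style JSON record with the line_number, count, branches and fallthrough keys used in the diff; the sample values are invented for illustration only.

from dataclasses import dataclass

@dataclass
class GCovLine:  # mirrors the fields shown in the diff above
    line_number: int
    count: int
    is_fallthrough: bool = False

# Hypothetical gcovr-style record; the numbers are illustrative only.
line = {
    "line_number": 21,
    "count": 18,
    "branches": [
        {"count": 15, "fallthrough": True},
        {"count": 3, "fallthrough": False},
    ],
}

lines = []
if len(line["branches"]):
    # New behaviour: keep gcovr's own fallthrough flag per branch
    # instead of counting branch numbers in reverse.
    for branch in line["branches"]:
        lines.append(GCovLine(line["line_number"], branch["count"], branch["fallthrough"]))
else:
    lines.append(GCovLine(line["line_number"], line["count"]))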
58  op-summarizer/op-summarizer.py  Normal file
@@ -0,0 +1,58 @@
+from dataclasses import dataclass
+from typing import Dict, List, Optional
+
+from gcovreader import GCovFile, GCovLine
+from opfinderreader import OperationLogReader, OperationLog
+
+
+@dataclass(frozen=True)
+class UniqueOperation:
+    name: str
+    type_lhs: Optional[str]
+    type_rhs: Optional[str]
+    type_expr: Optional[str]
+
+    @staticmethod
+    def from_operation_log(op_log: OperationLog) -> "UniqueOperation":
+        return UniqueOperation(
+            op_log.operation,
+            op_log.operand_lhs,
+            op_log.operand_rhs,
+            op_log.operand_result
+        )
+
+
+if __name__ == "__main__":
+    gcov = GCovFile("./data/gcov.json")
+    ops = OperationLogReader("./data/opfinder.json")
+
+    gcov.read()
+    ops.read()
+
+    for file_name in gcov.files:
+        op_counter: Dict[UniqueOperation, int] = {}
+
+        if file_name not in ops.files:
+            print(f"Couldn't find {file_name} in op-finder output. Skipping.")
+            continue
+
+        loop_stack: List[int] = []
+
+        for gcov_line in gcov.files[file_name]:
+            op_lines = ops.get_lines(file_name, gcov_line.line_number)
+            for opfinder_line in op_lines:
+                # TODO: revise this. Need a special case for for-loop clauses
+                # or branching in general.
+                if opfinder_line.is_fallthrough != gcov_line.is_fallthrough:
+                    continue
+
+                unique_op = UniqueOperation.from_operation_log(opfinder_line)
+
+                if unique_op in op_counter:
+                    op_counter[unique_op] += gcov_line.count
+                else:
+                    op_counter[unique_op] = gcov_line.count
+
+        print(f"Unique operations for file {file_name}:")
+        for uop, count in op_counter.items():
+            print(f"\t{count}: {uop}")
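Note on the new script above: UniqueOperation is a frozen dataclass, so instances hash by value and can be used directly as dictionary keys when accumulating gcov execution counts. A stripped-down sketch of that accumulation, with invented operands and counts:

from dataclasses import dataclass
from typing import Dict, Optional

@dataclass(frozen=True)
class UniqueOperation:  # same fields as in the new file above
    name: str
    type_lhs: Optional[str]
    type_rhs: Optional[str]
    type_expr: Optional[str]

op_counter: Dict[UniqueOperation, int] = {}

# Two logs of the same "+" on ints collapse onto one key; their counts add up.
for count in (15, 60):
    op = UniqueOperation("+", "int", "int", "int")
    op_counter[op] = op_counter.get(op, 0) + count

for uop, count in op_counter.items():
    print(f"\t{count}: {uop}")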
@@ -1,48 +1,18 @@
 import json

-from enum import Enum
-from dataclasses import dataclass
-from typing import Any, Dict, List, Union
-
-
-class OperationType(Enum):
-    BASIC = "basic_operation"
-    FUNCTION_CALL = "function_call"
-
-
-@dataclass(frozen=True)
-class BasicOperation:
-    operation_name: str
-    type_lhs: str
-    type_rhs: str
-    type_result: str
-
-
-@dataclass(frozen=True)
-class FunctionCall:
-    function_name: str
-    call_result_type: str
-
-
-UniqueOperation = Union[BasicOperation, FunctionCall]
+from dataclasses import dataclass, field
+from typing import Dict, List


 @dataclass
 class OperationLog:
+    operation: str
     line: int
-    branch_number: int
-    entry_type: OperationType
-    entry: UniqueOperation
-
-    def __post_init__(self) -> None:
-        self.entry_type = OperationType(self.entry_type)
-
-        if self.entry_type == OperationType.BASIC:
-            self.entry = BasicOperation(**self.entry)
-        elif self.entry_type == OperationType.FUNCTION_CALL:
-            self.entry = FunctionCall(**self.entry)
-        else:
-            assert False, "Unaccounted for operation type."
+    operand_lhs: str
+    operand_rhs: str
+    operand_result: str
+    is_fallthrough: bool
+    current_for_loops: list[int] = field(default_factory=list)


 class OperationLogReader:
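For reference, the new OperationLog expects flat op-finder records (an operation name plus operand types and a fallthrough flag) instead of the old tagged entry_type/entry objects. A rough sketch of constructing one, assuming the op-finder JSON now uses exactly these keys; the record below is invented, not taken from the repository.

from dataclasses import dataclass, field

@dataclass
class OperationLog:  # same shape as the new dataclass in the hunk above
    operation: str
    line: int
    operand_lhs: str
    operand_rhs: str
    operand_result: str
    is_fallthrough: bool
    current_for_loops: list[int] = field(default_factory=list)

# Hypothetical flat record in the new format; the old format instead nested
# the details in an "entry" object tagged by "entry_type".
record = {
    "operation": "+",
    "line": 12,
    "operand_lhs": "int",
    "operand_rhs": "int",
    "operand_result": "int",
    "is_fallthrough": False,
}

log = OperationLog(**record)
print(log.operation, log.current_for_loops)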
@@ -1,83 +0,0 @@
-import json
-
-from dataclasses import asdict
-from typing import Dict, List
-
-from gcovreader import GCovFile
-from opfinderreader import OperationLogReader, UniqueOperation
-
-
-class OpSummarizer:
-    def __init__(self, gcov_path: str, opfinder_path: str) -> None:
-        self.gcov = GCovFile(gcov_path)
-        self.ops = OperationLogReader(opfinder_path)
-
-        self.gcov.read()
-        self.ops.read()
-
-    def count_operations(self, file: str) -> Dict[UniqueOperation, int]:
-        if file not in self.gcov.files or file not in self.ops.files:
-            raise RuntimeError(f"File {file} not in both parsers.")
-
-        op_counter: Dict[UniqueOperation, int] = {}
-
-        for gcov_line in self.gcov.files[file]:
-            op_lines = self.ops.get_lines(file, gcov_line.line_number)
-            for op_log in op_lines:
-                # TODO: revise this. Need a special case for for-loop clauses
-                # or branching in general.
-                if op_log.branch_number != gcov_line.branch_number:
-                    continue
-
-                unique_op = op_log.entry
-
-                if unique_op in op_counter:
-                    op_counter[unique_op] += gcov_line.count
-                else:
-                    op_counter[unique_op] = gcov_line.count
-
-        return op_counter
-
-    @staticmethod
-    def operation_count_to_json_dict(unique_ops: Dict[UniqueOperation, int]) -> List[Dict]:
-        out = []
-
-        for uo, uo_count in unique_ops.items():
-            d = asdict(uo)
-            d["count"] = uo_count
-            out.append(d)
-
-        return out
-
-
-if __name__ == "__main__":
-    import argparse
-
-    parser = argparse.ArgumentParser(description="Merges gcovr and op-finder outputs.")
-    parser.add_argument("files", metavar="FILES", type=str, nargs="+",
-                        help="The files to accumulate.")
-    parser.add_argument("--gcov", type=str, default="./data/gcov.json",
-                        help="The gcovr json file to use.")
-    parser.add_argument("--finder", type=str, default="./data/opfinder.json",
-                        help="The op-finder json file to use.")
-    parser.add_argument("--output", type=str, default=None, required=False,
-                        help="The file to output the data to.")
-    args = parser.parse_args()
-
-    summarizer = OpSummarizer(args.gcov, args.finder)
-
-    total_count = {}
-
-    for file_name in args.files:
-        ops = summarizer.count_operations(file_name)
-        total_count[file_name] = summarizer.operation_count_to_json_dict(ops)
-
-        print(f"Unique operations for file {file_name}:")
-        for uop, count in ops.items():
-            print(f"\t{count}: {uop}")
-
-        print("---------")
-
-    if args.output:
-        with open(args.output, "w") as outfile:
-            json.dump(total_count, outfile)
@@ -1,76 +0,0 @@
-import unittest
-import subprocess
-import json
-
-from dataclasses import asdict
-from typing import Dict
-
-from opfinderreader import UniqueOperation
-from opsummarizer import OpSummarizer
-
-
-class Compiler:
-    def __init__(self, root_file: str) -> None:
-        self.root_file = root_file
-        self.gcov_file = f"{root_file}_gcov.json"
-        self.opfinder_file = f"{root_file}_opfinder.json"
-
-    def compile_and_profile(self) -> None:
-        output_file = f"{self.root_file}.out"
-        input_file = f"{self.root_file}.c"
-
-        subprocess.call(["gcc",
-                         "-fprofile-arcs",
-                         "-ftest-coverage",
-                         "-o", output_file,
-                         input_file])
-
-        subprocess.call([f"./{output_file}"])
-
-        subprocess.call(["gcovr",
-                         "-r", ".",
-                         "--json",
-                         "--output", self.gcov_file])
-
-    def find_operations(self) -> None:
-        input_file = f"{self.root_file}.c"
-
-        subprocess.call(["./op-finder",
-                         "-o", self.opfinder_file,
-                         input_file])
-
-
-class SummarizerCreatesExpectedOutput(unittest.TestCase):
-    def __init__(self, test_name, file_name) -> None:
-        super(SummarizerCreatesExpectedOutput, self).__init__(test_name)
-        self.root_file_name = file_name
-        self.compiler = Compiler(file_name)
-
-    def setUp(self) -> None:
-        self.compiler.compile_and_profile()
-        self.compiler.find_operations()
-
-    def _get_etalon(self) -> Dict:
-        filename = f"{self.root_file_name}_expected.json"
-        with open(filename, "r") as f:
-            return json.load(f)[f"{self.root_file_name}.c"]
-
-    def test_summarizer_output(self) -> None:
-        summarizer = OpSummarizer(self.compiler.gcov_file, self.compiler.opfinder_file)
-        found = summarizer.count_operations(f"{self.root_file_name}.c")
-        found = summarizer.operation_count_to_json_dict(found)
-
-        etalon = self._get_etalon()
-
-        self.assertEqual(found, etalon, msg="Found operations doesn't match etalon dictionary.")
-
-
-if __name__ == "__main__":
-    suite = unittest.TestSuite()
-
-    suite.addTest(SummarizerCreatesExpectedOutput("test_summarizer_output", "matrix"))
-    suite.addTest(SummarizerCreatesExpectedOutput("test_summarizer_output", "gauss_blur"))
-    suite.addTest(SummarizerCreatesExpectedOutput("test_summarizer_output", "for_loop"))
-    suite.addTest(SummarizerCreatesExpectedOutput("test_summarizer_output", "fir"))
-
-    unittest.TextTestRunner(verbosity=2).run(suite)
24  testcases/assignment.c  Normal file
@@ -0,0 +1,24 @@
+#define A (unsigned long)4
+#define STUFF int c = a + b
+
+#warning butts
+
+#include "stuff.h"
+
+void g()
+{
+    int a = 4;
+    int b = 6;
+
+    int c = a + b;
+    (void)c;
+}
+
+int main()
+{
+    short a = A;
+    char b = 3;
+
+    STUFF;
+    int c2 = a + A;
+}
@@ -1 +0,0 @@
-{"fir.c": [{"operation_name": "<", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 324}, {"operation_name": "++", "type_lhs": "int", "type_rhs": "", "type_result": "int", "count": 324}, {"operation_name": "=", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 37}, {"operation_name": "=", "type_lhs": "volatile float", "type_rhs": "float", "type_result": "float", "count": 360}, {"operation_name": "/", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 360}, {"operation_name": "+", "type_lhs": "float", "type_rhs": "float", "type_result": "float", "count": 324}, {"operation_name": "*", "type_lhs": "float", "type_rhs": "float", "type_result": "float", "count": 324}, {"operation_name": "subscript", "type_lhs": "const float *", "type_rhs": "int", "type_result": "const float", "count": 324}, {"operation_name": "+", "type_lhs": "unsigned int", "type_rhs": "unsigned int", "type_result": "unsigned int", "count": 288}, {"operation_name": "subscript", "type_lhs": "const unsigned int *", "type_rhs": "int", "type_result": "const unsigned int", "count": 612}, {"operation_name": "-", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 288}, {"operation_name": "+", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 612}, {"operation_name": "subscript", "type_lhs": "volatile float *", "type_rhs": "int", "type_result": "volatile float", "count": 36}]}
@@ -1,7 +0,0 @@
-int main()
-{
-    int i;
-    for (i = 0; i < 0; i++)
-    {
-    }
-}
@@ -1 +0,0 @@
-{"for_loop.c": [{"operation_name": "<", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 0}, {"operation_name": "++", "type_lhs": "int", "type_rhs": "", "type_result": "int", "count": 0}, {"operation_name": "=", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 1}]}
@@ -1,8 +1,7 @@

-//#include <stdlib.h>
-// TODO: stddef.h not found.
+#include <stdlib.h>

-int main(){
+int gauss_blur(){



@@ -1 +0,0 @@
-{"gauss_blur.c": [{"operation_name": "=", "type_lhs": "unsigned char *", "type_rhs": "unsigned char *", "type_result": "unsigned char *", "count": 4}, {"function_name": "malloc", "call_result_type": "void *", "count": 6}, {"operation_name": "*", "type_lhs": "unsigned long", "type_rhs": "unsigned long", "type_result": "unsigned long", "count": 6}, {"operation_name": "*", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 36868}, {"operation_name": "=", "type_lhs": "unsigned char **", "type_rhs": "unsigned char **", "type_result": "unsigned char **", "count": 2}, {"operation_name": "=", "type_lhs": "unsigned char", "type_rhs": "unsigned char", "type_result": "unsigned char", "count": 5000}, {"operation_name": "subscript", "type_lhs": "unsigned char *", "type_rhs": "int", "type_result": "unsigned char", "count": 41864}, {"operation_name": "==", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 18}, {"operation_name": "<", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 18824}, {"operation_name": "++", "type_lhs": "int", "type_rhs": "", "type_result": "int", "count": 69512}, {"operation_name": "=", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 18820}, {"operation_name": "-", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 50688}, {"operation_name": "<=", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 50688}, {"operation_name": "-", "type_lhs": "int", "type_rhs": "", "type_result": "int", "count": 18432}, {"operation_name": "||", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 36864}, {"operation_name": "!=", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 73728}, {"operation_name": "=", "type_lhs": "char", "type_rhs": "char", "type_result": "char", "count": 36864}, {"function_name": "abs", "call_result_type": "int", "count": 36864}, {"operation_name": "+", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 110592}, {"operation_name": ">", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 8680}, {"function_name": "free", "call_result_type": "void", "count": 4}]}
@@ -17,12 +17,12 @@ int main(void)
 {
     int m, n, p;
     volatile UInt16 m3[3][5];
-    for(m = 0; m < 3; m++) // 1
+    for(m = 0; m < 3; m++)
     {
-        for(p = 0; p < 5; p++) // 3
+        for(p = 0; p < 5; p++)
         {
-            m3[m][p] = 0; // 15: BasicOperation(operation_name='=', type_lhs='unsigned short', type_rhs='unsigned short', type_result='unsigned short')
-            for(n = 0; n < 4; n++) // 15
+            m3[m][p] = 0;
+            for(n = 0; n < 4; n++)
             {
                 m3[m][p] += m1[m][n] * m2[n][p];
             }
@@ -1 +0,0 @@
-{"matrix.c": [{"operation_name": "<", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 78}, {"operation_name": "++", "type_lhs": "int", "type_rhs": "", "type_result": "int", "count": 78}, {"operation_name": "=", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 19}, {"operation_name": "=", "type_lhs": "unsigned short", "type_rhs": "unsigned short", "type_result": "unsigned short", "count": 15}, {"operation_name": "subscript", "type_lhs": "volatile UInt16 *", "type_rhs": "int", "type_result": "unsigned short", "count": 75}, {"operation_name": "subscript", "type_lhs": "volatile UInt16 (*)[5]", "type_rhs": "int", "type_result": "volatile UInt16 [5]", "count": 75}, {"operation_name": "*", "type_lhs": "int", "type_rhs": "int", "type_result": "int", "count": 60}, {"operation_name": "subscript", "type_lhs": "const UInt16 *", "type_rhs": "int", "type_result": "unsigned short", "count": 120}, {"operation_name": "subscript", "type_lhs": "const UInt16 (*)[4]", "type_rhs": "int", "type_result": "const UInt16 [4]", "count": 60}, {"operation_name": "subscript", "type_lhs": "const UInt16 (*)[5]", "type_rhs": "int", "type_result": "const UInt16 [5]", "count": 60}]}
17  testcases/stuff.h  Normal file
@@ -0,0 +1,17 @@
+//
+// Created by erki on 23.02.21.
+//
+
+#ifndef C_ANALYZER_STUFF_H
+#define C_ANALYZER_STUFF_H
+
+void f()
+{
+    int a = 4;
+    int b = 6;
+
+    int c = a + b;
+    (void)c;
+}
+
+#endif //C_ANALYZER_STUFF_H