Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion web/api/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,12 @@ container so `docker` needs to be installed to generate the stubs.
- [`py/codechecker_api/setup.py`](py/codechecker_api/setup.py)
- [`py/codechecker_api_shared/setup.py`](py/codechecker_api_shared/setup.py)
- [`js/codechecker-api-node/package.json`](js/codechecker-api-node/package.json)
- [`/web/server/vue-cli/package.json`](/web/server/vue-cli/package.json)
- Let's assume that the current API version is `6.39.0`. Run the
[change-api-version.sh](change-api-version.sh) script to increment the API
version: `change-api-version.sh 6.40.0`.
- Update the supported API versions to `6.40` in the server files:
- `web/codechecker_web/shared/version.py`
- `/web/codechecker_web/shared/version.py`
- Run the command `make build` to generate the Thrift API stubs and to create
new pypi and npm packages. It will modify the following files:
- [`py/codechecker_api/dist/codechecker_api.tar.gz`](py/codechecker_api/dist/codechecker_api.tar.gz)
Expand Down
Binary file not shown.
Binary file not shown.
2 changes: 1 addition & 1 deletion web/api/js/codechecker-api-node/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "codechecker-api",
"version": "6.69.0",
"version": "6.70.0",
"description": "Generated node.js compatible API stubs for CodeChecker server.",
"main": "lib",
"homepage": "https://github.com/Ericsson/codechecker",
Expand Down
Binary file modified web/api/py/codechecker_api/dist/codechecker_api.tar.gz
Binary file not shown.
2 changes: 1 addition & 1 deletion web/api/py/codechecker_api/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
with open('README.md', encoding='utf-8', errors="ignore") as f:
long_description = f.read()

api_version = '6.69.0'
api_version = '6.70.0'

setup(
name='codechecker_api',
Expand Down
Binary file not shown.
2 changes: 1 addition & 1 deletion web/api/py/codechecker_api_shared/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
with open('README.md', encoding='utf-8', errors="ignore") as f:
long_description = f.read()

api_version = '6.69.0'
api_version = '6.70.0'

setup(
name='codechecker_api_shared',
Expand Down
1 change: 1 addition & 0 deletions web/api/report_server.thrift
Original file line number Diff line number Diff line change
Expand Up @@ -347,6 +347,7 @@ struct ReportData {
// of custom labels that describe some properties of a report. For example the
// timestamp in case of dynamic analyzers when the report was actually emitted.
18: optional map<string, string> annotations,
19: optional BlameInfo blameInfo, // Contains the git blame information of the report if it exists.
}
typedef list<ReportData> ReportDataList

Expand Down
2 changes: 1 addition & 1 deletion web/codechecker_web/shared/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
# The newest supported minor version (value) for each supported major version
# (key) in this particular build.
SUPPORTED_VERSIONS = {
6: 69
6: 70
}

# Used by the client to automatically identify the latest major and minor
Expand Down
53 changes: 49 additions & 4 deletions web/server/codechecker_server/api/report_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -2267,9 +2267,15 @@ def getRunResults(self, run_ids, limit, offset, sort_types,

# Get report details if it is required.
report_details = {}
if get_details:
report_ids = [r.id for r in query_result]
blame_infos = {}
if get_details and len(query_result):
report_ids, blames = zip(*[
(
r.id,
(r.id, self.getBlameInfo(r.file_id))
) for r in query_result])
report_details = get_report_details(session, report_ids)
blame_infos = dict(blames)

for row in query_result:
annotations = {
Expand All @@ -2286,6 +2292,21 @@ def getRunResults(self, run_ids, limit, offset, sort_types,
row.review_status_date,
row.review_status_is_in_source)

blame_info = blame_infos.get(row.id)
if blame_info and blame_info.commits and blame_info.blame:
blame_data = [b for b in blame_info.blame
if row.line >= b.startLine
and row.line <= b.endLine]
commitHash = blame_data[0].commitHash \
if len(blame_data) else None
commitInfo = {cHash: commit for cHash, commit
in blame_info.commits.items()
if cHash == commitHash}
blame_info = BlameInfo(
commits=commitInfo,
blame=blame_data
)

results.append(
ReportData(runId=row.run_id,
bugHash=row.bug_id,
Expand All @@ -2305,6 +2326,7 @@ def getRunResults(self, run_ids, limit, offset, sort_types,
fixedAt=str(row.fixed_at),
bugPathLength=row.path_length,
details=report_details.get(row.id),
blameInfo=blame_info,
annotations=annotations))
else: # not is_unique
filter_expression, join_tables = process_report_filter(
Expand Down Expand Up @@ -2371,9 +2393,15 @@ def getRunResults(self, run_ids, limit, offset, sort_types,

# Get report details if it is required.
report_details = {}
if get_details:
report_ids = [r[0].id for r in query_result]
blame_infos = {}
if get_details and len(query_result):
report_ids, blames = zip(*[
(
r[0].id,
(r[0].id, self.getBlameInfo(r[0].file_id))
) for r in query_result])
report_details = get_report_details(session, report_ids)
blame_infos = dict(blames)

for row in query_result:
report, filepath = row[0], row[1]
Expand All @@ -2388,6 +2416,22 @@ def getRunResults(self, run_ids, limit, offset, sort_types,
report.review_status_date,
report.review_status_is_in_source)

blame_info = blame_infos[report.id] \
if report.id in blame_infos else None
if blame_info and blame_info.commits and blame_info.blame:
blame_data = [b for b in blame_info.blame
if report.line >= b.startLine
and report.line <= b.endLine]
commitHash = blame_data[0].commitHash \
if len(blame_data) else None
commitInfo = {cHash: commit for cHash, commit
in blame_info.commits.items()
if cHash == commitHash}
blame_info = BlameInfo(
commits=commitInfo,
blame=blame_data
)

results.append(
ReportData(runId=report.run_id,
bugHash=report.bug_id,
Expand All @@ -2408,6 +2452,7 @@ def getRunResults(self, run_ids, limit, offset, sort_types,
report.fixed_at else None,
bugPathLength=report.path_length,
details=report_details.get(report.id),
blameInfo=blame_info,
annotations=annotations))

return results
Expand Down
Loading
Loading