commit: d80a7a0551d4d0dd4ce5431dc66377418ee199e7
Author: Magnus Granberg <zorry <AT> gentoo <DOT> org>
AuthorDate: Sun Dec 12 10:57:54 2021 +0000
Commit: Magnus Granberg <zorry <AT> gentoo <DOT> org>
CommitDate: Sun Dec 12 10:57:54 2021 +0000
URL:
https://gitweb.gentoo.org/proj/tinderbox-cluster.git/commit/?id=d80a7a05
Add MakeIssue code in parse_build_log
Signed-off-by: Magnus Granberg <zorry <AT> gentoo.org>
buildbot_gentoo_ci/db/model.py | 2 +-
buildbot_gentoo_ci/logs/log_parser.py | 75 +++++++++++++-----------
buildbot_gentoo_ci/steps/logs.py | 106 +++++++++++++++++++++++++++++-----
3 files changed, 133 insertions(+), 50 deletions(-)
diff --git a/buildbot_gentoo_ci/db/model.py b/buildbot_gentoo_ci/db/model.py
index 75ba873..18fb7ff 100644
--- a/buildbot_gentoo_ci/db/model.py
+++ b/buildbot_gentoo_ci/db/model.py
@@ -230,7 +230,7 @@ class Model(base.DBConnectorComponent):
sa.Column('start', sa.Integer, default=0),
sa.Column('end', sa.Integer, default=0),
sa.Column('status', sa.Enum('info', 'warning', 'ignore', 'error'), default='info'),
- sa.Column('type', sa.Enum('info', 'qa', 'compile', 'configure', 'install', 'postinst', 'prepare', 'setup', 'test', 'unpack', 'ignore', 'issues', 'misc', 'elog'), default='info'),
+ sa.Column('type', sa.Enum('info', 'qa', 'compile', 'configure', 'install', 'postinst', 'prepare', 'pretend', 'setup', 'test', 'unpack', 'ignore', 'issues', 'misc', 'elog'), default='info'),
sa.Column('search_type', sa.Enum('in', 'startswith', 'endswith', 'search'), default='in'),
)
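
The 'pretend' value added above matches the phase name that shows up in Portage failure lines of the form ' * ERROR: <cpv> failed (<phase> phase):'; the MakeIssue step further down splits such lines to recover the failing phase. A minimal sketch of that extraction, with an invented cpv:

    text = ' * ERROR: dev-util/foo-1.0::gentoo failed (pretend phase):'  # invented sample line

    if text.startswith(' * ERROR:') and text.endswith(' phase):'):
        # same split as in MakeIssue.run below: take what sits between ' (' and ' phase'
        phase = text.split(' (')[1].split(' phase')[0]
        print(phase)  # prints: pretend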
diff --git a/buildbot_gentoo_ci/logs/log_parser.py b/buildbot_gentoo_ci/logs/log_parser.py
index 550573d..11faf48 100644
--- a/buildbot_gentoo_ci/logs/log_parser.py
+++ b/buildbot_gentoo_ci/logs/log_parser.py
@@ -21,7 +21,7 @@ class ProjectsPattern(Base):
start = sa.Column(sa.Integer, default=0)
end = sa.Column(sa.Integer, default=0)
status = sa.Column(sa.Enum('info', 'warning', 'ignore', 'error'), default='info')
- type = sa.Column(sa.Enum('info', 'qa', 'compile', 'configure', 'install', 'postinst', 'prepare', 'setup', 'test', 'unpack', 'ignore', 'issues', 'misc', 'elog'), default='info')
+ type = sa.Column(sa.Enum('info', 'qa', 'compile', 'configure', 'install', 'postinst', 'prepare', 'pretend', 'setup', 'test', 'unpack', 'ignore', 'issues', 'misc', 'elog'), default='info')
search_type = sa.Column(sa.Enum('in', 'startswith', 'endswith', 'search'), default='in')
def getDBSession(config):
@@ -31,26 +31,23 @@ def getDBSession(config):
return Session()
def getMultiprocessingPool(config):
- # Use cores/4 when multiprocessing
return Pool(processes = int(config['core']))
- # multi_pool = getMultiprocessingPool()
- # use multi_pool.apply_async(function, (args list)
- # multi_pool.close()
- # multi_pool.join()
def addPatternToList(Session, pattern_list, uuid):
for project_pattern in Session.query(ProjectsPattern).filter_by(project_uuid=uuid).all():
# check if the search pattern is vaild
+ project_pattern_search = project_pattern.search
try:
- re.compile(project_pattern.search)
+ re.compile(project_pattern_search)
except re.error:
print("Non valid regex pattern")
- print(project_pattern)
+ print(project_pattern.search)
+ print(project_pattern.id)
else:
patten_dict = {}
patten_dict['id'] = project_pattern.id
patten_dict['project_uuid'] = project_pattern.project_uuid
- patten_dict['search'] = project_pattern.search
+ patten_dict['search'] = project_pattern_search
patten_dict['start'] = project_pattern.start
patten_dict['end'] = project_pattern.end
patten_dict['status'] = project_pattern.status
@@ -74,25 +71,34 @@ def search_buildlog(log_search_pattern_list, logfile_text_dict, tmp_index, max_t
# loop true the pattern list for match
for search_pattern in log_search_pattern_list:
search_hit = False
- if search_pattern['search_type'] == 'in':
- if search_pattern['search'] in text_line:
- search_hit = True
- if search_pattern['search_type'] == 'startswith':
- if text_line.startswith(search_pattern['search']):
- search_hit = True
- if search_pattern['search_type'] == 'endswith':
- if text_line.endswith(search_pattern['search']):
- search_hit = True
- if search_pattern['search_type'] == 'search':
- if re.search(search_pattern['search'], text_line):
- search_hit = True
+ # check if should ignore the line
+ #FIXME take the ignore line pattern from db
+ if text_line.startswith('>>> /'):
+ pass
+ elif re.search('./\w+/', text_line):
+ pass
+ else:
+ # search for match
+ if search_pattern['search_type'] == 'in':
+ if search_pattern['search'] in text_line:
+ search_hit = True
+ if search_pattern['search_type'] == 'startswith':
+ if text_line.startswith(search_pattern['search']):
+ search_hit = True
+ if search_pattern['search_type'] == 'endswith':
+ if text_line.endswith(search_pattern['search']):
+ search_hit = True
+ if search_pattern['search_type'] == 'search':
+ if re.search(search_pattern['search'], text_line):
+ search_hit = True
# add the line if the pattern match
if search_hit:
summery_dict[tmp_index] = {}
summery_dict[tmp_index]['text'] = text_line
summery_dict[tmp_index]['type'] = search_pattern['type']
summery_dict[tmp_index]['status'] = search_pattern['status']
- summery_dict[tmp_index]['search_pattern_id'] = search_pattern['id']
+ summery_dict[tmp_index]['id'] = search_pattern['id']
+ summery_dict[tmp_index]['search_pattern'] = search_pattern['search']
# add upper text lines if requested
# max 5
if search_pattern['start'] != 0:
@@ -108,6 +114,8 @@ def search_buildlog(log_search_pattern_list, logfile_text_dict, tmp_index, max_t
summery_dict[i]['text'] = logfile_text_dict[i]
summery_dict[i]['type'] = 'info'
summery_dict[i]['status'] = 'info'
+ summery_dict[i]['id'] = 0
+ summery_dict[i]['search_pattern'] = 'auto'
# add lower text lines if requested
# max 5
if search_pattern['end'] != 0:
@@ -124,6 +132,8 @@ def search_buildlog(log_search_pattern_list, logfile_text_dict, tmp_index, max_t
summery_dict[i]['text'] = logfile_text_dict[i]
summery_dict[i]['type'] = 'info'
summery_dict[i]['status'] = 'info'
+ summery_dict[i]['id'] = 0
+ summery_dict[i]['search_pattern'] = 'auto'
else:
# we add all line that start with ' * ' as info
# we add all line that start with '>>>' but not '>>> /' as info
@@ -133,6 +143,8 @@ def search_buildlog(log_search_pattern_list, logfile_text_dict, tmp_index, max_t
summery_dict[tmp_index]['text'] = text_line
summery_dict[tmp_index]['type'] = 'info'
summery_dict[tmp_index]['status'] = 'info'
+ summery_dict[tmp_index]['id'] = 0
+ summery_dict[tmp_index]['search_pattern'] = 'auto'
#FIXME: print json
if summery_dict == {}:
return None
@@ -154,21 +166,18 @@ def runLogParser(args):
#NOTE: The patten is from https://github.com/toralf/tinderbox/tree/master/data files.
# Is stored in a db instead of files.
log_search_pattern_list = get_log_search_pattern(Session, args.uuid, config['default_uuid'])
+ Session.close()
with io.TextIOWrapper(io.BufferedReader(gzip.open(args.file, 'rb'))) as f:
#FIXME: add support for multiprocessing
for text_line in f:
logfile_text_dict[index] = text_line.strip('\n')
- # run the parse patten on the line
- #search_buildlog(log_search_pattern_list, logfile_text_dict, index, max_text_lines)
- res = mp_pool.apply_async(search_buildlog, (log_search_pattern_list, logfile_text_dict, index, max_text_lines,))
- if res.get() is not None:
- print(json.dumps(res.get()))
- # remove text line that we don't need any more
- if index >= 20:
- del logfile_text_dict[index - 19]
index = index + 1
max_text_lines = index
- mp_pool.close()
- mp_pool.join()
f.close()
- Session.close()
+ # run the parse patten on the line
+ for tmp_index, text in logfile_text_dict.items():
+ res = mp_pool.apply_async(search_buildlog, (log_search_pattern_list, logfile_text_dict, tmp_index, max_text_lines,))
+ if res.get() is not None:
+ print(json.dumps(res.get()))
+ mp_pool.close()
+ mp_pool.join()
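
With the rework above, the whole decompressed log is first buffered into logfile_text_dict and only then is search_buildlog dispatched per line through the pool, so every worker can still reach the surrounding lines for the start/end context. A rough self-contained sketch of that dispatch loop, with a stub standing in for search_buildlog and the pool size hard-coded:

    import json
    from multiprocessing import Pool

    def stub_search(pattern_list, text_dict, index, max_lines):
        # stand-in for search_buildlog(): report lines containing 'error'
        line = text_dict[index]
        if 'error' in line:
            return {index: {'text': line, 'type': 'issues', 'status': 'error',
                            'id': 0, 'search_pattern': 'auto'}}
        return None

    if __name__ == '__main__':
        logfile_text_dict = {}
        index = 1
        for text_line in ['>>> Compiling source ...', 'foo.c:10: error: oops', '>>> Failed']:
            logfile_text_dict[index] = text_line.strip('\n')
            index = index + 1
        max_text_lines = index
        mp_pool = Pool(processes=2)
        # run the parse pattern on every buffered line, as runLogParser now does
        for tmp_index, text in logfile_text_dict.items():
            res = mp_pool.apply_async(stub_search, (None, logfile_text_dict, tmp_index, max_text_lines,))
            if res.get() is not None:
                print(json.dumps(res.get()))
        mp_pool.close()
        mp_pool.join()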
diff --git a/buildbot_gentoo_ci/steps/logs.py b/buildbot_gentoo_ci/steps/logs.py
index e6e8495..dc5d3a1 100644
--- a/buildbot_gentoo_ci/steps/logs.py
+++ b/buildbot_gentoo_ci/steps/logs.py
@@ -26,13 +26,20 @@ from buildbot_gentoo_ci.steps import master as master_steps
def PersOutputOfLogParser(rc, stdout, stderr):
build_summery_output = {}
build_summery_output['rc'] = rc
- build_summery_output_json_list = []
+ summary_log_dict = {}
# split the lines
for line in stdout.split('\n'):
- #FIXME: check if line start with {[1-9]: {
+ #FIXME: check if line start with {"[1-9]": {
if line.startswith('{'):
- build_summery_output_json_list.append(json.loads(line))
- build_summery_output['build_summery_output_json'] = build_summery_output_json_list
+ for k, v in json.loads(line).items():
+ summary_log_dict[int(k)] = {
+ 'text' : v['text'],
+ 'type' : v['type'],
+ 'status' : v['status'],
+ 'id' : v['id'],
+ 'search_pattern' : v['search_pattern']
+ }
+ build_summery_output['summary_log_dict'] = summary_log_dict
#FIXME: Handling of stderr output
return {
'build_summery_output' : build_summery_output
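
PersOutputOfLogParser now folds every JSON line printed by the log parser into one dict keyed by the integer log-line number, which is what MakeIssue sorts over in the next hunk. A toy sketch of that conversion, over invented stdout:

    import json

    # two lines as the log parser would print them; the contents are invented
    stdout = (
        '{"4": {"text": " * QA Notice: ...", "type": "qa", "status": "warning", "id": 3, "search_pattern": "QA Notice"}}\n'
        '{"9": {"text": " * ERROR: dev-util/foo-1.0::gentoo failed (compile phase):", "type": "issues", "status": "error", "id": 7, "search_pattern": "ERROR:"}}'
    )

    summary_log_dict = {}
    for line in stdout.split('\n'):
        if line.startswith('{'):
            for k, v in json.loads(line).items():
                summary_log_dict[int(k)] = v  # keyed by log line number

    print(sorted(summary_log_dict))  # prints: [4, 9]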
@@ -266,27 +273,94 @@ class MakeIssue(BuildStep):
def __init__(self, **kwargs):
super().__init__(**kwargs)
- #@defer.inlineCallbacks
+ @defer.inlineCallbacks
+ def logIssue(self):
+ separator1 = '\n'
+ separator2 = ' '
+ log = yield self.addLog('issue')
+ if self.getProperty('faild_cpv'):
+ cpv = self.getProperty('faild_cpv')
+ else:
+ cpv = self.getProperty('cpv')
+ yield log.addStdout('Titel:' + '\n')
+ yield log.addStdout(separator2.join([cpv, '-', self.error_dict['title']]) + separator1)
+ yield log.addStdout('Summary:' + '\n')
+ for line in self.summary_log_list:
+ yield log.addStdout(line + '\n')
+ yield log.addStdout('Attachments:' + '\n')
+ yield log.addStdout('emerge_info.log' + '\n')
+ log_cpv = self.getProperty('log_build_data')[cpv]
+ yield log.addStdout(log_cpv['full_logname'] + '\n')
+ yield log.addStdout('world.log' + '\n')
+
+ def ClassifyIssue(self):
+ # get the title for the issue
+ text_issue_list = []
+ text_phase_list = []
+ for k, v in sorted(self.summary_log_dict.items()):
+ # get the issue error
+ if v['text'].startswith(' * ERROR:') and v['text'].endswith(' phase):'):
+ issue_text = self.summary_log_dict[k + 1]['text']
+ if issue_text.startswith(' * ninja -v -j'):
+ issue_text = 'ninja failed'
+ if issue_text.startswith(' * (no error'):
+ issue_text = False
+ if issue_text:
+ text_issue_list.append(issue_text)
+ # get the phase error
+ if v['type'] == self.error_dict['phase'] and v['status'] == 'error':
+ text_phase_list.append(v['text'])
+ # if not get the first issue
+ if text_issue_list == []:
+ for k, v in self.summary_log_dict.items():
+ if v['type'] == 'issues':
+ text_issue_list.append(v['text'])
+ # if not get the first error
+ if text_phase_list == []:
+ for k, v in self.summary_log_dict.items():
+ if v['status'] == 'error':
+ text_phase_list.append(v['text'])
+ # add the issue error
+ if text_issue_list != []:
+ self.error_dict['title_issue'] = text_issue_list[0].replace('*', '').strip()
+ else:
+ self.error_dict['title_issue'] = 'title_issue : None'
+ # add the error line
+ if text_phase_list != []:
+ self.error_dict['title_phase'] = text_phase_list[0].replace('*', '').strip()
+ else:
+ self.error_dict['title_phase'] = 'title_phase : None'
+ #set the error title
+ self.error_dict['title'] = self.error_dict['title_issue'] + ' (' + self.error_dict['title_phase'] + ')'
+
+ @defer.inlineCallbacks
def run(self):
self.gentooci = self.master.namedServices['services'].namedServices['gentooci']
- summary_log_dict_list = self.getProperty('build_summery_output')['build_summery_output_json']
+ self.summary_log_dict = self.getProperty('build_summery_output')['summary_log_dict']
error = False
warning = False
self.summary_log_list = []
- log_hash = hashlib.sha256()
- for summary_log_dict in summary_log_dict_list:
- for k, v in sorted(summary_log_dict.items()):
- if v['status'] == 'error':
- error = True
- if v['status'] == 'warning':
- warning = True
- self.summary_log_list.append(v['text'])
- log_hash.update(v['text'].encode('utf-8'))
+ self.error_dict = {}
+ self.error_dict['hash'] = hashlib.sha256()
+ for k, v in sorted(self.summary_log_dict.items()):
+ self.summary_log_list.append(v['text'])
+ self.error_dict['hash'].update(v['text'].encode('utf-8'))
+ if v['status'] == 'warning':
+ warning = True
+ # check if the build did fail
+ if v['text'].startswith(' * ERROR:') and v['text'].endswith(' phase):'):
+ # get phase error
+ phase_error = v['text'].split(' (')[1].split(' phase')[0]
+ self.error_dict['phase'] = phase_error
+ error = True
# add build log
# add issue/bug/pr report
- self.setProperty("summary_log_list", self.summary_log_list,
'summary_log_list')
if error:
+ yield self.ClassifyIssue()
+ print(self.error_dict)
+ yield self.logIssue()
self.setProperty("status", 'failed', 'status')
+ self.setProperty("summary_log_list", self.summary_log_list,
'summary_log_list')
if warning:
self.setProperty("status", 'warning', 'status')
return SUCCESS
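
For reference, ClassifyIssue above composes the title as '<first issue line> (<first error line of the failing phase>)'. A minimal sketch of that flow over an invented summary_log_dict, mirroring the splits and the ninja special case in the diff:

    summary_log_dict = {
        2: {'text': 'foo.c:10: error: unknown type name u64', 'type': 'compile', 'status': 'error'},
        5: {'text': ' * ERROR: dev-util/foo-1.0::gentoo failed (compile phase):', 'type': 'issues', 'status': 'error'},
        6: {'text': ' * ninja -v -j4 failed', 'type': 'issues', 'status': 'error'},
    }
    error_dict = {'phase': 'compile'}

    text_issue_list = []
    text_phase_list = []
    for k, v in sorted(summary_log_dict.items()):
        # the line right after the ' * ERROR: ... phase):' marker names the issue
        if v['text'].startswith(' * ERROR:') and v['text'].endswith(' phase):'):
            issue_text = summary_log_dict[k + 1]['text']
            if issue_text.startswith(' * ninja -v -j'):
                issue_text = 'ninja failed'
            text_issue_list.append(issue_text)
        # error lines whose type matches the failing phase
        if v['type'] == error_dict['phase'] and v['status'] == 'error':
            text_phase_list.append(v['text'])

    error_dict['title_issue'] = text_issue_list[0].replace('*', '').strip()
    error_dict['title_phase'] = text_phase_list[0].replace('*', '').strip()
    error_dict['title'] = error_dict['title_issue'] + ' (' + error_dict['title_phase'] + ')'
    print(error_dict['title'])  # prints: ninja failed (foo.c:10: error: unknown type name u64)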