-
Notifications
You must be signed in to change notification settings - Fork 6.4k
Expand file tree
/
Copy pathredmine-upkeep.py
More file actions
executable file
·1558 lines (1320 loc) · 67.3 KB
/
redmine-upkeep.py
File metadata and controls
executable file
·1558 lines (1320 loc) · 67.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/python3
# Copyright 2025 IBM, Inc.
# SPDX-License-Identifier: LGPL-2.1-or-later
#
# This script was generated with the assistance of an AI language model.
#
# This is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2.1, as published by
# the Free Software Foundation. See file COPYING.
import argparse
import copy
import functools
import inspect
import itertools
import json
import logging
import os
import random
import re
import signal
import sys
import textwrap
import traceback
from datetime import datetime, timedelta, timezone
from getpass import getuser
from os.path import expanduser
import git # https://github.com/gitpython-developers/gitpython
import redminelib # https://pypi.org/project/python-redmine/
import requests
GITHUB_TOKEN = None
try:
with open(expanduser("~/.github_token")) as f:
GITHUB_TOKEN = f.read().strip()
except FileNotFoundError:
pass
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN", GITHUB_TOKEN)
GITHUB_ACTIONS = os.getenv("GITHUB_ACTIONS") == 'true'
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL", "https://github.com")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "ceph/ceph")
GITHUB_RUN_ID = os.getenv("GITHUB_RUN_ID", "nil")
GITHUB_ACTION_LOG = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/actions/runs/{GITHUB_RUN_ID}"
GITHUB_API_ENDPOINT = f"https://api.github.com/repos/{GITHUB_REPOSITORY}"
REDMINE_CUSTOM_FIELD_ID_BACKPORT = 2
REDMINE_CUSTOM_FIELD_ID_RELEASE = 16
REDMINE_CUSTOM_FIELD_ID_PULL_REQUEST_ID = 21
REDMINE_CUSTOM_FIELD_ID_TAGS = 31
REDMINE_CUSTOM_FIELD_ID_MERGE_COMMIT = 33
REDMINE_CUSTOM_FIELD_ID_FIXED_IN = 34
REDMINE_CUSTOM_FIELD_ID_RELEASED_IN = 35
REDMINE_CUSTOM_FIELD_ID_UPKEEP_TIMESTAMP = 37
# Open
REDMINE_STATUS_ID_NEW = 1
REDMINE_STATUS_ID_INPROGRESS = 2
REDMINE_STATUS_ID_TRIAGED = 18
REDMINE_STATUS_ID_NEEDINFO = 11
REDMINE_STATUS_ID_FIX_UNDER_REVIEW = 13
REDMINE_STATUS_ID_PENDING_BACKPORT = 14
# Closed
REDMINE_STATUS_ID_RESOLVED = 3
REDMINE_STATUS_ID_CLOSED = 5
REDMINE_STATUS_ID_REJECTED = 6
REDMINE_STATUS_ID_WONTFIX = 8
REDMINE_STATUS_ID_CANTREPRODUCE = 9
REDMINE_STATUS_ID_DUPLICATE = 10
REDMINE_STATUS_ID_WONTFIX_EOL = 19
REDMINE_TRACKER_ID_BACKPORT = 9
REDMINE_STATUS_ID_PENDING_BACKPORT = 14
REDMINE_STATUS_ID_RESOLVED = 3
REDMINE_ENDPOINT = "https://tracker.ceph.com"
REDMINE_API_KEY = None
try:
with open(expanduser("~/.redmine_key")) as f:
REDMINE_API_KEY = f.read().strip()
except FileNotFoundError:
pass
REDMINE_API_KEY = os.getenv("REDMINE_API_KEY", REDMINE_API_KEY)
# Global flag for GitHub Actions output format
IS_GITHUB_ACTION = os.getenv("GITHUB_ACTION") is not None
class IssueLoggerAdapter(logging.LoggerAdapter):
"""
A logging adapter that adds issue ID context to log messages.
For GitHub Actions, it also handles grouping and error annotations.
"""
def process(self, msg, kwargs):
issue_id = int(self.extra['issue_id'])
transform_name = self.extra['current_transform']
if IS_GITHUB_ACTION:
if transform_name:
msg = f"[{transform_name}] {msg}"
# Handle error annotations
if self.logger.level == logging.ERROR or self.logger.level == logging.CRITICAL:
return f"::error::{msg}", kwargs
else:
if transform_name:
msg = f"[Issue #{issue_id} => {transform_name}] {msg}"
else:
msg = f"[Issue #{issue_id}] {msg}"
return msg, kwargs
log = logging.getLogger(__name__)
log_stream = logging.StreamHandler()
log.addHandler(log_stream)
log.setLevel(logging.INFO)
GITHUB_HEADERS = {
"Authorization": f"Bearer {GITHUB_TOKEN}",
"X-GitHub-Api-Version": "2022-11-28",
}
def post_github_comment(session, pr_id, body):
"""Helper to post a comment to a GitHub PR."""
if RedmineUpkeep.GITHUB_RATE_LIMITED:
log.warning("GitHub API rate limit hit previously. Skipping posting comment.")
return False
log.info(f"Posting a comment to GitHub PR #{pr_id}.")
endpoint = f"{GITHUB_API_ENDPOINT}/issues/{pr_id}/comments"
payload = {'body': body}
try:
response = session.post(endpoint, headers=GITHUB_HEADERS, json=payload)
response.raise_for_status()
log.info(f"Successfully posted comment to PR #{pr_id}.")
return True
except requests.exceptions.HTTPError as e:
if e.response.status_code == 403 and "rate limit exceeded" in e.response.text:
log.error(f"GitHub API rate limit exceeded when commenting on PR #{pr_id}.")
RedmineUpkeep.GITHUB_RATE_LIMITED = True
else:
log.error(f"GitHub API error posting comment to PR #{pr_id}: {e} - Response: {e.response.text}")
return False
except requests.exceptions.RequestException as e:
log.error(f"Network or request error posting comment to GitHub PR #{pr_id}: {e}")
return False
class UpkeepException(Exception):
def __init__(self, issue_update, exception=None, traceback=None):
self.issue_update = issue_update
self.exception = exception
self.traceback = traceback
def comment(self):
raise NotImplementedError()
class PRInvalidException(UpkeepException):
def __init__(self, issue_update, pr_id, **kwargs):
super().__init__(issue_update, **kwargs)
self.pr_id = pr_id
def __str__(self):
return "PR is invalid"
def comment(self):
return f"""
Issue #{self.issue_update.issue.id} referenced "PR #{self.pr_id}":https://github.com/ceph/ceph/pull/{self.pr_id} is invalid:
<pre>
{self.traceback.strip()}
</pre>
"""
class PRClosedException(UpkeepException):
def __init__(self, issue_update, pr_id, **kwargs):
super().__init__(issue_update, **kwargs)
self.pr_id = pr_id
def __str__(self):
return "PR is closed without merge"
def comment(self):
return f"""
Issue #{self.issue_update.issue.id} with status {self.issue_update.issue.status.name} references "PR #{self.pr_id}":https://github.com/ceph/ceph/pull/{self.pr_id} which is closed but not merged.
Possible resolutions:
* **If the PR id is wrong, please update it.**
* **If the issue was fixed through other means (e.g. in the kernel or Rook), please remove the PR id.**
* **If the PR is already merged through other means (erroneous backport), mark the issue state as "Rejected".**
* **Do nothing. This script will ignore this issue while the upkeep-failed tag is applied.**
"""
class RedmineUpdateException(UpkeepException):
def __init__(self, issue_update, **kwargs):
super().__init__(issue_update, **kwargs)
def __str__(self):
return "Update to Redmine failed"
def comment(self):
return f"""
Redmine Update failed:
<pre>
{self.traceback.strip()}
</pre>
"""
class IssueUpdate:
def __init__(self, issue, github_session, git_repo):
self.issue = issue
self.update_payload = {}
self.github_session = github_session
self.git_repo = git_repo
self._pr_cache = {}
self.has_changes = False # New flag to track if changes are made
self.transform = None
logger_extra = {
'issue_id': issue.id,
'current_transform': None,
}
self.logger = IssueLoggerAdapter(logging.getLogger(__name__), extra=logger_extra)
def set_transform(self, transform):
self.transform = transform
self.logger.extra['current_transform'] = transform
def get_raw_custom_field(self, field_id):
cf = self.issue.custom_fields.get(field_id)
try:
return cf.value if cf else None
except redminelib.exceptions.ResourceAttrError:
return None
def get_custom_field(self, field_id):
""" Get the custom field, first from update_payload otherwise issue """
custom_fields = self.update_payload.setdefault("custom_fields", [])
for field in custom_fields:
if field.get('id') == field_id:
return field['value']
return self.get_raw_custom_field(field_id)
def add_or_update_custom_field(self, field_id, value):
"""Helper to add or update a custom field in the payload."""
custom_fields = self.update_payload.setdefault("custom_fields", [])
found = False
current_value = self.get_custom_field(field_id) # Get current value from issue or payload
if current_value == value:
# Value is already the same, no change needed
self.logger.debug(f"Field {field_id} is already set to '{value}'. No update needed.")
return False
self.logger.debug(f"Updating custom field {field_id} from '{current_value}' to '{value}'.")
for field in custom_fields:
if field.get('id') == field_id:
field['value'] = value
found = True
break
if not found:
custom_fields.append({'id': field_id, 'value': value})
self.has_changes = True # Mark that a change has been made
return True
def change_field(self, field, value):
self.logger.debug(f"Changing field '{field}' to '{value}'.")
if self.update_payload.get(field) == value:
return False
else:
self.update_payload[field] = value
self.has_changes = True
return True
def add_tag(self, tag):
current_tags_str = self.get_custom_field(REDMINE_CUSTOM_FIELD_ID_TAGS)
current_tags = []
if current_tags_str:
current_tags = [current_tag.strip() for current_tag in current_tags_str.split(',') if current_tag.strip()]
if tag in current_tags:
self.logger.debug(f"tag '{tag}' already in tags")
return
else:
current_tags.append(tag)
self.logger.info(f"Adding '{tag}' tag.")
new_tags = ", ".join(current_tags)
self.add_or_update_custom_field(REDMINE_CUSTOM_FIELD_ID_TAGS, new_tags)
def has_open_subtasks(self):
"""
Checks if the issue has any open subtasks.
Returns True if open subtasks exist, False otherwise.
"""
self.logger.debug("Checking for open subtasks.")
try:
if not hasattr(self.issue, 'children'):
self.logger.debug("Issue has no subtasks.")
return False
open_subtasks_info = []
for subtask in self.issue.children:
subtask.refresh() # fetch status
if not subtask.status.is_closed:
open_subtasks_info.append(f"#{subtask.id} (status: {subtask.status.name})")
if open_subtasks_info:
self.logger.info(f"Cannot change status. Issue has open subtasks: {', '.join(open_subtasks_info)}.")
return True
else:
self.logger.debug("All subtasks are closed.")
return False
except redminelib.exceptions.ResourceAttrError:
self.logger.debug("Issue has no subtasks (ResourceAttrError on 'children' attribute).")
return False
def get_update_payload(self, suppress_mail=True): # Added suppress_mail parameter
today = datetime.now(timezone.utc).isoformat(timespec='seconds')
self.add_or_update_custom_field(REDMINE_CUSTOM_FIELD_ID_UPKEEP_TIMESTAMP, today)
current_tags_str = self.get_custom_field(REDMINE_CUSTOM_FIELD_ID_TAGS)
current_tags = []
if current_tags_str:
current_tags = [tag.strip() for tag in current_tags_str.split(',') if tag.strip()]
if "upkeep-failed" in current_tags:
self.logger.info(f"'upkeep-failed' tag found in '{current_tags_str}'. Removing for update.")
current_tags.remove("upkeep-failed")
self.add_or_update_custom_field(REDMINE_CUSTOM_FIELD_ID_TAGS, ", ".join(current_tags))
payload = {
'issue': self.update_payload,
}
if suppress_mail:
payload['suppress_mail'] = "1"
return payload
def get_pr_id(self):
self.logger.debug("Attempting to fetch PR data.")
pr_id = self.get_custom_field(REDMINE_CUSTOM_FIELD_ID_PULL_REQUEST_ID)
if not pr_id:
self.logger.warning("No PR ID found in 'Pull Request ID'.")
# If not found in custom field, try to find it in the issue description
if self.issue.tracker.id == REDMINE_TRACKER_ID_BACKPORT and self.issue.description:
self.logger.debug("Checking issue description for PR link.")
match = re.search(r'^https://github.com/ceph/ceph/pull/(\d+)$', self.issue.description.strip())
if match:
pr_id = match.group(1)
self.logger.info("Found PR ID #%s in issue description.", pr_id)
try:
return int(pr_id)
except (ValueError, TypeError): # Handle None or non-integer values
self.logger.warning(f"Invalid or missing PR ID '{pr_id}'. Cannot fetch PR.")
return None
def fetch_pr(self):
pr_id = self.get_pr_id()
# Check if rate limit has been hit globally
if RedmineUpkeep.GITHUB_RATE_LIMITED:
self.logger.warning("GitHub API rate limit hit previously. Skipping PR fetch.")
return None
if pr_id in self._pr_cache:
self.logger.debug("Found PR #%d in cache.", pr_id)
return self._pr_cache[pr_id]
self.logger.info("Fetching PR #%d from GitHub API.", pr_id)
endpoint = f"{GITHUB_API_ENDPOINT}/pulls/{pr_id}"
params = {}
try:
response = self.github_session.get(endpoint, headers=GITHUB_HEADERS, params=params)
response.raise_for_status()
pr_data = response.json()
self.logger.debug("PR #%d json:\n%s", pr_id, pr_data)
# If we got the PR number through other means, update the field:
self.add_or_update_custom_field(REDMINE_CUSTOM_FIELD_ID_PULL_REQUEST_ID, str(pr_id))
self._pr_cache[pr_id] = pr_data
return pr_data
except requests.exceptions.HTTPError as e:
if response.status_code == 404:
self.logger.warning(f"GitHub PR #{pr_id} not found (404).")
raise PRInvalidException(self, pr_id, exception=e, traceback=traceback.format_exc())
elif response.status_code == 403 and "rate limit exceeded" in response.text:
self.logger.error(f"GitHub API rate limit exceeded for PR #{pr_id}. Further GitHub API calls will be skipped.")
RedmineUpkeep.GITHUB_RATE_LIMITED = True # Set the global flag
else:
self.logger.error(f"GitHub API error for PR #{pr_id}: {e} - Response: {response.text}")
return None
except requests.exceptions.RequestException as e:
self.logger.error(f"Network or request error fetching GitHub PR #{pr_id}: {e}")
return None
def get_released(self, commit):
"""
Determines the release version a commit is part of.
"""
self.logger.debug(f"Checking release status for commit {commit}")
try:
release = self.git_repo.git.describe('--contains', '--match', 'v*.2.*', commit)
self.logger.info("Commit %s is contained in git describe output: %s", commit, release)
patt = r"v(\d+)\.(\d+)\.(\d+)"
match = re.search(patt, release)
if not match:
self.logger.warning("Release '%s' is in invalid form, pattern mismatch.", release)
return None
if int(match.group(2)) != 2:
self.logger.warning("Release '%s' is not a valid release (minor version not 2)", release)
return None
self.logger.info("Found valid release: %s", release)
return release
except git.exc.GitCommandError:
self.logger.info("Commit %s not found in any matching release tag.", commit)
return None
class RedmineUpkeep:
# Class-level flag to track GitHub API rate limit status
GITHUB_RATE_LIMITED = False
MAX_UPKEEP_FAILURES = 5
class Filter:
PRIORITY = 1000
NAME = "undefined"
@staticmethod
def get_filters():
raise NotImplementedError("NI")
@staticmethod
def requires_github_api():
raise NotImplementedError("NI")
def transformation(priority):
"""A decorator to assign a priority to a transformation method."""
def decorator(func):
func._priority = priority
@functools.wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
return decorator
def __init__(self, args):
self.G = git.Repo(args.git)
self.R = self._redmine_connect()
self.limit = args.limit
self.session = requests.Session()
self.issue_id = args.issue
self.revision_range = args.revision_range
self.pull_request_id = args.pull_request
self.merge_commit = args.merge_commit
self.remote_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
self.upkeep_failures = 0
self.issues_inspected = 0
self.issues_modified = 0
self.modifications_made = {} # Dictionary to store what transformations were applied
self.project_id = None
try:
log.info("Fetching 'Ceph' project ID from Redmine.")
project = self.R.project.get("Ceph")
self.project_id = project['id']
log.info(f"Found 'Ceph' project with ID: {self.project_id}")
except redminelib.exceptions.ResourceAttrError:
log.error("Project 'Ceph' not found in Redmine. Cannot filter issues by project.")
sys.exit(1)
# Discover transformation methods based on prefix
self.transform_methods = []
for name in dir(self):
if name.startswith('_transform_') and callable(getattr(self, name)):
self.transform_methods.append(getattr(self, name))
log.debug(f"Discovered transformation methods: {[m.__name__ for m in self.transform_methods]}")
# Sort transformations for consistent order if needed, e.g., by name
self.transform_methods.sort(key=lambda f: f._priority, reverse=True)
log.debug(f"Sorted transformation methods: {[m.__name__ for m in self.transform_methods]}")
# Discover filters based on prefix
self.filters = []
for name, v in RedmineUpkeep.__dict__.items():
if inspect.isclass(v) and issubclass(v, self.Filter) and v != self.Filter:
log.debug("discovered filter %s", v.NAME)
self.filters.append(v)
random.shuffle(self.filters) # to shuffle equivalent PRIORITY
self.filters.sort(key = lambda filter: filter.PRIORITY, reverse=True)
log.debug(f"Discovered filters: {[f.__name__ for f in self.filters]}")
self._fetch_heads()
def _fetch_heads(self):
log.info(f"Fetching remote heads from {self.remote_url}.")
self.remote_heads = []
for line in self.G.git.ls_remote('--heads', self.remote_url).split('\n'):
(sha, name) = line.split('\t')
final_component = name.rsplit('/', 1)[-1]
if not final_component.isalpha():
log.debug(f"Head {name} is not alphabetic, skipping.")
continue
log.debug(f"Adding Head {name} with commit {sha}.")
try:
self.remote_heads.append(self.G.commit(sha))
except ValueError as e:
log.debug(f"Could not load commit {sha}, attempting to fetch.")
self.G.git.fetch(self.remote_url, sha)
try:
self.remote_heads.append(self.G.commit(sha))
except ValueError as e:
log.error(f"Error: Could not fetch commit {sha}")
continue
def _redmine_connect(self):
log.info("Connecting to %s", REDMINE_ENDPOINT)
R = redminelib.Redmine(REDMINE_ENDPOINT, key=REDMINE_API_KEY)
log.info("Successfully connected to Redmine.")
return R
class FilterMergedBug1(Filter):
"""
Filter issues with erroneous merge commits.
"""
PRIORITY = 1100
NAME = "MergedBug"
@staticmethod
def get_filters():
filter_set = {
f"cf_{REDMINE_CUSTOM_FIELD_ID_PULL_REQUEST_ID}": '>=0',
f"cf_{REDMINE_CUSTOM_FIELD_ID_RELEASED_IN}": '~^',
}
yield {**filter_set, **{f"cf_{REDMINE_CUSTOM_FIELD_ID_TAGS}": "!*"}}
yield {**filter_set, **{f"cf_{REDMINE_CUSTOM_FIELD_ID_TAGS}": "!~upkeep-bad-parentage"}}
@staticmethod
def requires_github_api():
return True
@transformation(10000)
def _transform_clear_stale_merge_commit(self, issue_update):
"""
Transformation: If the "Pull Request ID" was changed after the "Merge
Commit SHA" was set, this transformation clears the merge commit and
related "Fixed In" field, as they are now considered stale.
"""
issue_update.logger.debug("Running _transform_clear_stale_merge_commit")
last_pr_id_change = None
last_merge_commit_set = None
# Journals are ordered oldest to newest, so reverse to find the latest changes first.
for journal in reversed(issue_update.issue.journals):
if last_pr_id_change and last_merge_commit_set:
break
for detail in journal.details:
if detail.get('property') == 'cf':
try:
field_id = int(detail.get('name'))
if field_id == REDMINE_CUSTOM_FIELD_ID_PULL_REQUEST_ID and not last_pr_id_change:
last_pr_id_change = journal.id
issue_update.logger.debug(f"last_pr_id_change = {last_pr_id_change}")
elif field_id == REDMINE_CUSTOM_FIELD_ID_MERGE_COMMIT and not last_merge_commit_set:
# We only care when the commit was set to a non-empty value.
if detail.get('new_value'):
last_merge_commit_set = journal.id
issue_update.logger.debug(f"last_merge_commit_set = {last_merge_commit_set}")
except (ValueError, TypeError):
continue # Ignore if 'name' is not a valid integer field ID
if not last_pr_id_change or not last_merge_commit_set:
issue_update.logger.debug("Did not find journal entries for both PR ID and Merge Commit changes. No action taken.")
return False
issue_update.logger.debug(f"Last PR ID change: {last_pr_id_change}, Last Merge Commit set: {last_merge_commit_set}")
if last_pr_id_change > last_merge_commit_set:
issue_update.logger.info("The 'Pull Request ID' was changed after the 'Merge Commit SHA' was set. Clearing the stale merge commit.")
# Clear the merge commit field and also the 'Fixed In' field which depends on it.
changed = False
changed |= issue_update.add_or_update_custom_field(REDMINE_CUSTOM_FIELD_ID_MERGE_COMMIT, "")
changed |= issue_update.add_or_update_custom_field(REDMINE_CUSTOM_FIELD_ID_FIXED_IN, "")
changed |= issue_update.add_or_update_custom_field(REDMINE_CUSTOM_FIELD_ID_RELEASED_IN, "")
return changed
return False
class FilterMerged(Filter):
"""
Filter issues that are closed but no merge commit is set.
"""
PRIORITY = 1000
NAME = "Merged"
@staticmethod
def get_filters():
statuses = [
REDMINE_STATUS_ID_PENDING_BACKPORT,
REDMINE_STATUS_ID_RESOLVED,
]
for status in statuses:
yield {
f"cf_{REDMINE_CUSTOM_FIELD_ID_PULL_REQUEST_ID}": '>=0',
f"cf_{REDMINE_CUSTOM_FIELD_ID_MERGE_COMMIT}": '!*',
"status_id": str(status),
}
@staticmethod
def requires_github_api():
return True
def _find_merge_commit(self, issue_update, HEAD, BASE):
"""
Find merge commit in revrange.
"""
# N.B. At the time of writing, using "-1" to limit output breaks the command (returns nothing always).
try:
revrange = f"{HEAD.hexsha}^..{BASE.hexsha}"
merge_commit = self.G.git.log(
'--first-parent',
'--merges',
'--reverse',
f"--ancestry-path={HEAD.hexsha}",
'--pretty=%H',
revrange
).splitlines()
if len(merge_commit) == 0:
issue_update.logger.debug(f"No commit found in first-parent lineage.")
else:
merge = self.G.commit(merge_commit[0])
m = self._confirm_merge_commit(issue_update, merge, HEAD, BASE)
if m:
return m.hexsha
except git.exc.GitCommandError as e:
issue_update.logger.error(f"Error: Could not find merge commit")
# Try non-first parent lineage.
try:
revrange = f"{HEAD.hexsha}^..{BASE.hexsha}"
merge_commit = self.G.git.log(
'--merges',
'--reverse',
f"--ancestry-path={HEAD.hexsha}",
'--pretty=%H',
revrange
).splitlines()
if len(merge_commit) == 0:
issue_update.logger.debug(f"No commit found in first-parent lineage.")
else:
merge = self.G.commit(merge_commit[0])
m = self._confirm_merge_commit(issue_update, merge, HEAD, BASE)
if m:
return m.hexsha
except git.exc.GitCommandError as e:
issue_update.logger.error(f"Error: Could not find merge commit")
def _confirm_merge_commit(self, issue_update, merge, HEAD, BASE):
"""
Confirm merge commit is correct.
"""
issue_update.logger.debug(f"Confirming merge commit {merge}")
if len(merge.parents) <= 1:
# not a merge commit
return None
second_parent = merge.parents[1]
if second_parent == HEAD:
issue_update.logger.debug(f"Found valid merge commit against {BASE}: {merge}")
return merge
else:
issue_update.logger.warning(f"Merge commit second parent is not {HEAD}. Ignoring this merge.")
return None
def _get_merge_commit(self, issue_update):
"""
Figure out the merge commit from the head reference of the PR.
"""
pr_id = issue_update.get_pr_id()
ref = f"refs/pull/{pr_id}/head"
try:
self.G.git.fetch(self.remote_url, ref)
HEAD = self.G.commit('FETCH_HEAD')
issue_update.logger.info(f"Pull Request head is {HEAD}.")
except git.exc.GitCommandError as e:
issue_update.logger.error(f"Error: Could not fetch reference '{ref}' from {self.remote_url}.")
issue_update.logger.error(f"Git Error: {e}")
return None
for BASE in self.remote_heads:
issue_update.logger.info(f"Examining remote branch HEAD {BASE}.")
m = self._find_merge_commit(issue_update, HEAD, BASE)
if m:
return m
# Fall back to API query
pr = issue_update.fetch_pr()
if not pr:
issue_update.logger.info("No PR data found. Skipping merge check.")
return None
merged = pr.get('merged')
if not merged:
if pr.get('state') == "closed":
raise PRClosedException(issue_update, pr_id)
issue_update.logger.info(f"PR #{pr_id} is not merged. Skipping merge check.")
return None
# N.B. merge_commit_sha is sometimes wrong because of branch renames.
base = pr.get('base')
if not base:
issue_update.logger.info(f"PR #{pr_id} is merged but has no base?")
return None
issue_update.logger.info(f"PR #{pr_id} base is {base['ref']}")
try:
BASE = self.G.commit(base['sha'])
except git.exc.GitCommandError as e:
issue_update.logger.debug(f"Fetching {base['ref']}")
self.G.git.fetch(self.remote_url, base['ref'])
BASE = self.G.commit('FETCH_HEAD')
m = self._find_merge_commit(issue_update, HEAD, BASE)
if m:
return m
issue_update.logger.info(f"Could not find a merge commit for PR #{pr_id}")
return None
@transformation(1000)
def _transform_merged(self, issue_update):
"""
Transformation: Checks if a PR associated with an issue has been merged
and updates the merge commit and fixed_in fields in the payload.
"""
issue_update.logger.debug("Running _transform_merged")
commit = issue_update.get_custom_field(REDMINE_CUSTOM_FIELD_ID_MERGE_COMMIT)
# Fixing bug: GitHub got confused by branch renames and gives the wrong
# merge commit. This is detectable by a "Release In" setting like:
# v15.2.0~1225^2. Note: sometimes this is self-inflicted because hotfix
# branches can mess up the first-parent line of succession.
released_in = issue_update.get_custom_field(REDMINE_CUSTOM_FIELD_ID_RELEASED_IN)
if released_in and "^" in released_in:
issue_update.logger.warning(f"Detected GitHub bug where past merge commit is wrong: {commit}")
commit = None
if not commit:
issue_update.logger.info("Merge commit not set, will check PR status.")
commit = self._get_merge_commit(issue_update)
if commit:
issue_update.logger.info(f"Merge commit is {commit}")
issue_update.add_or_update_custom_field(REDMINE_CUSTOM_FIELD_ID_MERGE_COMMIT, commit)
else:
issue_update.logger.info(f"No merge commit is known")
return False
else:
issue_update.logger.info(f"Merge commit {commit} is already set. Skipping PR fetch.")
try:
issue_update.logger.info(f"Running git describe for commit {commit}")
ref = issue_update.git_repo.git.describe('--always', '--abbrev=10', commit)
issue_update.logger.info(f"Git describe output: {ref}")
changed = issue_update.add_or_update_custom_field(REDMINE_CUSTOM_FIELD_ID_FIXED_IN, ref)
return changed
except git.exc.GitCommandError as e:
issue_update.logger.warning(f"Could not get git describe for commit {commit}: {e}")
return False
@transformation(10)
def _transform_backport_resolved(self, issue_update):
"""
Transformation: Changes backport trackers to "Resolved" if the associated PR is merged.
"""
issue_update.logger.debug("Running _transform_backport_resolved")
# Check if it's a backport tracker
if issue_update.issue.tracker.id != REDMINE_TRACKER_ID_BACKPORT:
issue_update.logger.info("Not a backport tracker. Skipping backport resolved check.")
return False
issue_update.logger.info("Issue is a backport tracker.")
commit = issue_update.get_custom_field(REDMINE_CUSTOM_FIELD_ID_MERGE_COMMIT)
if not commit:
issue_update.logger.info(f"No merge commit set, skipping.")
return False
# If PR is merged and it's a backport tracker with 'Pending Backport' status, update to 'Resolved'
if issue_update.issue.status.id != REDMINE_STATUS_ID_RESOLVED:
if issue_update.has_open_subtasks():
return False
issue_update.logger.info(f"Issue status is '{issue_update.issue.status.name}', which is not 'Resolved'.")
issue_update.logger.info("Updating status to 'Resolved' because its PR is merged.")
changed = issue_update.change_field('status_id', REDMINE_STATUS_ID_RESOLVED)
return changed
else:
issue_update.logger.info("Issue is already in 'Resolved' status. No change needed.")
return False
class FilterReleased(Filter):
"""
Filter for issues that are merged but not yet released.
"""
PRIORITY = 10
NAME = "Released"
@staticmethod
def get_filters():
yield {
f"cf_{REDMINE_CUSTOM_FIELD_ID_MERGE_COMMIT}": '*',
f"cf_{REDMINE_CUSTOM_FIELD_ID_RELEASED_IN}": '!*',
"status_id": "*",
}
@staticmethod
def requires_github_api():
return False
@transformation(10)
def _transform_released(self, issue_update):
"""
Transformation: Checks if a merged issue has been released and updates
the 'Released In' field in the payload.
"""
issue_update.logger.debug("Running _transform_released")
commit = issue_update.get_custom_field(REDMINE_CUSTOM_FIELD_ID_MERGE_COMMIT)
if not commit:
issue_update.logger.info("No merge commit set. Skipping released check.")
return False
issue_update.logger.info(f"Checking release status for merge commit: {commit}")
released_in = issue_update.get_custom_field(REDMINE_CUSTOM_FIELD_ID_RELEASED_IN)
issue_update.logger.info(f"'Released In' currently '{released_in}'")
release = issue_update.get_released(commit)
if release and "^" in release:
issue_update.logger.warning(f"Detected parentage linkage issue (first parent chain broken) by hotfix: {release}")
issue_update.add_tag('upkeep-bad-parentage')
if release:
issue_update.logger.info(f"Commit {commit} is part of release {release}.")
changed = issue_update.add_or_update_custom_field(REDMINE_CUSTOM_FIELD_ID_RELEASED_IN, release)
return changed
elif released_in:
issue_update.logger.error(f"'Released In' would be cleared (currently: '{released_in}')??")
else:
issue_update.logger.info(f"Commit {commit} not yet in a release. 'Released In' field will not be updated.")
return False
class FilterPendingBackport(Filter):
"""
Filter for issues that are in 'Pending Backport' status. The
transformation will then check if they are non-backport trackers and if
all their 'Copied to' backports are resolved.
"""
PRIORITY = 10
NAME = "Pending Backport"
@staticmethod
def get_filters():
yield {
f"cf_{REDMINE_CUSTOM_FIELD_ID_MERGE_COMMIT}": '*',
"status_id": str(REDMINE_STATUS_ID_PENDING_BACKPORT),
}
@staticmethod
def requires_github_api():
return False
@transformation(10)
def _transform_resolve_main_issue_from_backports(self, issue_update):
"""
Transformation: Resolves a main issue if all its "Copied to" backport
issues are resolved and correctly tagged with the expected backport
releases.
"""
issue_update.logger.debug("Running _transform_resolve_main_issue_from_backports")
if issue_update.issue.tracker.id == REDMINE_TRACKER_ID_BACKPORT:
issue_update.logger.info("Is a backport tracker. Skipping this transformation.")
return False
if issue_update.issue.status.id != REDMINE_STATUS_ID_PENDING_BACKPORT:
issue_update.logger.info(f"Not in 'Pending Backport' status ({issue_update.issue.status.name}). Skipping.")
return False
if issue_update.has_open_subtasks():
return False
issue_update.logger.info("Issue is a main tracker in 'Pending Backport' status. Checking related backports.")
expected_backport_releases_str = issue_update.get_custom_field(REDMINE_CUSTOM_FIELD_ID_BACKPORT)
if expected_backport_releases_str:
expected_backport_releases = set(
rel.strip() for rel in expected_backport_releases_str.split(',') if rel.strip()
)
issue_update.logger.info(f"Expecting backports for releases: {expected_backport_releases}")
else:
expected_backport_releases = set()
issue_update.logger.warning(f"No backport releases specified in custom field {REDMINE_CUSTOM_FIELD_ID_BACKPORT}.")
copied_to_backports_ids = []
for relation in issue_update.issue.relations:
if relation.relation_type == 'copied_to':
copied_to_backports_ids.append(relation.issue_to_id)
issue_update.logger.info(f"Found 'Copied to' issue IDs: {copied_to_backports_ids}")
if not copied_to_backports_ids and not expected_backport_releases:
# If no backports are expected and no 'Copied to' issues exist,
# then the main issue can be resolved.
issue_update.logger.info("No backports expected and no 'Copied to' issues found. Updating main issue status to 'Resolved'.")
changed = issue_update.change_field('status_id', REDMINE_STATUS_ID_RESOLVED)
return changed
elif not copied_to_backports_ids and expected_backport_releases:
# If backports are expected but no 'Copied to' issues exist,
# the main issue cannot be resolved.
issue_update.logger.info(f"Backports expected ({', '.join(expected_backport_releases)}) but no 'Copied to' issues found. Main issue cannot be resolved.")
return False
resolved_and_matched_backports = set()
all_backports_resolved_and_matched = True
for backport_id in copied_to_backports_ids:
try:
issue_update.logger.info(f"Checking status of backport issue #{backport_id}")
backport_issue = self.R.issue.get(backport_id)
# Ensure the related issue is actually a backport tracker
if backport_issue.tracker.id != REDMINE_TRACKER_ID_BACKPORT:
issue_update.logger.warning(f"Related issue #{backport_id} is 'Copied to' but not a backport tracker. Ignoring it for resolution check.")
continue
# Check backport issue's release field
cf_backport_release = backport_issue.custom_fields.get(REDMINE_CUSTOM_FIELD_ID_RELEASE)
if not cf_backport_release:
issue_update.logger.info(f"Backport issue #{backport_id} has no release specified in custom field {REDMINE_CUSTOM_FIELD_ID_RELEASE}. Cannot resolve main issue yet.")
all_backports_resolved_and_matched = False
break
backport_release = cf_backport_release.value
issue_update.logger.debug(f"Backport issue #{backport_id} is for release '{backport_release}'.")
if backport_release not in expected_backport_releases:
issue_update.logger.info(f"Backport issue #{backport_id} has release '{backport_release}' which is not in expected backports ({', '.join(expected_backport_releases)}). Main issue cannot be resolved yet.")
all_backports_resolved_and_matched = False
break
if backport_issue.status.id == REDMINE_STATUS_ID_RESOLVED:
issue_update.logger.info(f"Backport issue #{backport_id} is resolved and matches expected release '{backport_release}'.")
resolved_and_matched_backports.add(backport_release)
elif backport_issue.status.id == REDMINE_STATUS_ID_REJECTED:
issue_update.logger.info(f"Backport issue #{backport_id} is rejected and matches expected release '{backport_release}'.")
resolved_and_matched_backports.add(backport_release)
else:
issue_update.logger.info(f"Backport issue #{backport_id} is not resolved or rejected (status: {backport_issue.status.name}). Main issue cannot be resolved yet.")
all_backports_resolved_and_matched = False
break
except redminelib.exceptions.ResourceNotFoundError:
issue_update.logger.warning(f"Related backport issue #{backport_id} not found. Cannot confirm all backports resolved.")
all_backports_resolved_and_matched = False # Treat as not resolved if we can't find it
break
except redminelib.exceptions.ResourceAttrError:
issue_update.logger.warning(f"Related backport issue #{backport_id} not accessible. Cannot confirm all backports resolved.")
all_backports_resolved_and_matched = False # Treat as not resolved if we can't find it
break
# Final check: all backports found, resolved, correctly tagged, and all expected backports are covered
if all_backports_resolved_and_matched and expected_backport_releases == resolved_and_matched_backports:
issue_update.logger.info(f"All expected backport releases ({', '.join(expected_backport_releases)}) have corresponding resolved and correctly tagged 'Copied to' issues. Updating main issue status to 'Resolved'.")
issue_update.change_field('status_id', REDMINE_STATUS_ID_RESOLVED)
return True
else:
issue_update.logger.info("Not all expected backports are resolved and/or correctly tagged. Main issue status remains 'Pending Backport'.")
issue_update.logger.info(f"Expected backports: {expected_backport_releases}")
issue_update.logger.info(f"Resolved and matched backports found: {resolved_and_matched_backports}")
return False
class FilterUnresolvedMerged(Filter):