mirror of https://github.com/apache/kafka.git

MINOR: Cleanups in the release scripts (#20308)

A bunch of cleanups in the release scripts.

Reviewers: Luke Chen <showuon@gmail.com>

parent 5bbc421a13 · commit 30ffd42b26
@@ -25,7 +25,7 @@ pip install -r requirements.txt
 
 # Usage
 
-To start a release, first activate the virutalenv, and then run
+To start a release, first activate the virtualenv, and then run
 the release script.
 
 ```
@@ -136,4 +136,3 @@ def push_ref(ref, remote=push_remote_name, **kwargs):
 def merge_ref(ref, **kwargs):
     __defaults(kwargs)
     cmd(f"Merging ref {ref}", f"git merge {ref}")
-
@@ -32,7 +32,7 @@ def key_exists(key_id):
     """
     try:
         execute(f"gpg --list-keys {key_id}")
-    except Exception as e:
+    except Exception:
         return False
     return True
 
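The cleanup drops the unused exception variable in key_exists, which simply treats a non-zero exit from `gpg --list-keys` as "key not present". A minimal standalone sketch of the same probe, with subprocess.run standing in for the script's execute() helper (not shown in this diff):

```python
# Minimal sketch: subprocess.run stands in for the script's execute() helper,
# which is not part of this diff.
import subprocess

def key_exists(key_id: str) -> bool:
    """True if gpg knows the key, judged by the exit status of gpg --list-keys."""
    try:
        subprocess.run(["gpg", "--list-keys", key_id],
                       check=True, capture_output=True)
    except subprocess.CalledProcessError:
        return False
    return True
```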
@@ -70,13 +70,13 @@ def valid_passphrase(key_id, passphrase):
     with tempfile.TemporaryDirectory() as tmpdir:
         content = __file__
         signature = tmpdir + '/sig.asc'
-        # if the agent is running, the suplied passphrase may be ignored
+        # if the agent is running, the supplied passphrase may be ignored
         agent_kill()
         try:
             sign(key_id, passphrase, content, signature)
             verify(content, signature)
-        except subprocess.CalledProcessError as e:
-            False
+        except subprocess.CalledProcessError:
+            return False
         return True
 
 
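Beyond removing the unused exception variable, this hunk fixes a real bug: the bare `False` expression was discarded, so an invalid passphrase never made the function return False. The check itself signs a throwaway file and verifies the signature. A rough standalone sketch of that round trip, assuming plain gpg CLI calls in place of the script's sign(), verify() and agent_kill() helpers (not shown in this diff):

```python
# Rough sketch of the sign/verify round trip; direct gpg calls are an
# assumption standing in for the script's sign()/verify()/agent_kill() helpers.
import subprocess
import tempfile

def valid_passphrase(key_id: str, passphrase: str) -> bool:
    with tempfile.TemporaryDirectory() as tmpdir:
        signature = f"{tmpdir}/sig.asc"
        try:
            # Sign this very file, reading the passphrase from stdin.
            subprocess.run(
                ["gpg", "--batch", "--pinentry-mode", "loopback", "--passphrase-fd", "0",
                 "-u", key_id, "--output", signature, "--detach-sign", __file__],
                input=passphrase.encode(), check=True, capture_output=True)
            subprocess.run(["gpg", "--verify", signature, __file__],
                           check=True, capture_output=True)
        except subprocess.CalledProcessError:
            return False
        return True
```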
@@ -88,5 +88,3 @@ def key_pass_id(key_id, passphrase):
     h.update(key_id.encode())
     h.update(passphrase.encode())
     return h.hexdigest()
-
-
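key_pass_id derives a stable identifier from the key id and passphrase by feeding both into a hash object created earlier in the function. A minimal sketch of the idea; sha256 is an assumption, since the line that constructs `h` is outside this hunk:

```python
# Minimal sketch; sha256 is an assumption, as the hash object is created
# above the lines shown in this hunk.
import hashlib

def key_pass_id(key_id: str, passphrase: str) -> str:
    h = hashlib.sha256()
    h.update(key_id.encode())
    h.update(passphrase.encode())
    return h.hexdigest()
```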
@@ -41,13 +41,13 @@ def query(query, **kwargs):
     Any additional keyword arguments are forwarded to jira.search_issues.
     """
     results = []
-    startAt = 0
+    start_at = 0
     new_results = None
     jira = JIRA(JIRA_BASE_URL)
     while new_results is None or len(new_results) == MAX_RESULTS:
-        new_results = jira.search_issues(query, startAt=startAt, maxResults=MAX_RESULTS, **kwargs)
+        new_results = jira.search_issues(query, startAt=start_at, maxResults=MAX_RESULTS, **kwargs)
         results += new_results
-        startAt += len(new_results)
+        start_at += len(new_results)
     return results
 
 
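The startAt → start_at rename is purely cosmetic; the loop still pages through JIRA results MAX_RESULTS at a time and stops as soon as a page comes back short. A generic sketch of that pagination pattern, with a hypothetical fetch_page callable standing in for jira.search_issues:

```python
# Generic pagination sketch; fetch_page(start_at, max_results) is a hypothetical
# stand-in for jira.search_issues(query, startAt=..., maxResults=...), and the
# page size of 50 is arbitrary.
MAX_RESULTS = 50

def paged_query(fetch_page):
    results = []
    start_at = 0
    new_results = None
    # A page shorter than MAX_RESULTS means there is nothing left to fetch.
    while new_results is None or len(new_results) == MAX_RESULTS:
        new_results = fetch_page(start_at, MAX_RESULTS)
        results += new_results
        start_at += len(new_results)
    return results
```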
@@ -172,5 +172,3 @@ if __name__ == "__main__":
     except Exception as e:
         print(e, file=sys.stderr)
         sys.exit(1)
-
-
@@ -89,5 +89,3 @@ def as_json():
     Export all saved preferences in JSON format.
     """
     json.dumps(prefs, indent=2)
-
-
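as_json only serializes the in-memory preferences dict; nothing is written to disk here. A minimal sketch of the same call, with a hypothetical prefs dict (note that the hunk's context line discards the json.dumps result, so returning the string here is an assumption about the intended use):

```python
# Minimal sketch; prefs is a hypothetical stand-in for the module-level
# preferences dict, and returning the string is an assumption.
import json

prefs = {"apache_id": "example", "gpg_key_id": "ABCD1234"}

def as_json() -> str:
    """Export all saved preferences in JSON format."""
    return json.dumps(prefs, indent=2)
```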
@@ -218,7 +218,7 @@ def verify_gpg_key():
     if not gpg.key_exists(gpg_key_id):
         fail(f"GPG key {gpg_key_id} not found")
     if not gpg.valid_passphrase(gpg_key_id, gpg_passphrase):
-        fail(f"GPG passprase not valid for key {gpg_key_id}")
+        fail(f"GPG passphrase not valid for key {gpg_key_id}")
 
 
 preferences.once("verify_requirements", lambda: confirm_or_fail(templates.requirements_instructions(preferences.FILE, preferences.as_json())))
@@ -232,12 +232,12 @@ apache_id = preferences.get('apache_id', lambda: prompt("Please enter your apach
 jdk21_env = get_jdk(21)
 
 
-def verify_prerequeisites():
+def verify_prerequisites():
     print("Begin to check if you have met all the pre-requisites for the release process")
     def prereq(name, soft_check):
         try:
             result = soft_check()
-            if result == False:
+            if not result:
                 fail(f"Pre-requisite not met: {name}")
             else:
                 print(f"Pre-requisite met: {name}")
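The `if result == False:` → `if not result:` change makes prereq treat any falsy soft-check result as a failure, not just a literal False. A condensed sketch of the soft-check pattern; fail() is a hypothetical stand-in that aborts the run, and the except branch is assumed, since the real handler sits outside this hunk:

```python
# Condensed sketch of the soft-check pattern; fail() is a hypothetical
# stand-in, and the exception handling is assumed from context.
import shutil

def fail(msg: str):
    raise SystemExit(msg)

def prereq(name, soft_check):
    try:
        result = soft_check()
        if not result:
            fail(f"Pre-requisite not met: {name}")
        else:
            print(f"Pre-requisite met: {name}")
    except Exception as e:
        fail(f"Pre-requisite check failed: {name}: {e}")

# Example soft check: the svn client must be on the PATH.
prereq("svn is installed", lambda: shutil.which("svn") is not None)
```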
@@ -250,7 +250,7 @@ def verify_prerequeisites():
     return True
 
 
-preferences.once(f"verify_prerequeisites", verify_prerequeisites)
+preferences.once(f"verify_prerequisites", verify_prerequisites)
 
 # Validate that the release doesn't already exist
 git.fetch_tags()
@@ -360,7 +360,7 @@ cmd("Building and uploading archives", "mvn deploy -Pgpg-signing", cwd=os.path.j
 
 # TODO: Many of these suggested validation steps could be automated
 # and would help pre-validate a lot of the stuff voters test
-print(templates.sanity_check_instructions(release_version, rc_tag, apache_id))
+print(templates.sanity_check_instructions(release_version, rc_tag))
 confirm_or_fail("Have you sufficiently verified the release artifacts?")
 
 # TODO: Can we close the staging repository via a REST API since we
@@ -376,6 +376,5 @@ git.reset_hard_head()
 git.switch_branch(starting_branch)
 git.delete_branch(release_version)
 
-rc_vote_email_text = templates.rc_vote_email_text(release_version, rc, rc_tag, dev_branch, docs_release_version, apache_id)
+rc_vote_email_text = templates.rc_vote_email_text(release_version, rc, rc_tag, dev_branch, docs_release_version)
 print(templates.rc_email_instructions(rc_vote_email_text))
-
@@ -108,7 +108,7 @@ def _prefix(prefix_str, value_str):
 
 def cmd(action, cmd_arg, *args, **kwargs):
     """
-    Execute an external command. This should be preferered over execute()
+    Execute an external command. This should be preferred over execute()
     when returning the output is not necessary, as the user will be given
     the option of retrying in case of a failure.
     """
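As the corrected docstring says, cmd() is preferred over execute() when the output is not needed, because it gives the user the option of retrying a failed command. A rough sketch of that retry loop, assuming a plain subprocess call and an input() prompt in place of the script's prompting and templates (not shown here):

```python
# Rough sketch of retry-on-failure; subprocess.run and input() are assumptions
# standing in for the script's execute() helper and prompt templates.
import subprocess

def cmd(action: str, cmd_arg: str):
    while True:
        print(action)
        try:
            subprocess.run(cmd_arg, shell=True, check=True)
            return
        except subprocess.CalledProcessError as e:
            answer = input(f"'{cmd_arg}' failed ({e}). Retry? [y/N] ").strip().lower()
            if answer != "y":
                raise
```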
@@ -144,5 +144,3 @@ def cmd(action, cmd_arg, *args, **kwargs):
 
         print(templates.cmd_failed())
         fail("")
-
-
@@ -27,7 +27,7 @@ from runtime import cmd
 
 SVN_DEV_URL="https://dist.apache.org/repos/dist/dev/kafka"
 
-def delete_old_rc_directory_if_needed(rc_tag, src, work_dir):
+def delete_old_rc_directory_if_needed(rc_tag, work_dir):
     svn_dev = os.path.join(work_dir, "svn_dev")
     cmd_desc = f"Check if {rc_tag} exists in the subversion repository."
     cmd_str = f"svn info --show-item revision {SVN_DEV_URL}/{rc_tag}"
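Dropping the unused src parameter doesn't change the probe itself: `svn info --show-item revision <url>` exits non-zero when the RC directory is absent, and that exit status decides whether an old directory needs deleting. A standalone sketch of the probe, using subprocess directly rather than the script's cmd() helper:

```python
# Standalone sketch of the svn existence probe; calling subprocess directly is
# an assumption, as the script routes this through its own cmd() helper.
import subprocess

SVN_DEV_URL = "https://dist.apache.org/repos/dist/dev/kafka"

def rc_directory_exists(rc_tag: str) -> bool:
    """True if the RC directory exists, judged by svn info's exit status."""
    result = subprocess.run(
        ["svn", "info", "--show-item", "revision", f"{SVN_DEV_URL}/{rc_tag}"],
        capture_output=True)
    return result.returncode == 0
```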
@@ -39,7 +39,7 @@ def delete_old_rc_directory_if_needed(rc_tag, src, work_dir):
     cmd(cmd_desc, cmd_str, cwd = svn_dev)
 
 def commit_artifacts(rc_tag, src, work_dir):
-    delete_old_rc_directory_if_needed(rc_tag, src, work_dir)
+    delete_old_rc_directory_if_needed(rc_tag, work_dir)
     svn_dev = os.path.join(work_dir, "svn_dev")
     dst = os.path.join(svn_dev, rc_tag)
     print(f"Copying {src} to {dst}")
@@ -154,11 +154,11 @@ Go to https://repository.apache.org/#stagingRepositories and hit 'Close' for the
 There will be more than one repository entries created, please close all of them.
 In some cases, you may get errors on some repositories while closing them, see KAFKA-15033.
 If this is not the first RC, you need to 'Drop' the previous artifacts.
-Confirm the correct artifacts are visible at https://repository.apache.org/content/groups/staging/org/apache/kafka/
+Confirm the correct artifacts are visible at https://repository.apache.org/content/groups/staging/org/apache/kafka/ and build the
+jvm and native Docker images following these instructions: https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=34840886#ReleaseProcess-CreateJVMApacheKafkaDockerArtifacts(Forversions>=3.7.0)
 """
 
 
-def sanity_check_instructions(release_version, rc_tag, apache_id):
+def sanity_check_instructions(release_version, rc_tag):
     return f"""
 *******************************************************************************************************************************************************
 Ok. We've built and staged everything for the {rc_tag}.
@@ -189,14 +189,14 @@ Some suggested steps:
 """
 
 
-def rc_vote_email_text(release_version, rc, rc_tag, dev_branch, docs_version, apache_id):
+def rc_vote_email_text(release_version, rc, rc_tag, dev_branch, docs_version):
     return f"""
 To: dev@kafka.apache.org, users@kafka.apache.org, kafka-clients@googlegroups.com
 Subject: [VOTE] {release_version} RC{rc}
 
 Hello Kafka users, developers and client-developers,
 
-This is the first candidate for release of Apache Kafka {release_version}.
+This is the <ORDINAL> candidate for release of Apache Kafka {release_version}.
 
 <DESCRIPTION OF MAJOR CHANGES, INCLUDE INDICATION OF MAJOR/MINOR RELEASE>
 
@@ -221,7 +221,7 @@ apache/kafka-native:{rc_tag}
 https://repository.apache.org/content/groups/staging/org/apache/kafka/
 
 * Javadoc:
-https://dist.apache.org/repos/dist/dev/kafka/{rc_tag}/javadoc/
+https://dist.apache.org/repos/dist/dev/kafka/{rc_tag}/javadoc/index.html
 
 * Tag to be voted upon (off {dev_branch} branch) is the {release_version} tag:
 https://github.com/apache/kafka/releases/tag/{rc_tag}
@@ -233,17 +233,16 @@ https://kafka.apache.org/{docs_version}/documentation.html
 https://kafka.apache.org/{docs_version}/protocol.html
 
 * Successful CI builds for the {dev_branch} branch:
-Unit/integration tests: https://ci-builds.apache.org/job/Kafka/job/kafka/job/{dev_branch}/<BUILD NUMBER>/
--- Confluent engineers can access the semphore build to provide the build number
-System tests: https://confluent-open-source-kafka-system-test-results.s3-us-west-2.amazonaws.com/{dev_branch}/<BUILD_NUMBER>/report.html
+Unit/integration tests: https://github.com/apache/kafka/actions/runs/<RUN_NUMBER>
+System tests:
+<Confluent engineers can access the semaphore build to provide the build number
+https://confluent-open-source-kafka-system-test-results.s3-us-west-2.amazonaws.com/{dev_branch}/<BUILD_NUMBER>/report.html>
 
 <USE docker/README.md FOR STEPS TO RUN DOCKER BUILD TEST GITHUB ACTIONS>
 * Successful Docker Image Github Actions Pipeline for {dev_branch} branch:
 Docker Build Test Pipeline (JVM): https://github.com/apache/kafka/actions/runs/<RUN_NUMBER>
 Docker Build Test Pipeline (Native): https://github.com/apache/kafka/actions/runs/<RUN_NUMBER>
 
-/**************************************
 
 Thanks,
 <YOU>
 """
@@ -294,5 +293,3 @@ IMPORTANT: Note that there are still some substitutions that need to be made in
 - Finally, validate all the links before shipping!
 Note that all substitutions are annotated with <> around them.
 """
-
-
@@ -71,5 +71,3 @@ def replace(path, pattern, replacement, **kwargs):
     with open(path, "w") as f:
         for line in updated:
             f.write(line)
-
-
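The final hunk just trims trailing blank lines from the text-file helper; replace() itself follows a simple read-modify-write pattern, collecting updated lines and rewriting the file in place. A minimal sketch, where building `updated` with re.sub is an assumption since that part of the function is outside this hunk:

```python
# Minimal read-modify-write sketch; re.sub is an assumption for how `updated`
# is built, as those lines are outside this hunk.
import re

def replace(path, pattern, replacement):
    with open(path) as f:
        updated = [re.sub(pattern, replacement, line) for line in f]
    with open(path, "w") as f:
        for line in updated:
            f.write(line)
```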