mirror of https://github.com/apache/kafka.git
KAFKA-16934: Clean up and refactor release.py (#16287)
The current release script has a couple of issues: (1) it is a single long file with duplicated logic, which makes it difficult to understand and change; (2) when a command fails, the user is forced to start over from the beginning, which lengthens feedback loops — e.g. when the publishing step fails because credentials were set incorrectly in ~/.gradle/gradle.properties. Reviewers: Mickael Maison <mickael.maison@gmail.com>
This commit is contained in:
parent
baa0fc9722
commit
9a7eee6072
845
release.py
845
release.py
|
@ -1,845 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
#
|
|
||||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
||||||
# contributor license agreements. See the NOTICE file distributed with
|
|
||||||
# this work for additional information regarding copyright ownership.
|
|
||||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
||||||
# (the "License"); you may not use this file except in compliance with
|
|
||||||
# the License. You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
#
|
|
||||||
|
|
||||||
"""
|
|
||||||
Utility for creating release candidates and promoting release candidates to a final release.
|
|
||||||
|
|
||||||
Usage: release.py [subcommand]
|
|
||||||
|
|
||||||
release.py stage
|
|
||||||
|
|
||||||
Builds and stages an RC for a release.
|
|
||||||
|
|
||||||
The utility is interactive; you will be prompted for basic release information and guided through the process.
|
|
||||||
|
|
||||||
This utility assumes you already have a local kafka git folder and that you
|
|
||||||
have added remotes corresponding to both:
|
|
||||||
(i) the github apache kafka mirror and
|
|
||||||
(ii) the apache kafka git repo.
|
|
||||||
|
|
||||||
release.py stage-docs [kafka-site-path]
|
|
||||||
|
|
||||||
Builds the documentation and stages it into an instance of the Kafka website repository.
|
|
||||||
|
|
||||||
This is meant to automate the integration between the main Kafka website repository (https://github.com/apache/kafka-site)
|
|
||||||
and the versioned documentation maintained in the main Kafka repository. This is useful both for local testing and
|
|
||||||
development of docs (follow the instructions here: https://cwiki.apache.org/confluence/display/KAFKA/Setup+Kafka+Website+on+Local+Apache+Server)
|
|
||||||
as well as for committers to deploy docs (run this script, then validate, commit, and push to kafka-site).
|
|
||||||
|
|
||||||
With no arguments this script assumes you have the Kafka repository and kafka-site repository checked out side-by-side, but
|
|
||||||
you can specify a full path to the kafka-site repository if this is not the case.
|
|
||||||
|
|
||||||
release.py release-email
|
|
||||||
|
|
||||||
Generates the email content/template for sending release announcement email.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import datetime
|
|
||||||
from getpass import getpass
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
import time
|
|
||||||
import re
|
|
||||||
|
|
||||||
PROJECT_NAME = "kafka"
|
|
||||||
CAPITALIZED_PROJECT_NAME = "kafka".upper()
|
|
||||||
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
|
|
||||||
# Location of the local git repository
|
|
||||||
REPO_HOME = os.environ.get("%s_HOME" % CAPITALIZED_PROJECT_NAME, SCRIPT_DIR)
|
|
||||||
# Remote name, which points to Github by default
|
|
||||||
PUSH_REMOTE_NAME = os.environ.get("PUSH_REMOTE_NAME", "apache-github")
|
|
||||||
PREFS_FILE = os.path.join(SCRIPT_DIR, '.release-settings.json')
|
|
||||||
PUBLIC_HTML = "public_html"
|
|
||||||
|
|
||||||
delete_gitrefs = False
|
|
||||||
work_dir = None
|
|
||||||
|
|
||||||
def fail(msg):
    # Abort the release: best-effort cleanup of everything this script created,
    # then print the reason and exit non-zero.
    if work_dir:
        cmd("Cleaning up work directory", f"rm -rf {work_dir}")

    if delete_gitrefs:
        try:
            cmd(f"Resetting repository working state to branch {starting_branch}",
                f"git reset --hard HEAD && git checkout {starting_branch}", shell=True)
            cmd(f"Deleting git branches {release_version}",
                f"git branch -D {release_version}", shell=True)
            cmd(f"Deleting git tag {rc_tag}",
                f"git tag -d {rc_tag}", shell=True)
        except subprocess.CalledProcessError:
            # Leave manual-cleanup breadcrumbs if the git cleanup itself failed.
            print("Failed when trying to clean up git references added by this script. You may need to clean up branches/tags yourself before retrying.")
            print("Expected git branch: " + release_version)
            print("Expected git tag: " + rc_tag)
    print(msg)
    sys.exit(1)
|
|
||||||
|
|
||||||
def print_output(output):
    """Echo each line of a command's captured output, prefixed with '> '.

    Does nothing for None or an empty string.
    """
    # `not output` already covers None, so the original explicit None check was redundant.
    if not output:
        return
    for line in output.split('\n'):
        print(">", line)
|
|
||||||
|
|
||||||
def cmd(action, cmd_arg, *args, **kwargs):
    # Run a command, echoing its output via print_output. Beyond the normal
    # subprocess.check_output kwargs, two extras are supported:
    #   allow_failure - return instead of aborting the release when the command fails
    #   num_retries   - re-run the command up to this many times before giving up
    # `action` is a human-readable label printed before the command runs.

    # Tokenize a plain-string command unless the caller asked for shell=True.
    if isinstance(cmd_arg, str) and not kwargs.get("shell", False):
        cmd_arg = cmd_arg.split()
    # Pop the custom options so only subprocess-compatible kwargs remain.
    allow_failure = kwargs.pop("allow_failure", False)
    num_retries = kwargs.pop("num_retries", 0)

    stdin_log = ""
    if "stdin" in kwargs and isinstance(kwargs["stdin"], str):
        # subprocess needs a real file object for stdin: spool the string
        # through a temp file, and keep a copy for the log line below.
        stdin_log = "--> " + kwargs["stdin"]
        stdin = tempfile.TemporaryFile()
        stdin.write(kwargs["stdin"].encode('utf-8'))
        stdin.seek(0)
        kwargs["stdin"] = stdin

    print(action, cmd_arg, stdin_log)
    try:
        output = subprocess.check_output(cmd_arg, *args, stderr=subprocess.STDOUT, **kwargs)
        print_output(output.decode('utf-8'))
    except subprocess.CalledProcessError as e:
        print_output(e.output.decode('utf-8'))

        if num_retries > 0:
            # Retry by recursing with one fewer retry; re-insert the custom
            # kwargs that were popped above so the recursive call sees them.
            # NOTE(review): a string stdin was already converted to a (now
            # consumed) temp file, so retries may see empty stdin -- confirm.
            kwargs['num_retries'] = num_retries - 1
            kwargs['allow_failure'] = allow_failure
            print("Retrying... %d remaining retries" % (num_retries - 1))
            time.sleep(4. / (num_retries + 1)) # e.g., if retries=3, sleep for 1s, 1.3s, 2s
            return cmd(action, cmd_arg, *args, **kwargs)

        if allow_failure:
            return

        print("*************************************************")
        print("*** First command failure occurred here. ***")
        print("*** Will now try to clean up working state. ***")
        print("*************************************************")
        # fail() cleans up the work dir / git refs and exits the process.
        fail("")
|
|
||||||
|
|
||||||
|
|
||||||
def cmd_output(cmd, *args, **kwargs):
    """Run a command and return its combined stdout+stderr as a decoded string."""
    argv = cmd.split() if isinstance(cmd, str) else cmd
    raw = subprocess.check_output(argv, *args, stderr=subprocess.STDOUT, **kwargs)
    return raw.decode('utf-8')
|
|
||||||
|
|
||||||
def replace(path, pattern, replacement):
    """Rewrite *path* in place, swapping every line that starts with *pattern* for *replacement*."""
    with open(path, 'r') as src:
        rewritten = [(replacement + '\n') if line.startswith(pattern) else line
                     for line in src]
    with open(path, 'w') as dst:
        dst.writelines(rewritten)
|
|
||||||
|
|
||||||
def regexReplace(path, pattern, replacement):
    """Rewrite *path* in place, applying re.sub(pattern, replacement) to every line."""
    with open(path, 'r') as src:
        rewritten = [re.sub(pattern, replacement, line) for line in src]
    with open(path, 'w') as dst:
        dst.writelines(rewritten)
|
|
||||||
|
|
||||||
def user_ok(msg):
    """Prompt the user with *msg*; return True iff the reply is 'y' (case-insensitive)."""
    answer = sanitize_input(msg)
    return answer.strip().lower() == 'y'
|
|
||||||
|
|
||||||
def sftp_mkdir(dir):
    """Create *dir* in the release manager's Apache home directory over sftp.

    Failures are tolerated because sftp's mkdir errors out when the directory
    already exists, which is fine for our purposes.
    """
    batch = """
mkdir %s
""" % dir
    try:
        cmd("Creating '%s' in your Apache home directory if it does not exist (errors are ok if the directory already exists)" % dir,
            "sftp -b - %s@home.apache.org" % apache_id,
            stdin=batch, allow_failure=True, num_retries=3)
    except subprocess.CalledProcessError:
        # Expected when the directory is already there.
        pass
|
|
||||||
|
|
||||||
def sftp_upload(dir):
    """Recursively upload *dir* under public_html in the release manager's Apache home directory."""
    batch = """
cd %s
put -r %s
""" % (PUBLIC_HTML, dir)
    try:
        cmd("Uploading '%s' under %s in your Apache home directory" % (dir, PUBLIC_HTML),
            "sftp -b - %s@home.apache.org" % apache_id,
            stdin=batch, allow_failure=True, num_retries=3)
    except subprocess.CalledProcessError:
        fail("Failed uploading %s to your Apache home directory" % dir)
|
|
||||||
|
|
||||||
def get_pref(prefs, name, request_fn):
    """Return the stored preference *name*, or collect it via *request_fn* and cache it in *prefs*."""
    value = prefs.get(name)
    if not value:
        value = request_fn()
        prefs[name] = value
    return value
|
|
||||||
|
|
||||||
def load_prefs():
    """Load saved preferences from PREFS_FILE; return an empty dict when none were saved."""
    if not os.path.exists(PREFS_FILE):
        return {}
    with open(PREFS_FILE, 'r') as prefs_fp:
        return json.load(prefs_fp)
|
|
||||||
|
|
||||||
def save_prefs(prefs):
    """Persist *prefs* to PREFS_FILE as JSON."""
    print("Saving preferences to %s" % PREFS_FILE)
    with open(PREFS_FILE, 'w') as prefs_fp:
        # json.dump returns None; the old code pointlessly rebound `prefs` to it.
        json.dump(prefs, prefs_fp)
|
|
||||||
|
|
||||||
def get_jdk(prefs, version):
    """
    Resolve JAVA_HOME for the requested JDK *version* (prompting once and caching
    in *prefs*), verify `java -version` reports that version, and return an
    environment dict with JAVA_HOME set appropriately.
    """
    jdk_java_home = get_pref(prefs, 'jdk%d' % version,
                             lambda: sanitize_input("Enter the path for JAVA_HOME for a JDK%d compiler (blank to use default JAVA_HOME): " % version))
    jdk_env = dict(os.environ)
    if jdk_java_home.strip():
        jdk_env['JAVA_HOME'] = jdk_java_home
    else:
        jdk_java_home = jdk_env['JAVA_HOME']
    java_version = cmd_output("%s/bin/java -version" % jdk_java_home, env=jdk_env)
    # JDK 8 reports itself as 1.8.0; later JDKs report e.g. '17.0.x' or '"17"'.
    if version == 8:
        if "1.8.0" not in java_version:
            fail("JDK 8 is required")
    elif "%d.0" % version not in java_version and '"%d"' % version not in java_version:
        fail("JDK %s is required" % version)
    return jdk_env
|
|
||||||
|
|
||||||
def get_version(repo=REPO_HOME):
    """
    Extract the full version string from <repo>/gradle.properties.

    Returns the value of the 'version' property; aborts via fail() when the
    property is missing.
    """
    with open(os.path.join(repo, 'gradle.properties')) as fp:
        for line in fp:
            # Split on the first '=' only, so a value containing '=' survives intact.
            parts = line.split('=', 1)
            if parts[0].strip() != 'version': continue
            return parts[1].strip()
    fail("Couldn't extract version from gradle.properties")
|
|
||||||
|
|
||||||
def docs_version(version):
    """
    Convert a full version string to the compact form used for docs on the
    website, e.g. '0.10.2.0-SNAPSHOT' -> '0102' and '3.9.0' -> '39'.
    """
    parts = version.strip().split('.')
    # Pre-1.0 releases carried four version components, so three of them
    # identify a doc set; 1.0+ uses the standard major.minor pair.
    significant = parts[:3] if parts[0] == '0' else parts[:2]
    return ''.join(significant)
|
|
||||||
|
|
||||||
def docs_release_version(version):
    """
    Map the gradle.properties version to the most recent release valid for the
    current branch.

    A -SNAPSHOT version with a non-zero bugfix component (e.g. 0.10.2.1-SNAPSHOT)
    maps to the previous bugfix release (0.10.2.0), since that one is already
    out; x.y.0-SNAPSHOT and plain release versions are returned unchanged.
    """
    parts = version.strip().split('.')
    last = parts[-1]
    if '-SNAPSHOT' in last:
        bugfix = int(last.split('-')[0])
        if bugfix > 0:
            parts[-1] = str(bugfix - 1)
    return '.'.join(parts)
|
|
||||||
|
|
||||||
def command_stage_docs():
    """
    Build the site docs and javadoc for the current release version and copy
    them into a local checkout of the kafka-site repository.

    The kafka-site path may be given as sys.argv[2]; otherwise a checkout next
    to this repository is assumed.  Always exits the process (0 on success).
    """
    kafka_site_repo_path = sys.argv[2] if len(sys.argv) > 2 else os.path.join(REPO_HOME, '..', 'kafka-site')
    # powered-by.html is used as a marker that this really is the kafka-site repo.
    if not os.path.exists(kafka_site_repo_path) or not os.path.exists(os.path.join(kafka_site_repo_path, 'powered-by.html')):
        sys.exit("%s doesn't exist or does not appear to be the kafka-site repository" % kafka_site_repo_path)

    prefs = load_prefs()
    jdk17_env = get_jdk(prefs, 17)
    save_prefs(prefs)

    version = get_version()
    # We explicitly override the version of the project that we normally get from gradle.properties since we want to be
    # able to run this from a release branch where we made some updates, but the build would show an incorrect SNAPSHOT
    # version due to already having bumped the bugfix version number.
    gradle_version_override = docs_release_version(version)

    cmd("Building docs", "./gradlew -Pversion=%s clean siteDocsTar aggregatedJavadoc" % gradle_version_override, cwd=REPO_HOME, env=jdk17_env)

    docs_tar = os.path.join(REPO_HOME, 'core', 'build', 'distributions', 'kafka_2.13-%s-site-docs.tgz' % gradle_version_override)

    versioned_docs_path = os.path.join(kafka_site_repo_path, docs_version(version))
    if not os.path.exists(versioned_docs_path):
        os.mkdir(versioned_docs_path, 0o755)

    # The contents of the docs jar are site-docs/<docs dir>. We need to get rid of the site-docs prefix and dump everything
    # inside it into the docs version subdirectory in the kafka-site repo
    cmd('Extracting site-docs', 'tar xf %s --strip-components 1' % docs_tar, cwd=versioned_docs_path)

    javadocs_src_dir = os.path.join(REPO_HOME, 'build', 'docs', 'javadoc')

    cmd('Copying javadocs', 'cp -R %s %s' % (javadocs_src_dir, versioned_docs_path))

    sys.exit(0)
|
|
||||||
|
|
||||||
def validate_release_version_parts(version):
    """Abort via fail() unless *version* is a dotted x.y.z number with three numeric parts."""
    try:
        components = version.split('.')
        if len(components) != 3:
            fail("Invalid release version, should have 3 version number components")
        # Every component must parse as a plain integer.
        for component in components:
            int(component)
    except ValueError:
        fail("Invalid release version, should be a dotted version number")


def get_release_version_parts(version):
    """Validate *version* and return its three components as a list of strings."""
    validate_release_version_parts(version)
    return version.split('.')
|
|
||||||
|
|
||||||
def validate_release_num(version):
    """Abort unless *version* names an existing git release tag in valid x.y.z form."""
    known_tags = cmd_output('git tag').split()
    if version not in known_tags:
        fail("The specified version is not a valid release version number")
    validate_release_version_parts(version)
|
|
||||||
|
|
||||||
def sanitize_input(input_msg: str) -> str:
    """Prompt the user with *input_msg* and return the reply stripped of leading/trailing spaces.

    Use this instead of input() wherever user input is collected.
    """
    return input(input_msg).strip()
|
|
||||||
|
|
||||||
|
|
||||||
def command_release_announcement_email():
    """
    Print the [ANNOUNCE] email template for the most recent release.

    Determines the current and previous release from git tags (letting the user
    override either), counts contributors between the two tags, and renders the
    announcement template for manual substitution and sending.  Exits with 0.
    """
    def version_key(tag):
        # Numeric sort/compare key: plain string ordering would mis-rank
        # versions such as 3.10.0 vs 3.9.0.
        return tuple(int(part) for part in tag.split('.'))

    tags = cmd_output('git tag').split()
    # Raw string: '\.' is an invalid escape in a plain string literal.
    release_tag_pattern = re.compile(r'^[0-9]+\.[0-9]+\.[0-9]+$')
    release_tags = sorted([t for t in tags if release_tag_pattern.match(t)], key=version_key)
    release_version_num = release_tags[-1]
    if not user_ok("""Is the current release %s ? (y/n): """ % release_version_num):
        release_version_num = sanitize_input('What is the current release version:')
        validate_release_num(release_version_num)
    previous_release_version_num = release_tags[-2]
    if not user_ok("""Is the previous release %s ? (y/n): """ % previous_release_version_num):
        previous_release_version_num = sanitize_input('What is the previous release version:')
        validate_release_num(previous_release_version_num)
    # Compare numerically, not lexicographically.
    if version_key(release_version_num) < version_key(previous_release_version_num):
        fail("Current release version number can't be less than previous release version number")
    number_of_contributors = int(subprocess.check_output('git shortlog -sn --group=author --group=trailer:co-authored-by --group=trailer:Reviewers --no-merges %s..%s | uniq | wc -l' % (previous_release_version_num, release_version_num) , shell=True).decode('utf-8'))
    contributors = subprocess.check_output("git shortlog -sn --group=author --group=trailer:co-authored-by --group=trailer:Reviewers --no-merges %s..%s | cut -f2 | sort --ignore-case | uniq" % (previous_release_version_num, release_version_num), shell=True).decode('utf-8')
    release_announcement_data = {
        'number_of_contributors': number_of_contributors,
        'contributors': ', '.join(str(x) for x in filter(None, contributors.split('\n'))),
        'release_version': release_version_num,
        # Key typo ('wihtout') fixed here and in the template placeholder below.
        'release_version_without_dot': release_version_num.replace(".", "")
    }

    release_announcement_email = """
To: announce@apache.org, dev@kafka.apache.org, users@kafka.apache.org, kafka-clients@googlegroups.com
Subject: [ANNOUNCE] Apache Kafka %(release_version)s

The Apache Kafka community is pleased to announce the release for Apache Kafka %(release_version)s

<DETAILS OF THE CHANGES>

An overview of the release and its notable changes can be found in the
release blog post: https://kafka.apache.org/blog#apache_kafka_%(release_version_without_dot)s_release_announcement

All of the changes in this release can be found in the release notes:
https://www.apache.org/dist/kafka/%(release_version)s/RELEASE_NOTES.html


You can download the source and binary release (Scala <VERSIONS>) from:
https://kafka.apache.org/downloads#%(release_version)s

---------------------------------------------------------------------------------------------------


Apache Kafka is a distributed streaming platform with four core APIs:


** The Producer API allows an application to publish a stream of records to
one or more Kafka topics.

** The Consumer API allows an application to subscribe to one or more
topics and process the stream of records produced to them.

** The Streams API allows an application to act as a stream processor,
consuming an input stream from one or more topics and producing an
output stream to one or more output topics, effectively transforming the
input streams to output streams.

** The Connector API allows building and running reusable producers or
consumers that connect Kafka topics to existing applications or data
systems. For example, a connector to a relational database might
capture every change to a table.


With these APIs, Kafka can be used for two broad classes of application:

** Building real-time streaming data pipelines that reliably get data
between systems or applications.

** Building real-time streaming applications that transform or react
to the streams of data.


Apache Kafka is in use at large and small companies worldwide, including
Capital One, Goldman Sachs, ING, LinkedIn, Netflix, Pinterest, Rabobank,
Target, The New York Times, Uber, Yelp, and Zalando, among others.

A big thank you for the following %(number_of_contributors)d contributors to this release! (Please report an unintended omission)

%(contributors)s

We welcome your help and feedback. For more information on how to
report problems, and to get involved, visit the project website at
https://kafka.apache.org/

Thank you!


Regards,

<YOU>
Release Manager for Apache Kafka %(release_version)s""" % release_announcement_data

    print()
    print("*****************************************************************")
    print()
    print(release_announcement_email)
    print()
    print("*****************************************************************")
    print()
    print("Use the above template to send the announcement for the release to the mailing list.")
    print("IMPORTANT: Note that there are still some substitutions that need to be made in the template:")
    print(" - Describe major changes in this release")
    print(" - Scala versions")
    print(" - Fill in your name in the signature")
    print(" - You will need to use your apache email address to send out the email (otherwise, it won't be delivered to announce@apache.org)")
    print(" - Finally, validate all the links before shipping!")
    print("Note that all substitutions are annotated with <> around them.")
    sys.exit(0)
|
|
||||||
|
|
||||||
|
|
||||||
# Dispatch to subcommand.  'stage-docs' and 'release-email' each handle the
# whole request and exit; anything else other than 'stage' (or no argument,
# which defaults to 'stage') is rejected.
subcommand = sys.argv[1] if len(sys.argv) > 1 else None
if subcommand == 'stage-docs':
    command_stage_docs()
elif subcommand == 'release-email':
    command_release_announcement_email()
elif not (subcommand is None or subcommand == 'stage'):
    fail("Unknown subcommand: %s" % subcommand)
# else -> default subcommand stage: execution falls through to the script body below
|
|
||||||
|
|
||||||
|
|
||||||
## Default 'stage' subcommand implementation isn't isolated to its own function yet for historical reasons
|
|
||||||
|
|
||||||
prefs = load_prefs()
|
|
||||||
|
|
||||||
if not user_ok("""Requirements:
|
|
||||||
1. Updated docs to reference the new release version where appropriate.
|
|
||||||
2. JDK8 and JDK17 compilers and libraries
|
|
||||||
3. Your Apache ID, already configured with SSH keys on id.apache.org and SSH keys available in this shell session
|
|
||||||
4. All issues in the target release resolved with valid resolutions (if not, this script will report the problematic JIRAs)
|
|
||||||
5. A GPG key used for signing the release. This key should have been added to public Apache servers and the KEYS file on the Kafka site
|
|
||||||
6. Standard toolset installed -- git, gpg, gradle, sftp, etc.
|
|
||||||
7. ~/.gradle/gradle.properties configured with the signing properties described in the release process wiki, i.e.
|
|
||||||
|
|
||||||
mavenUrl=https://repository.apache.org/service/local/staging/deploy/maven2
|
|
||||||
mavenUsername=your-apache-id
|
|
||||||
mavenPassword=your-apache-passwd
|
|
||||||
signing.keyId=your-gpgkeyId
|
|
||||||
signing.password=your-gpg-passphrase
|
|
||||||
signing.secretKeyRingFile=/Users/your-id/.gnupg/secring.gpg (if you are using GPG 2.1 and beyond, then this file will no longer exist anymore, and you have to manually create it from the new private key directory with "gpg --export-secret-keys -o ~/.gnupg/secring.gpg")
|
|
||||||
8. ~/.m2/settings.xml configured for pgp signing and uploading to apache release maven, i.e.,
|
|
||||||
<server>
|
|
||||||
<id>apache.releases.https</id>
|
|
||||||
<username>your-apache-id</username>
|
|
||||||
<password>your-apache-passwd</password>
|
|
||||||
</server>
|
|
||||||
<server>
|
|
||||||
<id>your-gpgkeyId</id>
|
|
||||||
<passphrase>your-gpg-passphrase</passphrase>
|
|
||||||
</server>
|
|
||||||
<profile>
|
|
||||||
<id>gpg-signing</id>
|
|
||||||
<properties>
|
|
||||||
<gpg.keyname>your-gpgkeyId</gpg.keyname>
|
|
||||||
<gpg.passphraseServerId>your-gpgkeyId</gpg.passphraseServerId>
|
|
||||||
</properties>
|
|
||||||
</profile>
|
|
||||||
9. You may also need to update some gnupgp configs:
|
|
||||||
~/.gnupg/gpg-agent.conf
|
|
||||||
allow-loopback-pinentry
|
|
||||||
|
|
||||||
~/.gnupg/gpg.conf
|
|
||||||
use-agent
|
|
||||||
pinentry-mode loopback
|
|
||||||
|
|
||||||
echo RELOADAGENT | gpg-connect-agent
|
|
||||||
|
|
||||||
If any of these are missing, see https://cwiki.apache.org/confluence/display/KAFKA/Release+Process for instructions on setting them up.
|
|
||||||
|
|
||||||
Some of these may be used from these previous settings loaded from %s:
|
|
||||||
|
|
||||||
%s
|
|
||||||
|
|
||||||
Do you have all of of these setup? (y/n): """ % (PREFS_FILE, json.dumps(prefs, indent=2))):
|
|
||||||
fail("Please try again once you have all the prerequisites ready.")
|
|
||||||
|
|
||||||
apache_id = sanitize_input("Please enter your apache-id: ")
|
|
||||||
|
|
||||||
print("Begin to check if you have met all the pre-requisites for the release process")
|
|
||||||
|
|
||||||
try:
|
|
||||||
test_maven = cmd_output("mvn -v")
|
|
||||||
if "Apache Maven" in test_maven:
|
|
||||||
print("Pre-requisite met: You have maven cli in place")
|
|
||||||
else:
|
|
||||||
fail("Pre-requisite not met: You need to install maven CLI")
|
|
||||||
except Exception as e:
|
|
||||||
fail(f"Pre-requisite not met: Unable to check if maven cli is installed. Error: {e}")
|
|
||||||
|
|
||||||
try:
|
|
||||||
test_sftp = subprocess.run(f"sftp {apache_id}@home.apache.org".split())
|
|
||||||
if test_sftp.returncode != 0:
|
|
||||||
fail("Pre-requisite not met: Cannot establish sftp connection. Please check your apache-id and ssh keys.")
|
|
||||||
print("Pre-requisite met: sftp connection is successful")
|
|
||||||
except Exception as e:
|
|
||||||
fail(f"Pre-requisite not met: Unable to check if sftp connection is successful. Error: {e}")
|
|
||||||
|
|
||||||
try:
|
|
||||||
test_svn = cmd_output("svn --version")
|
|
||||||
if "svn" in test_svn:
|
|
||||||
print("Pre-requisite met: You have svn cli in place")
|
|
||||||
else:
|
|
||||||
fail("Pre-requisite not met: You need to install svn cli")
|
|
||||||
except Exception as e:
|
|
||||||
fail(f"Pre-requisite not met: Unable to check if svn cli is installed. Error: {e}")
|
|
||||||
|
|
||||||
starting_branch = cmd_output('git rev-parse --abbrev-ref HEAD')
|
|
||||||
|
|
||||||
cmd("Verifying that you have no unstaged git changes", 'git diff --exit-code --quiet')
|
|
||||||
cmd("Verifying that you have no staged git changes", 'git diff --cached --exit-code --quiet')
|
|
||||||
|
|
||||||
release_version = sanitize_input("Release version (without any RC info, e.g. 1.0.0): ")
|
|
||||||
release_version_parts = get_release_version_parts(release_version)
|
|
||||||
|
|
||||||
rc = sanitize_input("Release candidate number: ")
|
|
||||||
|
|
||||||
dev_branch = '.'.join(release_version_parts[:2])
|
|
||||||
docs_release_version = docs_version(release_version)
|
|
||||||
|
|
||||||
# Validate that the release doesn't already exist and that the
|
|
||||||
cmd("Fetching tags from upstream", 'git fetch --tags %s' % PUSH_REMOTE_NAME)
|
|
||||||
tags = cmd_output('git tag').split()
|
|
||||||
|
|
||||||
if release_version in tags:
|
|
||||||
fail("The specified version has already been tagged and released.")
|
|
||||||
|
|
||||||
# TODO promotion
|
|
||||||
if not rc:
|
|
||||||
fail("Automatic Promotion is not yet supported.")
|
|
||||||
|
|
||||||
# Find the latest RC and make sure they want to promote that one
|
|
||||||
rc_tag = sorted([t for t in tags if t.startswith(release_version + '-rc')])[-1]
|
|
||||||
if not user_ok("Found %s as latest RC for this release. Is this correct? (y/n): "):
|
|
||||||
fail("This script couldn't determine which RC tag to promote, you'll need to fix up the RC tags and re-run the script.")
|
|
||||||
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
# Prereq checks
|
|
||||||
apache_id = get_pref(prefs, 'apache_id', lambda: sanitize_input("Enter your apache username: "))
|
|
||||||
|
|
||||||
jdk8_env = get_jdk(prefs, 8)
|
|
||||||
jdk17_env = get_jdk(prefs, 17)
|
|
||||||
|
|
||||||
def select_gpg_key():
|
|
||||||
print("Here are the available GPG keys:")
|
|
||||||
available_keys = cmd_output("gpg --list-secret-keys")
|
|
||||||
print(available_keys)
|
|
||||||
key_name = sanitize_input("Which user name (enter the user name without email address): ")
|
|
||||||
if key_name not in available_keys:
|
|
||||||
fail("Couldn't find the requested key.")
|
|
||||||
return key_name
|
|
||||||
|
|
||||||
key_name = get_pref(prefs, 'gpg-key', select_gpg_key)
|
|
||||||
|
|
||||||
gpg_passphrase = get_pref(prefs, 'gpg-pass', lambda: getpass("Passphrase for this GPG key: "))
|
|
||||||
# Do a quick validation so we can fail fast if the password is incorrect
|
|
||||||
with tempfile.NamedTemporaryFile() as gpg_test_tempfile:
|
|
||||||
gpg_test_tempfile.write("abcdefg".encode('utf-8'))
|
|
||||||
cmd("Testing GPG key & passphrase", ["gpg", "--batch", "--pinentry-mode", "loopback", "--passphrase-fd", "0", "-u", key_name, "--armor", "--output", gpg_test_tempfile.name + ".asc", "--detach-sig", gpg_test_tempfile.name], stdin=gpg_passphrase)
|
|
||||||
|
|
||||||
save_prefs(prefs)
|
|
||||||
|
|
||||||
# Generate RC
|
|
||||||
try:
|
|
||||||
int(rc)
|
|
||||||
except ValueError:
|
|
||||||
fail("Invalid release candidate number: %s" % rc)
|
|
||||||
rc_tag = release_version + '-rc' + rc
|
|
||||||
|
|
||||||
delete_gitrefs = True # Since we are about to start creating new git refs, enable cleanup function on failure to try to delete them
|
|
||||||
cmd("Checking out current development branch", "git checkout -b %s %s" % (release_version, PUSH_REMOTE_NAME + "/" + dev_branch))
|
|
||||||
print("Updating version numbers")
|
|
||||||
replace("gradle.properties", "version", "version=%s" % release_version)
|
|
||||||
replace("tests/kafkatest/__init__.py", "__version__", "__version__ = '%s'" % release_version)
|
|
||||||
print("updating streams quickstart pom")
|
|
||||||
regexReplace("streams/quickstart/pom.xml", "-SNAPSHOT", "")
|
|
||||||
print("updating streams quickstart java pom")
|
|
||||||
regexReplace("streams/quickstart/java/pom.xml", "-SNAPSHOT", "")
|
|
||||||
print("updating streams quickstart archetype pom")
|
|
||||||
regexReplace("streams/quickstart/java/src/main/resources/archetype-resources/pom.xml", "-SNAPSHOT", "")
|
|
||||||
print("updating ducktape version.py")
|
|
||||||
regexReplace("./tests/kafkatest/version.py", "^DEV_VERSION =.*",
|
|
||||||
"DEV_VERSION = KafkaVersion(\"%s-SNAPSHOT\")" % release_version)
|
|
||||||
print("updating docs/js/templateData.js")
|
|
||||||
regexReplace("docs/js/templateData.js", "-SNAPSHOT", "")
|
|
||||||
# Command in explicit list due to messages with spaces
|
|
||||||
cmd("Committing version number updates", ["git", "commit", "-a", "-m", "Bump version to %s" % release_version])
|
|
||||||
# Command in explicit list due to messages with spaces
|
|
||||||
cmd("Tagging release candidate %s" % rc_tag, ["git", "tag", "-a", rc_tag, "-m", rc_tag])
|
|
||||||
rc_githash = cmd_output("git show-ref --hash " + rc_tag)
|
|
||||||
cmd("Switching back to your starting branch", "git checkout %s" % starting_branch)
|
|
||||||
|
|
||||||
# Note that we don't use tempfile here because mkdtemp causes problems with sftp and being able to determine the absolute path to a file.
|
|
||||||
# Instead we rely on a fixed path and if it
|
|
||||||
work_dir = os.path.join(REPO_HOME, ".release_work_dir")
|
|
||||||
if os.path.exists(work_dir):
|
|
||||||
fail("A previous attempt at a release left dirty state in the work directory. Clean up %s before proceeding. (This attempt will try to cleanup, simply retrying may be sufficient now...)" % work_dir)
|
|
||||||
os.makedirs(work_dir)
|
|
||||||
print("Temporary build working director:", work_dir)
|
|
||||||
kafka_dir = os.path.join(work_dir, 'kafka')
|
|
||||||
streams_quickstart_dir = os.path.join(kafka_dir, 'streams/quickstart')
|
|
||||||
print("Streams quickstart dir", streams_quickstart_dir)
|
|
||||||
artifact_name = "kafka-" + rc_tag
|
|
||||||
cmd("Creating staging area for release artifacts", "mkdir " + artifact_name, cwd=work_dir)
|
|
||||||
artifacts_dir = os.path.join(work_dir, artifact_name)
|
|
||||||
cmd("Cloning clean copy of repo", "git clone %s kafka" % REPO_HOME, cwd=work_dir)
|
|
||||||
cmd("Checking out RC tag", "git checkout -b %s %s" % (release_version, rc_tag), cwd=kafka_dir)
|
|
||||||
current_year = datetime.datetime.now().year
|
|
||||||
cmd("Verifying the correct year in NOTICE", "grep %s NOTICE" % current_year, cwd=kafka_dir)
|
|
||||||
|
|
||||||
with open(os.path.join(artifacts_dir, "RELEASE_NOTES.html"), 'w') as f:
|
|
||||||
print("Generating release notes")
|
|
||||||
try:
|
|
||||||
subprocess.check_call([sys.executable, "./release_notes.py", release_version], stdout=f)
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
print_output(e.output)
|
|
||||||
|
|
||||||
print("*************************************************")
|
|
||||||
print("*** First command failure occurred here. ***")
|
|
||||||
print("*** Will now try to clean up working state. ***")
|
|
||||||
print("*************************************************")
|
|
||||||
fail("")
|
|
||||||
|
|
||||||
|
|
||||||
params = { 'release_version': release_version,
|
|
||||||
'rc_tag': rc_tag,
|
|
||||||
'artifacts_dir': artifacts_dir
|
|
||||||
}
|
|
||||||
cmd("Creating source archive", "git archive --format tar.gz --prefix kafka-%(release_version)s-src/ -o %(artifacts_dir)s/kafka-%(release_version)s-src.tgz %(rc_tag)s" % params)
|
|
||||||
|
|
||||||
cmd("Building artifacts", "./gradlew clean && ./gradlewAll releaseTarGz", cwd=kafka_dir, env=jdk8_env, shell=True)
|
|
||||||
cmd("Copying artifacts", "cp %s/core/build/distributions/* %s" % (kafka_dir, artifacts_dir), shell=True)
|
|
||||||
cmd("Building docs", "./gradlew clean aggregatedJavadoc", cwd=kafka_dir, env=jdk17_env)
|
|
||||||
cmd("Copying docs", "cp -R %s/build/docs/javadoc %s" % (kafka_dir, artifacts_dir))
|
|
||||||
|
|
||||||
for filename in os.listdir(artifacts_dir):
|
|
||||||
full_path = os.path.join(artifacts_dir, filename)
|
|
||||||
if not os.path.isfile(full_path):
|
|
||||||
continue
|
|
||||||
# Commands in explicit list due to key_name possibly containing spaces
|
|
||||||
cmd("Signing " + full_path, ["gpg", "--batch", "--passphrase-fd", "0", "-u", key_name, "--armor", "--output", full_path + ".asc", "--detach-sig", full_path], stdin=gpg_passphrase)
|
|
||||||
cmd("Verifying " + full_path, ["gpg", "--verify", full_path + ".asc", full_path])
|
|
||||||
# Note that for verification, we need to make sure only the filename is used with --print-md because the command line
|
|
||||||
# argument for the file is included in the output and verification uses a simple diff that will break if an absolut path
|
|
||||||
# is used.
|
|
||||||
dir, fname = os.path.split(full_path)
|
|
||||||
cmd("Generating MD5 for " + full_path, "gpg --print-md md5 %s > %s.md5" % (fname, fname), shell=True, cwd=dir)
|
|
||||||
cmd("Generating SHA1 for " + full_path, "gpg --print-md sha1 %s > %s.sha1" % (fname, fname), shell=True, cwd=dir)
|
|
||||||
cmd("Generating SHA512 for " + full_path, "gpg --print-md sha512 %s > %s.sha512" % (fname, fname), shell=True, cwd=dir)
|
|
||||||
|
|
||||||
cmd("Listing artifacts to be uploaded:", "ls -R %s" % artifacts_dir)
|
|
||||||
|
|
||||||
cmd("Zipping artifacts", "tar -czf %s.tar.gz %s" % (artifact_name, artifact_name), cwd=work_dir)
|
|
||||||
sftp_mkdir(PUBLIC_HTML)
|
|
||||||
sftp_upload(artifacts_dir)
|
|
||||||
if not user_ok("Confirm the artifact is present under %s in your Apache home directory: https://home.apache.org/~%s/ (y/n)?: " % (PUBLIC_HTML, apache_id)):
|
|
||||||
fail("Ok, giving up")
|
|
||||||
|
|
||||||
with open(os.path.expanduser("~/.gradle/gradle.properties")) as f:
|
|
||||||
contents = f.read()
|
|
||||||
if not user_ok("Going to build and upload mvn artifacts based on these settings:\n" + contents + '\nOK (y/n)?: '):
|
|
||||||
fail("Retry again later")
|
|
||||||
cmd("Building and uploading archives", "./gradlewAll publish", cwd=kafka_dir, env=jdk8_env, shell=True)
|
|
||||||
cmd("Building and uploading archives", "mvn deploy -Pgpg-signing", cwd=streams_quickstart_dir, env=jdk8_env, shell=True)
|
|
||||||
|
|
||||||
release_notification_props = { 'release_version': release_version,
|
|
||||||
'rc': rc,
|
|
||||||
'rc_tag': rc_tag,
|
|
||||||
'rc_githash': rc_githash,
|
|
||||||
'dev_branch': dev_branch,
|
|
||||||
'docs_version': docs_release_version,
|
|
||||||
'apache_id': apache_id,
|
|
||||||
}
|
|
||||||
|
|
||||||
# TODO: Many of these suggested validation steps could be automated and would help pre-validate a lot of the stuff voters test
|
|
||||||
print("""
|
|
||||||
*******************************************************************************************************************************************************
|
|
||||||
Ok. We've built and staged everything for the %(rc_tag)s.
|
|
||||||
|
|
||||||
Now you should sanity check it before proceeding. All subsequent steps start making RC data public.
|
|
||||||
|
|
||||||
Some suggested steps:
|
|
||||||
|
|
||||||
* Grab the source archive and make sure it compiles: https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/kafka-%(release_version)s-src.tgz
|
|
||||||
* Grab one of the binary distros and run the quickstarts against them: https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/kafka_2.13-%(release_version)s.tgz
|
|
||||||
* Extract and verify one of the site docs jars: https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/kafka_2.13-%(release_version)s-site-docs.tgz
|
|
||||||
* Build a sample against jars in the staging repo: (TODO: Can we get a temporary URL before "closing" the staged artifacts?)
|
|
||||||
* Validate GPG signatures on at least one file:
|
|
||||||
wget https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/kafka-%(release_version)s-src.tgz &&
|
|
||||||
wget https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/kafka-%(release_version)s-src.tgz.asc &&
|
|
||||||
wget https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/kafka-%(release_version)s-src.tgz.md5 &&
|
|
||||||
wget https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/kafka-%(release_version)s-src.tgz.sha1 &&
|
|
||||||
wget https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/kafka-%(release_version)s-src.tgz.sha512 &&
|
|
||||||
gpg --verify kafka-%(release_version)s-src.tgz.asc kafka-%(release_version)s-src.tgz &&
|
|
||||||
gpg --print-md md5 kafka-%(release_version)s-src.tgz | diff - kafka-%(release_version)s-src.tgz.md5 &&
|
|
||||||
gpg --print-md sha1 kafka-%(release_version)s-src.tgz | diff - kafka-%(release_version)s-src.tgz.sha1 &&
|
|
||||||
gpg --print-md sha512 kafka-%(release_version)s-src.tgz | diff - kafka-%(release_version)s-src.tgz.sha512 &&
|
|
||||||
rm kafka-%(release_version)s-src.tgz* &&
|
|
||||||
echo "OK" || echo "Failed"
|
|
||||||
* Validate the javadocs look ok. They are at https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/javadoc/
|
|
||||||
|
|
||||||
*******************************************************************************************************************************************************
|
|
||||||
""" % release_notification_props)
|
|
||||||
if not user_ok("Have you sufficiently verified the release artifacts (y/n)?: "):
|
|
||||||
fail("Ok, giving up")
|
|
||||||
|
|
||||||
print("Next, we need to get the Maven artifacts we published into the staging repository.")
|
|
||||||
# TODO: Can we get this closed via a REST API since we already need to collect credentials for this repo?
|
|
||||||
print("Go to https://repository.apache.org/#stagingRepositories and hit 'Close' for the new repository that was created by uploading artifacts.")
|
|
||||||
print("There will be more than one repository entries created, please close all of them.")
|
|
||||||
print("In some cases, you may get errors on some repositories while closing them, see KAFKA-15033.")
|
|
||||||
print("If this is not the first RC, you need to 'Drop' the previous artifacts.")
|
|
||||||
print("Confirm the correct artifacts are visible at https://repository.apache.org/content/groups/staging/org/apache/kafka/")
|
|
||||||
if not user_ok("Have you successfully deployed the artifacts (y/n)?: "):
|
|
||||||
fail("Ok, giving up")
|
|
||||||
if not user_ok("Ok to push RC tag %s (y/n)?: " % rc_tag):
|
|
||||||
fail("Ok, giving up")
|
|
||||||
|
|
||||||
print(f"Pushing RC tag {rc_tag} to {PUSH_REMOTE_NAME}")
|
|
||||||
try:
|
|
||||||
push_command = f"git push {PUSH_REMOTE_NAME} {rc_tag}".split()
|
|
||||||
output = subprocess.check_output(push_command, stderr=subprocess.STDOUT)
|
|
||||||
print_output(output.decode('utf-8'))
|
|
||||||
if "error" in output.decode('utf-8'):
|
|
||||||
print("*********************************************")
|
|
||||||
print("*** ERROR when trying to perform git push ***")
|
|
||||||
print("*********************************************")
|
|
||||||
print(output)
|
|
||||||
print("")
|
|
||||||
print("Due the failure of git push, the program will exit here. Please note that: ")
|
|
||||||
print(f"1) You are still at branch {release_version}, not {starting_branch}")
|
|
||||||
print(f"2) Tag {rc_tag} is still present locally")
|
|
||||||
print("")
|
|
||||||
print(f"In order to restart the workflow, you will have to manually switch back to the original branch and delete the branch {release_version} and tag {rc_tag}")
|
|
||||||
sys.exit(1)
|
|
||||||
except Exception as e:
|
|
||||||
print(f"Failed when trying to git push {rc_tag}. Error: {e}")
|
|
||||||
print("You may need to clean up branches/tags yourself before retrying.")
|
|
||||||
print("Due the failure of git push, the program will exit here. Please note that: ")
|
|
||||||
print(f"1) You are still at branch {release_version}, not {starting_branch}")
|
|
||||||
print(f"2) Tag {rc_tag} is still present locally")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
# Move back to starting branch and clean out the temporary release branch (e.g. 1.0.0) we used to generate everything
|
|
||||||
cmd("Resetting repository working state", "git reset --hard HEAD && git checkout %s" % starting_branch, shell=True)
|
|
||||||
cmd("Deleting git branches %s" % release_version, "git branch -D %s" % release_version, shell=True)
|
|
||||||
|
|
||||||
|
|
||||||
email_contents = """
|
|
||||||
To: dev@kafka.apache.org, users@kafka.apache.org, kafka-clients@googlegroups.com
|
|
||||||
Subject: [VOTE] %(release_version)s RC%(rc)s
|
|
||||||
|
|
||||||
Hello Kafka users, developers and client-developers,
|
|
||||||
|
|
||||||
This is the first candidate for release of Apache Kafka %(release_version)s.
|
|
||||||
|
|
||||||
<DESCRIPTION OF MAJOR CHANGES, INCLUDE INDICATION OF MAJOR/MINOR RELEASE>
|
|
||||||
|
|
||||||
Release notes for the %(release_version)s release:
|
|
||||||
https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/RELEASE_NOTES.html
|
|
||||||
|
|
||||||
*** Please download, test and vote by <VOTING DEADLINE, e.g. Monday, March 28, 9am PT>
|
|
||||||
<THE RELEASE POLICY (https://www.apache.org/legal/release-policy.html#release-approval) REQUIRES VOTES TO BE OPEN FOR MINIMUM OF 3 DAYS THEREFORE VOTING DEADLINE SHOULD BE AT LEAST 72 HOURS FROM THE TIME THIS EMAIL IS SENT.>
|
|
||||||
|
|
||||||
Kafka's KEYS file containing PGP keys we use to sign the release:
|
|
||||||
https://kafka.apache.org/KEYS
|
|
||||||
|
|
||||||
* Release artifacts to be voted upon (source and binary):
|
|
||||||
https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/
|
|
||||||
|
|
||||||
<USE docker/README.md FOR STEPS TO CREATE RELEASE CANDIDATE DOCKER IMAGE>
|
|
||||||
* Docker release artifact to be voted upon(apache/kafka-native is supported from 3.8+ release.):
|
|
||||||
apache/kafka:%(rc_tag)s
|
|
||||||
apache/kafka-native:%(rc_tag)s
|
|
||||||
|
|
||||||
* Maven artifacts to be voted upon:
|
|
||||||
https://repository.apache.org/content/groups/staging/org/apache/kafka/
|
|
||||||
|
|
||||||
* Javadoc:
|
|
||||||
https://home.apache.org/~%(apache_id)s/kafka-%(rc_tag)s/javadoc/
|
|
||||||
|
|
||||||
* Tag to be voted upon (off %(dev_branch)s branch) is the %(release_version)s tag:
|
|
||||||
https://github.com/apache/kafka/releases/tag/%(rc_tag)s
|
|
||||||
|
|
||||||
* Documentation:
|
|
||||||
https://kafka.apache.org/%(docs_version)s/documentation.html
|
|
||||||
|
|
||||||
* Protocol:
|
|
||||||
https://kafka.apache.org/%(docs_version)s/protocol.html
|
|
||||||
|
|
||||||
* Successful Jenkins builds for the %(dev_branch)s branch:
|
|
||||||
Unit/integration tests: https://ci-builds.apache.org/job/Kafka/job/kafka/job/%(dev_branch)s/<BUILD NUMBER>/
|
|
||||||
System tests: https://jenkins.confluent.io/job/system-test-kafka/job/%(dev_branch)s/<BUILD_NUMBER>/
|
|
||||||
|
|
||||||
<USE docker/README.md FOR STEPS TO RUN DOCKER BUILD TEST GITHUB ACTIONS>
|
|
||||||
* Successful JVM based Apache Kafka Docker Image Github Actions Pipeline for %(dev_branch)s branch:
|
|
||||||
Docker Build Test Pipeline: https://github.com/apache/kafka/actions/runs/<RUN_NUMBER>
|
|
||||||
|
|
||||||
* Successful GraalVM based Native Apache Kafka Docker Image Github Actions Pipeline for %(dev_branch)s branch:
|
|
||||||
* NOTE: GraalVM based Native Apache Kafka Docker Image is supported from 3.8+ release.
|
|
||||||
Docker Build Test Pipeline: https://github.com/apache/kafka/actions/runs/<RUN_NUMBER>
|
|
||||||
|
|
||||||
/**************************************
|
|
||||||
|
|
||||||
Thanks,
|
|
||||||
<YOU>
|
|
||||||
""" % release_notification_props
|
|
||||||
|
|
||||||
print()
|
|
||||||
print()
|
|
||||||
print("*****************************************************************")
|
|
||||||
print()
|
|
||||||
print(email_contents)
|
|
||||||
print()
|
|
||||||
print("*****************************************************************")
|
|
||||||
print()
|
|
||||||
print("All artifacts should now be fully staged. Use the above template to send the announcement for the RC to the mailing list.")
|
|
||||||
print("IMPORTANT: Note that there are still some substitutions that need to be made in the template:")
|
|
||||||
print(" - Describe major changes in this release")
|
|
||||||
print(" - Deadline for voting, which should be at least 3 days after you send out the email")
|
|
||||||
print(" - Jenkins build numbers for successful unit & system test builds")
|
|
||||||
print(" - Fill in your name in the signature")
|
|
||||||
print(" - Finally, validate all the links before shipping!")
|
|
||||||
print("Note that all substitutions are annotated with <> around them.")
|
|
|
@ -0,0 +1,54 @@
|
||||||
|
Releasing Apache Kafka
|
||||||
|
======================
|
||||||
|
|
||||||
|
This directory contains the tools used to publish a release.
|
||||||
|
|
||||||
|
# Requirements
|
||||||
|
|
||||||
|
* python 3.12
|
||||||
|
* git
|
||||||
|
* gpg 2.4
|
||||||
|
* sftp
|
||||||
|
|
||||||
|
The full instructions for producing a release are available in
|
||||||
|
https://cwiki.apache.org/confluence/display/KAFKA/Release+Process.
|
||||||
|
|
||||||
|
|
||||||
|
# Setup
|
||||||
|
|
||||||
|
Create a virtualenv for python, activate it and install dependencies:
|
||||||
|
|
||||||
|
```
|
||||||
|
python3 -m venv .venv
|
||||||
|
source .venv/bin/activate
|
||||||
|
pip install -r requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
# Usage
|
||||||
|
|
||||||
|
To start a release, first activate the virtualenv, and then run
|
||||||
|
the release script.
|
||||||
|
|
||||||
|
```
|
||||||
|
source .venv/bin/activate
|
||||||
|
```
|
||||||
|
|
||||||
|
You'll need to setup `PUSH_REMOTE_NAME` to refer to
|
||||||
|
the git remote for `apache/kafka`.
|
||||||
|
|
||||||
|
```
|
||||||
|
export PUSH_REMOTE_NAME=<value>
|
||||||
|
```
|
||||||
|
|
||||||
|
It should be the value shown with this command:
|
||||||
|
|
||||||
|
```
|
||||||
|
git remote -v | grep -w 'github.com' | grep -w 'apache/kafka' | grep -w '(push)' | awk '{print $1}'
|
||||||
|
```
|
||||||
|
|
||||||
|
Then start the release script:
|
||||||
|
|
||||||
|
```
|
||||||
|
python release.py
|
||||||
|
```
|
||||||
|
|
|
@ -0,0 +1,135 @@
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Auxiliary function to interact with git.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
from runtime import repo_dir, execute, cmd
|
||||||
|
|
||||||
|
push_remote_name = os.environ.get("PUSH_REMOTE_NAME", "apache-github")
|
||||||
|
|
||||||
|
|
||||||
|
def __defaults(kwargs):
    """Fill in keyword-argument defaults shared by every git helper.

    Ensures commands run from the repository root unless the caller
    explicitly supplied a working directory.
    """
    kwargs.setdefault("cwd", repo_dir)
|
||||||
|
|
||||||
|
|
||||||
|
def has_staged_changes(**kwargs):
    """Check the index for staged changes via `git diff --cached`.

    NOTE(review): despite the `has_` name this returns None; presumably
    `execute` raises on a non-zero exit (i.e. when staged changes exist)
    — confirm against runtime.execute.
    """
    __defaults(kwargs)
    line = "git diff --cached --exit-code --quiet"
    execute(line, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def has_unstaged_changes(**kwargs):
    """Check the working tree for unstaged changes via `git diff`.

    NOTE(review): despite the `has_` name this returns None; presumably
    `execute` raises on a non-zero exit (i.e. when unstaged changes
    exist) — confirm against runtime.execute.
    """
    __defaults(kwargs)
    line = "git diff --exit-code --quiet"
    execute(line, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_tags(remote=push_remote_name, **kwargs):
    """Fetch all tags from the given remote (defaults to the push remote)."""
    __defaults(kwargs)
    line = f"git fetch --tags {remote}"
    cmd(f"Fetching tags from {remote}", line, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def tags(**kwargs):
    """Return the list of all local tag names."""
    __defaults(kwargs)
    output = execute("git tag", **kwargs)
    return output.split()
|
||||||
|
|
||||||
|
|
||||||
|
def tag_exists(tag, **kwargs):
    """Return True if the given tag exists locally."""
    __defaults(kwargs)
    known_tags = tags(**kwargs)
    return tag in known_tags
|
||||||
|
|
||||||
|
|
||||||
|
def delete_tag(tag, **kwargs):
    """Delete the given local tag, if it exists; a missing tag is a no-op."""
    __defaults(kwargs)
    if not tag_exists(tag, **kwargs):
        return
    execute(f"git tag -d {tag}", **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def current_branch(**kwargs):
    """Return the name of the currently checked-out branch."""
    __defaults(kwargs)
    line = "git rev-parse --abbrev-ref HEAD"
    return execute(line, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def reset_hard_head(**kwargs):
    """Discard all working-tree and index changes (hard reset to HEAD)."""
    __defaults(kwargs)
    line = "git reset --hard HEAD"
    cmd("Resetting branch", line, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def contributors(from_rev, to_rev, **kwargs):
    """Return the deduplicated, case-insensitively sorted contributor names
    (authors, co-authors and reviewers) between two revisions.

    Runs a shell pipeline, so `shell=True` is forced on.
    """
    __defaults(kwargs)
    kwargs["shell"] = True
    pipeline = (
        "git shortlog -sn --group=author --group=trailer:co-authored-by"
        f" --group=trailer:Reviewers --no-merges {from_rev}..{to_rev}"
        " | cut -f2 | sort --ignore-case | uniq"
    )
    names = execute(pipeline, **kwargs).split('\n')
    return [str(name) for name in names if name]
|
||||||
|
|
||||||
|
def branches(**kwargs):
    """Return the list of local branch names.

    The current branch is reported by `git branch` with a leading '*',
    which is stripped here.

    Bug fix: `**kwargs` were previously accepted but silently dropped, and
    the repository-root default cwd (`__defaults`) was not applied, unlike
    every other helper in this module.
    """
    __defaults(kwargs)
    output = execute('git branch', **kwargs)
    return [line.replace('*', ' ').strip() for line in output.splitlines()]
|
||||||
|
|
||||||
|
|
||||||
|
def branch_exists(branch, **kwargs):
    """Return True if the given local branch exists."""
    __defaults(kwargs)
    known_branches = branches(**kwargs)
    return branch in known_branches
|
||||||
|
|
||||||
|
|
||||||
|
def delete_branch(branch, **kwargs):
    """Force-delete the given local branch, if it exists; otherwise no-op."""
    __defaults(kwargs)
    if not branch_exists(branch, **kwargs):
        return
    cmd(f"Deleting git branch {branch}", f"git branch -D {branch}", **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def switch_branch(branch, **kwargs):
    """Check out the given existing branch."""
    __defaults(kwargs)
    line = f"git checkout {branch}"
    execute(line, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def create_branch(branch, ref, **kwargs):
    """Create and check out a new branch starting at the given ref."""
    __defaults(kwargs)
    line = f"git checkout -b {branch} {ref}"
    cmd(f"Creating git branch {branch} to track {ref}", line, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def clone(url, target, **kwargs):
    """Clone the repository at *url* into directory *target*."""
    __defaults(kwargs)
    line = f"git clone {url} {target}"
    execute(line, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def targz(rev, prefix, target, **kwargs):
    """Archive git revision *rev* into a gzipped tarball at *target*,
    prefixing every path inside the archive with *prefix*."""
    __defaults(kwargs)
    archive = f"git archive --format tar.gz --prefix {prefix} --output {target} {rev}"
    cmd(f"Creating targz {target} from git rev {rev}", archive, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def commit(message, **kwargs):
    """Commit all tracked changes with the given commit message."""
    __defaults(kwargs)
    # List form keeps a message containing spaces as a single argv element.
    line = ["git", "commit", "-a", "-m", message]
    cmd("Committing git changes", line, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def create_tag(tag, **kwargs):
    """Create an annotated tag, using the tag name itself as the message."""
    __defaults(kwargs)
    # List form keeps the arguments intact even if the tag contains spaces.
    line = ["git", "tag", "-a", tag, "-m", tag]
    cmd(f"Creating git tag {tag}", line, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def push_tag(tag, remote=push_remote_name, **kwargs):
    """Push the given tag to the given remote (defaults to the push remote).

    Bug fixes:
    - The progress message was a plain string, so it printed the literal
      text "{tag}"/"{remote}" instead of the interpolated values; it is
      now an f-string.
    - `**kwargs` (and the `__defaults` cwd) were not forwarded to `cmd`,
      unlike every other helper in this module.
    """
    __defaults(kwargs)
    cmd(f"Pushing tag {tag} to {remote}", f"git push {remote} {tag}", **kwargs)
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,92 @@
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Auxiliary functions to interact with GNU Privacy Guard (GPG).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import subprocess
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
from runtime import execute
|
||||||
|
|
||||||
|
|
||||||
|
def key_exists(key_id):
    """
    Checks whether the specified GPG key exists locally.

    Returns True when `gpg --list-keys` succeeds for the key, False when
    the lookup fails for any reason.
    """
    try:
        execute(f"gpg --list-keys {key_id}")
        return True
    except Exception:
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def agent_kill():
    """
    Tries to kill the GPG agent process.

    A missing `gpgconf` binary is tolerated (GPG may simply not be
    installed); any other FileNotFoundError is re-raised.
    """
    try:
        execute("gpgconf --kill gpg-agent")
    except FileNotFoundError as e:
        if e.filename != 'gpgconf':
            raise
|
||||||
|
|
||||||
|
|
||||||
|
def sign(key_id, passphrase, content, target):
    """
    Generates a GPG signature, using the given key and passphrase,
    of the specified content into the target path.

    The signature is detached and ASCII-armored; the passphrase is fed
    to gpg on fd 0 rather than the command line.
    """
    line = (
        f"gpg --passphrase-fd 0 -u {key_id} --armor"
        f" --output {target} --detach-sig {content}"
    )
    execute(line, input=passphrase.encode())
|
||||||
|
|
||||||
|
|
||||||
|
def verify(content, signature):
    """
    Verify the given GPG signature for the specified content.
    """
    line = f"gpg --verify {signature} {content}"
    execute(line)
|
||||||
|
|
||||||
|
|
||||||
|
def valid_passphrase(key_id, passphrase):
    """
    Checks whether the given passphrase is workable for the given key.

    Signs this source file into a throwaway temp directory and verifies
    the result. Returns True on success, False when GPG rejects the
    key/passphrase combination.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        content = __file__
        signature = tmpdir + '/sig.asc'
        # if the agent is running, the supplied passphrase may be ignored,
        # which would make a wrong passphrase appear valid
        agent_kill()
        try:
            sign(key_id, passphrase, content, signature)
            verify(content, signature)
        except subprocess.CalledProcessError:
            # Bug fix: the original evaluated the bare expression `False`
            # here (a no-op), so the function returned True even when
            # signing/verification failed.
            return False
        return True
|
||||||
|
|
||||||
|
|
||||||
|
def key_pass_id(key_id, passphrase):
    """
    Generates a deterministic identifier for the key and passphrase combination.

    The identifier is the SHA-512 hex digest of the UTF-8 encoded key id
    followed by the passphrase.
    """
    digest = hashlib.sha512(key_id.encode() + passphrase.encode())
    return digest.hexdigest()
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,177 @@
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Usage: python notes.py <version> > RELEASE_NOTES.html
|
||||||
|
|
||||||
|
Generates release notes for a release in HTML format containing
|
||||||
|
introductory information about the release with links to the
|
||||||
|
Kafka docs and the list of issues resolved in the release.
|
||||||
|
|
||||||
|
The script will fail if there are any unresolved issues still
|
||||||
|
marked with the target release. This script should be run after either
|
||||||
|
resolving all issues or moving outstanding issues to a later release.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from jira import JIRA
|
||||||
|
import itertools, sys
|
||||||
|
|
||||||
|
|
||||||
|
JIRA_BASE_URL = 'https://issues.apache.org/jira'
|
||||||
|
MAX_RESULTS = 100 # This is constrained for cloud instances so we need to fix this value
|
||||||
|
|
||||||
|
|
||||||
|
def query(query, **kwargs):
    """
    Fetch all issues matching the JQL query from JIRA and expand paginated results.
    Any additional keyword arguments are forwarded to jira.search_issues.
    """
    jira = JIRA(JIRA_BASE_URL)
    results = []
    start_at = 0
    page = None
    # A full page means there may be more results; keep paging until a
    # short (or empty) page comes back.
    while page is None or len(page) == MAX_RESULTS:
        page = jira.search_issues(query, startAt=start_at, maxResults=MAX_RESULTS, **kwargs)
        results.extend(page)
        start_at += len(page)
    return results
|
||||||
|
|
||||||
|
|
||||||
|
def filter_unresolved(issues):
    """
    Some resolutions, including a lack of resolution, indicate that
    the bug hasn't actually been addressed and we shouldn't even
    be able to create a release until they are fixed
    """
    UNRESOLVED_RESOLUTIONS = [
        None,
        "Unresolved",
        "Duplicate",
        "Invalid",
        "Not A Problem",
        "Not A Bug",
        "Won't Fix",
        "Incomplete",
        "Cannot Reproduce",
        "Later",
        "Works for Me",
        "Workaround",
        "Information Provided",
    ]

    def is_unresolved(issue):
        resolution = issue.fields.resolution
        # A missing resolution (None) matches the list directly; otherwise
        # compare the resolution's display name against the list.
        return resolution in UNRESOLVED_RESOLUTIONS or resolution.name in UNRESOLVED_RESOLUTIONS

    return [issue for issue in issues if is_unresolved(issue)]
|
||||||
|
|
||||||
|
|
||||||
|
def issue_link(issue):
    """
    Generates the browse URL for the specified JIRA issue.
    """
    return "/".join([JIRA_BASE_URL, "browse", issue.key])
|
||||||
|
|
||||||
|
|
||||||
|
def render(version, issues):
    """
    Renders the release notes HTML with the given version and issues.

    Issues are grouped by issue type (New Features first, then Improvements,
    then the remaining types ordered by their JIRA type id) and each group is
    emitted as an <h2> heading followed by a bulleted list of issue links.
    """
    base_url = "https://kafka.apache.org/"
    docs_path = "documentation.html"
    minor_version_dotless = "".join(version.split(".")[:2]) # i.e., 10 if version == 1.0.1

    def issue_type_key(issue):
        # Force New Features and Improvements to sort before everything else.
        if issue.fields.issuetype.name == 'New Feature':
            return -2
        if issue.fields.issuetype.name == 'Improvement':
            return -1
        return int(issue.fields.issuetype.id)

    # groupby requires its input sorted by the grouping key; sorting by
    # issue_type_key keeps issues of the same type adjacent.
    by_group = [(k, sorted(g, key=lambda issue: issue.id)) for k, g in itertools.groupby(sorted(issues, key=issue_type_key), lambda issue: issue.fields.issuetype.name)]
    parts = [f"""
<h1>Release Notes - Kafka - Version {version}</h1>
<p>
    Below is a summary of the JIRA issues addressed in the {version}
    release of Kafka. For full documentation of the release, a guide
    to get started, and information about the project, see the
    <a href="{base_url}">Kafka project site</a>.
</p>
<p>
    <b>Note about upgrades:</b> Please carefully review the
    <a href="{base_url}{minor_version_dotless}/{docs_path}#upgrade">
    upgrade documentation</a> for this release thoroughly before upgrading
    your cluster. The upgrade notes discuss any critical information about
    incompatibilities and breaking changes, performance changes, and any
    other changes that might impact your production deployment of Kafka.
</p>
<p>
    The documentation for the most recent release can be found at
    <a href="{base_url}{docs_path}">{base_url}{docs_path}</a>.
</p>
"""]
    for itype, type_issues in by_group:
        parts.append(f"<h2>{itype}</h2>")
        # BUG FIX: this previously appended "</ul>", opening every issue list
        # with a *closing* tag and producing malformed HTML.
        parts.append("<ul>")
        for issue in type_issues:
            link = issue_link(issue)
            key = issue.key
            summary = issue.fields.summary
            parts.append(f"<li>[<a href=\"{link}\">{key}</a>] - {summary}</li>")
        parts.append("</ul>")
    return "\n".join(parts)
|
||||||
|
|
||||||
|
|
||||||
|
def issue_str(issue):
    """
    Provides a human readable string representation for the given issue.

    The resolution is rendered via str() because it may be None or a JIRA
    resolution object; BUG FIX: formatting those directly with a width spec
    (f"{resolution:>20}") raises TypeError, since object.__format__ rejects
    non-empty format specs — and this function is called precisely for
    unresolved issues, where resolution is often None.
    """
    key = issue.key
    resolution = str(issue.fields.resolution)
    link = issue_link(issue)
    return f"{key:>15} {resolution:>20} {link}"
|
||||||
|
|
||||||
|
|
||||||
|
def generate(version):
    """
    Generates the release notes in HTML format for given version.

    Raises an error if there are unresolved issues or no issues
    at all for the specified version.
    """
    issues = query(f"project=KAFKA and fixVersion={version}")
    if not issues:
        raise Exception(f"Didn't find any issues for version {version}")

    unresolved = filter_unresolved(issues)
    if unresolved:
        issue_list = "\n".join(issue_str(issue) for issue in unresolved)
        raise Exception(f"""
Release {version} is not complete since there are unresolved or improperly
resolved issues tagged {version} as the fix version:

{issue_list}

Note that for some resolutions, you should simply remove the fix version
as they have not been truly fixed in this release.
""")

    return render(version, issues)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Expect exactly one argument: the release version to build notes for.
    if len(sys.argv) != 2:
        print("Usage: python notes.py <version>", file=sys.stderr)
        sys.exit(1)
    try:
        print(generate(sys.argv[1]))
    except Exception as e:
        # Surface the failure reason on stderr and exit non-zero.
        print(e, file=sys.stderr)
        sys.exit(1)
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,93 @@
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Access and manage name&value preferences, persisted in a local JSON file.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Preferences are persisted in a JSON file kept next to this script.
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
FILE = os.path.join(THIS_DIR, ".release-settings.json")


# In-memory preference store, loaded once at import time from FILE (if present).
prefs = {}
if os.path.exists(FILE):
    with open(FILE, "r") as prefs_fp:
        prefs = json.load(prefs_fp)
if len(prefs) > 0:
    print(f"Using preferences from: {FILE}")
|
||||||
|
|
||||||
|
|
||||||
|
def save():
    """
    Persist the in-memory preferences dictionary to FILE as JSON.
    """
    print(f"Saving preferences to {FILE}")
    with open(FILE, "w") as fp:
        json.dump(prefs, fp)
|
||||||
|
|
||||||
|
|
||||||
|
def set(name, val):
    """
    Store a preference and immediately persist the whole store.

    NOTE: intentionally shadows the builtin set() — this module is used as a
    namespaced API (preferences.set).
    """
    prefs[name] = val
    save()
|
||||||
|
|
||||||
|
|
||||||
|
def unset(name):
    """
    Remove a preference and persist the change.

    Raises KeyError if the preference does not exist.
    """
    prefs.pop(name)  # KeyError when absent, same as `del prefs[name]`
    save()
|
||||||
|
|
||||||
|
|
||||||
|
def get(name, supplier):
    """
    Retrieve preference if it already exists or delegate
    to the given value supplier and store the result.
    """
    cached = prefs.get(name)
    if cached:
        # A previously stored (truthy) value short-circuits the supplier.
        print(f"Assuming: {name} = {cached}")
        return cached
    value = supplier()
    set(name, value)
    return value
|
||||||
|
|
||||||
|
|
||||||
|
def once(name, action):
    """
    Performs the given action if and only if no record of it
    having been executed before exists in the preferences dictionary.
    """
    def perform():
        # Returning True records completion under the "did_<name>" key.
        action()
        return True
    get(f"did_{name}", perform)
|
||||||
|
|
||||||
|
|
||||||
|
def as_json():
    """
    Export all saved preferences as a JSON-formatted string.
    """
    # BUG FIX: the json.dumps() result was previously discarded, so this
    # function always returned None (its caller embeds the result in the
    # requirements instructions shown to the operator).
    return json.dumps(prefs, indent=2)
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,376 @@
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Utility for creating release candidates and promoting release candidates to a final release.
|
||||||
|
|
||||||
|
Usage: release.py [subcommand]
|
||||||
|
|
||||||
|
release.py stage
|
||||||
|
|
||||||
|
Builds and stages an RC for a release.
|
||||||
|
|
||||||
|
The utility is interactive; you will be prompted for basic release information and guided through the process.
|
||||||
|
|
||||||
|
This utility assumes you already have local a kafka git folder and that you
|
||||||
|
have added remotes corresponding to both:
|
||||||
|
(i) the github apache kafka mirror and
|
||||||
|
(ii) the apache kafka git repo.
|
||||||
|
|
||||||
|
release.py stage-docs [kafka-site-path]
|
||||||
|
|
||||||
|
Builds the documentation and stages it into an instance of the Kafka website repository.
|
||||||
|
|
||||||
|
This is meant to automate the integration between the main Kafka website repository (https://github.com/apache/kafka-site)
|
||||||
|
and the versioned documentation maintained in the main Kafka repository. This is useful both for local testing and
|
||||||
|
development of docs (follow the instructions here: https://cwiki.apache.org/confluence/display/KAFKA/Setup+Kafka+Website+on+Local+Apache+Server)
|
||||||
|
as well as for committers to deploy docs (run this script, then validate, commit, and push to kafka-site).
|
||||||
|
|
||||||
|
With no arguments this script assumes you have the Kafka repository and kafka-site repository checked out side-by-side, but
|
||||||
|
you can specify a full path to the kafka-site repository if this is not the case.
|
||||||
|
|
||||||
|
release.py release-email
|
||||||
|
|
||||||
|
Generates the email content/template for sending release announcement email.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
from runtime import (
|
||||||
|
append_fail_hook,
|
||||||
|
cmd,
|
||||||
|
confirm,
|
||||||
|
confirm_or_fail,
|
||||||
|
execute,
|
||||||
|
fail,
|
||||||
|
prompt,
|
||||||
|
repo_dir,
|
||||||
|
)
|
||||||
|
import git
|
||||||
|
import gpg
|
||||||
|
import notes
|
||||||
|
import preferences
|
||||||
|
import sftp
|
||||||
|
import templates
|
||||||
|
import textfiles
|
||||||
|
|
||||||
|
|
||||||
|
def get_jdk(version):
    """
    Get a subprocess environment configured for the specified JDK version.

    The JAVA_HOME path is prompted for once and cached in preferences; the
    java binary is then run to verify it reports the requested version. On a
    mismatch the cached path is dropped and the script fails.
    """
    msg = f"Enter the path for JAVA_HOME for a JDK{version} compiler (blank to use default JAVA_HOME): "
    key = f"jdk{version}"
    jdk_java_home = preferences.get(key, lambda: prompt(msg))
    jdk_env = dict(os.environ)
    if jdk_java_home.strip():
        jdk_env["JAVA_HOME"] = jdk_java_home
    else:
        jdk_java_home = jdk_env["JAVA_HOME"]
    java_version = execute(f"{jdk_java_home}/bin/java -version", env=jdk_env)
    # JDK 8 reports "1.8.0"; newer JDKs report e.g. 17.0.x or "17".
    # BUG FIX: '"{version}"' was a plain string literal missing the f prefix,
    # so the fallback check compared against the literal text {version}
    # instead of the quoted version number.
    if (version == 8 and "1.8.0" not in java_version) or \
       (f"{version}.0" not in java_version and f'"{version}"' not in java_version):
        preferences.unset(key)
        fail(f"JDK {version} is required")
    return jdk_env
|
||||||
|
|
||||||
|
|
||||||
|
def docs_version(version):
    """
    Detects the major/minor version and converts it to the dotless form used
    for docs paths on the website, e.g. 0.10.2.0-SNAPSHOT -> 0102, 1.0.1 -> 10.
    """
    parts = version.strip().split(".")
    # 1.0+ has 3 version components; pre-1.0 had 4, so keep one more there.
    significant = 3 if parts[0] == "0" else 2
    return "".join(parts[:significant])
|
||||||
|
|
||||||
|
|
||||||
|
def docs_release_version(version):
    """
    Convert a gradle.properties version into the latest release number that is
    valid for the current branch: a -SNAPSHOT version with bugfix N > 0 maps to
    the already-released N-1 version (0.10.2.1-SNAPSHOT -> 0.10.2.0), while a
    .0-SNAPSHOT or a plain release version is returned unchanged.
    """
    parts = version.strip().split(".")
    last = parts[-1]
    if "-SNAPSHOT" in last:
        bugfix = int(last.split("-")[0])
        if bugfix:
            # Previous bugfix release on this branch has already shipped.
            parts[-1] = str(bugfix - 1)
    return ".".join(parts)
|
||||||
|
|
||||||
|
|
||||||
|
def command_stage_docs():
    """
    Build the site docs and javadoc for the current release version and copy
    them into a local checkout of the kafka-site repository, ready for the
    operator to review, commit, and push. Exits the process when done.
    """
    kafka_site_repo_path = sys.argv[2] if len(sys.argv) > 2 else os.path.join(repo_dir, "..", "kafka-site")
    if not os.path.exists(kafka_site_repo_path) or not os.path.exists(os.path.join(kafka_site_repo_path, "powered-by.html")):
        # BUG FIX: the message was a plain string missing the f prefix, so it
        # printed the literal text "{kafka_site_repo_path}".
        fail(f"{kafka_site_repo_path} doesn't exist or does not appear to be the kafka-site repository")

    jdk17_env = get_jdk(17)

    # We explicitly override the version of the project that we normally get from gradle.properties since we want to be
    # able to run this from a release branch where we made some updates, but the build would show an incorrect SNAPSHOT
    # version due to already having bumped the bugfix version number.
    gradle_version_override = docs_release_version(project_version)

    cmd("Building docs", f"./gradlew -Pversion={gradle_version_override} clean siteDocsTar aggregatedJavadoc", cwd=repo_dir, env=jdk17_env)

    docs_tar = os.path.join(repo_dir, "core", "build", "distributions", f"kafka_2.13-{gradle_version_override}-site-docs.tgz")

    versioned_docs_path = os.path.join(kafka_site_repo_path, docs_version(project_version))
    if not os.path.exists(versioned_docs_path):
        os.mkdir(versioned_docs_path, 0o755)

    # The contents of the docs jar are site-docs/<docs dir>. We need to get rid of the site-docs prefix and dump everything
    # inside it into the docs version subdirectory in the kafka-site repo
    cmd("Extracting site-docs", f"tar xf {docs_tar} --strip-components 1", cwd=versioned_docs_path)

    javadocs_src_dir = os.path.join(repo_dir, "build", "docs", "javadoc")

    cmd("Copying javadocs", f"cp -R {javadocs_src_dir} {versioned_docs_path}")

    sys.exit(0)
|
||||||
|
|
||||||
|
|
||||||
|
def validate_release_version_parts(version):
    """
    Fail unless version consists of exactly three dot-separated integers.
    """
    try:
        parts = version.split(".")
        if len(parts) != 3:
            fail("Invalid release version, should have 3 version number components")
        for part in parts:
            int(part)  # each component must parse as a number
    except ValueError:
        fail("Invalid release version, should be a dotted version number")
|
||||||
|
|
||||||
|
|
||||||
|
def get_release_version_parts(version):
    """
    Validate the release version, then return its [major, minor, bugfix] parts.
    """
    validate_release_version_parts(version)
    parts = version.split(".")
    return parts
|
||||||
|
|
||||||
|
|
||||||
|
def validate_release_num(version):
    """
    Fail unless version is well-formed and corresponds to an existing git tag.
    """
    known_tags = git.tags()
    if version not in known_tags:
        fail("The specified version is not a valid release version number")
    validate_release_version_parts(version)
|
||||||
|
|
||||||
|
|
||||||
|
def command_release_announcement_email():
    """
    Generate and print the release announcement email template, deriving the
    current and previous release versions from git tags (with interactive
    confirmation/override). Exits the process when done.
    """
    release_tag_pattern = re.compile("^[0-9]+\\.[0-9]+\\.[0-9]+$")
    release_tags = sorted([t for t in git.tags() if re.match(release_tag_pattern, t)])
    release_version_num = release_tags[-1]
    if not confirm(f"Is the current release {release_version_num}?"):
        release_version_num = prompt("What is the current release version:")
        validate_release_num(release_version_num)
    previous_release_version_num = release_tags[-2]
    if not confirm(f"Is the previous release {previous_release_version_num}?"):
        previous_release_version_num = prompt("What is the previous release version:")
        validate_release_num(previous_release_version_num)

    def _as_tuple(v):
        # Numeric component tuple for ordering comparisons.
        return tuple(int(p) for p in v.split("."))

    # BUG FIX: comparing the version strings lexicographically mis-orders
    # multi-digit components (e.g. "0.10.0" < "0.9.0" is True as strings);
    # compare numeric tuples instead.
    if _as_tuple(release_version_num) < _as_tuple(previous_release_version_num):
        fail("Current release version number can't be less than previous release version number")
    contributors = git.contributors(previous_release_version_num, release_version_num)
    release_announcement_email = templates.release_announcement_email(release_version_num, contributors)
    print(templates.release_announcement_email_instructions(release_announcement_email))
    sys.exit(0)
|
||||||
|
|
||||||
|
|
||||||
|
# Resolve the version being released from gradle.properties; the project
# version looks like "3.9.0-SNAPSHOT" and the release version drops the suffix.
project_version = textfiles.prop(os.path.join(repo_dir, "gradle.properties"), 'version')
release_version = project_version.replace('-SNAPSHOT', '')
release_version_parts = get_release_version_parts(release_version)
# Development branch carries only major.minor, e.g. "3.9".
dev_branch = '.'.join(release_version_parts[:2])
docs_release_version = docs_version(release_version)

# Dispatch to subcommand
subcommand = sys.argv[1] if len(sys.argv) > 1 else None
if subcommand == 'stage-docs':
    command_stage_docs()
elif subcommand == 'release-email':
    command_release_announcement_email()
elif not (subcommand is None or subcommand == 'stage'):
    fail(f"Unknown subcommand: {subcommand}")
# else -> default subcommand stage
|
||||||
|
|
||||||
|
|
||||||
|
## Default 'stage' subcommand implementation isn't isolated to its own function yet for historical reasons
|
||||||
|
|
||||||
|
|
||||||
|
def verify_gpg_key():
    """
    Ensure the configured GPG signing key exists locally and that the
    passphrase read from ~/.gradle/gradle.properties unlocks it.
    """
    if not gpg.key_exists(gpg_key_id):
        fail(f"GPG key {gpg_key_id} not found")
    if not gpg.valid_passphrase(gpg_key_id, gpg_passphrase):
        # BUG FIX: corrected the "passprase" typo in the user-facing message.
        fail(f"GPG passphrase not valid for key {gpg_key_id}")
|
||||||
|
|
||||||
|
|
||||||
|
# One-time confirmation that the operator has read the requirements instructions.
preferences.once("verify_requirements", lambda: confirm_or_fail(templates.requirements_instructions(preferences.FILE, preferences.as_json())))
# Signing credentials are read from the user's global gradle properties.
global_gradle_props = os.path.expanduser("~/.gradle/gradle.properties")
gpg_key_id = textfiles.prop(global_gradle_props, "signing.keyId")
gpg_passphrase = textfiles.prop(global_gradle_props, "signing.password")
# key_pass_id changes whenever key or passphrase change, so the verification
# below re-runs automatically after a credentials update.
gpg_key_pass_id = gpg.key_pass_id(gpg_key_id, gpg_passphrase)
preferences.once(f"verify_gpg_key_{gpg_key_pass_id}", verify_gpg_key)

apache_id = preferences.get('apache_id', lambda: prompt("Please enter your apache-id: "))
# Kafka release builds need JDK 8 (artifacts) and JDK 17 (docs/javadoc).
jdk8_env = get_jdk(8)
jdk17_env = get_jdk(17)
|
||||||
|
|
||||||
|
|
||||||
|
def verify_prerequeisites():
    """
    Check that the local environment satisfies the release prerequisites
    (build tools, Apache connectivity, clean git tree); fail fast otherwise.
    Returns True so preferences.once() records a successful run.

    NOTE(review): the name carries a typo ("prerequeisites"); kept unchanged
    because callers reference this exact name.
    """
    print("Begin to check if you have met all the pre-requisites for the release process")

    def prereq(name, soft_check):
        # A check passes unless it returns False or raises.
        try:
            if soft_check() is False:
                fail(f"Pre-requisite not met: {name}")
            else:
                print(f"Pre-requisite met: {name}")
        except Exception as e:
            fail(f"Pre-requisite not met: {name}. Error: {e}")

    prereq('Apache Maven CLI (mvn) in PATH', lambda: "Apache Maven" in execute("mvn -v"))
    prereq('Apache sftp connection', lambda: sftp.test(apache_id))
    prereq("svn CLI in PATH", lambda: "svn" in execute("svn --version"))
    # BUG FIX: these two checks passed the raw has_*_changes() result, so a
    # *dirty* tree satisfied "no ... changes" while a clean tree failed;
    # negate the result so a clean tree is what passes. (Assumes
    # git.has_*_changes() returns True when changes exist — confirm in git.py.)
    prereq("Verifying that you have no unstaged git changes", lambda: not git.has_unstaged_changes())
    prereq("Verifying that you have no staged git changes", lambda: not git.has_staged_changes())
    return True
|
||||||
|
|
||||||
|
|
||||||
|
# Prerequisite verification only needs to succeed once per machine.
preferences.once(f"verify_prerequeisites", verify_prerequeisites)

# Validate that the release doesn't already exist
git.fetch_tags()
if release_version in git.tags():
    fail(f"Version {release_version} has already been tagged and released.")

rc = prompt(f"Release version {release_version} candidate number: ")
if not rc:
    fail("Need a release candidate number.")
try:
    # The candidate number must be an integer (e.g. "1" for rc1).
    int(rc)
except ValueError:
    fail(f"Invalid release candidate number: {rc}")
# RC tag, e.g. "3.9.0-rc1".
rc_tag = release_version + '-rc' + rc
|
||||||
|
|
||||||
|
starting_branch = git.current_branch()
def delete_gitrefs():
    """
    Fail hook: best-effort removal of the branch and tag created by this run
    so a failed attempt can be retried cleanly.
    """
    try:
        git.reset_hard_head()
        git.switch_branch(starting_branch)
        git.delete_branch(release_version)
        git.delete_tag(rc_tag)
    except subprocess.CalledProcessError:
        print("Failed when trying to clean up git references added by this script. You may need to clean up branches/tags yourself before retrying.")
        print("Expected git branch: " + release_version)
        print("Expected git tag: " + rc_tag)

# Work on a temporary release branch cut from the remote dev branch.
git.create_branch(release_version, f"{git.push_remote_name}/{dev_branch}")
append_fail_hook("Delete gitrefs", delete_gitrefs)
print("Updating version numbers")
textfiles.replace(f"{repo_dir}/gradle.properties", "version", f"version={release_version}")
textfiles.replace(f"{repo_dir}/tests/kafkatest/__init__.py", "__version__", f"__version__ = '{release_version}'")
print("Updating streams quickstart pom")
textfiles.replace(f"{repo_dir}/streams/quickstart/pom.xml", "-SNAPSHOT", "", regex=True)
print("Updating streams quickstart java pom")
textfiles.replace(f"{repo_dir}/streams/quickstart/java/pom.xml", "-SNAPSHOT", "", regex=True)
print("Updating streams quickstart archetype pom")
textfiles.replace(f"{repo_dir}/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml", "-SNAPSHOT", "", regex=True)
print("Updating ducktape version.py")
textfiles.replace(f"{repo_dir}/tests/kafkatest/version.py", "^DEV_VERSION =.*",
    f"DEV_VERSION = KafkaVersion(\"{release_version}-SNAPSHOT\")", regex=True)
print("Updating docs templateData.js")
textfiles.replace(f"{repo_dir}/docs/js/templateData.js", "-SNAPSHOT", "", regex=True)
# Commit the version bump on the temporary branch, tag the RC, and return to
# the branch the operator started from.
git.commit(f"Bump version to {release_version}")
git.create_tag(rc_tag)
git.switch_branch(starting_branch)
|
||||||
|
|
||||||
|
# Note that we don't use tempfile here because mkdtemp causes problems with sftp and being able to determine the absolute path to a file.
# Instead we rely on a fixed path
work_dir = os.path.join(repo_dir, ".release_work_dir")
clean_up_work_dir = lambda: cmd("Cleaning up work directory", f"rm -rf {work_dir}")
if os.path.exists(work_dir):
    # Remove leftovers from a previous (failed) run before starting fresh.
    clean_up_work_dir()
os.makedirs(work_dir)
append_fail_hook("Clean up work dir", clean_up_work_dir)
print("Temporary build working directory:", work_dir)
kafka_dir = os.path.join(work_dir, 'kafka')
artifact_name = "kafka-" + rc_tag
cmd("Creating staging area for release artifacts", "mkdir " + artifact_name, cwd=work_dir)
artifacts_dir = os.path.join(work_dir, artifact_name)
# Build from a fresh clone of the local repo, checked out at the RC tag.
git.clone(repo_dir, 'kafka', cwd=work_dir)
git.create_branch(release_version, rc_tag, cwd=kafka_dir)
current_year = datetime.datetime.now().year
cmd("Verifying the correct year in NOTICE", f"grep {current_year} NOTICE", cwd=kafka_dir)

print("Generating release notes")
try:
    html = notes.generate(release_version)
    release_notes_path = os.path.join(artifacts_dir, "RELEASE_NOTES.html")
    textfiles.write(release_notes_path, html)
except Exception as e:
    # notes.generate raises when the fix version still has unresolved issues.
    fail(f"Failed to generate release notes: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
# Source tarball straight from the RC tag, then binary artifacts and docs.
git.targz(rc_tag, f"kafka-{release_version}-src/", f"{artifacts_dir}/kafka-{release_version}-src.tgz")
cmd("Building artifacts", "./gradlew clean && ./gradlewAll releaseTarGz", cwd=kafka_dir, env=jdk8_env, shell=True)
cmd("Copying artifacts", f"cp {kafka_dir}/core/build/distributions/* {artifacts_dir}", shell=True)
cmd("Building docs", "./gradlew clean aggregatedJavadoc", cwd=kafka_dir, env=jdk17_env)
cmd("Copying docs", f"cp -R {kafka_dir}/build/docs/javadoc {artifacts_dir}")

# Sign every artifact and generate md5/sha1/sha512 digests alongside each one.
for filename in os.listdir(artifacts_dir):
    full_path = os.path.join(artifacts_dir, filename)
    if not os.path.isfile(full_path):
        continue
    sig_full_path = full_path + ".asc"
    gpg.sign(gpg_key_id, gpg_passphrase, full_path, sig_full_path)
    gpg.verify(full_path, sig_full_path)
    # Note that for verification, we need to make sure only the filename is used with --print-md because the command line
    # argument for the file is included in the output and verification uses a simple diff that will break if an absolute path
    # is used.
    dir, fname = os.path.split(full_path)
    cmd(f"Generating MD5 for {full_path}", f"gpg --print-md md5 {fname} > {fname}.md5 ", shell=True, cwd=dir)
    cmd(f"Generating SHA1 for {full_path}", f"gpg --print-md sha1 {fname} > {fname}.sha1 ", shell=True, cwd=dir)
    cmd(f"Generating SHA512 for {full_path}", f"gpg --print-md sha512 {fname} > {fname}.sha512", shell=True, cwd=dir)

cmd("Listing artifacts to be uploaded:", f"ls -R {artifacts_dir}")
cmd("Zipping artifacts", f"tar -czf {artifact_name}.tar.gz {artifact_name}", cwd=work_dir)
sftp.upload_artifacts(apache_id, artifacts_dir)

# Maven publication uses the credentials from ~/.gradle/gradle.properties.
confirm_or_fail("Going to build and upload mvn artifacts based on these settings:\n" + textfiles.read(global_gradle_props) + '\nOK?')
cmd("Building and uploading archives", "./gradlewAll publish", cwd=kafka_dir, env=jdk8_env, shell=True)
cmd("Building and uploading archives", "mvn deploy -Pgpg-signing", cwd=os.path.join(kafka_dir, "streams/quickstart"), env=jdk8_env, shell=True)

# TODO: Many of these suggested validation steps could be automated
# and would help pre-validate a lot of the stuff voters test
print(templates.sanity_check_instructions(release_version, rc_tag, apache_id))
confirm_or_fail("Have you sufficiently verified the release artifacts?")

# TODO: Can we close the staging repository via a REST API since we
# already need to collect credentials for this repo?
print(templates.deploy_instructions())
confirm_or_fail("Have you successfully deployed the artifacts?")
confirm_or_fail(f"Ok to push RC tag {rc_tag}?")
git.push_tag(rc_tag)

# Move back to starting branch and clean out the temporary release branch (e.g. 1.0.0) we used to generate everything
git.reset_hard_head()
git.switch_branch(starting_branch)
git.delete_branch(release_version)

rc_vote_email_text = templates.rc_vote_email_text(release_version, rc, rc_tag, dev_branch, docs_release_version, apache_id)
print(templates.rc_email_instructions(rc_vote_email_text))
|
||||||
|
|
|
@ -0,0 +1,19 @@
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
jira==3.8.0
|
||||||
|
jproperties==2.1.1
|
|
@ -0,0 +1,148 @@
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Auxiliary functions to manage the release script runtime
|
||||||
|
and launch external utilities.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
import templates
|
||||||
|
|
||||||
|
|
||||||
|
this_script_dir = os.path.abspath(os.path.dirname(__file__))
# Kafka checkout root: overridable via KAFKA_HOME, defaults to the parent of
# the directory containing this script.
repo_dir = os.environ.get("KAFKA_HOME", os.path.abspath(this_script_dir + "/.."))

# (name, fn) pairs executed, in registration order, when fail() is invoked.
fail_hooks = []
# Guards against fail() being re-entered from within a fail hook.
failing = False
|
||||||
|
|
||||||
|
|
||||||
|
def append_fail_hook(name, hook_fn):
    """
    Register a fail hook function, to run in case fail() is called.

    The name is used to identify the hook when reporting an exception it raises.
    """
    fail_hooks.append((name, hook_fn))
|
||||||
|
|
||||||
|
|
||||||
|
def fail(msg=""):
    """
    Abort the script: run all registered fail hooks, print the message, and
    exit with status 1. A hook that itself calls fail() triggers a hard
    exception instead of recursing.
    """
    global failing
    if failing:
        raise Exception("Recursive fail invocation")
    failing = True

    for hook_name, hook in fail_hooks:
        try:
            hook()
        except Exception as e:
            # Hooks are best-effort cleanup; report and keep going.
            print(f"Exception caught in fail hook {hook_name}: {e}")

    print(f"FAILURE: {msg}")
    sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def prompt(msg):
    """
    Ask the user for a line of input and return it with surrounding
    whitespace removed.
    """
    return input(msg).strip()
|
||||||
|
|
||||||
|
|
||||||
|
def confirm(msg):
    """
    Ask a yes/no question until the user answers 'y' or 'n' (case-insensitive);
    return True for 'y'.
    """
    answer = ""
    while answer not in ("y", "n"):
        answer = prompt(msg + " (y/n): ").lower()
    return answer == "y"
|
||||||
|
|
||||||
|
|
||||||
|
def confirm_or_fail(msg):
    """
    Ask the user for confirmation; abort the script on a negative answer.
    """
    if confirm(msg):
        return
    fail("Ok, giving up")
|
||||||
|
|
||||||
|
|
||||||
|
def execute(cmd, *args, **kwargs):
    """
    Execute an external command and return its decoded stdout.

    A string command is tokenized with str.split() unless shell mode is
    requested via kwargs. A string "input" kwarg is encoded to bytes for
    subprocess. stderr is merged into stdout so error output is captured.

    Raises subprocess.CalledProcessError if the command exits non-zero.
    """
    if "shell" not in kwargs and isinstance(cmd, str):
        cmd = cmd.split()
    if "input" in kwargs and isinstance(kwargs["input"], str):
        kwargs["input"] = kwargs["input"].encode()
    # Was `kwargs["stderr"] = stderr=subprocess.STDOUT`: a chained
    # assignment that also bound a useless local named `stderr`.
    kwargs["stderr"] = subprocess.STDOUT
    output = subprocess.check_output(cmd, *args, **kwargs)
    return output.decode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def _prefix(prefix_str, value_str):
    """Prepend `prefix_str` to every line of `value_str`."""
    lines = value_str.split("\n")
    return "\n".join(prefix_str + line for line in lines)
|
||||||
|
|
||||||
|
|
||||||
|
def cmd(action, cmd_arg, *args, **kwargs):
    """
    Execute an external command, printing the action, the command line
    and its output. This should be preferred over execute() when
    returning the output is not necessary, as the user will be given
    the option of retrying in case of a failure.

    Special kwargs:
      stdin         -- a string value is spooled to a temporary file and
                       fed to the command's standard input.
      allow_failure -- if truthy, return silently on failure instead of
                       offering a retry.
    """
    stdin_file = None
    stdin_log = ""
    if "stdin" in kwargs and isinstance(kwargs["stdin"], str):
        stdin_str = kwargs["stdin"]
        stdin_log = "\n" + _prefix("< ", stdin_str)
        stdin_file = tempfile.TemporaryFile()
        stdin_file.write(stdin_str.encode("utf-8"))
        kwargs["stdin"] = stdin_file

    print(f"{action}\n$ {cmd_arg}{stdin_log}")

    if isinstance(cmd_arg, str) and not kwargs.get("shell", False):
        cmd_arg = cmd_arg.split()

    allow_failure = kwargs.pop("allow_failure", False)

    retry = True
    while retry:
        # Rewind the spooled stdin before every attempt. Previously the
        # file was only rewound once after writing, so a retried command
        # would read an empty stdin and silently misbehave.
        if stdin_file is not None:
            stdin_file.seek(0)
        try:
            output = execute(cmd_arg, *args, stderr=subprocess.STDOUT, **kwargs)
            print(_prefix("> ", output.strip()))
            return
        except subprocess.CalledProcessError as e:
            print(e.output.decode("utf-8"))

        if allow_failure:
            return

        retry = confirm("Retry?")

    print(templates.cmd_failed())
    fail("")
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,59 @@
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Auxiliary functions to interact with sftp(1).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from runtime import (
|
||||||
|
cmd,
|
||||||
|
confirm_or_fail,
|
||||||
|
execute,
|
||||||
|
fail,
|
||||||
|
)
|
||||||
|
|
||||||
|
REMOTE_DIR = "public_html"
|
||||||
|
|
||||||
|
|
||||||
|
def mkdirp(apache_id, dir):
    """Create `dir` in the Apache home directory via sftp, tolerating failure (e.g. it already exists)."""
    description = f"Creating '{dir}' in your Apache home directory"
    sftp_command = f"sftp -b - {apache_id}@home.apache.org"
    batch_script = f"mkdir {dir}\n"
    cmd(description, sftp_command, stdin=batch_script, allow_failure=True)
|
||||||
|
|
||||||
|
|
||||||
|
def upload(apache_id, destination, dir):
    """Recursively upload `dir` into `destination` in the Apache home directory via sftp."""
    description = f"Uploading '{dir}' under {REMOTE_DIR} in your Apache home directory, this may take a while."
    sftp_command = f"sftp -b - {apache_id}@home.apache.org"
    batch_script = f"cd {destination}\nput -r {dir}\n"
    cmd(description, sftp_command, stdin=batch_script)
|
||||||
|
|
||||||
|
|
||||||
|
def upload_artifacts(apache_id, dir):
    """Upload `dir` under public_html in the Apache home directory, then ask the user to verify."""
    mkdirp(apache_id, REMOTE_DIR)
    upload(apache_id, REMOTE_DIR, dir)
    home_url = f"https://home.apache.org/~{apache_id}/"
    confirm_or_fail(f"Are the artifacts present in your Apache home: {home_url} ?")
|
||||||
|
|
||||||
|
|
||||||
|
def test(apache_id):
    """
    Test the ability to establish an sftp session.
    """
    # "bye" ends the session immediately; a failure to connect raises.
    execute(f"sftp {apache_id}@home.apache.org", input="bye")
|
||||||
|
|
|
@ -0,0 +1,292 @@
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Text templates for long messages with instructions for the user.
|
||||||
|
We keep these in this separate file to avoid cluttering the script.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def requirements_instructions(prefs_file, prefs):
    """Checklist of release prerequisites, shown to the user for confirmation before starting."""
    # NOTE(review): the closing "all of of these" wording is preserved from the
    # original prompt text; it is runtime output, not a comment.
    return f"""
Requirements:
1. Updated docs to reference the new release version where appropriate.
2. JDK8 and JDK17 compilers and libraries
3. Your Apache ID, already configured with SSH keys on id.apache.org and SSH keys available in this shell session
4. All issues in the target release resolved with valid resolutions (if not, this script will report the problematic JIRAs)
5. A GPG key used for signing the release. This key should have been added to public Apache servers and the KEYS file on the Kafka site
6. Standard toolset installed -- git, gpg, gradle, sftp, etc.
7. ~/.gradle/gradle.properties configured with the signing properties described in the release process wiki, i.e.

mavenUrl=https://repository.apache.org/service/local/staging/deploy/maven2
mavenUsername=your-apache-id
mavenPassword=your-apache-passwd
signing.keyId=your-gpgkeyId
signing.password=your-gpg-passphrase
signing.secretKeyRingFile=/Users/your-id/.gnupg/secring.gpg (if you are using GPG 2.1 and beyond, then this file will no longer exist anymore, and you have to manually create it from the new private key directory with "gpg --export-secret-keys -o ~/.gnupg/secring.gpg")
8. ~/.m2/settings.xml configured for pgp signing and uploading to apache release maven, i.e.,
<server>
<id>apache.releases.https</id>
<username>your-apache-id</username>
<password>your-apache-passwd</password>
</server>
<server>
<id>your-gpgkeyId</id>
<passphrase>your-gpg-passphrase</passphrase>
</server>
<profile>
<id>gpg-signing</id>
<properties>
<gpg.keyname>your-gpgkeyId</gpg.keyname>
<gpg.passphraseServerId>your-gpgkeyId</gpg.passphraseServerId>
</properties>
</profile>
9. You may also need to update some gnupgp configs:
~/.gnupg/gpg-agent.conf
allow-loopback-pinentry

~/.gnupg/gpg.conf
use-agent
pinentry-mode loopback

echo RELOADAGENT | gpg-connect-agent

If any of these are missing, see https://cwiki.apache.org/confluence/display/KAFKA/Release+Process for instructions on setting them up.

Some of these may be used from these previous settings loaded from {prefs_file}:
{prefs}

Do you have all of of these setup?"""
|
||||||
|
|
||||||
|
|
||||||
|
def release_announcement_email(release_version, contributors):
    """Template for the [ANNOUNCE] email sent once the release is final."""
    # Render the contributor list once; both the count and the names appear in the body.
    contributors_str = ", ".join(contributors)
    num_contributors = len(contributors)
    return f"""
To: announce@apache.org, dev@kafka.apache.org, users@kafka.apache.org, kafka-clients@googlegroups.com
Subject: [ANNOUNCE] Apache Kafka {release_version}

The Apache Kafka community is pleased to announce the release for Apache Kafka {release_version}

<DETAILS OF THE CHANGES>

All of the changes in this release can be found in the release notes:
https://www.apache.org/dist/kafka/{release_version}/RELEASE_NOTES.html


You can download the source and binary release (Scala <VERSIONS>) from:
https://kafka.apache.org/downloads#{release_version}

---------------------------------------------------------------------------------------------------


Apache Kafka is a distributed streaming platform with four core APIs:


** The Producer API allows an application to publish a stream of records to
one or more Kafka topics.

** The Consumer API allows an application to subscribe to one or more
topics and process the stream of records produced to them.

** The Streams API allows an application to act as a stream processor,
consuming an input stream from one or more topics and producing an
output stream to one or more output topics, effectively transforming the
input streams to output streams.

** The Connector API allows building and running reusable producers or
consumers that connect Kafka topics to existing applications or data
systems. For example, a connector to a relational database might
capture every change to a table.


With these APIs, Kafka can be used for two broad classes of application:

** Building real-time streaming data pipelines that reliably get data
between systems or applications.

** Building real-time streaming applications that transform or react
to the streams of data.


Apache Kafka is in use at large and small companies worldwide, including
Capital One, Goldman Sachs, ING, LinkedIn, Netflix, Pinterest, Rabobank,
Target, The New York Times, Uber, Yelp, and Zalando, among others.

A big thank you for the following {num_contributors} contributors to this release! (Please report an unintended omission)

{contributors_str}

We welcome your help and feedback. For more information on how to
report problems, and to get involved, visit the project website at
https://kafka.apache.org/

Thank you!


Regards,

<YOU>
Release Manager for Apache Kafka {release_version}"""
|
||||||
|
|
||||||
|
|
||||||
|
def deploy_instructions():
    """Instructions for closing the Maven staging repository after artifacts are uploaded."""
    return """
Next, we need to get the Maven artifacts we published into the staging repository.
Go to https://repository.apache.org/#stagingRepositories and hit 'Close' for the new repository that was created by uploading artifacts.
There will be more than one repository entries created, please close all of them.
In some cases, you may get errors on some repositories while closing them, see KAFKA-15033.
If this is not the first RC, you need to 'Drop' the previous artifacts.
Confirm the correct artifacts are visible at https://repository.apache.org/content/groups/staging/org/apache/kafka/
"""
|
||||||
|
|
||||||
|
|
||||||
|
def sanity_check_instructions(release_version, rc_tag, apache_id):
    """Checklist shown after staging an RC, before any step that makes RC data public."""
    return f"""
*******************************************************************************************************************************************************
Ok. We've built and staged everything for the {rc_tag}.

Now you should sanity check it before proceeding. All subsequent steps start making RC data public.

Some suggested steps:

* Grab the source archive and make sure it compiles: https://home.apache.org/~{apache_id}/kafka-{rc_tag}/kafka-{release_version}-src.tgz
* Grab one of the binary distros and run the quickstarts against them: https://home.apache.org/~{apache_id}/kafka-{rc_tag}/kafka_2.13-{release_version}.tgz
* Extract and verify one of the site docs jars: https://home.apache.org/~{apache_id}/kafka-{rc_tag}/kafka_2.13-{release_version}-site-docs.tgz
* Build a sample against jars in the staging repo: (TODO: Can we get a temporary URL before "closing" the staged artifacts?)
* Validate GPG signatures on at least one file:
wget https://home.apache.org/~{apache_id}/kafka-{rc_tag}/kafka-{release_version}-src.tgz &&
wget https://home.apache.org/~{apache_id}/kafka-{rc_tag}/kafka-{release_version}-src.tgz.asc &&
wget https://home.apache.org/~{apache_id}/kafka-{rc_tag}/kafka-{release_version}-src.tgz.md5 &&
wget https://home.apache.org/~{apache_id}/kafka-{rc_tag}/kafka-{release_version}-src.tgz.sha1 &&
wget https://home.apache.org/~{apache_id}/kafka-{rc_tag}/kafka-{release_version}-src.tgz.sha512 &&
gpg --verify kafka-{release_version}-src.tgz.asc kafka-{release_version}-src.tgz &&
gpg --print-md md5 kafka-{release_version}-src.tgz | diff - kafka-{release_version}-src.tgz.md5 &&
gpg --print-md sha1 kafka-{release_version}-src.tgz | diff - kafka-{release_version}-src.tgz.sha1 &&
gpg --print-md sha512 kafka-{release_version}-src.tgz | diff - kafka-{release_version}-src.tgz.sha512 &&
rm kafka-{release_version}-src.tgz* &&
echo "OK" || echo "Failed"
* Validate the javadocs look ok. They are at https://home.apache.org/~{apache_id}/kafka-{rc_tag}/javadoc/

*******************************************************************************************************************************************************
"""
|
||||||
|
|
||||||
|
|
||||||
|
def rc_vote_email_text(release_version, rc, rc_tag, dev_branch, docs_version, apache_id):
    """Template for the [VOTE] email announcing a release candidate."""
    return f"""
To: dev@kafka.apache.org, users@kafka.apache.org, kafka-clients@googlegroups.com
Subject: [VOTE] {release_version} RC{rc}

Hello Kafka users, developers and client-developers,

This is the first candidate for release of Apache Kafka {release_version}.

<DESCRIPTION OF MAJOR CHANGES, INCLUDE INDICATION OF MAJOR/MINOR RELEASE>

Release notes for the {release_version} release:
https://home.apache.org/~{apache_id}/kafka-{rc_tag}/RELEASE_NOTES.html

*** Please download, test and vote by <VOTING DEADLINE, e.g. Monday, March 28, 9am PT>
<THE RELEASE POLICY (https://www.apache.org/legal/release-policy.html#release-approval) REQUIRES VOTES TO BE OPEN FOR MINIMUM OF 3 DAYS THEREFORE VOTING DEADLINE SHOULD BE AT LEAST 72 HOURS FROM THE TIME THIS EMAIL IS SENT.>

Kafka's KEYS file containing PGP keys we use to sign the release:
https://kafka.apache.org/KEYS

* Release artifacts to be voted upon (source and binary):
https://home.apache.org/~{apache_id}/kafka-{rc_tag}/

<USE docker/README.md FOR STEPS TO CREATE RELEASE CANDIDATE DOCKER IMAGE>
* Docker release artifact to be voted upon(apache/kafka-native is supported from 3.8+ release.):
apache/kafka:{rc_tag}

* Maven artifacts to be voted upon:
https://repository.apache.org/content/groups/staging/org/apache/kafka/

* Javadoc:
https://home.apache.org/~{apache_id}/kafka-{rc_tag}/javadoc/

* Tag to be voted upon (off {dev_branch} branch) is the {release_version} tag:
https://github.com/apache/kafka/releases/tag/{rc_tag}

* Documentation:
https://kafka.apache.org/{docs_version}/documentation.html

* Protocol:
https://kafka.apache.org/{docs_version}/protocol.html

* Successful Jenkins builds for the {dev_branch} branch:
Unit/integration tests: https://ci-builds.apache.org/job/Kafka/job/kafka/job/{dev_branch}/<BUILD NUMBER>/
System tests: https://jenkins.confluent.io/job/system-test-kafka/job/{dev_branch}/<BUILD_NUMBER>/

<USE docker/README.md FOR STEPS TO RUN DOCKER BUILD TEST GITHUB ACTIONS>
* Successful Docker Image Github Actions Pipeline for {dev_branch} branch:
Docker Build Test Pipeline: https://github.com/apache/kafka/actions/runs/<RUN_NUMBER>

/**************************************

Thanks,
<YOU>
"""
|
||||||
|
|
||||||
|
|
||||||
|
def rc_email_instructions(rc_email_text):
    """Wrap the RC vote email in instructions reminding the user which placeholders to fill in."""
    return f"""
*****************************************************************

{rc_email_text}

*****************************************************************

All artifacts should now be fully staged. Use the above template to send the announcement for the RC to the mailing list.
IMPORTANT: Note that there are still some substitutions that need to be made in the template:
- Describe major changes in this release
- Deadline for voting, which should be at least 3 days after you send out the email
- Jenkins build numbers for successful unit & system test builds
- Fill in your name in the signature
- Finally, validate all the links before shipping!
Note that all substitutions are annotated with <> around them.
"""
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_failed():
    """Banner printed by cmd() when a command has failed and cleanup hooks are about to run."""
    return """
*************************************************
*** First command failure occurred here. ***
*** Will now try to clean up working state. ***
*************************************************
"""
|
||||||
|
|
||||||
|
|
||||||
|
def release_announcement_email_instructions(release_announcement_email):
    """Wrap the release announcement email in instructions reminding the user which placeholders to fill in."""
    return f"""
*****************************************************************

{release_announcement_email}

*****************************************************************

Use the above template to send the announcement for the release to the mailing list.
IMPORTANT: Note that there are still some substitutions that need to be made in the template:
- Describe major changes in this release
- Scala versions
- Fill in your name in the signature
- You will need to use your apache email address to send out the email (otherwise, it won't be delivered to announce@apache.org)
- Finally, validate all the links before shipping!
Note that all substitutions are annotated with <> around them.
"""
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,75 @@
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Auxiliary functions to access and manipulate text files.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
from jproperties import Properties
|
||||||
|
|
||||||
|
|
||||||
|
def read(file_path):
    """Return the entire contents of the text file at `file_path`."""
    with open(file_path) as handle:
        return handle.read()
|
||||||
|
|
||||||
|
|
||||||
|
def write(file_path, content):
    """Overwrite the text file at `file_path` with `content`."""
    with open(file_path, "w") as handle:
        handle.write(content)
|
||||||
|
|
||||||
|
|
||||||
|
def props(properties_text):
    """
    Parse .properties text into a jproperties.Properties mapping of
    keys to values.
    """
    # Local renamed from `props`, which shadowed this function's own name.
    parsed = Properties()
    parsed.load(properties_text, "utf-8")
    return parsed
|
||||||
|
|
||||||
|
|
||||||
|
def prop(filepath, propname):
    """
    Return the value for `propname` in the .properties file at `filepath`.
    """
    # Properties lookup yields a (value, metadata) pair; discard metadata.
    value, _meta = props(read(filepath))[propname]
    return value
|
||||||
|
|
||||||
|
|
||||||
|
def replace(path, pattern, replacement, **kwargs):
    """
    Rewrite a text file line by line.

    With regex=True, every occurrence of `pattern` on each line is
    substituted with `replacement` (re.sub semantics). Otherwise, any
    line that *starts with* `pattern` is replaced wholesale by
    `replacement` — the previous docstring ("replace all occurrences")
    did not match this whole-line behavior.
    """
    is_regex = kwargs.get("regex", False)
    updated = []
    with open(path, "r") as f:
        for line in f:
            modified = line
            if is_regex:
                modified = re.sub(pattern, replacement, line)
            elif line.startswith(pattern):
                modified = replacement + "\n"

            updated.append(modified)

    with open(path, "w") as f:
        # Single batched write instead of a per-line loop.
        f.writelines(updated)
|
||||||
|
|
||||||
|
|
117
release_notes.py
117
release_notes.py
|
@ -1,117 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
||||||
# contributor license agreements. See the NOTICE file distributed with
|
|
||||||
# this work for additional information regarding copyright ownership.
|
|
||||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
||||||
# (the "License"); you may not use this file except in compliance with
|
|
||||||
# the License. You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
"""Usage: release_notes.py <version> > RELEASE_NOTES.html
|
|
||||||
|
|
||||||
Generates release notes for a Kafka release by generating an HTML doc containing some introductory information about the
|
|
||||||
release with links to the Kafka docs followed by a list of issues resolved in the release. The script will fail if it finds
|
|
||||||
any unresolved issues still marked with the target release. You should run this script after either resolving all issues or
|
|
||||||
moving outstanding issues to a later release.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
from jira import JIRA
|
|
||||||
import itertools, sys
|
|
||||||
|
|
||||||
# The target release version is the only required CLI argument.
if len(sys.argv) < 2:
    print("Usage: release_notes.py <version>", file=sys.stderr)
    sys.exit(1)

version = sys.argv[1]
minor_version_dotless = "".join(version.split(".")[:2]) # i.e., 10 if version == 1.0.1

# Public Apache JIRA instance queried for the release's resolved issues.
JIRA_BASE_URL = 'https://issues.apache.org/jira'
MAX_RESULTS = 100 # This is constrained for cloud instances so we need to fix this value
|
|
||||||
|
|
||||||
def get_issues(jira, query, **kwargs):
    """
    Fetch every issue matching the JQL query from the JIRA instance,
    transparently following pagination. Any additional keyword arguments
    are forwarded to JIRA.search_issues.
    """
    results = []
    while True:
        # startAt equals the number of issues gathered so far.
        page = jira.search_issues(query, startAt=len(results), maxResults=MAX_RESULTS, **kwargs)
        results += page
        # A short page means the server has no more results.
        if len(page) < MAX_RESULTS:
            return results
|
|
||||||
|
|
||||||
def issue_link(issue):
    """Return the browse URL for a JIRA issue."""
    return "{}/browse/{}".format(JIRA_BASE_URL, issue.key)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Anonymous (read-only) connection to the public Apache JIRA.
    apache = JIRA(JIRA_BASE_URL)
    issues = get_issues(apache, 'project=KAFKA and fixVersion=%s' % version)
    if not issues:
        print("Didn't find any issues for the target fix version", file=sys.stderr)
        sys.exit(1)

    # Some resolutions, including a lack of resolution, indicate that the bug hasn't actually been addressed and we shouldn't even be able to create a release until they are fixed
    UNRESOLVED_RESOLUTIONS = [None,
                              "Unresolved",
                              "Duplicate",
                              "Invalid",
                              "Not A Problem",
                              "Not A Bug",
                              "Won't Fix",
                              "Incomplete",
                              "Cannot Reproduce",
                              "Later",
                              "Works for Me",
                              "Workaround",
                              "Information Provided"
                              ]
    # `resolution` may be None; the first membership test short-circuits the
    # `or`, so `.name` is only read on real resolution objects.
    unresolved_issues = [issue for issue in issues if issue.fields.resolution in UNRESOLVED_RESOLUTIONS or issue.fields.resolution.name in UNRESOLVED_RESOLUTIONS]
    if unresolved_issues:
        print("The release is not completed since unresolved issues or improperly resolved issues were found still tagged with this release as the fix version:", file=sys.stderr)
        for issue in unresolved_issues:
            print("Unresolved issue: %15s %20s %s" % (issue.key, issue.fields.resolution, issue_link(issue)), file=sys.stderr)
        print("", file=sys.stderr)
        print("Note that for some resolutions, you should simply remove the fix version as they have not been truly fixed in this release.", file=sys.stderr)
        sys.exit(1)

    # Get list of (issue type, [issues]) sorted by the issue ID type, with each subset of issues sorted by their key so they
    # are in increasing order of bug #. To get a nice ordering of the issue types we customize the key used to sort by issue
    # type a bit to ensure features and improvements end up first.
    def issue_type_key(issue):
        if issue.fields.issuetype.name == 'New Feature':
            return -2
        if issue.fields.issuetype.name == 'Improvement':
            return -1
        return int(issue.fields.issuetype.id)

    # groupby requires its input pre-sorted by an equivalent key; sorting by
    # issue_type_key keeps each issuetype.name contiguous.
    by_group = [(k,sorted(g, key=lambda issue: issue.id)) for k,g in itertools.groupby(sorted(issues, key=issue_type_key), lambda issue: issue.fields.issuetype.name)]

    # Emit the HTML release notes to stdout (redirected to RELEASE_NOTES.html).
    print("<h1>Release Notes - Kafka - Version %s</h1>" % version)
    print("""<p>Below is a summary of the JIRA issues addressed in the %(version)s release of Kafka. For full documentation of the
release, a guide to get started, and information about the project, see the <a href="https://kafka.apache.org/">Kafka
project site</a>.</p>

<p><b>Note about upgrades:</b> Please carefully review the
<a href="https://kafka.apache.org/%(minor)s/documentation.html#upgrade">upgrade documentation</a> for this release thoroughly
before upgrading your cluster. The upgrade notes discuss any critical information about incompatibilities and breaking
changes, performance changes, and any other changes that might impact your production deployment of Kafka.</p>

<p>The documentation for the most recent release can be found at
<a href="https://kafka.apache.org/documentation.html">https://kafka.apache.org/documentation.html</a>.</p>""" % { 'version': version, 'minor': minor_version_dotless })
    for itype, issues in by_group:
        print("<h2>%s</h2>" % itype)
        print("<ul>")
        for issue in issues:
            print('<li>[<a href="%(link)s">%(key)s</a>] - %(summary)s</li>' % {'key': issue.key, 'link': issue_link(issue), 'summary': issue.fields.summary})
        print("</ul>")
|
|
Loading…
Reference in New Issue