Repo created
parent 75dc487a7a, commit 39c29d175b
6317 changed files with 388324 additions and 2 deletions
3  scripts/autodiscovery  Executable file
@@ -0,0 +1,3 @@
#!/bin/sh

./gradlew --quiet ":cli:autodiscovery-cli:installDist" < /dev/null && ./cli/autodiscovery-cli/build/install/autodiscovery-cli/bin/autodiscovery-cli "$@"
79  scripts/ci/merges/do_merge.sh  Executable file
@@ -0,0 +1,79 @@
#!/bin/bash

# This script is used to perform merges of main->beta and beta->release.
# Be sure to review merge results and ensure correctness before pushing
# to the repository.
# To merge into beta: do_merge.sh beta
# To merge into release: do_merge.sh release

if [ "$#" -lt 1 ]; then
  echo "Usage: $0 <into-branch>"
  exit 1
fi

into_branch=$1
from_branch="main"
if [ "${into_branch}" = "release" ]; then
  from_branch="beta"
fi

echo "Before merging ${from_branch} into ${into_branch} please confirm that you have:"
if [ "${into_branch}" = "beta" ]; then
  echo "1) Locked the main branch with the 'CLOSED TREE (main)' ruleset"
  echo "2) Sent a message to the #tb-mobile-dev:mozilla.org matrix channel to let them know:"
  echo " - You will be performing the merge from main into beta"
  echo " - The main branch is locked and cannot be changed during the merge"
  echo " - You will let them know when the merge is complete and main is re-opened"
else
  echo "1) Sent a message to the #tb-mobile-dev:mozilla.org matrix channel to let them know"
  echo " - You will be performing the merge from beta into release"
  echo " - You will let them know when the merge is complete"
fi
read -p "Continue with merge? [y/N]: " answer
answer=${answer,,}
if [[ "$answer" == "y" || "$answer" == "yes" ]]; then
  echo "Merging ${from_branch} into ${into_branch}"
else
  exit 1
fi
echo

set -ex
git checkout ${into_branch}
git pull
git config merge.ours.driver true
git config merge.merge_gradle.driver "python3 scripts/ci/merges/merge_gradle.py %A %B"
set +e
git merge "origin/${from_branch}"
ret=$?
set +x

if [ "${from_branch}" = "beta" ]; then
  if [ -e "app-thunderbird/src/beta/res/raw/changelog_master.xml" ]; then
    set -ex
    git rm --force app-thunderbird/src/beta/res/raw/changelog_master.xml
    set +ex
  fi
fi

echo
if [ "$ret" -eq 0 ]; then
  echo "Merge succeeded. Next steps:"
  echo "1) Review merge results and ensure correctness"
  echo "2) Ensure feature flags are following the rules"
  echo "3) Push the merge"
  if [ "${into_branch}" = "beta" ]; then
    echo "4) Submit a pull request that increments the version in main"
    echo "5) Open a new milestone for the new version on github"
    echo "6) Once the version increment is merged into main, unlock the branch"
    echo "7) Send a message to the #tb-mobile-dev:mozilla.org channel to notify of merge completion and that main is re-opened"
  else
    echo "4) Close the milestone for the version that was previously in release"
    echo "5) Send a message to the #tb-mobile-dev:mozilla.org channel to notify of merge completion"
  fi
else
  echo "Merge failed. Next steps:"
  echo "1) Fix conflicts"
  echo "2) Add fixed files with: git add"
  echo "3) Continue the merge with: git merge --continue"
fi
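For reference, the invocation documented in the header comments above, plus a note on how the custom merge drivers the script configures take effect; the .gitattributes entry shown is a hypothetical illustration, not part of this commit:

# Merge main into beta, or beta into release:
./scripts/ci/merges/do_merge.sh beta
./scripts/ci/merges/do_merge.sh release

# The "ours" and "merge_gradle" drivers only apply to paths that a
# .gitattributes file binds to them, e.g. (hypothetical entry):
#   app-thunderbird/build.gradle.kts merge=merge_gradle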
82  scripts/ci/merges/merge_gradle.py  Normal file
@@ -0,0 +1,82 @@
#!/usr/bin/env python3

import re
import shutil
import subprocess
import sys

ours = sys.argv[1]
theirs = sys.argv[2]


def get_current_branch():
    result = subprocess.run(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
    if result.returncode:
        raise SystemExit(f"Git error: {result.stderr.strip()}")
    return result.stdout.strip()


def find_matching_line(file_path, search_term):
    """Finds and returns the first line containing search term in file."""
    with open(file_path, "r") as file:
        for line in file:
            if re.search(search_term, line):
                return line
    return None


def replace_matching_line(file_path, search_term, new_line):
    """Finds matching line in file and replaces it with new_line."""
    with open(file_path, "r") as file:
        lines = file.readlines()

    with open(file_path, "w") as file:
        for line in lines:
            if re.search(search_term, line):
                file.write(new_line)
            else:
                file.write(line)


branch = get_current_branch()

search_term = "com.fsck.k9"
is_k9 = find_matching_line(ours, search_term)

search_term = "net.thunderbird.android"
is_thunderbird = find_matching_line(ours, search_term)

search_term = r"versionCode = "
found_line = find_matching_line(ours, search_term)

shutil.copyfile(theirs, ours)

if found_line:
    replace_matching_line(ours, search_term, found_line)
else:
    raise SystemExit(f"Search term '{search_term}' not found in ours file.")

if branch == "beta":
    if is_k9:
        search_term = r"versionNameSuffix = \"a1\""
    else:
        search_term = r"versionNameSuffix = \"b[1-9]\""
    found_line = find_matching_line(theirs, search_term)
    if found_line:
        if "b1" not in found_line:
            new_line = "{}{}\n".format(found_line.split("=")[0], '= "b1"')
            replace_matching_line(ours, search_term, new_line)
    else:
        raise SystemExit(f"Search term '{search_term}' not found in theirs file.")
elif branch == "release":
    search_term = r"versionNameSuffix = \"b[1-9]\""
    found_line = find_matching_line(theirs, search_term)
    if found_line:
        replace_matching_line(ours, search_term, "")
    else:
        raise SystemExit(f"Search term '{search_term}' not found in theirs file.")
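Git calls this merge driver with temporary files substituted for %A (ours) and %B (theirs), as configured in do_merge.sh above. A manual smoke test could look like this sketch; the file names are hypothetical copies of the two build files being merged:

python3 scripts/ci/merges/merge_gradle.py /tmp/ours-build.gradle.kts /tmp/theirs-build.gradle.kts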
227  scripts/ci/render-notes.py  Executable file
@@ -0,0 +1,227 @@
#!/usr/bin/env python3

import argparse
import os
import requests
import yaml
import sys

from jinja2 import Template


def render_notes(
    version,
    versioncode,
    application,
    applicationid,
    longform_file,
    print_only=False,
    notesrepo="thunderbird/thunderbird-notes",
    notesbranch="master",
):
    """Render release notes from thunderbird-notes

    Update changelog files based on short release notes from thunderbird-notes.
    Render long-form notes to the specified file.
    """
    tb_notes_filename = f"{version}.yml"
    tb_notes_directory = "android_release"
    if "0b" in version:
        tb_notes_filename = f"{version[0:-1]}eta.yml"
        tb_notes_directory = "android_beta"

    if application == "k9mail":
        build_type = "main"
    else:
        if applicationid == "net.thunderbird.android":
            build_type = "release"
        elif applicationid == "net.thunderbird.android.beta":
            build_type = "beta"
        else:
            print("Error: Unsupported applicationid")
            sys.exit(1)

    if os.path.isdir(os.path.expanduser(notesrepo)):
        notes_path = os.path.join(
            os.path.expanduser(notesrepo), tb_notes_directory, tb_notes_filename
        )
        with open(notes_path) as fp:
            yaml_content = yaml.safe_load(fp.read())
    else:
        tb_notes_url = os.path.join(
            f"https://api.github.com/repos/{notesrepo}/",
            f"contents/{tb_notes_directory}/{tb_notes_filename}?ref={notesbranch}",
        )

        headers = {
            "Accept": "application/vnd.github.v3.raw"
        }

        response = requests.get(tb_notes_url, headers=headers)
        response.raise_for_status()
        yaml_content = yaml.safe_load(response.text)

    render_data = {"releases": {}}
    for release in reversed(yaml_content["release"]["releases"]):
        vers = release["version"]
        render_data["releases"][vers] = {}
        render_data["releases"][vers]["version"] = vers
        render_data["releases"][vers]["versioncode"] = int(versioncode)
        render_data["releases"][vers]["application"] = application
        render_data["releases"][vers]["date"] = release["release_date"]
        render_data["releases"][vers]["short_notes"] = []
        render_data["releases"][vers]["notes"] = {}
        render_data["releases"][vers]["long_notes"] = []
        for note in yaml_content["notes"]:
            if ("0b" not in version) or (
                "0b" in version and note["group"] == int(vers[-1])
            ):
                if (
                    note.get("thunderbird_only", False) and application == "k9mail"
                ) or (note.get("k9mail_only", False) and application == "thunderbird"):
                    continue
                if "note" in note:
                    tag = note["tag"].lower().capitalize()
                    if tag not in render_data["releases"][vers]["notes"]:
                        render_data["releases"][vers]["notes"][tag] = []
                    render_data["releases"][vers]["notes"][tag].append(
                        note["note"].strip()
                    )
                    render_data["releases"][vers]["long_notes"].append(note["note"].strip())
                if "short_note" in note:
                    render_data["releases"][vers]["short_notes"].append(
                        note["short_note"].strip()
                    )

    render_files = {
        "changelog_master": {
            "template": "changelog_master.xml",
            "outfile": f"./app-{application}/src/{build_type}/res/raw/changelog_master.xml",
            "render_data": render_data["releases"][version],
            "autoescape": True,
        },
        "changelog": {
            "template": "changelog.txt",
            "outfile": f"./app-metadata/{applicationid}/en-US/changelogs/{versioncode}.txt",
            "render_data": render_data["releases"][version],
            "max_length": 500,
        },
        "changelog_long": {
            "template": "changelog_long.txt",
            "outfile": longform_file,
            "render_data": render_data["releases"][version],
            "autoescape": True,
        },
    }

    template_base = os.path.join(os.path.dirname(sys.argv[0]), "templates")

    for render_file in render_files:
        with open(os.path.join(template_base, render_files[render_file]["template"]), "r") as file:
            template = file.read()
        template = Template(template, autoescape=render_files[render_file].get("autoescape", False))
        rendered = template.render(render_files[render_file]["render_data"])
        if render_file == "changelog_master":
            if print_only:
                print(f"\n==={render_files[render_file]['outfile']}===")
                print("...")
                print(rendered)
                print("...")
            else:
                with open(render_files[render_file]["outfile"], "r") as file:
                    lines = file.readlines()
                for index, line in enumerate(lines):
                    if "<changelog>" in line:
                        if version in lines[index + 1]:
                            break
                        lines.insert(index + 1, rendered)
                        break
                with open(render_files[render_file]["outfile"], "w") as file:
                    file.writelines(lines)
        elif render_file == "changelog" or render_file == "changelog_long":
            stripped = rendered.lstrip()
            maxlen = render_files[render_file].get("max_length", float("inf"))
            if print_only:
                print(f"\n==={render_files[render_file]['outfile']}===")
                print(stripped)

            if len(stripped) > maxlen:
                print(
                    f"Error: Maximum length of {maxlen} exceeded, {render_file} is {len(stripped)} characters"
                )
                sys.exit(1)

            if not print_only:
                with open(render_files[render_file]["outfile"], "x") as file:
                    file.write(stripped)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--print",
        "-p",
        action="store_true",
        help="Only print the processed release notes",
    )
    parser.add_argument(
        "--repository",
        "-r",
        default="thunderbird/thunderbird-notes",
        help="Repository or directory to retrieve thunderbird-notes from",
    )
    parser.add_argument(
        "--branch",
        "-b",
        default="master",
        help="Branch to retrieve thunderbird-notes from",
    )
    parser.add_argument(
        "applicationid",
        type=str,
        choices=[
            "net.thunderbird.android",
            "net.thunderbird.android.beta",
            "com.fsck.k9",
        ],
        help="thunderbird or k9mail",
    )
    parser.add_argument("version", type=str, help="Version name for this release")
    parser.add_argument(
        "versioncode",
        nargs="?",
        default="0",
        type=str,
        help="Version code for this release",
    )
    parser.add_argument(
        "longform_file",
        type=str,
        nargs="?",
        default="github_notes",
        help="File to render long-form notes to",
    )
    args = parser.parse_args()

    if args.applicationid == "com.fsck.k9":
        application = "k9mail"
    else:
        application = "thunderbird"

    render_notes(
        args.version,
        args.versioncode,
        application,
        args.applicationid,
        args.longform_file,
        print_only=args.print,
        notesrepo=args.repository,
        notesbranch=args.branch,
    )


if __name__ == "__main__":
    main()
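An illustrative invocation matching the argument parser above; the version and versioncode values are invented for the example, and --print previews the rendered changelogs without writing any files:

./scripts/ci/render-notes.py --print net.thunderbird.android.beta 11.0b2 123456 github_notes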
31  scripts/ci/run-fluidattacks-scanner.sh  Executable file
@@ -0,0 +1,31 @@
#!/bin/bash

function fail() {
  echo "Error: $*"
  exit 1
}

# Check if tools are installed
command -v docker &> /dev/null || fail "Docker is not installed"

# Default values
debug=false

# Parse command-line arguments
for arg in "$@"; do
  case $arg in
    --debug)
      debug=true
      shift
      ;;
    *)
      fail "Unknown argument: $arg"
      ;;
  esac
done

if [ "$debug" = true ]; then
  docker run --rm -v "$(pwd)":/repo -it fluidattacks/cli:latest /bin/bash
  exit
fi
docker run --rm -v "$(pwd)":/repo fluidattacks/cli:latest skims scan /repo/config/fluidattacks/config.yaml
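Typical usage from the repository root (Docker required, per the check above):

./scripts/ci/run-fluidattacks-scanner.sh          # run the skims scan
./scripts/ci/run-fluidattacks-scanner.sh --debug  # open a shell in the scanner image instead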
628  scripts/ci/setup_release_automation  Normal file
@@ -0,0 +1,628 @@
#!/usr/bin/env python

# See docs/CI/Release_Automation.md for more details
# Run this from the .signing directory with all the keys and properties files in it.

# python -m venv venv; source venv/bin/activate; pip install requests pynacl

import os
import json
import base64
import argparse
import requests
import nacl.encoding
import nacl.public
import textwrap

PUBLISH_APPROVERS = ["kewisch", "coreycb", "wmontwe"]

CHANNEL_ENVIRONMENTS = {
    "thunderbird_release": {
        "branch": "release",
        "variables": {
            "RELEASE_TYPE": "release",
            "MATRIX_INCLUDE": [
                {
                    "appName": "thunderbird",
                    "releaseTarget": "ftp|github",
                    "packageFormat": "apk",
                    "packageFlavor": "foss",
                },
                {
                    "appName": "thunderbird",
                    "releaseTarget": "play",
                    "playTargetTrack": "internal",
                    "packageFormat": "aab",
                    "packageFlavor": "full",
                },
                {
                    "appName": "k9mail",
                    "releaseTarget": "ftp|github",
                    "packageFormat": "apk",
                    "packageFlavor": "foss",
                },
                {
                    "appName": "k9mail",
                    "releaseTarget": "play",
                    "playTargetTrack": "internal",
                    "packageFormat": "apk",
                    "packageFlavor": "full",
                },
            ],
        },
    },
    "thunderbird_beta": {
        "branch": "beta",
        "variables": {
            "RELEASE_TYPE": "beta",
            "MATRIX_INCLUDE": [
                {
                    "appName": "thunderbird",
                    "releaseTarget": "ftp|github",
                    "packageFormat": "apk",
                    "packageFlavor": "foss",
                },
                {
                    "appName": "thunderbird",
                    "releaseTarget": "play",
                    "playTargetTrack": "internal",
                    "packageFormat": "aab",
                    "packageFlavor": "full",
                },
            ],
        },
    },
    "thunderbird_daily": {
        "branch": "main",
        "variables": {
            "RELEASE_TYPE": "daily",
            "MATRIX_INCLUDE": [
                {
                    "appName": "thunderbird",
                    "releaseTarget": "ftp",
                    "packageFormat": "apk",
                    "packageFlavor": "foss",
                },
                {
                    "appName": "thunderbird",
                    "releaseTarget": "play",
                    "packageFormat": "aab",
                    "playTargetTrack": "internal",
                    "packageFlavor": "full",
                },
            ],
        },
    },
}


SIGNING_ENVIRONMENTS = {
    "k9mail_release_foss": {
        "props": "k9.release.signing.properties",
        "branch": "release",
    },
    "k9mail_release_full": {
        "props": "k9.release.signing.properties",
        "branch": "release",
    },
    "k9mail_beta_foss": {
        "props": "k9.release.signing.properties",
        "branch": "beta",
    },
    "k9mail_beta_full": {
        "props": "k9.release.signing.properties",
        "branch": "beta",
    },
    "thunderbird_daily_foss": {
        "props": "tb.daily.signing.properties",
        "branch": "main",
    },
    "thunderbird_daily_full": {
        "props": "tb.daily.upload.properties",
        "branch": "main",
    },
    "thunderbird_beta_foss": {
        "props": "tb.beta.signing.properties",
        "branch": "beta",
    },
    "thunderbird_beta_full": {
        "props": "tb.beta.upload.properties",
        "branch": "beta",
    },
    "thunderbird_release_foss": {
        "props": "tb.release.signing.properties",
        "branch": "release",
    },
    "thunderbird_release_full": {
        "props": "tb.release.upload.properties",
        "branch": "release",
    },
}


# Function to read the key properties file
def read_key_properties(file_path):
    key_properties = {}
    with open(file_path, "r") as file:
        for line in file:
            if "=" in line:
                key, value = line.strip().split("=", 1)
                final_key = key.split(".")[-1]
                key_properties[final_key] = value
    return key_properties


# Function to base64 encode the .jks file
def encode_jks_file(jks_file_path):
    with open(jks_file_path, "rb") as file:
        encoded_key = base64.b64encode(file.read()).decode("utf-8")
    return encoded_key


# Function to get the public key from GitHub for encryption
def get_github_public_key(repo, environment_name):
    url = f"https://api.github.com/repos/{repo}/environments/{environment_name}/secrets/public-key"
    headers = {
        "Authorization": f"token {GITHUB_TOKEN}",
        "Accept": "application/vnd.github.v3+json",
    }
    response = requests.get(url, headers=headers)
    if response.status_code == 200:
        data = response.json()
        return [data["key_id"], data["key"]]
    else:
        raise Exception(
            f"Failed to fetch public key from GitHub. Response: {response.status_code}, {response.text}"
        )


# Function to encrypt a secret using the GitHub public key
def encrypt_secret(public_key: str, secret_value: str):
    public_key_bytes = base64.b64decode(public_key)
    sealed_box = nacl.public.SealedBox(nacl.public.PublicKey(public_key_bytes))
    encrypted_secret = sealed_box.encrypt(secret_value.encode("utf-8"))
    return base64.b64encode(encrypted_secret).decode("utf-8")


# Function to set encrypted secret in GitHub environment
def set_github_environment_secret(
    repo, secret_name, encrypted_value, key_id, environment_name
):
    url = f"https://api.github.com/repos/{repo}/environments/{environment_name}/secrets/{secret_name}"
    headers = {
        "Authorization": f"token {GITHUB_TOKEN}",
        "Accept": "application/vnd.github.v3+json",
    }
    data = {"encrypted_value": encrypted_value, "key_id": key_id}
    response = requests.put(url, headers=headers, json=data)
    if response.status_code == 201:
        print(f"\tSecret {secret_name} created successfully in {environment_name}.")
    elif response.status_code == 204:
        print(f"\tSecret {secret_name} updated successfully in {environment_name}.")
    else:
        raise Exception(
            f"Failed to create secret {secret_name} in {environment_name}. Response: {response.status_code}, {response.text}"
        )


def print_github_environment_variable(repo, environment_name):
    url = (
        f"https://api.github.com/repos/{repo}/environments/{environment_name}/variables"
    )
    headers = {
        "Authorization": f"token {GITHUB_TOKEN}",
        "Accept": "application/vnd.github.v3+json",
    }
    response = requests.get(url, headers=headers)
    data = response.json()

    if response.status_code == 200:
        for variable in data["variables"]:
            value = variable["value"]
            if value[0] in "{[":
                try:
                    value = textwrap.indent(
                        json.dumps(json.loads(value), indent=2), "\t\t"
                    ).lstrip()
                except:
                    pass

            print(f"\t{variable['name']}={value}")
    else:
        raise Exception(
            f"Unexpected response getting variables from {environment_name}: {response.status_code} {response.text}"
        )


def set_github_environment_variable(repo, name, value, environment_name):
    url = (
        f"https://api.github.com/repos/{repo}/environments/{environment_name}/variables"
    )
    headers = {
        "Authorization": f"token {GITHUB_TOKEN}",
        "Accept": "application/vnd.github.v3+json",
    }
    data = {"name": name, "value": value}
    response = requests.post(url, headers=headers, json=data)
    if response.status_code == 201:
        print(f"\tVariable {name} created successfully in {environment_name}.")
    elif response.status_code == 409:
        url = f"https://api.github.com/repos/{repo}/environments/{environment_name}/variables/{name}"
        response = requests.patch(url, headers=headers, json=data)
        if response.status_code == 204:
            print(f"\tVariable {name} updated successfully in {environment_name}.")
        else:
            raise Exception(
                f"Failed to update variable {name} in {environment_name}. Response: {response.status_code}, {response.text}"
            )
    else:
        raise Exception(
            f"Failed to create variable {name} in {environment_name}. Response: {response.status_code}, {response.text}"
        )


def print_github_environment(repo, environment_name):
    url = f"https://api.github.com/repos/{repo}/environments/{environment_name}"
    headers = {
        "Authorization": f"token {GITHUB_TOKEN}",
        "Accept": "application/vnd.github.v3+json",
    }
    response = requests.get(url, headers=headers)

    if response.status_code == 200:
        data = response.json()
        print(f"Environment {environment_name}")
        print("\tProtection rules")
        needs_branch_policies = False
        for rule in data["protection_rules"]:
            if rule["type"] == "branch_policy":
                continue

            print(f"\t\tType: {rule['type']}")
            if rule["type"] == "required_reviewers":
                reviewers = ", ".join(
                    map(
                        lambda reviewer: reviewer["reviewer"]["login"],
                        rule["reviewers"],
                    )
                )
                print(f"\t\t\tReviewers: {reviewers}")

        print(f"\t\tBranch policy: {data['deployment_branch_policy']}")
        if (
            data["deployment_branch_policy"]
            and data["deployment_branch_policy"]["custom_branch_policies"]
        ):
            url += "/deployment-branch-policies"
            response = requests.get(url, headers=headers)
            if response.status_code == 200:
                policies = map(
                    lambda policy: policy["name"], response.json()["branch_policies"]
                )
                print("\t\tBranches: " + ", ".join(policies))

    else:
        raise Exception(
            f"Unexpected response getting variables from {environment_name}: {response.status_code} {response.text}"
        )


# Function to create GitHub environment if it doesn't exist
def create_github_environment(repo, environment_name, branches=None, approvers=None):
    url = f"https://api.github.com/repos/{repo}/environments/{environment_name}"
    headers = {
        "Authorization": f"token {GITHUB_TOKEN}",
        "Accept": "application/vnd.github.v3+json",
    }
    data = {}
    if branches:
        data["deployment_branch_policy"] = {
            "custom_branch_policies": True,
            "protected_branches": False,
        }

    if approvers:
        reviewers = map(
            lambda approver: {
                "type": "User",
                "id": get_user_id_from_username(approver),
            },
            approvers,
        )
        data["reviewers"] = list(reviewers)

    response = requests.put(url, headers=headers, json=data)
    if response.status_code == 200:
        print(f"Environment {environment_name} created successfully.")
    elif response.status_code == 409:
        print(f"Environment {environment_name} already exists.")
    else:
        raise Exception(
            f"Failed to create environment {environment_name}. Response: {response.status_code}, {response.text}"
        )

    for branch in branches or []:
        url = f"https://api.github.com/repos/{repo}/environments/{environment_name}/deployment-branch-policies"
        data = {"name": branch, "type": "branch"}
        response = requests.post(url, headers=headers, json=data)

        if response.status_code == 200:
            print(
                f"\tBranch protection on {branch} for {environment_name} created successfully."
            )
        elif response.status_code == 409:
            print(
                f"\tBranch protection on {branch} for {environment_name} already exists."
            )
        else:
            raise Exception(
                f"Failed to create branch protection for {branch} on {environment_name}. Response: {response.status_code}, {response.text}"
            )


# Function to get the GitHub user ID from a username
def get_user_id_from_username(username):
    url = f"https://api.github.com/users/{username}"
    headers = {
        "Authorization": f"token {GITHUB_TOKEN}",
        "Accept": "application/vnd.github.v3+json",
    }

    response = requests.get(url, headers=headers)

    if response.status_code == 200:
        user_data = response.json()
        return user_data["id"]
    else:
        print(
            f"Failed to fetch user ID for username '{username}'. Response: {response.status_code}, {response.text}"
        )
        return None


def create_approver_environment(repo, environment_name, approvers):

    reviewers = map(
        lambda approver: {"type": "User", "id": get_user_id_from_username(approver)},
        approvers,
    )

    url = f"https://api.github.com/repos/{repo}/environments/{environment_name}"
    headers = {
        "Authorization": f"token {GITHUB_TOKEN}",
        "Accept": "application/vnd.github.v3+json",
    }
    data = {"reviewers": list(reviewers)}
    response = requests.put(url, headers=headers, json=data)
    if response.status_code == 200:
        print(f"Environment {environment_name} created successfully.")
    elif response.status_code == 409:
        print(f"Environment {environment_name} already exists.")
    else:
        raise Exception(
            f"Failed to create environment {environment_name}. Response: {response.status_code}, {response.text}"
        )


def create_signing_environment(repo, environment, branch, props_file):
    # Read the key.properties file
    key_props = read_key_properties(props_file)

    KEY_ALIAS = key_props.get("keyAlias")
    KEY_PASSWORD = key_props.get("keyPassword")
    KEY_STORE_PASSWORD = key_props.get("storePassword")

    if not all([KEY_ALIAS, KEY_PASSWORD, KEY_STORE_PASSWORD]):
        print(
            "Missing values in key.properties file. Please ensure all fields are present."
        )
        return

    # Base64 encode the JKS file to create SIGNING_KEY
    SIGNING_KEY = encode_jks_file(key_props.get("storeFile"))

    # Create the environment if it doesn't exist
    create_github_environment(repo, environment, branches=[branch])

    # Fetch the public key from GitHub for the specific environment
    key_id, public_key = get_github_public_key(repo, environment)

    # Encrypt the secrets using the public key
    encrypted_signing_key = encrypt_secret(public_key, SIGNING_KEY)
    encrypted_key_alias = encrypt_secret(public_key, KEY_ALIAS)
    encrypted_key_password = encrypt_secret(public_key, KEY_PASSWORD)
    encrypted_key_store_password = encrypt_secret(public_key, KEY_STORE_PASSWORD)

    # Set the encrypted secrets in the GitHub environment
    secrets_to_set = {
        "SIGNING_KEY": encrypted_signing_key,
        "KEY_ALIAS": encrypted_key_alias,
        "KEY_PASSWORD": encrypted_key_password,
        "KEY_STORE_PASSWORD": encrypted_key_store_password,
    }

    for secret_name, encrypted_value in secrets_to_set.items():
        set_github_environment_secret(
            repo, secret_name, encrypted_value, key_id, environment
        )


def make_bot_environment(repo, environment):
    key_id, public_key = get_github_public_key(repo, environment)

    with open("botmobile.key.pem") as fp:
        encrypted_bot_key = encrypt_secret(public_key, fp.read())
    with open("botmobile.clientid.txt") as fp:
        bot_client_id = fp.read().strip()
    with open("botmobile.userid.txt") as fp:
        bot_user_id = fp.read().strip()

    set_github_environment_secret(
        repo, "BOT_PRIVATE_KEY", encrypted_bot_key, key_id, environment
    )

    set_github_environment_variable(repo, "BOT_CLIENT_ID", bot_client_id, environment)
    set_github_environment_variable(repo, "BOT_USER_ID", bot_user_id, environment)


def create_channel_environment(repo, environment, branch, variables):
    create_github_environment(repo, environment, branches=[branch])

    for name, value in variables.items():
        if isinstance(value, dict) or isinstance(value, list):
            value = json.dumps(value)

        set_github_environment_variable(repo, name, value, environment)


def create_release_environment(repo, branches):
    environment = "publish_release"

    create_github_environment(repo, environment, branches=branches)

    key_id, public_key = get_github_public_key(repo, environment)

    with open("play-store-account.json") as fp:
        encrypted_play_account = encrypt_secret(public_key, fp.read())

    set_github_environment_secret(
        repo, "PLAY_STORE_ACCOUNT", encrypted_play_account, key_id, environment
    )


def create_matrix_environment(repo, branches):
    environment = "notify_matrix"

    create_github_environment(repo, environment, branches=branches)

    key_id, public_key = get_github_public_key(repo, environment)

    with open("matrix-account.json") as fp:
        mxdata = json.load(fp)
    encrypted_token = encrypt_secret(public_key, mxdata["token"])

    set_github_environment_secret(
        repo, "MATRIX_NOTIFY_TOKEN", encrypted_token, key_id, environment
    )

    set_github_environment_variable(
        repo, "MATRIX_NOTIFY_HOMESERVER", mxdata["homeserver"], environment
    )
    set_github_environment_variable(
        repo, "MATRIX_NOTIFY_ROOM", mxdata["room"], environment
    )
    set_github_environment_variable(
        repo, "MATRIX_NOTIFY_USER_MAP", json.dumps(mxdata["userMap"]), environment
    )


def main():
    # Argument parsing for positional inputs and repo flag
    parser = argparse.ArgumentParser(
        description="Set GitHub environment secrets for specific or all environments."
    )
    parser.add_argument(
        "--repo",
        "-r",
        required=True,
        help="GitHub repository in the format 'owner/repo'.",
    )
    parser.add_argument(
        "--print", "-p", action="store_true", help="Print instead of set"
    )
    parser.add_argument(
        "--skip", "-s", action="append", help="Skip this named environment"
    )
    parser.add_argument(
        "--only", "-o", action="append", help="Only include this named environment"
    )

    args = parser.parse_args()

    global GITHUB_TOKEN
    GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
    if not GITHUB_TOKEN:
        raise Exception(
            "GITHUB_TOKEN environment variable is not set. Please set it before running the script."
        )

    if args.skip and args.only:
        print("Error: Cannot supply both skip and only")
        return

    includeset = set(
        list(CHANNEL_ENVIRONMENTS.keys())
        + list(SIGNING_ENVIRONMENTS.keys())
        + ["publish_hold", "publish_release", "notify_matrix", "botmobile"]
    )
    if args.skip:
        for skip in args.skip:
            includeset.remove(skip)

    if args.only:
        includeset = set(args.only)

    # Publish hold environment
    if "publish_hold" in includeset:
        if args.print:
            print_github_environment(args.repo, "publish_hold")
        else:
            create_github_environment(
                args.repo, "publish_hold", approvers=PUBLISH_APPROVERS
            )

    # Channel environments
    for environment_name, data in CHANNEL_ENVIRONMENTS.items():
        if environment_name not in includeset:
            continue

        if args.print:
            print(f"Environment {environment_name}")
            print_github_environment_variable(args.repo, environment_name)
        else:
            create_channel_environment(args.repo, environment_name, **data)
            make_bot_environment(args.repo, environment_name)

    # Signing environments
    for environment_name, data in SIGNING_ENVIRONMENTS.items():
        if environment_name not in includeset:
            continue

        if args.print:
            print_github_environment(args.repo, environment_name)
        else:
            if not os.path.exists(data["props"]):
                print(f"Skipping {environment_name}: Missing key .properties file")
                continue

            create_signing_environment(
                args.repo, environment_name, data["branch"], data["props"]
            )

    # Publish environment
    if "publish_release" in includeset:
        if args.print:
            print_github_environment(args.repo, "publish_release")
        else:
            create_release_environment(args.repo, ["main", "beta", "release"])
            make_bot_environment(args.repo, "publish_release")

    # Botmobile environment
    if "botmobile" in includeset:
        if args.print:
            print_github_environment(args.repo, "botmobile")
        else:
            create_github_environment(args.repo, "botmobile", branches=["main"])
            make_bot_environment(args.repo, "botmobile")

    # Notify
    if "notify_matrix" in includeset:
        if args.print:
            print_github_environment(args.repo, "notify_matrix")
        else:
            create_matrix_environment(args.repo, ["main", "beta", "release"])


if __name__ == "__main__":
    main()
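A sketch of a typical run, following the header comments above; the repository value and script path are placeholders, and --print only reports the current state instead of writing secrets:

cd /path/to/.signing
python -m venv venv; source venv/bin/activate; pip install requests pynacl
export GITHUB_TOKEN=<token with access to the repository environments>
python /path/to/scripts/ci/setup_release_automation --repo owner/repo --print
python /path/to/scripts/ci/setup_release_automation --repo owner/repo --only notify_matrix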
6  scripts/ci/templates/changelog.txt  Normal file
@@ -0,0 +1,6 @@
{%- if application == 'thunderbird' -%}
Thunderbird for Android version {{ version }}, based on K-9 Mail. Changes include:
{%- endif -%}
{%- for note in short_notes %}
- {{ note }}
{%- endfor %}
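Illustrative output of this template for a hypothetical Thunderbird release (the notes are invented for the example; render-notes.py enforces a 500-character limit on this file):

Thunderbird for Android version 11.0, based on K-9 Mail. Changes include:
- Fixed a crash when opening the unified inbox
- Improved folder synchronization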
7  scripts/ci/templates/changelog_long.txt  Normal file
@@ -0,0 +1,7 @@
{% for tag, long_notes in notes.items() %}
{{ tag }}:
{%- for note in long_notes %}
- {{ note }}
{%- endfor %}

{% endfor %}
6  scripts/ci/templates/changelog_master.xml  Normal file
@@ -0,0 +1,6 @@
<release version="{{ version }}" versioncode="{{ versioncode }}" date="{{ date }}">
{%- for note in long_notes %}
<change>{{ note }}</change>
{%- endfor %}
</release>

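Illustrative rendering of this template with invented values; render-notes.py inserts the resulting block directly after the <changelog> element of the existing changelog_master.xml:

<release version="11.0" versioncode="123456" date="2025-01-01">
<change>Fixed a crash when opening the unified inbox</change>
<change>Improved folder synchronization</change>
</release>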
109  scripts/ci/validate-github-actions-pinned.sh  Executable file
@@ -0,0 +1,109 @@
#!/bin/bash

set -e

sha1_regex='^[a-f0-9]{40}$'
sha256_regex='^[A-Fa-f0-9]{64}$'

# Default values
workflows_path=".github/workflows"
dry_run=true
debug=false
action_has_error=false

# Parse command-line arguments
while [ "$#" -gt 0 ]; do
  case "$1" in
    --workflows-path) workflows_path=$2; shift 2;;
    --no-dry-run) dry_run=false; shift;;
    --debug) debug=true; shift;;
    *) echo "Unknown argument: $1"; exit 1;;
  esac
done

function debug() {
  if [[ "$debug" == true ]]; then
    echo "DEBUG: $*"
  fi
}

function fail() {
  echo "ERROR: $*"
  exit 1
}

function assert_uses_version() {
  local uses="$1"
  [[ "$uses" == *@* ]]
}

function assert_uses_sha() {
  local uses="$1"
  if [[ "$uses" == docker://* ]]; then
    [[ "$uses" =~ sha256:$sha256_regex ]]
  else
    local sha_part
    sha_part=$(echo "$uses" | awk -F'@' '{print $2}' | awk '{print $1}')
    [[ "$sha_part" =~ $sha1_regex ]]
  fi
}

function run_assertions() {
  local uses="$1"
  has_error=false

  debug "Processing uses=$uses"

  if assert_uses_version "$uses" && ! assert_uses_sha "$uses"; then
    local message="$uses is not pinned to a full length commit SHA."

    if [[ "$dry_run" == true ]]; then
      echo "WARNING: $message"
    else
      echo "ERROR: $message" >&2
    fi

    has_error=true
  else
    debug "$uses passed all checks."
  fi

  $has_error && return 1 || return 0
}

function check_workflow() {
  local file="$1"
  local file_has_error=false

  echo ""
  echo "Processing $file..."

  if ! grep -q "jobs:" "$file"; then
    fail "The $(basename "$file") workflow does not contain jobs."
  fi

  jobs=$(sed -n '/jobs:/,/^[^ ]/p' "$file")

  while read -r line; do
    if [[ "$line" =~ ^[[:space:]]*uses: || "$line" =~ ^[[:space:]]*-\ uses: ]]; then
      uses=$(echo "$line" | awk -F: '{print $2}' | xargs)
      run_assertions "$uses" || file_has_error=true
    fi
  done <<< "$jobs"

  $file_has_error && return 1 || echo "No issues were found in $file." && return 0
}

# Main script logic
while IFS= read -r -d '' file; do
  if [[ -f "$file" ]]; then
    check_workflow "$file" || action_has_error=true
  fi
done < <(find "$workflows_path" -type f \( -name '*.yaml' -o -name '*.yml' \) -print0)

if [[ "$dry_run" != true && "$action_has_error" == true ]]; then
  echo ""
  fail "At least one workflow contains an unpinned GitHub Action version." >&2
fi

exit 0
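For context, the kind of workflow reference the check above flags versus accepts, plus typical invocations; the action name and SHA are illustrative placeholders:

# Flagged: pinned to a tag only
#   - uses: actions/checkout@v4
# Accepted: pinned to a full-length commit SHA (placeholder shown)
#   - uses: actions/checkout@0123456789abcdef0123456789abcdef01234567

./scripts/ci/validate-github-actions-pinned.sh               # warn only (default dry run)
./scripts/ci/validate-github-actions-pinned.sh --no-dry-run  # fail on unpinned actions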
68  scripts/debian_build.sh  Executable file
@@ -0,0 +1,68 @@
#!/bin/bash

# This script is intended to be used on Debian systems for building
# the project. It has been tested with Debian 8

USERNAME=$USER
SIGNING_NAME='k-9'
SDK_VERSION='r24.3.3'
SDK_DIR=$HOME/android-sdk

cd ..

PROJECT_HOME=$(pwd)

sudo apt-get install build-essential default-jdk \
  lib32stdc++6 lib32z1 lib32z1-dev

if [ ! -d $SDK_DIR ]; then
  mkdir -p $SDK_DIR
fi
cd $SDK_DIR

# download the SDK
if [ ! -f $SDK_DIR/android-sdk_$SDK_VERSION-linux.tgz ]; then
  wget https://dl.google.com/android/android-sdk_$SDK_VERSION-linux.tgz
  tar -xzvf android-sdk_$SDK_VERSION-linux.tgz
fi
SDK_DIR=$SDK_DIR/android-sdk-linux

echo 'Check that you have the SDK tools installed for Android 17, SDK 19.1'
if [ ! -f $SDK_DIR/tools/android ]; then
  echo "$SDK_DIR/tools/android not found"
  exit -1
fi
cd $SDK_DIR
chmod -R 0755 $SDK_DIR
chmod a+rx $SDK_DIR/tools

ANDROID_HOME=$SDK_DIR
echo "sdk.dir=$SDK_DIR" > $ANDROID_HOME/local.properties
PATH=${PATH}:$ANDROID_HOME/tools:$ANDROID_HOME/platform-tools

android sdk
cd $PROJECT_HOME


if [ ! -f $SDK_DIR/tools/templates/gradle/wrapper/gradlew ]; then
  echo "$SDK_DIR/tools/templates/gradle/wrapper/gradlew not found"
  exit -2
fi
. $SDK_DIR/tools/templates/gradle/wrapper/gradlew build

#cd ~/develop/$PROJECT_NAME/build/outputs/apk
#keytool -genkey -v -keystore example.keystore -alias \
# "$SIGNING_NAME" -keyalg RSA -keysize 4096
#jarsigner -verbose -keystore example.keystore \
# k9mail-release-unsigned.apk "$SIGNING_NAME"

# cleaning up
cd $PROJECT_HOME/k9mail/build/outputs/apk
if [ ! -f k9mail-debug.apk ]; then
  echo 'k9mail-debug.apk was not found'
  exit -3
fi
echo 'Build script ended successfully'
echo -n 'apk is available at: '
echo "$PROJECT_HOME/k9mail/build/outputs/apk/k9mail-debug.apk"
exit 0
3  scripts/html-cleaner  Executable file
@@ -0,0 +1,3 @@
#!/bin/sh

./gradlew --quiet ":cli:html-cleaner-cli:installDist" < /dev/null && ./cli/html-cleaner-cli/build/install/html-cleaner-cli/bin/html-cleaner-cli "$@"
4  scripts/requirements.txt  Normal file
@@ -0,0 +1,4 @@
PyNaCl
PyYAML
Jinja2
requests
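These are the packages used by the CI scripts above (render-notes.py, setup_release_automation). A minimal setup sketch:

python3 -m venv venv
source venv/bin/activate
pip install -r scripts/requirements.txt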
3  scripts/resource-mover  Executable file
@@ -0,0 +1,3 @@
#!/bin/sh

./gradlew --quiet ":cli:resource-mover-cli:installDist" < /dev/null && ./cli/resource-mover-cli/build/install/resource-mover-cli/bin/resource-mover-cli "$@"
35  scripts/take_screenshot.sh  Executable file
@@ -0,0 +1,35 @@
#!/bin/bash

# This script takes a screenshot of the connected device using adb.
# The screenshot is saved in the adb-screenshots directory with the current date and time as the file name.
# See https://android.googlesource.com/platform/frameworks/base/+/master/packages/SystemUI/docs/demo_mode.md

TARGET_DIR="./adb-screenshots/"
FILE_NAME="$(date +"%Y-%m-%d_%H-%M-%S").png"

start_demo_mode() {
  adb shell settings put global sysui_demo_allowed 1
  adb shell am broadcast -a com.android.systemui.demo -e command enter
  adb shell am broadcast -a com.android.systemui.demo -e command clock -e hhmm 0058
  adb shell am broadcast -a com.android.systemui.demo -e command notifications -e visible false
}

end_demo_mode() {
  adb shell am broadcast -a com.android.systemui.demo -e command exit
  adb shell settings put global sysui_demo_allowed 0
}

mkdir -p $TARGET_DIR

echo "waiting for device..."
adb wait-for-device

start_demo_mode

mkdir -p $TARGET_DIR
adb exec-out screencap -p > $TARGET_DIR$FILE_NAME

end_demo_mode

echo ""
echo "File: $TARGET_DIR$FILE_NAME"
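A minimal run, assuming adb is on PATH and a device or emulator is connected:

adb devices   # confirm the device is listed
./scripts/take_screenshot.sh
# The screenshot is written to ./adb-screenshots/<timestamp>.png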
3  scripts/translation  Executable file
@@ -0,0 +1,3 @@
#!/bin/sh

./gradlew --quiet ":cli:translation-cli:installDist" < /dev/null && ./cli/translation-cli/build/install/translation-cli/bin/translation-cli "$@"