Repo created
This commit is contained in:
parent
4af19165ec
commit
68073add76
12458 changed files with 12350765 additions and 2 deletions
54
tools/unix/android_transfer_string.sh
Executable file
54
tools/unix/android_transfer_string.sh
Executable file
|
|
@ -0,0 +1,54 @@
|
|||
#!/bin/bash
# Move a <string> resource from one Gradle module to another, across every
# values-* localisation that defines it.
set -eu

# Prompt for input
read -r -p "From module: " from_module
read -r -p "To module: " to_module
read -r -p "String resource name: " string_name

# All three answers are required; an empty module name would make the
# find/path manipulation below operate on the wrong directories.
if [[ -z "$from_module" || -z "$to_module" || -z "$string_name" ]]; then
  echo "From module, to module and string name are all required" >&2
  exit 1
fi

# Loop through each strings.xml file in the source module.
# NUL-delimited read is safe for paths containing spaces (the original
# unquoted $strings_files loop word-split such paths).
while IFS= read -r -d '' from_file; do
  # Extract the exact <string> line (preserves backslashes).
  string_line=$(grep -oP "<string name=\"$string_name\">.*?</string>" "$from_file" || true)

  # Skip if string not found in this localisation
  if [[ -z "$string_line" ]]; then
    continue
  fi

  # Determine the value folder path, e.g. values-de/strings.xml
  relative_path="${from_file#"$from_module"/src/main/res/}"
  to_file="$to_module/src/main/res/$relative_path"

  # Ensure the destination directory exists
  mkdir -p "$(dirname "$to_file")"

  # Add or create the string in the destination file
  if [[ -f "$to_file" ]]; then
    # Add string if not already present
    if ! grep -q "name=\"$string_name\"" "$to_file"; then
      # Insert the raw line before </resources> via awk + ENVIRON:
      # unlike 'sed …i' or 'awk -v', this never reinterprets '/', '&'
      # or backslash escapes contained in the string value.
      STRING_LINE="    $string_line" \
        awk '/<\/resources>/ { print ENVIRON["STRING_LINE"] } { print }' \
        "$to_file" > "$to_file.tmp"
      mv "$to_file.tmp" "$to_file"
      echo "Added '$string_name' to $to_file"
    else
      echo "'$string_name' already exists in $to_file"
    fi
  else
    # Create a new strings.xml containing just this string
    cat > "$to_file" <<EOF
<resources>
$string_line
</resources>
EOF
    echo "Created $to_file with '$string_name'"
  fi

  # Remove the string from the source file
  sed -i.bak "/<string name=\"$string_name\">.*<\/string>/d" "$from_file"

  # Cleanup backup files
  rm -f "$from_file.bak"
done < <(find "$from_module/src/main/res" -type f -name "strings.xml" -print0)
|
||||
43
tools/unix/benchmarks.sh
Executable file
43
tools/unix/benchmarks.sh
Executable file
|
|
@ -0,0 +1,43 @@
|
|||
#!/usr/bin/env bash
# Run benchmark_tool over one .mwm file at every relevant scale level.
set -e -u

# displays usage and exits
Usage() {
  echo ''
  echo "Usage: $0 <file without extension>"
  exit 0
}

########## ENTRY POINT ###########

if [ $# -lt 1 ]; then
  Usage
fi

# trying to locate benchmark tool
SCRIPT_DIR=$(dirname "$0")
BENCHMARK_TOOL="$SCRIPT_DIR/../../../omim-build-release/out/release/benchmark_tool"
FILE=$1

if [ ! -f "$BENCHMARK_TOOL" ]; then
  echo "Can't open $BENCHMARK_TOOL" >&2
  # BUG FIX: 'exit -1' is not a valid exit status (POSIX requires 0-255
  # and some shells reject it); any non-zero value signals failure.
  exit 1
fi

# build fresh data
echo "**************************"
echo "Starting benchmarking $FILE on $(date)"
echo "HEAD commit:"
git --git-dir="$SCRIPT_DIR/../../.git" log -1

"$BENCHMARK_TOOL" -input="$FILE.mwm" -print_scales
# World files only contain low zoom levels; country files the high ones.
if [[ $FILE == World* ]]; then
  SCALES="0 1 2 3 4 5 6 7 8 9"
else
  SCALES="10 11 12 13 14 15 16 17"
fi
for SCALE in $SCALES; do
  echo -n "Scale $SCALE: "
  "$BENCHMARK_TOOL" -lowS="$SCALE" -highS="$SCALE" -input="$FILE.mwm"
done
|
||||
175
tools/unix/build_omim.sh
Executable file
175
tools/unix/build_omim.sh
Executable file
|
|
@ -0,0 +1,175 @@
|
|||
#!/usr/bin/env bash
# Build the desktop app and other C++ targets. Option parsing and
# configuration-selection part; build() and its invocations follow below.
set -euo pipefail

# Optional toggles forwarded to configure.sh (may come from the environment).
SKIP_MAP_DOWNLOAD="${SKIP_MAP_DOWNLOAD:-}"
SKIP_GENERATE_SYMBOLS="${SKIP_GENERATE_SYMBOLS:-}"
SKIP_GENERATE_DRULES="${SKIP_GENERATE_DRULES:-}"

# Start from any caller-provided CMAKE_CONFIG and unset the cached CMake
# options this script manages itself.
CMAKE_CONFIG="${CMAKE_CONFIG:-} -U SKIP_QT_GUI -U GENERATOR_TOOL -U USE_PCH -U CMAKE_EXPORT_COMPILE_COMMANDS -U NJOBS -U SKIP_TESTS"

OPT_DEBUG=
OPT_RELEASE=
OPT_RELEASEDEBUGINFO=
OPT_CLEAN=
OPT_DESIGNER=
OPT_GCC=
OPT_TARGET=
OPT_PATH=
OPT_STANDALONE=
OPT_COMPILE_DATABASE=
OPT_LAUNCH_BINARY=
OPT_NJOBS=
while getopts ":cdrRxtagjlp:n:" opt; do
  case $opt in
    a) OPT_STANDALONE=1 ;;
    c) OPT_CLEAN=1 ;;
    d) OPT_DEBUG=1 ;;
    x) CMAKE_CONFIG="${CMAKE_CONFIG:-} -DUSE_PCH=YES" ;;
    g) OPT_GCC=1 ;;
    j) OPT_COMPILE_DATABASE=1
       CMAKE_CONFIG="${CMAKE_CONFIG:-} -DCMAKE_EXPORT_COMPILE_COMMANDS=YES"
       ;;
    l) OPT_LAUNCH_BINARY=1 ;;
    n) OPT_NJOBS="$OPTARG"
       CMAKE_CONFIG="${CMAKE_CONFIG:-} -DNJOBS=${OPT_NJOBS}"
       ;;
    p) OPT_PATH="$OPTARG" ;;
    r) OPT_RELEASE=1
       CMAKE_CONFIG="${CMAKE_CONFIG:-} -DSKIP_TESTS=1"
       ;;
    R) OPT_RELEASEDEBUGINFO=1 ;;
    t) OPT_DESIGNER=1 ;;
    *)
      echo "Build the desktop app and other C++ targets (tests, tools...)"
      echo "Usage: $0 [-d] [-r] [-R] [-c] [-x] [-s] [-t] [-a] [-g] [-j] [-l] [-p PATH] [-n NUM] [target1 target2 ...]"
      echo
      echo "By default both debug and release versions are built in ../omim-build-<buildtype> dir."
      echo
      echo -e "-d  Build debug version"
      echo -e "-r  Build release version"
      echo -e "-R  Build release with debug info"
      echo -e "-x  Use precompiled headers"
      echo -e "-c  Clean before building"
      echo -e "-t  Build Qt based designer tool (Linux/MacOS only)"
      echo -e "-a  Build Qt based standalone desktop app (Linux/MacOS only)"
      echo -e "-g  Force use GCC (Linux/MacOS only)"
      echo -e "-p  Directory for built binaries"
      echo -e "-n  Number of parallel processes"
      echo -e "-j  Generate compile_commands.json"
      echo -e "-l  Launches built binaries, useful for tests"
      exit 1
      ;;
  esac
done

# Remaining positional arguments are the build targets.
OPT_TARGET=${@:$OPTIND}

# BUG FIX: the original test ended in '-z "$OPT_STANDALONE"]' (no space
# before ']'), which made the whole test fail with "[: missing ']'" and
# silently skipped -DSKIP_QT_GUI=ON. Also replaced deprecated '-a' with
# separate tests joined by '&&'.
if [ "$OPT_TARGET" != "desktop" ] && [ -z "$OPT_DESIGNER" ] && [ -z "$OPT_STANDALONE" ]; then
  CMAKE_CONFIG="${CMAKE_CONFIG:-} -DSKIP_QT_GUI=ON"
fi

# By default build Debug and RelWithDebugInfo
if [ -z "$OPT_DEBUG$OPT_RELEASE$OPT_RELEASEDEBUGINFO" ]; then
  OPT_DEBUG=1
  OPT_RELEASEDEBUGINFO=1
fi
|
||||
|
||||
# Generator-family targets require the generator tool CMake option.
if [[ "$OPT_TARGET" =~ generator_tool|topography_generator_tool|world_roads_builder_tool|mwm_diff_tool ]]; then
  CMAKE_CONFIG="${CMAKE_CONFIG:-} -DGENERATOR_TOOL=ON"
fi

# Absolute, normalized path to the repository root (two levels above here).
OMIM_PATH="$(cd "${OMIM_PATH:-$(dirname "$0")/../..}"; pwd)"

# Fetch/prepare resources. Non-desktop targets (with no SKIP_* overrides
# from the caller) skip the heavyweight data steps.
# NOTE(review): './configure.sh' resolves against the current working
# directory, not $OMIM_PATH — presumably the script is always invoked from
# the repo root; confirm.
if [ "$OPT_TARGET" != "desktop" ] && [ -z "$SKIP_MAP_DOWNLOAD$SKIP_GENERATE_SYMBOLS$SKIP_GENERATE_DRULES" ]; then
  SKIP_MAP_DOWNLOAD=1 SKIP_GENERATE_SYMBOLS=1 SKIP_GENERATE_DRULES=1 ./configure.sh
else
  ./configure.sh
fi

# Use the RHEL devtoolset compiler when present (CentOS-7-style hosts).
DEVTOOLSET_PATH=/opt/rh/devtoolset-7
if [ -d "$DEVTOOLSET_PATH" ]; then
  # 'enable' appends to MANPATH; clear it first so the value stays sane.
  export MANPATH=
  source "$DEVTOOLSET_PATH/enable"
else
  DEVTOOLSET_PATH=
fi

# Find cmake (sets $CMAKE used later by build()).
source "$OMIM_PATH/tools/autobuild/detect_cmake.sh"

# OS-specific parameters: CPU count and optional GCC override.
if [ "$(uname -s)" == "Darwin" ]; then
  PROCESSES=$(sysctl -n hw.ncpu)

  if [ -n "$OPT_GCC" ]; then
    # First Homebrew gcc-6..gcc-9x binary found wins; '|| true' keeps
    # 'set -e' from aborting when grep finds nothing.
    GCC="$(ls /usr/local/bin | grep '^gcc-[6-9][0-9]\?' -m 1)" || true
    GPP="$(ls /usr/local/bin | grep '^g++-[6-9][0-9]\?' -m 1)" || true
    [ -z "$GCC" -o -z "$GPP" ] \
      && echo "Either GCC or G++ is not found. (The minimum supported GCC version is 6)." \
      && exit 2
    CMAKE_CONFIG="${CMAKE_CONFIG:-} -DCMAKE_C_COMPILER=/usr/local/bin/$GCC \
      -DCMAKE_CXX_COMPILER=/usr/local/bin/$GPP"
  fi
elif [ "$(uname -s)" == "Linux" ]; then
  PROCESSES=$(nproc)
else
  # Neither macOS nor Linux: designer/standalone builds are unsupported.
  [ -n "$OPT_DESIGNER" ] \
    && echo "Designer tool is only supported on Linux or MacOS" && exit 2
  [ -n "$OPT_STANDALONE" ] \
    && echo "Standalone desktop app is only supported on Linux or MacOS" && exit 2
  PROCESSES=$(nproc)
fi

# -n NUM overrides the detected CPU count.
if [ -n "$OPT_NJOBS" ]; then
  PROCESSES="$OPT_NJOBS"
fi
|
||||
|
||||
# Configure and build one CMake configuration.
# Globals (read): OPT_* flags, OMIM_PATH, PROCESSES, CMAKE, CMAKE_CONFIG.
# $1 - CMAKE_BUILD_TYPE value (Debug / Release / RelWithDebInfo).
# Builds in ../omim-build-<conf> (or under -p PATH), optionally cleaning
# first, launching built binaries (-l) and exporting compile_commands (-j).
build()
{
  # Split declaration from assignment so a missing ninja does not hide
  # behind 'local'; 'command -v' is the portable replacement for 'which'.
  local MAKE_COMMAND
  MAKE_COMMAND=$(command -v ninja || true)
  local CMAKE_GENERATOR=
  if [ -z "$MAKE_COMMAND" ]; then
    echo "Ninja is not found, using Make instead"
    MAKE_COMMAND="make -j $PROCESSES"
  else
    CMAKE_GENERATOR=-GNinja
  fi

  CONF=$1
  if [ -n "$OPT_PATH" ]; then
    DIRNAME="$OPT_PATH/omim-build-$(echo "$CONF" | tr '[:upper:]' '[:lower:]')"
  else
    DIRNAME="$OMIM_PATH/../omim-build-$(echo "$CONF" | tr '[:upper:]' '[:lower:]')"
  fi
  # -c: wipe the build dir first (deprecated '-a' replaced by '&&').
  [ -d "$DIRNAME" ] && [ -n "$OPT_CLEAN" ] && rm -r "$DIRNAME"
  mkdir -p "$DIRNAME"
  cd "$DIRNAME"
  if [ -z "$OPT_DESIGNER" ]; then
    # BUG FIX: the original quoted '"$CMAKE_GENERATOR"', passing an empty
    # "" argument to cmake whenever ninja is absent; ':+' expands the
    # argument only when the variable is non-empty.
    "$CMAKE" ${CMAKE_GENERATOR:+"$CMAKE_GENERATOR"} "$OMIM_PATH" \
      -DCMAKE_BUILD_TYPE="$CONF" \
      -DBUILD_DESIGNER:BOOL=OFF \
      -DBUILD_STANDALONE:BOOL=$([ "$OPT_STANDALONE" == 1 ] && echo "ON" || echo "OFF") \
      ${CMAKE_CONFIG:-}
    echo ""
    # Intentionally unquoted: MAKE_COMMAND may carry '-j N', OPT_TARGET
    # may list several targets.
    $MAKE_COMMAND $OPT_TARGET
    if [ -n "$OPT_TARGET" ] && [ -n "$OPT_LAUNCH_BINARY" ]; then
      for target in $OPT_TARGET; do
        "$DIRNAME/$target"
      done
    fi
  else
    "$CMAKE" ${CMAKE_GENERATOR:+"$CMAKE_GENERATOR"} "$OMIM_PATH" -DCMAKE_BUILD_TYPE="$CONF" -DBUILD_DESIGNER:BOOL=ON ${CMAKE_CONFIG:-}
    $MAKE_COMMAND package
  fi
  if [ -n "$OPT_COMPILE_DATABASE" ]; then
    cp "$DIRNAME/compile_commands.json" "$OMIM_PATH"
  fi
}
|
||||
|
||||
# Run every configuration the user asked for (RelWithDebInfo is CMake's
# spelling of "release with debug info").
if [ -n "$OPT_DEBUG" ]; then build Debug; fi
if [ -n "$OPT_RELEASE" ]; then build Release; fi
if [ -n "$OPT_RELEASEDEBUGINFO" ]; then build RelWithDebInfo; fi
exit 0
|
||||
18
tools/unix/clang-format.sh
Executable file
18
tools/unix/clang-format.sh
Executable file
|
|
@ -0,0 +1,18 @@
|
|||
#!/usr/bin/env bash
# Reformat every tracked C++/Java source with clang-format, then fail
# (non-zero) if anything changed — suitable as a CI formatting gate.
set -euo pipefail

echo "Running clang-format on all repository files..."

# Format each NUL-delimited file name read on stdin, one clang-format
# process per file, maximally parallel.
format_stdin() {
  xargs -n1 -0 -P0 clang-format -i
}

# Android
find android/{app,sdk}/src -type f -name '*.java' -print0 | format_stdin
find android/sdk/src/main/cpp -type f -name '*.[hc]pp' -print0 | format_stdin

# TODO: iOS
# find iphone -type f -name '*.[hc]pp' -o -name '*.[hm]' -o -name '*.mm' -print0 | format_stdin

# Core/C++
find dev_sandbox generator libs qt tools -type f -name '*.[hc]pp' -print0 | format_stdin

git diff --exit-code
|
||||
9
tools/unix/clone_en_release_notes.sh
Executable file
9
tools/unix/clone_en_release_notes.sh
Executable file
|
|
@ -0,0 +1,9 @@
|
|||
#!/usr/bin/env bash
# Copy the English (en-US) release notes over every other localisation's
# notes file, for both the Android (F-Droid/Play listings) and iOS stores.
set -eu

ANDROID_LISTINGS=android/app/src/fdroid/play/listings
ANDROID_NOTES=$ANDROID_LISTINGS/en-US/release-notes.txt
IOS_METADATA=iphone/metadata
IOS_NOTES=$IOS_METADATA/en-US/release_notes.txt

# Fail loudly if a source file is missing: 'find -exec' would otherwise
# print per-file rsync errors but still exit 0.
[ -f "$ANDROID_NOTES" ] || { echo "Missing $ANDROID_NOTES" >&2; exit 1; }
[ -f "$IOS_NOTES" ] || { echo "Missing $IOS_NOTES" >&2; exit 1; }

# Copying en-US onto itself is a harmless no-op.
find "$ANDROID_LISTINGS" -name release-notes.txt -exec rsync -a "$ANDROID_NOTES" {} \;
find "$IOS_METADATA" -name release_notes.txt -exec rsync -a "$IOS_NOTES" {} \;
|
||||
34
tools/unix/diff_features.py
Executable file
34
tools/unix/diff_features.py
Executable file
|
|
@ -0,0 +1,34 @@
|
|||
#!/usr/bin/env python3
|
||||
import sys, re
|
||||
|
||||
# Matches one line of type_statistics output, e.g.
# "12. highway-residential|: size = 123; count = 45; length = 6.7 m; area = 8.9 m2"
# Groups: 1 = type name, 2 = size, 3 = count, 4 = length, 5 = area.
RE_STAT = re.compile(r'(?:\d+\. )?([\w:|-]+?)\|: size = (\d+); count = (\d+); length = ([0-9.e+-]+) m; area = ([0-9.e+-]+) m.\s*')

def parse_and_add(data, line):
    """Parse one stats line; on match, record the feature count under its type name.

    Lines that do not match the stats format are silently ignored.
    """
    match = RE_STAT.match(line)
    if match is None:
        return
    data[match.group(1)] = int(match.group(3))
|
||||
|
||||
# CLI: compare two type_statistics outputs and report feature types whose
# counts changed significantly (or disappeared entirely).
if len(sys.argv) < 3:
    print('This tool compares type_statistics output for feature sizes')
    print('Usage: {0} <output_new> <output_old> [threshold_in_%]'.format(sys.argv[0]))
    sys.exit(0)

# data1 holds the OLD counts (argv[2]), data2 the NEW counts (argv[1]).
data1 = {}
with open(sys.argv[2], 'r') as f:
    for line in f:
        parse_and_add(data1, line)
data2 = {}
with open(sys.argv[1], 'r') as f:
    for line in f:
        parse_and_add(data2, line)

# Ratio threshold: default 100% difference -> factor 2.0.
threshold = (int(sys.argv[3]) if len(sys.argv) > 3 else 100) / 100.0 + 1
# Minimum absolute count difference worth reporting.
min_diff = 40

for k in data1:
    v1 = int(data1[k])
    if k in data2:
        v2 = int(data2[k])
        # NOTE(review): 'and' binds tighter than 'or', so this reads as
        # v1==0 or v2==0 or (ratio > threshold and |diff| > min_diff);
        # zero counts are reported regardless of min_diff — confirm intended.
        if v1 == 0 or v2 == 0 or max(v1, v2) / float(min(v1, v2)) > threshold and abs(v1 - v2) > min_diff:
            print('{0}: {1} to {2}'.format(k, v1, v2))
    elif v1 > min_diff:
        # Type present in the old output but missing from the new one.
        print('- not found: {0}, {1}'.format(k, v1))
|
||||
26
tools/unix/diff_size.py
Executable file
26
tools/unix/diff_size.py
Executable file
|
|
@ -0,0 +1,26 @@
|
|||
#!/usr/bin/env python3
|
||||
import os, sys
|
||||
|
||||
# CLI: compare .mwm file sizes between two directories and report files
# whose sizes differ by more than the threshold.
if len(sys.argv) < 3:
    print('This tool shows very different file sizes')
    print('Usage: {0} <newdir> <olddir> [threshold_in_%]'.format(sys.argv[0]))
    sys.exit(0)

new_path = sys.argv[1]
old_path = sys.argv[2]
# Ratio threshold: default 10% difference -> factor 1.1.
threshold = (int(sys.argv[3]) if len(sys.argv) > 3 else 10) / 100.0 + 1
# Ignore absolute differences below 1 MiB.
min_diff = 1024 * 1024

for f in sorted(os.listdir(old_path)):
    new_file = os.path.join(new_path, f)
    old_file = os.path.join(old_path, f)
    # Only compare map files (substring check on the full path).
    if '.mwm' not in new_file:
        continue
    if os.path.isfile(new_file) and os.path.isfile(old_file):
        new_size = os.path.getsize(new_file)
        old_size = os.path.getsize(old_file)
        if new_size + old_size > 0:
            # NOTE(review): 'and' binds tighter than 'or' — zero-size files
            # are reported regardless of min_diff; confirm this is intended.
            if new_size == 0 or old_size == 0 or max(new_size, old_size) / float(min(new_size, old_size)) > threshold and abs(new_size - old_size) > min_diff:
                # Sizes are printed as (possibly fractional) MB.
                print('{0}: {1} {2} to {3} MB'.format(f, old_size / 1024 / 1024, 'up' if new_size > old_size else 'down', new_size / 1024 / 1024))
    else:
        print('Not found a mirror for {0}'.format(f))
|
||||
30
tools/unix/find_generator_tool.sh
Executable file
30
tools/unix/find_generator_tool.sh
Executable file
|
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env bash
#####################################
# Locates generator_tool executable #
#####################################

# Set GENERATOR_TOOL to explicitly use one
# Or BUILD_PATH to point to a build directory
#
# On success, GENERATOR_TOOL holds the chosen executable and is echoed.
# NOTE(review): this file calls a 'fail' function it does not define —
# presumably provided by the script that sources it; running it standalone
# would hit "fail: command not found". Confirm with callers.

OMIM_PATH="${OMIM_PATH:-$(cd "$(dirname "$0")/../.."; pwd)}"

# Only search when GENERATOR_TOOL is unset or not executable.
if [ -z "${GENERATOR_TOOL-}" -o ! -x "${GENERATOR_TOOL-}" ]; then
  # Candidate dirs: explicit BUILD_PATH, the repo itself, and sibling
  # ../*omim*elease* / ../*omim*ebug build directories (glob may stay
  # literal when nothing matches; the -x test below filters that out).
  IT_PATHS_ARRAY=()
  for i in "${BUILD_PATH-}" "$OMIM_PATH" "$OMIM_PATH/.."/*omim*elease* "$OMIM_PATH/.."/*omim*ebug; do
    IT_PATHS_ARRAY+=("$i/generator_tool")
  done

  # Legacy Xcode build layout.
  if [ -d "$OMIM_PATH/../omim-xcode-build" ]; then
    IT_PATHS_ARRAY+=("$OMIM_PATH/../omim-xcode-build/Release" "$OMIM_PATH/../omim-xcode-build/Debug")
  fi

  # First executable candidate wins (BUILD_PATH takes priority).
  for i in "${BUILD_PATH:+$BUILD_PATH/generator_tool}" "${IT_PATHS_ARRAY[@]}"; do
    if [ -x "$i" ]; then
      GENERATOR_TOOL="$i"
      break
    fi
  done
fi

[ -z "${GENERATOR_TOOL-}" -o ! -x "${GENERATOR_TOOL-}" ] && fail "No generator_tool found in ${IT_PATHS_ARRAY[*]-${GENERATOR_TOOL-}}"
echo "Using tool: $GENERATOR_TOOL"
|
||||
2
tools/unix/generate_categories.sh
Executable file
2
tools/unix/generate_categories.sh
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/usr/bin/env sh
# Regenerate data/categories.txt from the per-language category strings.
set -eu
# Resolve the repo root from this script's location so the tool works no
# matter which directory it is invoked from (it previously required the
# caller's cwd to be the repo root).
cd "$(dirname "$0")/../.."
./tools/python/categories/json_to_txt.py data/categories-strings data/categories.txt
|
||||
74
tools/unix/generate_drules.sh
Executable file
74
tools/unix/generate_drules.sh
Executable file
|
|
@ -0,0 +1,74 @@
|
|||
#!/usr/bin/env bash
# Rebuild the compiled drawing rules (drules_proto*) for all map styles,
# regenerate classificator/visibility/colors/patterns outputs, and store
# unified diffs against the previous run's files.
set -e -u

OMIM_PATH="${OMIM_PATH:-$(dirname "$0")/../..}"
DATA_PATH="${DATA_PATH:-$OMIM_PATH/data}"

# Compile one style's drawing rules with the kothic converter.
# $1 - style type (default / outdoors / vehicle)
# $2 - style name (light / dark)
# $3 - output file suffix (e.g. _default_light); optional
function BuildDrawingRules() {
  styleType=$1
  styleName=$2
  suffix=${3-}
  echo "Building drawing rules for style $styleType/$styleName"
  # Cleanup old compiled drules and diff ('|| true': first run has none)
  rm "$DATA_PATH"/drules_proto$suffix.{bin,txt.diff} || true
  # Store old txt version for diff
  mv -f "$DATA_PATH"/drules_proto$suffix.txt{,.prev} || true
  # Run script to build style
  python3 "$OMIM_PATH/tools/kothic/src/libkomwm.py" --txt \
    -s "$DATA_PATH/styles/$styleType/$styleName/style.mapcss" \
    -o "$DATA_PATH/drules_proto$suffix" \
    -p "$DATA_PATH/styles/$styleType/include/"
  # Output diff and store to a file
  if [ -f "$DATA_PATH/drules_proto$suffix.txt.prev" ]; then
    diff -u "$DATA_PATH/drules_proto$suffix.txt.prev" "$DATA_PATH/drules_proto$suffix.txt" > "$DATA_PATH/drules_proto$suffix.txt.diff" || true
  fi
}

# Files regenerated by every run; previous copies are kept for diffing.
outputs=(classificator.txt types.txt visibility.txt colors.txt patterns.txt drules_proto.txt)
# Store old versions for diffs
for item in ${outputs[*]}
do
  if [ -f "$DATA_PATH/$item" ]; then
    mv -f "$DATA_PATH/$item" "$DATA_PATH/$item.prev"
  fi
done

# Building drawing rules
BuildDrawingRules default light _default_light
BuildDrawingRules default dark _default_dark
BuildDrawingRules outdoors light _outdoors_light
BuildDrawingRules outdoors dark _outdoors_dark
# Keep vehicle style last to produce same visibility.txt & classificator.txt
BuildDrawingRules vehicle light _vehicle_light
BuildDrawingRules vehicle dark _vehicle_dark

# TODO: the designer is not used at the moment.
# In designer mode we use drules_proto_design file instead of standard ones
# cp $OMIM_PATH/data/drules_proto_default_light.bin $OMIM_PATH/data/drules_proto_default_design.bin

echo "Exporting transit colors..."
python3 "$OMIM_PATH/tools/python/transit/transit_colors_export.py" \
  "$DATA_PATH/colors.txt" > /dev/null

# Merged drules_proto.bin is used by the map generator.
# It contains max visibilities (min visible zoom) for features across all styles.
echo "Merging styles..."
python3 "$OMIM_PATH/tools/python/stylesheet/drules_merge.py" \
  "$DATA_PATH/drules_proto_default_light.bin" \
  "$DATA_PATH/drules_proto_vehicle_light.bin" \
  "$DATA_PATH/drules_proto_outdoors_light.bin" \
  "$DATA_PATH/drules_proto.bin" \
  "$DATA_PATH/drules_proto.txt" \
  > /dev/null

# Output diffs and store to files
for item in ${outputs[*]}
do
  if [ -f "$DATA_PATH/$item.prev" ] && [ -f "$DATA_PATH/$item" ]; then
    diff -u "$DATA_PATH/$item.prev" "$DATA_PATH/$item" > "$DATA_PATH/$item.diff" || true
  else
    echo "Skipping diff for $item (first run or file missing)"
  fi
done

echo "Diffs for all changes are stored in $DATA_PATH/*.txt.diff"
|
||||
13
tools/unix/generate_proto.sh
Executable file
13
tools/unix/generate_proto.sh
Executable file
|
|
@ -0,0 +1,13 @@
|
|||
#!/usr/bin/env bash
# Regenerate the C++ and Python protobuf bindings for drules_struct.proto.
set -e -u -x

OMIM_PATH="${OMIM_PATH:-$(dirname "$0")/../..}"

# Drop stale generated sources first; ignore errors when they don't exist.
rm "$OMIM_PATH/indexer"/drules_struct.pb.* || true
rm "$OMIM_PATH/tools/python/stylesheet"/drules_struct_pb2.* || true
rm "$OMIM_PATH/tools/kothic/src"/drules_struct_pb2.* || true

PROTO="$OMIM_PATH/indexer/drules_struct.proto"
# One protoc run per output flavour: C++ for the indexer, Python for the
# stylesheet tooling and for kothic.
for out_flag in "--cpp_out=$OMIM_PATH/indexer" \
                "--python_out=$OMIM_PATH/tools/python/stylesheet" \
                "--python_out=$OMIM_PATH/tools/kothic/src"; do
  protoc --proto_path="$OMIM_PATH/indexer" "$out_flag" "$PROTO"
done
|
||||
5
tools/unix/generate_styles.sh
Executable file
5
tools/unix/generate_styles.sh
Executable file
|
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
# Regenerate both the symbol skins and the drawing rules, in that order.
set -e -u

scripts_dir="$(dirname "$0")"
for step in generate_symbols.sh generate_drules.sh; do
  "$scripts_dir/$step"
done
|
||||
93
tools/unix/generate_symbols.sh
Executable file
93
tools/unix/generate_symbols.sh
Executable file
|
|
@ -0,0 +1,93 @@
|
|||
#!/usr/bin/env bash
# Regenerate the symbol skin files for every style and screen density,
# then recompress the produced PNGs with optipng.
set -euo pipefail

# optipng is required for the final compression pass; bail out early with
# install hints if it is missing.
if ! command -v optipng &> /dev/null
then
  echo -e "\033[1;31moptipng could not be found"
  if [[ $OSTYPE == 'darwin'* ]]; then
    echo 'run command'
    echo 'brew install optipng'
    echo 'to install it'
    exit
  fi
  echo 'take a look to http://optipng.sourceforge.net/'
  exit
fi

# Add env var to disable renderer on a display (renderer not working into a Github container)
export QT_QPA_PLATFORM=offscreen

BINARY_NAME=skin_generator_tool
OMIM_PATH="${OMIM_PATH:-$(cd "$(dirname "$0")/../.."; pwd)}"
BUILD_DIR="$OMIM_PATH/build"
SKIN_GENERATOR="${SKIN_GENERATOR:-$BUILD_DIR/$BINARY_NAME}"
DATA_PATH="$OMIM_PATH/data"

# cmake rebuilds skin generator binary if necessary.
cmake -S "$OMIM_PATH" -B "$BUILD_DIR" -G Ninja -DCMAKE_BUILD_TYPE=Release -DSKIP_TESTS:bool=true
cmake --build "$BUILD_DIR" --target "$BINARY_NAME"


# Helper function to build skin
# Parameter $1 - style type (default)
# Parameter $2 - style name (light, dark, ...)
# Parameter $3 - resource name (mdpi, hdpi, ...)
# Parameter $4 - symbol size
# Parameter $5 - style suffix (none, _light, _dark)
# Parameter $6 - symbols folder (symbols)
# Parameter $7 - symbols suffix (none, -ad); optional
function BuildSkin() {
  styleType=$1
  styleName=$2
  resourceName=$3
  symbolSize=$4
  suffix=$5
  symbolsFolder=$6
  symbolsSuffix=${7-}

  echo "Building skin for $styleName/$resourceName"
  # Set environment: expose the density's source directory under the fixed
  # "png" name that skin_generator_tool reads from.
  STYLE_PATH="$DATA_PATH/styles/$styleType/$styleName"
  PNG_PATH="$STYLE_PATH/symbols$symbolsSuffix/png"
  rm -rf "$PNG_PATH" || true
  ln -s "$STYLE_PATH/$resourceName$symbolsSuffix" "$PNG_PATH"
  # Run skin generator
  "$SKIN_GENERATOR" --symbolWidth $symbolSize --symbolHeight $symbolSize --symbolsDir "$STYLE_PATH/$symbolsFolder" \
    --skinName "$DATA_PATH/symbols/$resourceName/$suffix/basic" --skinSuffix="$symbolsSuffix"
  # Reset environment: drop the temporary symlink again.
  rm -r "$PNG_PATH" || true
}

# All supported screen densities.
symbols_name=(6plus mdpi hdpi xhdpi xxhdpi xxxhdpi)

# Cleanup
rm -rf "$DATA_PATH"/symbols/*/*/symbols.*

# Build styles

BuildSkin default dark mdpi 18 dark symbols
BuildSkin default dark hdpi 27 dark symbols
BuildSkin default dark xhdpi 36 dark symbols
BuildSkin default dark 6plus 43 dark symbols
BuildSkin default dark xxhdpi 54 dark symbols
BuildSkin default dark xxxhdpi 64 dark symbols

BuildSkin default light mdpi 18 light symbols
BuildSkin default light hdpi 27 light symbols
BuildSkin default light xhdpi 36 light symbols
BuildSkin default light 6plus 43 light symbols
BuildSkin default light xxhdpi 54 light symbols
BuildSkin default light xxxhdpi 64 light symbols

# Losslessly recompress every generated PNG.
for i in ${symbols_name[*]}; do
  optipng -zc9 -zm8 -zs0 -f0 "$DATA_PATH"/symbols/"${i}"/light/symbols.png
  optipng -zc9 -zm8 -zs0 -f0 "$DATA_PATH"/symbols/"${i}"/dark/symbols.png
done

rm -rf "$DATA_PATH"/symbols/*/design/

# The styles designer is not used at the moment.
# If enabled then remove design symbols from bundled android assets in android/sdk/src/main/assets/symbols
# for i in ${symbols_name[*]}; do
#  cp -r "$DATA_PATH"/symbols/"${i}"/light/ "$DATA_PATH"/symbols/"${i}"/design/
# done
|
||||
29
tools/unix/generate_vulkan_shaders.sh
Executable file
29
tools/unix/generate_vulkan_shaders.sh
Executable file
|
|
@ -0,0 +1,29 @@
|
|||
#!/usr/bin/env bash
# Compile the GL shader library into Vulkan (SPIR-V) shaders using the
# Android NDK's glslc. Optional $1 is forwarded to the preprocessor as its
# debug flag.
set -e -u

DEBUG="${1:-empty}"

# Absolute, normalized directory containing this script ($(...) replaces
# the original nested backticks).
MY_PATH="$(cd "$(dirname "$0")" && pwd)"

source "$MY_PATH/../autobuild/ndk_helper.sh"
# Assign and export separately: 'export VAR=$(cmd)' masks the command's
# exit status (SC2155). An empty result is handled right below.
NDK_ROOT=$(GetNdkRoot) || true
export NDK_ROOT
if [ -z "$NDK_ROOT" ]
then
  echo "Can't find NDK root path"; exit 1
fi

# Pick the NDK's glslc binary matching the host OS.
KERNEL_NAME="$( uname -s )"
if [[ $KERNEL_NAME == 'Darwin' ]]
then
  GLSLC_PATH="$NDK_ROOT/shader-tools/darwin-x86_64/glslc"
elif [[ $KERNEL_NAME == 'Linux' ]]
then
  GLSLC_PATH="$NDK_ROOT/shader-tools/linux-x86_64/glslc"
else
  echo "Unknown kernel"; exit 1
fi

OMIM_PATH="${OMIM_PATH:-$(cd "$(dirname "$0")/../.."; pwd)}"
SHADERS_GENERATOR="$OMIM_PATH/libs/shaders/vulkan_shaders_preprocessor.py"

python3 "$SHADERS_GENERATOR" "$OMIM_PATH/libs/shaders/GL" shader_index.txt shaders_lib.glsl "$OMIM_PATH/data/vulkan_shaders" "$GLSLC_PATH" "$DEBUG"
|
||||
16
tools/unix/helper_python.sh
Normal file
16
tools/unix/helper_python.sh
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
# Helpers for running the project's Python tools inside per-tool
# virtualenvs. Not a standalone script — meant to be sourced.
PYTHON="${PYTHON:-python3}"

# Create (if needed) and activate a .venv inside the given directory,
# installing its requirements.txt when present.
# $1 - directory containing the tool (and optionally requirements.txt)
function activate_venv_at_path() {
  # BUG FIX: 'path' was a global before, leaking into (and potentially
  # clobbering a variable of) every script sourcing this helper.
  local path=$1

  if [ ! -d "$path/.venv" ]; then
    "$PYTHON" -m venv "$path/.venv"
  fi

  source "$path/.venv/bin/activate"

  if [ -f "$path/requirements.txt" ]; then
    pip install --upgrade pip
    pip install -r "$path/requirements.txt"
  fi
}
|
||||
36
tools/unix/link_gplay_relnotes.sh
Executable file
36
tools/unix/link_gplay_relnotes.sh
Executable file
|
|
@ -0,0 +1,36 @@
|
|||
#!/usr/bin/env bash
#
# Linking relnotes from F-Droid to Google Play
#

set -e -u

REPO_PATH="$(cd "$(dirname "$0")/../.."; pwd -P)"
ANDROID_PATH="$REPO_PATH/android/app/src"
GPLAY_PATH="$ANDROID_PATH/google/play/release-notes"

pushd "$ANDROID_PATH" >/dev/null

echo "Deleting all GPlay relnotes symlinks in $GPLAY_PATH"
# Quote + ':?' guard: the expansion was unquoted before, and an empty
# GPLAY_PATH must never let this degenerate into a plain 'rm -rf'.
rm -rf "${GPLAY_PATH:?}"
mkdir -p "$GPLAY_PATH"

pushd fdroid/play/listings >/dev/null

echo "Symlinking to F-Droid relnotes in $(pwd)"

# $loc iterates over locale directories and keeps its trailing slash.
for loc in */; do
  if [ -f "$loc/release-notes.txt" ]; then
    echo "Adding $loc relnotes"
    pushd ../../../google/play/release-notes >/dev/null
    mkdir -p "$loc"
    cd "$loc"
    # NOTE(review): 'ln -sT' is GNU coreutils; macOS/BSD ln has no -T —
    # confirm this only ever runs on Linux.
    ln -sT "../../../../fdroid/play/listings/${loc}release-notes.txt" default.txt
    popd >/dev/null
  fi
done

popd >/dev/null
popd >/dev/null

exit 0
|
||||
68
tools/unix/maps/Dockerfile
Normal file
68
tools/unix/maps/Dockerfile
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
# See run-docker.sh for cloning, building, and running the maps generator Docker routine
FROM debian:latest

# Non-interactive apt + fixed timezone so the build never blocks on prompts.
ARG DEBIAN_FRONTEND=noninteractive
ENV TZ=Etc/UTC

# Toolchain, Qt6, OSM tooling and Python — everything the map generator
# build and pipeline need. apt caches are purged in the same layer to keep
# the image small.
RUN apt-get update -qq \
    && apt-get install -y --no-install-recommends \
        build-essential \
        ca-certificates \
        cargo \
        clang \
        cmake \
        curl \
        git \
        jq \
        libc++-dev \
        libfreetype-dev \
        libgeos-dev \
        libgl1-mesa-dev \
        libglvnd-dev \
        libharfbuzz-dev \
        libicu-dev \
        libqt6positioning6 \
        libqt6positioning6-plugins \
        libqt6svg6-dev \
        libsqlite3-dev \
        libxcursor-dev \
        libxi-dev \
        libxinerama-dev \
        libxrandr-dev \
        ninja-build \
        nodejs \
        openssh-client \
        osmctools \
        osmium-tool \
        pyosmium \
        python3 \
        python3-pip \
        python3-venv \
        python-is-python3 \
        python3-dev \
        qt6-base-dev \
        qt6-positioning-dev \
        rclone \
        rustc \
        sshpass \
        vim-tiny \
        wget \
        wget2 \
        zlib1g-dev \
    && rm -rf /var/cache/apt/* /var/lib/apt/lists/*;

# Allow pip to install system-wide in the container
RUN pip3 install "protobuf<4" --break-system-packages
|
||||
|
||||
# Mount a volume to store the large input, output, and temp files here
RUN mkdir /mnt/4tbexternal
# And a volume to store >10gb files for the planet output here
RUN mkdir /home/planet

# BUG FIX: Docker does not tilde-expand WORKDIR, so 'WORKDIR ~' created a
# literal directory named '~'. Use root's home directory explicitly.
WORKDIR /root

# The actual map generation process will run something like this,
# but ~/comaps isn't mounted nor provided in the docker image.
#CMD ~/comaps/tools/unix/maps/docker_maps_generator.sh

# Exec form avoids the extra /bin/sh -c wrapper process.
CMD ["/bin/bash"]
|
||||
56
tools/unix/maps/docker_maps_generator.sh
Normal file
56
tools/unix/maps/docker_maps_generator.sh
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
#!/usr/bin/env bash
# Entry point for a full map-generation run inside the container built
# from tools/unix/maps/Dockerfile. Controlled via environment variables:
#   MWMCONTINUE  - non-zero: resume a preexisting generator run
#   MWMCOUNTRIES - optional list of specific maps to generate

set -eu

# BUG FIX: both variables may be unset; the original '[ $MWMCONTINUE -gt 0 ]'
# then failed with "unary operator expected". Default them explicitly
# (also required now that -u is enabled).
MWMCONTINUE="${MWMCONTINUE:-0}"
MWMCOUNTRIES="${MWMCOUNTRIES:-}"

echo "<$(date +%T)> Starting..."

# Prepare paths
# Most other paths in /mnt/4tbexternal or /home/planet are already created by Dockerfile or CI/CD.
#
mkdir -p /root/.config/CoMaps # Odd mkdir permission errors in generator_tool in Docker without these
chmod -R 777 /root/.config
mkdir -p /home/planet/postcodes/gb-postcode-data/
mkdir -p /home/planet/postcodes/us-postcodes/
mkdir -p /home/planet/SRTM-patched-europe/
mkdir -p /home/planet/subway

echo "<$(date +%T)> Running ./configure.sh ..."
cd ~/comaps
export SKIP_MAP_DOWNLOAD=1 SKIP_GENERATE_SYMBOLS=1
./configure.sh

echo "<$(date +%T)> Compiling tools..."
cd ~/comaps
./tools/unix/build_omim.sh -p ~ -R generator_tool
./tools/unix/build_omim.sh -p ~ -R world_roads_builder_tool
./tools/unix/build_omim.sh -p ~ -R mwm_diff_tool
cd tools/python/maps_generator
python3 -m venv /tmp/venv
/tmp/venv/bin/pip3 install -r requirements_dev.txt

echo "<$(date +%T)> Copying map generator INI..."
cp var/etc/map_generator.ini.prod var/etc/map_generator.ini


cd ~/comaps/tools/python
if [ "$MWMCONTINUE" -gt 0 ]; then

  echo "<$(date +%T)> Continuing from preexisting generator run..."
  /tmp/venv/bin/python -m maps_generator --skip="MwmDiffs" --continue

elif [ -n "$MWMCOUNTRIES" ]; then

  echo "<$(date +%T)> Generating only specific maps [$MWMCOUNTRIES]..."
  /tmp/venv/bin/python -m maps_generator --countries="$MWMCOUNTRIES" --skip="MwmDiffs"

else

  echo "<$(date +%T)> Generating maps..."
  /tmp/venv/bin/python -m maps_generator --skip="MwmDiffs"

fi

echo "<$(date +%T)> DONE"
|
||||
52
tools/unix/maps/generate_subways.sh
Executable file
52
tools/unix/maps/generate_subways.sh
Executable file
|
|
@ -0,0 +1,52 @@
|
|||
#!/usr/bin/env bash
set -e -u -o pipefail

# Generate subways.transit.json file consumed by the maps generator.
# Inputs:
# - OSM planet in pbf format
# - csv table of subway networks
#   (auto-downloaded from https://docs.google.com/spreadsheets/d/1SEW1-NiNOnA2qDwievcxYV1FOaQl1mb1fdeyqAxHu3k)
# Output:
# - subways.transit.json

# helper_settings.sh defines PLANET_PBF, BUILD_PATH, SUBWAYS_* and friends;
# helper_python.sh provides $PYTHON and activate_venv_at_path.
source "$(dirname "$0")/helper_settings.sh"
source "$REPO_PATH/tools/unix/helper_python.sh"

# Parameters for the process_subways.sh script (read via its environment):
export PLANET="$PLANET_PBF"
export SKIP_PLANET_UPDATE="1"
# http(s) or "file://" URL to a CSV file with a list of subway networks.
# Auto-downloaded from https://docs.google.com/spreadsheets/d/1SEW1-NiNOnA2qDwievcxYV1FOaQl1mb1fdeyqAxHu3k
# If unavailable then replace with a local file.
# TODO: keep the downloaded csv file from the latest run.
#export CITIES_INFO_URL=""
export TMPDIR="$BUILD_PATH/subway"
# The output file, which needs post-processing by transit_graph_generator.py
export MAPSME="$SUBWAYS_PATH/subway.json"

# Produce additional files needed for https://cdn.organicmaps.app/subway/
export HTML_DIR="$SUBWAYS_VALIDATOR_PATH"
export DUMP="$SUBWAYS_VALIDATOR_PATH"
export GEOJSON="$SUBWAYS_VALIDATOR_PATH"
export DUMP_CITY_LIST="$SUBWAYS_VALIDATOR_PATH/cities.txt"

# cd to subways repo so relative paths work in the script
PREVDIR=$(pwd)
cd "$SUBWAYS_REPO_PATH"
echo "Running process_subways.sh:"
./scripts/process_subways.sh 2>&1 | tee "$SUBWAYS_LOG"
cd "$PREVDIR"

# Make render.html available for map visualization on the web
cp -r "$SUBWAYS_REPO_PATH"/render/* "$SUBWAYS_VALIDATOR_PATH/"

TRANSIT_TOOL_PATH="$REPO_PATH/tools/python/transit"
SUBWAYS_GRAPH_FILE="$SUBWAYS_PATH/subways.transit.json"

# Post-process subway.json into the final transit graph inside the transit
# tool's virtualenv, appending to the same log.
activate_venv_at_path "$TRANSIT_TOOL_PATH"
"$PYTHON" "$TRANSIT_TOOL_PATH/transit_graph_generator.py" "$MAPSME" "$SUBWAYS_GRAPH_FILE" 2>&1 | tee -a "$SUBWAYS_LOG"
deactivate

echo "Generated subways transit graph file:"
echo "$SUBWAYS_GRAPH_FILE"
echo "Finished"
|
||||
25
tools/unix/maps/helper_settings.sh
Normal file
25
tools/unix/maps/helper_settings.sh
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
# Loads user settings (creating a template settings file on first run),
# applies the defaults and prepares the workspace directories.
# Intended to be sourced by the maps scripts living in this directory.
SETTINGS_FILE="${SETTINGS_FILE:-$(cd "$(dirname "$0")"; pwd -P)/settings.sh}"
if [ -f "$SETTINGS_FILE" ]; then
  echo "Using settings from $SETTINGS_FILE"
  source "$SETTINGS_FILE"
else
  echo "Creating a template settings file $SETTINGS_FILE"
  # The quoted 'EOF' delimiter keeps $HOME (and any other $vars) literal in
  # the generated template; an unquoted delimiter would expand them here at
  # creation time, baking this machine's paths into the user's template.
  cat << 'EOF' > "$SETTINGS_FILE"
# Customize the default settings here.
# (check the defaults in settings_default.sh)

# The default maps workspace base is ../maps relative to the repo.
# All source and output and intermediate files will be organized there in corresponding subdirs.
# E.g. set it to the user's home directory:
# BASE_PATH="$HOME"
EOF
fi

source "$(dirname "$0")/settings_default.sh"

mkdir -p "$BASE_PATH"
mkdir -p "$BUILD_PATH"
mkdir -p "$DATA_PATH"

mkdir -p "$PLANET_PATH"
mkdir -p "$SUBWAYS_PATH"
|
||||
34
tools/unix/maps/run-docker.sh
Normal file
34
tools/unix/maps/run-docker.sh
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
#!/usr/bin/env bash

# Run the maps generator via Docker manually without CI.
# See .forgejo/workflows/map-generator.yml for steps to run before the main
# mapgen process, e.g. clone the repos, get/update planet file, generate
# isolines etc.
#
# To build the docker container:
#   cd /mnt/4tbexternal/comaps/tools/unix/maps
#   docker build . -t maps_generator
#
# To push for ci/cd, tag for codeberg:
#   docker login codeberg.org
#   docker tag maps_generator codeberg.org/comaps/maps_generator:latest
#   docker push codeberg.org/comaps/maps_generator:latest
# You can also tag and push the image Id for posterity: codeberg.org/comaps/maps_generator:1234abcd
# If you get a Dockerfile not found error especially on an XFS partition, try
# copying Dockerfile to an ext4 partition first, or use docker via apt instead of snap.
#
# We assume that the following will be cloned into the container itself at runtime:
#   ~/comaps     (comaps main app repo)
#   ~/subways    (repo for processing OSM subway/transit info)
#   ~/wikiparser (repo for processing Wikipedia data)
#
# We also assume a number of files/folders/repos are pre-set-up before mounting via volumes below:
#   /mnt/4tbexternal          (base folder for directory traversal)
#   /mnt/4tbexternal/osm-maps (folder for holding generated map data output)
#   /home/planet              (folder for holding required input dumps)

docker run \
  --ulimit nofile=262144:262144 \
  -v /mnt/4tbexternal/:/mnt/4tbexternal/ \
  -v /mnt/4tbexternal/osm-planet:/home/planet \
  -it codeberg.org/comaps/maps_generator:latest \
  ~/comaps/tools/unix/maps/docker_maps_generator.sh
|
||||
26
tools/unix/maps/settings.sh.prod
Normal file
26
tools/unix/maps/settings.sh.prod
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
## NOTE: edit the settings.sh file to customize/override the defaults.
## Production settings: data lives under /home/planet, repos under $HOME.

# Absolutize & normalize paths.
REPO_PATH="${REPO_PATH:-$(cd "$(dirname "$0")/../../.."; pwd -P)}"

#TODO: is base path used?
BASE_PATH="${BASE_PATH:-$REPO_PATH/..}"
# Source map data and processed outputs e.g. wiki articles
DATA_PATH="${DATA_PATH:-/home/planet}"
# Temporary files
BUILD_PATH="${BUILD_PATH:-$DATA_PATH/build}"
# Other code repositories, e.g. subways, wikiparser..
# $HOME is used instead of ~ because tilde is NOT expanded inside quotes,
# which previously left a literal "~" in the path.
CODE_PATH="${CODE_PATH:-$HOME}"

# OSM planet source files

PLANET_PATH="${PLANET_PATH:-$DATA_PATH/planet}"
PLANET_PBF="${PLANET_PBF:-$PLANET_PATH/planet-latest.osm.pbf}"
PLANET_O5M="${PLANET_O5M:-$PLANET_PATH/planet-latest.o5m}"

# Subways

SUBWAYS_REPO_PATH="${SUBWAYS_REPO_PATH:-/root/subways}"
SUBWAYS_PATH="${SUBWAYS_PATH:-$DATA_PATH/subway}"
SUBWAYS_LOG="${SUBWAYS_LOG:-$SUBWAYS_PATH/subway.log}"
SUBWAYS_VALIDATOR_PATH="${SUBWAYS_VALIDATOR_PATH:-$SUBWAYS_PATH/validator}"
|
||||
25
tools/unix/maps/settings_default.sh
Normal file
25
tools/unix/maps/settings_default.sh
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
# NOTE: edit the settings.sh file to customize/override the defaults.
# Every value below may be pre-set by the caller/environment; the
# ${VAR:=default} form only fills in values that are still unset or empty.

# Absolutize & normalize paths.
: "${REPO_PATH:=$(cd "$(dirname "$0")/../../.."; pwd -P)}"

# Maps workspace base: ../maps next to the repo checkout.
: "${BASE_PATH:=$REPO_PATH/../maps}"
# Temporary files
: "${BUILD_PATH:=$BASE_PATH/build}"
# Other code repositories, e.g. subways, wikiparser..
: "${CODE_PATH:=$REPO_PATH/..}"
# Source map data and processed outputs e.g. wiki articles
: "${DATA_PATH:=$BASE_PATH/data}"

# OSM planet source files

: "${PLANET_PATH:=$DATA_PATH/planet}"
: "${PLANET_PBF:=$PLANET_PATH/planet-latest.osm.pbf}"
: "${PLANET_O5M:=$PLANET_PATH/planet-latest.o5m}"

# Subways

: "${SUBWAYS_REPO_PATH:=$CODE_PATH/subways}"
: "${SUBWAYS_PATH:=$DATA_PATH/subway}"
: "${SUBWAYS_LOG:=$SUBWAYS_PATH/subway.log}"
: "${SUBWAYS_VALIDATOR_PATH:=$SUBWAYS_PATH/validator}"
|
||||
128
tools/unix/maps/upload_to_cdn.sh
Executable file
128
tools/unix/maps/upload_to_cdn.sh
Executable file
|
|
@ -0,0 +1,128 @@
|
|||
#!/usr/bin/env bash

# Upload new maps version to all CDN nodes (in parallel) and remove old versions.

# Use following commands for deleting older maps manually:
#
# ru1 - keep max 3 maps versions
# First list all maps versions on the server
# sudo rclone lsd ru1:comaps-maps/maps
# Delete the old version
# sudo rclone purge -v ru1:comaps-maps/maps/250713/
#
# fi1 - max 3 versions
# sudo rclone lsd fi1:/var/www/html/maps
# sudo rclone purge -v fi1:/var/www/html/maps/250713/
#
# de1 - max 6 versions
# sudo rclone lsd de1:/var/www/html/comaps-cdn/maps
# sudo rclone purge -v de1:/var/www/html/comaps-cdn/maps/250713/
#
# us2 - all versions, don't delete
# sudo rclone lsd us2:comaps-map-files/maps

set -e -u

if [ $# -eq 0 ]; then
  echo "Usage: upload_to_cdn.sh MAPS_PATH"
  echo "e.g. sudo upload_to_cdn.sh osm-maps/2025_09_06__09_48_08/250906"
  echo "uploads are run in parallel to us2,ru1,fi1,de1 servers,"
  echo "subsequent runs will update only missing/differing files,"
  echo "so its fine to run second time to ensure there were no incomplete transfers"
  echo "or to run on an unfinished generation first and then again after its fully finished."
  echo "(sudo is needed to access rclone.conf with servers credentials)"
  exit 1
fi

# Quote $1: the maps path could contain unusual characters.
MAPS=$(basename "$1")
DIR=$(dirname "$1")/$MAPS

echo "Uploading maps folder $DIR to $MAPS"

# Deletes all but the newest $2 version folders under the remote prefix $1.
#   $1 - rclone remote path holding per-version folders, e.g. ru1:comaps-maps/maps
#   $2 - number of newest versions to keep
clean_remote() {
  local remote=$1
  local keep=$2
  local old_versions version
  # rclone lsd prints the folder name in the 5th column; YYMMDD versions sort
  # lexicographically, so `sort -r | tail -n +(keep+1)` yields the old ones.
  old_versions=$(rclone lsd "$remote" --max-depth 1 | awk '{print $5}' | sort -r | tail -n "+$((keep + 1))")
  for version in $old_versions; do
    # Sanity guard: only purge folders that look like maps versions newer than 250101.
    if [[ "$version" =~ ^[0-9]+$ ]] && [ "$version" -gt 250101 ]; then
      echo "  Deleting $remote/$version/"
      rclone purge -v "$remote/$version/"
    fi
  done
}

# Remove old versions before uploading new ones
echo "Checking for old versions to remove..."

echo "Cleaning ru1 (keeping 3 newest versions)..."
clean_remote ru1:comaps-maps/maps 3

echo "Cleaning fi1 (keeping 3 newest versions)..."
clean_remote fi1:/var/www/html/maps 3

echo "Cleaning de1 (keeping 6 newest versions)..."
clean_remote de1:/var/www/html/comaps-cdn/maps 6

echo "Cleaning fr1 (keeping 6 newest versions)..."
clean_remote fr1:/data/maps 6

# us2 - keep all versions (no cleanup)
echo "Skipping us2 cleanup (keeping all versions)"

echo "Old version cleanup complete"

# Copies the new version folder to the remote prefix $1.
# An explicit mwm/txt filter is used to skip temp files when run for an unfinished generation.
upload_to() {
  rclone copy -v --include "*.{mwm,txt}" "$DIR" "$1/$MAPS"
}

echo "Uploading to us2"
upload_to us2:comaps-map-files/maps &

echo "Uploading to ru1"
upload_to ru1:comaps-maps/maps &

echo "Uploading to fi1"
upload_to fi1:/var/www/html/maps &

echo "Uploading to de1"
upload_to de1:/var/www/html/comaps-cdn/maps &

echo "Uploading to fr1"
upload_to fr1:/data/maps &

# us1 is not used for maps atm
# rclone lsd us1:/home/dh_zzxxrk/cdn-us-1.comaps.app/maps

wait

echo "Running once more without parallelization to output status:"

echo "us2 status:"
upload_to us2:comaps-map-files/maps

echo "ru1 status:"
upload_to ru1:comaps-maps/maps

echo "fi1 status:"
upload_to fi1:/var/www/html/maps

echo "de1 status:"
upload_to de1:/var/www/html/comaps-cdn/maps

echo "fr1 status:"
upload_to fr1:/data/maps

echo "Upload complete"
|
||||
25
tools/unix/minimise_symbol_svg.sh
Executable file
25
tools/unix/minimise_symbol_svg.sh
Executable file
|
|
@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env bash
# Minimises the style symbol SVG files in-place using scour.
set -euo pipefail

if ! command -v scour &> /dev/null; then
  # \033[1;31m = bold red; \033[0m resets the colour so the terminal
  # does not stay red for subsequent output.
  echo -e "\033[1;31mScour could not be found\033[0m"
  if [[ $OSTYPE == 'darwin'* ]]; then
    echo 'run command'
    echo 'brew install scour'
    echo 'to install it'
    exit
  fi
  echo 'Take a look at https://github.com/scour-project/scour'
  exit
fi

OMIM_PATH="${OMIM_PATH:-$(cd "$(dirname "$0")/../.."; pwd)}"

echo "Started processing"
for style_dir in style-clear/symbols style-night/symbols; do
  for f in "$OMIM_PATH"/data/styles/clear/$style_dir/*.svg; do
    # Quote "$f" so paths with spaces survive; write to a temp file first,
    # then move it over the original.
    scour -q -i "$f" -o "$f-new" --enable-viewboxing --enable-id-stripping --enable-comment-stripping --strip-xml-prolog --protect-ids-noninkscape
    mv -- "$f-new" "$f"
  done
done
echo "Done"
|
||||
21
tools/unix/osrm_online_server_generator.sh
Executable file
21
tools/unix/osrm_online_server_generator.sh
Executable file
|
|
@ -0,0 +1,21 @@
|
|||
#!/usr/bin/env bash

# Prepares OSRM routing data for an online server:
# extracts and contracts the planet file with the car profile.

set -u -x -e

# STXXL (external-memory library) configuration used by the osrm tools.
export STXXLCFG=~/.stxxl

PLANET_FILE="$HOME/planet/planet-latest.o5m"
OSRM_PATH=~/omim/3party/osrm/osrm-backend
PROFILE="$OSRM_PATH/profiles/car.lua"
BIN_PATH=~/osrm-backend-release
EXTRACT="$BIN_PATH/osrm-extract"
EXTRACT_CFG="$OSRM_PATH/../extractor.ini"
PREPARE="$BIN_PATH/osrm-prepare"
PREPARE_CFG="$OSRM_PATH/../contractor.ini"

# $( ) instead of legacy backticks; quote the expansions.
echo "Started at $(date)"
FILENAME=$(basename "$PLANET_FILE")
DIR=$(dirname "$PLANET_FILE")
"$EXTRACT" --config "$EXTRACT_CFG" --profile "$PROFILE" "$PLANET_FILE"
# ${FILENAME/\.*/.osrm} swaps the extension: planet-latest.o5m -> planet-latest.osrm
"$PREPARE" --config "$PREPARE_CFG" --profile "$PROFILE" "$DIR/${FILENAME/\.*/.osrm}"
echo "Finished at $(date)"
|
||||
14
tools/unix/refresh-embedded-faq.sh
Executable file
14
tools/unix/refresh-embedded-faq.sh
Executable file
|
|
@ -0,0 +1,14 @@
|
|||
#!/bin/sh

# Refreshes the embedded FAQ html (data/faq.html) from the organicmaps.app website.

# Note: ${BASH_SOURCE[0]} is a bashism and is undefined under /bin/sh;
# "$0" works in both shells when the script is executed directly.
SCRIPT_DIR=$(dirname -- "$0")
OUTPUT_FILE="$SCRIPT_DIR/../../data/faq.html"

echo "Downloading latest FAQ page from organicmaps.app website ..."
curl -s -L -f -o "$OUTPUT_FILE" https://organicmaps.app/faq/embedded-faq
res=$?

if test "$res" != "0"; then
  echo "The curl command failed with: $res"
else
  echo "Success!"
fi
|
||||
113
tools/unix/run_tests.sh
Normal file
113
tools/unix/run_tests.sh
Normal file
|
|
@ -0,0 +1,113 @@
|
|||
#!/usr/bin/env bash

# Runs the project's test binaries (smoke or full suite) from a build
# directory, teeing all output into a log file under /tmp.

set -euo pipefail

# Declare and assign separately so a failing command substitution is not
# masked by `readonly` always succeeding (ShellCheck SC2155).
SCRIPT_NAME=$(basename "$0")
readonly SCRIPT_NAME
LOG=$(mktemp "/tmp/${SCRIPT_NAME}.XXXXXX")
readonly LOG
readonly SMOKE_SUITE=( \
  base_tests \
  coding_tests \
  generator_tests \
  indexer_tests \
  map_tests \
  mwm_tests \
  platform_tests \
  routing_tests \
  search_tests \
)
BUILD_DIR=.
SUITE=full

# Echoes its arguments to both stdout and the log file.
log() {
  echo "$@" 2>&1 | tee -a "$LOG"
}

# Logs a message and terminates the script with a non-zero status.
die() {
  log "$@"
  echo "Terminated. Log is written to $LOG"
  exit 1
}

usage() {
  log "Usage: $0 [options]"
  log "Options:"
  log " -b path to build directory, default: ."
  log " -s test suite, smoke or full, default: full"
  log " -f regular expression which is applied to all tests, default: .*"
  log " -h prints this help message"
  log ""
  log "Smoke test suite consists of:"
  for testName in "${SMOKE_SUITE[@]}"
  do
    log " " "$testName"
  done
  exit 1
}

while [ $# -ne 0 ]
do
  case "$1" in
    -b) BUILD_DIR=${2?"Build directory is not set"}
        shift
        ;;
    -s) SUITE=${2?"Suite name is not set"}
        shift
        ;;
    -f) FILTER=${2?"Test filter regex is not set"}
        shift
        ;;
    -h) usage
        ;;
  esac
  shift
done

if [ ! -d "$BUILD_DIR" ]
then
  die "Build directory $BUILD_DIR does not exists"
fi

cd "$BUILD_DIR"

case "$SUITE" in
  smoke) TESTS=("${SMOKE_SUITE[@]}")
         ;;
  full)  TESTS=($(find . -maxdepth 1 -name '*_tests'))
         ;;
  *)     die "Unknown test suite: $SUITE"
         ;;
esac

EXIT_STATUS=0
for testBin in "${TESTS[@]}"
do
  if [ ! -x "$testBin" ]
  then
    die "Can't find test $testBin"
  fi

  # Build the argument list once instead of duplicating the run logic
  # for the filtered and unfiltered cases.
  ARGS=()
  if [ -n "${FILTER+defined}" ]
  then
    ARGS+=("--filter=$FILTER")
  fi

  log "Running $testBin..."
  # ${ARGS[@]+...} keeps `set -u` happy on bash < 4.4 when ARGS is empty.
  # The pipeline status is taken from the test binary, not from tee.
  "./$testBin" ${ARGS[@]+"${ARGS[@]}"} 2>&1 | tee -a "$LOG"
  if [ ${PIPESTATUS[0]} -ne 0 ]
  then
    EXIT_STATUS=1
  fi
done

if [ $EXIT_STATUS -eq 0 ]
then
  log "All tests passed, see log for details."
else
  log "Some of tests failed, see log for details."
fi
log "Log is written to: $LOG"
exit $EXIT_STATUS
|
||||
121
tools/unix/test_planet.sh
Executable file
121
tools/unix/test_planet.sh
Executable file
|
|
@ -0,0 +1,121 @@
|
|||
#!/usr/bin/env bash
#####################################################
# Tests a planet build made with generate_planet.sh #
#####################################################

if [ $# -eq 0 ]; then
  echo
  echo "This script analyzes a generate_planet.sh run and prints all issues."
  echo "Usage: $0 <target_dir> [<old_maps_dir>]"
  echo
  exit 1
fi

set -u # Fail on undefined variables

SCRIPT_PATH="$(dirname "$0")"
OMIM_PATH="${OMIM_PATH:-$(cd "$SCRIPT_PATH/../.."; pwd)}"
TARGET="$(cd "${TARGET:-$1}"; pwd)"
LOG_PATH="${LOG_PATH:-$TARGET/logs}"
PLANET_LOG="$LOG_PATH/generate_planet.log"
DELTA_WITH=
BOOKING_THRESHOLD=20
# Chained tests instead of the deprecated/ambiguous `[ ... -a ... ]`.
if [ $# -gt 1 ] && [ -d "${2-}" ]; then
  DELTA_WITH="$2"
fi

source "$SCRIPT_PATH/find_generator_tool.sh"

# Step 1: analyze logs and find errors
echo
echo '### LOGS'
grep -i 'error\|warn\|critical\|fail\|abort\|останов\|fatal' "$PLANET_LOG" | grep -v 'settings\.ini'
for log in "$LOG_PATH"/*.log; do
  if [ "$log" != "$PLANET_LOG" ] && [ "$log" != "$LOG_PATH/test_planet.log" ]; then
    CONTENT="$(grep -i 'error\|warn\|critical\|fail\|abort\|останов\|fatal\|fault' "$log" | \
      grep -v 'settings\.ini\|language file for co\|Zero length lin\|too many tokens\|Equal choices for way\|No feature id for way\|number of threads is\|Invalid order of edges')"
    if [ -n "$CONTENT" ]; then
      echo
      echo "$log"
      echo "$CONTENT"
    fi
  fi
done

# Step 2.1: test if mwms and routing were made
echo
echo '### MISSING FILES'
# Missing MWM files can be derived only from intermediate borders
if [ -d "$TARGET/borders" ]; then
  for border in "$TARGET/borders"/*.poly; do
    MWM="$(basename "$border" .poly).mwm"
    [ ! -f "$TARGET/$MWM" ] && echo "$MWM"
  done
fi

# Step 2.2: compare new files sizes with old
if [ -n "$DELTA_WITH" ]; then
  echo
  echo "### SIZE DIFFERENCE WITH $DELTA_WITH"
  python "$SCRIPT_PATH/diff_size.py" "$TARGET" "$DELTA_WITH" 5
  echo
  echo "Size of old data: $(ls -l "$DELTA_WITH"/*.mwm | awk '{ total += $5 }; END { print total/1024/1024/1024 }') GB"
  echo "Size of new data: $(ls -l "$TARGET"/*.mwm | awk '{ total += $5 }; END { print total/1024/1024/1024 }') GB"
fi

# For generator_tool, we create a temporary directory with symlinks to all maps
# That way, it can be easily cleaned after routing engine creates a lot of temporary directories in it
FTARGET="$TARGET/symlinked_copy/$(basename "$TARGET")"
mkdir -p "$FTARGET"
for file in "$TARGET"/*.mwm*; do
  BASENAME="$(basename "$file")"
  ln -s "$TARGET/$BASENAME" "$FTARGET/$BASENAME"
done

# Step 3.1: run calc_statistics and check for sections
echo
echo '### MISSING MWM SECTIONS'
FOUND_COASTS=
for mwm in "$FTARGET"/*.mwm; do
  BASENAME="$(basename "$mwm" .mwm)"
  STAT="$("$GENERATOR_TOOL" --data_path="$FTARGET" --user_resource_path="$OMIM_PATH/data/" --output="$BASENAME" --calc_statistics 2>/dev/null)"
  if [ -z "$FOUND_COASTS" ] && echo "$STAT" | grep -q 'natural|coastline|'; then
    FOUND_COASTS=1
  fi
  # grep -c replaces the `grep | wc -l | tr` pipeline.
  SECTIONS="$(echo "$STAT" | grep -c 'version : 8')"
  if [ -f "$mwm.routing" ] && [ "$SECTIONS" != "2" ]; then
    echo "$BASENAME: $SECTIONS"
  fi
done

[ -z "$FOUND_COASTS" ] && echo && echo 'WARNING: Did not find any coastlines in MWM files'

# Step 3.2: run type_statistics for old and new files to compare
if [ -n "$DELTA_WITH" ]; then
  echo
  echo '### FEATURE DIFFERENCE'
  TMPBASE="$HOME/test_planet_tmp"
  for mwm in "$FTARGET"/*.mwm; do
    BASENAME="$(basename "$mwm" .mwm)"
    if [ -f "$DELTA_WITH/$BASENAME.mwm" ]; then
      "$GENERATOR_TOOL" --data_path="$FTARGET" --user_resource_path="$OMIM_PATH/data/" --output="$BASENAME" --type_statistics >"${TMPBASE}_new" 2>/dev/null
      "$GENERATOR_TOOL" --data_path="$DELTA_WITH" --user_resource_path="$OMIM_PATH/data/" --output="$BASENAME" --type_statistics >"${TMPBASE}_old" 2>/dev/null
      DIFFERENCE="$(python "$SCRIPT_PATH/diff_features.py" "${TMPBASE}_new" "${TMPBASE}_old" 50)"
      if [ -n "$DIFFERENCE" ]; then
        echo
        echo "$BASENAME"
        echo "$DIFFERENCE"
      fi
    fi
  done
  rm "$TMPBASE"_*
fi

# Step 3.3: check booking hotels count in new .mwm files
if [ -n "$DELTA_WITH" ]; then
  echo
  echo '### BOOKING HOTELS COUNT DIFFERENCE'
  python "$OMIM_PATH/tools/python/mwm/mwm_feature_compare.py" -n "$TARGET" -o "$DELTA_WITH" -f "sponsored-booking" -t "$BOOKING_THRESHOLD"
fi

# Step 4: run integration tests
echo
echo '### INTEGRATION TESTS'
"$(dirname "$GENERATOR_TOOL")/routing_integration_tests" "--data_path=$FTARGET/../" "--user_resource_path=$OMIM_PATH/data/" "--suppress=online_cross_tests.*" 2>&1

# Clean the temporary directory
rm -r "$FTARGET"
|
||||
48
tools/unix/translate_categories.sh
Executable file
48
tools/unix/translate_categories.sh
Executable file
|
|
@ -0,0 +1,48 @@
|
|||
#!/usr/bin/env bash

# Install translate-shell before using this script:
# https://github.com/soimort/translate-shell
# Use `brew install translate-shell` on Mac OS X.

# There is a rate-limit for Google which can be work-arounded by using
# another IP or IPv6.

set -euo pipefail

echo "!!! This script is outdated, please use a better quality DeepL translations script"
echo "!!! tools/python/translate.py"
echo ""

DELIM=${DELIM:-:}

# One argument: English text. Two arguments: source language code + text.
case $# in
  1)
    SRC=en
    WORD="$1"
    ;;
  2)
    SRC="$1"
    WORD="$2"
    ;;
  *)
    echo "Usage: [DELIM=' = '] $0 word_or_text_in_English"
    echo " or"
    echo " [DELIM=' = '] $0 source_language_code word_or_text_in_given_language"
    exit 1
    ;;
esac

# Note: default Google engine doesn't properly support European Portuguese (pt-PT)
# and always produces Brazilian translations. Need to use Deepl, see tools/python/translate.py
LANGUAGES=( en af ar be bg ca cs da de el es et eu fa 'fi' fr he hu id it ja ko lt mr nb nl pl pt pt-BR ro ru sk sv sw th tr uk vi zh-CN zh-TW )

for lang in "${LANGUAGES[@]}"; do
  # -no-bidi fixes wrong characters order for RTL languages.
  translated=$(trans -b -no-bidi "$SRC:$lang" "$WORD" | sed 's/ *//')
  # Correct language codes to ours.
  case $lang in
    zh-CN) lang="zh-Hans" ;;
    zh-TW) lang="zh-Hant" ;;
    pt-PT) lang="pt" ;;
  esac
  # Capitalize the first letter of the translation before printing.
  first_upper=$(tr '[:lower:]' '[:upper:]' <<< "${translated:0:1}")
  echo "$lang${DELIM}${first_upper}${translated:1}"
  # To avoid quota limits.
  sleep 0.5
done
|
||||
15
tools/unix/update_gplay_relnotes.sh
Executable file
15
tools/unix/update_gplay_relnotes.sh
Executable file
|
|
@ -0,0 +1,15 @@
|
|||
#!/usr/bin/env bash

# Concatenates Android release notes in all languages into a single output format
# suitable to upload to Google Play to add or update existing notes.

# Original relnotes files:
GPLAY_NOTES=android/app/src/fdroid/play/listings/*/release-notes.txt
# also symlinked for Triple-T automation to android/app/src/google/play/release-notes/*/default.txt

# Iterate over the glob directly instead of parsing `ls` output (fragile).
for x in $GPLAY_NOTES; do
  # With no matches the unexpanded pattern remains; skip it.
  [ -e "$x" ] || continue
  # The language code is the name of the listing directory.
  l=$(basename "$(dirname "$x")")
  printf '<%s>\n' "$l"
  cat "$x"
  printf '</%s>\n' "$l"
done
|
||||
95
tools/unix/version.sh
Executable file
95
tools/unix/version.sh
Executable file
|
|
@ -0,0 +1,95 @@
|
|||
#!/usr/bin/env bash
|
||||
# Should be used everywhere to generate a consistent version number based
|
||||
# on the date of the last commit and a number of commits on that day.
|
||||
set -euo pipefail
|
||||
|
||||
|
||||
function init {
  # Fills DATE (YYYY.MM.DD of the last commit), COUNT (commits made on that
  # day) and GIT_HASH, falling back to today's date when git info is missing.
  if type git >/dev/null 2>&1 && git rev-parse --is-inside-work-tree >/dev/null 2>&1 ; then
    # Note: other ways to get date use the "when commit was rebased" date.
    # Commits are counted per committer-date day (not author date) so merging
    # old PRs does not change a day's commit count.
    # Force git with TZ variable and local dates to print the UTC date.
    local day_stats=( $(TZ=UTC0 git log --max-count=128 --pretty=format:%cd --date=iso-local \
      | cut -d' ' -f 1 | sed 's/-/./g' | sort | uniq -c | tail -1) )
    GIT_HASH=$(git describe --match="" --always --abbrev=8 --dirty)
  else
    # Either git is not installed on the system or this is not a git work tree
    # (likely the code was built from an archive); commit count, date and hash
    # cannot be determined, so use neutral values.
    local day_stats=( 0 $(date +%Y.%m.%d) )
    GIT_HASH="00000000"
  fi

  DATE="${day_stats[1]}"
  COUNT="${day_stats[0]}"
}
|
||||
|
||||
function ios_version {
  # iOS marketing version is simply the commit date.
  printf '%s\n' "$DATE"
}
|
||||
|
||||
function ios_build {
  # iOS build number: commits made on the release day.
  printf '%s\n' "$COUNT"
}
|
||||
|
||||
function count {
  # Bare commit count for the day.
  printf '%s\n' "$COUNT"
}
|
||||
|
||||
function android_name {
  # Human-readable Android version name, e.g. 2025.01.02-3.
  printf '%s-%s\n' "$DATE" "$COUNT"
}
|
||||
|
||||
function android_code {
  # versionCode layout: RR_yy_MM_dd_CC
  # RR - reserved to identify special markets, max value is 21.
  # yy - year
  # MM - month
  # dd - day
  # CC - the number of commits from the current day
  # 21_00_00_00_00 is the the greatest value Google Play allows for versionCode.
  # See https://developer.android.com/studio/publish/versioning for details.
  local short_date=${DATE:2}   # drop the century: 2025.01.02 -> 25.01.02
  printf '%s%02d\n' "${short_date//./}" "$COUNT"
}
|
||||
|
||||
function qt_int_version {
  # Integer version yyMMdd derived from the commit date:
  # yy - year, MM - month, dd - day.
  local short_date=${DATE:2}
  printf '%s\n' "${short_date//./}"
}
|
||||
|
||||
function qt_version {
  # Full desktop version string: date-count-hash-OS.
  printf '%s-%s-%s-%s\n' "$DATE" "$COUNT" "$GIT_HASH" "$(uname -s)"
}
|
||||
|
||||
function usage {
  # Prints help together with the current value for every supported format.
  # Heredoc content is user-visible output and is kept verbatim.
  cat << EOF
Prints Organic Maps version in specified format.
Version is the last git commit's date plus a number of commits on that day.
Usage: $0 <format>
Where format is one of the following arguments (shows current values):
  ios_version $(ios_version)
  ios_build $(ios_build)
  android_name $(android_name)
  android_code $(android_code)
  qt_version $(qt_version)
  qt_int_version $(qt_int_version)
  count $(count)
EOF
}
|
||||
|
||||
init

# Dispatch: the first argument must name one of the format functions above.
# "${1:-}" is quoted — the original unquoted form relied on `[ -z ]`
# coincidentally evaluating to true for a missing operand.
if [ -z "${1:-}" ] || [[ ! $(type -t "$1") == function ]]; then
  usage
  exit 1
else
  "$1"
fi
|
||||
Loading…
Add table
Add a link
Reference in a new issue