Merge commit 'eb264889e3' into traffic

This commit is contained in:
mvglasow
2025-10-10 18:40:35 +03:00
808 changed files with 31595 additions and 27751 deletions

View File

@@ -1,65 +0,0 @@
#!/usr/bin/env ruby
require_relative './omim_parsers'
ROOT = File.expand_path(File.dirname(__FILE__))
OMIM_ROOT = File.join(ROOT, '..', '..', '..')
CPP_CATEGORIES_FILENAME = File.join(OMIM_ROOT, 'search', 'displayed_categories.cpp')
CATEGORIES_FILENAME = File.join(OMIM_ROOT, 'data', 'categories.txt')
STRINGS_FILENAME = File.join(OMIM_ROOT, 'data', 'strings', 'strings.txt')
CATEGORIES_MATCHER = /m_keys = \{(.*)\};/m
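# Matches the m_keys = {...} initializer list in search/displayed_categories.cpp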
def load_categories_from_cpp(filename)
  raw_categories = File.read(filename)
  match = CATEGORIES_MATCHER.match(raw_categories)
  if match
    cpp_categories = match[1].split(/,\s+/)
    # Strip the surrounding quotes
    cpp_categories.map! { |cat| cat.gsub(/^"|"$/, '') }
    cpp_categories
  end
end

def compare_categories(string_cats, search_cats)
  inconsistent_strings = {}
  string_cats.each do |category_name, category|
    if !search_cats.include? category_name
      puts "Category '#{category_name}' not found in categories.txt"
      next
    end
    category.each do |lang, translation|
      if search_cats[category_name].include? lang
        if !search_cats[category_name][lang].include? translation
          not_found_cats_list = search_cats[category_name][lang]
          (inconsistent_strings[category_name] ||= {})[lang] = [translation, not_found_cats_list]
        end
      end
    end
  end
  inconsistent_strings.each do |name, languages|
    puts "\nInconsistent category \"#{name}\""
    languages.each do |lang, values|
      string_value, category_value = values
      puts "\t#{lang} : \"#{string_value}\" is not matched by #{category_value}"
    end
  end
  inconsistent_strings.empty?
end

def check_search_categories_consistent
  cpp_categories = load_categories_from_cpp(CPP_CATEGORIES_FILENAME)
  categories_txt_parser = OmimParsers::CategoriesParser.new cpp_categories
  strings_txt_parser = OmimParsers::StringsParser.new cpp_categories
  search_categories = categories_txt_parser.parse_file(CATEGORIES_FILENAME)
  string_categories = strings_txt_parser.parse_file(STRINGS_FILENAME)
  compare_categories(string_categories, search_categories) ? 0 : 1
end

if __FILE__ == $0
  exit check_search_categories_consistent()
end
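For reference, a usage sketch of this checker (the diff view omits file paths, so the script name below is assumed):

ruby check_consistency.rb && echo "strings.txt matches categories.txt"

It exits 0 when every translation in strings.txt is matched by the synonyms in categories.txt, and 1 otherwise, printing each inconsistent category first.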

View File

@@ -1,104 +0,0 @@
module OmimParsers
# To update the list, run in root directory:
# sed -nEe "s/ +([a-zA-Z]{2}(-[a-zA-Z]{2,})?) = .*$/\1/p" "data/strings/strings.txt" | sort -u | tr '\n' ' ' | sed -e 's/,$//' | fold -s -w48; echo
  LANGUAGES = %w(af ar be bg ca cs da de el en en-GB es es-MX et
                 eu fa fi fr fr-CA he hi hu id it ja ko lt mr nb
                 nl pl pt pt-BR ro ru sk sv sw th tr uk vi
                 zh-Hans zh-Hant)

  class AbstractParser
    def initialize(keys)
      @keys = keys
    end

    def parse_line(line)
      raise NotImplementedError.new("You must implement parse_line.")
    end

    def match_category(line, result)
      category_match = category.match(line)
      return if category_match.nil?
      # A separate name avoids shadowing the `category` method above
      name = category_match[1]
      result[name] ||= {} if @keys.include? name
    end

    def parse_file(filename)
      current_string = nil
      result = {}
      File.open(filename, 'r:UTF-8').each do |line|
        line.strip!
        next if should_exclude_line? line
        # An empty line means the next category block started
        if line.empty?
          current_string = nil
          next
        end
        current_string ||= match_category(line, result)
        parsed = parse_line(line)
        if !parsed.nil? && !current_string.nil?
          lang, translation = parsed
          current_string[lang] = translation
        end
      end
      result
    end

    def category
      raise NotImplementedError.new("You must implement category.")
    end

    def should_exclude_line?(line)
      false
    end
  end

  class CategoriesParser < AbstractParser
    def parse_line(line)
      line_match = /^([^:]+):(\S+)$/u.match(line)
      return if !line_match
      lang = $1.strip
      return if !LANGUAGES.include? lang
      translation = $2.strip
      synonyms = []
      translation.split('|').each do |token|
        token_match = /\d?\^?(.*)$/.match(token)
        synonyms.push(token_match[1]) if token_match
      end
      [lang, synonyms]
    end

    def should_exclude_line?(line)
      line.start_with? '#'
    end

    def category
      # We match only global categories ('food', 'bank'...)
      /^@([A-Za-z0-9]+)$/
    end
  end

  class StringsParser < AbstractParser
    def parse_line(line)
      line_match = /^([^=]+)=(.*)$/.match(line)
      if line_match
        lang = $1.strip
        if LANGUAGES.include? lang
          [lang, $2.strip]
        end
      end
    end

    def category
      /^\[(.+)\]/
    end
  end
end
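For context, a minimal illustration of the two inputs these parsers consume, consistent with the regexes above (the entries are invented for illustration):

categories.txt block, read by CategoriesParser:
@food
en:restaurant|2cafe
de:Restaurant

strings.txt block, read by StringsParser:
[food]
en = Food
de = Essen

CategoriesParser keys a block on the @category line and splits each translation on '|', stripping an optional leading digit and '^'; StringsParser keys a block on the [section] line and reads 'lang = value' pairs.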

View File

@@ -0,0 +1,114 @@
#!/usr/bin/env python3
import sys
import os
import re
import json
from string import digits
CONTENT_REGEX = re.compile(r'/\*.*?\*/', re.DOTALL)
TYPE_ENTRIES_REGEX = re.compile(r'"(.*?)"\s*=\s*"(.*?)"')
SINGLE_REPLACE = False
def main(lang, data_en):
    strings_file_path = os.path.join('iphone', 'Maps', 'LocalizedStrings', f'{lang}.lproj', 'LocalizableTypes.strings')
    json_file_path = os.path.join('data', 'categories-strings', f'{lang}.json', 'localize.json')
    with open(strings_file_path, 'r', encoding='utf-8') as f:
        content = f.read()
    # Remove /* ... */ comments
    content = re.sub(CONTENT_REGEX, '', content)
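    # Keys in LocalizableTypes.strings are assumed to look like "type.amenity.restaurant"; key[5:] drops the "type." prefix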
    type_entries = {key[5:]: value for key, value in re.findall(TYPE_ENTRIES_REGEX, content)}
    with open(json_file_path, 'r', encoding='utf-8') as f:
        data = json.load(f)
    for type_name, localized_value in type_entries.items():
        key_matched = False
        for json_key in data.keys():
            json_key_split = json_key.split('|')
            for key in json_key_split:
                already_there = False
                _key_matched = False
                if type_name == key.replace('-', '.').replace('_', '.'):
                    key_matched = True
                    data_split = data[json_key].split('|')
                    try:
                        data_split.extend([
                            value
                            for category in [a for a in json_key_split if a.startswith('@')]
                            for value in data[category].split('|')
                        ])
                    except KeyError:
                        pass
                    for value in data_split:
                        if value and value[0] in digits:
                            value = value[1:]
                        value = value.lower()
                        localized_value_lower = localized_value.lower()
                        # Prevents adding duplicates that differ only by the word "shop"
                        if value in localized_value_lower:
                            already_there = True
                            break
                        if localized_value_lower == value:
                            _key_matched = True
                            break
                    if already_there:
                        break
                    if not _key_matched:
                        if SINGLE_REPLACE and len(data_split) == 1:
                            old_value = data[json_key]
                            data[json_key] = localized_value
                            print(f'Replaced "{old_value}" with "{localized_value}" in "{json_key}"')
                        else:
                            data[json_key] = localized_value + '|' + data[json_key]
                            print(f'Appended "{localized_value}" to "{json_key}"')
        if not key_matched:
            for json_key in data.keys():
                for key in json_key.split('|'):
                    if type_name == key.replace('-', '.').replace('_', '.'):
                        print(f'Created "{localized_value}" for "{json_key}"')
                        data.update({json_key: localized_value})
    res = json.dumps(data, ensure_ascii=False, separators=(",\n", ": ")).replace('{', '{\n').replace('}', '\n}')
    with open(json_file_path, 'w', encoding='utf-8') as f:
        f.write(res)


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} [-r] <language_codes>")
        sys.exit(1)
    if sys.argv[1] == '-r':
        SINGLE_REPLACE = True
        del sys.argv[1]
        if len(sys.argv) < 2:
            print("No languages specified")
            sys.exit(1)
    with open('data/categories-strings/en.json/localize.json', 'r', encoding='utf-8') as f:
        data_en = json.load(f)
    if len(sys.argv) > 2:
        for lang in sys.argv[1:]:
            print(f'{lang}:')
            main(lang, data_en)
            print('\n')
    else:
        main(sys.argv[1], data_en)
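A usage sketch (the diff view omits the script's filename, so strings_to_json.py is assumed), run from the repository root:

python3 strings_to_json.py -r de fr

For each language this reads iphone/Maps/LocalizedStrings/<lang>.lproj/LocalizableTypes.strings and merges the localized type names into data/categories-strings/<lang>.json/localize.json; with -r, entries holding a single synonym are replaced instead of prepended to.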

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env python3
import os
import json
import sys
LANGUAGES = (
    'af', 'ar', 'be', 'bg', 'ca', 'cs', 'da', 'de', 'el', 'en', 'en-AU',
    'en-GB', 'en-US', 'es', 'es-MX', 'et', 'eu', 'fa', 'fi', 'fr', 'fr-CA',
    'he', 'hi', 'hu', 'id', 'it', 'ja', 'ko', 'lt', 'lv', 'mr', 'nb', 'nl',
    'pl', 'pt', 'pt-BR', 'ro', 'ru', 'sk', 'sr', 'sv', 'sw', 'th', 'tr', 'uk',
    'vi', 'zh-Hans', 'zh-Hant'
)


def load_localize_json(lang_dir):
    file_path = os.path.join(lang_dir, 'localize.json')
    if not os.path.isfile(file_path):
        return {}
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except json.JSONDecodeError as e:
        print(f"Error decoding JSON from {file_path}: {e}")
        return {}


def collect_all_keys(base_dir):
    all_data = {}
    lang_dirs = [d for d in os.listdir(base_dir) if d.endswith('.json')]
    for lang_dir in lang_dirs:
        lang = lang_dir.replace('.json', '')
        if lang not in LANGUAGES:
            print(f"Skipping unsupported language directory: {lang_dir}")
            continue
        full_path = os.path.join(base_dir, lang_dir)
        if os.path.isdir(full_path):
            data = load_localize_json(full_path)
            for key, value in data.items():
                if key not in all_data:
                    all_data[key] = {}
                all_data[key][lang] = value
    return all_data


def write_category_file(all_data, output_file):
    with open(output_file, 'w', encoding='utf-8') as f:
        for i, (key, translations) in enumerate(all_data.items()):
            f.write(key + '\n')
            for lang in LANGUAGES:
                if lang in translations and translations[lang]:
                    f.write(f"{lang}:{translations[lang]}\n")
                elif lang == 'en' and key in translations:
                    f.write('\n')
            if i < len(all_data) - 1:
                f.write('\n')


def main():
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} <json_directory> [categories.txt]")
        sys.exit(1)
    base_dir = sys.argv[1]
    output_file = sys.argv[2] if len(sys.argv) > 2 else "categories.txt"
    if not os.path.isdir(base_dir):
        print(f"Directory not found: {base_dir}")
        sys.exit(1)
    all_data = collect_all_keys(base_dir)
    write_category_file(all_data, output_file)


if __name__ == "__main__":
    main()
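The writer emits the categories.txt block format: a key line, then one lang:value line per available translation, with a blank line between blocks. An illustrative fragment (values invented):

@food
en:Food
de:Essen

@bank
en:Bank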

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python3
#
-# Check AppStore/GooglePlay metadata
+# Check AppStore / GooglePlay / F-Droid metadata
#
import os
@@ -128,7 +128,9 @@ def check_raw(path, max_length, ignoreEmptyFilesAndNewLines=False):
text = text[:-1]
else:
ok = error(path, "missing new line")
+else:
+text = text.strip()
cur_length = len(text)
if cur_length > max_length:
ok = error(path, "too long: got={}, expected={}", cur_length, max_length)
@@ -162,28 +164,28 @@ def check_exact(path, expected):
return done(path, ok)
-def check_android():
+def check_android(is_gplay):
ok = True
-flavor = 'android/app/src/fdroid/play/'
+flavor = "google" if is_gplay else "fdroid"
+flavor = f'android/app/src/{flavor}/play/'
ok = check_url(flavor + 'contact-website.txt') and ok
ok = check_email(flavor + 'contact-email.txt') and ok
ok = check_exact(flavor + 'default-language.txt', 'en-US') and ok
for locale in glob.glob(flavor + 'listings/*/'):
-if locale.split('/')[-2] not in GPLAY_LOCALES:
+if is_gplay and locale.split('/')[-2] not in GPLAY_LOCALES:
ok = error(locale, 'unsupported locale') and ok
continue
-ok = check_text(locale + 'title.txt', 50) and ok
-ok = check_text(locale + 'title-google.txt', 30) and ok
+ok = check_text(locale + 'title.txt', 30 if is_gplay else 50) and ok
ok = check_text(locale + 'short-description.txt', 80) and ok
-ok = check_text(locale + 'short-description-google.txt', 80, True) and ok
ok = check_text(locale + 'full-description.txt', 4000) and ok
-ok = check_text(locale + 'full-description-google.txt', 4000, True) and ok
-ok = check_text(locale + 'release-notes.txt', 499) and ok
+ok = check_text(locale + 'release-notes.txt', 499, optional=True) and ok
''' TODO: relnotes not necessary exist for all languages, but symlinks are made for all
for locale in glob.glob(flavor + 'release-notes/*/'):
if locale.split('/')[-2] not in GPLAY_LOCALES:
ok = error(locale, 'unsupported locale') and ok
continue
ok = check_text(locale + 'default.txt', 499) and ok
'''
return ok
@@ -193,13 +195,13 @@ def check_ios():
if locale.split('/')[-2] not in APPSTORE_LOCALES:
ok = error(locale, "unsupported locale") and ok
continue
locale_complete = True
for name in ["description.txt", "keywords.txt", "marketing_url.txt", "privacy_url.txt", "subtitle.txt", "support_url.txt"]:
name_path = os.path.join(locale, name)
if not os.path.exists(name_path):
locale_complete = False
if locale_complete:
ok = check_text(locale + "subtitle.txt", 30, False, True) and ok
ok = check_text(locale + "description.txt", 4000, False, True) and ok
@@ -208,13 +210,17 @@ def check_ios():
ok = check_url(locale + "support_url.txt", True) and ok
ok = check_url(locale + "marketing_url.txt", True) and ok
ok = check_url(locale + "privacy_url.txt", True) and ok
return ok
if __name__ == "__main__":
ok = True
-if len(sys.argv) == 2 and sys.argv[1] == 'android':
-if check_android():
+if len(sys.argv) == 2 and sys.argv[1] == 'gplay':
+if check_android(is_gplay=True):
sys.exit(0)
sys.exit(2)
+if len(sys.argv) == 2 and sys.argv[1] == 'fdroid':
+if check_android(is_gplay=False):
+sys.exit(0)
+sys.exit(2)
elif len(sys.argv) == 2 and sys.argv[1] == "ios":
@@ -222,5 +228,5 @@ if __name__ == "__main__":
sys.exit(0)
sys.exit(2)
else:
print("Usage:", sys.argv[0], "android|ios", file=sys.stderr)
print("Usage:", sys.argv[0], "gplay|fdroid|ios", file=sys.stderr)
sys.exit(1)
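A usage sketch of the new entry points (the script path is assumed, since the diff view omits filenames):

python3 check_store_metadata.py gplay
python3 check_store_metadata.py fdroid
python3 check_store_metadata.py ios

Each invocation exits 0 when all checks pass, 2 when a check fails, and 1 on bad usage.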

View File

@@ -0,0 +1,168 @@
#!/usr/bin/env python3
"""
GPX to iOS Simulator simctl location command
Converts a GPX file into an xcrun simctl location start command for realistic iOS location simulation.
Tested with tracks exported from CoMaps.
Usage:
python gpx_to_simctl.py test_route.gpx
"""
import argparse
import xml.etree.ElementTree as ET
from pathlib import Path
import sys
import subprocess
def extract_track_points_from_gpx(gpx_file: Path):
    """Extract track points from GPX file."""
    tree = ET.parse(gpx_file)
    root = tree.getroot()
    points = []
    # Find all elements with lat/lon attributes
    for elem in root.findall('.//*[@lat][@lon]'):
        lat = float(elem.get('lat'))
        lon = float(elem.get('lon'))
        points.append((lat, lon))
    return points


def generate_simctl_command(points, speed_kmh=60, interval=0.1, distance=None, device="booted"):
    """Generate simctl location start command."""
    if len(points) < 2:
        raise ValueError("Need at least 2 waypoints for simctl location start")
    # Convert km/h to m/s
    speed_mps = speed_kmh / 3.6
    # Format waypoints as lat,lon pairs
    waypoint_strings = [f"{lat:.6f},{lon:.6f}" for lat, lon in points]
    # Build command
    cmd = ["xcrun", "simctl", "location", device, "start"]
    cmd.append(f"--speed={speed_mps:.2f}")
    if distance:
        cmd.append(f"--distance={distance}")
    else:
        cmd.append(f"--interval={interval}")
    cmd.extend(waypoint_strings)
    return cmd


def main():
    parser = argparse.ArgumentParser(
        description="Convert GPX file to simctl location start command",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python gpx_to_simctl.py test_route.gpx --speed 60 --interval 0.1
  python gpx_to_simctl.py test_route.gpx --speed 80 --distance 10 --clear-first
  python gpx_to_simctl.py test_route.gpx --speed 50 --dry-run
""")
    parser.add_argument('gpx_file', help='Input GPX file')
    parser.add_argument('--speed', type=float, default=60,
                        help='Speed in km/h (default: 60)')
    parser.add_argument('--interval', type=float, default=0.1,
                        help='Update interval in seconds (default: 0.1)')
    parser.add_argument('--distance', type=float,
                        help='Update distance in meters (overrides --interval)')
    parser.add_argument('--device', default='booted',
                        help='Target device (default: booted)')
    parser.add_argument('--dry-run', action='store_true',
                        help='Show command without executing (default: execute)')
    parser.add_argument('--clear-first', action='store_true',
                        help='Clear existing location before starting')
    args = parser.parse_args()
    # Validate input file
    gpx_file = Path(args.gpx_file)
    if not gpx_file.exists():
        print(f"Error: GPX file '{gpx_file}' not found", file=sys.stderr)
        return 1
    try:
        # Extract waypoints
        points = extract_track_points_from_gpx(gpx_file)
        print(f"Extracted {len(points)} waypoints from {gpx_file}")
        if len(points) < 2:
            print("Error: Need at least 2 waypoints for location simulation", file=sys.stderr)
            return 1
        # Generate command
        cmd = generate_simctl_command(
            points,
            speed_kmh=args.speed,
            interval=args.interval,
            distance=args.distance,
            device=args.device
        )
        # Show command
        print("\nGenerated simctl command:")
        print(" ".join(cmd))
        # Calculate simulation info
        speed_mps = args.speed / 3.6
        total_distance = 0
        for i in range(1, len(points)):
            lat1, lon1 = points[i-1]
            lat2, lon2 = points[i]
            # Simple distance approximation
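            # (equirectangular estimate: the longitude delta is not scaled by cos(latitude), so east-west legs are overestimated away from the equator)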
            total_distance += ((lat2-lat1)**2 + (lon2-lon1)**2)**0.5 * 111000  # rough conversion to meters
        duration = total_distance / speed_mps
        print("\nSimulation info:")
        print(f"  Speed: {args.speed} km/h ({speed_mps:.1f} m/s)")
        print(f"  Waypoints: {len(points)}")
        print(f"  Estimated distance: {total_distance/1000:.2f} km")
        print(f"  Estimated duration: {duration:.0f} seconds ({duration/60:.1f} minutes)")
        if args.distance:
            print(f"  Update distance: {args.distance}m")
        else:
            print(f"  Update interval: {args.interval}s")
        # Execute by default unless dry-run
        if args.dry_run:
            print("\n[DRY RUN] Command that would be executed:")
            print(f"  {' '.join(cmd)}")
            if args.clear_first:
                clear_cmd = ["xcrun", "simctl", "location", args.device, "clear"]
                print(f"  (would clear location first: {' '.join(clear_cmd)})")
        else:
            print("\nExecuting command...")
            # Clear location first if requested
            if args.clear_first:
                clear_cmd = ["xcrun", "simctl", "location", args.device, "clear"]
                print("Clearing existing location...")
                subprocess.run(clear_cmd, check=True)
            # Execute the start command
            result = subprocess.run(cmd, capture_output=True, text=True)
            if result.returncode == 0:
                print("✅ Location simulation started successfully!")
                if result.stdout.strip():
                    print(result.stdout.strip())
            else:
                print("❌ Error executing command:")
                print(result.stderr.strip())
                return 1
        return 0
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        return 1


if __name__ == '__main__':
    sys.exit(main())
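For a two-point track at the default 60 km/h and 0.1 s interval, the generated command looks like this (coordinates invented):

xcrun simctl location booted start --speed=16.67 --interval=0.1 52.520000,13.405000 52.521000,13.406000

--clear-first additionally issues "xcrun simctl location booted clear" before starting.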

View File

@@ -2,6 +2,10 @@
set -euo pipefail
SKIP_MAP_DOWNLOAD="${SKIP_MAP_DOWNLOAD:-}"
+SKIP_GENERATE_SYMBOLS="${SKIP_GENERATE_SYMBOLS:-}"
+SKIP_GENERATE_DRULES="${SKIP_GENERATE_DRULES:-}"
OPT_DEBUG=
OPT_RELEASE=
OPT_RELEASEDEBUGINFO=
@@ -74,10 +78,10 @@ fi
OMIM_PATH="$(cd "${OMIM_PATH:-$(dirname "$0")/../..}"; pwd)"
if [ "$OPT_TARGET" == "desktop" ]; then
./configure.sh
else
if [ "$OPT_TARGET" != "desktop" ] && [ -z "$SKIP_MAP_DOWNLOAD$SKIP_GENERATE_SYMBOLS$SKIP_GENERATE_DRULES" ]; then
SKIP_MAP_DOWNLOAD=1 SKIP_GENERATE_SYMBOLS=1 SKIP_GENERATE_DRULES=1 ./configure.sh
else
./configure.sh
fi
DEVTOOLSET_PATH=/opt/rh/devtoolset-7
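A usage sketch of the new skip variables (the build script's name is assumed, as the diff view omits it):

SKIP_GENERATE_SYMBOLS=1 SKIP_GENERATE_DRULES=1 ./tools/unix/build_omim.sh

With none of the SKIP_* variables set and a non-desktop target, the script now passes all three to configure.sh itself; setting any of them switches to a plain ./configure.sh call.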

View File

@@ -0,0 +1,2 @@
#!/usr/bin/env sh
./tools/python/categories/json_to_txt.py data/categories-strings data/categories.txt

View File

@@ -27,6 +27,6 @@ else
fi
OMIM_PATH="${OMIM_PATH:-$(cd "$(dirname "$0")/../.."; pwd)}"
SHADERS_GENERATOR="$OMIM_PATH/shaders/vulkan_shaders_preprocessor.py"
SHADERS_GENERATOR="$OMIM_PATH/libs/shaders/vulkan_shaders_preprocessor.py"
python3 "$SHADERS_GENERATOR" "$OMIM_PATH/shaders/GL" shader_index.txt shaders_lib.glsl "$OMIM_PATH/data/vulkan_shaders" "$GLSLC_PATH" "$DEBUG"
python3 "$SHADERS_GENERATOR" "$OMIM_PATH/libs/shaders/GL" shader_index.txt shaders_lib.glsl "$OMIM_PATH/data/vulkan_shaders" "$GLSLC_PATH" "$DEBUG"

View File

@@ -0,0 +1,37 @@
#!/usr/bin/env bash
#
# Linking relnotes from F-Droid to Google Play
#
set -e -u
REPO_PATH="$(cd "$(dirname "$0")/../.."; pwd -P)"
ANDROID_PATH="$REPO_PATH/android/app/src"
GPLAY_PATH="$ANDROID_PATH/google/play/release-notes"
pushd "$ANDROID_PATH" >/dev/null
echo "Deleting all GPlay relnotes symlinks in $GPLAY_PATH"
pushd "$GPLAY_PATH" >/dev/null
rm -rf *
popd >/dev/null
pushd fdroid/play/listings >/dev/null
echo "Symlinking to F-Droid relnotes in $(pwd)"
for loc in */; do
if [ -f "$loc/release-notes.txt" ]; then
echo "Adding $loc relnotes"
pushd ../../../google/play/release-notes >/dev/null
mkdir -p "$loc"
cd "$loc"
ln -sT "../../../../fdroid/play/listings/${loc}release-notes.txt" default.txt
popd >/dev/null
fi
done
popd >/dev/null
popd >/dev/null
exit 0
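For a locale directory such as de-DE/, the loop produces (locale invented):

android/app/src/google/play/release-notes/de-DE/default.txt -> ../../../../fdroid/play/listings/de-DE/release-notes.txt

so Google Play's per-locale default.txt resolves to the F-Droid listing's release-notes.txt.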

View File

@@ -0,0 +1,60 @@
#!/usr/bin/env bash
# Upload new maps version to all CDN nodes (in parallel).
# Use the following commands to delete older maps:
#
# ru1 - keep at most 3 map versions
# First, list all map versions on the server
# sudo rclone lsd ru1:comaps-maps/maps
# Delete the old version
# sudo rclone purge -v ru1:comaps-maps/maps/250713
#
# fi1 - max 3 versions
# sudo rclone lsd fi1:/var/www/html/maps
# sudo rclone purge -v fi1:/var/www/html/maps/250713
#
# de1 - max 6 versions
# sudo rclone lsd de1:/var/www/html/comaps-cdn/maps
# sudo rclone purge -v de1:/var/www/html/comaps-cdn/maps/250713
#
# us2 - all versions, don't delete
# sudo rclone lsd us2:comaps-map-files/maps
set -e -u
if [ $# -eq 0 ]; then
echo "Usage: upload_to_cdn.sh MAPS_PATH"
echo "e.g. sudo upload_to_cdn.sh osm-maps/2025_09_06__09_48_08/250906"
echo "uploads are run in parallel to us2,ru1,fi1,de1 servers,"
echo "subsequent runs will update only missing/differing files,"
echo "so its fine to run second time to ensure there were no incomplete transfers"
echo "or to run on an unfinished generation first and then again after its fully finished."
echo "(sudo is needed to access rclone.conf with servers credentials)"
exit 1
fi
MAPS=$(basename "$1")
DIR="$(dirname "$1")/$MAPS"
echo "Uploading maps folder $DIR as version $MAPS"
echo "Uploading to us2"
# An explicit mwm/txt filter is used to skip temp files when run for an unfinished generation
rclone copy -v --include "*.{mwm,txt}" $DIR us2:comaps-map-files/maps/$MAPS &
echo "Uploading to ru1"
rclone copy -v --include "*.{mwm,txt}" $DIR ru1:comaps-maps/maps/$MAPS &
echo "Uploading to fi1"
rclone copy -v --include "*.{mwm,txt}" $DIR fi1:/var/www/html/maps/$MAPS &
echo "Uploading to de1"
rclone copy -v --include "*.{mwm,txt}" $DIR de1:/var/www/html/comaps-cdn/maps/$MAPS &
# us1 is not used for maps atm
# rclone lsd us1:/home/dh_zzxxrk/cdn-us-1.comaps.app/maps
wait
echo "Upload complete"