diff --git a/.forgejo/workflows/map-generator.yml b/.forgejo/workflows/map-generator.yml index 230985fff..79fc99582 100644 --- a/.forgejo/workflows/map-generator.yml +++ b/.forgejo/workflows/map-generator.yml @@ -5,10 +5,10 @@ on: jobs: description: 'Which job(s) to run right now?' required: true - default: 'all' + default: 'all-except-upload' type: choice options: - - all + - all-except-upload - copy-coasts - planet - wiki @@ -16,30 +16,82 @@ on: - subways - tiger - maps + - upload + map-generator-continue: + description: 'Continue previous map generation?' + required: false + default: false + type: boolean + map-generator-countries: + description: 'Generate specific MWMs? (i.e. "US_New York_*, foo")' + required: false + type: string + reset: + description: 'Reset part of the system?' + required: false + default: 'no' + type: choice + options: + - 'no' + - wiki-ratelimit + +## RCLONE_CONF is multi-line text containing keys and credentials for us2,ru1,fi1,de1 servers env: + RCLONE_CONF: ${{ secrets.RCLONE_CONF }} WIKIMEDIA_USERNAME: ${{ secrets.WIKIMEDIA_USERNAME }} WIKIMEDIA_PASSWORD: ${{ secrets.WIKIMEDIA_PASSWORD }} - S3_KEY_ID: ${{ secrets.S3_KEY_ID }} - S3_SECRET_KEY: ${{ secrets.S3_SECRET_KEY }} - S3_ENDPOINT: ${{ secrets.S3_ENDPOINT }} - S3_BUCKET: ${{ secrets.S3_BUCKET }} - SFTP_USER: ${{ secrets.SFTP_USER }} - SFTP_PASSWORD: ${{ secrets.SFTP_PASSWORD }} - SFTP_HOST: ${{ secrets.SFTP_HOST }} - SFTP_PATH: ${{ secrets.SFTP_PATH }} + ZULIP_BOT_EMAIL: ${{ secrets.ZULIP_BOT_EMAIL }} + ZULIP_API_KEY: ${{ secrets.ZULIP_API_KEY }} + MWMCONTINUE: ${{ inputs.map-generator-continue }} + MWMCOUNTRIES: ${{ inputs.map-generator-countries }} DEBIAN_FRONTEND: noninteractive TZ: Etc/UTC jobs: - copy-coasts: - if: inputs.jobs == 'copy-coasts' || inputs.jobs == 'all' - name: Copy Previously Generated Coasts + clone-repos: + name: Clone Git Repos runs-on: mapfilemaker container: - image: ubuntu:latest + image: codeberg.org/comaps/maps_generator:f6d53d54f794 volumes: - - /media/4tbexternal:/media/4tbexternal + - /mnt/4tbexternal:/mnt/4tbexternal + concurrency: + group: ${{ github.workflow }}-map-generator-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + steps: + - uses: actions/cache@v4 + with: + path: "~" + key: cache-${{ github.run_id }}-${{ github.run_attempt }} + - name: Checkout main repo + shell: bash + run: | + echo "Cloning $FORGEJO_SERVER_URL/$FORGEJO_REPOSITORY branch $FORGEJO_REF_NAME" + cd ~ + git clone --recurse-submodules --shallow-submodules -b $FORGEJO_REF_NAME --single-branch $FORGEJO_SERVER_URL/$FORGEJO_REPOSITORY.git comaps + - name: Checkout wikiparser repo + shell: bash + run: | + cd ~ + git clone https://codeberg.org/comaps/wikiparser.git + - name: Checkout subways repo + shell: bash + run: | + cd ~ + git clone https://codeberg.org/comaps/subways.git + + copy-coasts: + if: inputs.jobs == 'copy-coasts' || inputs.jobs == 'all-except-upload' + name: Copy Previously Generated Coasts + runs-on: mapfilemaker + needs: + - clone-repos + container: + image: codeberg.org/comaps/maps_generator:f6d53d54f794 + volumes: + - /mnt/4tbexternal/:/mnt/4tbexternal/ + - /mnt/4tbexternal/osm-planet:/home/planet concurrency: group: ${{ github.workflow }}-map-generator-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true @@ -47,336 +99,403 @@ jobs: - name: Copy Coasts shell: bash run: | - if [ -f /media/4tbexternal/osm-maps/*/intermediate_data/WorldCoasts.geom ]; then - cp /media/4tbexternal/osm-maps/*/intermediate_data/WorldCoasts.geom 
/media/4tbexternal/osm-planet/latest_coasts.geom - cp /media/4tbexternal/osm-maps/*/intermediate_data/WorldCoasts.rawgeom /media/4tbexternal/osm-planet/latest_coasts.rawgeom + echo "WorldCoasts available:" + ls -al /mnt/4tbexternal/osm-maps/*/intermediate_data/WorldCoasts.* + + if [ -f /mnt/4tbexternal/osm-maps/*/intermediate_data/WorldCoasts.geom ]; then + + echo "Before:" + ls -al /home/planet/latest_coasts* + + cp -p /mnt/4tbexternal/osm-maps/*/intermediate_data/WorldCoasts.geom /home/planet/latest_coasts.geom + cp -p /mnt/4tbexternal/osm-maps/*/intermediate_data/WorldCoasts.rawgeom /home/planet/latest_coasts.rawgeom + + echo "After:" + ls -al /home/planet/latest_coasts* + + else + + echo "No WorldCoasts found." + fi update-planet: - if: inputs.jobs == 'planet' || inputs.jobs == 'all' + if: inputs.jobs == 'planet' || inputs.jobs == 'all-except-upload' name: Update Planet runs-on: mapfilemaker + needs: + - clone-repos container: - image: ubuntu:latest + image: codeberg.org/comaps/maps_generator:f6d53d54f794 volumes: - - /media/4tbexternal:/media/4tbexternal + - /mnt/4tbexternal/:/mnt/4tbexternal/ + - /mnt/4tbexternal/osm-planet:/home/planet concurrency: group: ${{ github.workflow }}-map-generator-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true steps: - - name: Install dependencies - shell: bash - run: | - apt-get update -y - apt-get install -y pyosmium osmium-tool python3-venv python3-pip wget2 - rm -f /usr/lib/python*/EXTERNALLY-MANAGED - pip3 install "protobuf<4" - name: Download Planet File if Absent shell: bash + # TODO: replace wget2 with curl -Z run: | - if [ ! -d /media/4tbexternal/osm-planet/planet/ ]; then - mkdir -p /media/4tbexternal/osm-planet/planet/ + if [ ! -d /home/planet/planet/ ]; then + mkdir -p /home/planet/planet/ fi - if [ ! -f /media/4tbexternal/osm-planet/planet/planet-latest.osm.pbf ]; then - cd /media/4tbexternal/osm-planet/planet/ - wget2 --verbose --progress=bar --continue --debug https://ftpmirror.your.org/pub/openstreetmap/pbf/planet-latest.osm.pbf + if [ ! -f /home/planet/planet/planet-latest.osm.pbf ]; then + cd /home/planet/planet/ + wget2 --verbose --progress=bar --continue https://ftpmirror.your.org/pub/openstreetmap/pbf/planet-latest.osm.pbf + else + echo "planet-latest.osm.pbf was found, raw download not required." fi - name: Update Planet shell: bash run: | - cd /media/4tbexternal/osm-planet/planet/ - pyosmium-up-to-date planet-latest.osm.pbf -o planet-latest-new.osm.pbf -vv --size 16384 + cd /home/planet/planet/ + rm planet-latest-new.osm.pbf + pyosmium-up-to-date planet-latest.osm.pbf -o planet-latest-new.osm.pbf -v --size 16384 mv planet-latest-new.osm.pbf planet-latest.osm.pbf - name: Converting planet-latest.osm.pbf to planet.o5m - run: /root/OM/osmctools/osmconvert planet-latest.osm.pbf -o=planet.o5m + # TODO: better to run osmupdate (not convert) just before starting the maps jobs - for max fresh data. + run: | + echo "Starting..." + cd /home/planet/planet/ + osmconvert -v --drop-author --drop-version --hash-memory=4000 planet-latest.osm.pbf -o=planet.o5m + echo "Done." + - name: Notify Zulip + run: | + curl -X POST https://comaps.zulipchat.com/api/v1/messages \ + -u $ZULIP_BOT_EMAIL:$ZULIP_API_KEY \ + --data-urlencode type=stream \ + --data-urlencode 'to="DevOps"' \ + --data-urlencode topic=codeberg-bot \ + --data-urlencode 'content=Planet update is done!' 
wiki-update: - if: inputs.jobs == 'wiki' || inputs.jobs == 'all' + if: inputs.jobs == 'wiki' || inputs.jobs == 'all-except-upload' name: Update Wikipedia runs-on: mapfilemaker + needs: + - clone-repos container: - image: ubuntu:latest + image: codeberg.org/comaps/maps_generator:f6d53d54f794 volumes: - - /media/4tbexternal:/media/4tbexternal + - /mnt/4tbexternal/:/mnt/4tbexternal/ + - /mnt/4tbexternal/osm-planet:/home/planet concurrency: group: ${{ github.workflow }}-map-generator-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true steps: - - name: Install dependencies - shell: bash - run: | - apt-get update -y - apt-get install -y jq curl wget2 rustc cargo git ca-certificates - - name: Clone wikiparser if necessary - shell: bash - run: | - if [ ! -d /media/4tbexternal/wikiparser ]; then - cd /media/4tbexternal - git clone https://codeberg.org/comaps/wikiparser.git - fi + - uses: actions/cache@v4 + with: + path: "~" + key: cache-${{ github.run_id }}-${{ github.run_attempt }} - name: Check for planet file shell: bash + # TODO: remove debug output run: | - if [ ! -f /media/4tbexternal/osm-planet/planet/planet-latest.osm.pbf ]; then - echo "ERROR: No file at /media/4tbexternal/osm-planet/planet/planet-latest.osm.pbf" - ls -al /media/4tbexternal/ - ls -al /media/4tbexternal/osm-planet/ - ls -al /media/4tbexternal/osm-planet/planet/ + if [ ! -f /home/planet/planet/planet-latest.osm.pbf ]; then + echo "ERROR: No file at /home/planet/planet/planet-latest.osm.pbf" + ls -al /home/planet/ + ls -al /home/planet/planet/ + exit 1 + fi + - name: Only get new dumps once per 30 days + shell: bash + run: | + if [[ '${{ inputs.reset }}' == 'wiki-ratelimit' ]]; then + echo "Bypassing wiki rate limit upon request." + exit 0 + fi + + datediff() { + d1=$(date -d "$1" +%s) + d2=$(date -d "$2" +%s) + echo $(( (d1 - d2) / 86400 )) + } + RECENTDUMPDATE=$(find /home/planet/wikipedia/dumps/ -mindepth 1 -maxdepth 1 -iname "2*" -type d | sort -n -r | head -1 | cut -d/ -f6) + TODAY=$(date +%Y%m%d) + DATEDIFF=$(datediff $TODAY $RECENTDUMPDATE) + if [ $DATEDIFF -lt 30 ]; then + echo "ERROR: The most recent wiki dump is from $RECENTDUMPDATE, $DATEDIFF days ago. Wikimedia limits users to 15 snapshot requests per month." + echo "Set the 'reset' option to 'wiki-ratelimit' to bypass this." + ls -al /home/planet/wikipedia/dumps/ exit 1 fi - name: Update Wikipedia from Enterprise API shell: bash run: | - mkdir -p /media/4tbexternal/osm-planet/wikipedia/dumps - mkdir -p /media/4tbexternal/osm-planet/wikipedia/build - cd /media/4tbexternal/wikiparser + #todo: curl in download.sh can fail when rate limited and even save error messages to the output. need to validate. + #downloading all languages can also trigger rate limits or fail as well. needs work. + #also: a failure to download means a failure to build, and could result in no wiki descriptions etc. + #also-also: do we want to remove old wiki data in planet between builds? pastk: no need, its being updated / augmented + mkdir -p /home/planet/wikipedia/dumps + mkdir -p /home/planet/wikipedia/build + cd ~/wikiparser ls -al echo "Downloading ..." - ./download.sh /media/4tbexternal/osm-planet/wikipedia/dumps + ./download.sh /home/planet/wikipedia/dumps + ls -al /home/planet/wikipedia/dumps/* echo "Running ..." 
- ./run.sh /media/4tbexternal/osm-planet/wikipedia/build \ - /media/4tbexternal/osm-planet/planet/planet-latest.osm.pbf \ - /media/4tbexternal/osm-planet/wikipedia/dumps/latest/*.tar.gz + ./run.sh /home/planet/wikipedia/build \ + /home/planet/planet/planet-latest.osm.pbf \ + /home/planet/wikipedia/dumps/latest/*.tar.gz echo "DONE" + - name: Check that the latest dumps are present, recent, and not super tiny + shell: bash + run: | + FAILCHECK=0 + + # Check all .tar.gz files in /home/planet/wikipedia/dumps/latest/ + for file in /home/planet/wikipedia/dumps/latest/*.tar.gz; do + # Check if file exists (handles case where glob doesn't match) + [ -e "$file" ] || continue + + # Get file size in MB and modification time in days + size_mb=$(stat -f%z "$file" 2>/dev/null | awk '{print int($1/1024/1024)}' || stat -c%s "$file" | awk + '{print int($1/1024/1024)}') + days_old=$(find "$file" -mtime -7 | wc -l) + + # Verify conditions + if [ "$size_mb" -lt 100 ]; then + echo "FAIL: $file is only ${size_mb}MB (< 100MB)" + FAILCHECK=1 + elif [ "$days_old" -eq 0 ]; then + echo "FAIL: $file is older than 7 days" + ls -al $file + FAILCHECK=1 + else + echo "PASS: $file (${size_mb}MB, modified within 7 days)" + fi + done + + exit $FAILCHECK + - name: Notify Zulip + run: | + curl -X POST https://comaps.zulipchat.com/api/v1/messages \ + -u $ZULIP_BOT_EMAIL:$ZULIP_API_KEY \ + --data-urlencode type=stream \ + --data-urlencode 'to="DevOps"' \ + --data-urlencode topic=codeberg-bot \ + --data-urlencode 'content=Wiki update is done!' update-isolines: - if: inputs.jobs == 'isolines' || inputs.jobs == 'all' + if: inputs.jobs == 'isolines' || inputs.jobs == 'all-except-upload' name: Update Isolines runs-on: mapfilemaker + needs: + - clone-repos container: - image: ubuntu:latest + image: codeberg.org/comaps/maps_generator:f6d53d54f794 volumes: - - /media/4tbexternal:/media/4tbexternal + - /mnt/4tbexternal/:/mnt/4tbexternal/ + - /mnt/4tbexternal/osm-planet:/home/planet concurrency: group: ${{ github.workflow }}-map-generator-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true steps: - - name: Install dependencies - shell: bash - run: | - apt-get update -qq \ - && apt-get install -y --no-install-recommends \ - curl \ - osmctools \ - rclone \ - git \ - ca-certificates \ - openssh-client \ - sshpass \ - vim \ - wget \ - build-essential \ - clang \ - cmake \ - python3 \ - python3-pip \ - python3.12-venv \ - qt6-base-dev \ - qt6-positioning-dev \ - libc++-dev \ - libfreetype-dev \ - libglvnd-dev \ - libgl1-mesa-dev \ - libharfbuzz-dev \ - libicu-dev \ - libqt6svg6-dev \ - libqt6positioning6-plugins \ - libqt6positioning6 \ - libsqlite3-dev \ - libxrandr-dev \ - libxinerama-dev \ - libxcursor-dev \ - libxi-dev \ - zlib1g-dev - rm -f /usr/lib/python*/EXTERNALLY-MANAGED - pip3 install "protobuf<4" - - name: Clone main repo if necessary - shell: bash - run: | - if [ ! -d /media/4tbexternal/comaps-init ]; then - apt-get update -qq && apt-get install -y --no-install-recommends git - cd /media/4tbexternal - git clone --recurse-submodules --shallow-submodules -b rebase-generator-pastk-wb251014 --single-branch https://codeberg.org/comaps/comaps.git comaps-init - fi + - uses: actions/cache@v4 + with: + path: "~" + key: cache-${{ github.run_id }}-${{ github.run_attempt }} + # TODO: we only need to update these if our SRTM or countries change + # TODO: after update, verify that sizable files exist: /home/planet/isolines/*.isolines - name: Update Isolines shell: bash + # TODO: preserve previous isolines version? 
+ # TODO: cleanup the tmp-tiles dir after completion run: | - cd /media/4tbexternal/comaps-init/ - ./tools/unix/build_omim.sh -R topography_generator_tool - rm -rf ../osm-planet/isolines/ - mkdir ../osm-planet/isolines/ - ../omim-build-relwithdebinfo/topography_generator_tool \ + cd ~/comaps/ + ./tools/unix/build_omim.sh -p ~ -R topography_generator_tool + rm -rf /home/planet/isolines/ + mkdir /home/planet/isolines/ + ~/omim-build-relwithdebinfo/topography_generator_tool \ --profiles_path=./data/conf/isolines/isolines-profiles.json \ --countries_to_generate_path=./data/conf/isolines/countries-to-generate.json \ - --tiles_isolines_out_dir=../osm-planet/isolines/tmp-tiles/ \ - --countries_isolines_out_dir=../osm-planet/isolines/ \ + --tiles_isolines_out_dir=/home/planet/isolines/tmp-tiles/ \ + --countries_isolines_out_dir=/home/planet/isolines/ \ --data_dir=./data/ \ - --srtm_path=../osm-planet/SRTM-patched-europe/ \ - --threads=22 + --srtm_path=/home/planet/SRTM-patched-europe/ \ + --threads=96 + - name: Check isolines + shell: bash + run: | + NUMISO=$(ls -al /home/planet/isolines/*.isolines | wc -l) + echo "Found $NUMISO isolines" + if [ $NUMISO -lt 10 ]; then + echo "ERROR: Did generation fail?" + exit 1 + fi + - name: Notify Zulip + run: | + curl -X POST https://comaps.zulipchat.com/api/v1/messages \ + -u $ZULIP_BOT_EMAIL:$ZULIP_API_KEY \ + --data-urlencode type=stream \ + --data-urlencode 'to="DevOps"' \ + --data-urlencode topic=codeberg-bot \ + --data-urlencode 'content=Isolines are done!' update-subways: - if: inputs.jobs == 'subways' || inputs.jobs == 'all' + if: inputs.jobs == 'subways' || inputs.jobs == 'all-except-upload' name: Update Subways runs-on: mapfilemaker + needs: + - clone-repos container: - image: ubuntu:latest + image: codeberg.org/comaps/maps_generator:f6d53d54f794 volumes: - - /media/4tbexternal:/media/4tbexternal + - /mnt/4tbexternal/:/mnt/4tbexternal/ + - /mnt/4tbexternal/osm-planet:/home/planet concurrency: group: ${{ github.workflow }}-map-generator-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true steps: - - name: Install dependencies - shell: bash - run: | - apt-get update -qq && apt-get install -y --no-install-recommends curl osmctools osmium-tool python3-venv ca-certificates git python3-pip - rm -f /usr/lib/python*/EXTERNALLY-MANAGED - pip3 install "protobuf<4" - - name: Clone subways if necessary - shell: bash - run: | - if [ ! -d /media/4tbexternal/subways ]; then - cd /media/4tbexternal - git clone https://codeberg.org/comaps/subways.git - fi - - name: Clone main repo if necessary - shell: bash - run: | - if [ ! -d /media/4tbexternal/comaps-init ]; then - cd /media/4tbexternal - git clone --recurse-submodules --shallow-submodules -b rebase-generator-pastk-wb251014 --single-branch https://codeberg.org/comaps/comaps.git comaps-init - fi + - uses: actions/cache@v4 + with: + path: "~" + key: cache-${{ github.run_id }}-${{ github.run_attempt }} - name: Update Subways shell: bash run: | - cd /media/4tbexternal/comaps-init/ + cd ~/comaps/ cp tools/unix/maps/settings.sh.prod tools/unix/maps/settings.sh ./tools/unix/maps/generate_subways.sh + - name: Notify Zulip + run: | + curl -X POST https://comaps.zulipchat.com/api/v1/messages \ + -u $ZULIP_BOT_EMAIL:$ZULIP_API_KEY \ + --data-urlencode type=stream \ + --data-urlencode 'to="DevOps"' \ + --data-urlencode topic=codeberg-bot \ + --data-urlencode 'content=Subways are done!' 
update-tiger: - if: inputs.jobs == 'tiger' || inputs.jobs == 'all' + if: inputs.jobs == 'tiger' || inputs.jobs == 'all-except-upload' name: Update TIGER runs-on: mapfilemaker + needs: + - clone-repos container: - image: ubuntu:latest + image: codeberg.org/comaps/maps_generator:f6d53d54f794 volumes: - - /media/4tbexternal:/media/4tbexternal + - /mnt/4tbexternal/:/mnt/4tbexternal/ + - /mnt/4tbexternal/osm-planet:/home/planet concurrency: group: ${{ github.workflow }}-map-generator-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true steps: - - name: Install dependencies - shell: bash - run: | - apt-get update -qq && apt-get install -y --no-install-recommends \ - build-essential \ - clang \ - cmake \ - ninja-build \ - ca-certificates \ - git \ - wget2 - - name: Clone main repo if necessary - shell: bash - run: | - if [ ! -d /media/4tbexternal/comaps-init ]; then - cd /media/4tbexternal - git clone --recurse-submodules --shallow-submodules -b rebase-generator-pastk-wb251014 --single-branch https://codeberg.org/comaps/comaps.git comaps-init - fi + - uses: actions/cache@v4 + with: + path: "~" + key: cache-${{ github.run_id }}-${{ github.run_attempt }} - name: Build address_parser shell: bash run: | - cd /media/4tbexternal/comaps-init - rm -rf ../omim-build-relwithdebinfo/CMakeCache.txt - rm -rf ../omim-build-relwithdebinfo/CMakeFiles - ./tools/unix/build_omim.sh -R address_parser_tool + cd ~/comaps + #rm -rf ~/omim-build-relwithdebinfo/CMakeCache.txt + #rm -rf ~/omim-build-relwithdebinfo/CMakeFiles + ./tools/unix/build_omim.sh -p ~ -R address_parser_tool - name: Update TIGER from Nominatim shell: bash + # TODO: use curl instead of wget2 run: | - cd /media/4tbexternal/osm-planet/ + # TODO: maybe remove old osm-planet/tiger first? 
+ cd /home/planet/ + mkdir -p tiger wget2 https://nominatim.org/data/tiger-nominatim-preprocessed-latest.csv.tar.gz - tar -xOzf tiger-nominatim-preprocessed-latest.csv.tar.gz | /media/4tbexternal/omim-build-relwithdebinfo/address_parser_tool --output_path=./tiger + cd ~/comaps + tar -xOzf /home/planet/tiger-nominatim-preprocessed-latest.csv.tar.gz | ~/omim-build-relwithdebinfo/address_parser_tool --output_path=/home/planet/tiger generate-maps: - if: inputs.jobs == 'maps' || inputs.jobs == 'all' + if: inputs.jobs == 'maps' || inputs.jobs == 'all-except-upload' name: Generate Maps runs-on: mapfilemaker + needs: + - clone-repos + timeout-minutes: 40320 container: - image: ubuntu:latest + image: codeberg.org/comaps/maps_generator:f6d53d54f794 volumes: - - /media/4tbexternal:/media/4tbexternal + - /mnt/4tbexternal/:/mnt/4tbexternal/ + - /mnt/4tbexternal/osm-planet:/home/planet options: --ulimit nofile=262144:262144 concurrency: group: ${{ github.workflow }}-map-generator-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true steps: - - name: Install dependencies - shell: bash - run: | - apt-get update -qq \ - && apt-get install -y --no-install-recommends \ - curl \ - osmctools \ - rclone \ - git \ - ca-certificates \ - openssh-client \ - sshpass \ - vim \ - wget \ - build-essential \ - clang \ - cmake \ - ninja-build \ - python3 \ - python3-pip \ - python3.12-venv \ - qt6-base-dev \ - qt6-positioning-dev \ - libc++-dev \ - libfreetype-dev \ - libglvnd-dev \ - libgl1-mesa-dev \ - libharfbuzz-dev \ - libicu-dev \ - libqt6svg6-dev \ - libqt6positioning6-plugins \ - libqt6positioning6 \ - libsqlite3-dev \ - libxrandr-dev \ - libxinerama-dev \ - libxcursor-dev \ - libxi-dev \ - zlib1g-dev - - name: Clone repo if necessary - shell: bash - run: | - if [ ! -d /media/4tbexternal/comaps-init ]; then - cd /media/4tbexternal - git clone --recurse-submodules --shallow-submodules -b rebase-generator-pastk-wb251014 --single-branch https://codeberg.org/comaps/comaps.git comaps-init - fi + - uses: actions/cache@v4 + with: + path: "~" + key: cache-${{ github.run_id }}-${{ github.run_attempt }} - name: Make output folders if necessary shell: bash run: | - if [ ! -d /media/4tbexternal/osm-maps ]; then - mkdir -p /media/4tbexternal/osm-maps + if [ ! -d /mnt/4tbexternal/osm-maps ]; then + mkdir -p /mnt/4tbexternal/osm-maps fi - name: Get SRTM if necessary + # TODO: it should be a separate step like Wiki or isolines shell: bash run: | - if [ ! -d /media/4tbexternal/osm-planet/SRTM-patched-europe/ ]; then + if [ ! -d /home/planet/SRTM-patched-europe/ ]; then echo "ERROR: NO SRTM" exit 1 fi - - name: Symlink paths for repo scripts - shell: bash - run: | - mkdir -p /root/OM - ln -s /media/4tbexternal/comaps-init /root/OM/organicmaps - ln -s /media/4tbexternal/osm-planet /home/planet - ln -s /media/4tbexternal/osm-maps /root/OM/maps_build - name: Run docker_maps_generator.sh shell: bash run: | - cd /root/OM/organicmaps - ./tools/unix/docker_maps_generator.sh \ No newline at end of file + cd ~/comaps + bash ./tools/unix/maps/docker_maps_generator.sh + - name: Notify Zulip + run: | + curl -X POST https://comaps.zulipchat.com/api/v1/messages \ + -u $ZULIP_BOT_EMAIL:$ZULIP_API_KEY \ + --data-urlencode type=stream \ + --data-urlencode 'to="DevOps"' \ + --data-urlencode topic=codeberg-bot \ + --data-urlencode 'content=Generator is done!' 
+ + upload-maps: + if: inputs.jobs == 'upload' + name: Upload Maps + runs-on: mapfilemaker + container: + image: codeberg.org/comaps/maps_generator:f6d53d54f794 + volumes: + - /mnt/4tbexternal/:/mnt/4tbexternal/ + - /mnt/4tbexternal/osm-planet:/home/planet + concurrency: + group: ${{ github.workflow }}-map-generator-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + steps: + - name: Write config file + run: | + mkdir -p ~/.config/rclone/ + echo "${{ secrets.RCLONE_CONF }}" > ~/.config/rclone/rclone.conf + - name: Upload map files to CDNs + shell: bash + run: | + shopt -s nullglob + buildfolder=$(find /mnt/4tbexternal/osm-maps/ -mindepth 1 -maxdepth 1 -iname "2*" -type d | sort -n -r | head -1 | cut -d/ -f5) + builddate=$(find /mnt/4tbexternal/osm-maps/*/ -mindepth 1 -maxdepth 1 -iname "2*" -type d | sort -n -r | head -1 | cut -d/ -f6) + mwmfiles=( /mnt/4tbexternal/osm-maps/$buildfolder/$builddate/*.mwm ) + + if (( ${#mwmfiles[@]} )); then + echo "<$(date +%T)> Uploading maps from $buildfolder/$builddate..." + cd ~/comaps/tools/unix/maps + ./upload_to_cdn.sh /mnt/4tbexternal/osm-maps/$buildfolder/$builddate + echo "<$(date +%T)> Finished uploading maps from $buildfolder/$builddate." + else + echo "<$(date +%T)> No MWM files in /mnt/4tbexternal/osm-maps/$buildfolder/$builddate/*.mwm, not uploading maps." + echo "<$(date +%T)> Found top level: $(ls -alt /mnt/4tbexternal/osm-maps/*)" + echo "<$(date +%T)> Found second level: $(ls -alt /mnt/4tbexternal/osm-maps/$buildfolder/*)" + fi + - name: Notify Zulip + run: | + curl -X POST https://comaps.zulipchat.com/api/v1/messages \ + -u $ZULIP_BOT_EMAIL:$ZULIP_API_KEY \ + --data-urlencode type=stream \ + --data-urlencode 'to="DevOps"' \ + --data-urlencode topic=codeberg-bot \ + --data-urlencode 'content=Upload is done!' 
+ + \ No newline at end of file diff --git a/.gitignore b/.gitignore index f91b69f1a..03f25e08e 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ Makefile.Release object_script.*.Debug object_script.*.Release compile_commands.json +*.local.* stxxl.errlog stxxl.log diff --git a/tools/python/maps_generator/generator/exceptions.py b/tools/python/maps_generator/generator/exceptions.py index 3f8c94f2e..dec292b9b 100644 --- a/tools/python/maps_generator/generator/exceptions.py +++ b/tools/python/maps_generator/generator/exceptions.py @@ -1,5 +1,5 @@ import os - +import subprocess class MapsGeneratorError(Exception): pass @@ -35,9 +35,24 @@ class FailedTest(MapsGeneratorError): def wait_and_raise_if_fail(p): if p.wait() != os.EX_OK: - args = p.args - logs = p.output.name - if p.error.name != logs: - logs += " and " + p.error.name - msg = f"The launch of {args.pop(0)} failed.\nArguments used: {' '.join(args)}\nSee details in {logs}" - raise BadExitStatusError(msg) + if type(p) is subprocess.Popen: + args = p.args + stdout = p.stdout + stderr = p.stderr + logs = None + errors = None + if type(stdout) is not type(None): + logs = stdout.read(256).decode() + if type(stderr) is not type(None): + errors = stderr.read(256).decode() + if errors != logs: + logs += " and " + errors + msg = f"The launch of {args.pop(0)} failed.\nArguments used: {' '.join(args)}\nSee details in {logs}" + raise BadExitStatusError(msg) + else: + args = p.args + logs = p.output.name + if p.error.name != logs: + logs += " and " + p.error.name + msg = f"The launch of {args.pop(0)} failed.\nArguments used: {' '.join(args)}\nSee details in {logs}" + raise BadExitStatusError(msg) diff --git a/tools/python/maps_generator/generator/stages_declaration.py b/tools/python/maps_generator/generator/stages_declaration.py index d57e67016..06756a3d7 100644 --- a/tools/python/maps_generator/generator/stages_declaration.py +++ b/tools/python/maps_generator/generator/stages_declaration.py @@ -11,6 +11,7 @@ import multiprocessing import os import shutil import tarfile +import errno from collections import defaultdict from concurrent.futures import ThreadPoolExecutor, as_completed from typing import AnyStr @@ -168,8 +169,18 @@ class StageDownloadDescriptions(Stage): ) """ - src = "/home/planet/descriptions" + # The src folder is hardcoded here and must be implemented on the map building machine + src = "/home/planet/wikipedia/descriptions" + # The dest folder will generally become build/*/intermediate_data/descriptions dest = env.paths.descriptions_path + # An empty source folder is a big problem + try: + if os.path.isdir(src): + print("Found %s" % (src)) + else: + raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), src) + except OSError as e: + print("rmtree error: %s - %s" % (e.filename, e.strerror)) # Empty folder "descriptions" can be already created. 
try: if os.path.isdir(dest): diff --git a/tools/python/maps_generator/generator/stages_tests.py b/tools/python/maps_generator/generator/stages_tests.py index 821ec2add..b49f335b6 100644 --- a/tools/python/maps_generator/generator/stages_tests.py +++ b/tools/python/maps_generator/generator/stages_tests.py @@ -9,6 +9,8 @@ from maps_generator.utils.file import download_file def make_test_booking_data(max_days): def test_booking_data(env: Env, logger, *args, **kwargs): + if not settings.HOTELS_URL: + return None base_url, _ = settings.HOTELS_URL.rsplit("/", maxsplit=1) url = f"{base_url}/meta.json" meta_path = os.path.join(env.paths.tmp_dir(), "hotels-meta.json") diff --git a/tools/python/maps_generator/var/etc/map_generator.ini.prod b/tools/python/maps_generator/var/etc/map_generator.ini.prod new file mode 100644 index 000000000..036893991 --- /dev/null +++ b/tools/python/maps_generator/var/etc/map_generator.ini.prod @@ -0,0 +1,107 @@ +[Developer] +# Path to the `comaps` source code repository: +OMIM_PATH: ~/comaps +# A path with the generator_tool binary: +BUILD_PATH: ~/omim-build-relwithdebinfo + + +[Main] +# A special small planet file will be downloaded if DEBUG is set to 1. +DEBUG: 0 +# A main working directory. There is a subdirectory created for each generator run +# which contains the planet and other downloads, temporary build files, logs and completed MWMs. +MAIN_OUT_PATH: /mnt/4tbexternal/osm-maps +# Path for storing caches for nodes, ways, relations. +# If it's not set then caches are stored inside the directory of the current build. +# CACHE_PATH: ${Main:MAIN_OUT_PATH}/cache + + +[Generator tool] +# Path to the data/ folder in the repository: +USER_RESOURCE_PATH: ${Developer:OMIM_PATH}/data +# Features stage only parallelism level. Set to 0 for auto detection. +THREADS_COUNT_FEATURES_STAGE: 0 +# Do not change it. This is determined automatically. +NODE_STORAGE: mem + + +[Osm tools] +# Path to osmctools binaries: +OSM_TOOLS_PATH: /usr/bin/ +# If the binaries are not found neither in the configured path nor system-wide, +# then the tools are built from the sources: +OSM_TOOLS_SRC_PATH: ${Developer:OMIM_PATH}/tools/osmctools + +[Logging] +# maps_generator's general (python output only) log file path and name. +# More detailed logs that include output of the `generator_tool` binary +# are located in the `logs/` subdir of a particular build directory, +# e.g. `maps_build/2023_06_04__20_05_07/logs/`. +LOG_FILE_PATH: ${Main:MAIN_OUT_PATH}/generation.log + + +[External] +# Planet file location. It should be a dump of OSM data in osm.pbf format. +# By default its an entire planet from "planet.openstreetmap.org". +# Or set it to a particular country/region extract from e.g. [Geofabrik](http://download.geofabrik.de/index.html). +# Note that an entire planet generation takes 40+ hours on a 256GB RAM server (and 1TB+ disk space). +# Stick to smaller extracts unless you have a machine this large. +# Here and further, its possible to specify either an URL (to be downloaded automatically) +# or a local file path like file:///path/to/file. +# A sample URL to download a latest OSM dump for North Macedonia: +PLANET_URL: file:///home/planet/planet/planet.o5m +# Location of the md5 checksum of the planet file: +PLANET_MD5_URL: ${External:PLANET_URL}.md5 +# A base url to the latest_coasts.geom and latest_coasts.rawgeom files. 
+# For example, if PLANET_COASTS_URL = https://somesite.com/download/
+# then the https://somesite.com/download/latest_coasts.geom URL will be used to download latest_coasts.geom and
+# the https://somesite.com/download/latest_coasts.rawgeom URL will be used to download latest_coasts.rawgeom.
+# Comment out to skip downloading the coastline files.
+PLANET_COASTS_URL: file:///home/planet/
+# Should be 'true' for an entire planet build to make a special routing section in World.mwm
+# for alerting about absent regions without which the route can't be built.
+# NEED_BUILD_WORLD_ROADS: true
+# Subway file location, see docs/SUBWAY_GENERATION.md if you want to generate your own file.
+# Comment out to disable subway layer generation.
+
+SUBWAY_URL: file:///home/planet/subway/subways.transit.json
+
+# Location of the EXPERIMENTAL GTFS-extracted public transport transit files:
+# TRANSIT_URL:
+
+# URLs for production maps generation.
+# UGC_URL:
+# HOTELS_URL:
+# PROMO_CATALOG_CITIES:
+# POPULARITY_URL:
+# FOOD_URL:
+# FOOD_TRANSLATIONS_URL:
+
+SRTM_PATH: /home/planet/SRTM-patched-europe/
+ISOLINES_PATH: /home/planet/isolines/
+ADDRESSES_PATH: /home/planet/tiger/
+
+# Local paths (not URLs!) to .csv files.
+UK_POSTCODES_URL: /home/planet/postcodes/gb-postcode-data/gb_postcodes.csv
+US_POSTCODES_URL: /home/planet/postcodes/us-postcodes/uszips.csv
+
+
+[Stages]
+# Set to 1 to update the entire OSM planet file (as taken from "planet.openstreetmap.org")
+# via the osmupdate tool before the generation. Not for use with partial planet extracts.
+NEED_PLANET_UPDATE: 0
+# If you want to calculate diffs you need to specify where the old maps are,
+# e.g. ${Main:MAIN_OUT_PATH}/2021_03_16__09_00_00/
+DATA_ARCHIVE_DIR: ${Generator tool:USER_RESOURCE_PATH}
+# How many versions in the archive to use for diff calculation:
+DIFF_VERSION_DEPTH: 2
+
+
+[Common]
+# Default parallelism level for most jobs. Set to 0 for auto detection.
+THREADS_COUNT: 0
+
+
+[Stats]
+# Path to rules for calculating statistics by type:
+STATS_TYPES_CONFIG: ${Developer:OMIM_PATH}/tools/python/maps_generator/var/etc/stats_types_config.txt
diff --git a/tools/unix/maps/Dockerfile b/tools/unix/maps/Dockerfile
new file mode 100644
index 000000000..b9f732408
--- /dev/null
+++ b/tools/unix/maps/Dockerfile
@@ -0,0 +1,68 @@
+# See run-docker.sh for cloning, building, and running the maps generator Docker routine
+FROM debian:latest
+
+ARG DEBIAN_FRONTEND=noninteractive
+ENV TZ=Etc/UTC
+
+RUN apt-get update -qq \
+    && apt-get install -y --no-install-recommends \
+    build-essential \
+    ca-certificates \
+    cargo \
+    clang \
+    cmake \
+    curl \
+    git \
+    jq \
+    libc++-dev \
+    libfreetype-dev \
+    libgeos-dev \
+    libgl1-mesa-dev \
+    libglvnd-dev \
+    libharfbuzz-dev \
+    libicu-dev \
+    libqt6positioning6 \
+    libqt6positioning6-plugins \
+    libqt6svg6-dev \
+    libsqlite3-dev \
+    libxcursor-dev \
+    libxi-dev \
+    libxinerama-dev \
+    libxrandr-dev \
+    ninja-build \
+    nodejs \
+    openssh-client \
+    osmctools \
+    osmium-tool \
+    pyosmium \
+    python3 \
+    python3-pip \
+    python3-venv \
+    python-is-python3 \
+    python3-dev \
+    qt6-base-dev \
+    qt6-positioning-dev \
+    rclone \
+    rustc \
+    sshpass \
+    vim-tiny \
+    wget \
+    wget2 \
+    zlib1g-dev \
+    && rm -rf /var/cache/apt/* /var/lib/apt/lists/*;
+
+# Allow pip to install system-wide in the container
+RUN pip3 install "protobuf<4" --break-system-packages
+
+# Mount a volume to store the large input, output, and temp files here
+RUN mkdir /mnt/4tbexternal
+# And a volume to store >10 GB files for the planet output here
+RUN mkdir /home/planet
+
+WORKDIR /root
+
+# The actual map generation process will run something like this,
+# but ~/comaps isn't mounted nor provided in the docker image.
+#CMD ~/comaps/tools/unix/maps/docker_maps_generator.sh
+
+CMD /bin/bash
\ No newline at end of file
diff --git a/tools/unix/maps/docker_maps_generator.sh b/tools/unix/maps/docker_maps_generator.sh
new file mode 100644
index 000000000..d0abc0b74
--- /dev/null
+++ b/tools/unix/maps/docker_maps_generator.sh
@@ -0,0 +1,57 @@
+#!/usr/bin/env bash
+
+set -e
+
+echo "<$(date +%T)> Starting..."
+
+# Prepare paths
+# Most other paths in /mnt/4tbexternal or /home/planet are already created by the Dockerfile or CI/CD.
+#
+mkdir -p /root/.config/CoMaps # Odd mkdir permission errors in generator_tool in Docker without these
+chmod -R 777 /root/.config
+mkdir -p /home/planet/postcodes/gb-postcode-data/
+mkdir -p /home/planet/postcodes/us-postcodes/
+mkdir -p /home/planet/SRTM-patched-europe/
+mkdir -p /home/planet/subway
+
+echo "<$(date +%T)> Running ./configure.sh ..."
+cd ~/comaps
+export SKIP_MAP_DOWNLOAD=1 SKIP_GENERATE_SYMBOLS=1
+./configure.sh
+
+echo "<$(date +%T)> Compiling tools..."
+cd ~/comaps
+./tools/unix/build_omim.sh -p ~ -R generator_tool
+./tools/unix/build_omim.sh -p ~ -R world_roads_builder_tool
+./tools/unix/build_omim.sh -p ~ -R mwm_diff_tool
+cd tools/python/maps_generator
+python3 -m venv /tmp/venv
+/tmp/venv/bin/pip3 install -r requirements_dev.txt
+
+echo "<$(date +%T)> Copying map generator INI..."
+cp var/etc/map_generator.ini.prod var/etc/map_generator.ini
+
+
+cd ~/comaps/tools/python
+if [ "$MWMCONTINUE" = "true" ]; then # the boolean workflow input arrives as the string "true"/"false"
+
+echo "<$(date +%T)> Continuing from preexisting generator run..."
+/tmp/venv/bin/python -m maps_generator --skip="MwmDiffs" --continue
+
+else
+
+if [[ -n $MWMCOUNTRIES ]]; then
+
+echo "<$(date +%T)> Generating only specific maps [$MWMCOUNTRIES]..."
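+# MWMCOUNTRIES comes from the workflow input "map-generator-countries", e.g. "US_New York_*, foo"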
+/tmp/venv/bin/python -m maps_generator --countries="$MWMCOUNTRIES" --skip="MwmDiffs"
+
+else
+
+echo "<$(date +%T)> Generating maps..."
+/tmp/venv/bin/python -m maps_generator --skip="MwmDiffs"
+
+fi
+fi
+
+echo "<$(date +%T)> DONE"
diff --git a/tools/unix/maps/generate_subways.sh b/tools/unix/maps/generate_subways.sh
index 867210368..467cb38cf 100755
--- a/tools/unix/maps/generate_subways.sh
+++ b/tools/unix/maps/generate_subways.sh
@@ -20,9 +20,9 @@ export SKIP_PLANET_UPDATE="1"
 # If unavailable then replace with a local file.
 # TODO: keep the downloaded csv file from the latest run.
 #export CITIES_INFO_URL=""
-export TMPDIR="$BUILD_PATH/subways"
+export TMPDIR="$BUILD_PATH/subway"
 # The output file, which needs post-processing by transit_graph_generator.py
-export MAPSME="$SUBWAYS_PATH/subways.json"
+export MAPSME="$SUBWAYS_PATH/subway.json"
 
 # Produce additional files needed for https://cdn.organicmaps.app/subway/
 export HTML_DIR="$SUBWAYS_VALIDATOR_PATH"
@@ -30,7 +30,12 @@ export DUMP="$SUBWAYS_VALIDATOR_PATH"
 export GEOJSON="$SUBWAYS_VALIDATOR_PATH"
 export DUMP_CITY_LIST="$SUBWAYS_VALIDATOR_PATH/cities.txt"
 
-"$SUBWAYS_REPO_PATH/scripts/process_subways.sh" 2>&1 | tee "$SUBWAYS_LOG"
+# cd to the subways repo so relative paths work in the script
+PREVDIR=$(pwd)
+cd "$SUBWAYS_REPO_PATH"
+echo "Running process_subways.sh:"
+./scripts/process_subways.sh 2>&1 | tee "$SUBWAYS_LOG"
+cd "$PREVDIR"
 
 # Make render.html available for map visualization on the web
 cp -r "$SUBWAYS_REPO_PATH"/render/* "$SUBWAYS_VALIDATOR_PATH/"
diff --git a/tools/unix/maps/run-docker.sh b/tools/unix/maps/run-docker.sh
new file mode 100644
index 000000000..a156b3533
--- /dev/null
+++ b/tools/unix/maps/run-docker.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+# Run the maps generator via Docker manually without CI
+# See .forgejo/workflows/map-generator.yml for steps to run before the main mapgen process,
+# e.g. clone the repos, get/update the planet file, generate isolines etc.
+
+# To build the docker container:
+# cd /mnt/4tbexternal/comaps/tools/unix/maps
+# docker build . -t maps_generator
+#
+# To push for CI/CD, tag for Codeberg:
+# docker login codeberg.org
+# docker tag maps_generator codeberg.org/comaps/maps_generator:latest
+# docker push codeberg.org/comaps/maps_generator:latest
+# You can also tag and push the image ID for posterity: codeberg.org/comaps/maps_generator:1234abcd
+# If you get a Dockerfile not found error, especially on an XFS partition, try copying the Dockerfile to an ext4 partition first.
+# Or use docker via apt instead of snap.
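+# Note: .forgejo/workflows/map-generator.yml pins a specific image tag (e.g. codeberg.org/comaps/maps_generator:f6d53d54f794),
+# so update the workflow after pushing and tagging a new image.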
+ +# We assume that the following will be cloned into the container itself at runtime: +# ~/comaps (comaps main app repo) +# ~/subways (repo for processing OSM subway/transit info) +# ~/wikiparser (repo for processing Wikipedia data) + +# We also assume a number of files/folders/repos are pre-set-up before mounting via volumes below: +# /mnt/4tbexternal (base folder for directory traversal) +# /mnt/4tbexternal/osm-maps (folder for holding generated map data output) +# /home/planet (folder for holding required input dumps) + +docker run \ + --ulimit nofile=262144:262144 \ + -v /mnt/4tbexternal/:/mnt/4tbexternal/ \ + -v /mnt/4tbexternal/osm-planet:/home/planet \ + -it codeberg.org/comaps/maps_generator:latest \ + ~/comaps/tools/unix/maps/docker_maps_generator.sh diff --git a/tools/unix/maps/settings.sh.prod b/tools/unix/maps/settings.sh.prod new file mode 100644 index 000000000..2389be8c2 --- /dev/null +++ b/tools/unix/maps/settings.sh.prod @@ -0,0 +1,26 @@ +## NOTE: edit the settings.sh file to customize/override the defaults. + +# Absolutize & normalize paths. +REPO_PATH="${REPO_PATH:-$(cd "$(dirname "$0")/../../.."; pwd -P)}" + +#TODO: is base path used? +BASE_PATH="${BASE_PATH:-$REPO_PATH/..}" +# Source map data and processed outputs e.g. wiki articles +DATA_PATH="${DATA_PATH:-/home/planet}" +# Temporary files +BUILD_PATH="${BUILD_PATH:-$DATA_PATH/build}" +# Other code repositories, e.g. subways, wikiparser.. +CODE_PATH="${CODE_PATH:-~}" + +# OSM planet source files + +PLANET_PATH="${PLANET_PATH:-$DATA_PATH/planet}" +PLANET_PBF="${PLANET_PBF:-$PLANET_PATH/planet-latest.osm.pbf}" +PLANET_O5M="${PLANET_O5M:-$PLANET_PATH/planet-latest.o5m}" + +# Subways + +SUBWAYS_REPO_PATH="${SUBWAYS_REPO_PATH:-/root/subways}" +SUBWAYS_PATH="${SUBWAYS_PATH:-$DATA_PATH/subway}" +SUBWAYS_LOG="${SUBWAYS_LOG:-$SUBWAYS_PATH/subway.log}" +SUBWAYS_VALIDATOR_PATH="${SUBWAYS_VALIDATOR_PATH:-$SUBWAYS_PATH/validator}" diff --git a/tools/unix/maps/settings_default.sh b/tools/unix/maps/settings_default.sh index 95cf37974..60594c66a 100644 --- a/tools/unix/maps/settings_default.sh +++ b/tools/unix/maps/settings_default.sh @@ -20,6 +20,6 @@ PLANET_O5M="${PLANET_O5M:-$PLANET_PATH/planet-latest.o5m}" # Subways SUBWAYS_REPO_PATH="${SUBWAYS_REPO_PATH:-$CODE_PATH/subways}" -SUBWAYS_PATH="${SUBWAYS_PATH:-$DATA_PATH/subways}" -SUBWAYS_LOG="${SUBWAYS_LOG:-$SUBWAYS_PATH/subways.log}" -SUBWAYS_VALIDATOR_PATH="${SUBWAYS_VALIDATOR_PATH:-$SUBWAYS_PATH/validator}" +SUBWAYS_PATH="${SUBWAYS_PATH:-$DATA_PATH/subway}" +SUBWAYS_LOG="${SUBWAYS_LOG:-$SUBWAYS_PATH/subway.log}" +SUBWAYS_VALIDATOR_PATH="${SUBWAYS_VALIDATOR_PATH:-$SUBWAYS_PATH/validator}" \ No newline at end of file diff --git a/tools/unix/maps/upload_to_cdn.sh b/tools/unix/maps/upload_to_cdn.sh index f3462fc66..ab0af4e98 100755 --- a/tools/unix/maps/upload_to_cdn.sh +++ b/tools/unix/maps/upload_to_cdn.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -# Upload new maps version to all CDN nodes (in parallel). +# Upload new maps version to all CDN nodes (in parallel) and remove old versions. 
# Use following commands for deleting older maps: # @@ -8,15 +8,15 @@ # First list all maps versions on the server # sudo rclone lsd ru1:comaps-maps/maps # Delete the old version -# sudo rclone purge -v ru1:comaps-maps/maps/250713 +# sudo rclone purge -v ru1:comaps-maps/maps/250713/ # # fi1 - max 3 versions # sudo rclone lsd fi1:/var/www/html/maps -# sudo rclone purge -v fi1:/var/www/html/maps/250713 +# sudo rclone purge -v fi1:/var/www/html/maps/250713/ # # de1 - max 6 versions # sudo rclone lsd de1:/var/www/html/comaps-cdn/maps -# sudo rclone purge -v de1:/var/www/html/comaps-cdn/maps/250713 +# sudo rclone purge -v de1:/var/www/html/comaps-cdn/maps/250713/ # # us2 - all versions, don't delete # sudo rclone lsd us2:comaps-map-files/maps @@ -39,6 +39,54 @@ DIR=$(dirname $1)/$MAPS echo "Uploading maps folder $DIR to $MAPS" +# Remove old versions before uploading new ones +echo "Checking for old versions to remove..." + +# ru1 - keep max 3 versions +echo "Cleaning ru1 (keeping 3 newest versions)..." +OLD_VERSIONS_RU1=$(rclone lsd ru1:comaps-maps/maps --max-depth 1 | awk '{print $5}' | sort -r | tail -n +4) +for version in $OLD_VERSIONS_RU1; do + if [ $version -gt 250101 ]; then + echo " Deleting ru1:comaps-maps/maps/$version/" + rclone purge -v ru1:comaps-maps/maps/$version/ + fi +done + +# fi1 - keep max 3 versions +echo "Cleaning fi1 (keeping 3 newest versions)..." +OLD_VERSIONS_FI1=$(rclone lsd fi1:/var/www/html/maps --max-depth 1 | awk '{print $5}' | sort -r | tail -n +4) +for version in $OLD_VERSIONS_FI1; do + if [ $version -gt 250101 ]; then + echo " Deleting fi1:/var/www/html/maps/$version/" + rclone purge -v fi1:/var/www/html/maps/$version/ + fi +done + +# de1 - keep max 6 versions +echo "Cleaning de1 (keeping 6 newest versions)..." +OLD_VERSIONS_DE1=$(rclone lsd de1:/var/www/html/comaps-cdn/maps --max-depth 1 | awk '{print $5}' | sort -r | tail -n +7) +for version in $OLD_VERSIONS_DE1; do + if [ $version -gt 250101 ]; then + echo " Deleting de1:/var/www/html/comaps-cdn/maps/$version/" + rclone purge -v de1:/var/www/html/comaps-cdn/maps/$version/ + fi +done + +# fr1 - keep max 6 versions +echo "Cleaning fr1 (keeping 6 newest versions)..." 
+OLD_VERSIONS_FR1=$(rclone lsd fr1:/data/maps --max-depth 1 | awk '{print $5}' | sort -r | tail -n +7) +for version in $OLD_VERSIONS_FR1; do + if [ $version -gt 250101 ]; then + echo " Deleting fr1:/data/maps/$version/" + rclone purge -v fr1:/data/maps/$version/ + fi +done + +# us2 - keep all versions (no cleanup) +echo "Skipping us2 cleanup (keeping all versions)" + +echo "Old version cleanup complete" + echo "Uploading to us2" # An explicit mwm/txt filter is used to skip temp files when run for an unfinished generation rclone copy -v --include "*.{mwm,txt}" $DIR us2:comaps-map-files/maps/$MAPS & @@ -52,9 +100,29 @@ rclone copy -v --include "*.{mwm,txt}" $DIR fi1:/var/www/html/maps/$MAPS & echo "Uploading to de1" rclone copy -v --include "*.{mwm,txt}" $DIR de1:/var/www/html/comaps-cdn/maps/$MAPS & +echo "Uploading to fr1" +rclone copy -v --include "*.{mwm,txt}" $DIR fr1:/data/maps/$MAPS & + # us1 is not used for maps atm # rclone lsd us1:/home/dh_zzxxrk/cdn-us-1.comaps.app/maps wait +echo "Running once more without parallelization to output status:" + +echo "us2 status:" +rclone copy -v --include "*.{mwm,txt}" $DIR us2:comaps-map-files/maps/$MAPS + +echo "ru1 status:" +rclone copy -v --include "*.{mwm,txt}" $DIR ru1:comaps-maps/maps/$MAPS + +echo "fi1 status:" +rclone copy -v --include "*.{mwm,txt}" $DIR fi1:/var/www/html/maps/$MAPS + +echo "de1 status:" +rclone copy -v --include "*.{mwm,txt}" $DIR de1:/var/www/html/comaps-cdn/maps/$MAPS + +echo "fr1 status:" +rclone copy -v --include "*.{mwm,txt}" $DIR fr1:/data/maps/$MAPS + echo "Upload complete"