Compare commits


1 commit

Author: Konstantin Pastbin
SHA1: 99591d1e54
Message: [fdroid] Release version 2025.06.10-4
Signed-off-by: Konstantin Pastbin <konstantin.pastbin@gmail.com>
Date: 2025-06-10 21:15:28 +07:00

68 changed files with 667 additions and 2094 deletions

1
.gitignore vendored
View File

@@ -9,7 +9,6 @@ Makefile.Release
object_script.*.Debug
object_script.*.Release
compile_commands.json
*.local.*
stxxl.errlog
stxxl.log

View File

@@ -31,7 +31,6 @@ if (NOT WITH_SYSTEM_PROVIDED_3PARTY)
set(JANSSON_WITHOUT_TESTS ON)
add_subdirectory(jansson/jansson/)
target_include_directories(jansson INTERFACE "${PROJECT_BINARY_DIR}/3party/jansson/jansson/include")
add_library(jansson::jansson ALIAS jansson)
# Add gflags library.
set(GFLAGS_BUILD_TESTING OFF)

View File

@@ -94,11 +94,6 @@ if (PLATFORM_WIN)
)
endif()
# Try fast native arch.
if (PLATFORM_LINUX)
add_compile_options(-march=native)
endif()
# Built-in CMake configurations: Debug, Release, RelWithDebInfo, MinSizeRel
if (${CMAKE_BUILD_TYPE} STREQUAL "Debug")
add_definitions(-DDEBUG)
@@ -108,29 +103,12 @@ if (${CMAKE_BUILD_TYPE} STREQUAL "Debug")
elseif (${CMAKE_BUILD_TYPE} MATCHES "Rel")
add_definitions(-DRELEASE)
if (NOT MSVC)
add_compile_options(-Ofast $<$<CXX_COMPILER_ID:GNU>:-flto=auto>) # Also enables -ffast-math
add_compile_options(-Ofast) # Also enables -ffast-math
endif()
else()
message(FATAL_ERROR "Unknown build type: " ${CMAKE_BUILD_TYPE})
endif()
if (${CMAKE_BUILD_TYPE} STREQUAL "RelWithDebInfo")
add_compile_options(-fno-omit-frame-pointer)
endif()
# Linux GCC LTO plugin fix.
if (PLATFORM_LINUX AND (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE MATCHES "^Rel"))
# To force errors if LTO was not enabled.
add_compile_options(-fno-fat-lto-objects)
# To fix ar and ranlib "plugin needed to handle lto object".
string(REGEX MATCH "[0-9]+" GCC_MAJOR_VERSION ${CMAKE_CXX_COMPILER_VERSION})
file(GLOB_RECURSE plugin /usr/lib/gcc/*/${GCC_MAJOR_VERSION}/liblto_plugin.so)
set(CMAKE_C_ARCHIVE_CREATE "<CMAKE_AR> --plugin ${plugin} qcs <TARGET> <OBJECTS>")
set(CMAKE_C_ARCHIVE_FINISH "<CMAKE_RANLIB> --plugin ${plugin} <TARGET>")
set(CMAKE_CXX_ARCHIVE_CREATE "<CMAKE_AR> --plugin ${plugin} qcs <TARGET> <OBJECTS>")
set(CMAKE_CXX_ARCHIVE_FINISH "<CMAKE_RANLIB> --plugin ${plugin} <TARGET>")
endif()
message(STATUS "Build type: " ${CMAKE_BUILD_TYPE})
if (PLATFORM_LINUX OR PLATFORM_ANDROID)

View File

@@ -1 +1 @@
version: 2025.03.02-7-FDroid+25030207
version: 2025.06.10-4-FDroid+25061004

View File

@@ -67,11 +67,7 @@ UNIT_TEST(AlmostEqualULPs_double)
TEST(!base::AlmostEqualULPs(1.0, -1.0), ());
TEST(!base::AlmostEqualULPs(2.0, -2.0), ());
TEST(!base::AlmostEqualULPs(dmax, -dmax), ());
// That's why AlmostEqualULPs is a strange function, IMHO.
TEST(!base::AlmostEqualULPs(0.0, eps), ());
TEST(!base::AlmostEqualULPs(-eps, 0.0), ());
TEST(!base::AlmostEqualULPs(eps, 2.0*eps), ());
}
UNIT_TEST(AlmostEqualULPs_float)
@@ -95,11 +91,7 @@ UNIT_TEST(AlmostEqualULPs_float)
TEST(!base::AlmostEqualULPs(1.0f, -1.0f), ());
TEST(!base::AlmostEqualULPs(2.0f, -2.0f), ());
TEST(!base::AlmostEqualULPs(dmax, -dmax), ());
// That's why AlmostEqualULPs is a strange function, IMHO.
TEST(!base::AlmostEqualULPs(0.0f, eps), ());
TEST(!base::AlmostEqualULPs(-eps, 0.0f), ());
TEST(!base::AlmostEqualULPs(eps, 2.0f*eps), ());
}
UNIT_TEST(AlmostEqual_Smoke)
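The assertions above that pair 0.0 with eps show how unforgiving ULP-based comparison is around zero. A minimal sketch of the idea, assuming base::AlmostEqualULPs measures distance in representable doubles as its name suggests (the project's exact implementation may differ):

// For non-negative doubles the raw IEEE-754 bit pattern is monotonic, so the
// distance in ULPs from zero is simply the bit pattern read as an integer.
#include <cfloat>
#include <cstdint>
#include <cstring>
#include <iostream>

int64_t UlpsFromZero(double d)
{
  int64_t i;
  std::memcpy(&i, &d, sizeof(d));
  return i;
}

int main()
{
  // DBL_EPSILON (~2.2e-16) is over 4 * 10^18 representable doubles away from 0.0,
  // so any "equal within a few ULPs" check is expected to reject the pair.
  std::cout << UlpsFromZero(DBL_EPSILON) << " ULPs from 0.0\n";
}

That distance is why absolute or relative epsilon checks, not ULP checks, are the usual tool near zero.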

View File

@@ -34,8 +34,6 @@ public:
}
size_t size() const { return m_map.size(); }
auto begin() const { return m_map.begin(); }
auto end() const { return m_map.end(); }
protected:
/// @todo buffer_vector is not suitable now, because Key/Value is not default constructible.

View File

@@ -440,66 +440,15 @@ bool AlmostEqual(std::string const & str1, std::string const & str2, size_t mism
return false;
}
namespace
{
// Trim, unquote the string, and unescape two double quotes.
std::string & UnescapeCSVColumn(std::string & s)
{
Trim(s);
if (s.size() < 2)
return s;
if (*s.begin() == '"' && *s.rbegin() == '"')
s = s.substr(1, s.size() - 2);
for (size_t i = 1; i < s.size(); ++i)
if (s[i] == '"' && s[i - 1] == '"')
s.erase(i, 1);
return s;
}
} // namespace
void ParseCSVRow(std::string const & row, char const delimiter, std::vector<std::string> & target)
void ParseCSVRow(std::string const & s, char const delimiter, std::vector<std::string> & target)
{
target.clear();
std::string prevColumns;
for (TokenizeIterator<SimpleDelimiter, std::string::const_iterator, true /* KeepEmptyTokens */> it {row.begin(), row.end(), delimiter}; it; ++it)
TokenizeIterator<SimpleDelimiter, std::string::const_iterator, true /* KeepEmptyTokens */> it(s.begin(), s.end(), delimiter);
for (; it; ++it)
{
std::string_view column = *it;
size_t const quotesCount = std::count(column.begin(), column.end(), '"');
bool const evenQuotes = quotesCount % 2 == 0;
if (prevColumns.empty())
{
if (evenQuotes)
{
if (quotesCount == 0)
target.emplace_back(column);
else
{
std::string strColumn {column};
target.push_back(UnescapeCSVColumn(strColumn));
}
}
else
{
prevColumns = column;
prevColumns.push_back(',');
}
}
else
{
prevColumns.append(column);
if (evenQuotes)
prevColumns.push_back(',');
else
{
target.push_back(UnescapeCSVColumn(prevColumns));
prevColumns.clear();
}
}
std::string column(*it);
Trim(column);
target.push_back(std::move(column));
}
// Special case: if the string is empty, return an empty array instead of {""}.
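UnescapeCSVColumn above strips an optional outer pair of double quotes and collapses doubled quotes inside a column, RFC 4180 style. A simplified stand-in (trimming omitted; the names are illustrative, not the project's API) showing the transformation on quoted columns like those in the CSVReaderQuotes test further down:

#include <cassert>
#include <string>

// Simplified version of the unquoting step: drop outer quotes, turn "" into ".
std::string Unescape(std::string s)
{
  if (s.size() >= 2 && s.front() == '"' && s.back() == '"')
    s = s.substr(1, s.size() - 2);
  for (size_t i = 1; i < s.size(); ++i)
    if (s[i] == '"' && s[i - 1] == '"')
      s.erase(i, 1);
  return s;
}

int main()
{
  assert(Unescape("\"with, comma\"") == "with, comma");
  assert(Unescape("\"\"\"double\"\" quotes\"") == "\"double\" quotes");
}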

View File

@@ -8,8 +8,6 @@
#include <string>
#include <vector>
namespace csv_reader_test
{
using platform::tests_support::ScopedFile;
using Row = coding::CSVReader::Row;
@@ -181,41 +179,3 @@ UNIT_TEST(CSVReaderIterator)
TEST_EQUAL(index, answer.size(), ());
}
}
UNIT_TEST(CSVReaderEmptyColumns)
{
auto const kContentWithEmptyColumns = ",,2,,4,\n,,,,,";
auto const fileName = "test.csv";
ScopedFile sf(fileName, kContentWithEmptyColumns);
Rows const answer = {{"", "", "2", "", "4", ""}, {"", "", "", "", "", ""}};
coding::CSVReader reader(sf.GetFullPath());
size_t index = 0;
while (auto const optionalRow = reader.ReadRow())
{
TEST_EQUAL(*optionalRow, answer[index], ());
++index;
}
TEST_EQUAL(index, answer.size(), ());
TEST(!reader.ReadRow(), ());
TEST(!reader.ReadRow(), ());
}
UNIT_TEST(CSVReaderQuotes)
{
auto const kContentWithQuotes = R"(noquotes, "" , "with space","with, comma","""double"" quotes","""double,"", commas", """""",)";
auto const fileName = "test.csv";
ScopedFile sf(fileName, kContentWithQuotes);
Rows const answer = {{"noquotes", "", "with space", "with, comma", "\"double\" quotes", "\"double,\", commas","\"\"", ""}};
coding::CSVReader reader(sf.GetFullPath());
size_t index = 0;
while (auto const optionalRow = reader.ReadRow())
{
TEST_EQUAL(*optionalRow, answer[index], ());
++index;
}
TEST_EQUAL(index, answer.size(), ());
TEST(!reader.ReadRow(), ());
TEST(!reader.ReadRow(), ());
}
} // namespace csv_reader_test

View File

@@ -1,11 +1,14 @@
#include "testing/testing.hpp"
#include "coding/internal/file_data.hpp"
#include "coding/writer.hpp"
#include "base/logging.hpp"
#include <cstring> // strlen
#include <fstream>
#include <string>
#include <vector>
namespace file_data_test
{
@@ -217,35 +220,6 @@ UNIT_TEST(EmptyFile)
TEST(DeleteFileX(copy), ());
}
UNIT_TEST(RenameOnExistingFile)
{
using namespace base;
std::string const name = "test.empty";
std::string const copy = "test.empty.copy";
{
FileData f(name, FileData::Op::WRITE_TRUNCATE);
uint8_t const x = 1;
f.Write(&x, 1);
}
{
FileData f(copy, FileData::Op::WRITE_TRUNCATE);
uint8_t const x = 2;
f.Write(&x, 1);
}
TEST(RenameFileX(name, copy), ());
{
FileData f(copy, FileData::Op::READ);
uint8_t x;
f.Read(0, &x, 1);
TEST_EQUAL(x, 1, ());
}
TEST(DeleteFileX(copy), ());
}
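The RenameOnExistingFile test above expects a rename onto an existing destination to succeed and to leave the source's contents in place. A minimal sketch of that behaviour using std::filesystem rather than the project's FileData/RenameFileX API (an illustration of the intent, not the tested code path):

#include <cassert>
#include <filesystem>
#include <fstream>
#include <string>

int main()
{
  namespace fs = std::filesystem;
  std::ofstream("test.empty") << "1";
  std::ofstream("test.empty.copy") << "2";

  // rename replaces an existing destination file.
  fs::rename("test.empty", "test.empty.copy");

  std::string content;
  std::ifstream("test.empty.copy") >> content;
  assert(content == "1");
  fs::remove("test.empty.copy");
}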
// Made this 'obvious' test for getline. I had (or not?) behaviour when 'while (getline)' loop
// didn't get last string in file without trailing '\n'.
UNIT_TEST(File_StdGetLine)

View File

@@ -19,7 +19,7 @@ with open('countries-to-generate.json') as f1:
entry = {
"key": c,
"value": {
"profileName": "poor",
"profileName": "normal",
"tileCoordsSubset": list(),
"tilesAreBanned": False
}

View File

@@ -27,7 +27,7 @@
{
"key": "Algeria_Central",
"value": {
"profileName": "poor",
"profileName": "extra_small",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
@@ -9691,262 +9691,6 @@
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Angola",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Australia_Northern Territory",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Australia_Queensland",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Australia_Western Australia",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Brazil_Mato Grosso",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Brazil_North Region_East",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Brazil_North Region_West",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Cameroon_Central",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Canada_Nunavut_North",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Canada_Nunavut_South",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "China_Gansu",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "China_Sichuan",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Colombia_East",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Congo-Kinshasa_West",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Indonesia_Central",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Indonesia_West",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Iran_East",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Kenya",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Libya",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Madagascar",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Malaysia",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Mali",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Mongolia",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Mozambique",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Myanmar",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Vietnam",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Nigeria_South",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Russia_Krasnoyarsk Krai_North",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Russia_Sakha Republic",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Tanzania",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Zambia",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
},
{
"key": "Zimbabwe",
"value": {
"profileName": "poor",
"tileCoordsSubset": [],
"tilesAreBanned": false
}
}
]
}
}

View File

@@ -63,13 +63,13 @@
{
"key": "poor",
"value": {
"alitudesStep": 100,
"alitudesStep": 500,
"gaussianFilterRFactor": 1.0,
"gaussianFilterStDev": 2.0,
"latLonStepFactor": 4,
"latLonStepFactor": 2,
"maxIsolinesLength": 500,
"medianFilterR": 1,
"simplificationZoom": 13
"simplificationZoom": 14
}
},
{

View File

@@ -1,7 +1,6 @@
#include "testing/testing.hpp"
#include "drape_frontend/path_text_handle.hpp"
#include "drape_frontend/visual_params.hpp"
#include "base/logging.hpp"
@@ -16,12 +15,10 @@ bool IsSmooth(m2::SplineEx const & spline)
}
return true;
}
} // namespace
}
UNIT_TEST(Rounding_Spline)
{
df::VisualParams::Init(1.0, 1024);
m2::SplineEx spline1;
df::AddPointAndRound(spline1, m2::PointD(0, 200));
df::AddPointAndRound(spline1, m2::PointD(0, 0));

View File

@@ -3,47 +3,54 @@
#include "routing/routing_helpers.hpp"
#include "indexer/altitude_loader.hpp"
#include "indexer/feature.hpp"
#include "indexer/feature_altitude.hpp"
#include "indexer/feature_data.hpp"
#include "indexer/feature_processor.hpp"
#include "coding/files_container.hpp"
#include "coding/internal/file_data.hpp"
#include "coding/read_write_utils.hpp"
#include "coding/reader.hpp"
#include "coding/succinct_mapper.hpp"
#include "coding/varint.hpp"
#include "geometry/latlon.hpp"
#include "base/assert.hpp"
#include "base/checked_cast.hpp"
#include "base/file_name_utils.hpp"
#include "base/logging.hpp"
#include "base/scope_guard.hpp"
#include "base/stl_helpers.hpp"
#include "base/string_utils.hpp"
#include "defines.hpp"
#include <algorithm>
#include <type_traits>
#include <utility>
#include <vector>
#include "3party/succinct/elias_fano.hpp"
#include "3party/succinct/mapper.hpp"
#include "3party/succinct/rs_bit_vector.hpp"
namespace routing
{
using namespace feature;
using namespace geometry;
namespace
{
using namespace routing;
class SrtmGetter : public AltitudeGetter
{
public:
explicit SrtmGetter(std::string const & srtmDir) : m_srtmManager(srtmDir) {}
// AltitudeGetter overrides:
Altitude GetAltitude(m2::PointD const & p) override
geometry::Altitude GetAltitude(m2::PointD const & p) override
{
return m_srtmManager.GetAltitude(mercator::ToLatLon(p));
}
void PrintStatsAndPurge() override
{
LOG(LINFO, ("Srtm tiles number (x26Mb):", m_srtmManager.GeTilesNumber()));
m_srtmManager.Purge();
return m_srtmManager.GetHeight(mercator::ToLatLon(p));
}
private:
@@ -55,24 +62,39 @@ class Processor
public:
struct FeatureAltitude
{
FeatureAltitude(uint32_t featureId, geometry::Altitudes && altitudes)
: m_featureId(featureId), m_altitudes(std::move(altitudes))
FeatureAltitude() : m_featureId(0) {}
FeatureAltitude(uint32_t featureId, Altitudes const & altitudes)
: m_featureId(featureId), m_altitudes(altitudes)
{
}
uint32_t m_featureId;
feature::Altitudes m_altitudes;
Altitudes m_altitudes;
};
using TFeatureAltitudes = std::vector<FeatureAltitude>;
explicit Processor(AltitudeGetter & altitudeGetter)
: m_minAltitude(geometry::kInvalidAltitude), m_altitudeGetter(altitudeGetter)
: m_altitudeGetter(altitudeGetter), m_minAltitude(geometry::kInvalidAltitude)
{
}
void operator()(FeatureType & f, uint32_t id)
TFeatureAltitudes const & GetFeatureAltitudes() const { return m_featureAltitudes; }
succinct::bit_vector_builder & GetAltitudeAvailabilityBuilder()
{
CHECK_EQUAL(f.GetID().m_index, id, ());
CHECK_EQUAL(id, m_altitudeAvailabilityBuilder.size(), ());
return m_altitudeAvailabilityBuilder;
}
geometry::Altitude GetMinAltitude() const { return m_minAltitude; }
void operator()(FeatureType & f, uint32_t const & id)
{
if (id != m_altitudeAvailabilityBuilder.size())
{
LOG(LERROR, ("There's a gap in feature id order."));
return;
}
bool hasAltitude = false;
SCOPE_GUARD(altitudeAvailabilityBuilding,
@@ -87,19 +109,12 @@ public:
return;
geometry::Altitudes altitudes;
altitudes.reserve(pointsCount);
Altitude minFeatureAltitude = geometry::kInvalidAltitude;
geometry::Altitude minFeatureAltitude = geometry::kInvalidAltitude;
for (size_t i = 0; i < pointsCount; ++i)
{
auto const & pt = f.GetPoint(i);
Altitude const a = m_altitudeGetter.GetAltitude(pt);
geometry::Altitude const a = m_altitudeGetter.GetAltitude(f.GetPoint(i));
if (a == geometry::kInvalidAltitude)
{
// Print warning for missing altitude point (if not a ferry or so).
auto const type = CarModel::AllLimitsInstance().GetHighwayType(feature::TypesHolder(f));
if (type && *type != HighwayType::RouteFerry && *type != HighwayType::RouteShuttleTrain)
LOG(LWARNING, ("Invalid altitude at:", mercator::ToLatLon(pt)));
// One invalid point invalidates the whole feature.
return;
}
@@ -113,7 +128,7 @@ public:
}
hasAltitude = true;
m_featureAltitudes.emplace_back(id, std::move(altitudes));
m_featureAltitudes.emplace_back(id, Altitudes(std::move(altitudes)));
if (m_minAltitude == geometry::kInvalidAltitude)
m_minAltitude = minFeatureAltitude;
@@ -123,15 +138,22 @@ public:
bool HasAltitudeInfo() const { return !m_featureAltitudes.empty(); }
public:
std::vector<FeatureAltitude> m_featureAltitudes;
succinct::bit_vector_builder m_altitudeAvailabilityBuilder;
Altitude m_minAltitude;
bool IsFeatureAltitudesSorted()
{
return std::is_sorted(m_featureAltitudes.begin(), m_featureAltitudes.end(),
base::LessBy(&Processor::FeatureAltitude::m_featureId));
}
private:
AltitudeGetter & m_altitudeGetter;
TFeatureAltitudes m_featureAltitudes;
succinct::bit_vector_builder m_altitudeAvailabilityBuilder;
geometry::Altitude m_minAltitude;
};
} // namespace
namespace routing
{
void BuildRoadAltitudes(std::string const & mwmPath, AltitudeGetter & altitudeGetter)
{
try
@@ -139,27 +161,28 @@ void BuildRoadAltitudes(std::string const & mwmPath, AltitudeGetter & altitudeGe
// Preparing altitude information.
Processor processor(altitudeGetter);
feature::ForEachFeature(mwmPath, processor);
processor.m_altitudeGetter.PrintStatsAndPurge();
if (!processor.HasAltitudeInfo())
{
// Possible for small islands like Bouvet or Willis.
LOG(LWARNING, ("No altitude information for road features of mwm:", mwmPath));
LOG(LINFO, ("No altitude information for road features of mwm:", mwmPath));
return;
}
CHECK(processor.IsFeatureAltitudesSorted(), ());
FilesContainerW cont(mwmPath, FileWriter::OP_WRITE_EXISTING);
auto w = cont.GetWriter(ALTITUDES_FILE_TAG);
AltitudeHeader header;
header.m_minAltitude = processor.m_minAltitude;
header.m_minAltitude = processor.GetMinAltitude();
auto const startOffset = w->Pos();
header.Serialize(*w);
{
// Altitude availability serialization.
coding::FreezeVisitor<Writer> visitor(*w);
succinct::rs_bit_vector(&processor.m_altitudeAvailabilityBuilder).map(visitor);
succinct::bit_vector_builder & builder = processor.GetAltitudeAvailabilityBuilder();
succinct::rs_bit_vector(&builder).map(visitor);
}
header.m_featureTableOffset = base::checked_cast<uint32_t>(w->Pos() - startOffset);
@@ -168,7 +191,8 @@ void BuildRoadAltitudes(std::string const & mwmPath, AltitudeGetter & altitudeGe
{
// Altitude info serialization to memory.
MemWriter<std::vector<uint8_t>> writer(deltas);
for (auto const & a : processor.m_featureAltitudes)
Processor::TFeatureAltitudes const & featureAltitudes = processor.GetFeatureAltitudes();
for (auto const & a : featureAltitudes)
{
offsets.push_back(base::checked_cast<uint32_t>(writer.Pos()));
a.m_altitudes.Serialize(header.m_minAltitude, writer);
@@ -176,7 +200,8 @@ void BuildRoadAltitudes(std::string const & mwmPath, AltitudeGetter & altitudeGe
}
{
// Altitude offsets serialization.
CHECK(base::IsSortedAndUnique(offsets.begin(), offsets.end()), ());
CHECK(std::is_sorted(offsets.begin(), offsets.end()), ());
CHECK(adjacent_find(offsets.begin(), offsets.end()) == offsets.end(), ());
succinct::elias_fano::elias_fano_builder builder(offsets.back(), offsets.size());
for (uint32_t offset : offsets)
@@ -196,10 +221,9 @@ void BuildRoadAltitudes(std::string const & mwmPath, AltitudeGetter & altitudeGe
w->Seek(startOffset);
header.Serialize(w);
w->Seek(endOffset);
LOG(LINFO, (ALTITUDES_FILE_TAG, "section is ready. The size is", header.m_endOffset));
if (processor.HasAltitudeInfo())
LOG(LINFO, ("Min altitude is", processor.m_minAltitude));
LOG(LINFO, ("Min altitude is", processor.GetMinAltitude()));
else
LOG(LINFO, ("Min altitude isn't defined."));
}
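One hunk above shows base::IsSortedAndUnique(offsets) on one side and std::is_sorted plus std::adjacent_find on the other. A standalone, std-only illustration of why that pair expresses "sorted with no equal neighbours", which for a sorted range means no duplicates at all:

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

int main()
{
  std::vector<uint32_t> offsets = {0, 12, 57, 90};
  assert(std::is_sorted(offsets.begin(), offsets.end()));
  // adjacent_find returns end() when no two neighbouring elements are equal.
  assert(std::adjacent_find(offsets.begin(), offsets.end()) == offsets.end());

  std::vector<uint32_t> withDuplicate = {0, 12, 12, 90};
  assert(std::adjacent_find(withDuplicate.begin(), withDuplicate.end()) != withDuplicate.end());
}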

View File

@@ -3,6 +3,8 @@
#include "geometry/point2d.hpp"
#include "geometry/point_with_altitude.hpp"
#include "indexer/feature_altitude.hpp"
#include <string>
namespace routing
@@ -11,7 +13,6 @@ class AltitudeGetter
{
public:
virtual geometry::Altitude GetAltitude(m2::PointD const & p) = 0;
virtual void PrintStatsAndPurge() {}
};
/// \brief Adds altitude section to mwm. It has the following format:

View File

@@ -13,6 +13,7 @@
#include "coding/varint.hpp"
#include "geometry/mercator.hpp"
#include "geometry/parametrized_segment.hpp"
#include "geometry/simplification.hpp"
#include "base/assert.hpp"
@@ -23,18 +24,20 @@
#include <cmath>
#include <fstream>
#include <functional>
#include <iomanip>
#include <iostream>
#include <mutex>
#include <vector>
#include "base/assert.hpp"
#include "base/string_utils.hpp"
#include "defines.hpp"
namespace borders
{
namespace
{
template <class ToDo>
void ForEachCountry(std::string const & baseDir, ToDo && toDo)
{
@@ -46,11 +49,11 @@ void ForEachCountry(std::string const & baseDir, ToDo && toDo)
Platform::GetFilesByExt(bordersDir, BORDERS_EXTENSION, files);
for (std::string file : files)
{
PolygonsList polygons;
std::vector<m2::RegionD> polygons;
if (LoadBorders(bordersDir + file, polygons))
{
base::GetNameWithoutExt(file);
toDo(std::move(file), std::move(polygons));
toDo(file, polygons);
}
}
}
@@ -62,7 +65,7 @@ public:
{
}
void operator()(std::string name, PolygonsList && borders)
void operator()(std::string const & name, std::vector<m2::RegionD> const & borders)
{
// use index in vector as tag
auto w = m_writer.GetWriter(strings::to_string(m_polys.size()));
@@ -102,7 +105,7 @@ private:
std::vector<storage::CountryDef> m_polys;
};
bool ReadPolygon(std::istream & stream, Polygon & poly, std::string const & filename)
bool ReadPolygon(std::istream & stream, m2::RegionD & region, std::string const & filename)
{
std::string line, name;
double lon, lat;
@@ -127,7 +130,7 @@ bool ReadPolygon(std::istream & stream, Polygon & poly, std::string const & file
iss >> lon >> lat;
CHECK(!iss.fail(), ("Incorrect data in", filename));
poly.AddPoint(mercator::FromLatLon(lat, lon));
region.AddPoint(mercator::FromLatLon(lat, lon));
}
// drop inner rings
@@ -143,7 +146,7 @@ bool CountryPolygons::Contains(m2::PointD const & point) const
});
}
bool LoadBorders(std::string const & borderFile, PolygonsList & outBorders)
bool LoadBorders(std::string const & borderFile, std::vector<m2::RegionD> & outBorders)
{
std::ifstream stream(borderFile);
std::string line;
@@ -153,12 +156,12 @@ bool LoadBorders(std::string const & borderFile, PolygonsList & outBorders)
return false;
}
Polygon currentPolygon;
m2::RegionD currentPolygon;
while (ReadPolygon(stream, currentPolygon, borderFile))
{
CHECK(currentPolygon.IsValid(), ("Invalid region in", borderFile));
outBorders.emplace_back(std::move(currentPolygon));
currentPolygon = {};
currentPolygon = m2::RegionD();
}
CHECK(!outBorders.empty(), ("No borders were loaded from", borderFile));
@@ -175,7 +178,7 @@ bool GetBordersRect(std::string const & baseDir, std::string const & country,
return false;
}
PolygonsList borders;
std::vector<m2::RegionD> borders;
CHECK(LoadBorders(bordersFile, borders), ());
bordersRect.MakeEmpty();
for (auto const & border : borders)
@@ -189,16 +192,13 @@ CountryPolygonsCollection LoadCountriesList(std::string const & baseDir)
LOG(LINFO, ("Loading countries in", BORDERS_DIR, "folder in", baseDir));
CountryPolygonsCollection countryPolygonsCollection;
ForEachCountry(baseDir, [&](std::string name, PolygonsList && borders)
ForEachCountry(baseDir, [&](auto const & name, auto const & borders)
{
PolygonsTree polygons;
for (Polygon & border : borders)
{
auto const rect = border.GetRect();
polygons.Add(std::move(border), rect);
}
for (m2::RegionD const & border : borders)
polygons.Add(border, border.GetRect());
countryPolygonsCollection.Add(CountryPolygons(std::move(name), std::move(polygons)));
countryPolygonsCollection.Add(CountryPolygons(name, polygons));
});
LOG(LINFO, ("Countries loaded:", countryPolygonsCollection.GetSize()));
@@ -214,7 +214,7 @@ void GeneratePackedBorders(std::string const & baseDir)
}
void DumpBorderToPolyFile(std::string const & targetDir, storage::CountryId const & mwmName,
PolygonsList const & polygons)
std::vector<m2::RegionD> const & polygons)
{
CHECK(!polygons.empty(), ());
@@ -222,8 +222,7 @@ void DumpBorderToPolyFile(std::string const & targetDir, storage::CountryId cons
std::ofstream poly(filePath);
CHECK(poly.good(), ());
// Used to have fixed precision with 6 digits. And Alaska has 4 digits after comma :) Strange, but as is.
poly << std::setprecision(6) << std::fixed;
poly << std::setprecision(20) << std::fixed;
poly << mwmName << std::endl;
size_t polygonId = 1;
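The two lines above set the coordinate precision to 6 and to 20 fixed digits respectively. A tiny, self-contained look at what that difference means for a single coordinate (the value is illustrative; a double carries only about 15-17 significant digits, so most of the extra fixed digits are noise):

#include <iomanip>
#include <iostream>

int main()
{
  double const lon = -151.74305555555556;
  std::cout << std::setprecision(6) << std::fixed << lon << "\n";   // -151.743056
  std::cout << std::setprecision(20) << std::fixed << lon << "\n";  // 20 digits after the point
}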

View File

@@ -1,5 +1,7 @@
#pragma once
#include "generator/feature_builder.hpp"
#include "storage/storage_defines.hpp"
#include "coding/geometry_coding.hpp"
@@ -9,9 +11,12 @@
#include "geometry/region2d.hpp"
#include "geometry/tree4d.hpp"
#include <memory>
#include <mutex>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>
#define BORDERS_DIR "borders/"
@@ -43,8 +48,9 @@ class CountryPolygons
{
public:
CountryPolygons() = default;
explicit CountryPolygons(std::string && name, PolygonsTree && regions)
: m_name(std::move(name)), m_polygons(std::move(regions))
explicit CountryPolygons(std::string const & name, PolygonsTree const & regions)
: m_name(name)
, m_polygons(regions)
{
}
@@ -91,8 +97,6 @@ public:
private:
std::string m_name;
/// @todo Is it an overkill to store Tree4D for each country's polygon?
PolygonsTree m_polygons;
};
@@ -101,15 +105,11 @@ class CountryPolygonsCollection
public:
CountryPolygonsCollection() = default;
void Add(CountryPolygons && countryPolygons)
void Add(CountryPolygons const & countryPolygons)
{
auto const res = m_countryPolygonsMap.emplace(countryPolygons.GetName(), std::move(countryPolygons));
CHECK(res.second, ());
auto const & inserted = res.first->second;
inserted.ForEachPolygon([&inserted, this](Polygon const & polygon)
{
m_regionsTree.Add(inserted, polygon.GetRect());
auto const it = m_countryPolygonsMap.emplace(countryPolygons.GetName(), countryPolygons);
countryPolygons.ForEachPolygon([&](auto const & polygon) {
m_regionsTree.Add(it.first->second, polygon.GetRect());
});
}
@@ -119,10 +119,9 @@ public:
void ForEachCountryInRect(m2::RectD const & rect, ToDo && toDo) const
{
std::unordered_set<CountryPolygons const *> uniq;
m_regionsTree.ForEachInRect(rect, [&](CountryPolygons const & cp)
{
if (uniq.insert(&cp).second)
toDo(cp);
m_regionsTree.ForEachInRect(rect, [&](auto const & countryPolygons) {
if (uniq.emplace(&countryPolygons.get()).second)
toDo(countryPolygons);
});
}
@@ -143,10 +142,8 @@ private:
std::unordered_map<std::string, CountryPolygons> m_countryPolygonsMap;
};
using PolygonsList = std::vector<Polygon>;
/// @return false if borderFile can't be opened
bool LoadBorders(std::string const & borderFile, PolygonsList & outBorders);
bool LoadBorders(std::string const & borderFile, std::vector<m2::RegionD> & outBorders);
bool GetBordersRect(std::string const & baseDir, std::string const & country,
m2::RectD & bordersRect);
@@ -156,10 +153,10 @@ bool LoadCountriesList(std::string const & baseDir, CountryPolygonsCollection &
void GeneratePackedBorders(std::string const & baseDir);
template <typename Source>
PolygonsList ReadPolygonsOfOneBorder(Source & src)
std::vector<m2::RegionD> ReadPolygonsOfOneBorder(Source & src)
{
auto const count = ReadVarUint<uint32_t>(src);
PolygonsList result(count);
std::vector<m2::RegionD> result(count);
for (size_t i = 0; i < count; ++i)
{
std::vector<m2::PointD> points;
@@ -171,7 +168,7 @@ PolygonsList ReadPolygonsOfOneBorder(Source & src)
}
void DumpBorderToPolyFile(std::string const & filePath, storage::CountryId const & mwmName,
PolygonsList const & polygons);
std::vector<m2::RegionD> const & polygons);
void UnpackBorders(std::string const & baseDir, std::string const & targetDir);
CountryPolygonsCollection const & GetOrCreateCountryPolygonsTree(std::string const & baseDir);

View File

@@ -2,6 +2,7 @@
#include "generator/utils.hpp"
#include "indexer/feature.hpp"
#include "indexer/ftypes_matcher.hpp"
#include "indexer/feature_processor.hpp"
#include "platform/platform.hpp"
@@ -87,38 +88,45 @@ std::string DescriptionsCollectionBuilderStat::LangStatisticsToString() const
void DescriptionsCollector::operator() (FeatureType & ft, uint32_t featureId)
{
// auto const & attractionsChecker = ftypes::AttractionsChecker::Instance();
// if (!attractionsChecker(ft))
// return;
auto const & attractionsChecker = ftypes::AttractionsChecker::Instance();
if (!attractionsChecker(ft))
return;
(*this)(ft.GetMetadata().GetWikiURL(), featureId);
}
void DescriptionsCollector::operator() (std::string const & wikiUrl, uint32_t featureId)
{
descriptions::LangMeta langsMeta;
std::string path;
size_t size = 0;
// First try to get wikipedia url.
if (!wikiUrl.empty())
size = FindPageAndFill(MakePathForWikipedia(m_wikipediaDir, wikiUrl), langsMeta);
// Second try to get wikidata id.
bool const isWikiUrl = !langsMeta.empty();
if (!isWikiUrl)
bool const isWikiUrl = !wikiUrl.empty();
if (isWikiUrl)
{
path = MakePathForWikipedia(m_wikipediaDir, wikiUrl);
}
else
{
// Second try to get wikidata id.
auto const wikidataId = m_wikidataHelper.GetWikidataId(featureId);
if (wikidataId)
size = FindPageAndFill(MakePathForWikidata(m_wikipediaDir, *wikidataId), langsMeta);
path = MakePathForWikidata(m_wikipediaDir, *wikidataId);
}
if (langsMeta.empty())
if (path.empty())
return;
if (size > 0)
descriptions::LangMeta langsMeta;
int const sz = FindPageAndFill(path, langsMeta);
if (sz < 0)
{
LOG(LWARNING, ("Page", path, "not found."));
return;
}
else if (sz > 0)
{
// Add only new loaded pages (not from cache).
m_stat.AddSize(size);
m_stat.AddSize(sz);
m_stat.IncPage();
}
@@ -158,10 +166,10 @@ std::string DescriptionsCollector::FillStringFromFile(std::string const & fullPa
return std::string(std::istreambuf_iterator<char>(stream), std::istreambuf_iterator<char>());
}
size_t DescriptionsCollector::FindPageAndFill(std::string const & path, descriptions::LangMeta & meta)
int DescriptionsCollector::FindPageAndFill(std::string const & path, descriptions::LangMeta & meta)
{
size_t size = 0;
if (path.empty() || !IsValidDir(path))
int size = -1;
if (!IsValidDir(path))
return size;
Platform::FilesList filelist;
@@ -176,27 +184,24 @@ size_t DescriptionsCollector::FindPageAndFill(std::string const & path, descript
continue;
}
if (size < 0)
size = 0;
m_stat.IncCode(code);
auto res = m_path2Index.try_emplace(base::JoinPath(path, filename), 0);
if (res.second)
{
auto const & filePath = res.first->first;
auto content = FillStringFromFile(filePath);
size_t const sz = content.size();
if (sz == 0)
{
LOG(LWARNING, ("Empty descriptions file:", filePath));
m_path2Index.erase(res.first);
continue;
}
auto & strings = m_collection.m_strings;
res.first->second = strings.size();
strings.push_back(std::move(content));
strings.push_back(FillStringFromFile(filePath));
size_t const sz = strings.back().size();
CHECK(sz > 0, ("Empty file:", filePath));
size += sz;
}
m_stat.IncCode(code);
meta.emplace_back(code, res.first->second);
}

View File

@@ -78,8 +78,8 @@ public:
static std::string FillStringFromFile(std::string const & fullPath);
/// @return Aggregated loaded from disk page's size.
size_t FindPageAndFill(std::string const & wikipediaUrl, descriptions::LangMeta & meta);
/// @return -1 If page not found. 0 if page from cache. Size > 0 if page was loaded from disk.
int FindPageAndFill(std::string const & wikipediaUrl, descriptions::LangMeta & meta);
public:
DescriptionsCollectionBuilderStat m_stat;

View File

@@ -188,7 +188,7 @@ public:
for (uint32_t i = 0; i < numPoints; ++i)
{
// Feature segment altitude.
geometry::Altitude altitude = m_srtmManager.GetAltitude(mercator::ToLatLon(f.GetPoint(i)));
geometry::Altitude altitude = m_srtmManager.GetHeight(mercator::ToLatLon(f.GetPoint(i)));
pointAltitudes[i] = altitude == geometry::kInvalidAltitude ? 0 : altitude;
if (i == 0)
{

View File

@@ -113,10 +113,8 @@ public:
StringUtf8Multilang str;
std::string const badUrl = "https://en.wikipedia.org/wiki/Not_exists";
auto const path = DescriptionsCollector::MakePathForWikipedia(m_wikiDir, badUrl);
descriptions::LangMeta meta;
TEST_EQUAL(collector.FindPageAndFill(path, meta), 0, ());
TEST(meta.empty(), ());
TEST_EQUAL(collector.FindPageAndFill(path, meta), -1, ());
}
}

View File

@@ -2,26 +2,13 @@
#include "generator/srtm_parser.hpp"
#include "coding/endianness.hpp"
#include <iostream>
namespace srtm_parser_test
{
using namespace generator;
using namespace geometry;
inline std::string GetBase(ms::LatLon const & coord)
namespace
{
return SrtmTile::GetBase(coord);
}
inline std::string GetBase(ms::LatLon const & coord) { return SrtmTile::GetBase(coord); }
inline SrtmTile::LatLonKey GetKey(ms::LatLon const & coord)
{
return SrtmTile::GetKey(coord);
}
UNIT_TEST(SRTM_FilenameTest)
UNIT_TEST(FilenameTests)
{
auto name = GetBase({56.4566, 37.3467});
TEST_EQUAL(name, "N56E037", ());
@@ -29,110 +16,13 @@ UNIT_TEST(SRTM_FilenameTest)
name = GetBase({34.077433, -118.304569});
TEST_EQUAL(name, "N34W119", ());
name = GetBase({1.0, 1.0});
TEST_EQUAL(name, "N01E001", ());
name = GetBase({0.1, 0.1});
TEST_EQUAL(name, "N00E000", ());
TEST_NOT_EQUAL(GetKey({0.1, 0.1}), GetKey({1.0, 1.0}), ());
name = GetBase({-0.1, -0.1});
TEST_EQUAL(name, "S01W001", ());
TEST_NOT_EQUAL(GetKey({0.1, 0.1}), GetKey({-0.1, -0.1}), ());
name = GetBase({-0.9, -0.9});
TEST_EQUAL(name, "S01W001", ());
TEST_EQUAL(GetKey({-0.9, -0.9}), GetKey({-0.1, -0.1}), ());
name = GetBase({-1.0, -1.0});
TEST_EQUAL(name, "S01W001", ());
TEST_EQUAL(GetKey({-0.9, -0.9}), GetKey({-1.0, -1.0}), ());
name = GetBase({-1.9, -1.1});
TEST_EQUAL(name, "S02W002", ());
TEST_NOT_EQUAL(GetKey({-1.1, -1.1}), GetKey({-1.0, -1.0}), ());
name = GetBase({-35.35, -12.1});
TEST_EQUAL(name, "S36W013", ());
name = GetBase({-34.622358, -58.383654});
TEST_EQUAL(name, "S35W059", ());
}
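The assertions above pin down the SRTM tile naming rule: a tile is identified by the floor of the latitude and longitude, so both (-0.1, -0.1) and (-1.0, -1.0) land in S01W001. A small sketch consistent with those expectations (a guess at the rule, not the project's SrtmTile::GetBase):

#include <cmath>
#include <cstdio>
#include <cstdlib>
#include <string>

std::string TileBase(double lat, double lon)
{
  int const la = static_cast<int>(std::floor(lat));
  int const lo = static_cast<int>(std::floor(lon));
  char buf[16];
  std::snprintf(buf, sizeof(buf), "%c%02d%c%03d",
                la < 0 ? 'S' : 'N', std::abs(la),
                lo < 0 ? 'W' : 'E', std::abs(lo));
  return buf;
}

int main()
{
  std::printf("%s\n", TileBase(-0.1, -0.1).c_str());        // S01W001
  std::printf("%s\n", TileBase(56.4566, 37.3467).c_str());  // N56E037
}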
UNIT_TEST(SRTM_TileTest)
{
SrtmTile tile;
size_t sz;
Altitude * data = tile.DataForTests(sz);
// Fill 5 last rows:
// -4 -4 -4
// -2 -2 -2
// 0 0 0
// 2 2 2
// 4 4 4
size_t row = sz - 1;
for (Altitude a = 4; a >= -4; a -= 2)
{
for (size_t i = row * sz; i < (row + 1) * sz; ++i)
data[i] = ReverseByteOrder(a);
--row;
}
double const len = 1.0 / (sz - 1);
TEST_EQUAL(tile.GetHeight({0, 0}), 4, ());
TEST_ALMOST_EQUAL_ULPS(tile.GetTriangleHeight({0, 0}), 4.0, ());
TEST_ALMOST_EQUAL_ULPS(tile.GetBilinearHeight({0, 0}), 4.0, ());
TEST_EQUAL(tile.GetHeight({len, len}), 2, ());
TEST_ALMOST_EQUAL_ULPS(tile.GetTriangleHeight({len, len}), 2.0, ());
TEST_ALMOST_EQUAL_ULPS(tile.GetBilinearHeight({len, len}), 2.0, ());
double l = len / 2;
Altitude h = tile.GetHeight({l, l});
TEST(h == 4 || h == 2, (h));
TEST_ALMOST_EQUAL_ULPS(tile.GetTriangleHeight({l, l}), 3.0, ());
TEST_ALMOST_EQUAL_ULPS(tile.GetBilinearHeight({l, l}), 3.0, ());
l = 3 * len + len / 2;
h = tile.GetHeight({l, l});
TEST(h == -4 || h == -2, (h));
TEST_ALMOST_EQUAL_ULPS(tile.GetTriangleHeight({l, l}), -3.0, ());
TEST_ALMOST_EQUAL_ULPS(tile.GetBilinearHeight({l, l}), -3.0, ());
}
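In the tile test above, the bottom two filled rows hold heights 4 and 2, and a point halfway between them is expected to interpolate to 3.0. The arithmetic behind that GetBilinearHeight expectation, as a generic bilinear blend (the project's function also handles tile lookup and byte order):

#include <cassert>

// Blend four corner heights with fractional offsets tx, ty in [0, 1].
double Bilinear(double h00, double h10, double h01, double h11, double tx, double ty)
{
  double const bottom = h00 * (1.0 - tx) + h10 * tx;
  double const top = h01 * (1.0 - tx) + h11 * tx;
  return bottom * (1.0 - ty) + top * ty;
}

int main()
{
  // Corners come from a row of 4s and a neighbouring row of 2s, sampled midway.
  assert(Bilinear(4, 4, 2, 2, 0.5, 0.5) == 3.0);
}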
/*
UNIT_TEST(SRTM_SamplesTest)
{
SrtmTileManager manager("/Users/vng/SRTM");
std::initializer_list<ms::LatLon> arr[] = {
{{ 41.899802800880578957, 12.498703841110341273}, { 41.899748897914214751, 12.498642150302543996}, { 41.899315676124750496, 12.498172763721441925}, { 41.899207869324136766, 12.498057428732948893 }},
{{ 41.900315874986389986, 12.499267105007675127}, { 41.900234022973513959, 12.499175909900486658}, { 41.899802800880578957, 12.498703841110341273 }},
{{ 41.899317672545265623, 12.499556783583443575}, { 41.899704976945002954, 12.498910371206022774}, { 41.899716955394147533, 12.49888623132471821}, { 41.899730930248637151, 12.498862091443413647}, { 41.899744905100071435, 12.49882990493497914}, { 41.899760876355117034, 12.498797718426573056}, { 41.899772854793766896, 12.498768214127181864}, { 41.899788826041820755, 12.498736027618775779}, { 41.899802800880578957, 12.498703841110341273 }},
{{ 41.900297907480371862, 12.497869674100513748}, { 41.900259976062137923, 12.497931364908311025}, { 41.899866685818835776, 12.498593870539906447}, { 41.899834743357700972, 12.498647514720602203}, { 41.899818772121129484, 12.498674336810950081}, { 41.899802800880578957, 12.498703841110341273 }},
{{ 41.899728933841061007, 12.497309092412223208}, { 41.900224041013551357, 12.497850898637267392}, { 41.90023202658165502, 12.49786430968242712}, { 41.900224041013551357, 12.497888449563731683}, { 41.899826757739916161, 12.498548272986312213}, { 41.899808790096251698, 12.498556319613442156}, { 41.899802800880578957, 12.498550955195355527}, { 41.89979281885320006, 12.498545590777297321}, { 41.899307690442057606, 12.498009148970311344}, { 41.899301701179375357, 12.497995737925151616}, { 41.899309686862821422, 12.497976962461905259}, { 41.899716955394147533, 12.497311774621238101}, { 41.899728933841061007, 12.497309092412223208 }},
{{ 41.899802800880578957, 12.498550955195355527}, { 41.899748897914214751, 12.498642150302543996}, { 41.899681020039992063, 12.498754803082022136 }},
{{ 41.899912603078725226, 12.498650196929645517}, { 41.899866685818835776, 12.498593870539906447}, { 41.899826757739916161, 12.498548272986312213 }},
{{ 41.899994455503602353, 12.498516086477906128}, { 41.899912603078725226, 12.498650196929645517}, { 41.899912603078725226, 12.498685065647094916}, { 41.900285929140210328, 12.499090079211356397}, { 41.90030589303923847, 12.499111536883646068}, { 41.90070516970908443, 12.498435620206862495}, { 41.900711158840110215, 12.49842489137071766}, { 41.900715151593857399, 12.498408798116514618}, { 41.900713155217019334, 12.498398069280369782}, { 41.90056342677709722, 12.498237136738282516}, { 41.900327853320931126, 12.497979644670920152}, { 41.900317871375655443, 12.497976962461905259}, { 41.900307889428788144, 12.497982326879963466}, { 41.899994455503602353, 12.498516086477906128 }},
{{ 44.759886801735603967, 34.316046940654871378 }, { 44.759500178870737841, 34.315553414192436321 }, { 44.759132599068138347, 34.315443443622029918 }, { 44.758765016927078761, 34.315430032576841768 }, { 44.758071746835689453, 34.315253006780551459 }, { 44.758037464032938146, 34.315255688989566352 }, { 44.757483222565575431, 34.315306650961247215 }, { 44.756708037437867631, 34.315676795808059296 }, { 44.756323297960172169, 34.315652655926726311 }, { 44.755963316624225001, 34.315430032576841768 }, { 44.755833798981250027, 34.315153765046261469 }, { 44.755789991477485046, 34.314949917159594861 }},
{{ 44.759886801735603967, 34.316046940654871378 }, { 44.760006787615907342, 34.315175222718522718 }, { 44.760048687388419353, 34.315011607967392138 }, { 44.760260090322724125, 34.314772891363304552 }, { 44.760437211104594724, 34.314665603001913041 }, { 44.760572431981174191, 34.314383971053246114 }, { 44.760701939002856875, 34.314300822573159166 }, { 44.761223773178279828, 34.314088928059419459 }, { 44.761292334982400121, 34.314091610268434351 }, { 44.761376132632506142, 34.314011143997390718 }, { 44.761667518969709079, 34.313895809008897686 }, { 44.761739889204726239, 34.31379924948365101 }, { 44.761739889204726239, 34.313705372167419227 }, { 44.76183130410882427, 34.313568579506636524 }, { 44.761930336758403826, 34.313549804043390168 }, { 44.761981757490261202, 34.313442515681998657 }, { 44.762050318394869919, 34.313396918128404423 }, { 44.762176013175370315, 34.313391553710346216 }, { 44.762316943361625476, 34.313359367201911709 }, { 44.762610229403847484, 34.313332545111563832 }, { 44.762627369451166714, 34.313343273947708667 }, { 44.762663553978839559, 34.313313769648317475 }, { 44.762673076219179791, 34.313278900930868076 }, { 44.762678789562635018, 34.313233303377273842 }, { 44.762692120695184883, 34.31320379907788265 }, { 44.762720687397411723, 34.313182341405621401 }, { 44.762734018520298207, 34.313107239552635974 }, { 44.762747349640086725, 34.313091146298432932 }, { 44.7628063874193316, 34.313101875134577767 }, { 44.76292065391750441, 34.313123332806839016 }, { 44.762977787081830172, 34.313056277580983533 }, { 44.763063486722373341, 34.313107239552635974 }, { 44.763282496337311045, 34.313088464089389618 }},
{{ 44.756521381971580809, 34.332137512655094724 }, { 44.756469956380023234, 34.331619846311355104 }, { 44.756412816780184016, 34.331308710063325407 }, { 44.756359486436011252, 34.331040489159818208 }, { 44.75633282124546497, 34.330965387306861203 }, { 44.756294728094793811, 34.330860781154484584 }, { 44.756264253556160781, 34.330694484194339111 }, { 44.756294728094793811, 34.330608653505208849 }, { 44.756323297960172169, 34.330533551652223423 }, { 44.756433767973362592, 34.330359208064976428 }, { 44.756555665673857902, 34.330109762624715586 }, { 44.756641374840555159, 34.329940783455526798 }, { 44.756755653531747896, 34.329755711032106547 }, { 44.757010875126020721, 34.329463350247323206 }, { 44.757180387352946127, 34.329117345281815687 }, { 44.757266095593031707, 34.32893495506743875 }, { 44.757448939413642108, 34.328564810220626669 }, { 44.757591785745901802, 34.328301953735206098 }, { 44.757746059388104243, 34.327797698436654628 }, { 44.757807006886359602, 34.327470468934393466 }, { 44.75787366813893442, 34.327239798957407402 }, { 44.757986039790637278, 34.327089595251464971 }, { 44.758026036427494887, 34.326995717935233188 }, { 44.758113648011466523, 34.326907205037088033 }, { 44.75817078592944398, 34.326783823421465058 }, { 44.758237446762365153, 34.326400267529493249 }, { 44.758264111073970071, 34.3263519877668557 }, { 44.758357436067782942, 34.326177644179608706 }, { 44.758412669156371066, 34.325909423276101506 }, { 44.758505993910240761, 34.325775312824362118 }, { 44.758526944344453113, 34.325598287028071809 }, { 44.758479329710219474, 34.325461494367289106 }, { 44.758391718680712756, 34.325370299260100637 }, { 44.758429810449001707, 34.32524423543546277 }, { 44.75854789477109108, 34.325085985102390396 }, { 44.758637410144764601, 34.324914323724158294 }, { 44.758685024648755757, 34.324734615718824671 }, { 44.758707879596705936, 34.324613916312273432 }, { 44.758766921503791991, 34.324568318758679197 }, { 44.758749780311177346, 34.32444225493404133 }, { 44.758799299298182461, 34.324206220538968637 }, { 44.75882786792522694, 34.323988961607142301 }, { 44.758825963350531651, 34.323667096522939346 }, { 44.758843104520543932, 34.323423015500765132 }, { 44.758862150259034252, 34.323181616687634232 }, { 44.758915478293424428, 34.322983133219054253 }, { 44.758944046863021526, 34.322875844857662742 }, { 44.759084984933011242, 34.322862433812474592 }, { 44.758966901708539865, 34.32266126813487972 }, { 44.758865959405987667, 34.32256202640058973 }, { 44.758753589465527511, 34.322331356423575244 }, { 44.75867359717138072, 34.322089957610444344 }, { 44.758690738386590624, 34.321875380887661322 }, { 44.758690738386590624, 34.321754681481081661 }, { 44.758698356702829813, 34.321642028701631943 }, { 44.758643123887310367, 34.321379172216211373 }, { 44.758641219306547043, 34.321231650719283834 }, { 44.758665978851873035, 34.321027802832645648 }, { 44.758732639113461005, 34.320837365991167189 }, { 44.75867359717138072, 34.320502089861804507 }, { 44.75867931091034535, 34.320392119291369681 }, { 44.758808822175438991, 34.320268737675775128 }, { 44.758793585571083895, 34.320099758506557919 }, { 44.758978329127899087, 34.319783257840441593 }, { 44.759109744288181787, 34.319584774371861613 }, { 44.75926782299799811, 34.31950430810081798 }, { 44.759357337256332698, 34.319305824632238 }, { 44.759387810163708821, 34.319016146056469552 }, { 44.759481133342561066, 34.318686234345193498 }, { 44.75951351073673834, 34.318530666221164438 }, { 44.759524938047988485, 34.318332182752584458 }, { 44.759637306488322395, 
34.317903029306989993 }, { 44.759671588341880977, 34.317436324934931235 }, { 44.759871565415501493, 34.317044722415829483 }, { 44.759902038051663453, 34.316647755478669524 }, { 44.759915369824923914, 34.316167640061422617 }, { 44.759886801735603967, 34.316046940654871378 }},
};
using namespace std;
for (auto const & points : arr)
{
for (auto const & p : points)
cout << manager.GetTriangleHeight(p) << ", ";
cout << endl;
}
}
*/
} // namespace srtm_parser_test
} // namespace

View File

@@ -7,7 +7,6 @@
#include "routing/maxspeeds_serialization.hpp"
#include "routing/routing_helpers.hpp"
#include "routing_common/car_model_coefs.hpp"
#include "routing_common/maxspeed_conversion.hpp"
#include "indexer/feature.hpp"
@@ -38,10 +37,6 @@ using namespace routing;
using std::string;
char constexpr kDelim[] = ", \t\r\n";
double constexpr kMinDefSpeedRoadsLengthKm = 5.0;
double constexpr kMaxPossibleDefSpeedKmH = 400.0;
// This factor should be greater than sqrt(2) / 2 - prefer diagonal link to square path.
double constexpr kLinkToMainSpeedFactor = 0.85;
template <class TokenizerT> bool ParseOneSpeedValue(TokenizerT & iter, MaxspeedType & value)
{
@@ -73,21 +68,11 @@ class MaxspeedsMwmCollector
{
double m_lengthKM = 0;
double m_timeH = 0;
double m_speed = -1; // invalid initial value
friend std::string DebugPrint(AvgInfo const & i)
{
std::ostringstream ss;
ss << "AvgInfo{ " << i.m_speed << ", " << i.m_lengthKM << ", " << i.m_timeH << " }";
return ss.str();
}
};
static int constexpr kSpeedsCount = MaxspeedsSerializer::DEFAULT_SPEEDS_COUNT;
static int constexpr kOutsideCityIdx = 0;
static int constexpr SPEEDS_COUNT = MaxspeedsSerializer::DEFAULT_SPEEDS_COUNT;
// 0 - outside a city; 1 - inside a city.
std::unordered_map<HighwayType, AvgInfo> m_avgSpeeds[kSpeedsCount];
std::unordered_map<HighwayType, AvgInfo> m_avgSpeeds[SPEEDS_COUNT];
base::GeoObjectId GetOsmID(uint32_t fid) const
{
@@ -102,14 +87,10 @@ class MaxspeedsMwmCollector
return m_graph->GetRoadGeometry(fid);
}
// OSM data related warning tag for convenient grep.
std::string m_logTag;
public:
MaxspeedsMwmCollector(string const & dataPath, FeatureIdToOsmId const & ft2osm, IndexGraph * graph)
: m_dataPath(dataPath), m_ft2osm(ft2osm), m_graph(graph)
, m_converter(MaxspeedConverter::Instance())
, m_logTag("SpeedsBuilder")
{
}
@@ -165,8 +146,9 @@ public:
(*parentHwType == HighwayType::HighwayTertiary && hwType == HighwayType::HighwayTertiaryLink))
{
// Reduce factor from parent road. See DontUseLinksWhenRidingOnMotorway test.
// 0.85, this factor should be greater than sqrt(2) / 2 - prefer diagonal link to square path.
return converter.ClosestValidMacro(
{ base::asserted_cast<MaxspeedType>(std::lround(s.GetForward() * kLinkToMainSpeedFactor)), s.GetUnits() });
{ base::asserted_cast<MaxspeedType>(std::lround(s.GetForward() * 0.85)), s.GetUnits() });
}
return {};
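The 0.85 factor above must stay above sqrt(2)/2 (about 0.707) so that a diagonal link of length l*sqrt(2), driven at factor*v, is still faster than going around the two sides of the square, length 2*l at v. A quick check of that inequality, mirroring the comment rather than the collector's code:

#include <cassert>
#include <cmath>

int main()
{
  double const factor = 0.85;
  // Travel times in units of l / v.
  double const diagonalTime = std::sqrt(2.0) / factor;  // ~1.66
  double const aroundTime = 2.0;
  assert(factor > std::sqrt(2.0) / 2.0);
  assert(diagonalTime < aroundTime);  // the link stays preferable
}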
@@ -283,16 +265,8 @@ public:
});
}
private:
void AddSpeed(uint32_t featureID, uint64_t osmID, Maxspeed const & speed)
{
MaxspeedType constexpr kMaxReasonableSpeed = 280;
if ((speed.GetSpeedKmPH(true) >= kMaxReasonableSpeed) ||
(speed.IsBidirectional() && speed.GetSpeedKmPH(false) >= kMaxReasonableSpeed))
{
LOG(LWARNING, (m_logTag, "Very big speed", speed, "for way", osmID));
}
// Add converted macro speed.
SpeedInUnits const forward(speed.GetForward(), speed.GetUnits());
CHECK(forward.IsValid(), ());
@@ -304,12 +278,12 @@ private:
if (ftSpeed.m_forward == SpeedMacro::Undefined)
{
LOG(LWARNING, (m_logTag, "Undefined forward speed macro", forward, "for way", osmID));
LOG(LWARNING, ("Undefined forward speed macro", forward, "for way", osmID));
return;
}
if (backward.IsValid() && backwardMacro == SpeedMacro::Undefined)
{
LOG(LWARNING, (m_logTag, "Undefined backward speed macro", backward, "for way", osmID));
LOG(LWARNING, ("Undefined backward speed macro", backward, "for way", osmID));
}
m_maxspeeds.push_back(ftSpeed);
@@ -337,167 +311,36 @@ private:
}
}
else
LOG(LWARNING, (m_logTag, "Undefined HighwayType for way", osmID));
LOG(LWARNING, ("Undefined HighwayType for way", osmID));
}
public:
void CalculateDefaultTypeSpeeds(MaxspeedsSerializer::HW2SpeedMap typeSpeeds[])
void SerializeMaxspeeds() const
{
std::vector<std::pair<HighwayType, InOutCitySpeedKMpH>> baseSpeeds(
kHighwayBasedSpeeds.begin(), kHighwayBasedSpeeds.end());
// Remove links, because they don't conform speed consistency.
baseSpeeds.erase(std::remove_if(baseSpeeds.begin(), baseSpeeds.end(), [](auto const & e)
if (m_maxspeeds.empty())
return;
MaxspeedsSerializer::HW2SpeedMap typeSpeeds[SPEEDS_COUNT];
for (int ind = 0; ind < SPEEDS_COUNT; ++ind)
{
return (e.first == HighwayType::HighwayMotorwayLink ||
e.first == HighwayType::HighwayTrunkLink ||
e.first == HighwayType::HighwayPrimaryLink ||
e.first == HighwayType::HighwaySecondaryLink ||
e.first == HighwayType::HighwayTertiaryLink);
}), baseSpeeds.end());
for (int ind = 0; ind < kSpeedsCount; ++ind)
{
// Calculate average speed.
for (auto & e : m_avgSpeeds[ind])
{
// Check some reasonable conditions when assigning average speed.
if (e.second.m_lengthKM > kMinDefSpeedRoadsLengthKm)
{
auto const speed = e.second.m_lengthKM / e.second.m_timeH;
if (speed < kMaxPossibleDefSpeedKmH)
e.second.m_speed = speed;
}
}
// Prepare reference vector.
bool const inCity = ind != kOutsideCityIdx;
std::sort(baseSpeeds.begin(), baseSpeeds.end(), [inCity](auto const & l, auto const & r)
{
// Sort from biggest to smallest.
return r.second.GetSpeed(inCity).m_weight < l.second.GetSpeed(inCity).m_weight;
});
// First of all check that calculated speed and base speed difference is less than 2x.
for (auto const & e : baseSpeeds)
{
auto & l = m_avgSpeeds[ind][e.first];
if (l.m_speed > 0)
{
double const base = e.second.GetSpeed(inCity).m_weight;
double const factor = l.m_speed / base;
if (factor > 2 || factor < 0.5)
{
LOG(LWARNING, (m_logTag, "More than 2x diff:", e.first, l.m_speed, base));
l.m_speed = -1;
}
}
}
// Check speed's pairs consistency.
// Constraints from the previous iteration can be broken if we modify l-speed on the next iteration.
for (size_t il = 0, ir = 1; ir < baseSpeeds.size(); ++ir)
{
auto & l = m_avgSpeeds[ind][baseSpeeds[il].first];
if (l.m_speed < 0)
{
++il;
continue;
}
auto & r = m_avgSpeeds[ind][baseSpeeds[ir].first];
if (r.m_speed < 0)
continue;
// |l| should be greater than |r|
if (l.m_speed < r.m_speed)
{
LOG(LWARNING, (m_logTag, "Bad def speeds pair:", baseSpeeds[il].first, baseSpeeds[ir].first, l, r));
if (l.m_lengthKM >= r.m_lengthKM)
r.m_speed = l.m_speed;
else
l.m_speed = r.m_speed;
}
il = ir;
}
auto const getSpeed = [this, ind, inCity](HighwayType type)
{
auto const s = m_avgSpeeds[ind][type].m_speed;
if (s > 0)
return s;
auto const * p = kHighwayBasedSpeeds.Find(type);
CHECK(p, ());
return p->GetSpeed(inCity).m_weight;
};
// These speeds: Primary, Secondary, Tertiary, Residential have the biggest routing quality impact.
{
double const primaryS = getSpeed(HighwayType::HighwayPrimary);
double const secondaryS = getSpeed(HighwayType::HighwaySecondary);
double const tertiaryS = getSpeed(HighwayType::HighwayTertiary);
double const residentialS = getSpeed(HighwayType::HighwayResidential);
double constexpr eps = 1.0;
if (primaryS + eps < secondaryS || secondaryS + eps < tertiaryS || tertiaryS + eps < residentialS)
{
LOG(LWARNING, (m_logTag, "Ignore primary, secondary, tertiary, residential speeds:",
primaryS, secondaryS, tertiaryS, residentialS));
m_avgSpeeds[ind][HighwayType::HighwayPrimary].m_speed = -1;
m_avgSpeeds[ind][HighwayType::HighwaySecondary].m_speed = -1;
m_avgSpeeds[ind][HighwayType::HighwayTertiary].m_speed = -1;
m_avgSpeeds[ind][HighwayType::HighwayResidential].m_speed = -1;
}
}
// Update links.
std::pair<HighwayType, HighwayType> arrLinks[] = {
{HighwayType::HighwayMotorway, HighwayType::HighwayMotorwayLink},
{HighwayType::HighwayTrunk, HighwayType::HighwayTrunkLink},
{HighwayType::HighwayPrimary, HighwayType::HighwayPrimaryLink},
{HighwayType::HighwaySecondary, HighwayType::HighwaySecondaryLink},
{HighwayType::HighwayTertiary, HighwayType::HighwayTertiaryLink},
};
for (auto const & e : arrLinks)
{
auto const main = m_avgSpeeds[ind][e.first].m_speed;
auto & link = m_avgSpeeds[ind][e.second].m_speed;
if (main > 0)
link = kLinkToMainSpeedFactor * main;
else
link = -1;
}
// Fill type-speed map.
LOG(LINFO, ("Average speeds", ind == kOutsideCityIdx ? "outside" : "inside", "a city:"));
LOG(LINFO, ("Average speeds", ind == 0 ? "outside" : "inside", "a city:"));
for (auto const & e : m_avgSpeeds[ind])
{
if (e.second.m_speed > 0)
long const speed = std::lround(e.second.m_lengthKM / e.second.m_timeH);
if (speed < routing::kInvalidSpeed)
{
// Store type speeds in Metric system, like VehicleModel profiles.
auto const speedInUnits = m_converter.ClosestValidMacro(
{ static_cast<MaxspeedType>(e.second.m_speed), measurement_utils::Units::Metric });
{ static_cast<MaxspeedType>(speed), measurement_utils::Units::Metric });
LOG(LINFO, ("*", e.first, "=", speedInUnits));
typeSpeeds[ind][e.first] = m_converter.SpeedToMacro(speedInUnits);
}
else
LOG(LWARNING, ("Large average speed for", e.first, "=", speed));
}
}
}
void SerializeMaxspeeds()
{
if (m_maxspeeds.empty())
return;
MaxspeedsSerializer::HW2SpeedMap typeSpeeds[kSpeedsCount];
/// @todo There are too many claims/bugs with Turkey calculated defaults.
/// And yes, now this dummy country check :)
if (m_dataPath.find("Turkey_") == std::string::npos)
CalculateDefaultTypeSpeeds(typeSpeeds);
// Serialize speeds.
FilesContainerW cont(m_dataPath, FileWriter::OP_WRITE_EXISTING);
auto writer = cont.GetWriter(MAXSPEEDS_FILE_TAG);
MaxspeedsSerializer::Serialize(m_maxspeeds, typeSpeeds, *writer);

View File

@@ -4,10 +4,14 @@
#include "routing_common/maxspeed_conversion.hpp"
#include "platform/measurement_utils.hpp"
#include "base/geo_object_id.hpp"
#include <cstdint>
#include <map>
#include <string>
#include <vector>
namespace routing
{

View File

@@ -209,11 +209,7 @@ void RelationTagsWay::Process(RelationElement const & e)
continue;
// Do not pass "ref" tags from boundaries and other, non-route relations to highways.
if (isHighway && p.first == "ref")
continue;
// https://github.com/organicmaps/organicmaps/issues/8246
if (type == "route" && (p.first == "oneway" || p.first == "surface"))
if (p.first == "ref" && isHighway)
continue;
Base::AddCustomTag(p);

View File

@@ -2,116 +2,72 @@
#include "routing/routing_helpers.hpp"
#include "indexer/classificator.hpp"
#include "indexer/classificator_loader.hpp"
#include "indexer/feature.hpp"
#include "indexer/feature_altitude.hpp"
#include "indexer/feature_data.hpp"
#include "indexer/feature_processor.hpp"
#include "geometry/distance_on_sphere.hpp"
#include "geometry/mercator.hpp"
#include "geometry/point_with_altitude.hpp"
#include "platform/country_file.hpp"
#include "platform/local_country_file.hpp"
#include "platform/local_country_file_utils.hpp"
#include "platform/platform.hpp"
#include "base/logging.hpp"
#include <iostream>
#include <mutex>
#include <cstddef>
#include <cstdint>
#include <limits>
#include <vector>
#include <gflags/gflags.h>
DEFINE_string(srtm_path, "", "Path to directory with SRTM files");
DEFINE_string(mwm_path, "", "Path to mwm files (writable dir)");
DEFINE_bool(check_dist, false, "Check feature sections distance");
class SafeTileManager
int main(int argc, char * argv[])
{
generator::SrtmTileManager m_manager;
std::mutex m_mutex;
gflags::SetUsageMessage("SRTM coverage checker.");
gflags::ParseCommandLineFlags(&argc, &argv, true);
uint32_t m_ferry;
Platform & platform = GetPlatform();
if (!FLAGS_mwm_path.empty())
platform.SetWritableDirForTests(FLAGS_mwm_path);
public:
explicit SafeTileManager(std::string const & dir) : m_manager(dir)
if (FLAGS_srtm_path.empty())
{
m_ferry = classif().GetTypeByPath({"route", "ferry"});
CHECK(m_ferry != Classificator::INVALID_TYPE, ());
LOG(LERROR, ("SRTM files directory is not specified."));
return -1;
}
bool IsAltitudeRoad(FeatureType & ft) const
{
feature::TypesHolder types(ft);
return (routing::IsRoad(types) && !types.Has(m_ferry));
}
LOG(LINFO, ("writable dir =", platform.WritableDir()));
LOG(LINFO, ("srtm dir =", FLAGS_srtm_path));
geometry::Altitude GetAltitude(ms::LatLon const & coord)
{
std::lock_guard guard(m_mutex);
return m_manager.GetAltitude(coord);
}
void Purge()
{
std::lock_guard guard(m_mutex);
m_manager.Purge();
}
};
template <class FnT> void ForEachMWM(SafeTileManager & manager, FnT && fn)
{
std::vector<platform::LocalCountryFile> localFiles;
FindAllLocalMapsAndCleanup(std::numeric_limits<int64_t>::max() /* latestVersion */, localFiles);
// Better use ComputationalThreadPool, but we want to call SafeTileManager::Purge after each batch.
size_t constexpr kThreadsCount = 24;
std::vector<std::thread> pool;
generator::SrtmTileManager manager(FLAGS_srtm_path);
classificator::Load();
size_t workers = 0;
for (auto & file : localFiles)
{
// Skip worlds.
if (file.GetDirectory().empty() || file.GetCountryName().starts_with("World"))
continue;
file.SyncWithDisk();
if (!file.OnDisk(MapFileType::Map))
{
LOG_SHORT(LWARNING, ("Map file not found for:", file.GetCountryName()));
LOG(LINFO, ("Warning! Routing file not found for:", file.GetCountryName()));
continue;
}
LOG_SHORT(LINFO, ("Processing", file.GetCountryName()));
auto const path = file.GetPath(MapFileType::Map);
LOG(LINFO, ("Mwm", path, "is being processed."));
pool.emplace_back([&fn, &file]() { fn(file); });
if (++workers == kThreadsCount)
{
for (auto & t : pool)
t.join();
pool.clear();
manager.Purge();
workers = 0;
}
}
for (auto & t : pool)
t.join();
}
void CheckCoverage(SafeTileManager & manager)
{
ForEachMWM(manager, [&](platform::LocalCountryFile const & file)
{
size_t all = 0;
size_t good = 0;
feature::ForEachFeature(file.GetPath(MapFileType::Map), [&](FeatureType & ft, uint32_t)
{
if (!manager.IsAltitudeRoad(ft))
feature::ForEachFeature(path, [&](FeatureType & ft, uint32_t fid) {
if (!routing::IsRoad(feature::TypesHolder(ft)))
return;
ft.ParseGeometry(FeatureType::BEST_GEOMETRY);
@@ -119,136 +75,16 @@ void CheckCoverage(SafeTileManager & manager)
for (size_t i = 0; i < ft.GetPointsCount(); ++i)
{
auto const height = manager.GetAltitude(mercator::ToLatLon(ft.GetPoint(i)));
auto const height = manager.GetHeight(mercator::ToLatLon(ft.GetPoint(i)));
if (height != geometry::kInvalidAltitude)
good++;
}
});
auto const bad = all - good;
auto const percent = (all == 0) ? 0.0 : bad * 100.0 / all;
LOG_SHORT(LINFO, (percent > 10.0 ? "Huge" : "Low", "error rate in:", file.GetCountryName(),
"good:", good, "bad:", bad, "all:", all, "%:", percent));
});
}
void CheckDistance(SafeTileManager & manager)
{
ForEachMWM(manager, [&](platform::LocalCountryFile const & file)
{
size_t all = 0;
size_t added = 0;
size_t invalid = 0;
feature::ForEachFeature(file.GetPath(MapFileType::Map), [&](FeatureType & ft, uint32_t)
{
if (!manager.IsAltitudeRoad(ft))
return;
ft.ParseGeometry(FeatureType::BEST_GEOMETRY);
all += ft.GetPointsCount();
for (size_t i = 1; i < ft.GetPointsCount(); ++i)
{
auto const ll1 = mercator::ToLatLon(ft.GetPoint(i-1));
auto const alt1 = manager.GetAltitude(ll1);
auto const ll2 = mercator::ToLatLon(ft.GetPoint(i));
auto const alt2 = manager.GetAltitude(ll2);
if (alt1 == geometry::kInvalidAltitude || alt2 == geometry::kInvalidAltitude)
{
++invalid;
continue;
}
// Divide into 1-second sections.
size_t const sections = std::round(ms::DistanceOnSphere(ll1.m_lat, ll1.m_lon, ll2.m_lat, ll2.m_lon) * 3600);
if (sections < 2)
continue;
for (size_t j = 1; j < sections; ++j)
{
double const a = j / double(sections);
ms::LatLon const ll(ll2.m_lat * a + ll1.m_lat * (1 - a), ll2.m_lon * a + ll1.m_lon * (1 - a));
// Get the diff between the approximated altitude and the real one.
auto const alt = manager.GetAltitude(ll);
if (alt == geometry::kInvalidAltitude)
{
LOG_SHORT(LWARNING, ("Invalid altitude for the middle point:", ll));
++added;
}
else
{
auto const approxAlt = static_cast<geometry::Altitude>(std::round(alt2 * a + alt1 * (1 - a)));
if (abs(alt - approxAlt) >= std::max(1, abs(alt)/10)) // 10%
++added;
}
}
}
});
auto const percent = added * 100.0 / all;
std::string prefix = "Low";
if (percent >= 1)
prefix = "Huge";
else if (added >= 1000)
prefix = "Medium";
LOG_SHORT(LINFO, (prefix, file.GetCountryName(), "all:", all, "invalid:", invalid, "added:", added, "%:", percent));
});
}
int main(int argc, char * argv[])
{
gflags::SetUsageMessage("SRTM coverage checker.");
gflags::ParseCommandLineFlags(&argc, &argv, true);
if (FLAGS_srtm_path.empty())
{
LOG_SHORT(LERROR, ("SRTM files directory is not specified."));
return -1;
}
classificator::Load();
if (!FLAGS_mwm_path.empty())
{
SafeTileManager manager(FLAGS_srtm_path);
Platform & platform = GetPlatform();
platform.SetWritableDirForTests(FLAGS_mwm_path);
if (FLAGS_check_dist)
CheckDistance(manager);
else
CheckCoverage(manager);
}
else
{
generator::SrtmTileManager manager(FLAGS_srtm_path);
using namespace std;
cout << "Enter lat lon. Or Ctrl + C to exit." << endl;
while (true)
{
double lat, lon;
cin >> lat >> lon;
if (!cin)
{
cout << "Invalid lat lon." << endl;
cin.clear();
cin.ignore(10000, '\n');
}
else
{
auto const & tile = manager.GetTile({lat, lon});
cout << "H = " << tile.GetHeight({lat, lon}) <<
"; Trg = " << tile.GetTriangleHeight({lat, lon}) <<
"; Bilinear = " << tile.GetBilinearHeight({lat, lon});
cout << endl;
}
}
auto const percent = all == 0 ? 0.0 : bad * 100.0 / all;
LOG(LINFO, (percent > 10.0 ? "Huge" : "Low", "error rate in:", file.GetCountryName(),
"good:", good, "bad:", bad, "all:", all, "%:", percent));
}
return 0;

View File

@@ -16,21 +16,15 @@ namespace generator
{
namespace
{
int constexpr kArcSecondsInDegree = 60 * 60;
size_t constexpr kArcSecondsInDegree = 60 * 60;
size_t constexpr kSrtmTileSize = (kArcSecondsInDegree + 1) * (kArcSecondsInDegree + 1) * 2;
struct UnzipMemDelegate : public ZipFileReader::Delegate
{
explicit UnzipMemDelegate(std::vector<uint8_t> & buffer) : m_buffer(buffer), m_completed(false)
{
m_buffer.reserve(kSrtmTileSize);
}
explicit UnzipMemDelegate(std::string & buffer) : m_buffer(buffer), m_completed(false) {}
// ZipFileReader::Delegate overrides:
void OnBlockUnzipped(size_t size, char const * data) override
{
m_buffer.insert(m_buffer.end(), data, data + size);
}
void OnBlockUnzipped(size_t size, char const * data) override { m_buffer.append(data, size); }
void OnStarted() override
{
@@ -40,7 +34,7 @@ struct UnzipMemDelegate : public ZipFileReader::Delegate
void OnCompleted() override { m_completed = true; }
std::vector<uint8_t> & m_buffer;
std::string & m_buffer;
bool m_completed;
};
@@ -51,6 +45,11 @@ std::string GetSrtmContFileName(std::string const & dir, std::string const & bas
} // namespace
// SrtmTile ----------------------------------------------------------------------------------------
SrtmTile::SrtmTile()
{
Invalidate();
}
SrtmTile::SrtmTile(SrtmTile && rhs) : m_data(std::move(rhs.m_data)), m_valid(rhs.m_valid)
{
rhs.Invalidate();
@@ -91,7 +90,7 @@ void SrtmTile::Init(std::string const & dir, ms::LatLon const & coord)
}
else
{
m_data = base::ReadFile(base::JoinPath(dir, file));
GetPlatform().GetReader(file)->ReadAsString(m_data);
}
if (m_data.size() != kSrtmTileSize)
@@ -104,9 +103,11 @@ void SrtmTile::Init(std::string const & dir, ms::LatLon const & coord)
m_valid = true;
}
// static
ms::LatLon SrtmTile::GetCoordInSeconds(ms::LatLon const & coord)
geometry::Altitude SrtmTile::GetHeight(ms::LatLon const & coord) const
{
if (!IsValid())
return geometry::kInvalidAltitude;
double ln = coord.m_lon - static_cast<int>(coord.m_lon);
if (ln < 0)
ln += 1;
@@ -115,98 +116,15 @@ ms::LatLon SrtmTile::GetCoordInSeconds(ms::LatLon const & coord)
lt += 1;
lt = 1 - lt; // from North to South
return { kArcSecondsInDegree * lt, kArcSecondsInDegree * ln };
}
auto const row = static_cast<size_t>(std::round(kArcSecondsInDegree * lt));
auto const col = static_cast<size_t>(std::round(kArcSecondsInDegree * ln));
geometry::Altitude SrtmTile::GetHeight(ms::LatLon const & coord) const
{
if (!IsValid())
return geometry::kInvalidAltitude;
auto const ll = GetCoordInSeconds(coord);
return GetHeightRC(static_cast<size_t>(std::round(ll.m_lat)), static_cast<size_t>(std::round(ll.m_lon)));
}
geometry::Altitude SrtmTile::GetHeightRC(size_t row, size_t col) const
{
size_t const ix = row * (kArcSecondsInDegree + 1) + col;
CHECK_LESS(ix, Size(), (row, col));
CHECK_LESS(ix, Size(), (coord));
return ReverseByteOrder(Data()[ix]);
}
double SrtmTile::GetTriangleHeight(ms::LatLon const & coord) const
{
if (!IsValid())
return geometry::kInvalidAltitude;
auto const ll = GetCoordInSeconds(coord);
m2::Point<int> const p1(static_cast<int>(std::round(ll.m_lon)), static_cast<int>(std::round(ll.m_lat)));
auto p2 = p1;
if (p2.x > ll.m_lon)
{
if (p2.x > 0)
--p2.x;
}
else if (p2.x < ll.m_lon)
{
if (p2.x < kArcSecondsInDegree)
++p2.x;
}
auto p3 = p1;
if (p3.y > ll.m_lat)
{
if (p3.y > 0)
--p3.y;
}
else if (p3.y < ll.m_lat)
{
if (p3.y < kArcSecondsInDegree)
++p3.y;
}
// Approximate height from triangle p1, p2, p3.
// p1.y == p2.y; p1.x == p3.x
// https://stackoverflow.com/questions/36090269/finding-height-of-point-on-height-map-triangles
int const det = (p2.y - p3.y) * (p1.x - p3.x) + (p3.x - p2.x) * (p1.y - p3.y);
if (det == 0)
return GetHeightRC(p1.y, p1.x);
double const a1 = ((p2.y - p3.y) * (ll.m_lon - p3.x) + (p3.x - p2.x) * (ll.m_lat - p3.y)) / det;
double const a2 = ((p3.y - p1.y) * (ll.m_lon - p3.x) + (p1.x - p3.x) * (ll.m_lat - p3.y)) / det;
double const a3 = 1 - a1 - a2;
return a1 * GetHeightRC(p1.y, p1.x) + a2 * GetHeightRC(p2.y, p2.x) + a3 * GetHeightRC(p3.y, p3.x);
}
double SrtmTile::GetBilinearHeight(ms::LatLon const & coord) const
{
if (!IsValid())
return geometry::kInvalidAltitude;
auto const ll = GetCoordInSeconds(coord);
m2::Point<int> const p1(static_cast<int>(ll.m_lon), static_cast<int>(ll.m_lat));
auto p2 = p1;
if (p2.x < kArcSecondsInDegree)
++p2.x;
if (p2.y < kArcSecondsInDegree)
++p2.y;
// https://en.wikipedia.org/wiki/Bilinear_interpolation
double const denom = (p2.x - p1.x) * (p2.y - p1.y);
if (denom == 0)
return GetHeightRC(p1.y, p1.x);
return (GetHeightRC(p1.y, p1.x) * (p2.x - ll.m_lon) * (p2.y - ll.m_lat) +
GetHeightRC(p1.y, p2.x) * (ll.m_lon - p1.x) * (p2.y - ll.m_lat) +
GetHeightRC(p2.y, p1.x) * (p2.x - ll.m_lon) * (ll.m_lat - p1.y) +
GetHeightRC(p2.y, p2.x) * (ll.m_lon - p1.x) * (ll.m_lat - p1.y)) / denom;
}
// static
std::string SrtmTile::GetPath(std::string const & dir, std::string const & base)
{
@@ -214,52 +132,42 @@ std::string SrtmTile::GetPath(std::string const & dir, std::string const & base)
}
// static
SrtmTile::LatLonKey SrtmTile::GetKey(ms::LatLon const & coord)
ms::LatLon SrtmTile::GetCenter(ms::LatLon const & coord)
{
ms::LatLon center{floor(coord.m_lat) + 0.5, floor(coord.m_lon) + 0.5};
if (coord.m_lat < 0)
center.m_lat -= 1.0;
if (coord.m_lon < 0)
center.m_lon -= 1.0;
return {static_cast<int32_t>(center.m_lat), static_cast<int32_t>(center.m_lon)};
return {floor(coord.m_lat) + 0.5, floor(coord.m_lon) + 0.5};
}
// static
std::string SrtmTile::GetBase(ms::LatLon const & coord)
{
auto key = GetKey(coord);
auto center = GetCenter(coord);
std::ostringstream ss;
if (coord.m_lat < 0)
if (center.m_lat < 0)
{
ss << "S";
key.first = -key.first;
center.m_lat *= -1;
center.m_lat += 1;
}
else
{
ss << "N";
}
ss << std::setw(2) << std::setfill('0') << static_cast<int>(center.m_lat);
ss << std::setw(2) << std::setfill('0') << key.first;
if (coord.m_lon < 0)
if (center.m_lon < 0)
{
ss << "W";
key.second = -key.second;
center.m_lon *= -1;
center.m_lon += 1;
}
else
{
ss << "E";
ss << std::setw(3) << key.second;
}
ss << std::setw(3) << static_cast<int>(center.m_lon);
return ss.str();
}
geometry::Altitude * SrtmTile::DataForTests(size_t & sz)
{
m_valid = true;
sz = kArcSecondsInDegree + 1;
m_data.resize(kSrtmTileSize, 0);
return reinterpret_cast<geometry::Altitude *>(m_data.data());
}
void SrtmTile::Invalidate()
{
m_data.clear();
@@ -268,27 +176,47 @@ void SrtmTile::Invalidate()
}
// SrtmTileManager ---------------------------------------------------------------------------------
SrtmTile const & SrtmTileManager::GetTile(ms::LatLon const & coord)
SrtmTileManager::SrtmTileManager(std::string const & dir) : m_dir(dir) {}
geometry::Altitude SrtmTileManager::GetHeight(ms::LatLon const & coord)
{
auto res = m_tiles.emplace(SrtmTile::GetKey(coord), SrtmTile());
if (res.second)
auto const key = GetKey(coord);
auto it = m_tiles.find(key);
if (it == m_tiles.end())
{
SrtmTile tile;
try
{
res.first->second.Init(m_dir, coord);
tile.Init(m_dir, coord);
}
catch (RootException const & e)
{
std::string const base = SrtmTile::GetBase(coord);
LOG(LINFO, ("Can't init SRTM tile:", base, "reason:", e.Msg()));
}
// It's OK to store even invalid tiles and return invalid height
// for them later.
it = m_tiles.emplace(key, std::move(tile)).first;
}
return res.first->second;
return it->second.GetHeight(coord);
}
void SrtmTileManager::Purge()
// static
SrtmTileManager::LatLonKey SrtmTileManager::GetKey(ms::LatLon const & coord)
{
MapT().swap(m_tiles);
auto const tileCenter = SrtmTile::GetCenter(coord);
return {static_cast<int32_t>(tileCenter.m_lat), static_cast<int32_t>(tileCenter.m_lon)};
}
SrtmTile const & SrtmTileManager::GetTile(ms::LatLon const & coord)
{
// Touch the tile to force its loading.
GetHeight(coord);
auto const key = GetKey(coord);
auto const it = m_tiles.find(key);
CHECK(it != m_tiles.end(), (coord));
return it->second;
}
} // namespace generator
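A minimal usage sketch of the resulting API (not part of the diff; the directory and coordinates are illustrative). The tile base name encodes the integer south-west corner of the 1°×1° cell, zero-padded to 2 digits for latitude and 3 for longitude, and the manager caches one tile per cell, returning kInvalidAltitude when the tile is missing or failed to load:

// Sketch only; paths and coordinates are examples.
generator::SrtmTileManager manager("/home/planet/SRTM-patched-europe/");
// (61.4, 10.2) falls into the cell with south-west corner N61 E010, so the
// expected file is <dir>/N61E010.SRTMGL1.hgt.zip (cf. tif2hgt.py below).
CHECK_EQUAL(generator::SrtmTile::GetBase({61.4, 10.2}), "N61E010", ());
// Southern/western coordinates shift to the next cell: (-0.5, -72.3) -> "S01W073".
CHECK_EQUAL(generator::SrtmTile::GetBase({-0.5, -72.3}), "S01W073", ());
auto const altitude = manager.GetHeight({61.4, 10.2});
if (altitude == geometry::kInvalidAltitude)
  LOG(LWARNING, ("No SRTM data for this point."));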

View File

@@ -2,67 +2,44 @@
#include "geometry/latlon.hpp"
#include "indexer/feature_altitude.hpp"
#include "geometry/point_with_altitude.hpp"
#include "base/macros.hpp"
#include <boost/container_hash/hash.hpp>
#include <cstdint>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>
namespace generator
{
class SrtmTile
{
public:
SrtmTile() : m_valid(false) {}
SrtmTile();
SrtmTile(SrtmTile && rhs);
void Init(std::string const & dir, ms::LatLon const & coord);
inline bool IsValid() const { return m_valid; }
/// @return Height in meters at |coord| or kInvalidAltitude.
/// @{
/// Nearest serialized height.
// Returns height in meters at |coord| or kInvalidAltitude.
geometry::Altitude GetHeight(ms::LatLon const & coord) const;
/// Triangle interpolation.
double GetTriangleHeight(ms::LatLon const & coord) const;
/// Bilinear interpolation.
double GetBilinearHeight(ms::LatLon const & coord) const;
geometry::Altitude GetAltitude(ms::LatLon const & coord) const
{
return static_cast<geometry::Altitude>(std::round(GetBilinearHeight(coord)));
}
/// @}
using LatLonKey = std::pair<int32_t, int32_t>;
static LatLonKey GetKey(ms::LatLon const & coord);
static std::string GetBase(ms::LatLon const & coord);
static ms::LatLon GetCenter(ms::LatLon const & coord);
static std::string GetPath(std::string const & dir, std::string const & base);
/// Used in unit tests only to prepare a mock tile.
geometry::Altitude * DataForTests(size_t & sz);
private:
static ms::LatLon GetCoordInSeconds(ms::LatLon const & coord);
geometry::Altitude GetHeightRC(size_t row, size_t col) const;
inline geometry::Altitude const * Data() const
{
return reinterpret_cast<geometry::Altitude const *>(m_data.data());
}
};
inline size_t Size() const { return m_data.size() / sizeof(geometry::Altitude); }
void Invalidate();
std::vector<uint8_t> m_data;
std::string m_data;
bool m_valid;
DISALLOW_COPY(SrtmTile);
@@ -71,35 +48,27 @@ private:
class SrtmTileManager
{
public:
explicit SrtmTileManager(std::string const & dir) : m_dir(dir) {}
explicit SrtmTileManager(std::string const & dir);
geometry::Altitude GetHeight(ms::LatLon const & coord);
SrtmTile const & GetTile(ms::LatLon const & coord);
geometry::Altitude GetAltitude(ms::LatLon const & coord)
{
return GetTile(coord).GetAltitude(coord);
}
size_t GeTilesNumber() const { return m_tiles.size(); }
void Purge();
private:
using LatLonKey = std::pair<int32_t, int32_t>;
static LatLonKey GetKey(ms::LatLon const & coord);
std::string m_dir;
struct Hash
{
size_t operator()(SrtmTile::LatLonKey const & key) const
size_t operator()(LatLonKey const & key) const
{
size_t seed = 0;
boost::hash_combine(seed, key.first);
boost::hash_combine(seed, key.second);
return seed;
return (static_cast<size_t>(key.first) << 32u) | static_cast<size_t>(key.second);
}
};
using MapT = std::unordered_map<SrtmTile::LatLonKey, SrtmTile, Hash>;
MapT m_tiles;
std::unordered_map<LatLonKey, SrtmTile, Hash> m_tiles;
DISALLOW_COPY(SrtmTileManager);
};
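A note on the new Hash: converting a negative int32_t straight to size_t sign-extends it, so for tiles with a negative longitude the high 32 bits of the combined value are all ones regardless of the latitude, which costs some hash quality in the western hemisphere. A hedged variant (sketch only, not part of the diff) masks each half first:

// Mask each component to its low 32 bits before combining, so negative
// latitudes/longitudes do not smear sign bits into the other half.
size_t operator()(LatLonKey const & key) const
{
  auto const hi = static_cast<uint64_t>(static_cast<uint32_t>(key.first));
  auto const lo = static_cast<uint64_t>(static_cast<uint32_t>(key.second));
  return (hi << 32) | lo;
}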

View File

@@ -24,7 +24,6 @@
#include <vector>
#define BOOST_STACKTRACE_GNU_SOURCE_NOT_REQUIRED
#define BOOST_STACKTRACE_USE_ADDR2LINE
#include <boost/stacktrace.hpp>
namespace generator

View File

@@ -55,10 +55,10 @@ void WikiUrlDumper::Dump(size_t cpuCount) const
// static
void WikiUrlDumper::DumpOne(std::string const & path, std::ostream & stream)
{
//auto const & needWikiUrl = ftypes::AttractionsChecker::Instance();
auto const & needWikiUrl = ftypes::AttractionsChecker::Instance();
feature::ForEachFeatureRawFormat(path, [&](FeatureBuilder const & feature, uint64_t /* pos */) {
// if (!needWikiUrl(feature.GetTypesHolder()))
// return;
if (!needWikiUrl(feature.GetTypesHolder()))
return;
auto const wikiUrl = feature.GetMetadata().GetWikiURL();
if (wikiUrl.empty())
@@ -89,10 +89,10 @@ WikiDataFilter::WikiDataFilter(std::string const & path, std::vector<std::string
void WikiDataFilter::FilterOne(std::string const & path, std::map<base::GeoObjectId, std::string> const & idToWikiData,
std::ostream & stream)
{
//auto const & needWikiUrl = ftypes::AttractionsChecker::Instance();
auto const & needWikiUrl = ftypes::AttractionsChecker::Instance();
feature::ForEachFeatureRawFormat(path, [&](FeatureBuilder const & feature, uint64_t /* pos */) {
// if (!needWikiUrl(feature.GetTypesHolder()))
// return;
if (!needWikiUrl(feature.GetTypesHolder()))
return;
auto const it = idToWikiData.find(feature.GetMostGenericOsmId());
if (it == std::end(idToWikiData))

View File

@@ -62,12 +62,8 @@ UNIT_TEST(Segment_Smoke)
TEST(!OnSegment(P(10 + eps, 10), ps), ());
TEST(!OnSegment(P(0, 0), ps), ());
}
}
// This paranoid test doesn't work with Release optimizations (LTO?).
#ifndef NDEBUG
UNIT_TEST(Segment_Paranoid)
{
// Paranoid tests.
{
P ps[] = {{0, 0}, {1e100, 1e100}};
TEST(OnSegment(ps[0], ps), ());
@@ -98,7 +94,6 @@ UNIT_TEST(Segment_Paranoid)
TEST(!OnSegment(P(1e-16, 2.0 * 1e-16), ps), ());
}
}
#endif
UNIT_TEST(Triangle_Smoke)
{
@@ -134,8 +129,6 @@ UNIT_TEST(Triangle_PointInsideSegment)
TEST(!InsideTriangle(P(eps, eps), ps), ());
}
// This paranoid test doesn't work with Release optimizations (LTO?).
#ifndef NDEBUG
UNIT_TEST(Triangle_PointInsidePoint)
{
double constexpr eps = 1.0E-10;
@@ -154,7 +147,6 @@ UNIT_TEST(Triangle_PointInsidePoint)
TEST(!InsideTriangle(P(eps, eps), ps), ());
#endif
}
#endif
UNIT_TEST(PolygonSelfIntersections_IntersectSmoke)
{

View File

@@ -12,38 +12,15 @@ using namespace std;
using m2::Spline;
using m2::PointD;
namespace
{
double constexpr kAlmostZero = 1.0E-16;
void TestEqual(double x, double y)
{
if (fabs(x) < kAlmostZero || fabs(y) < kAlmostZero)
TEST_ALMOST_EQUAL_ABS(x, y, kAlmostZero, ());
else
TEST_ALMOST_EQUAL_ULPS(x, y, ());
}
void TestPointDDir(PointD const & dst, PointD const & src)
{
double const len1 = dst.Length();
double const len2 = src.Length();
if (len1 < kAlmostZero || len2 < kAlmostZero)
{
TestEqual(dst.x, src.x);
TestEqual(dst.y, src.y);
}
else
{
TestEqual(dst.x/len1, src.x/len2);
TestEqual(dst.y/len1, src.y/len2);
}
TEST_ALMOST_EQUAL_ULPS(dst.x/len1, src.x/len2, ());
TEST_ALMOST_EQUAL_ULPS(dst.y/len1, src.y/len2, ());
}
} // namespace
UNIT_TEST(Spline_SmoothedDirections)
UNIT_TEST(SmoothedDirections)
{
vector<PointD> path;
path.push_back(PointD(0, 0));

View File

@@ -157,7 +157,6 @@ public:
}
Container const & Data() const { return m_points; }
Container & MutableData() { return m_points; }
template <typename EqualFn>
static bool IsIntersect(Coord const & x11, Coord const & y11, Coord const & x12,

View File

@@ -77,7 +77,7 @@ class Altitudes
public:
Altitudes() = default;
explicit Altitudes(geometry::Altitudes && altitudes) : m_altitudes(std::move(altitudes)) {}
explicit Altitudes(geometry::Altitudes const & altitudes) : m_altitudes(altitudes) {}
template <class TSink>
void Serialize(geometry::Altitude minAltitude, TSink & sink) const

View File

@@ -7,25 +7,20 @@
#include <memory>
#include <string>
#include <utility>
namespace feature
{
template <class ToDo>
void ForEachFeature(FilesContainerR const & cont, ToDo && toDo)
void ForEachFeature(ModelReaderPtr const & reader, ToDo && toDo)
{
FeaturesVectorTest features(cont);
features.GetVector().ForEach(toDo);
}
template <class ToDo>
void ForEachFeature(ModelReaderPtr reader, ToDo && toDo)
{
ForEachFeature(FilesContainerR(reader), toDo);
FeaturesVectorTest features((FilesContainerR(reader)));
features.GetVector().ForEach(std::forward<ToDo>(toDo));
}
template <class ToDo>
void ForEachFeature(std::string const & fPath, ToDo && toDo)
{
ForEachFeature(std::make_unique<FileReader>(fPath), toDo);
ForEachFeature(std::make_unique<FileReader>(fPath), std::forward<ToDo>(toDo));
}
} // namespace feature
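A minimal usage sketch of the consolidated overloads (the mwm path and handler body are illustrative): a caller only needs the file path and a callable taking FeatureType & and the feature index.

// Sketch only.
feature::ForEachFeature("Netherlands_North Holland_Amsterdam.mwm", [](FeatureType & ft, uint32_t index)
{
  // e.g. filter road features here, as srtm_coverage_checker does above.
});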

View File

@@ -18,7 +18,7 @@
#include "base/thread_pool_computational.hpp"
#include <algorithm>
#include <iostream>
#include <future>
#include <tuple>
namespace
@@ -133,16 +133,22 @@ void SwapIfNeeded(size_t & a, size_t & b)
namespace poly_borders
{
// BordersData::Processor --------------------------------------------------------------------------
void BordersData::Processor::operator()(size_t borderId)
{
if (ShouldLog(borderId, m_data.m_bordersPolygons.size()))
LOG(LINFO, ("Marking:", borderId + 1, "/", m_data.m_bordersPolygons.size()));
auto const & polygon = m_data.m_bordersPolygons[borderId];
for (size_t pointId = 0; pointId < polygon.m_points.size(); ++pointId)
m_data.MarkPoint(borderId, pointId);
}
// BordersData -------------------------------------------------------------------------------------
void BordersData::Init(std::string const & bordersDir)
{
LOG(LINFO, ("Borders path:", bordersDir));
// key - coordinates
// value - {border idx, point idx}
std::unordered_map<int64_t, std::vector<std::pair<size_t, size_t>>> index;
std::vector<std::string> files;
Platform::GetFilesByExt(bordersDir, kBorderExtension, files);
@@ -153,14 +159,11 @@ void BordersData::Init(std::string const & bordersDir)
auto const fullPath = base::JoinPath(bordersDir, file);
size_t polygonId = 1;
borders::PolygonsList borders;
std::vector<m2::RegionD> borders;
borders::LoadBorders(fullPath, borders);
for (auto & region : borders)
for (auto const & region : borders)
{
auto & points = region.MutableData();
m_duplicatedPointsCount += RemoveDuplicatingPointImpl(points);
CHECK_GREATER(points.size(), 1, (fullPath));
Polygon polygon;
// Some mwms have several polygons. For example, for Japan_Kanto_Tokyo, which has 2 polygons, we
// will write 2 files:
// Japan_Kanto_Tokyo.poly1
@@ -169,27 +172,35 @@ void BordersData::Init(std::string const & bordersDir)
m_indexToPolyFileName[prevIndex] = fileCopy;
m_polyFileNameToIndex[fileCopy] = prevIndex++;
for (auto const & point : region.Data())
polygon.m_points.emplace_back(point);
size_t const borderIdx = m_bordersPolygons.size();
for (size_t i = 0; i < points.size(); ++i)
index[PointToInt64Obsolete(points[i], kPointCoordBits)].emplace_back(borderIdx, i);
polygon.m_rect = region.GetRect();
++polygonId;
m_bordersPolygons.emplace_back(region.GetRect(), points);
m_bordersPolygons.emplace_back(std::move(polygon));
}
}
for (auto const & [_, v] : index)
m_duplicatedPointsCount += RemoveDuplicatePoints();
LOG(LINFO, ("Removed:", m_duplicatedPointsCount, "from input data."));
}
void BordersData::MarkPoints()
{
size_t const threadsNumber = std::thread::hardware_concurrency();
LOG(LINFO, ("Start marking points, threads number:", threadsNumber));
base::ComputationalThreadPool threadPool(threadsNumber);
std::vector<std::future<void>> tasks;
for (size_t i = 0; i < m_bordersPolygons.size(); ++i)
{
for (size_t i = 0; i < v.size() - 1; ++i)
for (size_t j = i + 1; j < v.size(); ++j)
{
m_bordersPolygons[v[i].first].m_points[v[i].second].AddLink(v[j].first, v[j].second);
m_bordersPolygons[v[j].first].m_points[v[j].second].AddLink(v[i].first, v[i].second);
}
Processor processor(*this);
tasks.emplace_back(threadPool.Submit(processor, i));
}
LOG(LINFO, ("Removed:", m_duplicatedPointsCount, "from input data."));
for (auto & task : tasks)
task.wait();
}
void BordersData::DumpPolyFiles(std::string const & targetDir)
@@ -225,11 +236,70 @@ void BordersData::DumpPolyFiles(std::string const & targetDir)
size_t BordersData::RemoveDuplicatePoints()
{
size_t count = 0;
auto const pointsAreEqual = [](auto const & p1, auto const & p2) {
return base::AlmostEqualAbs(p1.m_point, p2.m_point, kEqualityEpsilon);
};
for (auto & polygon : m_bordersPolygons)
count += RemoveDuplicatingPointImpl(polygon.m_points);
{
auto & points = polygon.m_points;
auto const last = std::unique(points.begin(), points.end(), pointsAreEqual);
count += std::distance(last, points.end());
points.erase(last, points.end());
if (polygon.m_points.begin() == polygon.m_points.end())
continue;
while (points.size() > 1 && pointsAreEqual(points.front(), points.back()))
{
++count;
points.pop_back();
}
}
return count;
}
void BordersData::MarkPoint(size_t curBorderId, size_t curPointId)
{
MarkedPoint & curMarkedPoint = m_bordersPolygons[curBorderId].m_points[curPointId];
for (size_t anotherBorderId = 0; anotherBorderId < m_bordersPolygons.size(); ++anotherBorderId)
{
if (curBorderId == anotherBorderId)
continue;
if (curMarkedPoint.m_marked)
return;
Polygon & anotherPolygon = m_bordersPolygons[anotherBorderId];
if (!anotherPolygon.m_rect.IsPointInside(curMarkedPoint.m_point))
continue;
for (size_t anotherPointId = 0; anotherPointId < anotherPolygon.m_points.size(); ++anotherPointId)
{
auto & anotherMarkedPoint = anotherPolygon.m_points[anotherPointId];
if (base::AlmostEqualAbs(anotherMarkedPoint.m_point, curMarkedPoint.m_point, kEqualityEpsilon))
{
anotherMarkedPoint.m_marked = true;
curMarkedPoint.m_marked = true;
// Record that the border with id |anotherBorderId| has the same point with id
// |anotherPointId|.
curMarkedPoint.AddLink(anotherBorderId, anotherPointId);
// And vice versa.
anotherMarkedPoint.AddLink(curBorderId, curPointId);
return;
}
}
}
}
void BordersData::PrintDiff()
{
using Info = std::tuple<double, std::string, size_t, size_t>;

View File

@@ -14,10 +14,13 @@ namespace poly_borders
class BordersData
{
public:
inline static double const kEqualityEpsilon = 1.0E-7;
inline static double const kEqualityEpsilon = 1e-20;
inline static std::string const kBorderExtension = ".poly";
void Init(std::string const & bordersDir);
/// \brief Runs |MarkPoint(borderId, pointId)| for each borderId and its pointId. See
/// |MarkPoint| for more details.
void MarkPoints();
void RemoveEmptySpaceBetweenBorders();
@@ -26,29 +29,23 @@ public:
void PrintDiff();
private:
struct Processor
{
explicit Processor(BordersData & data) : m_data(data) {}
void operator()(size_t borderId);
BordersData & m_data;
};
/// \brief Some polygons can contain consecutive duplicate points. This method removes such
/// points and leaves only unique ones.
size_t RemoveDuplicatePoints();
template <class PointsT> static size_t RemoveDuplicatingPointImpl(PointsT & points)
{
auto const equalFn = [](auto const & p1, auto const & p2)
{
return p1.EqualDxDy(p2, kEqualityEpsilon);
};
auto const last = std::unique(points.begin(), points.end(), equalFn);
size_t count = std::distance(last, points.end());
points.erase(last, points.end());
while (points.size() > 1 && equalFn(points.front(), points.back()))
{
++count;
points.pop_back();
}
return count;
}
/// \brief Finds a point on other polygons equal to the point passed as the argument. If such a point
/// is found, the method creates a link of the form "some border (with id = anotherBorderId)
/// has the same point (with id = anotherPointId)".
// If a point belongs to more than 2 polygons, the link is created for an arbitrary pair.
void MarkPoint(size_t curBorderId, size_t curPointId);
/// \brief Checks whether we can replace points from the segment [curLeftPointId, curRightPointId]
/// of |curBorderId| with points from another border in order to get rid of empty space

View File

@@ -21,6 +21,7 @@ bool ReplaceData::operator<(ReplaceData const & rhs) const
// MarkedPoint -------------------------------------------------------------------------------------
void MarkedPoint::AddLink(size_t borderId, size_t pointId)
{
std::lock_guard<std::mutex> lock(*m_mutex);
m_links.emplace(borderId, pointId);
}

View File

@@ -1,19 +1,40 @@
#pragma once
#include "geometry/rect2d.hpp"
#include "geometry/point2d.hpp"
#include "base/assert.hpp"
#include "base/non_intersecting_intervals.hpp"
#include <atomic>
#include <limits>
#include <map>
#include <memory>
#include <mutex>
#include <optional>
#include <set>
#include <string>
#include <utility>
#include <vector>
namespace poly_borders
{
struct AtomicBoolWrapper
{
AtomicBoolWrapper() { m_value = false; }
AtomicBoolWrapper(bool value) { m_value = value; }
AtomicBoolWrapper(std::atomic<bool> const & rhs) { m_value = rhs.load(); }
AtomicBoolWrapper(AtomicBoolWrapper const & rhs) { m_value = rhs.m_value.load(); }
AtomicBoolWrapper operator=(AtomicBoolWrapper const & rhs)
{
m_value = rhs.m_value.load();
return *this;
}
explicit operator bool() const { return m_value.load(); }
std::atomic<bool> m_value;
};
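The wrapper exists because std::atomic<bool> is neither copyable nor movable, so a MarkedPoint kept in std::vector could not be copied or relocated without it. A sketch of the invariant it restores (illustrative, requires <type_traits>):

// std::atomic deletes its copy operations; the wrapper reintroduces them by
// loading the source's value and storing it into the new atomic.
static_assert(!std::is_copy_constructible<std::atomic<bool>>::value, "");
static_assert(std::is_copy_constructible<AtomicBoolWrapper>::value, "");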
struct Link
{
@@ -56,32 +77,23 @@ struct ReplaceData
struct MarkedPoint
{
MarkedPoint() = default;
MarkedPoint(m2::PointD const & point) : m_point(point) {}
explicit MarkedPoint(m2::PointD const & point) : m_point(point) {}
void AddLink(size_t borderId, size_t pointId);
std::optional<Link> GetLink(size_t curBorderId) const;
bool EqualDxDy(MarkedPoint const & p, double eps) const
{
return m_point.EqualDxDy(p.m_point, eps);
}
m2::PointD m_point;
AtomicBoolWrapper m_marked;
std::set<Link> m_links;
std::unique_ptr<std::mutex> m_mutex = std::make_unique<std::mutex>();
};
struct Polygon
{
Polygon() = default;
Polygon(m2::RectD const & rect, std::vector<m2::PointD> const & points) : m_rect(rect)
{
m_points.assign(points.begin(), points.end());
}
Polygon(m2::RectD const & rect, std::vector<MarkedPoint> && points)
: m_rect(rect), m_points(std::move(points))
{
}
: m_rect(rect), m_points(std::move(points)) {}
Polygon(Polygon &&) = default;
Polygon & operator=(Polygon &&) noexcept = default;

View File

@@ -27,12 +27,12 @@ static string const kTestDir = "borders_poly_dir";
void TestMarked(Polygon const & polygon, size_t i)
{
TEST(!polygon.m_points[i].m_links.empty(), (i, "th point must be marked."));
TEST(polygon.m_points[i].m_marked, (i, "th point must be marked."));
}
void TestNotMarked(Polygon const & polygon, size_t i)
{
TEST(polygon.m_points[i].m_links.empty(), (i, "th point must not be marked."));
TEST(!polygon.m_points[i].m_marked, (i, "th point must not be marked."));
}
void CheckByMask(Polygon const & polygons, vector<bool> markedMask)
@@ -77,6 +77,7 @@ UNIT_TEST(PolyBordersPostprocessor_MarkPoints_1)
BordersData bordersData;
bordersData.Init(bordersDir);
bordersData.MarkPoints();
auto const & bordersPolygon1 = bordersData.GetBordersPolygonByName("First" + BordersData::kBorderExtension + "1");
CheckByMask(bordersPolygon1, markedMask1[0]);
@@ -112,6 +113,7 @@ UNIT_TEST(PolyBordersPostprocessor_MarkPoints_2)
BordersData bordersData;
bordersData.Init(bordersDir);
bordersData.MarkPoints();
auto const & bordersPolygon1 = bordersData.GetBordersPolygonByName("First" + BordersData::kBorderExtension + "1");
CheckByMask(bordersPolygon1, markedMask1[0]);
@@ -172,6 +174,7 @@ UNIT_TEST(PolyBordersPostprocessor_MarkPoints_3)
BordersData bordersData;
bordersData.Init(bordersDir);
bordersData.MarkPoints();
auto const & bordersPolygon1 = bordersData.GetBordersPolygonByName("First" + BordersData::kBorderExtension + "1");
CheckByMask(bordersPolygon1, markedMask1[0]);
@@ -193,7 +196,7 @@ UNIT_TEST(PolyBordersPostprocessor_MarkPoints_4)
m2::PointD a(6.0, 2.0);
m2::PointD b(6.0, 4.0);
vector<vector<m2::PointD>> polygons1 = {
{{-2.0, -2.0}, {-2.0, 2.0}, {2.0, 2.0}, {2.0, -2.0}},
{{4.0, 2.0}, {4.0, 4.0}, a, b}
@@ -218,6 +221,7 @@ UNIT_TEST(PolyBordersPostprocessor_MarkPoints_4)
BordersData bordersData;
bordersData.Init(bordersDir);
bordersData.MarkPoints();
auto const & firstBordersPolygon1 = bordersData.GetBordersPolygonByName("First" + BordersData::kBorderExtension + "1");
CheckByMask(firstBordersPolygon1, markedMask1[0]);

View File

@@ -27,6 +27,7 @@ auto constexpr kSmallPointShift = m2::PointD(kSmallShift, kSmallShift);
void Process(BordersData & bordersData, string const & bordersDir)
{
bordersData.Init(bordersDir);
bordersData.MarkPoints();
bordersData.RemoveEmptySpaceBetweenBorders();
}

View File

@@ -42,7 +42,9 @@ int main(int argc, char ** argv)
{
BordersData data;
data.Init(FLAGS_borders_path);
data.MarkPoints();
data.RemoveEmptySpaceBetweenBorders();
data.MarkPoints();
data.PrintDiff();
data.DumpPolyFiles(FLAGS_output_path);
}

View File

@@ -6,7 +6,6 @@
#include "indexer/classificator.hpp"
#include "indexer/classificator_loader.hpp"
#include "indexer/data_source.hpp"
#include "indexer/feature_algo.hpp"
#include "indexer/feature_altitude.hpp"
#include "indexer/feature_data.hpp"
#include "indexer/feature_processor.hpp"
@@ -28,42 +27,27 @@
namespace get_altitude_tests
{
using namespace feature;
using namespace geometry;
using namespace platform;
using namespace std;
class FeaturesGuard
{
public:
FrozenDataSource m_dataSource;
MwmSet::MwmHandle m_handle;
unique_ptr<AltitudeLoaderCached> m_altitudes;
explicit FeaturesGuard(string const & countryId)
{
LocalCountryFile const country = integration::GetLocalCountryFileByCountryId(CountryFile(countryId));
TEST_NOT_EQUAL(country, LocalCountryFile(), ());
TEST(country.HasFiles(), (country));
pair<MwmSet::MwmId, MwmSet::RegResult> const res = m_dataSource.RegisterMap(country);
TEST_EQUAL(res.second, MwmSet::RegResult::Success, ());
m_handle = m_dataSource.GetMwmHandleById(res.first);
TEST(m_handle.IsAlive(), ());
TEST(GetValue(), ());
m_altitudes = make_unique<AltitudeLoaderCached>(*GetValue());
}
MwmValue const * GetValue() { return m_handle.GetValue(); }
};
void TestAltitudeOfAllMwmFeatures(string const & countryId,
Altitude const altitudeLowerBoundMeters,
Altitude const altitudeUpperBoundMeters)
geometry::Altitude const altitudeLowerBoundMeters,
geometry::Altitude const altitudeUpperBoundMeters)
{
FeaturesGuard features(countryId);
FrozenDataSource dataSource;
ForEachFeature(features.GetValue()->m_cont, [&](FeatureType & f, uint32_t const & id)
LocalCountryFile const country = integration::GetLocalCountryFileByCountryId(CountryFile(countryId));
TEST_NOT_EQUAL(country, LocalCountryFile(), ());
TEST(country.HasFiles(), (country));
pair<MwmSet::MwmId, MwmSet::RegResult> const res = dataSource.RegisterMap(country);
TEST_EQUAL(res.second, MwmSet::RegResult::Success, ());
auto const handle = dataSource.GetMwmHandleById(res.first);
TEST(handle.IsAlive(), ());
auto altitudeLoader = make_unique<AltitudeLoaderCached>(*handle.GetValue());
ForEachFeature(country.GetPath(MapFileType::Map), [&](FeatureType & f, uint32_t const & id)
{
if (!routing::IsRoad(TypesHolder(f)))
return;
@@ -73,7 +57,7 @@ void TestAltitudeOfAllMwmFeatures(string const & countryId,
if (pointsCount == 0)
return;
auto const & altitudes = features.m_altitudes->GetAltitudes(id, pointsCount);
geometry::Altitudes const & altitudes = altitudeLoader->GetAltitudes(id, pointsCount);
TEST(!altitudes.empty(),
("Empty altitude vector. MWM:", countryId, ", feature id:", id, ", altitudes:", altitudes));
@@ -85,7 +69,7 @@ void TestAltitudeOfAllMwmFeatures(string const & countryId,
});
}
UNIT_TEST(GetAltitude_AllMwmFeaturesTest)
UNIT_TEST(AllMwmFeaturesGetAltitudeTest)
{
classificator::Load();
@@ -96,49 +80,4 @@ UNIT_TEST(GetAltitude_AllMwmFeaturesTest)
TestAltitudeOfAllMwmFeatures("Netherlands_North Holland_Amsterdam", -25 /* altitudeLowerBoundMeters */,
50 /* altitudeUpperBoundMeters */);
}
/*
void PrintGeometryAndAltitude(std::string const & countryID, ms::LatLon const & ll, double distM)
{
FeaturesGuard features(countryID);
auto const point = mercator::FromLatLon(ll);
m2::RectD const rect = mercator::RectByCenterXYAndSizeInMeters(point, distM);
features.m_dataSource.ForEachInRect([&](FeatureType & ft)
{
if (!routing::IsRoad(TypesHolder(ft)))
return;
ft.ParseGeometry(FeatureType::BEST_GEOMETRY);
size_t const pointsCount = ft.GetPointsCount();
if (pointsCount == 0)
return;
if (GetMinDistanceMeters(ft, point) > distM)
return;
stringstream geomSS;
geomSS.precision(20);
for (size_t i = 0; i < pointsCount; ++i)
{
auto const ll = mercator::ToLatLon(ft.GetPoint(i));
geomSS << "{ " << ll.m_lat << ", " << ll.m_lon << " }, ";
}
LOG(LINFO, (geomSS.str()));
auto const & altitudes = features.m_altitudes->GetAltitudes(ft.GetID().m_index, pointsCount);
LOG(LINFO, (ft.GetName(StringUtf8Multilang::kDefaultCode), altitudes));
}, rect, scales::GetUpperScale());
}
UNIT_TEST(GetAltitude_SamplesTest)
{
classificator::Load();
PrintGeometryAndAltitude("Italy_Lazio", {41.8998667, 12.4985937}, 15.0);
PrintGeometryAndAltitude("Crimea", { 44.7598876, 34.3160482 }, 5.0);
}
*/
} // namespace get_altitude_tests

View File

@@ -12,52 +12,58 @@ from descriptions.descriptions_downloader import log
def parse_args():
parser = argparse.ArgumentParser(description="Download wiki pages.", usage="python3 -m descriptions "
"--output_dir ~/maps_build/descriptions "
"--wikipedia ~/maps_build/wiki_urls.txt "
"--wikidata ~/maps_build/id_to_wikidata.csv "
"--langs en de fr es ru tr"
parser = argparse.ArgumentParser(description="Download wiki pages.")
parser.add_argument(
"--output_dir", metavar="PATH", type=str, help="Output dir for saving pages"
)
parser.add_argument(
"--output_dir", metavar="PATH", type=str, help="Output dir for saving pages."
"--popularity",
metavar="PATH",
type=str,
help="File with popular object ids for which we "
"download wikipedia data. If not given, download "
"for all objects.",
)
parser.add_argument(
"--popularity", metavar="PATH", type=str,
help="File with popular object ids with wikipedia data to download. If not given, download all objects.",
)
parser.add_argument(
"--wikipedia", metavar="PATH", type=str, required=True, help="Input file with wikipedia url.",
"--wikipedia",
metavar="PATH",
type=str,
required=True,
help="Input file with wikipedia url.",
)
parser.add_argument(
"--wikidata", metavar="PATH", type=str, help="Input file with wikidata ids."
)
parser.add_argument("--langs", metavar="LANGS", type=str, nargs="+", action="append",
help="Languages for pages. If left blank, pages in all available languages will be loaded.",
parser.add_argument(
"--langs",
metavar="LANGS",
type=str,
nargs="+",
action="append",
help="Languages for pages. If left blank, pages in all "
"available languages will be loaded.",
)
return parser.parse_args()
def main():
log.setLevel(logging.WARNING)
wikipediaapi.log.setLevel(logging.DEBUG)
wikipediaapi.log.setLevel(logging.WARNING)
args = parse_args()
wikipedia_file = args.wikipedia
wikidata_file = args.wikidata
output_dir = args.output_dir
popularity_file = args.popularity
langs = list(itertools.chain.from_iterable(args.langs))
os.makedirs(output_dir, exist_ok=True)
checker = check_and_get_checker(popularity_file)
download_from_wikipedia_tags(wikipedia_file, output_dir, langs, checker)
if wikidata_file is None:
log.warning(f"Wikidata file not set.")
elif os.path.exists(wikidata_file):
download_from_wikidata_tags(wikidata_file, output_dir, langs, checker)
else:
log.warning(f"Wikidata ({wikidata_file}) file not found.")
log.warning(f"Wikidata ({wikidata_file}) file not set.")
main()

View File

@@ -1,3 +1,4 @@
import functools
import json
import logging
import os
@@ -6,9 +7,7 @@ import time
import types
import urllib.error
import urllib.parse
import http.client
from concurrent.futures import ThreadPoolExecutor
from multiprocessing.pool import ThreadPool
import htmlmin
import requests
@@ -16,7 +15,8 @@ import wikipediaapi
from bs4 import BeautifulSoup
from wikidata.client import Client
from descriptions.exceptions import GettingError, ParseError
from descriptions.exceptions import GettingError
from descriptions.exceptions import ParseError
"""
This script downloads Wikipedia pages for different languages.
@@ -24,8 +24,9 @@ This script downloads Wikipedia pages for different languages.
log = logging.getLogger(__name__)
WORKERS = 80
REQUEST_ATTEMPTS = 8
ATTEMPTS_PAUSE_SECONDS = 4.0
CHUNK_SIZE = 16
REQUEST_ATTEMPTS = 32
ATTEMPTS_PAUSE_MS = 4000
HEADERS = {f"h{x}" for x in range(1, 7)}
BAD_SECTIONS = {
@@ -37,6 +38,7 @@ BAD_SECTIONS = {
"Further reading",
"References",
],
"ru": ["Литература", "Ссылки", "См. также", "Библиография", "Примечания"],
"de": [
"Einzelnachweise",
"Weblinks",
@@ -47,6 +49,7 @@ BAD_SECTIONS = {
"Filme",
"Einzelbelege",
],
"es": ["Vínculos de interés", "Véase también", "Enlaces externos", "Referencias"],
"fr": [
"Bibliographie",
"Lien externe",
@@ -56,9 +59,6 @@ BAD_SECTIONS = {
"Notes et références",
"Articles connexes",
],
"es": ["Vínculos de interés", "Véase también", "Enlaces externos", "Referencias"],
"ru": ["Литература", "Ссылки", "См. также", "Библиография", "Примечания"],
"tr": ["Kaynakça", "Ayrıca bakınız", "Dış bağlantılar", "Notlar", "Dipnot"],
}
@@ -73,9 +73,9 @@ def try_get(obj, prop, *args, **kwargs):
requests.exceptions.ConnectionError,
requests.exceptions.ReadTimeout,
json.decoder.JSONDecodeError,
http.client.HTTPException,
) as e:
log.debug(e)
):
time.sleep(random.uniform(0.0, 1.0 / 1000.0 * ATTEMPTS_PAUSE_MS))
attempts -= 1
except urllib.error.HTTPError as e:
if e.code == 404:
raise GettingError(f"Page not found {e.msg}")
@@ -84,10 +84,9 @@ def try_get(obj, prop, *args, **kwargs):
except urllib.error.URLError:
raise GettingError(f"URLError: {obj}, {prop}, {args}, {kwargs}")
time.sleep(random.uniform(0.0, ATTEMPTS_PAUSE_SECONDS))
attempts -= 1
raise GettingError(f"Getting {prop} field failed")
raise GettingError(
f"Getting {prop} field failed. " f"All {REQUEST_ATTEMPTS} attempts are spent"
)
def read_popularity(path):
@@ -106,6 +105,7 @@ def read_popularity(path):
def should_download_page(popularity_set):
@functools.wraps(popularity_set)
def wrapped(ident):
return popularity_set is None or ident in popularity_set
@@ -173,21 +173,18 @@ def download(directory, url):
try:
lang, page_name = get_page_info(url)
except ParseError:
log.exception(f"Parsing failed. {url} is incorrect.")
log.exception("Parsing failed. {url} is incorrect.")
return None
path = os.path.join(directory, f"{lang}.html")
if os.path.exists(path):
log.debug(f"{path} already exists.")
log.warning(f"{path} already exists.")
return None
page = get_wiki_page(lang, page_name)
try:
text = try_get(page, "text")
except GettingError as e:
log.exception(f"Error: page {page_name} is not downloaded for lang {lang} and url {url} ({e}).")
except GettingError:
log.exception(f"Error: page is not downloaded {page_name}.")
return None
page_size = len(text)
if page_size > 0:
os.makedirs(directory, exist_ok=True)
@@ -197,15 +194,15 @@ def download(directory, url):
file.write(text)
else:
log.warning(f"Page {url} is empty. It has not been saved.")
return text
def get_wiki_langs(url):
lang, page_name = get_page_info(url)
page = get_wiki_page(lang, page_name)
curr_lang = [(lang, url)]
curr_lang = [
(lang, url),
]
try:
langlinks = try_get(page, "langlinks")
return (
@@ -213,7 +210,7 @@ def get_wiki_langs(url):
+ curr_lang
)
except GettingError as e:
log.exception(f"Error: no languages for page {page_name} with url {url} ({e}).")
log.warning(f"Error: no languages for {url} ({e}).")
return curr_lang
@@ -229,17 +226,17 @@ def download_all_from_wikipedia(path, url, langs):
def wikipedia_worker(output_dir, checker, langs):
@functools.wraps(wikipedia_worker)
def wrapped(line):
if not line.strip():
return
try:
# The first field is mwm_path, i.e. the mwm that added this line entry.
_, ident, url = line.split("\t")
mwm_path, ident, url = line.split("\t")
ident = int(ident)
if not checker(ident):
return
url = url.strip()
except (AttributeError, ValueError):
except (AttributeError, IndexError):
log.exception(f"{line} is incorrect.")
return
parsed = urllib.parse.urlparse(url)
@@ -251,9 +248,11 @@ def wikipedia_worker(output_dir, checker, langs):
def download_from_wikipedia_tags(input_file, output_dir, langs, checker):
with open(input_file) as file:
_ = file.readline() # skip header
with ThreadPoolExecutor(WORKERS) as pool:
pool.map(wikipedia_worker(output_dir, checker, langs), file)
_ = file.readline()
pool = ThreadPool(processes=WORKERS)
pool.map(wikipedia_worker(output_dir, checker, langs), file, CHUNK_SIZE)
pool.close()
pool.join()
def get_wikidata_urls(entity, langs):
@@ -270,6 +269,7 @@ def get_wikidata_urls(entity, langs):
def wikidata_worker(output_dir, checker, langs):
@functools.wraps(wikidata_worker)
def wrapped(line):
if not line.strip():
return
@@ -279,7 +279,7 @@ def wikidata_worker(output_dir, checker, langs):
wikidata_id = wikidata_id.strip()
if not checker(ident):
return
except (AttributeError, ValueError):
except (AttributeError, IndexError):
log.exception(f"{line} is incorrect.")
return
client = Client()
@@ -302,8 +302,10 @@ def download_from_wikidata_tags(input_file, output_dir, langs, checker):
wikidata_output_dir = os.path.join(output_dir, "wikidata")
os.makedirs(wikidata_output_dir, exist_ok=True)
with open(input_file) as file:
with ThreadPoolExecutor(WORKERS) as pool:
pool.map(wikidata_worker(wikidata_output_dir, checker, langs), file)
with ThreadPool(processes=WORKERS) as pool:
pool.map(
wikidata_worker(wikidata_output_dir, checker, langs), file, CHUNK_SIZE
)
def check_and_get_checker(popularity_file):

View File

@@ -1,5 +1,5 @@
import os
import subprocess
class MapsGeneratorError(Exception):
pass
@@ -35,24 +35,9 @@ class FailedTest(MapsGeneratorError):
def wait_and_raise_if_fail(p):
if p.wait() != os.EX_OK:
if type(p) is subprocess.Popen:
args = p.args
stdout = p.stdout
stderr = p.stderr
logs = None
errors = None
if type(stdout) is not type(None):
logs = stdout.read(256).decode()
if type(stderr) is not type(None):
errors = stderr.read(256).decode()
if errors != logs:
logs += " and " + errors
msg = f"The launch of {args.pop(0)} failed.\nArguments used: {' '.join(args)}\nSee details in {logs}"
raise BadExitStatusError(msg)
else:
args = p.args
logs = p.output.name
if p.error.name != logs:
logs += " and " + p.error.name
msg = f"The launch of {args.pop(0)} failed.\nArguments used: {' '.join(args)}\nSee details in {logs}"
raise BadExitStatusError(msg)
args = p.args
logs = p.output.name
if p.error.name != logs:
logs += " and " + p.error.name
msg = f"The launch of {args.pop(0)} failed.\nArguments used: {' '.join(args)}\nSee details in {logs}"
raise BadExitStatusError(msg)

View File

@@ -83,7 +83,7 @@ MAIN_OUT_PATH = os.path.join(_WORK_PATH, "generation")
CACHE_PATH = ""
# Developer section:
BUILD_PATH = os.path.join(_WORK_PATH, "omim-build-relwithdebinfo")
BUILD_PATH = os.path.join(_WORK_PATH, "omim-build-release")
OMIM_PATH = os.path.join(_WORK_PATH, "omim")
# Osm tools section:

View File

@@ -12,15 +12,15 @@ import os
import shutil
import tarfile
from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor, as_completed
from multiprocessing.pool import ThreadPool
from typing import AnyStr
from typing import Type
import maps_generator.generator.diffs as diffs
import maps_generator.generator.stages_tests as st
# from descriptions.descriptions_downloader import check_and_get_checker
# from descriptions.descriptions_downloader import download_from_wikidata_tags
# from descriptions.descriptions_downloader import download_from_wikipedia_tags
from descriptions.descriptions_downloader import check_and_get_checker
from descriptions.descriptions_downloader import download_from_wikidata_tags
from descriptions.descriptions_downloader import download_from_wikipedia_tags
from maps_generator.generator import coastline
from maps_generator.generator import settings
from maps_generator.generator import steps
@@ -140,10 +140,10 @@ class StageFeatures(Stage):
@outer_stage
@production_only
@helper_stage_for("StageDescriptions")
class StageDownloadDescriptions(Stage):
def apply(self, env: Env):
"""
run_gen_tool(
env.gen_tool,
out=env.get_subprocess_out(),
@@ -157,8 +157,7 @@ class StageDownloadDescriptions(Stage):
threads_count=settings.THREADS_COUNT,
)
# https://en.wikipedia.org/wiki/Wikipedia:Multilingual_statistics
langs = ("en", "de", "fr", "es", "ru", "tr")
langs = ("en", "ru", "es", "fr", "de")
checker = check_and_get_checker(env.paths.popularity_path)
download_from_wikipedia_tags(
env.paths.wiki_url_path, env.paths.descriptions_path, langs, checker
@@ -166,20 +165,6 @@ class StageDownloadDescriptions(Stage):
download_from_wikidata_tags(
env.paths.id_to_wikidata_path, env.paths.descriptions_path, langs, checker
)
"""
src = "/home/planet/descriptions"
dest = env.paths.descriptions_path
# An empty "descriptions" folder may already exist.
try:
if os.path.isdir(dest):
shutil.rmtree(dest)
else:
os.remove(dest)
except OSError as e:
print("rmtree error: %s - %s" % (e.filename, e.strerror))
os.symlink(src, dest)
@outer_stage
@@ -189,10 +174,11 @@ class StageMwm(Stage):
tmp_mwm_names = env.get_tmp_mwm_names()
if len(tmp_mwm_names):
logger.info(f'Number of feature data .mwm.tmp country files to process: {len(tmp_mwm_names)}')
with ThreadPoolExecutor(settings.THREADS_COUNT) as pool:
with ThreadPool(settings.THREADS_COUNT) as pool:
pool.map(
lambda c: StageMwm.make_mwm(c, env),
tmp_mwm_names
tmp_mwm_names,
chunksize=1,
)
else:
# TODO: list all countries that were not found?
@@ -306,6 +292,7 @@ class StageIsolinesInfo(Stage):
@country_stage
@production_only
class StageDescriptions(Stage):
def apply(self, env: Env, country, **kwargs):
steps.step_description(env, country, **kwargs)

View File

@@ -9,8 +9,6 @@ from maps_generator.utils.file import download_file
def make_test_booking_data(max_days):
def test_booking_data(env: Env, logger, *args, **kwargs):
if not settings.HOTELS_URL:
return None
base_url, _ = settings.HOTELS_URL.rsplit("/", maxsplit=1)
url = f"{base_url}/meta.json"
meta_path = os.path.join(env.paths.tmp_dir(), "hotels-meta.json")

View File

@@ -33,12 +33,10 @@ logger = logging.getLogger("maps_generator")
def multithread_run_if_one_country(func):
@functools.wraps(func)
def wrap(env, country, **kwargs):
def wrap(env, *args, **kwargs):
if len(env.countries) == 1:
kwargs.update({"threads_count": settings.THREADS_COUNT})
elif country == 'Taiwan_North':
kwargs.update({"threads_count": 4})
func(env, country, **kwargs)
func(env, *args, **kwargs)
return wrap

View File

@@ -1,109 +0,0 @@
[Developer]
# Path to the `organicmaps` source code repository:
OMIM_PATH: ~/OM/organicmaps
# A path with the generator_tool binary:
BUILD_PATH: /root/OM/omim-build-relwithdebinfo
#${Developer:OMIM_PATH}/../omim-build-release
[Main]
# A special small planet file will be downloaded if DEBUG is set to 1.
DEBUG: 0
# A main working directory. There is a subdirectory created for each generator run
# which contains the planet and other downloads, temporary build files, logs and completed MWMs.
MAIN_OUT_PATH: ${Developer:OMIM_PATH}/../maps_build
# Path for storing caches for nodes, ways, relations.
# If it's not set then caches are stored inside the directory of the current build.
# CACHE_PATH: ${Main:MAIN_OUT_PATH}/cache
[Generator tool]
# Path to the data/ folder in the repository:
USER_RESOURCE_PATH: ${Developer:OMIM_PATH}/data
# Features stage only parallelism level. Set to 0 for auto detection.
THREADS_COUNT_FEATURES_STAGE: 16
# Do not change it. This is determined automatically.
NODE_STORAGE: mem
[Osm tools]
# Path to osmctools binaries:
OSM_TOOLS_PATH: /usr/bin/
#${Developer:OMIM_PATH}/../osmctools
# If the binaries are found neither in the configured path nor system-wide,
# then the tools are built from the sources:
OSM_TOOLS_SRC_PATH: ${Developer:OMIM_PATH}/tools/osmctools
[Logging]
# maps_generator's general (python output only) log file path and name.
# More detailed logs that include output of the `generator_tool` binary
# are located in the `logs/` subdir of a particular build directory,
# e.g. `maps_build/2023_06_04__20_05_07/logs/`.
LOG_FILE_PATH: ${Main:MAIN_OUT_PATH}/generation.log
[External]
# Planet file location. It should be a dump of OSM data in osm.pbf format.
# By default it's an entire planet from "planet.openstreetmap.org".
# Or set it to a particular country/region extract from e.g. [Geofabrik](http://download.geofabrik.de/index.html).
# Note that an entire planet generation takes 40+ hours on a 256GB RAM server (and 1TB+ disk space).
# Stick to smaller extracts unless you have a machine this large.
# Here and further, it's possible to specify either a URL (to be downloaded automatically)
# or a local file path like file:///path/to/file.
# A sample URL to download the latest OSM dump for North Macedonia:
PLANET_URL: file:///home/planet/planet/laos.o5m
# Location of the md5 checksum of the planet file:
PLANET_MD5_URL: ${External:PLANET_URL}.md5
# A base URL for the latest_coasts.geom and latest_coasts.rawgeom files.
# For example, if PLANET_COASTS_URL = https://somesite.com/download/
# then the https://somesite.com/download/latest_coasts.geom url will be used to download latest_coasts.geom and
# the https://somesite.com/download/latest_coasts.rawgeom url will be used to download latest_coasts.rawgeom.
# Comment out to skip downloading the coastline files.
PLANET_COASTS_URL: file:///home/planet/
# Should be 'true' for an entire planet build: it adds a special routing section to World.mwm
# used for alerting about absent regions without which the route can't be built.
# NEED_BUILD_WORLD_ROADS: true
# Subway file location, see docs/SUBWAY_GENERATION.md if you want to generate your own file.
# Comment out to disable subway layer generation.
SUBWAY_URL: file:///home/planet/subway/subway.transit.json
# Location of the EXPERIMENTAL GTFS-extracted public transport transit files:
# TRANSIT_URL:
# Urls for production maps generation.
# UGC_URL:
# HOTELS_URL:
# PROMO_CATALOG_CITIES:
# POPULARITY_URL:
# FOOD_URL:
# FOOD_TRANSLATIONS_URL:
SRTM_PATH: /home/planet/SRTM-patched-europe/
ISOLINES_PATH: /home/planet/isolines-pastk3/
ADDRESSES_PATH: /home/planet/tiger/
# Local path (not a URL!) to .csv files.
UK_POSTCODES_URL: /home/planet/postcodes/gb-postcode-data/gb_postcodes.csv
US_POSTCODES_URL: /home/planet/postcodes/us-postcodes/uszips.csv
[Stages]
# Set to 1 to update the entire OSM planet file (as taken from "planet.openstreetmap.org")
# via the osmupdate tool before generation. Not for use with partial planet extracts.
NEED_PLANET_UPDATE: 0
# If you want to calculate diffs you need to specify where the old maps are,
# e.g. ${Main:MAIN_OUT_PATH}/2021_03_16__09_00_00/
DATA_ARCHIVE_DIR: ${Generator tool:USER_RESOURCE_PATH}
# How many versions in the archive to use for diff calculation:
DIFF_VERSION_DEPTH: 2
[Common]
# Default parallelism level for the most of jobs. Set to 0 for auto detection.
THREADS_COUNT: 0
[Stats]
# Path to rules for calculating statistics by type:
STATS_TYPES_CONFIG: ${Developer:OMIM_PATH}/tools/python/maps_generator/var/etc/stats_types_config.txt

View File

@@ -1,37 +0,0 @@
import sys
import os
import shutil
import zipfile
def main():
if len(sys.argv) != 4:
print("Usage: tif2hgt.py <src aster path> <tmp path> <dest srtm path>")
return
aster_path = str(sys.argv[1])
tmp_path = str(sys.argv[2])
srtm_path = str(sys.argv[3])
for file in os.listdir(aster_path):
if file.endswith(".zip"):
dest_dir = tmp_path + '/' + file
with zipfile.ZipFile(aster_path + '/' + file, 'r') as zip_ref:
os.mkdir(dest_dir)
zip_ref.extractall(dest_dir)
for tif_file in os.listdir(dest_dir):
# Sample: ASTGTMV003_N61E010_dem.tif
if tif_file.endswith("dem.tif"):
print("Process: " + tif_file[11:18])
arch_name = tif_file[11:18] + '.SRTMGL1.hgt'
out_file = srtm_path + '/' + arch_name
os.system('gdal_translate -of SRTMHGT ' + dest_dir + '/' + tif_file + ' ' + out_file)
zipfile.ZipFile(out_file + '.zip', mode='w', compression=zipfile.ZIP_DEFLATED).write(out_file, arch_name)
os.remove(out_file)
shutil.rmtree(dest_dir)
break
main()

View File

@@ -1,55 +0,0 @@
# See run-docker.sh for cloning, building, and running the maps generator Docker routine
FROM ubuntu:noble
ARG DEBIAN_FRONTEND=noninteractive
ENV TZ=Etc/UTC
RUN apt-get update -qq \
&& apt-get install -y --no-install-recommends \
curl \
osmctools \
rclone \
sftp \
sshpass \
vim \
wget \
build-essential \
clang \
cmake \
ninja-build \
python3 \
python3-pip \
python3.12-venv \
qt6-base-dev \
qt6-positioning-dev \
libc++-dev \
libfreetype-dev \
libglvnd-dev \
libgl1-mesa-dev \
libharfbuzz-dev \
libicu-dev \
libqt6svg6-dev \
libqt6positioning6-plugins \
libqt6positioning6 \
libsqlite3-dev \
libxrandr-dev \
libxinerama-dev \
libxcursor-dev \
libxi-dev \
zlib1g-dev \
&& rm -rf /var/cache/apt/* /var/lib/apt/lists/*;
RUN mkdir /root/OM
# When running, bind mount the repo here
RUN mkdir /root/OM/organicmaps
# And a volume to store the large output/temp files here
RUN mkdir /root/OM/maps_build
# And a volume to store >10gb files for the planet here
RUN mkdir /home/planet
WORKDIR /root/OM/organicmaps
# For debugging
#CMD /bin/bash
CMD /root/OM/organicmaps/tools/unix/docker_maps_generator.sh

View File

@@ -1,10 +1,9 @@
#!/usr/bin/env bash
set -euo pipefail
set -eu
OPT_DEBUG=
OPT_RELEASE=
OPT_RELEASEDEBUGINFO=
OPT_CLEAN=
OPT_DESIGNER=
OPT_GCC=
@@ -14,7 +13,7 @@ OPT_STANDALONE=
OPT_COMPILE_DATABASE=
OPT_LAUNCH_BINARY=
OPT_NJOBS=
while getopts ":cdrRxtagjlp:n:" opt; do
while getopts ":cdrxtagjlp:n:" opt; do
case $opt in
a) OPT_STANDALONE=1 ;;
c) OPT_CLEAN=1 ;;
@@ -30,18 +29,16 @@ while getopts ":cdrRxtagjlp:n:" opt; do
;;
p) OPT_PATH="$OPTARG" ;;
r) OPT_RELEASE=1 ;;
R) OPT_RELEASEDEBUGINFO=1 ;;
t) OPT_DESIGNER=1 ;;
*)
echo "Build the desktop app and other C++ targets (tests, tools...)"
echo "Usage: $0 [-d] [-r] [-R] [-c] [-x] [-s] [-t] [-a] [-g] [-j] [-l] [-p PATH] [-n NUM] [target1 target2 ...]"
echo "Usage: $0 [-d] [-r] [-c] [-x] [-s] [-t] [-a] [-g] [-j] [-l] [-p PATH] [-n NUM] [target1 target2 ...]"
echo
echo "By default both debug and release versions are built in ../omim-build-<buildtype> dir."
echo
echo -e "-d Build debug version"
echo -e "-r Build release version"
echo -e "-R Build release with debug info"
echo -e "-x Use precompiled headers"
echo -e "-d Build a debug version"
echo -e "-r Build a release version"
echo -e "-x Use pre-compiled headers"
echo -e "-c Clean before building"
echo -e "-t Build Qt based designer tool (Linux/MacOS only)"
echo -e "-a Build Qt based standalone desktop app (Linux/MacOS only)"
@@ -62,10 +59,10 @@ if [ "$OPT_TARGET" != "desktop" -a -z "$OPT_DESIGNER" -a -z "$OPT_STANDALONE" ];
CMAKE_CONFIG="${CMAKE_CONFIG:-} -DSKIP_QT_GUI=ON"
fi
# By default build Debug and RelWithDebugInfo
if [ -z "$OPT_DEBUG$OPT_RELEASE$OPT_RELEASEDEBUGINFO" ]; then
# By default build everything
if [ -z "$OPT_DEBUG$OPT_RELEASE" ]; then
OPT_DEBUG=1
OPT_RELEASEDEBUGINFO=1
OPT_RELEASE=1
fi
OMIM_PATH="$(cd "${OMIM_PATH:-$(dirname "$0")/../..}"; pwd)"
@@ -158,5 +155,4 @@ build()
[ -n "$OPT_DEBUG" ] && build Debug
[ -n "$OPT_RELEASE" ] && build Release
[ -n "$OPT_RELEASEDEBUGINFO" ] && build RelWithDebInfo
exit 0

View File

@@ -1,161 +0,0 @@
#!/usr/bin/env bash
set -e
#Volumes/paths for downloads:
#home/planet/planet/planet.o5m
#home/planet/planet/planet.o5m.md5
#PLANET_COASTS_URL:file:///home/planet/planet/
#home/planet/planet/latest_coasts.geom and latest_coasts.rawgeom
#SUBWAY_URL: file:///home/planet/subway/beta.json
#home/planet/subway/beta.json
#HOTELS_URL:/home/planet/planet/kayak/
#home/planet/planet/kayak/
#SRTM_PATH:/home/planet/SRTM-patched-europe/
#ISOLINES_PATH:/home/planet/planet/isolines/
#ADDRESSES_PATH:/home/planet/planet/tiger/
#UK_POSTCODES_URL:/home/planet/postcodes/gb-postcode-data/gb_postcodes.csv
#US_POSTCODES_URL:/home/planet/postcodes/us-postcodes/uszips.csv
echo "<$(date +%T)> Starting..."
# Prepare paths
#
# Already created by Dockerfile:
# /root/OM
# /root/OM/organicmaps
# /root/OM/maps_build
# /home/planet
#
mkdir -p /root/.config/CoMaps # Odd mkdir permission errors in generator_tool in Docker without these
chmod -R 777 /root/.config
mkdir -p ~/.config/rclone
mkdir -p ~/OM/maps_build
mkdir -p ~/OM/omim-build-release
mkdir -p ~/OM/osmctools
mkdir -p /home/planet/planet/isolines/
mkdir -p /home/planet/planet/kayak/
mkdir -p /home/planet/planet/tiger/
mkdir -p /home/planet/postcodes/gb-postcode-data/
mkdir -p /home/planet/postcodes/us-postcodes/
mkdir -p /home/planet/SRTM-patched-europe/
mkdir -p /home/planet/subway
echo "Writing rclone config..."
echo "[r2]" > ~/.config/rclone/rclone.conf
echo "type = s3" >> ~/.config/rclone/rclone.conf
echo "provider = Cloudflare" >> ~/.config/rclone/rclone.conf
echo "access_key_id = $S3_KEY_ID" >> ~/.config/rclone/rclone.conf
echo "secret_access_key = $S3_SECRET_KEY" >> ~/.config/rclone/rclone.conf
echo "region = auto" >> ~/.config/rclone/rclone.conf
echo "endpoint = $S3_ENDPOINT" >> ~/.config/rclone/rclone.conf
# S3_BUCKET is used below during uploading
echo "<$(date +%T)> Running ./configure.sh ..."
cd ~/OM/organicmaps
./configure.sh --skip-map-download --skip-generate-symbols
echo "<$(date +%T)> Compiling tools..."
cd ~/OM/organicmaps
./tools/unix/build_omim.sh -R generator_tool
./tools/unix/build_omim.sh -R world_roads_builder_tool
./tools/unix/build_omim.sh -R mwm_diff_tool
cd tools/python/maps_generator
python3 -m venv /tmp/venv
/tmp/venv/bin/pip3 install -r requirements_dev.txt
echo "<$(date +%T)> Copying map generator INI..."
cp var/etc/map_generator.ini.prod var/etc/map_generator.ini
#TODO: may be duplicated by maps_generator at "osmctools are not found, building from the sources"
#echo "<$(date +%T)> Prebuild some tools so we can make an o5m file or run update_planet..."
#cd ~/OM/organicmaps/tools/osmctools
#gcc osmupdate.c -l z -o ~/OM/osmctools/osmupdate
#gcc osmconvert.c -l z -o ~/OM/osmctools/osmconvert
# May be unnecessary when running world
# /tmp/venv/bin/python -m maps_generator --coasts
# save to /path/to/coasts WorldCoasts.geom as latest_coasts.geom and WorldCoasts.rawgeom latest_coasts.rawgeom
# (from https://github.com/mapsme/omim/issues/11994)
cd /home/planet/planet
if [ ! -f planet-latest.osm.pbf ]; then
echo "<$(date +%T)> Downloading planet-latest.osm.pbf..."
curl -OL https://ftpmirror.your.org/pub/openstreetmap/pbf/planet-latest.osm.pbf
echo "<$(date +%T)> Downloading planet-latest.osm.pbf.md5..."
curl -OL https://ftpmirror.your.org/pub/openstreetmap/pbf/planet-latest.osm.pbf.md5
else
echo "<$(date +%T)> planet-latest.osm.pbf exists, not downloading..."
fi
#curl -OL https://download.geofabrik.de/north-america/us-west-latest.osm.pbf
#curl -OL https://download.geofabrik.de/north-america/us-west-latest.osm.pbf.md5
# (rename us-west-latest to planet-latest and edit the md5 file accordingly)
if [ ! -f planet.o5m ]; then
echo "<$(date +%T)> Converting planet-latest.osm.pbf to planet.o5m..."
~/OM/osmctools/osmconvert planet-latest.osm.pbf -o=planet.o5m
else
echo "<$(date +%T)> planet.o5m exists, not converting..."
fi
# (currently unused:) ~/OM/organicmaps/tools/unix/update_planet.sh planet.o5m
echo "<$(date +%T)> Generating maps..."
cd ~/OM/organicmaps/tools/python
/tmp/venv/bin/python -m maps_generator --countries="Laos" --skip="Coastline,Ugc,Popularity,Descriptions,Routing,RoutingTransit,MwmDiffs,Statistics"
#/tmp/venv/bin/python -m maps_generator --skip="MwmDiffs" --continue
# do not use --production except for Kayak/recommendation/popularity/food data
#/tmp/venv/bin/python -m maps_generator --countries="World, WorldCoasts, US_Oregon_*, US_California_*, US_Washington_*" --production
#/tmp/venv/bin/python -m maps_generator --countries="US_Oregon_Portland" --skip="MwmDiffs"
#/tmp/venv/bin/python -m maps_generator --countries="Macedonia" --skip="MwmDiffs"
shopt -s nullglob
buildfolder=$(find ~/OM/maps_build/ -mindepth 1 -maxdepth 1 -iname 2* -type d | sort -n -r | head -1 | cut -d/ -f5)
builddate=$(find ~/OM/maps_build/*/ -mindepth 1 -maxdepth 1 -iname 2* -type d | sort -n -r | head -1 | cut -d/ -f6)
mwmfiles=( ~/OM/maps_build/$buildfolder/$builddate/*.mwm )
if (( ${#mwmfiles[@]} )); then
echo "<$(date +%T)> Uploading maps to sftp..."
# upload limited files via SFTP to Dreamhost (cdn-us-1.comaps.app)
# Needs StrictHostKeyChecking=no, otherwise new containers/SFTP_HOSTs will require a manual ssh attempt
sshpass -p $SFTP_PASSWORD sftp -o StrictHostKeyChecking=no $SFTP_USER@$SFTP_HOST:$SFTP_PATH <<EOF
lcd ~/OM/maps_build/$buildfolder/$builddate
mkdir maps/$builddate
cd maps/$builddate
put countries.txt
put World.mwm
put WorldCoasts.mwm
cd ..
rm latest
ln -s $builddate latest
cd ..
lcd /home/planet/subway/
put subway.json
put subway.log
put subway.transit.json
lcd /home/planet/subway/subway/validator
rm subway/js/*
rmdir subway/js
rm subway/*
rmdir subway
mkdir subway
cd subway
put *
exit
EOF
# upload all files via rclone to Cloudflare (R2)
echo "<$(date +%T)> Uploading maps to cloudflare..."
rclone --progress copy ~/OM/maps_build/$buildfolder/$builddate r2:$S3_BUCKET/maps/$builddate/
else
echo "<$(date +%T)> No MWM files in ~/OM/maps_build/$buildfolder/$builddate/*.mwm, not uploading maps."
echo "<$(date +%T)> Found: $(ls -alt ~/OM/maps_build/*)"
fi
echo "<$(date +%T)> Temporarily NOT Removing intermediate data..."
#rm -rf ~/OM/maps_build/*/intermediate_data
echo "<$(date +%T)> DONE"

View File

@@ -20,9 +20,9 @@ export SKIP_PLANET_UPDATE="1"
# If unavailable then replace with a local file.
# TODO: keep the downloaded csv file from the latest run.
#export CITIES_INFO_URL=""
export TMPDIR="$BUILD_PATH/subway"
export TMPDIR="$BUILD_PATH/subways"
# The output file, which needs post-processing by transit_graph_generator.py
export MAPSME="$SUBWAYS_PATH/subway.json"
export MAPSME="$SUBWAYS_PATH/subways.json"
# Produce additional files needed for https://cdn.organicmaps.app/subway/
export HTML_DIR="$SUBWAYS_VALIDATOR_PATH"
@@ -36,7 +36,7 @@ export DUMP_CITY_LIST="$SUBWAYS_VALIDATOR_PATH/cities.txt"
cp -r "$SUBWAYS_REPO_PATH"/render/* "$SUBWAYS_VALIDATOR_PATH/"
TRANSIT_TOOL_PATH="$REPO_PATH/tools/python/transit"
SUBWAYS_GRAPH_FILE="$SUBWAYS_PATH/subway.transit.json"
SUBWAYS_GRAPH_FILE="$SUBWAYS_PATH/subways.transit.json"
activate_venv_at_path "$TRANSIT_TOOL_PATH"
"$PYTHON" "$TRANSIT_TOOL_PATH/transit_graph_generator.py" "$MAPSME" "$SUBWAYS_GRAPH_FILE" 2>&1 | tee -a "$SUBWAYS_LOG"

View File

@@ -1,15 +1,15 @@
## NOTE: edit the settings.sh file to customize/override the defaults.
# NOTE: edit the settings.sh file to customize/override the defaults.
# Absolutize & normalize paths.
REPO_PATH="${REPO_PATH:-$(cd "$(dirname "$0")/../../.."; pwd -P)}"
BASE_PATH="${BASE_PATH:-$REPO_PATH/..}"
BASE_PATH="${BASE_PATH:-$REPO_PATH/../maps}"
# Temporary files
BUILD_PATH="${BUILD_PATH:-$BASE_PATH/osm-planet/build}"
BUILD_PATH="${BUILD_PATH:-$BASE_PATH/build}"
# Other code repositories, e.g. subways, wikiparser..
CODE_PATH="${CODE_PATH:-$REPO_PATH/..}"
# Source map data and processed outputs e.g. wiki articles
DATA_PATH="${DATA_PATH:-$BASE_PATH/osm-planet}"
DATA_PATH="${DATA_PATH:-$BASE_PATH/data}"
# OSM planet source files
@@ -20,6 +20,6 @@ PLANET_O5M="${PLANET_O5M:-$PLANET_PATH/planet-latest.o5m}"
# Subways
SUBWAYS_REPO_PATH="${SUBWAYS_REPO_PATH:-$CODE_PATH/subways}"
SUBWAYS_PATH="${SUBWAYS_PATH:-$DATA_PATH/subway}"
SUBWAYS_LOG="${SUBWAYS_LOG:-$SUBWAYS_PATH/subway.log}"
SUBWAYS_PATH="${SUBWAYS_PATH:-$DATA_PATH/subways}"
SUBWAYS_LOG="${SUBWAYS_LOG:-$SUBWAYS_PATH/subways.log}"
SUBWAYS_VALIDATOR_PATH="${SUBWAYS_VALIDATOR_PATH:-$SUBWAYS_PATH/validator}"

View File

@@ -1,65 +0,0 @@
#!/usr/bin/env bash
# Master file documenting how to clone, build, and run the maps generator via Docker
# Prerequisites:
# sudo apt install docker git
# To bootstrap the repo:
#TODO: rename comaps-init to comaps here and throughout
#cd /media/4tbexternal
#if [ ! -f /media/4tbexternal/comaps-init ]; then
# git clone --recurse-submodules --shallow-submodules https://codeberg.org/comaps/comaps-init.git
# cd /media/4tbexternal/comaps-init
#else
# cd /media/4tbexternal/comaps-init
# git pull origin
#fi
# And data:
# cd /media/4tbexternal/comaps-init/data/
# wget World.mwm //pastk - not needed
# wget WorldCoasts.mwm
#TODO: isolines, postcodes, subways, wiki
# In tools/unix/maps, copy settings.sh.dist to settings.sh and modify if needed
# In tools/python/maps_generator, copy map_generator.ini.prod to map_generator.ini and modify if needed
#cd /media/4tbexternal/comaps-init/tools/unix
# Build with: docker build . -t maps_generator
# (Good to rebuild each time just in case)
# If you get a Dockerfile not found error especially on an XFS partition, try copying Dockerfile to an ext4 partition first.
#
# Edit as appropriate and run with:
# docker run \
# -e S3_KEY_ID=changeme \
# -e S3_SECRET_KEY=changeme \
# -e S3_ENDPOINT=https://changeme.r2.cloudflarestorage.com/ \
# -e S3_BUCKET=comaps-map-files \
# -e SFTP_USER=changeme \
# -e SFTP_PASSWORD=changeme \
# -e SFTP_HOST=changeme.dreamhost.com \
# -e SFTP_PATH=cdn-us-1.comaps.app \
# --ulimit nofile=262144:262144 \
# -v /media/4tbexternal/comaps-init:/root/OM/organicmaps \
# -v /media/4tbexternal/osm-planet:/home/planet \
# -v /media/4tbexternal/osm-maps:/root/OM/maps_build \
# -it maps_generator \
# /root/OM/organicmaps/tools/unix/docker_maps_generator.sh
docker run \
-e S3_KEY_ID=changeme \
-e S3_SECRET_KEY=changeme \
-e S3_ENDPOINT=https://changeme.r2.cloudflarestorage.com/ \
-e S3_BUCKET=comaps-map-files \
-e SFTP_USER=changeme \
-e SFTP_PASSWORD=changeme \
-e SFTP_HOST=changeme.dreamhost.com \
-e SFTP_PATH=cdn-us-1.comaps.app \
--ulimit nofile=262144:262144 \
-v /media/4tbexternal/comaps-init:/root/OM/organicmaps \
-v /media/4tbexternal/osm-planet:/home/planet \
-v /media/4tbexternal/osm-maps:/root/OM/maps_build \
-it maps_generator \
/root/OM/organicmaps/tools/unix/docker_maps_generator.sh

View File

@@ -1,17 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
OSMUPDATE=~/osmctools/osmupdate
# osmconvert should be accessible in PATH.
PATH="$(dirname "$OSMUPDATE"):$PATH"
# Pass pbf or o5m file as a parameter
OLD="$1"
NEW="${1/.pbf/.new.pbf}"
NEW="${NEW/.o5m/.new.o5m}"
"$OSMUPDATE" -v --drop-authors --drop-version --hash-memory=512000 "$OLD" "$NEW"
# Uncomment to replace old planet.
mv "$NEW" "$OLD"
#md5sum -b "$OLD" > "$OLD.md5"
echo "Successfully updated $OLD"

View File

@@ -9,6 +9,8 @@
#include "geometry/mercator.hpp"
#include "base/scope_guard.hpp"
#include "base/string_utils.hpp"
#include "base/thread_pool_computational.hpp"
#include <algorithm>
@@ -100,33 +102,30 @@ public:
}
private:
Altitude GetValueImpl(ms::LatLon pos)
Altitude GetValueImpl(ms::LatLon const & pos)
{
if (m_preferredTile != nullptr)
{
using mercator::kPointEqualityEps;
// Each SRTM tile overlaps the top row in the bottom tile and the right row in the left tile.
// Try to prevent loading a new tile if the position can be found in the loaded one.
auto const latDist = pos.m_lat - m_leftBottomOfPreferredTile.m_lat;
auto const lonDist = pos.m_lon - m_leftBottomOfPreferredTile.m_lon;
if (latDist > -kPointEqualityEps && latDist < 1.0 + kPointEqualityEps &&
lonDist > -kPointEqualityEps && lonDist < 1.0 + kPointEqualityEps)
if (latDist > -mercator::kPointEqualityEps && latDist < 1.0 + mercator::kPointEqualityEps && lonDist > -mercator::kPointEqualityEps && lonDist < 1.0 + mercator::kPointEqualityEps)
{
ms::LatLon innerPos = pos;
if (latDist < 0.0)
pos.m_lat += kPointEqualityEps;
innerPos.m_lat += mercator::kPointEqualityEps;
else if (latDist >= 1.0)
pos.m_lat -= kPointEqualityEps;
innerPos.m_lat -= mercator::kPointEqualityEps;
if (lonDist < 0.0)
pos.m_lon += kPointEqualityEps;
innerPos.m_lon += mercator::kPointEqualityEps;
else if (lonDist >= 1.0)
pos.m_lon -= kPointEqualityEps;
return m_preferredTile->GetAltitude(pos);
innerPos.m_lon -= mercator::kPointEqualityEps;
return m_preferredTile->GetHeight(innerPos);
}
}
return m_srtmManager.GetAltitude(pos);
return m_srtmManager.GetHeight(pos);
}
Altitude GetMedianValue(ms::LatLon const & pos)
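
The GetValueImpl hunk above clamps a position that falls within kPointEqualityEps of the preferred tile's one-degree extent back inside that tile, so the already-loaded tile can answer queries sitting exactly on a shared edge. A minimal sketch of that clamping logic (hypothetical names; EPS stands in for mercator::kPointEqualityEps):

# Hypothetical illustration of the tile-clamping logic in GetValueImpl above.
# A tile covers [lat0, lat0 + 1] x [lon0, lon0 + 1]; positions within eps of
# that box are nudged inside it so the cached tile can serve the query.
EPS = 1e-7  # stand-in for mercator::kPointEqualityEps

def clamp_into_tile(lat, lon, lat0, lon0, eps=EPS):
    """Return (lat, lon) shifted into the tile, or None if outside its eps-extended box."""
    dlat, dlon = lat - lat0, lon - lon0
    if not (-eps < dlat < 1.0 + eps and -eps < dlon < 1.0 + eps):
        return None
    if dlat < 0.0:
        lat += eps
    elif dlat >= 1.0:
        lat -= eps
    if dlon < 0.0:
        lon += eps
    elif dlon >= 1.0:
        lon -= eps
    return lat, lon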
@@ -180,8 +179,6 @@ public:
, m_bottomLat(bottomLat)
{}
/// @todo Should we use the same approach as in SrtmTile::GetTriangleHeight/GetBilinearHeight?
/// This function is used in ASTER filter only.
Altitude GetValue(ms::LatLon const & pos) override
{
double ln = pos.m_lon - m_leftLon;
@@ -243,7 +240,7 @@ class TileIsolinesTask
public:
TileIsolinesTask(int left, int bottom, int right, int top, std::string const & srtmDir,
TileIsolinesParams const * params, bool forceRegenerate)
: m_srtmDir(srtmDir)
: m_strmDir(srtmDir)
, m_srtmProvider(srtmDir)
, m_params(params)
, m_forceRegenerate(forceRegenerate)
@@ -254,7 +251,7 @@ public:
TileIsolinesTask(int left, int bottom, int right, int top, std::string const & srtmDir,
TileIsolinesProfileParams const * profileParams, bool forceRegenerate)
: m_srtmDir(srtmDir)
: m_strmDir(srtmDir)
, m_srtmProvider(srtmDir)
, m_profileParams(profileParams)
, m_forceRegenerate(forceRegenerate)
@@ -299,9 +296,8 @@ private:
return;
}
}
auto const & pl = GetPlatform();
if (!pl.IsFileExistsByFullPath(base::JoinPath(m_srtmDir, tileName + ".hgt")) &&
!pl.IsFileExistsByFullPath(generator::SrtmTile::GetPath(m_srtmDir, tileName)))
if (!GetPlatform().IsFileExistsByFullPath(generator::SrtmTile::GetPath(m_strmDir, tileName)))
{
LOG(LINFO, ("SRTM tile", tileName, "doesn't exist, skip processing."));
return;
@@ -377,10 +373,11 @@ private:
ValuesProvider<Altitude> & altProvider,
Contours<Altitude> & contours)
{
// Avoid seam between SRTM and ASTER.
if ((lat == kAsterTilesLatTop) || (lat == kAsterTilesLatBottom - 1))
auto const avoidSeam = lat == kAsterTilesLatTop || (lat == kAsterTilesLatBottom - 1);
if (avoidSeam)
{
m_srtmProvider.SetPrefferedTile(ms::LatLon(lat == kAsterTilesLatTop ? lat - 0.5 : lat + 0.5, lon));
m_srtmProvider.SetPrefferedTile(ms::LatLon(lat == kAsterTilesLatTop ? lat - 0.5 : lat + 0.5,
lon));
SeamlessAltitudeProvider seamlessAltProvider(m_srtmProvider, altProvider,
[](ms::LatLon const & pos)
{
@@ -416,7 +413,7 @@ private:
int m_bottom;
int m_right;
int m_top;
std::string m_srtmDir;
std::string m_strmDir;
SrtmProvider m_srtmProvider;
TileIsolinesParams const * m_params = nullptr;
TileIsolinesProfileParams const * m_profileParams = nullptr;
@@ -438,14 +435,14 @@ void RunGenerateIsolinesTasks(int left, int bottom, int right, int top,
int tilesRowPerTask = top - bottom;
int tilesColPerTask = right - left;
if (tilesRowPerTask * tilesColPerTask <= static_cast<long>(threadsCount))
if (tilesRowPerTask * tilesColPerTask <= threadsCount)
{
tilesRowPerTask = 1;
tilesColPerTask = 1;
}
else
{
while (tilesRowPerTask * tilesColPerTask > static_cast<long>(maxCachedTilesPerThread))
while (tilesRowPerTask * tilesColPerTask > maxCachedTilesPerThread)
{
if (tilesRowPerTask > tilesColPerTask)
tilesRowPerTask = (tilesRowPerTask + 1) / 2;
@@ -496,9 +493,8 @@ void Generator::GenerateIsolines(int left, int bottom, int right, int top,
void Generator::GenerateIsolinesForCountries()
{
auto const & pl = GetPlatform();
if (!pl.IsFileExistsByFullPath(m_isolinesTilesOutDir) &&
!pl.MkDirRecursively(m_isolinesTilesOutDir))
if (!GetPlatform().IsFileExistsByFullPath(m_isolinesTilesOutDir) &&
!GetPlatform().MkDirRecursively(m_isolinesTilesOutDir))
{
LOG(LERROR, ("Can't create directory", m_isolinesTilesOutDir));
return;
@@ -512,8 +508,8 @@ void Generator::GenerateIsolinesForCountries()
continue;
checkedProfiles.insert(profileName);
auto const profileTilesDir = GetTilesDir(m_isolinesTilesOutDir, profileName);
if (!pl.IsFileExistsByFullPath(profileTilesDir) &&
!pl.MkDirChecked(profileTilesDir))
if (!GetPlatform().IsFileExistsByFullPath(profileTilesDir) &&
!GetPlatform().MkDirChecked(profileTilesDir))
{
LOG(LERROR, ("Can't create directory", profileTilesDir));
return;
@@ -523,7 +519,7 @@ void Generator::GenerateIsolinesForCountries()
auto const tmpTileProfilesDir = GetTileProfilesDir(m_isolinesTilesOutDir);
Platform::RmDirRecursively(tmpTileProfilesDir);
if (!pl.MkDirChecked(tmpTileProfilesDir))
if (!GetPlatform().MkDirChecked(tmpTileProfilesDir))
{
LOG(LERROR, ("Can't create directory", tmpTileProfilesDir));
return;
@@ -537,7 +533,7 @@ void Generator::GenerateIsolinesForCountries()
auto const countryFile = GetIsolinesFilePath(countryId, m_isolinesCountriesOutDir);
if (!m_forceRegenerate && pl.IsFileExistsByFullPath(countryFile))
if (!m_forceRegenerate && GetPlatform().IsFileExistsByFullPath(countryFile))
{
LOG(LINFO, ("Isolines for", countryId, "are ready, skip processing."));
continue;
@@ -697,8 +693,9 @@ void Generator::InitCountryInfoGetter(std::string const & dataDir)
GetPlatform().SetResourceDir(dataDir);
m_infoReader = storage::CountryInfoReader::CreateCountryInfoReader(GetPlatform());
CHECK(m_infoReader, ());
m_infoGetter = storage::CountryInfoReader::CreateCountryInfoReader(GetPlatform());
CHECK(m_infoGetter, ());
m_infoReader = static_cast<storage::CountryInfoReader *>(m_infoGetter.get());
}
void Generator::InitProfiles(std::string const & isolinesProfilesFileName,
@@ -763,8 +760,6 @@ void Generator::GetCountryRegions(storage::CountryId const & countryId, m2::Rect
}
CHECK_LESS(id, m_infoReader->GetCountries().size(), ());
/// @todo Refactor using Memory[Mapped] reader for countries.
std::lock_guard guard(m_infoMutex);
m_infoReader->LoadRegionsFromDisk(id, countryRegions);
}
} // namespace topography_generator
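
RunGenerateIsolinesTasks above splits the requested rectangle into per-task tile blocks: if the whole rectangle has no more tiles than threads, each task handles a single tile; otherwise the larger dimension of the block is repeatedly halved until one task caches at most maxCachedTilesPerThread tiles. A small sketch of that splitting rule (hypothetical; the else branch halving the column count is cut off by the hunk above, so that part is assumed):

# Hypothetical sketch of the per-task tile splitting in RunGenerateIsolinesTasks.
def tiles_per_task(rows, cols, threads_count, max_cached_tiles_per_thread):
    rows_per_task, cols_per_task = rows, cols
    if rows_per_task * cols_per_task <= threads_count:
        return 1, 1
    while rows_per_task * cols_per_task > max_cached_tiles_per_thread:
        if rows_per_task > cols_per_task:
            rows_per_task = (rows_per_task + 1) // 2
        else:
            cols_per_task = (cols_per_task + 1) // 2
    return rows_per_task, cols_per_task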

View File

@@ -91,8 +91,8 @@ private:
ProfileToTileIsolinesParams m_profileToTileParams;
ProfileToIsolinesPackingParams m_profileToPackingParams;
std::mutex m_infoMutex;
std::unique_ptr<storage::CountryInfoReader> m_infoReader;
std::unique_ptr<storage::CountryInfoGetter> m_infoGetter;
storage::CountryInfoReader * m_infoReader = nullptr;
// They can't be negative, it is done to avoid compiler warnings.
long m_threadsCount;

View File

@@ -1,5 +1,4 @@
#include "generator.hpp"
#include "generator/utils.hpp"
#include "base/assert.hpp"
#include "base/logging.hpp"
@@ -54,7 +53,7 @@ DEFINE_uint64(median_r, 1, "Custom isolines generating mode. Median filter radiu
using namespace topography_generator;
MAIN_WITH_ERROR_HANDLING([](int argc, char ** argv)
int main(int argc, char ** argv)
{
gflags::SetUsageMessage(
"\n\nThis tool generates isolines and works in the following modes:\n"
@@ -206,4 +205,4 @@ MAIN_WITH_ERROR_HANDLING([](int argc, char ** argv)
generator.GenerateIsolines(FLAGS_left, FLAGS_bottom, FLAGS_right, FLAGS_top, params);
return EXIT_SUCCESS;
});
}

View File

@@ -3,7 +3,6 @@
#include "topography_generator/marching_squares/contours_builder.hpp"
#include "topography_generator/marching_squares/square.hpp"
#include "topography_generator/utils/contours.hpp"
#include "topography_generator/utils/values_provider.hpp"
#include "base/logging.hpp"
@@ -35,9 +34,7 @@ public:
void GenerateContours(Contours<ValueType> & result)
{
std::vector<ValueType> grid((m_stepsCountLat + 1) * (m_stepsCountLon + 1));
ScanValuesInRect(result, grid);
ScanValuesInRect(result.m_minValue, result.m_maxValue, result.m_invalidValuesCount);
result.m_valueStep = m_valueStep;
auto const levelsCount = static_cast<size_t>(result.m_maxValue - result.m_minValue) / m_valueStep;
@@ -48,72 +45,60 @@ public:
}
ContoursBuilder contoursBuilder(levelsCount, m_debugId);
Square<ValueType> square(result.m_minValue, m_valueStep, m_debugId);
for (size_t i = 0; i < m_stepsCountLat; ++i)
{
contoursBuilder.BeginLine();
for (size_t j = 0; j < m_stepsCountLon; ++j)
{
// This point should be calculated _exactly_ the same way as in ScanValuesInRect.
// leftBottom + m_step doesn't work due to different floating-point results.
square.Init(
m_leftBottom.m_lon + m_step * j, // Left
m_leftBottom.m_lat + m_step * i, // Bottom
m_leftBottom.m_lon + m_step * (j + 1), // Right
m_leftBottom.m_lat + m_step * (i + 1), // Top
grid[Idx(i, j)], // LB
grid[Idx(i, j + 1)], // RB
grid[Idx(i + 1, j)], // LT
grid[Idx(i + 1, j + 1)], // RT
m_valuesProvider.GetInvalidValue());
auto const leftBottom = ms::LatLon(m_leftBottom.m_lat + m_step * i,
m_leftBottom.m_lon + m_step * j);
// Use std::min to prevent floating-point number precision error.
auto const rightTop = ms::LatLon(std::min(leftBottom.m_lat + m_step, m_rightTop.m_lat),
std::min(leftBottom.m_lon + m_step, m_rightTop.m_lon));
Square<ValueType> square(leftBottom, rightTop, result.m_minValue, m_valueStep,
m_valuesProvider, m_debugId);
square.GenerateSegments(contoursBuilder);
}
contoursBuilder.EndLine(i == m_stepsCountLat - 1 /* finalLine */);
auto const isLastLine = i == m_stepsCountLat - 1;
contoursBuilder.EndLine(isLastLine);
}
contoursBuilder.GetContours(result.m_minValue, result.m_valueStep, result.m_contours);
}
private:
size_t Idx(size_t iLat, size_t jLon) const { return iLat * (m_stepsCountLon + 1) + jLon; }
void ScanValuesInRect(Contours<ValueType> & res, std::vector<ValueType> & grid) const
void ScanValuesInRect(ValueType & minValue, ValueType & maxValue, size_t & invalidValuesCount) const
{
res.m_minValue = res.m_maxValue = m_valuesProvider.GetValue(m_leftBottom);
res.m_invalidValuesCount = 0;
minValue = maxValue = m_valuesProvider.GetValue(m_leftBottom);
invalidValuesCount = 0;
for (size_t i = 0; i <= m_stepsCountLat; ++i)
{
for (size_t j = 0; j <= m_stepsCountLon; ++j)
{
ms::LatLon const pos(m_leftBottom.m_lat + m_step * i, m_leftBottom.m_lon + m_step * j);
auto const pos = ms::LatLon(m_leftBottom.m_lat + m_step * i,
m_leftBottom.m_lon + m_step * j);
auto const value = m_valuesProvider.GetValue(pos);
grid[Idx(i, j)] = value;
if (value == m_valuesProvider.GetInvalidValue())
{
++res.m_invalidValuesCount;
++invalidValuesCount;
continue;
}
if (value < res.m_minValue)
res.m_minValue = value;
if (value > res.m_maxValue)
res.m_maxValue = value;
if (value < minValue)
minValue = value;
if (value > maxValue)
maxValue = value;
}
}
if (res.m_invalidValuesCount > 0)
LOG(LWARNING, ("Tile", m_debugId, "contains", res.m_invalidValuesCount, "invalid values."));
if (invalidValuesCount > 0)
LOG(LWARNING, ("Tile", m_debugId, "contains", invalidValuesCount, "invalid values."));
Square<ValueType>::ToLevelsRange(m_valueStep, res.m_minValue, res.m_maxValue);
Square<ValueType>::ToLevelsRange(m_valueStep, minValue, maxValue);
CHECK_GREATER_OR_EQUAL(res.m_maxValue, res.m_minValue, (m_debugId));
CHECK_GREATER_OR_EQUAL(maxValue, minValue, (m_debugId));
}
ms::LatLon const m_leftBottom;
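
The refactored GenerateContours above first scans the whole (stepsCountLat + 1) x (stepsCountLon + 1) grid of values into a flat vector (ScanValuesInRect), then walks the cells row by row, feeding each cell's four corner values into a Square that emits contour segments. A minimal sketch of that grid layout and traversal (hypothetical names; the real segment generation lives in Square and ContoursBuilder):

# Hypothetical outline of the grid scan + cell traversal in GenerateContours above.
def generate_contours(left_bottom, step, steps_lat, steps_lon, get_value, process_cell):
    cols = steps_lon + 1
    idx = lambda i, j: i * cols + j          # same flat indexing as Idx(i, j)
    # Scan every grid node once; corners are later read back from this vector,
    # so neighbouring cells see exactly the same values on their shared edge.
    grid = [get_value(left_bottom[0] + step * i, left_bottom[1] + step * j)
            for i in range(steps_lat + 1) for j in range(cols)]
    for i in range(steps_lat):               # one "line" of cells per latitude row
        for j in range(steps_lon):
            process_cell(
                left=left_bottom[1] + step * j,
                bottom=left_bottom[0] + step * i,
                right=left_bottom[1] + step * (j + 1),
                top=left_bottom[0] + step * (i + 1),
                lb=grid[idx(i, j)], rb=grid[idx(i, j + 1)],
                lt=grid[idx(i + 1, j)], rt=grid[idx(i + 1, j + 1)])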

View File

@@ -1,7 +1,7 @@
#pragma once
#include "topography_generator/marching_squares/contours_builder.hpp"
#include "topography_generator/utils/values_provider.hpp"
namespace topography_generator
{
@@ -9,31 +9,28 @@ template <typename ValueType>
class Square
{
public:
Square(ValueType minValue, ValueType valueStep, std::string const & debugId)
Square(ms::LatLon const & leftBottom,
ms::LatLon const & rightTop,
ValueType minValue, ValueType valueStep,
ValuesProvider<ValueType> & valuesProvider,
std::string const & debugId)
: m_minValue(minValue)
, m_valueStep(valueStep)
, m_left(leftBottom.m_lon)
, m_right(rightTop.m_lon)
, m_bottom(leftBottom.m_lat)
, m_top(rightTop.m_lat)
, m_debugId(debugId)
{
static_assert(std::is_integral<ValueType>::value && std::is_signed<ValueType>::value);
static_assert(std::is_integral<ValueType>::value, "Only integral types are supported.");
m_valueLB = GetValue(leftBottom, valuesProvider);
m_valueLT = GetValue(ms::LatLon(m_top, m_left), valuesProvider);
m_valueRT = GetValue(ms::LatLon(m_top, m_right), valuesProvider);
m_valueRB = GetValue(ms::LatLon(m_bottom, m_right), valuesProvider);
}
void Init(double left, double bottom, double right, double top,
ValueType lb, ValueType rb, ValueType lt, ValueType rt, ValueType invalid)
{
m_isValid = true;
m_left = left;
m_bottom = bottom;
m_right = right;
m_top = top;
m_valueLB = GetValue(lb, invalid);
m_valueRB = GetValue(rb, invalid);
m_valueLT = GetValue(lt, invalid);
m_valueRT = GetValue(rt, invalid);
}
void GenerateSegments(ContoursBuilder & builder) const
void GenerateSegments(ContoursBuilder & builder)
{
if (!m_isValid)
return;
@@ -73,13 +70,14 @@ private:
Unclear,
};
ValueType GetValue(ValueType val, ValueType invalid)
ValueType GetValue(ms::LatLon const & pos, ValuesProvider<ValueType> & valuesProvider)
{
// If a contour goes right through the corner of the square false segments can be generated.
// Shift the value slightly from the corner.
if (val == invalid)
ValueType val = valuesProvider.GetValue(pos);
if (val == valuesProvider.GetInvalidValue())
{
//LOG(LWARNING, ("Invalid value at the position", pos, m_debugId));
LOG(LWARNING, ("Invalid value at the position", pos, m_debugId));
m_isValid = false;
return val;
}
@@ -89,7 +87,7 @@ private:
return val;
}
void AddSegments(ValueType val, uint16_t ind, ContoursBuilder & builder) const
void AddSegments(ValueType val, uint16_t ind, ContoursBuilder & builder)
{
// Segment is a vector directed so that higher values are on the right.
static const std::pair<Rib, Rib> intersectedRibs[] =
@@ -161,7 +159,7 @@ private:
}
}
ms::LatLon InterpolatePoint(Square::Rib rib, ValueType val) const
ms::LatLon InterpolatePoint(Square::Rib rib, ValueType val)
{
double val1;
double val2;
@@ -214,6 +212,9 @@ private:
return {lat, lon};
}
ValueType m_minValue;
ValueType m_valueStep;
double m_left;
double m_right;
double m_bottom;
@@ -224,10 +225,7 @@ private:
ValueType m_valueRT;
ValueType m_valueRB;
ValueType m_minValue;
ValueType m_valueStep;
bool m_isValid = true;
std::string m_debugId;
bool m_isValid;
};
} // topography_generator
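
Two details of Square are worth spelling out: GetValue nudges a corner value that lands exactly on a contour level, so a contour never passes exactly through a cell corner (which, per the comment above, would generate false segments), and the crossing point on each rib is then found by linear interpolation between the two corner values. A small sketch of both ideas (hypothetical: the exact shift is not visible in the hunk above, so the +1 below is only illustrative; assumes integral values and a positive step, like ValueType/valueStep):

# Hypothetical sketch of two details of Square above.
def shift_off_level(value, min_value, step):
    # If the value sits exactly on a contour level, nudge it off the level.
    if (value - min_value) % step == 0:
        return value + 1
    return value

def interpolate_on_rib(coord1, value1, coord2, value2, level):
    # Position of the `level` isoline between two corners with values value1/value2.
    # Assumes value1 != value2: a rib is only interpolated when the level crosses it.
    t = (level - value1) / (value2 - value1)
    return coord1 + t * (coord2 - coord1)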

View File

@@ -117,8 +117,8 @@ bool SaveContrours(std::string const & filePath,
LOG(LWARNING, ("File writer exception raised:", ex.what(), ", file", tmpFilePath));
return false;
}
CHECK(base::RenameFileX(tmpFilePath, filePath), (tmpFilePath, filePath));
base::DeleteFileX(filePath);
VERIFY(base::RenameFileX(tmpFilePath, filePath), (tmpFilePath, filePath));
return true;
}
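
Both sides of the hunk above follow the same safety pattern: the contours are serialized to a temporary file first, and only then moved over the final path (on one side a stale target is deleted explicitly before the rename). That way a crash mid-write never leaves a truncated output file. A minimal sketch of the pattern (hypothetical, not the base:: implementation):

# Hypothetical illustration of the temp-file-then-rename pattern used by
# SaveContours above: a crash while writing never corrupts the final file.
import os

def save_atomically(path: str, data: bytes) -> bool:
    tmp_path = path + ".tmp"
    try:
        with open(tmp_path, "wb") as f:
            f.write(data)
    except OSError as ex:
        print("File writer exception raised:", ex, ", file", tmp_path)
        return False
    os.replace(tmp_path, path)   # atomic on POSIX; overwrites an existing file
    return True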