https://bugs.gentoo.org/887037

Work around https://github.com/boostorg/container/commit/99091420ae553b27345e04279fd19fe24fb684c1
in Boost 1.81.

Upstream s2 (as in real upstream, not MongoDB) has deviated substantially
from the version vendored.
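
The breakage is preprocessor pollution rather than an API change: the vendored
s2 headers define hash_map/hash_set as object-like macros, so any header
included afterwards that uses those names as plain identifiers (which the
Boost 1.81 change referenced above appears to do) gets rewritten and no longer
compiles.  A minimal standalone sketch of the clash, using a made-up
placeholder namespace instead of a real Boost header:

  #include <unordered_map>
  #define hash_map std::unordered_map  // what the old s2 hash.h did
  namespace other_lib {                // stands in for a later third-party header
  // class hash_map;                   // would expand to "class std::unordered_map;"
  }                                    // and fail to compile if uncommented
  int main() {
    hash_map<int, int> m;              // s2 code relies on the macro like this
    m[1] = 2;
    return m.size() == 1 ? 0 : 1;
  }

Renaming the macros to my_hash_map/my_hash_set (and every use of them in the
vendored s2 sources below) sidesteps the clash without touching Boost.
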
--- a/src/third_party/s2/base/stl_decl_msvc.h
+++ b/src/third_party/s2/base/stl_decl_msvc.h
@@ -118,8 +118,8 @@ namespace msvchash {
   class hash_multimap;
 }  // end namespace  msvchash
 
-using msvchash::hash_set;
-using msvchash::hash_map;
+#define my_hash_set msvchash::hash_set
+#define my_hash_map msvchash::hash_map
 using msvchash::hash;
 using msvchash::hash_multimap;
 using msvchash::hash_multiset;
--- a/src/third_party/s2/base/stl_decl_osx.h
+++ b/src/third_party/s2/base/stl_decl_osx.h
@@ -68,8 +68,8 @@ using std::string;
 
 using namespace std;
 using __gnu_cxx::hash;
-using __gnu_cxx::hash_set;
-using __gnu_cxx::hash_map;
+#define my_hash_set __gnu_cxx::hash_set
+#define my_hash_map __gnu_cxx::hash_map
 using __gnu_cxx::select1st;
 
 /* On Linux (and gdrive on OSX), this comes from places like
--- a/src/third_party/s2/hash.h
+++ b/src/third_party/s2/hash.h
@@ -2,10 +2,10 @@
 #define THIRD_PARTY_S2_HASH_H_
 
 #include <unordered_map>
-#define hash_map std::unordered_map
+#define my_hash_map std::unordered_map
 
 #include <unordered_set>
-#define hash_set std::unordered_set
+#define my_hash_set std::unordered_set
 
 #define HASH_NAMESPACE_START namespace std {
 #define HASH_NAMESPACE_END }
--- a/src/third_party/s2/s2_test.cc
+++ b/src/third_party/s2/s2_test.cc
@@ -10,7 +10,7 @@ using std::reverse;
 
 #include <hash_set>
 #include <hash_map>
-using __gnu_cxx::hash_set;
+#define my_hash_set __gnu_cxx::hash_set
 
 #include "s2.h"
 #include "base/logging.h"
@@ -709,8 +709,8 @@ TEST(S2, Frames) {
 #if 0
 TEST(S2, S2PointHashSpreads) {
   int kTestPoints = 1 << 16;
-  hash_set<size_t> set;
-  hash_set<S2Point> points;
+  my_hash_set<size_t> set;
+  my_hash_set<S2Point> points;
   hash<S2Point> hasher;
   S2Point base = S2Point(1, 1, 1);
   for (int i = 0; i < kTestPoints; ++i) {
@@ -733,7 +733,7 @@ TEST(S2, S2PointHashCollapsesZero) {
   double minus_zero = -zero;
   EXPECT_NE(*reinterpret_cast<uint64 const*>(&zero),
             *reinterpret_cast<uint64 const*>(&minus_zero));
-  hash_map<S2Point, int> map;
+  my_hash_map<S2Point, int> map;
   S2Point zero_pt(zero, zero, zero);
   S2Point minus_zero_pt(minus_zero, minus_zero, minus_zero);
 
--- a/src/third_party/s2/s2cellid_test.cc
+++ b/src/third_party/s2/s2cellid_test.cc
@@ -10,7 +10,7 @@ using std::reverse;
 
 #include <cstdio>
 #include <hash_map>
-using __gnu_cxx::hash_map;
+#define my_hash_map __gnu_cxx::hash_map
 
 #include <sstream>
 #include <vector>
@@ -170,7 +170,7 @@ TEST(S2CellId, Tokens) {
 static const int kMaxExpandLevel = 3;
 
 static void ExpandCell(S2CellId const& parent, vector<S2CellId>* cells,
-                       hash_map<S2CellId, S2CellId>* parent_map) {
+                       my_hash_map<S2CellId, S2CellId>* parent_map) {
   cells->push_back(parent);
   if (parent.level() == kMaxExpandLevel) return;
   int i, j, orientation;
@@ -194,7 +194,7 @@ static void ExpandCell(S2CellId const& parent, vector<S2CellId>* cells,
 
 TEST(S2CellId, Containment) {
   // Test contains() and intersects().
-  hash_map<S2CellId, S2CellId> parent_map;
+  my_hash_map<S2CellId, S2CellId> parent_map;
   vector<S2CellId> cells;
   for (int face = 0; face < 6; ++face) {
     ExpandCell(S2CellId::FromFacePosLevel(face, 0, 0), &cells, &parent_map);
--- a/src/third_party/s2/s2loop.cc
+++ b/src/third_party/s2/s2loop.cc
@@ -120,7 +120,7 @@ bool S2Loop::IsValid(string* err) const {
     }
   }
   // Loops are not allowed to have any duplicate vertices.
-  hash_map<S2Point, int> vmap;
+  my_hash_map<S2Point, int> vmap;
   for (int i = 0; i < num_vertices(); ++i) {
     if (!vmap.insert(make_pair(vertex(i), i)).second) {
       VLOG(2) << "Duplicate vertices: " << vmap[vertex(i)] << " and " << i;
--- a/src/third_party/s2/s2polygon.cc
+++ b/src/third_party/s2/s2polygon.cc
@@ -117,7 +117,7 @@ HASH_NAMESPACE_END
 bool S2Polygon::IsValid(const vector<S2Loop*>& loops, string* err) {
   // If a loop contains an edge AB, then no other loop may contain AB or BA.
   if (loops.size() > 1) {
-    hash_map<S2PointPair, pair<int, int> > edges;
+    my_hash_map<S2PointPair, pair<int, int> > edges;
     for (size_t i = 0; i < loops.size(); ++i) {
       S2Loop* lp = loops[i];
       for (int j = 0; j < lp->num_vertices(); ++j) {
--- a/src/third_party/s2/s2polygonbuilder.cc
+++ b/src/third_party/s2/s2polygonbuilder.cc
@@ -175,7 +175,7 @@ S2Loop* S2PolygonBuilder::AssembleLoop(S2Point const& v0, S2Point const& v1,
   // This ensures that only CCW loops are constructed when possible.
 
   vector<S2Point> path;          // The path so far.
-  hash_map<S2Point, int> index;  // Maps a vertex to its index in "path".
+  my_hash_map<S2Point, int> index;  // Maps a vertex to its index in "path".
   path.push_back(v0);
   path.push_back(v1);
   index[v1] = 1;
@@ -361,7 +361,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
 
   // First, we build the set of all the distinct vertices in the input.
   // We need to include the source and destination of every edge.
-  hash_set<S2Point> vertices;
+  my_hash_set<S2Point> vertices;
   for (EdgeSet::const_iterator i = edges_->begin(); i != edges_->end(); ++i) {
     vertices.insert(i->first);
     VertexSet const& vset = i->second;
@@ -370,7 +370,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
   }
 
   // Build a spatial index containing all the distinct vertices.
-  for (hash_set<S2Point>::const_iterator i = vertices.begin();
+  for (my_hash_set<S2Point>::const_iterator i = vertices.begin();
        i != vertices.end(); ++i) {
     index->Insert(*i);
   }
@@ -378,7 +378,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
   // Next, we loop through all the vertices and attempt to grow a maximial
   // mergeable group starting from each vertex.
   vector<S2Point> frontier, mergeable;
-  for (hash_set<S2Point>::const_iterator vstart = vertices.begin();
+  for (my_hash_set<S2Point>::const_iterator vstart = vertices.begin();
       vstart != vertices.end(); ++vstart) {
     // Skip any vertices that have already been merged with another vertex.
     if (merge_map->find(*vstart) != merge_map->end()) continue;
--- a/src/third_party/s2/s2polygonbuilder.h
+++ b/src/third_party/s2/s2polygonbuilder.h
@@ -262,7 +262,7 @@ class S2PolygonBuilder {
   // current position to a new position, and also returns a spatial index
   // containing all of the vertices that do not need to be moved.
   class PointIndex;
-  typedef hash_map<S2Point, S2Point> MergeMap;
+  typedef my_hash_map<S2Point, S2Point> MergeMap;
   void BuildMergeMap(PointIndex* index, MergeMap* merge_map);
 
   // Moves a set of vertices from old to new positions.
@@ -282,7 +282,7 @@ class S2PolygonBuilder {
   // once.  We could have also used a multiset<pair<S2Point, S2Point> >,
   // but this representation is a bit more convenient.
   typedef multiset<S2Point> VertexSet;
-  typedef hash_map<S2Point, VertexSet> EdgeSet;
+  typedef my_hash_map<S2Point, VertexSet> EdgeSet;
   scoped_ptr<EdgeSet> edges_;
 
   // Unique collection of the starting (first) vertex of all edges,
--- a/src/third_party/s2/s2regioncoverer.cc
+++ b/src/third_party/s2/s2regioncoverer.cc
@@ -321,7 +321,7 @@ void S2RegionCoverer::GetInteriorCellUnion(S2Region const& region,
 
 void S2RegionCoverer::FloodFill(
     S2Region const& region, S2CellId const& start, vector<S2CellId>* output) {
-  hash_set<S2CellId> all;
+  my_hash_set<S2CellId> all;
   vector<S2CellId> frontier;
   output->clear();
   all.insert(start);
--- a/src/third_party/s2/s2regioncoverer_test.cc
+++ b/src/third_party/s2/s2regioncoverer_test.cc
@@ -11,7 +11,7 @@ using std::swap;
 using std::reverse;
 
 #include <hash_map>
-using __gnu_cxx::hash_map;
+#define my_hash_map __gnu_cxx::hash_map
 
 #include <queue>
 using std::priority_queue;
@@ -65,7 +65,7 @@ static void CheckCovering(S2RegionCoverer const& coverer,
                           vector<S2CellId> const& covering,
                           bool interior) {
   // Keep track of how many cells have the same coverer.min_level() ancestor.
-  hash_map<S2CellId, int> min_level_cells;
+  my_hash_map<S2CellId, int> min_level_cells;
   for (int i = 0; i < covering.size(); ++i) {
     int level = covering[i].level();
     EXPECT_GE(level, coverer.min_level());
@@ -76,7 +76,7 @@ static void CheckCovering(S2RegionCoverer const& coverer,
   if (covering.size() > coverer.max_cells()) {
     // If the covering has more than the requested number of cells, then check
     // that the cell count cannot be reduced by using the parent of some cell.
-    for (hash_map<S2CellId, int>::const_iterator i = min_level_cells.begin();
+    for (my_hash_map<S2CellId, int>::const_iterator i = min_level_cells.begin();
          i != min_level_cells.end(); ++i) {
       EXPECT_EQ(i->second, 1);
     }
--- a/src/third_party/s2/strings/split.cc
+++ b/src/third_party/s2/strings/split.cc
@@ -156,7 +156,7 @@ struct simple_insert_iterator {
 // SplitStringToIterator{Using|AllowEmpty}().
 template <typename T>
 struct simple_hash_map_iterator {
-  typedef hash_map<T, T> hashmap;
+  typedef my_hash_map<T, T> hashmap;
   hashmap* t;
   bool even;
   typename hashmap::iterator curr;
@@ -246,8 +246,8 @@ void SplitStringAllowEmpty(const string& full, const char* delim,
 }
 
 void SplitStringToHashsetAllowEmpty(const string& full, const char* delim,
-                                    hash_set<string>* result) {
-  simple_insert_iterator<hash_set<string> > it(result);
+                                    my_hash_set<string>* result) {
+  simple_insert_iterator<my_hash_set<string> > it(result);
   SplitStringToIteratorAllowEmpty(full, delim, 0, it);
 }
 
@@ -258,7 +258,7 @@ void SplitStringToSetAllowEmpty(const string& full, const char* delim,
 }
 
 void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
-                                    hash_map<string, string>* result) {
+                                    my_hash_map<string, string>* result) {
   simple_hash_map_iterator<string> it(result);
   SplitStringToIteratorAllowEmpty(full, delim, 0, it);
 }
@@ -352,8 +352,8 @@ void SplitStringUsing(const string& full,
 }
 
 void SplitStringToHashsetUsing(const string& full, const char* delim,
-                               hash_set<string>* result) {
-  simple_insert_iterator<hash_set<string> > it(result);
+                               my_hash_set<string>* result) {
+  simple_insert_iterator<my_hash_set<string> > it(result);
   SplitStringToIteratorUsing(full, delim, it);
 }
 
@@ -364,7 +364,7 @@ void SplitStringToSetUsing(const string& full, const char* delim,
 }
 
 void SplitStringToHashmapUsing(const string& full, const char* delim,
-                               hash_map<string, string>* result) {
+                               my_hash_map<string, string>* result) {
   simple_hash_map_iterator<string> it(result);
   SplitStringToIteratorUsing(full, delim, it);
 }
--- a/src/third_party/s2/strings/split.h
+++ b/src/third_party/s2/strings/split.h
@@ -41,7 +41,7 @@ using namespace std;
 void SplitStringAllowEmpty(const string& full, const char* delim,
                            vector<string>* res);
 void SplitStringToHashsetAllowEmpty(const string& full, const char* delim,
-                                    hash_set<string>* res);
+                                    my_hash_set<string>* res);
 void SplitStringToSetAllowEmpty(const string& full, const char* delim,
                                 set<string>* res);
 // The even-positioned (0-based) components become the keys for the
@@ -50,7 +50,7 @@ void SplitStringToSetAllowEmpty(const string& full, const char* delim,
 // if the key was already present in the hash table, or will be the
 // empty string if the key is a newly inserted key.
 void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
-                                    hash_map<string, string>* result);
+                                    my_hash_map<string, string>* result);
 
 // ----------------------------------------------------------------------
 // SplitStringUsing()
@@ -66,7 +66,7 @@ void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
 void SplitStringUsing(const string& full, const char* delim,
                       vector<string>* res);
 void SplitStringToHashsetUsing(const string& full, const char* delim,
-                               hash_set<string>* res);
+                               my_hash_set<string>* res);
 void SplitStringToSetUsing(const string& full, const char* delim,
                            set<string>* res);
 // The even-positioned (0-based) components become the keys for the
@@ -75,7 +75,7 @@ void SplitStringToSetUsing(const string& full, const char* delim,
 // if the key was already present in the hash table, or will be the
 // empty string if the key is a newly inserted key.
 void SplitStringToHashmapUsing(const string& full, const char* delim,
-                               hash_map<string, string>* result);
+                               my_hash_map<string, string>* result);
 
 // ----------------------------------------------------------------------
 // SplitOneIntToken()