https://bugs.gentoo.org/887037

Work around https://github.com/boostorg/container/commit/99091420ae553b27345e04279fd19fe24fb684c1,
which shipped in Boost 1.81: the vendored s2 sources define hash_map/hash_set as
preprocessor macros, and those macros mangle the identifiers that Boost's container
headers now use. Rename the macros to my_hash_map/my_hash_set and update every
s2 file that relies on them.
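
A minimal, standalone sketch of the renaming approach (illustrative only; the
thirdparty namespace below is a made-up stand-in for the Boost declarations,
not MongoDB's or Boost's real headers):

    // Renamed macros, as hash.h defines them after this patch.
    #include <string>
    #include <unordered_map>
    #include <unordered_set>

    #define my_hash_map std::unordered_map   // was: #define hash_map std::unordered_map
    #define my_hash_set std::unordered_set   // was: #define hash_set std::unordered_set

    // A header included later may now declare its own hash_map/hash_set
    // (as Boost 1.81's container headers do) without the preprocessor
    // rewriting those names into std::unordered_* and breaking the build.
    namespace thirdparty {
    template <class K, class V> class hash_map {};
    template <class K> class hash_set {};
    }  // namespace thirdparty

    int main() {
      my_hash_map<std::string, int> counts;      // expands to std::unordered_map
      my_hash_set<int> seen;                     // expands to std::unordered_set
      counts["boost"] = 1;
      seen.insert(1881);
      thirdparty::hash_map<int, int> untouched;  // identifier survives intact
      (void)untouched;
      return 0;
    }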

Upstream s2 (as in the real upstream, not MongoDB) has diverged substantially
from the vendored copy.
--- a/src/third_party/s2/base/stl_decl_msvc.h
+++ b/src/third_party/s2/base/stl_decl_msvc.h
@@ -118,8 +118,8 @@ namespace msvchash {
   class hash_multimap;
 }  // end namespace  msvchash
 
-using msvchash::hash_set;
-using msvchash::hash_map;
+// hash_set/hash_map are no longer imported here; s2 refers to them through the
+// my_hash_set/my_hash_map macros in hash.h (renamed to avoid clashing with Boost >= 1.81).
 using msvchash::hash;
 using msvchash::hash_multimap;
 using msvchash::hash_multiset;
--- a/src/third_party/s2/base/stl_decl_osx.h
+++ b/src/third_party/s2/base/stl_decl_osx.h
@@ -68,8 +68,8 @@ using std::string;
 
 using namespace std;
 using __gnu_cxx::hash;
-using __gnu_cxx::hash_set;
-using __gnu_cxx::hash_map;
+// __gnu_cxx::hash_set/hash_map are no longer imported; s2 refers to them through
+// the my_hash_set/my_hash_map macros in hash.h (renamed to avoid Boost >= 1.81 clashes).
 using __gnu_cxx::select1st;
 
 /* On Linux (and gdrive on OSX), this comes from places like
--- a/src/third_party/s2/hash.h
+++ b/src/third_party/s2/hash.h
@@ -2,10 +2,10 @@
 #define THIRD_PARTY_S2_HASH_H_
 
 #include <unordered_map>
-#define hash_map std::unordered_map
+#define my_hash_map std::unordered_map
 
 #include <unordered_set>
-#define hash_set std::unordered_set
+#define my_hash_set std::unordered_set
 
 #define HASH_NAMESPACE_START namespace std {
 #define HASH_NAMESPACE_END }
--- a/src/third_party/s2/s2_test.cc
+++ b/src/third_party/s2/s2_test.cc
@@ -10,7 +10,7 @@ using std::reverse;
 
 #include <hash_set>
 #include <hash_map>
-using __gnu_cxx::hash_set;
+// __gnu_cxx::hash_set is no longer used; see the my_hash_set macro in hash.h.
 
 #include "s2.h"
 #include "base/logging.h"
@@ -709,8 +709,8 @@ TEST(S2, Frames) {
 #if 0
 TEST(S2, S2PointHashSpreads) {
   int kTestPoints = 1 << 16;
-  hash_set<size_t> set;
-  hash_set<S2Point> points;
+  my_hash_set<size_t> set;
+  my_hash_set<S2Point> points;
   hash<S2Point> hasher;
   S2Point base = S2Point(1, 1, 1);
   for (int i = 0; i < kTestPoints; ++i) {
@@ -733,7 +733,7 @@ TEST(S2, S2PointHashCollapsesZero) {
   double minus_zero = -zero;
   EXPECT_NE(*reinterpret_cast<uint64 const*>(&zero),
             *reinterpret_cast<uint64 const*>(&minus_zero));
-  hash_map<S2Point, int> map;
+  my_hash_map<S2Point, int> map;
   S2Point zero_pt(zero, zero, zero);
   S2Point minus_zero_pt(minus_zero, minus_zero, minus_zero);
 
--- a/src/third_party/s2/s2cellid_test.cc
+++ b/src/third_party/s2/s2cellid_test.cc
@@ -10,7 +10,7 @@ using std::reverse;
 
 #include <cstdio>
 #include <hash_map>
-using __gnu_cxx::hash_map;
+// __gnu_cxx::hash_map is no longer used; see the my_hash_map macro in hash.h.
 
 #include <sstream>
 #include <vector>
@@ -170,7 +170,7 @@ TEST(S2CellId, Tokens) {
 static const int kMaxExpandLevel = 3;
 
 static void ExpandCell(S2CellId const& parent, vector<S2CellId>* cells,
-                       hash_map<S2CellId, S2CellId>* parent_map) {
+                       my_hash_map<S2CellId, S2CellId>* parent_map) {
   cells->push_back(parent);
   if (parent.level() == kMaxExpandLevel) return;
   int i, j, orientation;
@@ -194,7 +194,7 @@ static void ExpandCell(S2CellId const& parent, vector<S2CellId>* cells,
 
 TEST(S2CellId, Containment) {
   // Test contains() and intersects().
-  hash_map<S2CellId, S2CellId> parent_map;
+  my_hash_map<S2CellId, S2CellId> parent_map;
   vector<S2CellId> cells;
   for (int face = 0; face < 6; ++face) {
     ExpandCell(S2CellId::FromFacePosLevel(face, 0, 0), &cells, &parent_map);
--- a/src/third_party/s2/s2loop.cc
+++ b/src/third_party/s2/s2loop.cc
@@ -120,7 +120,7 @@ bool S2Loop::IsValid(string* err) const {
     }
   }
   // Loops are not allowed to have any duplicate vertices.
-  hash_map<S2Point, int> vmap;
+  my_hash_map<S2Point, int> vmap;
   for (int i = 0; i < num_vertices(); ++i) {
     if (!vmap.insert(make_pair(vertex(i), i)).second) {
       VLOG(2) << "Duplicate vertices: " << vmap[vertex(i)] << " and " << i;
--- a/src/third_party/s2/s2polygon.cc
+++ b/src/third_party/s2/s2polygon.cc
@@ -117,7 +117,7 @@ HASH_NAMESPACE_END
 bool S2Polygon::IsValid(const vector<S2Loop*>& loops, string* err) {
   // If a loop contains an edge AB, then no other loop may contain AB or BA.
   if (loops.size() > 1) {
-    hash_map<S2PointPair, pair<int, int> > edges;
+    my_hash_map<S2PointPair, pair<int, int> > edges;
     for (size_t i = 0; i < loops.size(); ++i) {
       S2Loop* lp = loops[i];
       for (int j = 0; j < lp->num_vertices(); ++j) {
--- a/src/third_party/s2/s2polygonbuilder.cc
+++ b/src/third_party/s2/s2polygonbuilder.cc
@@ -175,7 +175,7 @@ S2Loop* S2PolygonBuilder::AssembleLoop(S2Point const& v0, S2Point const& v1,
   // This ensures that only CCW loops are constructed when possible.
 
   vector<S2Point> path;          // The path so far.
-  hash_map<S2Point, int> index;  // Maps a vertex to its index in "path".
+  my_hash_map<S2Point, int> index;  // Maps a vertex to its index in "path".
   path.push_back(v0);
   path.push_back(v1);
   index[v1] = 1;
@@ -361,7 +361,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
 
   // First, we build the set of all the distinct vertices in the input.
   // We need to include the source and destination of every edge.
-  hash_set<S2Point> vertices;
+  my_hash_set<S2Point> vertices;
   for (EdgeSet::const_iterator i = edges_->begin(); i != edges_->end(); ++i) {
     vertices.insert(i->first);
     VertexSet const& vset = i->second;
@@ -370,7 +370,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
   }
 
   // Build a spatial index containing all the distinct vertices.
-  for (hash_set<S2Point>::const_iterator i = vertices.begin();
+  for (my_hash_set<S2Point>::const_iterator i = vertices.begin();
        i != vertices.end(); ++i) {
     index->Insert(*i);
   }
@@ -378,7 +378,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
   // Next, we loop through all the vertices and attempt to grow a maximial
   // mergeable group starting from each vertex.
   vector<S2Point> frontier, mergeable;
-  for (hash_set<S2Point>::const_iterator vstart = vertices.begin();
+  for (my_hash_set<S2Point>::const_iterator vstart = vertices.begin();
        vstart != vertices.end(); ++vstart) {
     // Skip any vertices that have already been merged with another vertex.
     if (merge_map->find(*vstart) != merge_map->end()) continue;
--- a/src/third_party/s2/s2polygonbuilder.h
+++ b/src/third_party/s2/s2polygonbuilder.h
@@ -262,7 +262,7 @@ class S2PolygonBuilder {
   // current position to a new position, and also returns a spatial index
   // containing all of the vertices that do not need to be moved.
   class PointIndex;
-  typedef hash_map<S2Point, S2Point> MergeMap;
+  typedef my_hash_map<S2Point, S2Point> MergeMap;
   void BuildMergeMap(PointIndex* index, MergeMap* merge_map);
 
   // Moves a set of vertices from old to new positions.
@@ -282,7 +282,7 @@ class S2PolygonBuilder {
   // once.  We could have also used a multiset<pair<S2Point, S2Point> >,
   // but this representation is a bit more convenient.
   typedef multiset<S2Point> VertexSet;
-  typedef hash_map<S2Point, VertexSet> EdgeSet;
+  typedef my_hash_map<S2Point, VertexSet> EdgeSet;
   scoped_ptr<EdgeSet> edges_;
 
   // Unique collection of the starting (first) vertex of all edges,
--- a/src/third_party/s2/s2regioncoverer.cc
+++ b/src/third_party/s2/s2regioncoverer.cc
@@ -321,7 +321,7 @@ void S2RegionCoverer::GetInteriorCellUnion(S2Region const& region,
 
 void S2RegionCoverer::FloodFill(
     S2Region const& region, S2CellId const& start, vector<S2CellId>* output) {
-  hash_set<S2CellId> all;
+  my_hash_set<S2CellId> all;
   vector<S2CellId> frontier;
   output->clear();
   all.insert(start);
--- a/src/third_party/s2/s2regioncoverer_test.cc
+++ b/src/third_party/s2/s2regioncoverer_test.cc
@@ -11,7 +11,7 @@ using std::swap;
 using std::reverse;
 
 #include <hash_map>
-using __gnu_cxx::hash_map;
+// __gnu_cxx::hash_map is no longer used; see the my_hash_map macro in hash.h.
 
 #include <queue>
 using std::priority_queue;
@@ -65,7 +65,7 @@ static void CheckCovering(S2RegionCoverer const& coverer,
                           vector<S2CellId> const& covering,
                           bool interior) {
   // Keep track of how many cells have the same coverer.min_level() ancestor.
-  hash_map<S2CellId, int> min_level_cells;
+  my_hash_map<S2CellId, int> min_level_cells;
   for (int i = 0; i < covering.size(); ++i) {
     int level = covering[i].level();
     EXPECT_GE(level, coverer.min_level());
@@ -76,7 +76,7 @@ static void CheckCovering(S2RegionCoverer const& coverer,
   if (covering.size() > coverer.max_cells()) {
     // If the covering has more than the requested number of cells, then check
     // that the cell count cannot be reduced by using the parent of some cell.
-    for (hash_map<S2CellId, int>::const_iterator i = min_level_cells.begin();
+    for (my_hash_map<S2CellId, int>::const_iterator i = min_level_cells.begin();
          i != min_level_cells.end(); ++i) {
       EXPECT_EQ(i->second, 1);
     }
--- a/src/third_party/s2/strings/split.cc
+++ b/src/third_party/s2/strings/split.cc
@@ -156,7 +156,7 @@ struct simple_insert_iterator {
 // SplitStringToIterator{Using|AllowEmpty}().
 template <typename T>
 struct simple_hash_map_iterator {
-  typedef hash_map<T, T> hashmap;
+  typedef my_hash_map<T, T> hashmap;
   hashmap* t;
   bool even;
   typename hashmap::iterator curr;
@@ -246,8 +246,8 @@ void SplitStringAllowEmpty(const string& full, const char* delim,
 }
 
 void SplitStringToHashsetAllowEmpty(const string& full, const char* delim,
-                                    hash_set<string>* result) {
-  simple_insert_iterator<hash_set<string> > it(result);
+                                    my_hash_set<string>* result) {
+  simple_insert_iterator<my_hash_set<string> > it(result);
   SplitStringToIteratorAllowEmpty(full, delim, 0, it);
 }
 
@@ -258,7 +258,7 @@ void SplitStringToSetAllowEmpty(const string& full, const char* delim,
 }
 
 void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
-                                    hash_map<string, string>* result) {
+                                    my_hash_map<string, string>* result) {
   simple_hash_map_iterator<string> it(result);
   SplitStringToIteratorAllowEmpty(full, delim, 0, it);
 }
@@ -352,8 +352,8 @@ void SplitStringUsing(const string& full,
 }
 
 void SplitStringToHashsetUsing(const string& full, const char* delim,
-                               hash_set<string>* result) {
-  simple_insert_iterator<hash_set<string> > it(result);
+                               my_hash_set<string>* result) {
+  simple_insert_iterator<my_hash_set<string> > it(result);
   SplitStringToIteratorUsing(full, delim, it);
 }
 
@@ -364,7 +364,7 @@ void SplitStringToSetUsing(const string& full, const char* delim,
 }
 
 void SplitStringToHashmapUsing(const string& full, const char* delim,
-                               hash_map<string, string>* result) {
+                               my_hash_map<string, string>* result) {
   simple_hash_map_iterator<string> it(result);
   SplitStringToIteratorUsing(full, delim, it);
 }
--- a/src/third_party/s2/strings/split.h
+++ b/src/third_party/s2/strings/split.h
@@ -41,7 +41,7 @@ using namespace std;
 void SplitStringAllowEmpty(const string& full, const char* delim,
                            vector<string>* res);
 void SplitStringToHashsetAllowEmpty(const string& full, const char* delim,
-                                    hash_set<string>* res);
+                                    my_hash_set<string>* res);
 void SplitStringToSetAllowEmpty(const string& full, const char* delim,
                                 set<string>* res);
 // The even-positioned (0-based) components become the keys for the
@@ -50,7 +50,7 @@ void SplitStringToSetAllowEmpty(const string& full, const char* delim,
 // if the key was already present in the hash table, or will be the
 // empty string if the key is a newly inserted key.
 void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
-                                    hash_map<string, string>* result);
+                                    my_hash_map<string, string>* result);
 
 // ----------------------------------------------------------------------
 // SplitStringUsing()
@@ -66,7 +66,7 @@ void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
 void SplitStringUsing(const string& full, const char* delim,
                       vector<string>* res);
 void SplitStringToHashsetUsing(const string& full, const char* delim,
-                               hash_set<string>* res);
+                               my_hash_set<string>* res);
 void SplitStringToSetUsing(const string& full, const char* delim,
                            set<string>* res);
 // The even-positioned (0-based) components become the keys for the
@@ -75,7 +75,7 @@ void SplitStringToSetUsing(const string& full, const char* delim,
 // if the key was already present in the hash table, or will be the
 // empty string if the key is a newly inserted key.
 void SplitStringToHashmapUsing(const string& full, const char* delim,
-                               hash_map<string, string>* result);
+                               my_hash_map<string, string>* result);
 
 // ----------------------------------------------------------------------
 // SplitOneIntToken()