001// License: GPL. For details, see LICENSE file.
002package org.openstreetmap.josm.data.validation.tests;
003
004import static org.openstreetmap.josm.data.validation.tests.CrossingWays.HIGHWAY;
005import static org.openstreetmap.josm.data.validation.tests.CrossingWays.RAILWAY;
006import static org.openstreetmap.josm.data.validation.tests.CrossingWays.WATERWAY;
007import static org.openstreetmap.josm.tools.I18n.tr;
008
009import java.util.ArrayList;
010import java.util.HashMap;
011import java.util.Iterator;
012import java.util.LinkedHashSet;
013import java.util.List;
014import java.util.Map;
015import java.util.Map.Entry;
016import java.util.Objects;
017import java.util.Set;
018import java.util.stream.Collectors;
019import java.util.stream.Stream;
020
021import org.openstreetmap.josm.actions.MergeNodesAction;
022import org.openstreetmap.josm.command.Command;
023import org.openstreetmap.josm.data.coor.LatLon;
024import org.openstreetmap.josm.data.osm.Hash;
025import org.openstreetmap.josm.data.osm.Node;
026import org.openstreetmap.josm.data.osm.OsmPrimitive;
027import org.openstreetmap.josm.data.osm.Storage;
028import org.openstreetmap.josm.data.osm.Way;
029import org.openstreetmap.josm.data.validation.Severity;
030import org.openstreetmap.josm.data.validation.Test;
031import org.openstreetmap.josm.data.validation.TestError;
032import org.openstreetmap.josm.gui.progress.ProgressMonitor;
033import org.openstreetmap.josm.spi.preferences.Config;
034import org.openstreetmap.josm.tools.MultiMap;
035
036/**
037 * Tests if there are duplicate nodes
038 *
039 * @author frsantos
040 */
041public class DuplicateNode extends Test {
042
043    protected static class NodeHash implements Hash<Object, Object> {
044
045        private final double precision = Config.getPref().getDouble("validator.duplicatenodes.precision", 0.);
046
047        /**
048         * Returns the rounded coordinated according to {@link #precision}
049         * @see LatLon#roundToOsmPrecision
050         */
051        private LatLon roundCoord(LatLon coor) {
052            return new LatLon(
053                    Math.round(coor.lat() / precision) * precision,
054                    Math.round(coor.lon() / precision) * precision
055                    );
056        }
057
058        @SuppressWarnings("unchecked")
059        protected LatLon getLatLon(Object o) {
060            if (o instanceof Node) {
061                LatLon coor = ((Node) o).getCoor();
062                if (coor == null)
063                    return null;
064                if (precision == 0)
065                    return coor.getRoundedToOsmPrecision();
066                return roundCoord(coor);
067            } else if (o instanceof List<?>) {
068                LatLon coor = ((List<Node>) o).get(0).getCoor();
069                if (coor == null)
070                    return null;
071                if (precision == 0)
072                    return coor.getRoundedToOsmPrecision();
073                return roundCoord(coor);
074            } else
075                throw new AssertionError();
076        }
077
078        @Override
079        public boolean equals(Object k, Object t) {
080            LatLon coorK = getLatLon(k);
081            LatLon coorT = getLatLon(t);
082            return coorK == coorT || (coorK != null && coorT != null && coorK.equals(coorT));
083        }
084
085        @Override
086        public int getHashCode(Object k) {
087            LatLon coorK = getLatLon(k);
088            return coorK == null ? 0 : coorK.hashCode();
089        }
090    }
091
    /** Error code: nodes at the same position with differing tag sets */
    protected static final int DUPLICATE_NODE = 1;
    /** Error code: identically tagged nodes whose parent ways span several feature types */
    protected static final int DUPLICATE_NODE_MIXED = 2;
    /** Error code: identically tagged nodes not matching any known feature type */
    protected static final int DUPLICATE_NODE_OTHER = 3;
    /** Error code: duplicated nodes on building ways */
    protected static final int DUPLICATE_NODE_BUILDING = 10;
    /** Error code: duplicated nodes on boundary ways */
    protected static final int DUPLICATE_NODE_BOUNDARY = 11;
    /** Error code: duplicated nodes on highway ways */
    protected static final int DUPLICATE_NODE_HIGHWAY = 12;
    /** Error code: duplicated nodes on landuse ways */
    protected static final int DUPLICATE_NODE_LANDUSE = 13;
    /** Error code: duplicated nodes on natural ways */
    protected static final int DUPLICATE_NODE_NATURAL = 14;
    /** Error code: duplicated nodes on power ways */
    protected static final int DUPLICATE_NODE_POWER = 15;
    /** Error code: duplicated nodes on railway ways */
    protected static final int DUPLICATE_NODE_RAILWAY = 16;
    /** Error code: duplicated nodes on waterway ways */
    protected static final int DUPLICATE_NODE_WATERWAY = 17;

    /** Way keys checked to classify duplicated nodes; "none" flags parent ways carrying none of the other keys */
    private static final String[] TYPES = {
            "none", HIGHWAY, RAILWAY, WATERWAY, "boundary", "power", "natural", "landuse", "building"};

    /** The map of potential duplicates.
     *
     * If there is exactly one node for a given pos, the map includes a pair &lt;pos, Node&gt;.
     * If there are multiple nodes for a given pos, the map includes a pair
     * &lt;pos, List&lt;Node&gt;&gt;
     */
    private Storage<Object> potentialDuplicates;
114
115    /**
116     * Constructor
117     */
118    public DuplicateNode() {
119        super(tr("Duplicated nodes"),
120                tr("This test checks that there are no nodes at the very same location."));
121    }
122
    @Override
    public void startTest(ProgressMonitor monitor) {
        super.startTest(monitor);
        // (re)create the position-keyed storage; NodeHash groups nodes by rounded coordinates
        potentialDuplicates = new Storage<>(new NodeHash());
    }
128
129    @SuppressWarnings("unchecked")
130    @Override
131    public void endTest() {
132        for (Object v: potentialDuplicates) {
133            if (v instanceof Node) {
134                // just one node at this position. Nothing to report as error
135                continue;
136            }
137
138            // multiple nodes at the same position -> check if all nodes have a distinct elevation
139            List<Node> nodes = (List<Node>) v;
140            Set<String> eles = nodes.stream()
141                    .map(n -> n.get("ele"))
142                    .filter(Objects::nonNull)
143                    .collect(Collectors.toSet());
144            if (eles.size() == nodes.size()) {
145                // All nodes at this position have a distinct elevation.
146                // This is normal in some particular cases (for example, geodesic points in France)
147                // Do not report this as an error
148                continue;
149            }
150
151            // report errors
152            errors.addAll(buildTestErrors(this, nodes));
153        }
154        super.endTest();
155        potentialDuplicates = null;
156    }
157
158    /**
159     * Returns the list of "duplicate nodes" errors for the given selection of node and parent test
160     * @param parentTest The parent test of returned errors
161     * @param nodes The nodes selection to look into
162     * @return the list of "duplicate nodes" errors
163     */
164    public List<TestError> buildTestErrors(Test parentTest, List<Node> nodes) {
165        List<TestError> errors = new ArrayList<>();
166
167        MultiMap<Map<String, String>, Node> mm = new MultiMap<>();
168        for (Node n: nodes) {
169            mm.put(n.getKeys(), n);
170        }
171
172        Map<String, Boolean> typeMap = new HashMap<>();
173
174        // check whether we have multiple nodes at the same position with the same tag set
175        for (Iterator<Map<String, String>> it = mm.keySet().iterator(); it.hasNext();) {
176            Set<Node> primitives = mm.get(it.next());
177            if (primitives.size() > 1) {
178
179                for (String type: TYPES) {
180                    typeMap.put(type, Boolean.FALSE);
181                }
182
183                for (Node n : primitives) {
184                    for (Way w: n.getParentWays()) {
185                        boolean typed = false;
186                        Map<String, String> keys = w.getKeys();
187                        for (Entry<String, Boolean> e : typeMap.entrySet()) {
188                            if (keys.containsKey(e.getKey())) {
189                                e.setValue(Boolean.TRUE);
190                                typed = true;
191                            }
192                        }
193                        if (!typed) {
194                            typeMap.put("none", Boolean.TRUE);
195                        }
196                    }
197                }
198
199                long nbType = typeMap.entrySet().stream().filter(Entry::getValue).count();
200                final TestError.Builder builder;
201                if (nbType > 1) {
202                    builder = TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_MIXED)
203                            .message(tr("Mixed type duplicated nodes"));
204                } else if (Boolean.TRUE.equals(typeMap.get(HIGHWAY))) {
205                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_HIGHWAY)
206                            .message(tr("Highway duplicated nodes"));
207                } else if (Boolean.TRUE.equals(typeMap.get(RAILWAY))) {
208                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_RAILWAY)
209                            .message(tr("Railway duplicated nodes"));
210                } else if (Boolean.TRUE.equals(typeMap.get(WATERWAY))) {
211                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_WATERWAY)
212                            .message(tr("Waterway duplicated nodes"));
213                } else if (Boolean.TRUE.equals(typeMap.get("boundary"))) {
214                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BOUNDARY)
215                            .message(tr("Boundary duplicated nodes"));
216                } else if (Boolean.TRUE.equals(typeMap.get("power"))) {
217                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_POWER)
218                            .message(tr("Power duplicated nodes"));
219                } else if (Boolean.TRUE.equals(typeMap.get("natural"))) {
220                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_NATURAL)
221                            .message(tr("Natural duplicated nodes"));
222                } else if (Boolean.TRUE.equals(typeMap.get("building"))) {
223                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BUILDING)
224                            .message(tr("Building duplicated nodes"));
225                } else if (Boolean.TRUE.equals(typeMap.get("landuse"))) {
226                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_LANDUSE)
227                            .message(tr("Landuse duplicated nodes"));
228                } else {
229                    builder = TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_OTHER)
230                            .message(tr("Other duplicated nodes"));
231                }
232                errors.add(builder.primitives(primitives).build());
233                it.remove();
234            }
235        }
236
237        // check whether we have multiple nodes at the same position with differing tag sets
238        if (!mm.isEmpty()) {
239            List<OsmPrimitive> duplicates = new ArrayList<>();
240            for (Set<Node> l: mm.values()) {
241                duplicates.addAll(l);
242            }
243            if (duplicates.size() > 1) {
244                errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE)
245                        .message(tr("Nodes at same position"))
246                        .primitives(duplicates)
247                        .build());
248            }
249        }
250        return errors;
251    }
252
253    @SuppressWarnings("unchecked")
254    @Override
255    public void visit(Node n) {
256        if (n.isUsable()) {
257            Object old = potentialDuplicates.get(n);
258            if (old == null) {
259                // in most cases there is just one node at a given position. We
260                // avoid to create an extra object and add remember the node
261                // itself at this position
262                potentialDuplicates.put(n);
263            } else if (old instanceof Node) {
264                // we have an additional node at the same position. Create an extra
265                // object to keep track of the nodes at this position.
266                //
267                potentialDuplicates.put(Stream.of((Node) old, n).collect(Collectors.toList()));
268            } else {
269                // we have more  than two nodes at the same position.
270                //
271                ((List<Node>) old).add(n);
272            }
273        }
274    }
275
276    /**
277     * Merge the nodes into one.
278     * Copied from UtilsPlugin.MergePointsAction
279     */
280    @Override
281    public Command fixError(TestError testError) {
282        if (!isFixable(testError))
283            return null;
284        final Set<Node> nodes = testError.primitives(Node.class)
285                // Filter nodes that have already been deleted (see #5764 and #5773)
286                .filter(n -> !n.isDeleted())
287                .collect(Collectors.toCollection(LinkedHashSet::new));
288
289        // Use first existing node or first node if all nodes are new
290        Node target = nodes.stream()
291                .filter(n -> !n.isNew())
292                .findFirst()
293                .orElseGet(() -> nodes.iterator().next());
294
295        return MergeNodesAction.mergeNodes(nodes, target);
296    }
297
298    @Override
299    public boolean isFixable(TestError testError) {
300        if (!(testError.getTester() instanceof DuplicateNode)) return false;
301        // never merge nodes with different tags.
302        if (testError.getCode() == DUPLICATE_NODE) return false;
303        // cannot merge nodes outside download area
304        return testError.getPrimitives().stream().filter(p -> !p.isDeleted()).count() > 1
305                && Command.checkOutlyingOrIncompleteOperation(testError.getPrimitives(), null) == Command.IS_OK;
306        // everything else is ok to merge
307    }
308}