diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..bee8a64 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +__pycache__ diff --git a/data/raw_to_dataset/mini_one_minute.py b/data/raw_to_dataset/mini_one_minute.py new file mode 100644 index 0000000..6f6e4f6 --- /dev/null +++ b/data/raw_to_dataset/mini_one_minute.py @@ -0,0 +1,43 @@ +# Extracts train/valid/test/tracker samples for the mini dataset. +# This is not needed unless preparing video from scratch. + +import os +import random +import subprocess +import sys + + +data_root = sys.argv[1] +video_path = '/data/processed/full-dataset/mini/videos/' + +out_paths = [ + os.path.join(data_root, 'dataset', 'mini', 'train/video/'), + os.path.join(data_root, 'dataset', 'mini', 'valid/video/'), + os.path.join(data_root, 'dataset', 'mini', 'test/video/'), + os.path.join(data_root, 'dataset', 'mini', 'tracker/video/'), +] + +def get_duration(fname): + output = subprocess.check_output(['ffprobe', '-select_streams', 'v:0', '-show_entries', 'stream=duration', '-of', 'csv=s=,:p=0', fname]) + print(f"duration is {float(output.strip())}") + return float(output.strip()) + +# list of tuples (fname, seconds) +segments = [] + +for fname in os.listdir(video_path): + if not fname.endswith('.mp4'): + continue + duration = int(get_duration(video_path+fname)) + for skip in range(0, duration, 2): + segments.append((fname, skip)) + +random.shuffle(segments) +print('got {} segments'.format(len(segments))) + +for out_path in out_paths: + cur_segments = segments[0:60] + segments = segments[60:] + for i, (fname, skip) in enumerate(cur_segments): + ffmpeg_args = ['ffmpeg', '-ss', str(skip), '-i', video_path+fname, '-t', '60', out_path+str(i)+'.mp4'] + subprocess.call(ffmpeg_args) diff --git a/data/raw_to_dataset/one_minute_samples.py b/data/raw_to_dataset/one_minute_samples.py index 97f3ad0..21200c2 100644 --- a/data/raw_to_dataset/one_minute_samples.py +++ b/data/raw_to_dataset/one_minute_samples.py @@ -37,7 +37,7 @@ 
files = random.sample(files, 60) counter = 0 for label, id in files: - fnames = [os.path.join(video_path, label, '{}.mp4'.format(id+i) for i in range(skip)] + fnames = [os.path.join(video_path, label, '{}.mp4'.format(id+i)) for i in range(skip)] if not all([os.path.exists(fname) for fname in fnames]): print('skip {}/{} since some not exist'.format(label, id)) continue diff --git a/model/apply_dyn.py b/model/apply_dyn.py index 09f8b6a..4d0c9f8 100644 --- a/model/apply_dyn.py +++ b/model/apply_dyn.py @@ -10,7 +10,9 @@ import subprocess import sys import tensorflow as tf -#tf.disable_eager_execution() +# tf.disable_eager_execution() +# import tensorflow.compat.v1 as tf +# tf.disable_v2_behavior() import time def eprint(s): diff --git a/model/model_dyn.py b/model/model_dyn.py index 0bbe5b3..4edd23e 100644 --- a/model/model_dyn.py +++ b/model/model_dyn.py @@ -1,6 +1,8 @@ import numpy -import tensorflow as tf -#tf.disable_eager_execution() +# import tensorflow as tf +# tf.disable_eager_execution() +import tensorflow.compat.v1 as tf +# tf.disable_v2_behavior() import os import os.path import random diff --git a/pipeline/eval.py b/pipeline/eval.py index 7508540..3e729ae 100644 --- a/pipeline/eval.py +++ b/pipeline/eval.py @@ -13,7 +13,7 @@ def track_one(args): def track(label, in_path, out_path): fnames = os.listdir(in_path) - if label in ['amsterdam', 'jackson', 'shibuya', 'caldot1', 'caldot2', 'warsaw', 'uav']: + if label in ['amsterdam', 'jackson', 'shibuya', 'caldot1', 'caldot2', 'warsaw', 'uav', 'mini']: cls = 'car' elif label == 'taipei': cls = 'bus' diff --git a/pipeline2/10_detect.go b/pipeline2/10_detect.go index e5067c9..188d334 100644 --- a/pipeline2/10_detect.go +++ b/pipeline2/10_detect.go @@ -1,7 +1,7 @@ package main import ( - "./lib" + "otif-pipeline/lib" "encoding/json" "fmt" diff --git a/pipeline2/30_speed_accuracy.go b/pipeline2/30_speed_accuracy.go index 1fd75fa..ba579ed 100644 --- a/pipeline2/30_speed_accuracy.go +++ b/pipeline2/30_speed_accuracy.go @@ 
-1,7 +1,7 @@ package main import ( - "./lib" + "otif-pipeline/lib" "fmt" "os" "time" diff --git a/pipeline2/40_evaltest.go b/pipeline2/40_evaltest.go index 6b0f80b..034292a 100644 --- a/pipeline2/40_evaltest.go +++ b/pipeline2/40_evaltest.go @@ -1,7 +1,7 @@ package main import ( - "./lib" + "otif-pipeline/lib" "fmt" "io/ioutil" "os" diff --git a/pipeline2/README b/pipeline2/README new file mode 100644 index 0000000..752442b --- /dev/null +++ b/pipeline2/README @@ -0,0 +1,122 @@ +Steps: + +(0) Installation +(1) Prepare satellite imagery into individual tile images +(2) Convert road network map into .graph files +(3) Create tile list +(4) Train a model +(5) Infer roads in a new region + + +(0) Installation +---------------- + +These Python packages are required: +* tensorflow-gpu +* fiona +* rtree + + +(1) Prepare satellite imagery +----------------------------- + +Satellite imagery is read from 4096x4096 pixel .png images. +These images form a 2D grid. +The filenames should be like: + + imagery/REGION_0_0_sat.png + imagery/REGION_0_1_sat.png + imagery/REGION_1_0_sat.png + imagery/REGION_1_1_sat.png + +Each filename is of the format REGION_X_Y_sat.png. + +* REGION is a label for one portion of satellite imagery. + You can have multiple regions, like this: + imagery/region1_0_0_sat.png + imagery/region1_0_1_sat.png + imagery/region2_0_0_sat.png + imagery/region2_1_0_sat.png + +* X and Y are the coordinates of the image in the grid. + For example, REGION_0_1 should be to the right of REGION_0_0. + Similarly, REGION_1_0 should be below REGION_0_0. + If we concatenate the images like this, the result should be a large coherent image: + + REGION_0_0 REGION_0_1 + REGION_1_0 REGION_1_1 + + +(2) Convert road network map into .graph files +---------------------------------------------- + +The road network map must be represented as an undirected graph in a custom text format. 
+ +The format has two sections, vertices then edges, which together look like this: + + 100 200 + 100 400 + 100 600 + 200 400 + + 0 1 + 1 0 + 1 2 + 2 1 + 1 3 + 3 1 + +The first four lines are vertices, while the last six are edges. +Each vertex line has two parts: the X coordinate and the Y coordinate. +These coordinates must be in the imagery coordinate system. + +For example, because our images are 4096x4096, the coordinate (8192, 8192) is in +REGION_2_2_sat.png at (0, 0). Similarly, (6000, 1000) is in REGION_1_0_sat.png +at (1904, 1000). + +Each edge line has two parts: a source vertex and a destination vertex. +The vertices are referenced by their 0-indexed line number. So "0 1" connects +vertex (100, 200) to vertex (100, 400). + +The graph above looks like this: + + (100, 200) -- (100, 400) -- (100, 600) + | + (200, 400) + +util_shp_to_graph.py converts a shapefile to the graph file format. +However, you still need to convert the longitude-latitude coordinates to pixel coordinates. + +There must be one graph file per region, and they must be stored in a folder like this: + + graphs/region1.graph + graphs/region2.graph + +The filenames (region1 and region2) should correspond to the regions in the imagery filenames. + + +(3) Create tile list +-------------------- + +The model training code needs to know which parts of the road network map to train on. +If you want to train the model everywhere where there is map data, then you can run: + +$ python util_create_tile_list.py region1,region2 graphs/region1.graph,graphs/region2.graph tile_list.json + + +(4) Train a model +----------------- + +$ mkdir model/ model/model_latest/ model/model_best/ +$ python run_m5_point.py model/ imagery/ graphs/ tile_list.json + + +(5) Infer roads in a new region +------------------------------- + +To infer roads in a new region, you need a basemap (also in the .graph file with +coordinates matching the imagery pixels) and the region label for the imagery. 
+ +Then just run: + +$ python eval_m5_point.py model/model_best/model imagery/ graphs/ tile_list.json REGION basemap.graph diff --git a/pipeline2/discoverlib/__init__.py b/pipeline2/discoverlib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pipeline2/discoverlib/coords.py b/pipeline2/discoverlib/coords.py new file mode 100644 index 0000000..f5baa10 --- /dev/null +++ b/pipeline2/discoverlib/coords.py @@ -0,0 +1,57 @@ +import geom + +import math + +ORIGIN_SHIFT = 2 * math.pi * 6378137 / 2.0 + +def lonLatToMeters(p): + mx = p.x * ORIGIN_SHIFT / 180.0 + my = math.log(math.tan((90 + p.y) * math.pi / 360.0)) / (math.pi / 180.0) + my = my * ORIGIN_SHIFT / 180.0 + return geom.FPoint(mx, my) + +def metersToLonLat(p): + lon = (p.x / ORIGIN_SHIFT) * 180.0 + lat = (p.y / ORIGIN_SHIFT) * 180.0 + lat = 180 / math.pi * (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0) + return geom.FPoint(lon, lat) + +def getMetersPerPixel(zoom): + return 2 * math.pi * 6378137 / (2**zoom) / 256 + +def lonLatToPixel(p, origin, zoom): + p = lonLatToMeters(p).sub(lonLatToMeters(origin)) + p = p.scale(1 / getMetersPerPixel(zoom)) + p = geom.FPoint(p.x, -p.y) + p = p.add(geom.FPoint(256, 256)) + return p + +def pixelToLonLat(p, origin, zoom): + p = p.sub(geom.FPoint(256, 256)) + p = geom.FPoint(p.x, -p.y) + p = p.scale(getMetersPerPixel(zoom)) + p = metersToLonLat(p.add(lonLatToMeters(origin))) + return p + +def lonLatToMapboxTile(p, zoom): + n = 2**zoom + xtile = int((p.x + 180.0) / 360 * n) + ytile = int((1 - math.log(math.tan(p.y * math.pi / 180) + (1 / math.cos(p.y * math.pi / 180))) / math.pi) / 2 * n) + return (xtile, ytile) + +def lonLatToMapbox(p, zoom, origin_tile): + n = 2**zoom + x = (p.x + 180.0) / 360 * n + y = (1 - math.log(math.tan(p.y * math.pi / 180) + (1 / math.cos(p.y * math.pi / 180))) / math.pi) / 2 * n + xoff = x - origin_tile[0] + yoff = y - origin_tile[1] + return geom.FPoint(xoff, yoff).scale(256) + +def mapboxToLonLat(p, zoom, 
origin_tile): + n = 2**zoom + x = p.x / 256.0 + origin_tile[0] + y = p.y / 256.0 + origin_tile[1] + x = x * 360.0 / n - 180 + y = math.atan(math.sinh(math.pi * (1 - 2.0 * y / n))) + y = y * 180 / math.pi + return geom.FPoint(x, y) diff --git a/pipeline2/discoverlib/geom.py b/pipeline2/discoverlib/geom.py new file mode 100644 index 0000000..af580aa --- /dev/null +++ b/pipeline2/discoverlib/geom.py @@ -0,0 +1,328 @@ +import math +import numpy + +class Point(object): + def __init__(self, x, y): + self.x = int(x) + self.y = int(y) + + def distance(self, other): + dx = self.x - other.x + dy = self.y - other.y + return math.sqrt(dx * dx + dy * dy) + + def sub(self, other): + return Point(self.x - other.x, self.y - other.y) + + def add(self, other): + return Point(self.x + other.x, self.y + other.y) + + def scale(self, f): + return Point(self.x * f, self.y * f) + + def magnitude(self): + return math.sqrt(self.x * self.x + self.y * self.y) + + def angle_to(self, other): + if self.magnitude() == 0 or other.magnitude() == 0: + return 0 + s = (self.x * other.x + self.y * other.y) / self.magnitude() / other.magnitude() + if abs(s) > 1: s = s / abs(s) + angle = math.acos(s) + if angle > math.pi: + return 2 * math.pi - angle + else: + return angle + + def signed_angle(self, other): + return math.atan2(other.y, other.x) - math.atan2(self.y, self.x) + + def bounds(self): + return Rectangle(self, self) + + def dot(self, point): + return self.x * point.x + self.y * point.y + + def rotate(self, center, angle): + dx = self.x - center.x + dy = self.y - center.y + rx = math.cos(angle)*dx - math.sin(angle)*dy + ry = math.sin(angle)*dx + math.cos(angle)*dy + return Point(center.x + int(rx), center.y + int(ry)) + + def __repr__(self): + return 'Point({}, {})'.format(self.x, self.y) + + def __eq__(self, other): + return self.x == other.x and self.y == other.y + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash((self.x, self.y)) + +class 
FPoint(object): + def __init__(self, x, y): + self.x = float(x) + self.y = float(y) + + def distance(self, other): + dx = self.x - other.x + dy = self.y - other.y + return math.sqrt(dx * dx + dy * dy) + + def sub(self, other): + return FPoint(self.x - other.x, self.y - other.y) + + def add(self, other): + return FPoint(self.x + other.x, self.y + other.y) + + def scale(self, f): + return FPoint(self.x * f, self.y * f) + + def scale_to_length(self, l): + return self.scale(l / self.magnitude()) + + def magnitude(self): + return math.sqrt(self.x * self.x + self.y * self.y) + + def angle_to(self, other): + if self.magnitude() == 0 or other.magnitude() == 0: + return 0 + s = (self.x * other.x + self.y * other.y) / self.magnitude() / other.magnitude() + if abs(s) > 1: s = s / abs(s) + angle = math.acos(s) + if angle > math.pi: + return 2 * math.pi - angle + else: + return angle + + def signed_angle(self, other): + return math.atan2(other.y, other.x) - math.atan2(self.y, self.x) + + def bounds(self): + return Rectangle(self, self) + + def dot(self, point): + return self.x * point.x + self.y * point.y + + def __repr__(self): + return 'FPoint({}, {})'.format(self.x, self.y) + + def to_point(self): + return Point(self.x, self.y) + + def __eq__(self, other): + return self.x == other.x and self.y == other.y + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash((self.x, self.y)) + +class Segment(object): + def __init__(self, start, end): + self.start = start + self.end = end + + def length(self): + return self.start.distance(self.end) + + def project_factor(self, point): + l = self.length() + if l == 0: + return 0 + t = point.sub(self.start).dot(self.end.sub(self.start)) / l + t = max(0, min(l, t)) + return t + + def project(self, point): + t = self.project_factor(point) + return self.point_at_factor(t) + + def point_at_factor(self, t): + l = self.length() + if l == 0: + return self.start + return 
self.start.add(self.end.sub(self.start).scale(t / l)) + + def distance(self, point): + p = self.project(point) + return p.distance(point) + + def intersection(self, other): + d1 = self.vector() + d2 = other.vector() + d12 = other.start.sub(self.start) + + den = d1.y * d2.x - d1.x * d2.y + u1 = d1.x * d12.y - d1.y * d12.x + u2 = d2.x * d12.y - d2.y * d12.x + + if den == 0: + # collinear + if u1 == 0 and u2 == 0: + return self.start + else: + return None + + if float(u1) / den < 0 or float(u1) / den > 1 or float(u2) / den < 0 or float(u2) / den > 1: + return None + + return self.point_at_factor(float(u2) / den * self.length()) + + def vector(self): + return self.end.sub(self.start) + + def bounds(self): + return self.start.bounds().extend(self.end) + + def extend(self, amount): + v = self.vector() + v = v.scale(amount / v.magnitude()) + return Segment( + self.start.sub(v), + self.end.add(v) + ) + + def __repr__(self): + return 'Segment({}, {})'.format(self.start, self.end) + +class Rectangle(object): + def __init__(self, start, end): + self.start = start + self.end = end + + def lengths(self): + return Point(self.end.x - self.start.x, self.end.y - self.start.y) + + def clip(self, point): + npoint = Point(point.x, point.y) + if npoint.x < self.start.x: + npoint.x = self.start.x + elif npoint.x >= self.end.x: + npoint.x = self.end.x - 1 + if npoint.y < self.start.y: + npoint.y = self.start.y + elif npoint.y >= self.end.y: + npoint.y = self.end.y - 1 + return npoint + + def clip_rect(self, r): + return Rectangle(self.clip(r.start), self.clip(r.end)) + + def add_tol(self, tol): + return Rectangle( + self.start.sub(Point(tol, tol)), + self.end.add(Point(tol, tol)) + ) + + def contains(self, point): + return point.x >= self.start.x and point.x < self.end.x and point.y >= self.start.y and point.y < self.end.y + + def extend(self, point): + return Rectangle( + Point(min(self.start.x, point.x), min(self.start.y, point.y)), + Point(max(self.end.x, point.x), max(self.end.y, 
point.y)) + ) + + def intersects(self, other): + return self.end.y >= other.start.y and other.end.y >= self.start.y and self.end.x >= other.start.x and other.end.x >= self.start.x + + def scale(self, f): + return Rectangle(self.start.scale(f), self.end.scale(f)) + + def intersection(self, other): + intersection = Rectangle( + Point(max(self.start.x, other.start.x), max(self.start.y, other.start.y)), + Point(min(self.end.x, other.end.x), min(self.end.y, other.end.y)) + ) + if intersection.end.x <= intersection.start.x: + intersection.end.x = intersection.start.x + if intersection.end.y <= intersection.start.y: + intersection.end.y = intersection.start.y + return intersection + + def iou(self, other): + intersection = self.intersection(other) + if intersection.area() == 0: + return 0 + return float(intersection.area()) / (self.area() + other.area() - intersection.area()) + + def area(self): + return (self.end.x - self.start.x) * (self.end.y - self.start.y) + + def __repr__(self): + return 'Rectangle({}, {})'.format(self.start, self.end) + +def draw_line(start, end, lengths): + # followX indicates whether to move along x or y coordinates + followX = abs(end.y - start.y) <= abs(end.x - start.x) + if followX: + x0 = start.x + x1 = end.x + y0 = start.y + y1 = end.y + else: + x0 = start.y + x1 = end.y + y0 = start.x + y1 = end.x + + delta = Point(abs(x1 - x0), abs(y1 - y0)) + current_error = 0 + + if x0 < x1: + xstep = 1 + else: + xstep = -1 + + if y0 < y1: + ystep = 1 + else: + ystep = -1 + + points = [] + def add_point(p): + if p.x >= 0 and p.x < lengths.x and p.y >= 0 and p.y < lengths.y: + points.append(p) + + x = x0 + y = y0 + + while x != x1 + xstep: + if followX: + add_point(Point(x, y)) + else: + add_point(Point(y, x)) + + x += xstep + current_error += delta.y + if current_error >= delta.x: + y += ystep + current_error -= delta.x + + return points + +def draw_lines(segments, im=None, shape=None): + from eyediagram._brescount import bres_segments_count + if not 
shape: + if not im: + raise Exception('shape or im must be provided') + shape = im.shape + tmpim = numpy.zeros((shape[0], shape[1]), dtype='int32') + + sticks = numpy.zeros((len(segments), 4), dtype='int32') + for i, segment in enumerate(segments): + sticks[i] = [segment.start.x, segment.start.y, segment.end.x, segment.end.y] + bres_segments_count(sticks, tmpim) + tmpim = tmpim > 0 + if im: + return numpy.logical_or(im, tmpim) + else: + return tmpim + +def vector_from_angle(angle, length): + return Point(math.cos(angle) * length, math.sin(angle) * length) diff --git a/pipeline2/discoverlib/graph.py b/pipeline2/discoverlib/graph.py new file mode 100644 index 0000000..74a3fdc --- /dev/null +++ b/pipeline2/discoverlib/graph.py @@ -0,0 +1,673 @@ +import geom +import grid_index + +import math +import numpy +import rtree + +class Vertex(object): + def __init__(self, id, point): + self.id = id + self.point = point + self.in_edges = [] + self.out_edges = [] + + def _neighbors(self): + n = {} + for edge in self.in_edges: + n[edge.src] = edge + for edge in self.out_edges: + n[edge.dst] = edge + return n + + def neighbors(self): + return self._neighbors().keys() + + def __repr__(self): + return 'Vertex({}, {}, {} in {} out)'.format(self.id, self.point, len(self.in_edges), len(self.out_edges)) + +class Edge(object): + def __init__(self, id, src, dst): + self.id = id + self.src = src + self.dst = dst + + def bounds(self): + return self.src.point.bounds().extend(self.dst.point) + + def segment(self): + return geom.Segment(self.src.point, self.dst.point) + + def closest_pos(self, point): + p = self.segment().project(point) + return EdgePos(self, p.distance(self.src.point)) + + def is_opposite(self, edge): + return edge.src == self.dst and edge.dst == self.src + + def get_opposite_edge(self): + for edge in self.dst.out_edges: + if self.is_opposite(edge): + return edge + return None + + def is_adjacent(self, edge): + return edge.src == self.src or edge.src == self.dst or edge.dst 
== self.src or edge.dst == self.dst + + def orig_id(self): + if hasattr(self, 'orig_edge_id'): + return self.orig_edge_id + else: + return self.id + +class EdgePos(object): + def __init__(self, edge, distance): + self.edge = edge + self.distance = distance + + def point(self): + segment = self.edge.segment() + vector = segment.vector() + if vector.magnitude() < 1: + return segment.start + else: + return segment.start.add(vector.scale(self.distance / vector.magnitude())) + + def reverse(self): + return EdgePos(self.edge.get_opposite_edge(), self.edge.segment().length() - self.distance) + +class Index(object): + def __init__(self, graph, index): + self.graph = graph + self.index = index + + def search(self, rect): + edge_ids = self.index.intersection((rect.start.x, rect.start.y, rect.end.x, rect.end.y)) + return [self.graph.edges[edge_id] for edge_id in edge_ids] + + def subgraph(self, rect): + return graph_from_edges(self.search(rect)) + +def graph_from_edges(edges): + ng = Graph() + vertex_map = {} + for edge in edges: + for vertex in [edge.src, edge.dst]: + if vertex not in vertex_map: + vertex_map[vertex] = ng.add_vertex(vertex.point) + nedge = ng.add_edge(vertex_map[edge.src], vertex_map[edge.dst]) + nedge.orig_edge_id = edge.orig_id() + return ng + +class Graph(object): + def __init__(self): + self.vertices = [] + self.edges = [] + + def add_vertex(self, point): + vertex = Vertex(len(self.vertices), point) + self.vertices.append(vertex) + return vertex + + def find_edge(self, src, dst): + for edge in src.out_edges: + if edge.dst == dst: + return edge + return None + + def add_edge(self, src, dst): + if src == dst: + raise Exception('cannot add edge between same vertex') + elif self.find_edge(src, dst): + return self.find_edge(src, dst) + edge = Edge(len(self.edges), src, dst) + self.edges.append(edge) + src.out_edges.append(edge) + dst.in_edges.append(edge) + return edge + + def add_bidirectional_edge(self, src, dst): + return ( + self.add_edge(src, dst), + 
self.add_edge(dst, src), + ) + + def edgeIndex(self): + rt = rtree.index.Index() + for edge in self.edges: + bounds = edge.bounds() + rt.insert(edge.id, (bounds.start.x, bounds.start.y, bounds.end.x, bounds.end.y)) + return Index(self, rt) + + def edge_grid_index(self, size): + index = grid_index.GridIndex(size) + for edge in self.edges: + index.insert_rect(edge.bounds(), edge) + return index + + def bounds(self): + r = None + for vertex in self.vertices: + if r is None: + r = geom.Rectangle(vertex.point, vertex.point) + else: + r = r.extend(vertex.point) + return r + + def save(self, fname): + with open(fname, 'w') as f: + for vertex in self.vertices: + f.write("{} {}\n".format(vertex.point.x, vertex.point.y)) + f.write("\n") + for edge in self.edges: + f.write("{} {}\n".format(edge.src.id, edge.dst.id)) + + def clone(self): + other = Graph() + for vertex in self.vertices: + v = other.add_vertex(vertex.point) + if hasattr(vertex, 'edge_pos'): + v.edge_pos = vertex.edge_pos + for edge in self.edges: + e = other.add_edge(other.vertices[edge.src.id], other.vertices[edge.dst.id]) + return other + + def filter_edges(self, filter_edges, keep_attrs=None): + g = Graph() + vertex_map = {} + for edge in self.edges: + if edge in filter_edges: + continue + for vertex in [edge.src, edge.dst]: + if vertex not in vertex_map: + vertex_map[vertex] = g.add_vertex(vertex.point) + new_edge = g.add_edge(vertex_map[edge.src], vertex_map[edge.dst]) + if keep_attrs: + for k in keep_attrs: + if hasattr(edge, k): + setattr(new_edge, k, getattr(edge, k)) + return g + + def split_edge(self, edge, length): + point = edge.segment().point_at_factor(length) + new_vertex = self.add_vertex(point) + + orig_src, orig_dst = edge.src, edge.dst + opp_edge = edge.get_opposite_edge() + + edge.dst = new_vertex + orig_dst.in_edges.remove(edge) + new_vertex.in_edges.append(edge) + remainder_edge = self.add_edge(new_vertex, orig_dst) + remainder_edge.orig_edge_id = edge.orig_id() + + if opp_edge: + 
opp_edge.src = new_vertex + orig_dst.out_edges.remove(opp_edge) + new_vertex.out_edges.append(opp_edge) + e = self.add_edge(orig_dst, new_vertex) + e.orig_edge_id = opp_edge.orig_id() + + return remainder_edge + + def union(self, other): + g = self.clone() + vertex_map = {} + for edge in other.edges: + for vertex in [edge.src, edge.dst]: + if vertex not in vertex_map: + vertex_map[vertex] = g.add_vertex(vertex.point) + g.add_edge(vertex_map[edge.src], vertex_map[edge.dst]) + return g + + def closest_vertex(self, p): + best_vertex = None + best_distance = None + for vertex in self.vertices: + d = vertex.point.distance(p) + if best_vertex is None or d < best_distance: + best_vertex = vertex + best_distance = d + return best_vertex + +def read_graph(fname, merge_duplicates=False, fpoint=False): + point_obj = geom.Point + if fpoint: + point_obj = geom.FPoint + + graph = Graph() + with open(fname, 'r') as f: + vertex_section = True + vertices = {} + next_vertex_id = 0 + seen_points = {} + for line in f: + parts = line.strip().split(' ') + if vertex_section: + if len(parts) >= 2: + point = point_obj(float(parts[0]), float(parts[1])) + if point in seen_points and merge_duplicates: + print 'merging duplicate vertex at {}'.format(point) + vertices[next_vertex_id] = seen_points[point] + else: + vertex = graph.add_vertex(point) + vertices[next_vertex_id] = vertex + seen_points[point] = vertex + next_vertex_id += 1 + else: + vertex_section = False + elif len(parts) >= 2: + src = vertices[int(parts[0])] + dst = vertices[int(parts[1])] + if src == dst and merge_duplicates: + print 'ignoring self edge at {}'.format(src.point) + continue + graph.add_edge(src, dst) + return graph + +def dijkstra_helper(src, stop_at=None, max_distance=None): + distances = {} + prev = {} + remaining = set() + seen = set() + + distances[src.id] = 0 + remaining.add(src) + seen.add(src) + + while len(remaining) > 0: + closestNode = None + closestDistance = None + for vertex in remaining: + if 
closestNode is None or distances[vertex.id] < closestDistance: + closestNode = vertex + closestDistance = distances[vertex.id] + remaining.remove(closestNode) + + if stop_at is not None and stop_at == closestNode: + break + elif closestDistance > max_distance: + break + + for other, edge in closestNode._neighbors().items(): + if other not in seen: + seen.add(other) + remaining.add(other) + distances[other.id] = float('inf') + prev[other.id] = None + + if other in remaining: + if hasattr(edge, 'cost'): + d = closestDistance + edge.cost + else: + d = closestDistance + closestNode.point.distance(other.point) + if d < distances[other.id]: + distances[other.id] = d + prev[other.id] = (closestNode, edge) + + return distances, prev + +def shortest_distances_from_source(src, max_distance=None): + distances, _ = dijkstra_helper(src, max_distance=max_distance) + return distances + +def shortest_path(src, dst, max_distance=None): + _, prev = dijkstra_helper(src, stop_at=dst, max_distance=max_distance) + if dst.id not in prev: + return None, None + vertex_path = [dst] + edge_path = [] + cur_node = dst + while cur_node != src: + cur_node, edge = prev[cur_node.id] + vertex_path.append(cur_node) + edge_path.append(edge) + vertex_path.reverse() + edge_path.reverse() + return vertex_path, edge_path + +# searches for the closest edge position to the specified point +# if src is specified, only looks for edges reachable from the src edge position within remaining units +def closest_reachable_edge(point, index, explored_node_pairs=None, remaining = 50, src = None, distance_threshold = 50): + closest_edge_pos = None + closest_edge_pos_path = None + smallest_distance = None + + # if src is None, get candidate edges from index + # otherwise, get candidate edges by following graph + if src is None: + candidates = [(edge, None) for edge in index.search(point.bounds().add_tol(distance_threshold))] + else: + candidates = [(src.edge, None)] + cur_explored_node_pairs = set() + 
cur_explored_node_pairs.add((src.edge.src.id, src.edge.dst.id)) + def search_edge(path, edge, remaining): + l = edge.segment().length() + if remaining > l: + search_vertex(path + [edge], edge.dst, remaining - l) + def search_vertex(path, vertex, remaining): + for edge in vertex.out_edges: + if (edge.src.id, edge.dst.id) in cur_explored_node_pairs or (edge.dst.id, edge.src.id) in cur_explored_node_pairs: + continue + candidates.append((edge, path)) + cur_explored_node_pairs.add((edge.src.id, edge.dst.id)) + search_edge(path, edge, remaining) + + # search forwards + l = src.edge.segment().length() - src.distance + if remaining > l: + search_vertex([], src.edge.dst, remaining - l) + + # search backwards + reverse_edge_pos = src.reverse() + l = reverse_edge_pos.edge.segment().length() - reverse_edge_pos.distance + if remaining > l: + search_vertex([], reverse_edge_pos.edge.dst, remaining - l) + + for edge, path in candidates: + if explored_node_pairs is not None and ((edge.src.id, edge.dst.id) in explored_node_pairs or (edge.dst.id, edge.src.id) in explored_node_pairs) and (src is None or edge != src.edge): + continue + distance = edge.segment().distance(point) + if distance > distance_threshold: + continue + + if closest_edge_pos is None or distance < smallest_distance: + closest_edge_pos = edge.closest_pos(point) + closest_edge_pos_path = path + smallest_distance = distance + + return closest_edge_pos, closest_edge_pos_path + +def follow_graph(edge_pos, distance, explored_node_pairs=None): + if explored_node_pairs: + explored_node_pairs = set(explored_node_pairs) + else: + explored_node_pairs = set() + explored_node_pairs.add((edge_pos.edge.src.id, edge_pos.edge.dst.id)) + positions = [] + + def search_edge(edge, remaining): + l = edge.segment().length() + if remaining > l: + search_vertex(edge.dst, remaining - l) + else: + pos = EdgePos(edge, remaining) + positions.append(pos) + + def search_vertex(vertex, remaining): + for edge in vertex.out_edges: + if 
(edge.src.id, edge.dst.id) in explored_node_pairs or (edge.dst.id, edge.src.id) in explored_node_pairs: + continue + explored_node_pairs.add((edge.src.id, edge.dst.id)) + search_edge(edge, remaining) + + remaining = distance + l = edge_pos.edge.segment().length() - edge_pos.distance + if remaining > l: + search_vertex(edge_pos.edge.dst, remaining - l) + else: + positions = [EdgePos(edge_pos.edge, edge_pos.distance + remaining)] + + return positions + +class RoadSegment(object): + def __init__(self, id): + self.id = id + self.edges = [] + + def add_edge(self, edge, direction): + if direction == 'forwards': + self.edges.append(edge) + elif direction == 'backwards': + self.edges = [edge] + self.edges + else: + raise Exception('bad edge') + + def compute_edge_distances(self): + l = 0 + self.edge_distances = {} + for edge in self.edges: + self.edge_distances[edge.id] = l + l += edge.segment().length() + + def distance_to_edge(self, distance, return_idx=False): + for i in xrange(len(self.edges)): + edge = self.edges[i] + distance -= edge.segment().length() + if distance <= 0: + if return_idx: + return i + else: + return edge + if return_idx: + return len(self.edges) - 1 + else: + return self.edges[-1] + + def src(self): + return self.edges[0].src + + def dst(self): + return self.edges[-1].dst + + def is_opposite(self, rs): + return self.src() == rs.dst() and self.dst() == rs.src() and self.edges[0].is_opposite(rs.edges[-1]) + + def in_rs(self, edge_to_rs): + rs_set = {} + for edge in self.src().in_edges: + rs = edge_to_rs[edge.id] + if rs.id != self.id and rs.id not in rs_set: + rs_set[rs.id] = rs + return rs_set.values() + + def out_rs(self, edge_to_rs): + rs_set = {} + for edge in self.dst().out_edges: + rs = edge_to_rs[edge.id] + if rs.id != self.id and rs.id not in rs_set: + rs_set[rs.id] = rs + return rs_set.values() + + def get_opposite_rs(self, edge_to_rs): + for rs in self.out_rs(edge_to_rs): + if self.is_opposite(rs): + return rs + return None + + def 
length(self): + return sum([edge.segment().length() for edge in self.edges]) + + def closest_pos(self, point): + best_edge_pos = None + best_distance = None + for edge in self.edges: + edge_pos = edge.closest_pos(point) + distance = edge_pos.point().distance(point) + if best_edge_pos is None or distance < best_distance: + best_edge_pos = edge_pos + best_distance = distance + return best_edge_pos + + def point_at_factor(self, t): + edge = self.distance_to_edge(t) + return edge.segment().point_at_factor(t - self.edge_distances[edge.id]) + +def get_graph_road_segments(g): + road_segments = [] + edge_to_rs = {} + + def search_from_edge(rs, edge, direction): + cur_edge = edge + while True: + if direction == 'forwards': + vertex = cur_edge.dst + edges = vertex.out_edges + elif direction == 'backwards': + vertex = cur_edge.src + edges = vertex.in_edges + + edges = [next_edge for next_edge in edges if not next_edge.is_opposite(cur_edge)] + + if len(edges) != 1: + # we have hit intersection vertex or a dead end + return + + next_edge = edges[0] + + if next_edge.id in edge_to_rs: + # this should only happen when we run in a segment that is actually a loop + # although really it shouldn't happen in that case either, since loops should start/end at an intersection + # TODO: think about this more + return + + rs.add_edge(next_edge, direction) + edge_to_rs[next_edge.id] = rs + cur_edge = next_edge + + for edge in g.edges: + if edge.id in edge_to_rs: + continue + + rs = RoadSegment(len(road_segments)) + rs.add_edge(edge, 'forwards') + edge_to_rs[edge.id] = rs + search_from_edge(rs, edge, 'forwards') + search_from_edge(rs, edge, 'backwards') + rs.compute_edge_distances() + road_segments.append(rs) + + return road_segments, edge_to_rs + +class GraphContainer(object): + def __init__(self, g): + self.graph = g + self.edge_index = g.edgeIndex() + self.road_segments, self.edge_to_rs = get_graph_road_segments(g) + +def mapmatch(index, road_segments, edge_to_rs, points, segment_length): 
+ SIGMA = segment_length + START_TOL = segment_length * 2.5 + MAX_TOL = segment_length * 4 + + # probs keeps track of both the emission probability and the distance along road segment of the + # point closest to the previous path point + # on the next iteration, we restrict to choosing points at higher distances along road segment + probs = {} + + for edge in index.search(points[0].bounds().add_tol(START_TOL)): + rs = edge_to_rs[edge.id] + distance = edge.segment().distance(points[0]) + p = -0.5 * distance * distance / SIGMA / SIGMA + if rs.id not in probs or p > probs[rs.id][0]: + rs_distance = rs.edge_distances[edge.id] + edge.segment().project_factor(points[0]) + probs[rs.id] = (p, rs_distance) + + # given a road segment and a previous rs distance, returns next rs distance and the actual point distance + def distance_to_rs(rs, clip_distance, point, vector): + low_distance = clip_distance + segment_length / 2 + high_distance = clip_distance + segment_length * 2 + + if high_distance < 0 or low_distance > rs.length(): + return None, None + + # get edges between low_distance and high_distance + low_edge_idx = rs.distance_to_edge(low_distance, return_idx=True) + high_edge_idx = rs.distance_to_edge(high_distance, return_idx=True) + edges = rs.edges[low_edge_idx:high_edge_idx+1] + + best_distance = None + best_rs_distance = None + for edge in edges: + rs_distance = rs.edge_distances[edge.id] + edge.segment().project_factor(point) + rs_distance = numpy.clip(rs_distance, low_distance, high_distance) + rs_distance = numpy.clip(rs_distance, rs.edge_distances[edge.id], rs.edge_distances[edge.id] + edge.segment().length()) + edge_distance = rs_distance - rs.edge_distances[edge.id] + rs_point = edge.segment().point_at_factor(edge_distance) + distance = rs_point.distance(point) + edge.segment().vector().angle_to(vector) * 12 + if best_distance is None or distance < best_distance: + best_distance = distance + best_rs_distance = rs_distance + + return best_distance, 
best_rs_distance + + backpointers = [{} for _ in xrange(len(points) - 1)] + + for i in xrange(len(points) - 1): + next_probs = {} + for prev_rs_id in probs: + prev_p, prev_rs_distance = probs[prev_rs_id] + rs = road_segments[prev_rs_id] + remaining_rs_distance = rs.length() - prev_rs_distance + rs_candidates = [(rs, prev_rs_distance)] + [(next_rs, -remaining_rs_distance) for next_rs in rs.out_rs(edge_to_rs)] + for next_rs, clip_distance in rs_candidates: + if next_rs.is_opposite(rs): + continue + distance, rs_distance = distance_to_rs(next_rs, clip_distance, points[i + 1], points[i + 1].sub(points[i])) + if distance is None or distance > MAX_TOL: + continue + p = prev_p + (-0.5 * distance * distance / SIGMA / SIGMA) + #if next_rs != rs: + # p += math.log(0.1) + if next_rs.id not in next_probs or p > next_probs[next_rs.id][0]: + next_probs[next_rs.id] = (p, rs_distance) + backpointers[i][next_rs.id] = prev_rs_id + probs = next_probs + if len(probs) == 0: + return None, None + + return probs, backpointers + +def mm_best_rs(road_segments, probs, rs_blacklist=None): + best_rs = None + for rs_id in probs: + if rs_blacklist is not None and rs_id in rs_blacklist: + continue + prob = probs[rs_id][0] + if best_rs is None or prob > probs[best_rs.id][0]: + best_rs = road_segments[rs_id] + return best_rs + +def mm_follow_backpointers(road_segments, rs_id, backpointers): + rs_list = [] + for i in xrange(len(backpointers) - 1, -1, -1): + rs_id = backpointers[i][rs_id] + rs_list.append(road_segments[rs_id]) + rs_list.reverse() + return rs_list + +def get_nearby_vertices(vertex, n): + nearby_vertices = set() + def search(vertex, remaining): + if vertex in nearby_vertices: + return + nearby_vertices.add(vertex) + if remaining == 0: + return + for edge in vertex.in_edges: + search(edge.src, remaining - 1) + search(edge.dst, remaining - 1) + search(vertex, n) + return nearby_vertices + +def get_nearby_vertices_by_distance(vertex, distance): + nearby_vertices = set([vertex]) + 
search_queue = [] + search_queue.append((vertex, distance)) + while len(search_queue) > 0: + vertex, remaining = search_queue[0] + search_queue = search_queue[1:] + if remaining <= 0: + continue + for edge in vertex.out_edges: + if edge.dst in nearby_vertices: + continue + nearby_vertices.add(edge.dst) + search_queue.append((edge.dst, remaining - edge.segment().length())) + return nearby_vertices + +def densify(g, length, epsilon=0.1): + for edge in g.edges: + n_split = int(edge.segment().length() / length - epsilon) + for i in xrange(n_split): + edge = g.split_edge(edge, length) diff --git a/pipeline2/discoverlib/grid_index.py b/pipeline2/discoverlib/grid_index.py new file mode 100644 index 0000000..a369997 --- /dev/null +++ b/pipeline2/discoverlib/grid_index.py @@ -0,0 +1,33 @@ +import math + +class GridIndex(object): + def __init__(self, size): + self.size = size + self.grid = {} + + # Insert a point with data. + def insert(self, p, data): + self.insert_rect(p.bounds(), data) + + # Insert a data with rectangle bounds. 
+ def insert_rect(self, rect, data): + def f(cell): + if cell not in self.grid: + self.grid[cell] = [] + self.grid[cell].append(data) + self.each_cell(rect, f) + + def each_cell(self, rect, f): + for i in xrange(int(math.floor(rect.start.x / self.size)), int(math.floor(rect.end.x / self.size)) + 1): + for j in xrange(int(math.floor(rect.start.y / self.size)), int(math.floor(rect.end.y / self.size)) + 1): + f((i, j)) + + def search(self, rect): + matches = set() + def f(cell): + if cell not in self.grid: + return + for data in self.grid[cell]: + matches.add(data) + self.each_cell(rect, f) + return matches diff --git a/pipeline2/discoverlib/tf_util.py b/pipeline2/discoverlib/tf_util.py new file mode 100644 index 0000000..c2719ce --- /dev/null +++ b/pipeline2/discoverlib/tf_util.py @@ -0,0 +1,28 @@ +import numpy + +def apply_conv(session, m, im, size=2048, stride=1024, scale=1, channels=1, outputs=None): + if outputs is None: + outputs = m.outputs + + output = numpy.zeros((im.shape[0]/scale, im.shape[1]/scale, channels), dtype='float32') + for x in range(0, im.shape[0] - size, stride) + [im.shape[0] - size]: + for y in range(0, im.shape[1] - size, stride) + [im.shape[1] - size]: + conv_input = im[x:x+size, y:y+size, :].astype('float32') / 255.0 + conv_output = session.run(outputs, feed_dict={ + m.is_training: False, + m.inputs: [conv_input], + })[0, :, :, :] + startx = size / 2 - stride / 2 + endx = size / 2 + stride / 2 + starty = size / 2 - stride / 2 + endy = size / 2 + stride / 2 + if x == 0: + startx = 0 + elif x >= im.shape[0] - size - stride: + endx = size + if y == 0: + starty = 0 + elif y >= im.shape[1] - size - stride: + endy = size + output[(x+startx)/scale:(x+endx)/scale, (y+starty)/scale:(y+endy)/scale, :] = conv_output[startx/scale:endx/scale, starty/scale:endy/scale, :] + return output diff --git a/pipeline2/eval_m5_point.py b/pipeline2/eval_m5_point.py new file mode 100644 index 0000000..c5d2016 --- /dev/null +++ b/pipeline2/eval_m5_point.py @@ -0,0 
"""Run trained M5 model inference over an existing road graph and emit
per-edge probabilities (out.graph / out.probs.json / out.iface.json).

Usage: eval_m5_point.py MODEL_PATH TILE_DIR GRAPH_DIR PYTILES_PATH REGION GRAPH_FNAME

Ported to Python 3 (print function, range, floor division) to match the
rest of the repository; also fixes detect_losses never being populated,
which made the returned detect loss numpy.mean([]) == nan.
"""
from discoverlib import geom, graph, tf_util
import model_m5d as model
import model_utils
import tileloader_sea2 as tileloader

import json
import numpy
import math
import os.path
from PIL import Image
import random
import scipy.ndimage
import sys
import tensorflow as tf
import time

MODEL_PATH = sys.argv[1]
tileloader.tile_dir = sys.argv[2]
tileloader.graph_dir = sys.argv[3]
tileloader.pytiles_path = sys.argv[4]
REGION = sys.argv[5]
EXISTING_GRAPH_FNAME = sys.argv[6]

MAX_PATH_LENGTH = 1000000
SEGMENT_LENGTH = 20
TILE_MODE = 'sat'
THRESHOLD_BRANCH = 0.2
THRESHOLD_FOLLOW = 0.2
WINDOW_SIZE = 256
SAVE_EXAMPLES = False
ANGLE_ONEHOT = 64
M6 = False
CACHE_M6 = False
FILTER_BY_TAG = None
USE_GRAPH_RECT = True
TILE_SIZE = 4096

def vector_to_action(extension_vertex, angle_outputs, threshold):
    """Convert the model's 64-bucket angle output into a one-hot action vector.

    Buckets near existing outgoing edges are blacklisted, and buckets whose
    target point lands too close to already-explored nearby graph points are
    rejected; the best remaining bucket above threshold (if any) is kept.

    Returns a float32 vector of shape (64,), all zeros when no bucket qualifies.
    """
    # mask out buckets that are similar to existing edges
    blacklisted_buckets = set()
    for edge in extension_vertex.out_edges:
        angle = geom.Point(1, 0).signed_angle(edge.segment().vector())
        bucket = int((angle + math.pi) * 64.0 / math.pi / 2)
        for offset in range(6):
            clockwise_bucket = (bucket + offset) % 64
            counterclockwise_bucket = (bucket + 64 - offset) % 64
            blacklisted_buckets.add(clockwise_bucket)
            blacklisted_buckets.add(counterclockwise_bucket)

    # BFS along outgoing edges (up to 4 segment lengths) collecting nearby
    # points so we can avoid stepping onto parts of the graph we came from
    seen_vertices = set()
    search_queue = []
    nearby_points = {}
    seen_vertices.add(extension_vertex)
    for edge in extension_vertex.out_edges:
        search_queue.append((graph.EdgePos(edge, 0), 0))
    while len(search_queue) > 0:
        edge_pos, distance = search_queue[0]
        search_queue = search_queue[1:]
        if distance > 0:
            nearby_points[edge_pos.point()] = distance
        if distance >= 4 * SEGMENT_LENGTH:
            continue

        edge = edge_pos.edge
        l = edge.segment().length()
        if edge_pos.distance + SEGMENT_LENGTH < l:
            search_queue.append((graph.EdgePos(edge, edge_pos.distance + SEGMENT_LENGTH), distance + SEGMENT_LENGTH))
        elif edge.dst not in seen_vertices:
            seen_vertices.add(edge.dst)
            for edge in edge.dst.out_edges:
                search_queue.append((graph.EdgePos(edge, 0), distance + l - edge_pos.distance))

    # any leftover targets above threshold?
    best_bucket = None
    best_value = None
    for bucket in range(64):
        if bucket in blacklisted_buckets:
            continue
        next_point = model_utils.get_next_point(extension_vertex.point, bucket, SEGMENT_LENGTH)
        bad = False
        for nearby_point, distance in nearby_points.items():
            if nearby_point.distance(next_point) < 0.5 * (SEGMENT_LENGTH + distance):
                bad = True
                break
        if bad:
            continue

        value = angle_outputs[bucket]
        if value > threshold and (best_bucket is None or value > best_value):
            best_bucket = bucket
            best_value = value

    x = numpy.zeros((64,), dtype='float32')
    if best_bucket is not None:
        x[best_bucket] = best_value
    return x

def eval(paths, m, session, max_path_length=MAX_PATH_LENGTH, segment_length=SEGMENT_LENGTH, save=False, compute_targets=True, max_batch_size=model.BATCH_SIZE, window_size=WINDOW_SIZE, verbose=True, threshold_override=None, cache_m6=None):
    """Iteratively extend the search paths with the model until all stop.

    paths: list of model_utils.Path objects to extend.
    Returns (mean angle loss, mean detect loss, mean total loss, iterations).
    Note: losses are only meaningful when compute_targets is True; in M6 mode
    they are all zeros.
    """
    angle_losses = []
    detect_losses = []
    losses = []
    path_lengths = {path_idx: 0 for path_idx in range(len(paths))}

    last_extended = False

    for len_it in range(99999999):
        if len_it % 500 == 0 and verbose:
            print('it {}'.format(len_it))
        # pop up to max_batch_size extension vertices across all paths
        path_indices = []
        extension_vertices = []
        for path_idx in range(len(paths)):
            if path_lengths[path_idx] >= max_path_length:
                continue
            extension_vertex = paths[path_idx].pop()
            if extension_vertex is None:
                continue
            path_indices.append(path_idx)
            path_lengths[path_idx] += 1
            extension_vertices.append(extension_vertex)

            if len(path_indices) >= max_batch_size:
                break

        if len(path_indices) == 0:
            # no path has anything left to extend
            break

        batch_inputs = []
        batch_detect_targets = []
        batch_angle_targets = numpy.zeros((len(path_indices), 64), 'float32')
        inputs_per_path = 1

        for i in range(len(path_indices)):
            path_idx = path_indices[i]

            path_input, path_detect_target = model_utils.make_path_input(paths[path_idx], extension_vertices[i], segment_length, window_size=window_size)
            if type(path_input) == list:
                batch_inputs.extend([x[:, :, 0:3] for x in path_input])
                inputs_per_path = len(path_input)
            else:
                batch_inputs.append(path_input[:, :, 0:3])
            # detect targets are unused here; feed zeros of the expected shape
            batch_detect_targets.append(numpy.zeros((64, 64, 1), dtype='float32'))

            if compute_targets:
                angle_targets, _ = model_utils.compute_targets_by_best(paths[path_idx], extension_vertices[i], segment_length)
                batch_angle_targets[i, :] = angle_targets

        # run model
        if M6:
            angle_loss, detect_loss, loss = 0.0, 0.0, 0.0
            if cache_m6 is not None:
                # read precomputed (scale 1/4) conv output instead of running the net
                # NOTE(review): p.x/p.y are assumed integral after scale(0.25) — confirm
                p = extension_vertices[0].point.sub(paths[0].tile_data['rect'].start).scale(0.25)
                batch_angle_outputs = numpy.array([cache_m6[p.x, p.y, :]], dtype='float32')
            else:
                pre_outputs = session.run(m.outputs, feed_dict={
                    m.is_training: False,
                    m.inputs: batch_inputs,
                })
                batch_angle_outputs = pre_outputs[:, window_size // 8, window_size // 8, :]
        else:
            feed_dict = {
                m.is_training: False,
                m.inputs: batch_inputs,
                m.angle_targets: [x for x in batch_angle_targets for _ in range(inputs_per_path)],
                m.detect_targets: [x for x in batch_detect_targets for _ in range(inputs_per_path)],
            }
            if ANGLE_ONEHOT:
                feed_dict[m.angle_onehot] = model_utils.get_angle_onehot(ANGLE_ONEHOT)
            batch_angle_outputs, angle_loss, detect_loss, loss = session.run([m.angle_outputs, m.angle_loss, m.detect_loss, m.loss], feed_dict=feed_dict)

        if inputs_per_path > 1:
            # merge the multiple rotated/augmented inputs per path by max
            actual_outputs = numpy.zeros((len(path_indices), 64), 'float32')
            for i in range(len(path_indices)):
                actual_outputs[i, :] = batch_angle_outputs[i*inputs_per_path:(i+1)*inputs_per_path, :].max(axis=0)
            batch_angle_outputs = actual_outputs

        angle_losses.append(angle_loss)
        # fix: detect_loss was previously never recorded, making the returned
        # numpy.mean(detect_losses) evaluate to nan on an empty list
        detect_losses.append(detect_loss)
        losses.append(loss)

        if (save is True and len_it % 1 == 0) or (save == 'extends' and last_extended):
            fname = '/home/ubuntu/data/{}_'.format(len_it)
            save_angle_targets = batch_angle_targets[0, :]
            if not compute_targets:
                save_angle_targets = None
            model_utils.make_path_input(paths[path_indices[0]], extension_vertices[0], segment_length, fname=fname, angle_targets=save_angle_targets, angle_outputs=batch_angle_outputs[0, :], window_size=window_size)

            with open(fname + 'meta.txt', 'w') as f:
                f.write('max angle output: {}\n'.format(batch_angle_outputs[0, :].max()))

        for i in range(len(path_indices)):
            path_idx = path_indices[i]
            # branch threshold applies once a vertex already has >= 2 out-edges
            if len(extension_vertices[i].out_edges) >= 2:
                threshold = THRESHOLD_BRANCH
            else:
                threshold = THRESHOLD_FOLLOW
            if threshold_override is not None:
                threshold = threshold_override

            x = vector_to_action(extension_vertices[i], batch_angle_outputs[i, :], threshold)
            last_extended = x.max() > 0
            paths[path_idx].push(extension_vertices[i], x, segment_length, training=False, branch_threshold=0.01, follow_threshold=0.01, point_reconnect=False)

    if save:
        paths[0].graph.save('out.graph')

    return numpy.mean(angle_losses), numpy.mean(detect_losses), numpy.mean(losses), len_it

if __name__ == '__main__':
    print('reading tiles')
    tiles = tileloader.Tiles(SEGMENT_LENGTH)

    print('initializing model')
    m = model.Model(bn=True)
    session = tf.Session()
    m.saver.restore(session, MODEL_PATH)

    g = graph.read_graph(EXISTING_GRAPH_FNAME)
    if not USE_GRAPH_RECT:
        # NOTE(review): TILE_START/TILE_END are not defined in this module;
        # this branch is only reachable if USE_GRAPH_RECT is flipped — confirm
        rect = geom.Rectangle(TILE_START, TILE_END)
        g = g.edgeIndex().subgraph(rect)
        r = rect.add_tol(-WINDOW_SIZE // 2)
    else:
        r = g.bounds().add_tol(-WINDOW_SIZE // 2)
    graph.densify(g, SEGMENT_LENGTH)
    tile_data = {
        'region': REGION,
        'rect': r.add_tol(WINDOW_SIZE // 2),
        'search_rect': r,
        'cache': tiles.cache,
        'starting_locations': [],
    }
    path = model_utils.Path(tiles.get_gc(REGION), tile_data, g=g)

    # optionally skip vertices whose edges carry undesired OSM highway tags
    skip_vertices = set()
    if FILTER_BY_TAG:
        with open(FILTER_BY_TAG, 'r') as f:
            edge_tags = {int(k): v for k, v in json.load(f).items()}
        for edge in g.edges:
            tags = edge_tags[edge.orig_id()]
            if 'highway' not in tags or tags['highway'] in ['pedestrian', 'footway', 'path', 'cycleway', 'construction']:
                for vertex in [edge.src, edge.dst]:
                    skip_vertices.add(vertex)
    for vertex in g.vertices:
        vertex.edge_pos = None
        if vertex not in skip_vertices:
            path.prepend_search_vertex(vertex)

    cache_m6 = None
    if M6 and CACHE_M6:
        start_time = time.time()
        big_ims = tile_data['cache'].get(tile_data['region'], tile_data['rect'])
        print('cache_m6: loaded im in {} sec'.format(time.time() - start_time))
        start_time = time.time()
        cache_m6 = tf_util.apply_conv(session, m, big_ims['input'], scale=4, channels=64)
        print('cache_m6: conv in {} sec'.format(time.time() - start_time))

    result = eval([path], m, session, save=SAVE_EXAMPLES, compute_targets=SAVE_EXAMPLES, cache_m6=cache_m6)
    print(result)

    # build the output graph containing only edges produced by inference
    # (edges with a .prob attribute); original-graph endpoints become the
    # interface vertex list
    ng = graph.Graph()
    vertex_map = {}
    orig_vertices = set()
    for edge in path.graph.edges:
        if not hasattr(edge, 'prob'):
            orig_vertices.add(edge.src)
            orig_vertices.add(edge.dst)
            continue
        for vertex in [edge.src, edge.dst]:
            if vertex not in vertex_map:
                vertex_map[vertex] = ng.add_vertex(vertex.point)
        new_edge = ng.add_edge(vertex_map[edge.src], vertex_map[edge.dst])
        new_edge.prob = edge.prob
    ng.save('out.graph')
    edge_probs = []
    for edge in ng.edges:
        edge_probs.append(int(edge.prob * 100))
    with open('out.probs.json', 'w') as f:
        json.dump(edge_probs, f)
    interface_vertices = [vertex_map[vertex].id for vertex in orig_vertices if vertex in vertex_map]
    with open('out.iface.json', 'w') as f:
        json.dump(interface_vertices, f)
github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b // indirect + github.com/dhconnelly/rtreego v1.1.0 // indirect + github.com/mitroadmaps/gomapinfer v0.0.0-20210917033103-4e3dcc98a112 // indirect + github.com/qedus/osmpbf v1.2.0 // indirect + google.golang.org/protobuf v1.26.0 // indirect +) diff --git a/pipeline2/go.sum b/pipeline2/go.sum new file mode 100644 index 0000000..1330c0e --- /dev/null +++ b/pipeline2/go.sum @@ -0,0 +1,41 @@ +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY= +github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk= +github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b h1:slYM766cy2nI3BwyRiyQj/Ud48djTMtMebDqepE95rw= +github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM= +github.com/dhconnelly/rtreego v1.1.0 h1:ejMaqN03N1s6Bdg6peGkNgBnYYSBHzcK8yhSPCB+rHE= +github.com/dhconnelly/rtreego v1.1.0/go.mod h1:SDozu0Fjy17XH1svEXJgdYq8Tah6Zjfa/4Q33Z80+KM= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/mitroadmaps/gomapinfer v0.0.0-20210917033103-4e3dcc98a112 h1:/msesy1s0b8A/xU7MSCzcDvDcrVaqvE/7Bckq1y9kAM= +github.com/mitroadmaps/gomapinfer v0.0.0-20210917033103-4e3dcc98a112/go.mod h1:60dnxKwUjhRjPfhvtjJQccZOo741GN+5WymEEc+Aa0c= +github.com/qedus/osmpbf v1.2.0 h1:yRm5ECkiUsN9sA+UN9yNnm64AVW2OYhOCb+gBa1FYCU= +github.com/qedus/osmpbf v1.2.0/go.mod h1:Cfv6JyqTZ72BjoW9FyFBQOC2DYJbL78yw+DLhBvSH+M= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto 
v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors 
v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= diff --git a/pipeline2/iou-tracker2.go b/pipeline2/iou-tracker2.go index 926e148..2795d73 100644 --- a/pipeline2/iou-tracker2.go +++ b/pipeline2/iou-tracker2.go @@ -1,7 +1,7 @@ package main import ( - "./lib" + "otif-pipeline/lib" "io" "io/ioutil" diff --git a/pipeline2/model_m5d.py b/pipeline2/model_m5d.py new file mode 100644 index 0000000..12e355d --- /dev/null +++ b/pipeline2/model_m5d.py @@ -0,0 +1,154 @@ +import numpy +import tensorflow as tf +import os +import os.path +import random +import math +import time +from PIL import Image + +BATCH_SIZE = 4 +KERNEL_SIZE = 3 + +class Model: + def _conv_layer(self, name, input_var, stride, in_channels, out_channels, options = {}): + activation = options.get('activation', 'relu') + dropout = options.get('dropout', None) + padding = options.get('padding', 'SAME') + batchnorm = options.get('batchnorm', False) + transpose = options.get('transpose', False) + + with tf.variable_scope(name) as scope: + if not transpose: + filter_shape = [KERNEL_SIZE, KERNEL_SIZE, in_channels, out_channels] + else: + filter_shape = [KERNEL_SIZE, KERNEL_SIZE, out_channels, in_channels] + kernel = tf.get_variable( + 'weights', + shape=filter_shape, + initializer=tf.truncated_normal_initializer(stddev=math.sqrt(2.0 / KERNEL_SIZE / KERNEL_SIZE / in_channels)), + dtype=tf.float32 + ) + biases = 
tf.get_variable( + 'biases', + shape=[out_channels], + initializer=tf.constant_initializer(0.0), + dtype=tf.float32 + ) + if not transpose: + output = tf.nn.bias_add( + tf.nn.conv2d( + input_var, + kernel, + [1, stride, stride, 1], + padding=padding + ), + biases + ) + else: + batch = tf.shape(input_var)[0] + side = tf.shape(input_var)[1] + output = tf.nn.bias_add( + tf.nn.conv2d_transpose( + input_var, + kernel, + [batch, side * stride, side * stride, out_channels], + [1, stride, stride, 1], + padding=padding + ), + biases + ) + if batchnorm: + output = tf.contrib.layers.batch_norm(output, center=True, scale=True, is_training=self.is_training, decay=0.99) + if dropout is not None: + output = tf.nn.dropout(output, keep_prob=1-dropout) + + if activation == 'relu': + return tf.nn.relu(output, name=scope.name) + elif activation == 'sigmoid': + return tf.nn.sigmoid(output, name=scope.name) + elif activation == 'none': + return output + else: + raise Exception('invalid activation {} specified'.format(activation)) + + def _fc_layer(self, name, input_var, input_size, output_size, options = {}): + activation = options.get('activation', 'relu') + dropout = options.get('dropout', None) + batchnorm = options.get('batchnorm', False) + + with tf.variable_scope(name) as scope: + weights = tf.get_variable( + 'weights', + shape=[input_size, output_size], + initializer=tf.truncated_normal_initializer(stddev=math.sqrt(2.0 / input_size)), + dtype=tf.float32 + ) + biases = tf.get_variable( + 'biases', + shape=[output_size], + initializer=tf.constant_initializer(0.0), + dtype=tf.float32 + ) + output = tf.matmul(input_var, weights) + biases + if batchnorm: + output = tf.contrib.layers.batch_norm(output, center=True, scale=True, is_training=self.is_training, decay=0.99) + if dropout is not None: + output = tf.nn.dropout(output, keep_prob=1-dropout) + + if activation == 'relu': + return tf.nn.relu(output, name=scope.name) + elif activation == 'sigmoid': + return tf.nn.sigmoid(output, 
name=scope.name) + elif activation == 'none': + return output + else: + raise Exception('invalid activation {} specified'.format(activation)) + + def __init__(self, bn=False, angle_weight=10): + tf.reset_default_graph() + + self.is_training = tf.placeholder(tf.bool) + self.inputs = tf.placeholder(tf.float32, [None, 256, 256, 3]) + self.angle_targets = tf.placeholder(tf.float32, [None, 64]) + self.detect_targets = tf.placeholder(tf.float32, [None, 64, 64, 1]) + self.learning_rate = tf.placeholder(tf.float32) + self.angle_onehot = tf.placeholder(tf.float32, [64, 64, 64]) + + batch_size = tf.shape(self.inputs)[0] + self.angle_onehot_tiled = tf.tile(tf.expand_dims(self.angle_onehot, axis=0), [batch_size, 1, 1, 1]) + + # layers + self.layer1 = self._conv_layer('layer1', self.inputs, 2, 3, 128, {'batchnorm': False}) # -> 128x128x128 + self.layer2 = self._conv_layer('layer2', self.layer1, 1, 128, 128, {'batchnorm': bn}) # -> 128x128x128 + self.layer3 = self._conv_layer('layer3', self.layer2, 2, 128, 256, {'batchnorm': bn}) # -> 64x64x256 + self.layer4 = self._conv_layer('layer4', self.layer3, 1, 256, 256, {'batchnorm': bn}) # -> 64x64x256 + self.layer5 = self._conv_layer('layer5', tf.concat([self.layer4, self.angle_onehot_tiled], axis=3), 1, 256+64, 256, {'batchnorm': bn}) # -> 64x64x256 + self.layer6 = self._conv_layer('layer6', self.layer5, 1, 256, 256, {'batchnorm': bn}) # -> 64x64x256 + self.layer7 = self._conv_layer('layer7', self.layer6, 2, 256, 512, {'batchnorm': bn}) # -> 32x32x512 + self.layer8 = self._conv_layer('layer8', self.layer7, 1, 512, 512, {'batchnorm': bn}) # -> 32x32x512 + self.layer9 = self._conv_layer('layer9', self.layer8, 2, 512, 512, {'batchnorm': bn}) # -> 16x16x512 + self.layer10 = self._conv_layer('layer10', self.layer9, 1, 512, 512, {'batchnorm': bn}) # -> 16x16x512 + self.layer11 = self._conv_layer('layer11', self.layer10, 2, 512, 512, {'batchnorm': bn}) # -> 8x8x512 + self.layer12 = self._conv_layer('layer12', self.layer11, 1, 512, 512, 
{'batchnorm': bn}) # -> 8x8x512 + self.layer13 = self._conv_layer('layer13', self.layer12, 1, 512, 512, {'batchnorm': bn}) # -> 8x8x512 + self.layer14 = self._conv_layer('layer14', self.layer13, 2, 512, 512, {'batchnorm': bn}) # -> 4x4x512 + self.layer15 = self._conv_layer('layer15', self.layer14, 1, 512, 512, {'batchnorm': bn}) # -> 4x4x512 + self.layer16 = self._conv_layer('layer16', self.layer15, 1, 512, 512, {'batchnorm': bn}) # -> 4x4x512 + self.layer17 = self._conv_layer('layer17', self.layer16, 2, 512, 512, {'batchnorm': bn}) # -> 2x2x512 + + self.angle_outputs = self._conv_layer('angle_outputs', self.layer17, 2, 512, 64, {'activation': 'sigmoid', 'batchnorm': False})[:, 0, 0, :] # -> 64 + self.detect_pre_outputs = self._conv_layer('detect_pre_outputs', self.layer6, 1, 256, 2, {'batchnorm': False}) # -> 64x64x2 + self.detect_outputs = tf.nn.softmax(self.detect_pre_outputs)[:, :, :, 0:1] + + self.angle_loss = tf.reduce_mean(tf.square(self.angle_targets - self.angle_outputs)) + self.detect_labels = tf.concat([self.detect_targets, 1 - self.detect_targets], axis=3) + self.detect_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=self.detect_labels, logits=self.detect_pre_outputs)) + #self.detect_loss = tf.reduce_mean(tf.square(self.detect_targets - self.detect_outputs)) + self.loss = self.angle_loss * angle_weight + self.detect_loss + + with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)): + self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss) + + self.init_op = tf.initialize_all_variables() + self.saver = tf.train.Saver(max_to_keep=None) diff --git a/pipeline2/model_utils.py b/pipeline2/model_utils.py new file mode 100644 index 0000000..5e39a27 --- /dev/null +++ b/pipeline2/model_utils.py @@ -0,0 +1,1003 @@ +from discoverlib import geom, graph + +import json +import numpy +import math +from PIL import Image +import random +import rtree +import scipy.ndimage +import sys +import time + 
# NOTE(review): this module is Python 2 code (print statements, xrange),
# unlike the TF2-migrated files elsewhere in the repo.

DEBUG = False

class Path(object):
	"""A road graph being grown vertex-by-vertex during inference/training.

	Wraps a graph.Graph together with:
	- explored_pairs: per directed edge (src_id, dst_id) of the ground-truth
	  graph, the remaining UNexplored distance interval (start, end) along
	  that edge; None means unbounded on that side.
	- search_vertices: FIFO-ish queue of vertices still to be extended.
	- edge_rtree: spatial index over this path's own edges.

	gc is the ground-truth context (provides edge_index, road_segments,
	edge_to_rs) or None when no ground truth is available.
	"""

	def __init__(self, gc, tile_data, start_loc=None, g=None):
		# tile_data: dict providing at least 'search_rect', 'rect', 'cache',
		# 'region' (see prepend_search_vertex / make_path_input).
		self.gc = gc
		self.tile_data = tile_data
		if g is None:
			self.graph = graph.Graph()
		else:
			self.graph = g
		self.explored_pairs = {}
		# counts vertices for which map-matching failed (see push)
		self.unmatched_vertices = 0

		if start_loc:
			# only the second starting location is used; the first is kept
			# commented out (historical two-vertex initialization)
			#v1 = self.graph.add_vertex(start_loc[0]['point'])
			v2 = self.graph.add_vertex(start_loc[1]['point'])
			#self.graph.add_bidirectional_edge(v1, v2)

			#v1.edge_pos = start_loc[0]['edge_pos']
			v2.edge_pos = start_loc[1]['edge_pos']

			self.search_vertices = [v2]
		else:
			self.search_vertices = []

		self._load_edge_rtree()

	def _load_edge_rtree(self):
		"""(Re)build the rtree over all current graph edges."""
		self.indexed_edges = set()
		self.edge_rtree = rtree.index.Index()
		for edge in self.graph.edges:
			self._add_edge_to_rtree(edge)

	def _add_edge_to_rtree(self, edge):
		# idempotent: each edge id is inserted at most once
		if edge.id in self.indexed_edges:
			return
		self.indexed_edges.add(edge.id)
		bounds = edge.segment().bounds().add_tol(1)
		self.edge_rtree.insert(edge.id, (bounds.start.x, bounds.start.y, bounds.end.x, bounds.end.y))

	def _add_bidirectional_edge(self, src, dst, prob=1.0):
		"""Add src<->dst edges, tag both with prob (unless None), and index them."""
		edges = self.graph.add_bidirectional_edge(src, dst)
		if prob is not None:
			edges[0].prob = prob
			edges[1].prob = prob
		self._add_edge_to_rtree(edges[0])
		self._add_edge_to_rtree(edges[1])

	def prepend_search_vertex(self, vertex):
		"""Queue vertex for future extension; returns False if it falls
		outside tile_data['search_rect'] (then it is not queued)."""
		if self.tile_data['search_rect'].contains(vertex.point):
			self.search_vertices = [vertex] + self.search_vertices
			return True
		else:
			return False

	def get_path_to(self, vertex, path=None, limit=6):
		"""Return up to `limit` vertices ending at `vertex`, walking
		backwards through in_edges (earliest vertex first)."""
		if path is None:
			path = []
		def follow(vertex):
			path.insert(0, vertex)
			if len(path) >= limit:
				return
			for edge in vertex.in_edges:
				if edge.src not in path:
					follow(edge.src)
					return
		follow(vertex)
		return path

	def mark_edge_explored(self, edge, distance):
		"""Mark the first `distance` units of `edge` (from its src) explored.

		explored_pairs stores the remaining UNexplored interval per directed
		edge; the reverse edge's interval is kept consistent (mirrored
		through the edge length l).
		"""
		l = edge.segment().length()
		if (edge.src.id, edge.dst.id) in self.explored_pairs:
			current_start, current_end = self.explored_pairs[(edge.src.id, edge.dst.id)]
		else:
			current_start, current_end = None, None

		if current_start is None:
			new_start = distance
		else:
			new_start = max(current_start, distance)
		reverse_new_end = l - new_start

		if new_start >= l:
			# fully explored: empty interval on both directions
			new_end = -1
			reverse_new_start = l + 1
		elif current_end is None:
			new_end = None
			reverse_new_start = None
		else:
			new_end = current_end
			reverse_new_start = l - current_end

		self.explored_pairs[(edge.src.id, edge.dst.id)] = (new_start, new_end)
		self.explored_pairs[(edge.dst.id, edge.src.id)] = (reverse_new_start, reverse_new_end)

	def mark_rs_explored(self, rs, distance=None):
		"""Mark a road segment explored up to `distance` along rs
		(entire rs when distance is None)."""
		for edge in rs.edges:
			edge_distance = rs.edge_distances[edge.id]
			l = edge.segment().length()
			if distance is None or distance >= edge_distance + l:
				# whole edge covered (l + 1 forces the fully-explored case)
				self.mark_edge_explored(edge, l + 1)
			elif distance < edge_distance:
				break
			else:
				self.mark_edge_explored(edge, distance - edge_distance)
				break

	def is_explored(self, edge_pos):
		"""True if the EdgePos lies outside the remaining unexplored
		interval of its directed edge (i.e. it was already explored)."""
		if (edge_pos.edge.src.id, edge_pos.edge.dst.id) not in self.explored_pairs:
			return False
		start, end = self.explored_pairs[(edge_pos.edge.src.id, edge_pos.edge.dst.id)]
		if (start is None or edge_pos.distance >= start) and (end is None or edge_pos.distance <= end):
			return False
		return True

	def push(self, extension_vertex, angle_outputs, segment_length, training=True, branch_threshold=0.2, follow_threshold=0.2, allow_reconnect=True, force_reconnect=False, point_reconnect=False):
		"""Extend the path from extension_vertex using the model's 64-bucket
		angle distribution `angle_outputs`.

		Either: (a) force-reconnect to an existing non-nearby vertex,
		(b) stop (low confidence / too many outgoing edges) and mark the
		walked ground-truth road segments explored, or (c) step
		segment_length in the argmax direction, possibly reconnecting to an
		existing edge it lands near, and map-match the new vertex onto the
		ground-truth graph (when self.gc is present).
		"""
		if DEBUG: print 'extending from {}'.format(extension_vertex.point)
		max_angle = numpy.max(angle_outputs)

		if force_reconnect and allow_reconnect and len(extension_vertex.in_edges) >= 1:
			# consider reconnecting to vertices NOT graph-near the current one
			nearby_vertices = graph.get_nearby_vertices_by_distance(extension_vertex, 6*segment_length)
			prev_vertex = extension_vertex.in_edges[0].src

			best_vertex = None
			best_distance = None
			possible_rect = extension_vertex.point.bounds().add_tol(6 * segment_length)
			for edge_id in self.edge_rtree.intersection((possible_rect.start.x, possible_rect.start.y, possible_rect.end.x, possible_rect.end.y)):
				edge = self.graph.edges[edge_id]
				if edge.src in nearby_vertices or edge.dst in nearby_vertices:
					continue
				for vertex in [edge.src, edge.dst]:
					vector_to_vertex = vertex.point.sub(extension_vertex.point)
					vector_to_extension = extension_vertex.point.sub(prev_vertex.point)
					# don't reconnect backwards relative to travel direction
					if vector_to_vertex.angle_to(vector_to_extension) > math.pi / 2:
						continue

					# score: euclidean distance plus an angular penalty;
					# junction vertices (>=2 out edges) get a discount
					distance = vertex.point.distance(extension_vertex.point) + vector_to_vertex.angle_to(vector_to_extension) * segment_length
					if len(vertex.out_edges) >= 2:
						distance -= segment_length / 2
					if best_vertex is None or distance < best_distance:
						best_vertex = vertex
						best_distance = distance
			if best_vertex is not None:
				if DEBUG: print '... push: force reconnect with existing vertex at {}'.format(best_vertex.point)

				if self.gc is not None:
					# mark path up to best_vertex as explored
					path = self.get_path_to(extension_vertex)
					if best_vertex.edge_pos is not None:
						path += reversed(self.get_path_to(best_vertex, limit=3))
					else:
						path.append(best_vertex)

					probs, backpointers = graph.mapmatch(self.gc.edge_index, self.gc.road_segments, self.gc.edge_to_rs, [vertex.point for vertex in path], segment_length)
					if probs is not None:
						best_rs = graph.mm_best_rs(self.gc.road_segments, probs)
						rs_list = graph.mm_follow_backpointers(self.gc.road_segments, best_rs.id, backpointers)
						if DEBUG: print '... push: force reconnect: marking explored rs: {}'.format([rs.id for rs in rs_list[2:]])
						# skip the first two (oldest) entries of the matched list
						for rs in set(rs_list[2:] + [best_rs]):
							self.mark_rs_explored(rs)

				self._add_bidirectional_edge(extension_vertex, best_vertex)
				return

		if max_angle < follow_threshold or (max_angle < branch_threshold and len(extension_vertex.out_edges) >= 2) or len(extension_vertex.out_edges) > 4:
			if DEBUG: print '... push: decided to stop'

			if self.gc is not None and len(extension_vertex.out_edges) >= 1:
				# stop; we should mark path explored
				path = self.get_path_to(extension_vertex)
				probs, backpointers = graph.mapmatch(self.gc.edge_index, self.gc.road_segments, self.gc.edge_to_rs, [vertex.point for vertex in path], segment_length)
				if probs is not None:
					best_rs = graph.mm_best_rs(self.gc.road_segments, probs)
					rs_list = graph.mm_follow_backpointers(self.gc.road_segments, best_rs.id, backpointers)
					if DEBUG: print '... push: stop, so marking rs explored: ({})'.format([rs.id for rs in rs_list[2:] + [best_rs]])
					for rs in set([rs for rs in rs_list[2:] if rs != best_rs]):
						self.mark_rs_explored(rs)
					# best_rs is only explored up to our stopping point
					best_pos = best_rs.closest_pos(extension_vertex.point)
					self.mark_rs_explored(best_rs, distance=best_rs.edge_distances[best_pos.edge.id]+best_pos.distance)
		else:
			# `and False` disables the exploration randomization branch
			if training and random.random() < 0.1 and False:
				angle_bucket = random.randint(0, 63)
			else:
				angle_bucket = numpy.argmax(angle_outputs)
			angle_prob = angle_outputs[angle_bucket]

			next_point = get_next_point(extension_vertex.point, angle_bucket, segment_length)

			if allow_reconnect:
				# if this point is close to non-nearby vertex, then connect to extension_vertex
				if point_reconnect:
					reconnect_threshold = 1.5 * segment_length
				else:
					reconnect_threshold = 3 * segment_length

				nearby_vertices = graph.get_nearby_vertices_by_distance(extension_vertex, 6*segment_length)

				best_vertex = None
				best_distance = None
				possible_rect = next_point.bounds().add_tol(reconnect_threshold)
				for edge_id in self.edge_rtree.intersection((possible_rect.start.x, possible_rect.start.y, possible_rect.end.x, possible_rect.end.y)):
					edge = self.graph.edges[edge_id]
					if edge.src in nearby_vertices or edge.dst in nearby_vertices:
						continue
					if edge.segment().distance(next_point) > reconnect_threshold:
						continue

					# parallel road constraint: don't reconnect if angle of segments are almost the same
					vector_to_next = next_point.sub(extension_vertex.point)
					edge_vector = edge.segment().vector()
					if not point_reconnect and len(edge.dst.out_edges) >= 2 and (vector_to_next.angle_to(edge_vector) < math.pi / 10 or vector_to_next.angle_to(edge_vector) > math.pi * 9 / 10):
						continue

					proj_point = edge.segment().project(next_point)
					vector_to_point = proj_point.sub(extension_vertex.point)
					if vector_to_point.angle_to(vector_to_next) > math.pi / 4:
						continue

					distance = proj_point.distance(next_point)
					if best_vertex is None or distance < best_distance:
						# best candidate is the (edge, projection) pair; the
						# edge is split at the projection below
						best_vertex = (edge, proj_point)
						best_distance = distance

					'''for vertex in [edge.src, edge.dst]:
						vector_to_vertex = vertex.point.sub(extension_vertex.point)
						if vector_to_vertex.angle_to(vector_to_next) > math.pi / 4:
							continue

						distance = vertex.point.distance(next_point)
						if len(vertex.out_edges) >= 2:
							distance -= segment_length / 2
						if best_vertex is None or distance < best_distance:
							best_vertex = vertex
							best_distance = distance'''
				if best_vertex is not None:
					edge, proj_point = best_vertex
					if DEBUG: print '... push: decided to reconnect with existing vertex at {}'.format(proj_point)
					new_vertex = self.graph.add_vertex(proj_point)
					if hasattr(edge.src, 'edge_pos'):
						new_vertex.edge_pos = edge.src.edge_pos
					#new_vertex = best_vertex

					if self.gc is not None:
						# mark path up to best_vertex as explored
						path = self.get_path_to(extension_vertex)
						if new_vertex.edge_pos is not None:
							path += reversed(self.get_path_to(new_vertex, limit=3))
						else:
							path.append(new_vertex)
						probs, backpointers = graph.mapmatch(self.gc.edge_index, self.gc.road_segments, self.gc.edge_to_rs, [vertex.point for vertex in path], segment_length)
						if probs is not None:
							best_rs = graph.mm_best_rs(self.gc.road_segments, probs)
							rs_list = graph.mm_follow_backpointers(self.gc.road_segments, best_rs.id, backpointers)
							if DEBUG: print '... push: reconnect: marking explored rs: {}'.format([rs.id for rs in rs_list[2:]])
							for rs in set(rs_list[2:] + [best_rs]):
								self.mark_rs_explored(rs)

					# split edge: rewire edge/opp_edge to terminate at new_vertex,
					# then re-add the new_vertex<->dst half and the connection
					src, dst = edge.src, edge.dst
					opp_edge = edge.get_opposite_edge()
					edge.dst = new_vertex
					opp_edge.src = new_vertex
					dst.in_edges.remove(edge)
					dst.out_edges.remove(opp_edge)
					new_vertex.in_edges.append(edge)
					new_vertex.out_edges.append(opp_edge)
					if hasattr(edge, 'prob'):
						self._add_bidirectional_edge(new_vertex, dst, prob=edge.prob)
					else:
						self._add_bidirectional_edge(new_vertex, dst, prob=None)
					self._add_bidirectional_edge(extension_vertex, new_vertex, prob=angle_prob)
					return

			# add vertex and map-match to find edge_pos
			next_vertex = self.graph.add_vertex(next_point)
			self._add_bidirectional_edge(extension_vertex, next_vertex, prob=angle_prob)
			next_vertex.edge_pos = None

			# re-queue current vertex (it may still have other branches)
			self.prepend_search_vertex(extension_vertex)
			in_bounds = self.prepend_search_vertex(next_vertex)

			if self.gc is not None:
				path_to_next = self.get_path_to(next_vertex)
				probs, backpointers = graph.mapmatch(self.gc.edge_index, self.gc.road_segments, self.gc.edge_to_rs, [vertex.point for vertex in path_to_next], segment_length)
				if probs is not None:
					if DEBUG: print '... push: mm probs: {}'.format(probs)
					best_rs = graph.mm_best_rs(self.gc.road_segments, probs)
					best_pos = best_rs.closest_pos(next_vertex.point)

					# only use best_rs if it is either not explored, or same as previous rs
					if best_rs is not None and (not self.is_explored(best_pos) or (extension_vertex.edge_pos is not None and self.gc.edge_to_rs[extension_vertex.edge_pos.edge.id] == best_rs)):
						next_vertex.edge_pos = best_pos
					if len(path_to_next) >= 10:
						rs_list = graph.mm_follow_backpointers(self.gc.road_segments, best_rs.id, backpointers)
						if DEBUG: print '... push: mm: {}'.format([rs.id for rs in rs_list])
						if in_bounds:
							if DEBUG: print '... push: normal extend, marking explored rs: {}'.format([rs.id for rs in rs_list[2:5] if rs not in rs_list[5:]])
							for rs in rs_list[2:4]:
								if rs in rs_list[4:]:
									# don't mark edges along rs that we might still be following as explored
									continue
								self.mark_rs_explored(rs)
						else:
							if DEBUG: print '... push: normal extend but out of bounds, marking explored rs: {}'.format([rs.id for rs in rs_list[2:] + [best_rs]])
							for rs in set(rs_list[2:] + [best_rs]):
								self.mark_rs_explored(rs)
				else:
					self.unmatched_vertices += 1

	def pop(self):
		"""Dequeue and return the next search vertex, or None when empty."""
		if len(self.search_vertices) == 0:
			return None
		vertex = self.search_vertices[0]
		self.search_vertices = self.search_vertices[1:]
		return vertex

	def clone(self):
		"""Deep-ish copy: clones the graph and copies the bookkeeping
		containers (explored_pairs, search_vertices)."""
		other = Path(self.gc, self.tile_data, g=self.graph.clone())
		other.explored_pairs = dict(self.explored_pairs)
		other.unmatched_vertices = self.unmatched_vertices
		other.search_vertices = list(self.search_vertices)
		return other

def get_unexplored_graph(path, extension_vertex, origin, segment_length, window_size):
	"""Render a 128x128 target tile of ground-truth road segments that are
	reachable from extension_vertex's map-matched position but not yet
	explored. Returns all-zeros when the vertex has no edge_pos."""
	tile = numpy.zeros((128, 128), dtype='float32')

	if extension_vertex.edge_pos is None:
		return tile

	def draw(rs, distance, remaining):
		# rasterize rs from `distance` onward, up to `remaining` units,
		# then recurse into its outgoing road segments
		start_edge_idx = rs.distance_to_edge(distance, return_idx=True)
		for edge_idx in xrange(start_edge_idx, len(rs.edges)):
			edge = rs.edges[edge_idx]
			edge_distance = distance - rs.edge_distances[edge.id]

			start_edge_pos = graph.EdgePos(edge, edge_distance)
			start = start_edge_pos.point()

			if edge_distance + remaining < edge.segment().length():
				end_edge_pos = graph.EdgePos(edge, edge_distance + remaining)
				end = end_edge_pos.point()
			else:
				end = edge.dst.point

			for p in geom.draw_line(start.sub(origin), end.sub(origin), geom.Point(window_size, window_size)):
				p_small = p.scale(128.0 / window_size)
				tile[p_small.x, p_small.y] = 1.0

			remaining -= edge.segment().length() - edge_distance
			distance = rs.edge_distances[edge.id] + edge.segment().length() + 0.001
			if remaining <= 0:
				return

		for next_rs in rs.out_rs(path.gc.edge_to_rs):
			if rs == next_rs or next_rs.is_opposite(rs):
				continue
			# NOTE(review): is_explored expects an EdgePos (reads .edge and
			# .distance) but receives a bare Edge here; elsewhere this check
			# is written path.is_explored(graph.EdgePos(rs.edges[0], 0)).
			# Also the check uses rs (current) rather than next_rs, unlike
			# the analogous recursion in get_unexplored_graph2 — confirm.
			elif path.is_explored(rs.edges[0]):
				continue
			draw(next_rs, 0, remaining)

	cur_edge = extension_vertex.edge_pos.edge
	cur_rs = path.gc.edge_to_rs[cur_edge.id]
	cur_distance = cur_rs.edge_distances[cur_edge.id] + extension_vertex.edge_pos.distance
	prev_rs = None

	if len(extension_vertex.in_edges) >= 1:
		prev_vertex = extension_vertex.in_edges[0].src
		if prev_vertex.edge_pos is not None:
			prev_edge = prev_vertex.edge_pos.edge
			prev_rs = path.gc.edge_to_rs[prev_edge.id]

	# candidate road segments to render: (rs, distance offset, is_behind)
	potential_rs = []
	if cur_distance + segment_length < cur_rs.length():
		potential_rs.append((cur_rs, 0, False))
	else:
		distance_to_potential = cur_rs.length() - cur_distance
		for rs in cur_rs.out_rs(path.gc.edge_to_rs):
			if rs == cur_rs or rs.is_opposite(cur_rs):
				continue
			potential_rs.append((rs, distance_to_potential, False))
	if cur_distance < segment_length / 2 and prev_rs is not None:
		distance_to_potential = cur_distance
		for rs in cur_rs.in_rs(path.gc.edge_to_rs):
			if rs == cur_rs or rs.is_opposite(cur_rs) or rs == prev_rs or rs.is_opposite(prev_rs):
				continue

			# add the opposite of this rs so that we are going away from extension_vertex
			opposite_rs = path.gc.edge_to_rs[rs.edges[0].get_opposite_edge().id]
			potential_rs.append((opposite_rs, distance_to_potential, True))

	if len(potential_rs) + 1 > len(extension_vertex.out_edges):
		# NOTE(review): same Edge-vs-EdgePos mismatch as above in the
		# is_explored(rs.edges[0]) calls below — confirm intended type.
		potential_rs = [(rs, d, is_behind) for rs, d, is_behind in potential_rs if not path.is_explored(rs.edges[0])]
		if len(potential_rs) > 0:
			if path.is_explored(cur_rs.edges[0]):
				for rs, d, is_behind in potential_rs:
					draw(rs, 0, segment_length * 5 - d)
			else:
				draw(cur_rs, cur_distance, segment_length * 5)
				for rs, d, is_behind in potential_rs:
					if is_behind:
						draw(rs, 0, segment_length * 5 - d)

	return tile

def get_unexplored_graph2(path, extension_vertex, origin, segment_length, window_size):
	"""Variant of get_unexplored_graph: render the ground-truth segments
	ahead of extension_vertex (and the opposite direction), skipping the
	rasterization of portions already explored."""
	tile = numpy.zeros((128, 128), dtype='float32')

	if extension_vertex.edge_pos is None:
		return tile

	# find edges and partial edge explored by most recent part of path
	recent_explored = set()
	recent_vertices = path.get_path_to(extension_vertex)
	probs, backpointers = graph.mapmatch(path.gc.edge_index, path.gc.road_segments, path.gc.edge_to_rs, [vertex.point for vertex in recent_vertices], segment_length)
	if probs is not None:
		best_rs = graph.mm_best_rs(path.gc.road_segments, probs)
		rs_list = graph.mm_follow_backpointers(path.gc.road_segments, best_rs.id, backpointers)
		for rs in rs_list[2:] + [best_rs]:
			for edge in rs.edges:
				# NOTE(review): BUG — `self` is undefined in this module-level
				# function (NameError whenever probs is not None), and
				# Path.explored_pairs is a dict, not a set. The freshly-built
				# and otherwise-unused `recent_explored` set above is almost
				# certainly the intended target: recent_explored.add(...).
				self.explored_pairs.add((edge.src.id, edge.dst.id))

	def draw(rs, distance, remaining, is_explored=False):
		# rasterize rs from `distance` for up to `remaining` units unless
		# flagged explored; always recurse to continue distance accounting
		start_edge_idx = rs.distance_to_edge(distance, return_idx=True)
		for edge_idx in xrange(start_edge_idx, len(rs.edges)):
			edge = rs.edges[edge_idx]
			edge_distance = distance - rs.edge_distances[edge.id]

			start_edge_pos = graph.EdgePos(edge, edge_distance)
			start = start_edge_pos.point()

			if edge_distance + remaining < edge.segment().length():
				end_edge_pos = graph.EdgePos(edge, edge_distance + remaining)
				end = end_edge_pos.point()
			else:
				end = edge.dst.point

			if not is_explored:
				for p in geom.draw_line(start.sub(origin), end.sub(origin), geom.Point(window_size, window_size)):
					p_small = p.scale(128.0 / window_size)
					tile[p_small.x, p_small.y] = 1.0

			remaining -= edge.segment().length() - edge_distance
			distance = rs.edge_distances[edge.id] + edge.segment().length() + 0.001
			if remaining <= 0:
				return

		for next_rs in rs.out_rs(path.gc.edge_to_rs):
			if rs == next_rs or next_rs.is_opposite(rs):
				continue
			# NOTE(review): is_explored expects an EdgePos but receives a bare
			# Edge (next_rs.edges[0]); elsewhere (compute_targets_by_best) the
			# code wraps with graph.EdgePos(rs.edges[0], 0) — confirm.
			draw(next_rs, 0, remaining, is_explored=path.is_explored(next_rs.edges[0]))

	cur_edge = extension_vertex.edge_pos.edge
	cur_rs = path.gc.edge_to_rs[cur_edge.id]
	cur_distance = cur_rs.edge_distances[cur_edge.id] + extension_vertex.edge_pos.distance
	draw(cur_rs, cur_distance, segment_length * 5, is_explored=path.is_explored(cur_rs.edges[0]))

	# also render the reverse-direction road segment from the same location
	opposite_rs = cur_rs.get_opposite_rs(path.gc.edge_to_rs)
	opposite_pos = opposite_rs.closest_pos(extension_vertex.point)
	opposite_distance = opposite_rs.edge_distances[opposite_pos.edge.id] + opposite_pos.distance
	# NOTE(review): is_explored is queried with cur_rs here, not opposite_rs —
	# possibly intentional (same road), but confirm.
	draw(opposite_rs, opposite_distance, segment_length * 5, is_explored=path.is_explored(cur_rs.edges[0]))

	return tile

def get_compact_path_input(path, extension_vertex, window_size=512):
	"""Collect a serializable description of the model input window centered
	on extension_vertex (segments only, no rasterization); see
	uncompact_path_input for the inverse."""
	origin = extension_vertex.point.sub(geom.Point(window_size/2, window_size/2))
	rect = origin.bounds().extend(origin.add(geom.Point(window_size, window_size)))

	path_segments = []
	for edge_id in path.edge_rtree.intersection((rect.start.x, rect.start.y, rect.end.x, rect.end.y)):
		path_segments.append(path.graph.edges[edge_id].segment())

	gt_segments = []
	for edge in path.gc.edge_index.search(rect):
		gt_segments.append(edge.segment())

	return {
		'window_size': window_size,
		'cache': path.tile_data['cache'],
		'region': path.tile_data['region'],
		'big_rect': path.tile_data['rect'],
		'origin': origin,
		'path_segments': path_segments,
		'gt_segments': gt_segments,
	}

# memoized per-size angle one-hot tensors (size, size, 64)
static_angle_onehots = {}
def get_angle_onehot(size):
	"""Return a (size, size, 64) tensor one-hot encoding the angle from the
	tile center to each cell, bucketed into 64 bins; cached per size."""
	if size in static_angle_onehots:
		return static_angle_onehots[size]

	x = numpy.zeros((size, size, 64), dtype='float32')
	for i in xrange(size):
		for j in xrange(size):
			di = i - size/2
			dj = j - size/2
			a = int((math.atan2(dj, di) - math.atan2(0, 1) + math.pi) * 64 / 2 / math.pi)
			if a >= 64:
				a = 63
			elif a < 0:
				a = 0
			x[i, j, a] = 1
	static_angle_onehots[size] = x
	return x

def uncompact_path_input(d):
	"""Rasterize a compact input dict (from get_compact_path_input) into the
	model input tensor and the small detection target."""
	big_origin = d['big_rect'].start
	big_ims = d['cache'].get(d['region'], d['big_rect'])
	tile_origin = d['origin'].sub(big_origin)
	tile_big = big_ims['input'][tile_origin.x:tile_origin.x+d['window_size'], tile_origin.y:tile_origin.y+d['window_size'], :].astype('float32') / 255.0

	tile_path = numpy.zeros((d['window_size'], d['window_size']), dtype='float32')
	for segment in d['path_segments']:
		for p in geom.draw_line(segment.start.sub(d['origin']), segment.end.sub(d['origin']), geom.Point(d['window_size'], d['window_size'])):
			tile_path[p.x, p.y] = 1.0

	tile_graph = numpy.zeros((d['window_size'], d['window_size']), dtype='float32')
	tile_graph_small = numpy.zeros((d['window_size']/4, d['window_size']/4), dtype='float32')
	for segment in d['gt_segments']:
		for p in geom.draw_line(segment.start.sub(d['origin']), segment.end.sub(d['origin']), geom.Point(d['window_size'], d['window_size'])):
			tile_graph[p.x, p.y] = 1.0

			#p_small = p.scale(128.0 / d['window_size'])
			p_small = p.scale(0.25)
			tile_graph_small[p_small.x, p_small.y] = 1.0

	tile_point = numpy.zeros((d['window_size'], d['window_size']), dtype='float32')
	#tile_point[d['window_size']/2, d['window_size']/2] = 1.0

	input = numpy.concatenate([tile_big, tile_path.reshape(d['window_size'], d['window_size'], 1), tile_point.reshape(d['window_size'], d['window_size'], 1)], axis=2)
	detect_target = tile_graph_small
	return input, detect_target.reshape(d['window_size']/4, d['window_size']/4, 1)

def make_path_input(path, extension_vertex, segment_length, fname=None, green_points=None, blue_points=None, angle_outputs=None, angle_targets=None, action_outputs=None, action_targets=None, detect_output=None, detect_mode='normal', window_size=512):
	"""Build the model input (imagery + rasterized current path + point
	channel) and detection target for the window around extension_vertex.
	When fname is given, also write debug visualizations (detect.png,
	overlay.png) with that filename prefix."""
	big_origin = path.tile_data['rect'].start
	big_ims = path.tile_data['cache'].get(path.tile_data['region'], path.tile_data['rect'])

	if not path.tile_data['rect'].add_tol(-window_size/2).contains(extension_vertex.point):
		raise Exception('bad path {}'.format(path))
	origin = extension_vertex.point.sub(geom.Point(window_size/2, window_size/2))
	tile_origin = origin.sub(big_origin)
	rect = origin.bounds().extend(origin.add(geom.Point(window_size, window_size)))

	# rasterize the path's own edges within the window
	tile_path = numpy.zeros((window_size, window_size), dtype='float32')
	for edge_id in path.edge_rtree.intersection((rect.start.x, rect.start.y, rect.end.x, rect.end.y)):
		edge = path.graph.edges[edge_id]
		start = edge.src.point
		end = edge.dst.point
		for p in geom.draw_line(start.sub(origin), end.sub(origin), geom.Point(window_size, window_size)):
			tile_path[p.x, p.y] = 1.0
	#draw_segments = []
	#for edge_id in path.edge_rtree.intersection((rect.start.x, rect.start.y, rect.end.x, rect.end.y)):
	#	draw_segments.append(path.graph.edges[edge_id].segment())
	#tile_path = geom.draw_lines(draw_segments, shape=(window_size, window_size)).astype('float32')

	tile_point = numpy.zeros((window_size, window_size), dtype='float32')
	#tile_point[window_size/2, window_size/2] = 1.0

	# rasterize ground-truth edges (full and 4x-downsampled)
	tile_graph = numpy.zeros((window_size, window_size), dtype='float32')
	tile_graph_small = numpy.zeros((window_size/4, window_size/4), dtype='float32')
	if path.gc is not None:
		for edge in path.gc.edge_index.search(rect):
			start = edge.src.point
			end = edge.dst.point
			for p in geom.draw_line(start.sub(origin), end.sub(origin), geom.Point(window_size, window_size)):
				tile_graph[p.x, p.y] = 1.0

				#p_small = p.scale(128.0 / window_size)
				p_small = p.scale(0.25)
				tile_graph_small[p_small.x, p_small.y] = 1.0

	# draw_segments = []
	# draw_segments_sm = []
	# for edge in path.gc.edge_index.search(rect):
	# 	start = edge.src.point
	# 	end = edge.dst.point
	# 	draw_segments.append(geom.Segment(start, end))
	# 	draw_segments_sm.append(geom.Segment(start.scale(0.25), end.scale(0.25)))
	# tile_graph = geom.draw_lines(draw_segments, shape=(window_size, window_size)).astype('float32')
	# tile_graph_small = geom.draw_lines(draw_segments_sm, shape=(window_size/4, window_size/4)).astype('float32')

	# one input tensor per imagery layer whose cache key starts with 'input'
	inputs = []
	big_inputs = [v for k, v in big_ims.items() if k.startswith('input')]
	for big_input in big_inputs:
		tile_big = big_input[tile_origin.x:tile_origin.x+window_size, tile_origin.y:tile_origin.y+window_size, :].astype('float32') / 255.0
		input_el = numpy.concatenate([tile_big, tile_path.reshape(window_size, window_size, 1), tile_point.reshape(window_size, window_size, 1)], axis=2)
		#input_el = tile_big # REMOVE ME
		inputs.append(input_el)
	if len(inputs) == 1:
		input = inputs[0]
	else:
		input = inputs

	if detect_mode == 'normal':
		detect_target = tile_graph_small
	elif detect_mode == 'unexplored':
		detect_target = get_unexplored_graph(path, extension_vertex, origin, segment_length, window_size)
	elif detect_mode == 'unexplored2':
		detect_target = get_unexplored_graph2(path, extension_vertex, origin, segment_length, window_size)
	else:
		raise Exception('unknown detect mode {}'.format(detect_mode))

	if fname is not None:
		# `if False:` disables the path.png visualization branch
		if False:
			Image.fromarray(numpy.swapaxes((tile_big[:, :, 0:3] * 255.0).astype('uint8'), 0, 1)).save(fname + 'sat.png')

			x = numpy.zeros((window_size, window_size, 3), dtype='float32')
			x[:, :, 0] = tile_path

			if green_points:
				for p in green_points:
					p = p.sub(origin)
					x[p.x-2:p.x+2, p.y-2:p.y+2, 1] = 1.0
			if blue_points:
				for p in blue_points:
					p = p.sub(origin)
					x[p.x-2:p.x+2, p.y-2:p.y+2, 2] = 1.0
			if angle_outputs is not None or angle_targets is not None:
				# two rings: outputs at radius 100-120, targets at 140-160
				for i in xrange(window_size):
					for j in xrange(window_size):
						di = i - window_size/2
						dj = j - window_size/2
						d = math.sqrt(di * di + dj * dj)
						a = int((math.atan2(dj, di) - math.atan2(0, 1) + math.pi) * 64 / 2 / math.pi)
						if a >= 64:
							a = 63
						elif a < 0:
							a = 0
						if d > 100 and d <= 120 and angle_outputs is not None:
							x[i, j, 1] = angle_outputs[a]
						elif d > 140 and d <= 160 and angle_targets is not None:
							x[i, j, 1] = angle_targets[a]

			if path.gc is not None:
				for edge in path.gc.edge_index.search(rect):
					start = edge.src.point
					end = edge.dst.point
					for p in geom.draw_line(start.sub(origin), end.sub(origin), geom.Point(window_size, window_size)):
						x[p.x, p.y, 2] = 1.0
						# NOTE(review): is_explored expects an EdgePos but
						# receives a bare Edge here — confirm.
						if path.is_explored(edge):
							x[p.x, p.y, 1] = 1.0

			x[window_size/2-3:window_size/2+3, window_size/2-3:window_size/2+3, 1] = 1.0

			Image.fromarray(numpy.swapaxes((x * 255.0).astype('uint8'), 0, 1)).save(fname + 'path.png')
		if True:
			if detect_output is not None:
				# green = true positive, red = false positive, blue = false negative
				x = numpy.zeros((64, 64, 3), dtype='float32')
				threshold = 0.1
				x[:, :, 1] = numpy.logical_and(detect_target > threshold, detect_output > threshold).astype('float32')
				x[:, :, 0] = numpy.logical_and(detect_target <= threshold, detect_output > threshold).astype('float32')
				x[:, :, 2] = numpy.logical_and(detect_target > threshold, detect_output <= threshold).astype('float32')
				Image.fromarray(numpy.swapaxes((x * 255.0).astype('uint8'), 0, 1)).save(fname + 'detect.png')
		if True:
			# overlay: imagery with path edges in red, ground truth in green
			x = numpy.zeros((window_size, window_size, 3), dtype='float32')
			x[:, :, 0:3] = tile_big[:, :, 0:3]

			for edge_id in path.edge_rtree.intersection((rect.start.x, rect.start.y, rect.end.x, rect.end.y)):
				edge = path.graph.edges[edge_id]
				start = edge.src.point
				end = edge.dst.point
				for p in geom.draw_line(start.sub(origin), end.sub(origin), geom.Point(window_size, window_size)):
					x[p.x, p.y, 0] = 1.0
					x[p.x, p.y, 1] = 0.0
					x[p.x, p.y, 2] = 0.0

			for edge in path.gc.edge_index.search(rect):
				start = edge.src.point
				end = edge.dst.point
				for p in geom.draw_line(start.sub(origin), end.sub(origin), geom.Point(window_size, window_size)):
					x[p.x, p.y, 0] = 0.0
					x[p.x, p.y, 1] = 1.0
					# NOTE(review): duplicate write to channel 0 — the sibling
					# loops set all three channels, so this was probably meant
					# to be x[p.x, p.y, 2] = 0.0; confirm and fix.
					x[p.x, p.y, 0] = 0.0

			if angle_outputs is not None or angle_targets is not None:
				for i in xrange(window_size):
					for j in xrange(window_size):
						di = i - window_size/2
						dj = j - window_size/2
						d = math.sqrt(di * di + dj * dj)
						a = int((math.atan2(dj, di) - math.atan2(0, 1) + math.pi) * 64 / 2 / math.pi)
						if a >= 64:
							a = 63
						elif a < 0:
							a = 0
						if d > 100 and d <= 120 and angle_outputs is not None:
							x[i, j, 0] = angle_outputs[a]
							x[i, j, 1] = angle_outputs[a]
							x[i, j, 2] = 0
						elif d > 140 and d <= 160 and angle_targets is not None:
							x[i, j, 0] = angle_targets[a]
							x[i, j, 1] = angle_targets[a]
							x[i, j, 2] = 0

			# blue square marks the window center (the extension vertex)
			x[window_size/2-3:window_size/2+3, window_size/2-3:window_size/2+3, 2] = 1.0
			x[window_size/2-3:window_size/2+3, window_size/2-3:window_size/2+3, 0:2] = 0

			viz_points = helper_compute_viz_points(path, extension_vertex, segment_length)
			if viz_points is not None:
				pp = viz_points['mm'].sub(origin)
				x[pp.x-3:pp.x+3, pp.y-3:pp.y+3, 1:3] = 1.0
				for p in viz_points['nx']:
					pp = p.sub(origin)
					x[pp.x-3:pp.x+3, pp.y-3:pp.y+3, 0:3] = 1.0

			Image.fromarray(numpy.swapaxes((x * 255.0).astype('uint8'), 0, 1)).save(fname + 'overlay.png')

	return input, detect_target.reshape(window_size/4, window_size/4, 1)

def vector_from_angle(angle, scale=100):
	"""Unit direction for `angle` (radians), scaled by `scale`."""
	return geom.Point(math.cos(angle) * scale, math.sin(angle) * scale)

#cached_bucket_vectors = {}
#for bucket in xrange(64):
#	angle = bucket * math.pi * 2 / 64.0 - math.pi
#	cached_bucket_vectors[(bucket, 20)] = vector_from_angle(angle, 20)

def get_next_point(prev_point, angle_bucket, segment_length):
	"""Point segment_length away from prev_point in the direction of the
	given angle bucket (64 buckets spanning [-pi, pi))."""
#	if segment_length == 20:
#		return prev_point.add(cached_bucket_vectors[(angle_bucket, segment_length)])
	angle = angle_bucket * math.pi * 2 / 64.0 - math.pi
	vector = vector_from_angle(angle, segment_length)
	return prev_point.add(vector)

def compute_targets_by_best(path, extension_vertex, segment_length):
	"""Compute the 64-bucket soft angle target for extension_vertex from the
	ground-truth graph, and whether reconnecting to an existing path vertex
	should be allowed. Returns (angle_targets, allow_reconnect)."""
	angle_targets = numpy.zeros((64,), 'float32')
	allow_reconnect = False

	def best_angle_to_pos(pos):
		# average the bucket pointing at pos with the edge's own direction
		angle_points = [get_next_point(extension_vertex.point, angle_bucket, segment_length) for angle_bucket in xrange(64)]
		distances = [angle_point.distance(pos.point()) for angle_point in angle_points]
		point_angle = numpy.argmin(distances) * math.pi * 2 / 64.0 - math.pi
		edge_angle = geom.Point(1, 0).signed_angle(pos.edge.segment().vector())
		avg_vector = vector_from_angle(point_angle).add(vector_from_angle(edge_angle))
		avg_angle = geom.Point(1, 0).signed_angle(avg_vector)
		return int((avg_angle + math.pi) * 64.0 / math.pi / 2)

	def set_angle_bucket_soft(target_bucket):
		# soft label: exponential falloff (0.75^offset) on both sides
		for offset in xrange(31):
			clockwise_bucket = (target_bucket + offset) % 64
			counterclockwise_bucket = (target_bucket + 64 - offset) % 64
			for bucket in [clockwise_bucket, counterclockwise_bucket]:
				angle_targets[bucket] = max(angle_targets[bucket], pow(0.75, offset))

	def set_by_positions(positions):
		# get existing angle buckets, don't use any that are within 3 buckets
		bad_buckets = set()
		for edge in extension_vertex.out_edges:
			edge_angle = geom.Point(1, 0).signed_angle(edge.segment().vector())
			edge_bucket = int((edge_angle + math.pi) * 64.0 / math.pi / 2)
			for offset in xrange(3):
				clockwise_bucket = (edge_bucket + offset) % 64
				counterclockwise_bucket = (edge_bucket + 64 - offset) % 64
				bad_buckets.add(clockwise_bucket)
				bad_buckets.add(counterclockwise_bucket)

		for pos in positions:
			best_angle_bucket = best_angle_to_pos(pos)
			if best_angle_bucket in bad_buckets:
				continue
			set_angle_bucket_soft(best_angle_bucket)

	if extension_vertex.edge_pos is not None:
		cur_edge = extension_vertex.edge_pos.edge
		cur_rs = path.gc.edge_to_rs[cur_edge.id]
		prev_rs = None

		if len(extension_vertex.in_edges) >= 1:
			prev_vertex = extension_vertex.in_edges[0].src
			if prev_vertex.edge_pos is not None:
				prev_edge = prev_vertex.edge_pos.edge
				prev_rs = path.gc.edge_to_rs[prev_edge.id]

		def get_potential_rs(segment_length, allow_backwards):
			# road segments reachable within segment_length of our position
			potential_rs = []
			if cur_rs.edge_distances[cur_edge.id] + extension_vertex.edge_pos.distance + segment_length < cur_rs.length():
				potential_rs.append(cur_rs)
			else:
				for rs in cur_rs.out_rs(path.gc.edge_to_rs):
					if rs == cur_rs or rs.is_opposite(cur_rs):
						continue
					potential_rs.append(rs)
			if allow_backwards and cur_rs.edge_distances[cur_edge.id] + extension_vertex.edge_pos.distance < segment_length / 2 and prev_rs is not None:
				for rs in cur_rs.in_rs(path.gc.edge_to_rs):
					if rs == cur_rs or rs.is_opposite(cur_rs) or rs == prev_rs or rs.is_opposite(prev_rs):
						continue

					# add the opposite of this rs so that we are going away from extension_vertex
					opposite_rs = path.gc.edge_to_rs[rs.edges[0].get_opposite_edge().id]
					potential_rs.append(opposite_rs)

			# at very beginning of path, we can go in either direction
			if len(path.graph.edges) == 0:
				# TODO: fix get_opposite_rs for loops
				# currently, if there is a loop, then the rs corresponding to the loop may start at
				# any point along the loop, and get_opposite_rs will fail
				# I think it may be okay if the loop isn't completely isolated (circle with no
				# intersections), but definitely it fails for isolated loops
				#potential_rs.append(cur_rs.get_opposite_rs(path.gc.edge_to_rs))
				opposite_rs1 = cur_rs.get_opposite_rs(path.gc.edge_to_rs)
				opposite_rs2 = path.gc.edge_to_rs[cur_rs.edges[-1].get_opposite_edge().id]
				potential_rs.append(opposite_rs2)
				if opposite_rs1 != opposite_rs2:
					if opposite_rs1 is None:
						print 'warning: using opposite_rs2 for rs {}'.format(opposite_rs2.id)
					else:
						raise Exception('opposite_rs1 ({}) != opposite_rs2 ({})'.format(opposite_rs1.id, opposite_rs2.id))

			return potential_rs

		potential_rs = get_potential_rs(segment_length, True)
		potential_far_rs = get_potential_rs(3 * segment_length, False)

		# reconnect if there is a nearby path vertex such that:
		# (1) extension_vertex and the nearby vertex are far in the path graph
		# (2) but close in the ground truth graph
		# (3) and rs is either same as current one, or outgoing from a ground truth vertex that we are entering

		if len(extension_vertex.in_edges) <= 1:
			# first, get outgoing road segments from potential_far_rs
			reconnectable_rs = set([rs for rs in potential_far_rs if path.is_explored(graph.EdgePos(rs.edges[0], 0)) and rs != cur_rs and not cur_rs.is_opposite(rs)])
			reconnectable_rs.add(cur_rs)
			reconnectable_rs.add(path.gc.edge_to_rs[cur_rs.edges[-1].get_opposite_edge().id])

			# now, satisfy 1 and 2, while using set above to satisfy 3
			nearby_path_vertices = set(graph.get_nearby_vertices(extension_vertex, 6))
			gt_distances = graph.shortest_distances_from_source(cur_edge.dst, max_distance=3*segment_length)
			r = extension_vertex.point.bounds().add_tol(3*segment_length)
			for nearby_edge_id in path.edge_rtree.intersection((r.start.x, r.start.y, r.end.x, r.end.y)):
				nearby_edge = path.graph.edges[nearby_edge_id]
				for nearby_vertex in [nearby_edge.src, nearby_edge.dst]:
					if nearby_vertex.edge_pos is None:
						continue
					elif nearby_vertex.point.distance(extension_vertex.point) > 3*segment_length:
						continue
					elif nearby_vertex in nearby_path_vertices:
						continue
					elif path.gc.edge_to_rs[nearby_vertex.edge_pos.edge.id] not in reconnectable_rs:
						continue
					# 99999 acts as "unreachable" sentinel for missing vertices
					gt_distance = min(
						gt_distances.get(nearby_vertex.edge_pos.edge.src.id, 99999) + nearby_vertex.edge_pos.distance,
						gt_distances.get(nearby_vertex.edge_pos.edge.dst.id, 99999) + nearby_vertex.edge_pos.reverse().distance
					)
					if gt_distance < 3*segment_length:
						allow_reconnect = True

		if len(potential_rs) + 1 > len(extension_vertex.out_edges):
			if DEBUG: print '... compute_targets_by_best: potential_rs={}'.format([rs.id for rs in potential_rs])
			expected_positions = []
			for rs in potential_rs:
				pos = rs.closest_pos(extension_vertex.point)
				if path.is_explored(pos):
					continue
				rs_follow_positions = graph.follow_graph(pos, segment_length, explored_node_pairs=path.explored_pairs)
				if DEBUG: print '... compute_targets_by_best: rs {}: closest pos to extension point {} is on edge {}@{} at {}'.format(rs.id, extension_vertex.point, pos.edge.id, pos.distance, pos.point())
				for rs_follow_pos in rs_follow_positions:
					if DEBUG: print '... compute_targets_by_best: rs {}: ... {}@{} at {}'.format(rs.id, rs_follow_pos.edge.id, rs_follow_pos.distance, rs_follow_pos.point())
				expected_positions.extend(rs_follow_positions)
			set_by_positions(expected_positions)
		else:
			if DEBUG: print '... compute_targets_by_best: found {} potential rs but already have {} outgoing edges'.format(len(potential_rs), len(extension_vertex.out_edges))
	else:
		if DEBUG: print '... compute_targets_by_best: edge_pos is None'

	return angle_targets, allow_reconnect

def compute_targets_by_cgan(path, extension_vertex, segment_length, angle_outputs):
	"""Build a 65-bucket target by rotating the model's own 65-bucket output
	so its peak lands on a ground-truth direction (bucket 64 = no valid
	direction / stop — presumably, confirm), boosting the target bucket when
	the rotated distribution is not already confident."""
	rotation_target = 64

	def best_angle_to_pos(pos):
		angle_points = [get_next_point(extension_vertex.point, angle_bucket, segment_length) for angle_bucket in xrange(64)]
		distances = [angle_point.distance(pos.point()) for angle_point in angle_points]
		point_angle = numpy.argmin(distances) * math.pi * 2 / 64.0 - math.pi
		edge_angle = geom.Point(1, 0).signed_angle(pos.edge.segment().vector())
		avg_vector = vector_from_angle(point_angle).add(vector_from_angle(edge_angle))
		avg_angle = geom.Point(1, 0).signed_angle(avg_vector)
		return int((avg_angle + math.pi) * 64.0 / math.pi / 2)

	if extension_vertex.edge_pos is not None:
		cur_edge = extension_vertex.edge_pos.edge
		cur_rs = path.gc.edge_to_rs[cur_edge.id]
		prev_rs = None

		if len(extension_vertex.in_edges) >= 1:
			prev_vertex = extension_vertex.in_edges[0].src
			if prev_vertex.edge_pos is not None:
				prev_edge = prev_vertex.edge_pos.edge
				prev_rs = path.gc.edge_to_rs[prev_edge.id]

		potential_rs = []
		if cur_rs.edge_distances[cur_edge.id] + extension_vertex.edge_pos.distance + segment_length < cur_rs.length():
			potential_rs.append(cur_rs)
		else:
			for rs in cur_rs.out_rs(path.gc.edge_to_rs):
				if rs == cur_rs or rs.is_opposite(cur_rs):
					continue
				potential_rs.append(rs)
		if cur_rs.edge_distances[cur_edge.id] + extension_vertex.edge_pos.distance < segment_length / 2 and prev_rs is not None:
			for rs in cur_rs.in_rs(path.gc.edge_to_rs):
				if rs == cur_rs or rs.is_opposite(cur_rs) or rs == prev_rs or rs.is_opposite(prev_rs):
					continue

				# add the opposite of this rs so that we are going away from extension_vertex
				opposite_rs = path.gc.edge_to_rs[rs.edges[0].get_opposite_edge().id]
				potential_rs.append(opposite_rs)

		if len(potential_rs) + 1 > len(extension_vertex.out_edges):
			# NOTE(review): is_explored expects an EdgePos but receives a bare
			# Edge (rs.edges[0]) here — confirm.
			potential_rs = [rs for rs in potential_rs if not path.is_explored(rs.edges[0])]
			if DEBUG: print '... compute_targets_by_best: potential_rs={}'.format([rs.id for rs in potential_rs])
			expected_positions = []
			for rs in potential_rs:
				pos = rs.closest_pos(extension_vertex.point)
				rs_follow_positions = graph.follow_graph(pos, segment_length, explored_node_pairs=path.explored_pairs)
				if DEBUG: print '... compute_targets_by_best: rs {}: closest pos to extension point {} is on edge {}@{} at {}'.format(rs.id, extension_vertex.point, pos.edge.id, pos.distance, pos.point())
				for rs_follow_pos in rs_follow_positions:
					if DEBUG: print '... compute_targets_by_best: rs {}: ... {}@{} at {}'.format(rs.id, rs_follow_pos.edge.id, rs_follow_pos.distance, rs_follow_pos.point())
				expected_positions.extend(rs_follow_positions)

			angle_buckets = [best_angle_to_pos(pos) for pos in expected_positions]
			if angle_buckets:
				rotation_target = random.choice(angle_buckets)

	# rotate the model's distribution so its argmax moves to rotation_target
	rotation_amount = rotation_target - numpy.argmax(angle_outputs)
	angle_targets = numpy.zeros((65,), dtype='float32')
	for i in xrange(65):
		angle_targets[i] = angle_outputs[(i - rotation_amount + 65) % 65]

	# if the target bucket doesn't clearly dominate, boost it (halving the
	# rest first when the distribution is already confident)
	angle_targets_copy = numpy.zeros((65,), dtype='float32')
	angle_targets_copy[:] = angle_targets[:]
	angle_targets_copy[rotation_target] = 0
	if numpy.max(angle_targets) - numpy.max(angle_targets_copy) < 0.5:
		if numpy.max(angle_targets) >= 0.5:
			angle_targets = angle_targets * 0.5
		angle_targets[rotation_target] += random.random() * 0.1 + 0.4

	return angle_targets

def helper_compute_viz_points(path, extension_vertex, segment_length):
	"""Compute visualization points for make_path_input's overlay: the
	map-matched position ('mm') and expected next positions ('nx')."""
	if extension_vertex.edge_pos is not None:
		cur_edge = extension_vertex.edge_pos.edge
		cur_rs = path.gc.edge_to_rs[cur_edge.id]
		prev_rs = None

		if len(extension_vertex.in_edges) >= 1:
prev_vertex = extension_vertex.in_edges[0].src + if prev_vertex.edge_pos is not None: + prev_edge = prev_vertex.edge_pos.edge + prev_rs = path.gc.edge_to_rs[prev_edge.id] + + potential_rs = [] + if cur_rs.edge_distances[cur_edge.id] + extension_vertex.edge_pos.distance + segment_length < cur_rs.length(): + potential_rs.append(cur_rs) + else: + for rs in cur_rs.out_rs(path.gc.edge_to_rs): + if rs == cur_rs or rs.is_opposite(cur_rs): + continue + potential_rs.append(rs) + if cur_rs.edge_distances[cur_edge.id] + extension_vertex.edge_pos.distance < segment_length / 2 and prev_rs is not None: + for rs in cur_rs.in_rs(path.gc.edge_to_rs): + if rs == cur_rs or rs.is_opposite(cur_rs) or rs == prev_rs or rs.is_opposite(prev_rs): + continue + + # add the opposite of this rs so that we are going away from extension_vertex + opposite_rs = path.gc.edge_to_rs[rs.edges[0].get_opposite_edge().id] + potential_rs.append(opposite_rs) + + mm_point = extension_vertex.edge_pos.point() + nx_points = [] + + if len(potential_rs) + 1 > len(extension_vertex.out_edges): + if DEBUG: print '... compute_targets_by_best: potential_rs={}'.format([rs.id for rs in potential_rs]) + expected_positions = [] + for rs in potential_rs: + pos = rs.closest_pos(extension_vertex.point) + if path.is_explored(pos): + continue + rs_follow_positions = graph.follow_graph(pos, segment_length, explored_node_pairs=path.explored_pairs) + if DEBUG: print '... compute_targets_by_best: rs {}: closest pos to extension point {} is on edge {}@{} at {}'.format(rs.id, extension_vertex.point, pos.edge.id, pos.distance, pos.point()) + for rs_follow_pos in rs_follow_positions: + if DEBUG: print '... compute_targets_by_best: rs {}: ... {}@{} at {}'.format(rs.id, rs_follow_pos.edge.id, rs_follow_pos.distance, rs_follow_pos.point()) + nx_points.extend([pos.point() for pos in rs_follow_positions]) + else: + if DEBUG: print '... 
compute_targets_by_best: found {} potential rs but already have {} outgoing edges'.format(len(potential_rs), len(extension_vertex.out_edges)) + + return { + 'mm': mm_point, + 'nx': nx_points, + } + else: + if DEBUG: print '... compute_targets_by_best: edge_pos is None' + + return None diff --git a/pipeline2/run_m5_point.py b/pipeline2/run_m5_point.py new file mode 100644 index 0000000..b1c9af5 --- /dev/null +++ b/pipeline2/run_m5_point.py @@ -0,0 +1,318 @@ +from discoverlib import geom, graph +import model_m5d as model +import model_utils +import tileloader_sea2 as tileloader + +from collections import deque +import numpy +import math +import os +import os.path +from PIL import Image +import random +import scipy.ndimage +import sys +import tensorflow as tf +import time + +MODEL_BASE = sys.argv[1] +tileloader.tile_dir = sys.argv[2] +tileloader.graph_dir = sys.argv[3] +tileloader.pytiles_path = sys.argv[4] + +ROAD_WIDTH = 40 +SEGMENT_LENGTH = 20 +WINDOW_SIZE = 256 +NUM_TRAIN_TILES = 1024 +TILE_SIZE = 4096 +ANGLE_ONEHOT = 64 +PROB_FROM_ROAD = 1.0 +NO_DETECT = False +ENABLE_ROTATION = True + +tiles = tileloader.Tiles(SEGMENT_LENGTH) +tiles.prepare_training() +num_val = max(5, len(tiles.train_tiles) / 10 + 1) +print 'using {} of {} tiles as validation set'.format(num_val, len(tiles.train_tiles)) +val_tiles = tiles.train_tiles[:num_val] +train_tiles = tiles.train_tiles[num_val:] + +# initialize model and session +print 'initializing model' +m = model.Model(bn=True, angle_weight=40) +session = tf.Session() +model_path = MODEL_BASE + '/model_latest/model' +best_path = MODEL_BASE + '/model_best/model' +if os.path.isfile(model_path + '.meta'): + print '... loading existing model' + m.saver.restore(session, model_path) +else: + print '... 
initializing a new model' + session.run(m.init_op) + +if ENABLE_ROTATION: + FETCH_FACTOR = 2 +else: + FETCH_FACTOR = 1 + +def get_tile_rect(tile): + p = geom.Point(tile.x, tile.y) + return geom.Rectangle( + p.scale(TILE_SIZE), + p.add(geom.Point(1, 1)).scale(TILE_SIZE) + ) + +tile_edgeprobs = {} +def get_tile_edgeprobs(tile): + k = '{}_{}_{}'.format(tile.region, tile.x, tile.y) + if k not in tile_edgeprobs: + gc = tiles.get_gc(tile.region) + rect = get_tile_rect(tile).add_tol(-WINDOW_SIZE*FETCH_FACTOR/2) + edge_ids = [] + edge_lengths = [] + for edge in gc.graph.edges: + if rect.contains(edge.src.point) and rect.contains(edge.dst.point): + edge_ids.append(edge.id) + edge_lengths.append(edge.segment().length()) + edge_lengths = numpy.array(edge_lengths, dtype='float32') + edge_probs = edge_lengths / edge_lengths.sum() + tile_edgeprobs[k] = (edge_ids, edge_probs) + return tile_edgeprobs[k] + +def compute_targets(gc, point, edge_pos): + angle_targets = numpy.zeros((64,), 'float32') + + def best_angle_to_pos(pos): + angle_points = [model_utils.get_next_point(point, angle_bucket, SEGMENT_LENGTH) for angle_bucket in xrange(64)] + distances = [angle_point.distance(pos.point()) for angle_point in angle_points] + point_angle = numpy.argmin(distances) * math.pi * 2 / 64.0 - math.pi + edge_angle = geom.Point(1, 0).signed_angle(pos.edge.segment().vector()) + avg_vector = geom.vector_from_angle(point_angle, 50).add(geom.vector_from_angle(edge_angle, 50)) + avg_angle = geom.Point(1, 0).signed_angle(avg_vector) + return int((avg_angle + math.pi) * 64.0 / math.pi / 2) + + def set_angle_bucket_soft(target_bucket): + for offset in xrange(31): + clockwise_bucket = (target_bucket + offset) % 64 + counterclockwise_bucket = (target_bucket + 64 - offset) % 64 + for bucket in [clockwise_bucket, counterclockwise_bucket]: + angle_targets[bucket] = max(angle_targets[bucket], pow(0.75, offset)) + + def set_by_positions(positions): + for pos in positions: + best_angle_bucket = 
best_angle_to_pos(pos) + set_angle_bucket_soft(best_angle_bucket) + + cur_edge = edge_pos.edge + cur_rs = gc.edge_to_rs[cur_edge.id] + + potential_rs = [] + if cur_rs.edge_distances[cur_edge.id] + edge_pos.distance + SEGMENT_LENGTH < cur_rs.length(): + potential_rs.append(cur_rs) + else: + for rs in cur_rs.out_rs(gc.edge_to_rs): + if rs == cur_rs or rs.is_opposite(cur_rs): + continue + potential_rs.append(rs) + opposite_rs = gc.edge_to_rs[cur_rs.edges[-1].get_opposite_edge().id] + if cur_rs.edge_distances[cur_edge.id] + edge_pos.distance - SEGMENT_LENGTH > 0: + potential_rs.append(opposite_rs) + else: + for rs in opposite_rs.out_rs(gc.edge_to_rs): + if rs == opposite_rs or rs.is_opposite(opposite_rs): + continue + potential_rs.append(rs) + + expected_positions = [] + for rs in potential_rs: + pos = rs.closest_pos(point) + rs_follow_positions = graph.follow_graph(pos, SEGMENT_LENGTH) + expected_positions.extend(rs_follow_positions) + set_by_positions(expected_positions) + + return angle_targets + +def get_example(traintest='train'): + while True: + if traintest == 'train': + tile = random.choice(train_tiles) + elif traintest == 'test': + tile = random.choice(val_tiles) + + edge_ids, edge_probs = get_tile_edgeprobs(tile) + if len(edge_ids) > 80 or len(edge_ids) > 0: + break + + # determine rotation factor + rotation = None + if ENABLE_ROTATION: + rotation = random.random() * 2 * math.pi + + rect = get_tile_rect(tile) + small_rect = rect.add_tol(-WINDOW_SIZE*FETCH_FACTOR/2) + + # get random edge position + edge_id = numpy.random.choice(edge_ids, p=edge_probs) + gc = tiles.get_gc(tile.region) + edge = gc.graph.edges[edge_id] + distance = random.random() * edge.segment().length() + + # convert to point and add noise + point = graph.EdgePos(edge, distance).point() + if random.random() < PROB_FROM_ROAD: + if random.random() < 0.2: + noise_amount = 10 * SEGMENT_LENGTH + else: + noise_amount = ROAD_WIDTH / 1.5 + noise = geom.Point(random.random() * 2*noise_amount - 
noise_amount, random.random() * 2*noise_amount - noise_amount) + point = point.add(noise) + point = small_rect.clip(point) + else: + point = geom.Point(random.randint(0, small_rect.lengths().x - 1), random.randint(0, small_rect.lengths().y - 1)) + point = point.add(small_rect.start) + point = small_rect.clip(point) + + # match point to edge if possible + threshold = ROAD_WIDTH + closest_edge = None + closest_distance = None + for edge in gc.edge_index.search(point.bounds().add_tol(threshold)): + d = edge.segment().distance(point) + if d < threshold and (closest_edge is None or d < closest_distance): + closest_edge = edge + closest_distance = d + closest_pos = None + if closest_edge is not None: + closest_pos = closest_edge.closest_pos(point) + + # generate input + origin = point.sub(geom.Point(WINDOW_SIZE/2, WINDOW_SIZE/2)) + tile_origin = origin.sub(rect.start) + fetch_rect = geom.Rectangle(tile_origin, tile_origin.add(geom.Point(WINDOW_SIZE, WINDOW_SIZE))).add_tol(WINDOW_SIZE*(FETCH_FACTOR-1)/2) + big_ims = tiles.cache.get_window(tile.region, rect, fetch_rect) + input = big_ims['input'].astype('float32') / 255.0 + if rotation: + input = scipy.ndimage.interpolation.rotate(input, rotation * 180 / math.pi, reshape=False, order=0) + input = input[WINDOW_SIZE/2:3*WINDOW_SIZE/2, WINDOW_SIZE/2:3*WINDOW_SIZE/2, :] + + # compute targets + if closest_edge is not None: + angle_targets = compute_targets(gc, point, closest_pos) + if rotation: + shift = int(rotation * 32 / math.pi) + new_targets = numpy.zeros((64,), 'float32') + for i in xrange(64): + new_targets[(i + shift) % 64] = angle_targets[i] + angle_targets = new_targets + else: + angle_targets = numpy.zeros((64,), 'float32') + + detect_targets = numpy.zeros((64*FETCH_FACTOR, 64*FETCH_FACTOR, 1), dtype='float32') + if not NO_DETECT: + fetch_rect = geom.Rectangle(origin, origin.add(geom.Point(WINDOW_SIZE, WINDOW_SIZE))).add_tol(WINDOW_SIZE*(FETCH_FACTOR-1)/2) + for edge in gc.edge_index.search(fetch_rect.add_tol(32)): + 
start = edge.src.point.sub(fetch_rect.start).scale(float(64)/WINDOW_SIZE) + end = edge.dst.point.sub(fetch_rect.start).scale(float(64)/WINDOW_SIZE) + for p in geom.draw_line(start, end, geom.Point(64*FETCH_FACTOR, 64*FETCH_FACTOR)): + detect_targets[p.x, p.y, 0] = 1 + if rotation: + detect_targets = scipy.ndimage.interpolation.rotate(detect_targets, rotation * 180 / math.pi, reshape=False, order=0) + detect_targets = detect_targets[32:96, 32:96, :] + + info = { + 'region': tile.region, + 'point': point, + 'origin': origin, + 'closest_pos': closest_pos, + 'rotation': rotation, + } + + return info, input, angle_targets, detect_targets + +val_examples = [get_example('test') for _ in xrange(2048)] + +def vis_example(example, outputs=None): + info, input, angle_targets, detect_targets = example + x = numpy.zeros((WINDOW_SIZE, WINDOW_SIZE, 3), dtype='uint8') + x[:, :, :] = input * 255 + x[WINDOW_SIZE/2-2:WINDOW_SIZE/2+2, WINDOW_SIZE/2-2:WINDOW_SIZE/2+2, :] = 255 + + gc = tiles.get_gc(info['region']) + rect = geom.Rectangle(info['origin'], info['origin'].add(geom.Point(WINDOW_SIZE, WINDOW_SIZE))) + for edge in gc.edge_index.search(rect): + start = edge.src.point + end = edge.dst.point + for p in geom.draw_line(start.sub(info['origin']), end.sub(info['origin']), geom.Point(WINDOW_SIZE, WINDOW_SIZE)): + x[p.x, p.y, 0:2] = 0 + x[p.x, p.y, 2] = 255 + + if info['closest_pos'] is not None: + p = info['closest_pos'].point().sub(info['origin']) + x[p.x-2:p.x+2, p.y-2:p.y+2, 0] = 255 + x[p.x-2:p.x+2, p.y-2:p.y+2, 1:3] = 0 + + for i in xrange(WINDOW_SIZE): + for j in xrange(WINDOW_SIZE): + di = i - WINDOW_SIZE/2 + dj = j - WINDOW_SIZE/2 + d = math.sqrt(di * di + dj * dj) + a = int((math.atan2(dj, di) - math.atan2(0, 1) + math.pi) * 64 / 2 / math.pi) + if a >= 64: + a = 63 + elif a < 0: + a = 0 + elif d > 100 and d <= 120 and angle_targets is not None: + x[i, j, 0] = angle_targets[a] * 255 + x[i, j, 1] = angle_targets[a] * 255 + x[i, j, 2] = 0 + elif d > 70 and d <= 90 and outputs 
is not None: + x[i, j, 0] = outputs[a] * 255 + x[i, j, 1] = outputs[a] * 255 + x[i, j, 2] = 0 + return x + +best_loss = None + +for epoch in xrange(9999): + start_time = time.time() + train_losses = [] + for _ in xrange(1024): + examples = [get_example('train') for _ in xrange(model.BATCH_SIZE)] + feed_dict = { + m.is_training: True, + m.inputs: [example[1] for example in examples], + m.angle_targets: [example[2] for example in examples], + m.detect_targets: [example[3] for example in examples], + m.learning_rate: 1e-5, + } + if ANGLE_ONEHOT: + feed_dict[m.angle_onehot] = model_utils.get_angle_onehot(ANGLE_ONEHOT) + _, angle_loss, detect_loss, loss = session.run([m.optimizer, m.angle_loss, m.detect_loss, m.loss], feed_dict=feed_dict) + train_losses.append((angle_loss, detect_loss, loss)) + + train_loss = numpy.mean([l[0] for l in train_losses]), numpy.mean([l[1] for l in train_losses]), numpy.mean([l[2] for l in train_losses]) + train_time = time.time() + + val_losses = [] + for i in xrange(0, len(val_examples), model.BATCH_SIZE): + examples = val_examples[i:i+model.BATCH_SIZE] + feed_dict = { + m.is_training: False, + m.inputs: [example[1] for example in examples], + m.angle_targets: [example[2] for example in examples], + m.detect_targets: [example[3] for example in examples], + } + if ANGLE_ONEHOT: + feed_dict[m.angle_onehot] = model_utils.get_angle_onehot(ANGLE_ONEHOT) + angle_loss, detect_loss, loss = session.run([m.angle_loss, m.detect_loss, m.loss], feed_dict=feed_dict) + val_losses.append((angle_loss, detect_loss, loss)) + + val_loss = numpy.mean([l[0] for l in val_losses]), numpy.mean([l[1] for l in val_losses]), numpy.mean([l[2] for l in val_losses]) + val_time = time.time() + + print 'iteration {}: train_time={}, val_time={}, train_loss={}, val_loss={}/{}'.format(epoch, int(train_time - start_time), int(val_time - train_time), train_loss, val_loss, best_loss) + + m.saver.save(session, model_path) + if best_loss is None or val_loss[0] < best_loss: + 
best_loss = val_loss[0] + m.saver.save(session, best_path) diff --git a/pipeline2/tileloader_sea2.py b/pipeline2/tileloader_sea2.py new file mode 100644 index 0000000..039548e --- /dev/null +++ b/pipeline2/tileloader_sea2.py @@ -0,0 +1,155 @@ +from discoverlib import geom +from discoverlib import graph +import model_utils + +import json +import numpy +import os +import random +import rtree +import scipy.ndimage +import time + +tile_dir = None +graph_dir = None +pytiles_path = None + +angles_dir = None +tile_size = 4096 +window_size = 256 + +def get_tile_dirs(): + if isinstance(tile_dir, str): + return [tile_dir] + else: + return tile_dir + +def get_tile_keys(): + keys = [] + for i in xrange(len(get_tile_dirs())): + if i == 0: + keys.append('input') + else: + keys.append('input{}'.format(i)) + return keys + +def load_tile(region, i, j): + d = {} + for pathIdx, path in enumerate(get_tile_dirs()): + prefix = '{}/{}_{}_{}_'.format(path, region, i, j) + sat_im = scipy.ndimage.imread(prefix + 'sat.png') + if sat_im.shape == (tile_size, tile_size, 4): + sat_im = sat_im[:, :, 0:3] + sat_im = sat_im.swapaxes(0, 1) + if pathIdx == 0: + d['input'] = sat_im + else: + d['input{}'.format(pathIdx)] = sat_im + if angles_dir: + angle_im = numpy.fromfile('{}/{}_{}_{}.bin'.format(angles_dir, region, i, j), dtype='uint8') + angle_im = angle_im.reshape(tile_size/4, tile_size/4, 64) + d['angles'] = angle_im + + return d + +def load_rect(region, rect, load_func=load_tile): + # special case for fast load: rect is single tile + if rect.start.x % tile_size == 0 and rect.start.y % tile_size == 0 and rect.end.x % tile_size == 0 and rect.end.y % tile_size == 0 and rect.end.x - rect.start.x == tile_size and rect.end.y - rect.start.y == tile_size: + return load_func(region, rect.start.x / tile_size, rect.start.y / tile_size) + + tile_rect = geom.Rectangle( + geom.Point(rect.start.x / tile_size, rect.start.y / tile_size), + geom.Point((rect.end.x - 1) / tile_size + 1, (rect.end.y - 1) / tile_size 
+ 1) + ) + full_rect = geom.Rectangle( + tile_rect.start.scale(tile_size), + tile_rect.end.scale(tile_size) + ) + full_ims = {} + + for i in xrange(tile_rect.start.x, tile_rect.end.x): + for j in xrange(tile_rect.start.y, tile_rect.end.y): + p = geom.Point(i - tile_rect.start.x, j - tile_rect.start.y).scale(tile_size) + tile_ims = load_func(region, i, j) + for k, im in tile_ims.iteritems(): + scale = tile_size / im.shape[0] + if k not in full_ims: + full_ims[k] = numpy.zeros((full_rect.lengths().x / scale, full_rect.lengths().y / scale, im.shape[2]), dtype='uint8') + full_ims[k][p.x/scale:(p.x+tile_size)/scale, p.y/scale:(p.y+tile_size)/scale, :] = im + + crop_rect = geom.Rectangle( + rect.start.sub(full_rect.start), + rect.end.sub(full_rect.start) + ) + for k in full_ims: + scale = (full_rect.end.x - full_rect.start.x) / full_ims[k].shape[0] + full_ims[k] = full_ims[k][crop_rect.start.x/scale:crop_rect.end.x/scale, crop_rect.start.y/scale:crop_rect.end.y/scale, :] + return full_ims + +class TileCache(object): + def __init__(self): + self.cache = {} + self.last_used = {} + + def get(self, region, rect): + k = '{}.{}.{}.{}.{}'.format(region, rect.start.x, rect.start.y, rect.end.x, rect.end.y) + if k not in self.cache: + self.cache[k] = load_rect(region, rect) + self.last_used[k] = time.time() + return self.cache[k] + + def get_window(self, region, big_rect, small_rect): + big_dict = self.get(region, big_rect) + small_dict = {} + for k, v in big_dict.items(): + scale = tile_size / v.shape[0] + small_dict[k] = v[small_rect.start.x/scale:small_rect.end.x/scale, small_rect.start.y/scale:small_rect.end.y/scale, :] + return small_dict + +def get_tile_list(): + tiles = [] + with open(pytiles_path, 'r') as f: + for json_tile in json.load(f): + tile = geom.Point(int(json_tile['x']), int(json_tile['y'])) + tile.region = json_tile['region'] + tiles.append(tile) + downloaded = set([fname.split('_sat.png')[0] for fname in os.listdir(get_tile_dirs()[0]) if '_sat.png' in fname]) + 
dl_tiles = [tile for tile in tiles if '{}_{}_{}'.format(tile.region, tile.x, tile.y) in downloaded] + print 'found {} total tiles, using {} downloaded tiles'.format(len(tiles), len(dl_tiles)) + return dl_tiles + +class Tiles(object): + def __init__(self, segment_length): + self.segment_length = segment_length + + # load tile list + # this is a list of point dicts (a point dict has keys 'x', 'y') + # don't include test tiles + print 'reading tiles' + self.all_tiles = get_tile_list() + self.regions = set([tile.region for tile in self.all_tiles]) + self.cache = TileCache() + + self.gcs = {} + + def get_gc(self, region): + if region in self.gcs: + return self.gcs[region] + print 'loading gc for {}'.format(region) + fname = os.path.join(graph_dir, region + '.graph') + g = graph.read_graph(fname) + gc = graph.GraphContainer(g) + self.gcs[region] = gc + return gc + + def cache_gcs(self, regions): + for region in regions: + self.get_gc(region) + + def prepare_training(self): + self.cache_gcs(self.regions) + self.train_tiles = list(self.all_tiles) + random.shuffle(self.train_tiles) + + def num_tiles(self): + return len(self.train_tiles) diff --git a/pipeline2/util_create_tile_list.py b/pipeline2/util_create_tile_list.py new file mode 100644 index 0000000..e561c98 --- /dev/null +++ b/pipeline2/util_create_tile_list.py @@ -0,0 +1,32 @@ +from discoverlib import graph +import json +import math +import sys + +regions = sys.argv[1].split(',') +fnames = sys.argv[2].split(',') +out_fname = sys.argv[3] +tile_size = 4096.0 + +json_tiles = [] + +for i in xrange(len(regions)): + region = regions[i] + fname = fnames[i] + + g = graph.read_graph(fname) + tiles = set() + for vertex in g.vertices: + x = int(math.floor(vertex.point.x / tile_size)) + y = int(math.floor(vertex.point.y / tile_size)) + tiles.add((x, y)) + + for x, y in tiles: + json_tiles.append({ + 'x': x, + 'y': y, + 'region': region, + }) + +with open(out_fname, 'w') as f: + json.dump(json_tiles, f) diff --git 
a/pipeline2/util_graph_to_shp.py b/pipeline2/util_graph_to_shp.py new file mode 100644 index 0000000..c83b918 --- /dev/null +++ b/pipeline2/util_graph_to_shp.py @@ -0,0 +1,34 @@ +from discoverlib import geom, graph +import fiona +from shapely import geometry +import sys + +fname = sys.argv[1] +out_name = sys.argv[2] +g = graph.read_graph(fname, merge_duplicates=True, fpoint=True) +road_segments, _ = graph.get_graph_road_segments(g) + +lines = [] +seen_node_pairs = set() +for rs in road_segments: + if (rs.src().id, rs.dst().id) in seen_node_pairs or (rs.dst().id, rs.src().id) in seen_node_pairs: + continue + points = [] + for edge in rs.edges: + if len(points) > 0 and edge.src.point == points[-1]: + print 'adding edge {} -> {}; points: {}; edges: {}'.format(edge.src.point, edge.dst.point, points, [(e.src.point, e.dst.point) for e in rs.edges]) + print 'skip' + continue + points.append(edge.src.point) + points.append(rs.dst().point) + points = [(p.x, p.y) for p in points] + lines.append(geometry.LineString(points)) + seen_node_pairs.add((rs.src().id, rs.dst().id)) + +schema = {'geometry': 'LineString','properties': {}} +with fiona.open(out_name, 'w', 'ESRI Shapefile', schema) as layer: + for line in lines: + layer.write({ + 'geometry': geometry.mapping(line), + 'properties': {}, + }) diff --git a/pipeline2/util_lonlat_to_pixel.py b/pipeline2/util_lonlat_to_pixel.py new file mode 100644 index 0000000..b8d10fc --- /dev/null +++ b/pipeline2/util_lonlat_to_pixel.py @@ -0,0 +1,12 @@ +from discoverlib import coords, graph +import sys + +zoom = 18 +origin_tile = [int(sys.argv[1]), int(sys.argv[2])] +in_fname = sys.argv[3] +out_fname = sys.argv[4] + +g = graph.read_graph(in_fname, fpoint=True) +for vertex in g.vertices: + vertex.point = coords.lonLatToMapbox(vertex.point, zoom, origin_tile) +g.save(out_fname) diff --git a/pipeline2/util_shp_to_graph.py b/pipeline2/util_shp_to_graph.py new file mode 100644 index 0000000..f8165f1 --- /dev/null +++ 
b/pipeline2/util_shp_to_graph.py @@ -0,0 +1,32 @@ +from discoverlib import geom, graph +import fiona +import sys + +fname = sys.argv[1] +out_name = sys.argv[2] +f = fiona.open(fname) +g = graph.Graph() +vertex_map = {} + +def add(coords): + points = [geom.FPoint(c[0], c[1]) for c in coords] + vertices = [] + for point in points: + if point not in vertex_map: + vertex_map[point] = g.add_vertex(point) + vertices.append(vertex_map[point]) + for i in xrange(len(points) - 1): + src = vertices[i] + dst = vertices[i + 1] + if src == dst: + continue + g.add_bidirectional_edge(src, dst) + +for shape in f: + t = shape['geometry']['type'] + if t == 'MultiLineString': + for coords in shape['geometry']['coordinates']: + add(coords) + else: + add(shape['geometry']['coordinates']) +g.save(out_name) diff --git a/rnn/model_infer4b.py b/rnn/model_infer4b.py index 5cef122..842668c 100644 --- a/rnn/model_infer4b.py +++ b/rnn/model_infer4b.py @@ -1,6 +1,8 @@ import numpy import tensorflow as tf -#tf.disable_eager_execution() +# import tensorflow.compat.v1 as tf +# tf.disable_v2_behavior() +# tf.disable_eager_execution() import os import os.path import random diff --git a/rnn/tracker.py b/rnn/tracker.py index 599f803..c8da191 100644 --- a/rnn/tracker.py +++ b/rnn/tracker.py @@ -7,7 +7,9 @@ import struct import sys import tensorflow as tf -#tf.disable_eager_execution() +# tf.disable_eager_execution() +# import tensorflow.compat.v1 as tf +# tf.disable_v2_behavior() # run tracker on CPU import os diff --git a/tracker-miris/tracker.py b/tracker-miris/tracker.py index 102d49a..069358f 100644 --- a/tracker-miris/tracker.py +++ b/tracker-miris/tracker.py @@ -4,11 +4,12 @@ import json import math import numpy +import os import skimage.transform import struct import sys import tensorflow as tf -#tf.disable_eager_execution() +tf.disable_eager_execution() import graph_nets data_root = sys.argv[1]