|
1 | 1 | #!/usr/bin/env python
|
2 |
| - |
3 | 2 | # MBUtil: a tool for MBTiles files
|
4 | 3 | # Supports importing, exporting, and more
|
5 | 4 | #
|
6 | 5 | # (c) Development Seed 2011
|
7 | 6 | # Licensed under BSD
|
8 |
| - |
9 |
| -import sqlite3, uuid, sys, logging, time, os, json |
| 7 | +import logging |
| 8 | +import os, sys |
10 | 9 | from optparse import OptionParser
|
11 | 10 |
|
12 |
| -logger = logging.getLogger(__name__) |
13 |
| - |
14 |
| - |
15 |
def mbtiles_setup(cur):
    """Create the standard MBTiles schema on *cur*.

    Builds the ``tiles`` and ``metadata`` tables plus the unique indexes
    the MBTiles spec expects. Fails if the tables already exist.
    """
    schema_statements = (
        """
        create table tiles (
            zoom_level integer,
            tile_column integer,
            tile_row integer,
            tile_data blob);
        """,
        """create table metadata
        (name text, value text);""",
        """create unique index name on metadata (name);""",
        """create unique index tile_index on tiles
        (zoom_level, tile_column, tile_row);""",
    )
    for statement in schema_statements:
        cur.execute(statement)
28 |
| - |
29 |
def mbtiles_connect(mbtiles_file):
    """Open *mbtiles_file* as a sqlite3 database and return the connection.

    On any failure the error is logged and the process exits with status 1
    (this module is used as a command-line tool).

    :param mbtiles_file: path to the MBTiles file (created if absent)
    :returns: an open ``sqlite3.Connection``
    """
    try:
        con = sqlite3.connect(mbtiles_file)
        return con
    except Exception as e:
        # `except Exception, e:` was Python-2-only syntax; `as` works on
        # both Python 2.6+ and 3. The broad catch is deliberate: any
        # failure to open the database is fatal for this tool.
        logger.error("Could not connect to database")
        logger.exception(e)
        sys.exit(1)
37 |
| - |
38 |
def optimize_connection(cur):
    """Apply write-speed PRAGMAs for bulk tile loading.

    Trades durability away (synchronous=0) and takes an exclusive lock,
    so this is only appropriate for one-shot import jobs.
    """
    for pragma in ("""PRAGMA synchronous=0""",
                   """PRAGMA locking_mode=EXCLUSIVE""",
                   """PRAGMA journal_mode=TRUNCATE"""):
        cur.execute(pragma)
42 |
| - |
43 |
def compression_prepare(cur, con):
    """Create the ``images``/``map`` staging tables for tile de-duplication.

    Idempotent (``if not exists``). *con* is accepted for interface
    compatibility with the other compression_* helpers but is not used.
    """
    images_ddl = """
        CREATE TABLE if not exists images (
            tile_data blob,
            tile_id VARCHAR(256));
        """
    map_ddl = """
        CREATE TABLE if not exists map (
            zoom_level integer,
            tile_column integer,
            tile_row integer,
            tile_id VARCHAR(256));
        """
    cur.execute(images_ddl)
    cur.execute(map_ddl)
56 |
| - |
57 |
def optimize_database(cur):
    """Compact the database: refresh planner statistics, then reclaim space."""
    for message, statement in (('analyzing db', """ANALYZE;"""),
                               ('cleaning db', """VACUUM;""")):
        logger.debug(message)
        cur.execute(statement)
62 |
| - |
63 |
def compression_do(cur, con, chunk):
    """De-duplicate tile blobs from ``tiles`` into ``images``/``map``.

    Walks the tiles table in windows of *chunk* rows. The first time a
    blob is seen it is written to ``images`` under a fresh uuid; repeats
    only get a ``map`` row pointing at the existing image.

    NOTE(review): windowing by rowid assumes tiles rowids are contiguous
    starting at 1 -- TODO confirm for reused databases.

    :param cur: sqlite cursor used for all statements
    :param con: connection; committed once per chunk
    :param chunk: number of rows fetched per round
    """
    overlapping = 0
    unique = 0
    total = 0
    cur.execute("select count(zoom_level) from tiles")
    res = cur.fetchone()
    total_tiles = res[0]
    # Unified on the module logger (the original mixed logging.debug and
    # logger.debug for no reason).
    logger.debug("%d total tiles to fetch" % total_tiles)
    # Ceiling division: the original `total_tiles / chunk` floor-divided
    # (silently skipping the final partial chunk) and is a float under
    # Python 3, which range() rejects.
    rounds = (total_tiles + chunk - 1) // chunk
    for i in range(rounds):
        logger.debug("%d / %d rounds done" % (i, rounds))
        ids = []
        files = []
        start = time.time()
        cur.execute("""select zoom_level, tile_column, tile_row, tile_data
            from tiles where rowid > ? and rowid <= ?""", ((i * chunk), ((i + 1) * chunk)))
        logger.debug("select: %s" % (time.time() - start))
        rows = cur.fetchall()
        for r in rows:
            total = total + 1
            if r[3] in files:
                # Duplicate blob: reuse the uuid assigned when it was
                # first seen in this chunk.
                overlapping = overlapping + 1
                start = time.time()
                query = """insert into map
                    (zoom_level, tile_column, tile_row, tile_id)
                    values (?, ?, ?, ?)"""
                logger.debug("insert: %s" % (time.time() - start))
                cur.execute(query, (r[0], r[1], r[2], ids[files.index(r[3])]))
            else:
                unique = unique + 1
                # Renamed from `id`, which shadowed the builtin.
                tile_id = str(uuid.uuid4())
                ids.append(tile_id)
                files.append(r[3])
                start = time.time()
                query = """insert into images
                    (tile_id, tile_data)
                    values (?, ?)"""
                cur.execute(query, (tile_id, sqlite3.Binary(r[3])))
                logger.debug("insert into images: %s" % (time.time() - start))
                start = time.time()
                query = """insert into map
                    (zoom_level, tile_column, tile_row, tile_id)
                    values (?, ?, ?, ?)"""
                cur.execute(query, (r[0], r[1], r[2], tile_id))
                logger.debug("insert into map: %s" % (time.time() - start))
        con.commit()
110 |
| - |
111 |
def compression_finalize(cur):
    """Replace the raw ``tiles`` table with a view joining ``map`` to ``images``.

    Adds unique indexes on both staging tables, then compacts and
    re-analyzes the file. Run only after compression_do() has populated
    ``map`` and ``images``.
    """
    statements = (
        """drop table tiles;""",
        """create view tiles as
        select map.zoom_level as zoom_level,
        map.tile_column as tile_column,
        map.tile_row as tile_row,
        images.tile_data as tile_data FROM
        map JOIN images on images.tile_id = map.tile_id;""",
        """
        CREATE UNIQUE INDEX map_index on map
        (zoom_level, tile_column, tile_row);""",
        """
        CREATE UNIQUE INDEX images_id on images
        (tile_id);""",
        """vacuum;""",
        """analyze;""",
    )
    for statement in statements:
        cur.execute(statement)
127 |
| - |
128 |
def disk_to_mbtiles(directory_path, mbtiles_file):
    """Import a z/x/y tile directory tree into an MBTiles file.

    Loads optional ``directory_path/metadata.json`` into the metadata
    table, then inserts every ``<z>/<x>/<y>.<ext>`` file under
    *directory_path* as a tile row (the filename before the first '.' is
    the row value). Progress is echoed to stdout every 100 tiles.

    :param directory_path: root of the tile tree
    :param mbtiles_file: output MBTiles path (schema is created here)
    """
    logger.info("Importing disk to MBTiles")
    logger.debug("%s --> %s" % (directory_path, mbtiles_file))
    con = mbtiles_connect(mbtiles_file)
    cur = con.cursor()
    optimize_connection(cur)
    mbtiles_setup(cur)
    try:
        # `with` + `except ... as` fix the Python-2-only except syntax and
        # the leaked metadata.json file handle of the original.
        with open('%s/metadata.json' % directory_path, 'r') as metadata_file:
            metadata = json.load(metadata_file)
        for name, value in metadata.items():
            cur.execute('insert into metadata (name, value) values (?, ?)',
                (name, value))
        logger.info('metadata from metadata.json restored')
    except IOError:
        logger.warning('metadata.json not found')

    count = 0
    start_time = time.time()
    msg = ""
    for r1, zs, ignore in os.walk(directory_path):
        for z in zs:
            for r2, xs, ignore in os.walk(os.path.join(r1, z)):
                for x in xs:
                    for r2, ignore, ys in os.walk(os.path.join(r1, z, x)):
                        for y in ys:
                            with open(os.path.join(r1, z, x, y), 'rb') as f:
                                cur.execute("""insert into tiles (zoom_level,
                                    tile_column, tile_row, tile_data) values
                                    (?, ?, ?, ?);""",
                                    (z, x, y.split('.')[0], sqlite3.Binary(f.read())))
                            count = count + 1
                            if (count % 100) == 0:
                                # Backspace over the previous progress line.
                                for c in msg:
                                    sys.stdout.write(chr(8))
                                msg = "%s tiles inserted (%d tiles/sec)" % (count, count / (time.time() - start_time))
                                sys.stdout.write(msg)
    logger.debug('tiles inserted.')
    # The original never committed, so an import could be lost when the
    # process exited (and VACUUM below cannot run inside a transaction).
    con.commit()
    # NOTE(review): the original passes the *connection* here even though
    # optimize_database's parameter is named `cur`; Connection.execute
    # works, so this is preserved.
    optimize_database(con)
| 11 | +from mbutil import mbtiles_to_disk, disk_to_mbtiles |
166 | 12 |
|
167 |
def mbtiles_to_disk(mbtiles_file, directory_path):
    """Export an MBTiles file into a ``directory_path/<z>/<x>/<y>.<ext>`` tree.

    Dumps the metadata table to ``directory_path/metadata.json``; the
    tile file extension comes from the 'format' metadata key (default
    'png'). Progress is logged per tile.

    Raises ``OSError`` if *directory_path* already exists (``os.mkdir``).
    """
    logger.debug("Exporting MBTiles to disk")
    logger.debug("%s --> %s" % (mbtiles_file, directory_path))
    con = mbtiles_connect(mbtiles_file)
    os.mkdir("%s" % directory_path)
    metadata = dict(con.execute('select name, value from metadata;').fetchall())
    # `with` fixes the leaked metadata.json write handle of the original;
    # the original's unused cursor and dead backspace loop (over an
    # always-empty msg) are dropped.
    with open('%s/metadata.json' % directory_path, 'w') as metadata_file:
        json.dump(metadata, metadata_file)
    count = con.execute('select count(zoom_level) from tiles;').fetchone()[0]
    done = 0
    tiles = con.execute('select zoom_level, tile_column, tile_row, tile_data from tiles;')
    t = tiles.fetchone()
    while t:
        tile_dir = "%s/%s/%s/" % (directory_path, t[0], t[1])
        if not os.path.isdir(tile_dir):
            os.makedirs(tile_dir)
        with open('%s/%s/%s/%s.%s' %
            (directory_path, t[0], t[1], t[2], metadata.get('format', 'png')), 'wb') as f:
            f.write(t[3])
        done = done + 1
        logger.info('%s / %s tiles exported' % (done, count))
        t = tiles.fetchone()
191 | 13 |
|
192 | 14 | if __name__ == '__main__':
|
193 | 15 |
|
|
0 commit comments