Mirror of https://github.com/DerTyp7/local-analyzer-python.git (synced 2025-10-30 04:47:11 +01:00)
get rid of sql
old/.gitattributes (vendored, new file, +2 lines)
@@ -0,0 +1,2 @@
# Auto detect text files and perform LF normalization
* text=auto
old/.gitignore (vendored, new file, +152 lines)
@@ -0,0 +1,152 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
old/README.md (new file, +2 lines)
@@ -0,0 +1,2 @@
# LocalAnalyzer
old/database.db (new binary file)
Binary file not shown.
old/init_sql.py (new file, +165 lines)
@@ -0,0 +1,165 @@
# Initializes/creates the database with all tables and table contents
import sqlite3


def createNode(conn, lines):
    # Parses a <node> element (open tag plus optional <tag> children) and
    # inserts it into the nodes and node_tags tables.
    nodeId = ""
    cur = conn.cursor()

    for line in lines:
        if "<node" in line:
            nodeId = str(line.split('id="')[1].split('"')[0])
            nodeLon = str(line.split('lon="')[1].split('"')[0])
            nodeLat = str(line.split('lat="')[1].split('"')[0])
            nodeVersion = str(line.split('version="')[1].split('"')[0])
            nodeTimestamp = str(line.split('timestamp="')[1].split('"')[0])
            nodeChangeset = str(line.split('changeset="')[1].split('"')[0])
            nodeUid = str(line.split('uid="')[1].split('"')[0])
            nodeUser = str(line.split('user="')[1].split('"')[0])

            cur.execute('INSERT INTO nodes("id", "lon", "lat", "version", "timestamp", "changeset", "uid", "user") VALUES (?, ?, ?, ?, ?, ?, ?, ?)',
                        (nodeId, nodeLon, nodeLat, nodeVersion, nodeTimestamp, nodeChangeset, nodeUid, nodeUser))

        elif '<tag ' in line:
            key = str(line.split('k="')[1].split('"')[0])
            value = str(line.split('v="')[1].split('"')[0])
            cur.execute('INSERT INTO node_tags("nodeId", "key", "value") VALUES (?,?,?)', (nodeId, key, value))


def createWay(conn, lines):
    # Parses a <way> element (open tag, <nd> references and <tag> children) and
    # inserts it into the ways/way_tags tables plus the node_way junction table.
    wayId = ""
    wayNodes = []
    cur = conn.cursor()

    for line in lines:
        if "<way" in line:
            wayId = str(line.split('id="')[1].split('"')[0])
            wayVersion = str(line.split('version="')[1].split('"')[0])
            wayTimestamp = str(line.split('timestamp="')[1].split('"')[0])
            wayChangeset = str(line.split('changeset="')[1].split('"')[0])
            wayUid = str(line.split('uid="')[1].split('"')[0])
            wayUser = str(line.split('user="')[1].split('"')[0])

            cur.execute('INSERT INTO ways("id", "version", "timestamp", "changeset", "uid", "user") VALUES (?,?,?,?,?,?)',
                        (wayId, wayVersion, wayTimestamp, wayChangeset, wayUid, wayUser))

        elif "<nd ref=" in line:
            wayNodes.append(str(line.split('ref="')[1].split('"')[0]))
        elif '<tag ' in line:
            key = str(line.split('k="')[1].split('"')[0])
            value = str(line.split('v="')[1].split('"')[0])
            cur.execute('INSERT INTO way_tags("wayId", "key", "value") VALUES (?,?,?)', (wayId, key, value))

    for nodeId in wayNodes:
        createNodeWayJunction(conn, wayId, nodeId)


def createNodeWayJunction(conn, wayId, nodeId):
    cur = conn.cursor()
    cur.execute('INSERT INTO node_way(wayId, nodeId) VALUES (?, ?)', (wayId, nodeId))


def createDatabase(path):
    try:
        print("Generate database structure")
        conn = sqlite3.connect(path)
        cur = conn.cursor()

        cur.execute('''CREATE TABLE "nodes" (
            "id" INTEGER NOT NULL UNIQUE,
            "lon" TEXT NOT NULL,
            "lat" TEXT NOT NULL,
            "version" INTEGER NOT NULL,
            "timestamp" TEXT NOT NULL,
            "changeset" TEXT NOT NULL,
            "uid" INTEGER NOT NULL,
            "user" TEXT NOT NULL,
            PRIMARY KEY("id")
        );''')

        cur.execute('''CREATE TABLE "ways" (
            "id" INTEGER NOT NULL UNIQUE,
            "version" INTEGER NOT NULL,
            "timestamp" TEXT NOT NULL,
            "changeset" TEXT NOT NULL,
            "uid" INTEGER NOT NULL,
            "user" TEXT NOT NULL,
            PRIMARY KEY("id")
        );''')

        cur.execute('''CREATE TABLE "way_tags" (
            "id" INTEGER NOT NULL UNIQUE,
            "wayId" INTEGER NOT NULL,
            "key" TEXT,
            "value" TEXT,
            PRIMARY KEY("id" AUTOINCREMENT),
            FOREIGN KEY("wayId") REFERENCES "ways"("id")
        );''')

        cur.execute('''CREATE TABLE "node_tags" (
            "id" INTEGER NOT NULL UNIQUE,
            "nodeId" INTEGER NOT NULL,
            "key" TEXT,
            "value" TEXT,
            PRIMARY KEY("id" AUTOINCREMENT),
            FOREIGN KEY("nodeId") REFERENCES "nodes"("id")
        );''')

        cur.execute('''CREATE TABLE "node_way" (
            "id" INTEGER NOT NULL UNIQUE,
            "wayId" INTEGER NOT NULL,
            "nodeId" INTEGER NOT NULL,
            PRIMARY KEY("id"),
            FOREIGN KEY("wayId") REFERENCES "ways"("id"),
            FOREIGN KEY("nodeId") REFERENCES "nodes"("id")
        );''')

        return conn
    except Exception:
        # The tables most likely exist already; reuse the existing database file.
        return sqlite3.connect(path)


# INIT
def parseOsmToSql(osmContent, path):
    print("Initializing database. This may take a while.")
    conn = createDatabase(path)

    print("Parsing nodes and ways of the OSM file into the database.")
    wayLines = []
    nodeLines = []
    lines = osmContent.split("\n")

    for line in lines:
        if "<node" in line:
            if "/>" in line:  # self-closing node without tags
                try:
                    createNode(conn, [line])
                except Exception:
                    print("Node could not be inserted")
            else:
                nodeLines.append(line)
        elif "</node>" in line:
            try:
                createNode(conn, nodeLines)
            except Exception:
                print("Node could not be inserted")
            nodeLines = []
        elif len(nodeLines) > 0:
            nodeLines.append(line)

        elif "<way " in line:
            wayLines.append(line)
        elif "</way>" in line:
            try:
                createWay(conn, wayLines)
            except Exception:
                print("Way could not be inserted")
            wayLines = []
        elif len(wayLines) > 0:
            wayLines.append(line)

    conn.commit()
    conn.close()
    print("Done: Initializing database")
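As a quick illustration of the schema above — a minimal sketch, not part of the commit, with an assumed helper name wayCoordinates — this is how the node_way junction table can be joined back to nodes to recover the geometry of a single way:

import sqlite3

# Illustrative only: fetch the (lat, lon) pairs of every node belonging to one
# way, in the order the node_way rows were inserted by createWay().
def wayCoordinates(path, wayId):
    conn = sqlite3.connect(path)
    cur = conn.cursor()
    cur.execute(
        'SELECT nodes.lat, nodes.lon FROM node_way '
        'JOIN nodes ON nodes.id = node_way.nodeId '
        'WHERE node_way.wayId = ? ORDER BY node_way.id',
        (wayId,))
    coords = cur.fetchall()
    conn.close()
    return coords

# Usage (with any way id present in the database): wayCoordinates("database.db", wayId)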
old/map.xml (new file, +76308 lines)
File diff suppressed because it is too large.
old/osm_analyzer.py (new file, +31 lines)
@@ -0,0 +1,31 @@
# https://overpass-api.de/api/map?bbox=8.59993,53.52150,8.61004,53.52484
# left - bottom - right - top
# minLon - minLat - maxLon - maxLat
import requests
from init_sql import parseOsmToSql


lon = 8.6039883
lat = 52.51608

# TODO Get real value based on a metric radius
areaHeightRadius = 0.01  # 0.01
areaWidthRadius = 0.013  # 0.013

minLon = round(float(lon) - areaWidthRadius, 5)
maxLon = round(float(lon) + areaWidthRadius, 5)

minLat = round(float(lat) - areaHeightRadius, 5)
maxLat = round(float(lat) + areaHeightRadius, 5)

requestUrl = "https://overpass-api.de/api/map"
requestsUrlParams = f"?bbox={minLon},{minLat},{maxLon},{maxLat}"


print(requestUrl + requestsUrlParams)

headers = {'Content-Type': 'application/xml'}
r = requests.get(requestUrl + requestsUrlParams, headers=headers)

osmContent = r.text
parseOsmToSql(osmContent, "database.db")
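One possible way to resolve the TODO above — a sketch only, using the standard ~111,320 m-per-degree approximation and a cosine correction for longitude; the helper name degreeOffsets is not part of the commit:

import math

# Roughly convert a radius in metres into lat/lon degree offsets.
# One degree of latitude is about 111,320 m everywhere; one degree of
# longitude shrinks with the cosine of the latitude.
def degreeOffsets(lat, radiusMeters):
    latOffset = radiusMeters / 111320.0
    lonOffset = radiusMeters / (111320.0 * math.cos(math.radians(lat)))
    return lonOffset, latOffset

# For lat = 52.51608 and a 1 km radius this yields roughly (0.0148, 0.0090),
# close to the hard-coded 0.013 / 0.01 values above:
# areaWidthRadius, areaHeightRadius = degreeOffsets(lat, 1000)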
old/text.txt (new file, +15 lines)
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8" lang="en"/>
<title>OSM3S Response</title>
</head>
<body>

<p>The data included in this document is from www.openstreetmap.org. The data is made available under ODbL.</p>
<p><strong style="color:#FF0000">Error</strong>: runtime error: open64: 0 Success /osm3s_v0.7.57_osm_base Dispatcher_Client::request_read_and_idx::rate_limited. Please check /api/status for the quota of your IP address. </p>

</body>
</html>
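The captured response above is an Overpass rate-limit error rather than map data. A minimal sketch for checking the quota before calling /api/map (assuming the public https://overpass-api.de/api/status endpoint and its plain-text reply; the "slots available now" wording is an assumption, and none of this is part of the commit):

import requests

# /api/status reports, per client IP, the rate limit and currently free slots.
status = requests.get("https://overpass-api.de/api/status", timeout=10)
print(status.text)

# Very rough gate, assuming the reply contains a line like "2 slots available now":
if "slots available now" in status.text:
    print("Quota available, safe to request /api/map")
else:
    print("Rate limited, wait and retry later")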