Add ONISR status

Dryusdan 2023-10-14 23:45:16 +02:00
parent 3e2c963b7e
commit 7521bdf289
3 changed files with 266 additions and 44 deletions

.gitignore vendored Normal file (154 lines added)

@@ -0,0 +1,154 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/


@@ -25,11 +25,11 @@ app = typer.Typer()
def create_db():
db = database.Database()
# schemas.Categories.__table__.create(db.engine)
schemas.Points.__table__.create(db.engine)
#schemas.Points.__table__.create(db.engine)
# schemas.VigiloCategories.__table__.create(db.engine)
# schemas.VigiloObservations.__table__.create(db.engine)
# schemas.UnMetre.__table__.create(db.engine)
# schemas.Onisr_2021_Processed.__table__.create(db.engine)
#schemas.Onisr_2021_Processed.__table__.create(db.engine)
#schemas.Onisr_2022_Processed.__table__.create(db.engine)
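For reference, each of these commented-out lines creates one mapped table through SQLAlchemy's Table.create(). A minimal sketch of the same call in isolation, with a hypothetical model standing in for the schemas module used here; checkfirst=True (not used above) is what makes such a call safe to re-run:

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import DeclarativeBase

class Base(DeclarativeBase):
    pass

class Points(Base):  # hypothetical stand-in for schemas.Points
    __tablename__ = "points"
    id = Column(Integer, primary_key=True)

engine = create_engine("sqlite:///:memory:")
# checkfirst=True skips creation when the table already exists.
Points.__table__.create(engine, checkfirst=True)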
@@ -177,7 +177,7 @@ def process_raw_onisr_value():
numero_vehicule=data[4],
intersection=data[5],
datetime=dt.to_iso8601_string(),
geolocalisation=f"POINT({data[10]} {data[11]})",
geolocalisation=f"POINT({data[11]} {data[10]})",
)
crud.add_onisr_processed(onisr_processed)
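The swap of data[10] and data[11] is an axis-order fix: WKT writes POINT(x y), so longitude must come first, while the ONISR CSV stores latitude before longitude. A small sketch of a safer construction with shapely, using hypothetical coordinate values:

from shapely.geometry import Point

lat, lon = 47.2184, -1.5536  # hypothetical CSV values: latitude, then longitude
# Building the Point explicitly keeps the x/y order visible
# instead of hiding it in an f-string.
geolocalisation = Point(lon, lat).wkt  # 'POINT (-1.5536 47.2184)'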
@@ -208,30 +208,16 @@ def get_all_streets():
log.info(f"Geojson writen. Wait 10 seconds")
time.sleep(10)
log.info("Sleep well. Next city !")
log.info("Oups, is not the next city, but ALL CITIES IN ONE FILE ! BOUYA")
list_insee='|'.join(map(str,insee_code))
query = f"""
area["ref:INSEE"~"{list_insee}"];
(
way["highway"~"primary|secondary|tertiary|unclassified|residential|primary_link|secondary_link|tertiary_link|living_street|track|path|cycleway"]["bicycle"!~"no"]["access"!~"no"](area);
way["bicycle"="yes"]["access"!~"no"](area);
);
"""
log.info("Run query")
r = api.get(query, verbosity='geom', responseformat="geojson")
log.info(f"Write geojson /var/www/opendata_nm/www/streets/all_insee.geojson")
with open(f"/var/www/opendata_nm/www/streets/all_insee.geojson",mode="w") as f:
geojson.dump(r,f)
log.info("And of getting all cities")
@app.command()
def analyze_unmetre_streets(onlyscore: Annotated[bool,typer.Option(help="Run only score calculation")] = False,):
def analyze_unmetre_streets(onlyscore: Annotated[bool,typer.Option(help="Run only score calculation")] = False, reset: Annotated[bool,typer.Option(help="Reset count")] = False,):
category = "unmetre"
log.info("Get all cities")
communes = crud.get_all_communes()
log.info(f"Analyze streets with {category} data")
data_categories = [
{"type": f"{category}_all", "where": "", "score": 0},
{"type": f"{category}_legal", "where": "WHERE distance_overtaker >= 1", "score": 0},
{"type": f"{category}_illegal", "where": "WHERE distance_overtaker > 0.5 AND distance_overtaker < 1", "score": 1},
{"type": f"{category}_dangerous", "where": "WHERE distance_overtaker <= 0.5", "score": 2},
@@ -245,12 +231,52 @@ def analyze_unmetre_streets(onlyscore: Annotated[bool,typer.Option(help="Run onl
multipoint = shapely.from_wkb(un_metre_data.wkb_geometry)
point = shapely.get_geometry(multipoint, 0)
points_list.append(point)
analyze_streets(category, points_list, data_category['type'], communes)
analyze_streets(category=category, data_type=data_category['type'], points=points_list, communes=communes, reset=reset)
score_streets(category, communes)
for data_category in data_categories:
score_streets(category=category, data_type=data_category['type'], category_score=data_category["score"], communes=communes, reset=reset)
log.info(f"End of analyze streets with {category} data")
def analyze_streets(category: str, points: list, data_type: str, communes):
@app.command()
def analyze_onisr_streets(onlyscore: Annotated[bool,typer.Option(help="Run only score calculation")] = False, reset: Annotated[bool,typer.Option(help="Reset count")] = False,):
from geoalchemy2.shape import to_shape
log.info("Get all cities")
communes = crud.get_all_communes()
category = "onisr"
data_categories = [
{"type": f"{category}_2021", "score": 0.5},
{"type": f"{category}_2022", "score": 1},
]
if not onlyscore:
points_list = []
for data_category in data_categories:
log.info(f"Get all points for {data_category['type']}")
if data_category['type'] == f"{category}_all":
onisr_datas = []
onisr_2021_datas = crud.get_all_onisr_2021()
for onisr_2021 in onisr_2021_datas:
onisr_datas.append(onisr_2021)
onisr_2022_datas = crud.get_all_onisr_2022()
for onisr_2022 in onisr_2022_datas:
onisr_datas.append(onisr_2022)
elif data_category['type'] == f"{category}_2021":
log.info(f"Use {category}_2021")
onisr_datas = crud.get_all_onisr_2021()
elif data_category['type'] == f"{category}_2022":
log.info(f"Use {category}_2022")
onisr_datas = crud.get_all_onisr_2022()
for onisr_data in onisr_datas:
point = shapely.get_geometry(to_shape(onisr_data.geolocalisation), 0)
points_list.append(point)
log.debug(f"{data_category['type']} have {len(points_list)} points")
analyze_streets(category=category, data_type=data_category['type'], points=points_list, communes=communes, reset=reset, precision=1e-4)
for data_category in data_categories:
score_streets(category=category, data_type=data_category['type'], category_score=data_category["score"], communes=communes, reset=reset)
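Unlike the un mètre rows, the ONISR rows come back from GeoAlchemy2 as WKBElement values, so to_shape is needed to turn them into shapely geometries first. A minimal sketch with a hypothetical element:

import shapely
from geoalchemy2.elements import WKBElement
from geoalchemy2.shape import to_shape

element = WKBElement(shapely.Point(-1.55, 47.22).wkb, srid=4326)  # hypothetical
geom = to_shape(element)                # shapely Point
point = shapely.get_geometry(geom, 0)   # for a POINT, this is the point itself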
def analyze_streets(category: str, data_type: str, points: list, communes, reset: bool=False, precision: float=1e-7):
if reset:
log.warning("Reset asked. Reseting count")
log.info(f"Process category {data_type}")
for commune in communes:
insee = int(commune.identifiant_insee)
@@ -258,26 +284,72 @@ def analyze_streets(category: str, points: list, data_type: str, communes):
log.info(f"Load geojson for {name} ({insee})")
with open(f"/var/www/opendata_nm/www/streets/{insee}.geojson",mode="r") as f:
data = geojson.load(f)
if reset:
for feature in data['features']:
feature["properties"][f"{category}_all"] = 0
feature["properties"][f"{data_type}"] = 0
for feature in data['features']:
if feature["properties"].get(f"{category}_all") is None:
feature["properties"][f"{category}_all"] = 0
linestring = shape(feature["geometry"])
for point in points:
if linestring.distance(point) < 1e-7:
if linestring.distance(point) < precision:
if feature["properties"].get("name") is not None:
street_name = feature["properties"]["name"]
else:
street_name = "unknown"
if feature["properties"].get(f"{data_type}") is None:
log.info(f"Point {point} found in a new street ({street_name})")
log.info(f"[{data_type}] Point {point} found in a new street ({street_name})")
feature["properties"][f"{data_type}"] = 1
else:
feature["properties"][f"{data_type}"] += 1
log.info(f"Point {point} found in an already known street ({street_name}).")
log.info(f"[{data_type}]Point {point} found in an already known street ({street_name}).")
log.debug("Adding one to {}".format(feature["properties"][f"{category}_all"]))
feature["properties"][f"{category}_all"] += 1
with open(f"/var/www/opendata_nm/www/streets/{insee}.geojson",mode="w") as f:
geojson.dump(data,f)
log.info("Check streets complete")
log.info(f"Check streets complete for {data_type}")
def score_streets(category: str, communes):
def score_streets(category: str, data_type: str, category_score: int, communes, reset: bool = False):
if category_score == 0:
log.debug("category_score is 0")
return
for commune in communes:
insee = int(commune.identifiant_insee)
name = commune.toponyme
log.info(f"[{data_type}] Load geojson for {name} ({insee})")
with open(f"/var/www/opendata_nm/www/streets/{insee}.geojson",mode="r") as f:
data = geojson.load(f)
for feature in data['features']:
score = 0
if reset:
feature["properties"][f"{category}_score"] = 0
if feature["properties"].get(f"{category}_score") is None:
feature["properties"][f"{category}_score"] = 0
if feature["properties"].get(f"{data_type}") is not None and feature["properties"][f"{category}_all"] != 0:
score = feature["properties"][f"{data_type}"] / feature["properties"][f"{category}_all"] * 100 * category_score
#if feature["properties"].get("name") is not None and feature["properties"]["name"] == "Rue de Strasbourg":
# print(score)
# print(category_score)
# print(feature["properties"][f"{data_type}"])
# print(feature["properties"][f"{category}_all"])
#log.debug("In {} ctg there are {} with total of {}. Score of {}".format(data_type, feature["properties"][f"{data_type}"], feature["properties"][f"{category}_all"], category_score))
if score > 0:
feature["properties"][f"{category}_score"] += score
with open(f"/var/www/opendata_nm/www/streets/{insee}.geojson",mode="w") as f:
geojson.dump(data,f)
log.info("Process complete")
@app.command()
def calculate_score_streets(reset: Annotated[bool,typer.Option(help="Reset result")] = False,):
data_categories = [
{"score_name": f"onisr_score", "ponderate": 0.35},
{"score_name": f"unmetre_score", "ponderate": 0.65},
]
log.info("Get all cities")
communes = crud.get_all_communes()
for commune in communes:
@@ -287,27 +359,23 @@ def score_streets(category: str, communes):
with open(f"/var/www/opendata_nm/www/streets/{insee}.geojson",mode="r") as f:
data = geojson.load(f)
for feature in data['features']:
if feature["properties"].get(f"{category}_all") is None:
#log.info("No unmetre_all found")
feature["properties"][f"{category}_score"] = -1
continue
else:
log.info(f"{category}_all found")
score = 0
if feature["properties"].get(f"{category}_score") is None:
feature["properties"][f"{category}_score"] = 0
if feature["properties"].get("score") is None or reset:
feature["properties"]["score"] = 0
for data_category in data_categories:
if feature["properties"].get(f"{data_category['type']}") is not None:
score += feature["properties"][f"{data_category['type']}"] / feature["properties"]["unmetre_all"] * 100 * data_category['score']
print("In {} ctg there are {} with total of {}. Score of {}".format(data_category['type'], feature["properties"][f"{data_category['type']}"], feature["properties"]["unmetre_all"], data_category['score']))
#log.debug(feature["properties"]["unmetre_all"])
#log.debug(data_category['score'])
feature["properties"][f"{category}_score"] += score
if feature["properties"].get("name") is not None:
log.info(f"{feature['properties']['name']} have a {category}_score of {score}")
if feature["properties"].get(data_category["score_name"]) is None:
feature["properties"][data_category["score_name"]] = 0
score += feature["properties"][data_category["score_name"]] * data_category["ponderate"]
if score < 0:
feature["properties"]["score"] = -1
else:
feature["properties"]["score"] = score
if feature["properties"]["score"] == 0:
feature["properties"]["score"] = -1
with open(f"/var/www/opendata_nm/www/streets/{insee}.geojson",mode="w") as f:
geojson.dump(data,f)
log.info("Process complete")
if __name__ == "__main__":
app()


@@ -139,7 +139,7 @@ def get_all_onisr_2022():
def get_all_onisr_2021():
with Session() as session:
return (
session.query(schemas.Onisr_2022_Processed).all()
session.query(schemas.Onisr_2021_Processed).all()
)
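The fix above corrects a copy-paste slip: get_all_onisr_2021 was querying the 2022 table. One way to make that class of bug harder, sketched as a hypothetical parametrized helper reusing the module's Session and schemas:

def get_all_onisr(year: int):
    # Hypothetical consolidation of the two near-identical functions.
    model = {
        2021: schemas.Onisr_2021_Processed,
        2022: schemas.Onisr_2022_Processed,
    }[year]
    with Session() as session:
        return session.query(model).all()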
def drop_unmetre():