Lbasara commited on
Commit
30daa5a
·
1 Parent(s): cc48a7f

v0.1.0 first online test

Browse files
Files changed (5) hide show
  1. CHANGELOG.md +40 -0
  2. app.py +45 -139
  3. autoroute.py +174 -0
  4. requirements.txt +10 -6
  5. tarifs2025.csv +0 -0
CHANGELOG.md ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Changelog
2
+
3
+ All notable changes to this project will be documented in this file.
4
+
5
+ The format is based on [Keep a Changelog],
6
+ and this project adheres to [Semantic Versioning].
7
+
8
+ ## [Unreleased]
9
+
10
+ - /
11
+
12
+
13
+ ## [0.1.0] - 2025-02-27
14
+
15
+ ### Added
16
+
17
+ - First online demo
18
+
19
+ ### Changed
20
+
21
+ ### Deprecated
22
+
23
+ ### Removed
24
+
25
+ ### Fixed
26
+
27
+
28
+
29
+ ### Security
30
+
31
+
32
+
33
+ <!-- Links -->
34
+ [keep a changelog]: https://keepachangelog.com/en/1.0.0/
35
+ [semantic versioning]: https://semver.org/spec/v2.0.0.html
36
+
37
+ <!-- Versions -->
38
+ [unreleased]: https://huggingface.co/spaces/Lbasara/Autoroute/compare/v0.1.0...HEAD
39
+ [0.1.0]: https://huggingface.co/spaces/Lbasara/Autoroute/compare/v0.0.2...v0.1.0
40
+ [0.0.2]: https://huggingface.co/spaces/Lbasara/Autoroute/compare/v0.0.1...v0.0.2
41
+ [0.0.1]: https://huggingface.co/spaces/Lbasara/Autoroute/tag/v0.0.1
app.py CHANGED
@@ -1,147 +1,53 @@
1
- import io
2
- import random
3
- from typing import List, Tuple
4
-
5
- import aiohttp
6
  import panel as pn
7
- from PIL import Image
8
- from transformers import CLIPModel, CLIPProcessor
9
-
10
- pn.extension(design="bootstrap", sizing_mode="stretch_width")
11
-
12
- ICON_URLS = {
13
- "brand-github": "https://github.com/holoviz/panel",
14
- "brand-twitter": "https://twitter.com/Panel_Org",
15
- "brand-linkedin": "https://www.linkedin.com/company/panel-org",
16
- "message-circle": "https://discourse.holoviz.org/",
17
- "brand-discord": "https://discord.gg/AXRHnJU6sP",
18
- }
19
-
20
-
21
- async def random_url(_):
22
- pet = random.choice(["cat", "dog"])
23
- api_url = f"https://api.the{pet}api.com/v1/images/search"
24
- async with aiohttp.ClientSession() as session:
25
- async with session.get(api_url) as resp:
26
- return (await resp.json())[0]["url"]
27
-
28
-
29
- @pn.cache
30
- def load_processor_model(
31
- processor_name: str, model_name: str
32
- ) -> Tuple[CLIPProcessor, CLIPModel]:
33
- processor = CLIPProcessor.from_pretrained(processor_name)
34
- model = CLIPModel.from_pretrained(model_name)
35
- return processor, model
36
-
37
-
38
- async def open_image_url(image_url: str) -> Image:
39
- async with aiohttp.ClientSession() as session:
40
- async with session.get(image_url) as resp:
41
- return Image.open(io.BytesIO(await resp.read()))
42
-
43
-
44
- def get_similarity_scores(class_items: List[str], image: Image) -> List[float]:
45
- processor, model = load_processor_model(
46
- "openai/clip-vit-base-patch32", "openai/clip-vit-base-patch32"
47
- )
48
- inputs = processor(
49
- text=class_items,
50
- images=[image],
51
- return_tensors="pt", # pytorch tensors
52
- )
53
- outputs = model(**inputs)
54
- logits_per_image = outputs.logits_per_image
55
- class_likelihoods = logits_per_image.softmax(dim=1).detach().numpy()
56
- return class_likelihoods[0]
57
-
58
-
59
- async def process_inputs(class_names: List[str], image_url: str):
60
- """
61
- High level function that takes in the user inputs and returns the
62
- classification results as panel objects.
63
- """
64
- try:
65
- main.disabled = True
66
- if not image_url:
67
- yield "##### ⚠️ Provide an image URL"
68
- return
69
-
70
- yield "##### ⚙ Fetching image and running model..."
71
- try:
72
- pil_img = await open_image_url(image_url)
73
- img = pn.pane.Image(pil_img, height=400, align="center")
74
- except Exception as e:
75
- yield f"##### 😔 Something went wrong, please try a different URL!"
76
- return
77
-
78
- class_items = class_names.split(",")
79
- class_likelihoods = get_similarity_scores(class_items, pil_img)
80
 
81
- # build the results column
82
- results = pn.Column("##### 🎉 Here are the results!", img)
83
-
84
- for class_item, class_likelihood in zip(class_items, class_likelihoods):
85
- row_label = pn.widgets.StaticText(
86
- name=class_item.strip(), value=f"{class_likelihood:.2%}", align="center"
87
- )
88
- row_bar = pn.indicators.Progress(
89
- value=int(class_likelihood * 100),
90
- sizing_mode="stretch_width",
91
- bar_color="secondary",
92
- margin=(0, 10),
93
- design=pn.theme.Material,
94
- )
95
- results.append(pn.Column(row_label, row_bar))
96
- yield results
97
- finally:
98
- main.disabled = False
99
-
100
 
101
- # create widgets
102
- randomize_url = pn.widgets.Button(name="Randomize URL", align="end")
103
 
104
- image_url = pn.widgets.TextInput(
105
- name="Image URL to classify",
106
- value=pn.bind(random_url, randomize_url),
107
- )
108
- class_names = pn.widgets.TextInput(
109
- name="Comma separated class names",
110
- placeholder="Enter possible class names, e.g. cat, dog",
111
- value="cat, dog, parrot",
112
- )
113
-
114
- input_widgets = pn.Column(
115
- "##### 😊 Click randomize or paste a URL to start classifying!",
116
- pn.Row(image_url, randomize_url),
117
- class_names,
118
- )
119
-
120
- # add interactivity
121
- interactive_result = pn.panel(
122
- pn.bind(process_inputs, image_url=image_url, class_names=class_names),
123
- height=600,
124
- )
125
 
126
- # add footer
127
- footer_row = pn.Row(pn.Spacer(), align="center")
128
- for icon, url in ICON_URLS.items():
129
- href_button = pn.widgets.Button(icon=icon, width=35, height=35)
130
- href_button.js_on_click(code=f"window.open('{url}')")
131
- footer_row.append(href_button)
132
- footer_row.append(pn.Spacer())
133
 
134
- # create dashboard
135
- main = pn.WidgetBox(
136
- input_widgets,
137
- interactive_result,
138
- footer_row,
139
  )
140
 
141
- title = "Panel Demo - Image Classification"
142
- pn.template.BootstrapTemplate(
143
- title=title,
144
- main=main,
145
- main_max_width="min(50%, 698px)",
146
- header_background="#F08080",
147
- ).servable(title=title)
 
1
+ import osmnx as ox
 
 
 
 
2
  import panel as pn
3
+ import param
4
+ import colorcet as cc
5
+ import autoroute
6
+
7
+ map_kwds={"font_size": 15}
8
+ transformer = autoroute.to_Lambert93()
9
+
10
+ pn.extension()
11
+
12
+ class Pars(param.Parameterized):
13
+ orig_adr= param.String(label="Origine")
14
+ dest_adr= param.String(label="Destination")
15
+ precision = param.Integer(label="Granularité routes", bounds=(2, 6), default=4)
16
+ par=Pars()
17
+
18
+ calcul=pn.widgets.Button(button_type= "primary", name="Calculer parcours")
19
+ carte=pn.pane.plot.Folium(min_height=1000, sizing_mode='stretch_both')
20
+
21
+ def calculs(dummy):
22
+ orig_coo=ox.geocoder.geocode(par.orig_adr)
23
+ dest_coo=ox.geocoder.geocode(par.dest_adr)
24
+ calcul.name="Téléchargement carte"
25
+ G=autoroute.download_graph(orig_coo, dest_coo, par.precision)
26
+ calcul.name="Calcul itinéraires"
27
+ Gc=autoroute.add_tarifs(G)
28
+ orig_lamb=transformer.transform(*orig_coo)
29
+ dest_lamb=transformer.transform(*dest_coo)
30
+ orig_id=ox.nearest_nodes(Gc, *orig_lamb)
31
+ dest_id=ox.nearest_nodes(Gc, *dest_lamb)
32
+ df=autoroute.tariftimedf(Gc, orig_id, dest_id)
33
+ calcul.name="Affichage résultats"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
34
 
35
+ m=None
36
+ for irow, row in df.iterrows():
37
+ gfast=ox.routing.route_to_gdf(Gc, row["path"], weight= "travel_time")
38
+ m=gfast.explore(m=m, map_kwds=map_kwds, color=cc.b_glasbey_category10[irow],
39
+ tooltip= set(gfast.columns) & {"travel_time", "tarif", "length"},
40
+ highlight_kwds={"color": "green", "weight": 3} )
 
 
 
 
 
 
 
 
 
 
 
 
 
41
 
42
+ carte.object=m
 
43
 
44
+ calcul.on_click(calculs)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45
 
 
 
 
 
 
 
 
46
 
47
+ app=pn.template.MaterialTemplate(
48
+ title="Autoroute v0.1.0",
49
+ sidebar=[pn.Param(par, name="Paramètres"), calcul],
50
+ main=carte,
 
51
  )
52
 
53
+ app.servable()
 
 
 
 
 
 
autoroute.py ADDED
@@ -0,0 +1,174 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import geopandas as gpd
2
+ import pandas as pd
3
+ import networkx as nx
4
+ import osmnx as ox
5
+ import numpy as np
6
+ import shapely
7
+ from pyproj import Transformer
8
+ from functools import partial
9
+
10
+ ox.settings.useful_tags_node=["highway", "ref", "barrier", "highway:ref", "name"]
11
+ ox.settings.useful_tags_way=["highway", "maxspeed", "name", "ref", "oneway", "toll", "barrier"]
12
+
13
+ def custom_filter_order(order, start=0):
14
+ highway_order=["motorway", "trunk", "primary", "secondary", "tertiary",
15
+ "unclassified", "residential", "service", "pedestrian"]
16
+ return '["highway"~"'+'|'.join(highway_order[start:order])+'"]'
17
+
18
+ def osm_stations(dissolve=True):
19
+ osm_stations=gpd.read_file("export.geojson")
20
+ clean_col=osm_stations.columns[osm_stations.isna().mean()<0.9]
21
+ osm_clean=osm_stations[clean_col].drop(["barrier", "@id"], axis=1).dropna(subset="name")
22
+ osm_clean["autoroute"]=osm_clean["highway:ref"].str.replace(" ", "")
23
+ osm_clean["nref"]=osm_clean["operator:ref"]
24
+ badguys="Péage des |Péage de |Péage d'|Péage-de-|Péage du |Péage-du-|Péage "
25
+ osm_clean["name"]=osm_clean["name"].str.replace(badguys, "", regex=True)
26
+ osm_clean["osmid"]=osm_clean["id"].str.split("/").str[1].astype(int)
27
+ if dissolve:
28
+ osm_clean=osm_clean.dissolve(by='name', aggfunc='first')
29
+ osm_clean['geometry'] = osm_clean.geometry.to_crs("2154").centroid
30
+ osm_clean['latlon'] = osm_clean.geometry.to_crs("WGS84")
31
+ osm_clean=osm_clean.drop(columns=["id", "highway:ref", "operator:ref"])
32
+ return osm_clean
33
+
34
+
35
+ def rebuild_highway(Gsub, inter, weight="travel_time"):
36
+ """
37
+ On s'inspire de osmnx.simplification::simplify_graph (l. 275)
38
+ On prend un graph correspondant à un composant connecté.
39
+ On calcule les distances minimales deux à deux, on ajoute les tarifs.
40
+ Pour chaque paire :
41
+ - on récupère le chemin.
42
+ - on fusionne les géométries des edges, on ajoute le temps, le tarif, début et fin
43
+ - on met les edges dans un set
44
+ - on met les noeuds non-toll_booth dans un set
45
+ On supprime edges et nodes.
46
+ On ajoute les edges recalculés.
47
+ """
48
+
49
+ dftarifs=pd.read_csv("tarifs2025.csv")
50
+ dosm=osm_stations(dissolve=False).set_index("osmid").name.to_dict()
51
+
52
+ nodes_to_remove = set()
53
+ len_path = dict(nx.all_pairs_dijkstra(Gsub, weight=weight))
54
+ ltup=[]
55
+
56
+ for k, v in len_path.items():
57
+ if k not in inter:
58
+ continue
59
+ for k1, v1 in v[0].items():
60
+ if k1 not in inter:
61
+ continue
62
+ ent, sor = dosm[k], dosm[k1]
63
+ if ent == sor:
64
+ continue
65
+ tarif = dftarifs.query(f'E=="{ent}" and S=="{sor}"').Tarif.to_list()
66
+ if len(tarif)!=1:
67
+ continue
68
+ route=v[1][k1]
69
+ edges=list(Gsub.edges[edge] for edge in nx.utils.pairwise(route))
70
+ nodes_to_remove.update(route)
71
+ #weight_sum=np.sum([edge[weight] for edge in edges])
72
+ length_sum=np.sum([edge["length"] for edge in edges])
73
+ time_sum=np.sum([edge["travel_time"] for edge in edges])
74
+ geometry_sum=shapely.ops.linemerge([edge["geometry"] for edge in edges])
75
+
76
+ #dic={"E": k, "S": k1, weight: v1, "route": route, "tarif": tarif[0], "geometry": geometry_sum}
77
+ dic={"key": 0, "length" : length_sum, "travel_time": time_sum, "route": route,
78
+ "tarif": tarif[0], "geometry": geometry_sum}
79
+ ltup.append((k, k1, 0, dic))
80
+
81
+ return ltup, nodes_to_remove.difference(inter)
82
+
83
+
84
+ def rebuild_highways(Ga, toll_nodes):
85
+ ltup=[]
86
+ nodes_to_remove=set()
87
+ for wcc in nx.weakly_connected_components(Ga):
88
+ inter=wcc.intersection(toll_nodes)
89
+ if len(inter) > 1 :
90
+ Gsub=Ga.subgraph(wcc).copy()
91
+ lt, to_remove=rebuild_highway(Gsub, inter)
92
+ ltup.extend(lt)
93
+ nodes_to_remove.update(to_remove)
94
+ return ltup, nodes_to_remove
95
+
96
+ def lamb93():
97
+ return "EPSG:2154"
98
+
99
+ def to_Lambert93():
100
+ transformer = Transformer.from_crs("WGS84", lamb93())
101
+ return transformer
102
+
103
+ def from_Lambert93():
104
+ transformer = Transformer.from_crs(lamb93(), "WGS84")
105
+ return transformer
106
+
107
+ def get_gdf_ellipse(orig_coo, dest_coo):
108
+ orig_lamb=np.array(to_Lambert93().transform(*orig_coo))
109
+ dest_lamb=np.array(to_Lambert93().transform(*dest_coo))
110
+ C=(orig_lamb+dest_lamb)/2
111
+ D=orig_lamb-dest_lamb
112
+ a=1.15 * np.linalg.norm(D) /2
113
+ c = 0.4 * np.linalg.norm(D) /2
114
+ theta=np.arctan2(D[1], D[0]) # coo sont lat, lon donc y, x
115
+ circ = shapely.geometry.Point(C).buffer(1)
116
+ ell = shapely.affinity.scale(circ, a, c)
117
+ ellr = shapely.affinity.rotate(ell, theta, use_radians=True)
118
+
119
+ return gpd.GeoSeries(ellr, crs= lamb93())
120
+
121
+ def get_shapely_ellipse(orig_coo, dest_coo):
122
+ return get_gdf_ellipse(orig_coo, dest_coo).to_crs("WGS84").geometry[0]
123
+
124
+
125
+ def tariftime(u, v, d, l):
126
+ d=d[0]
127
+ return d["travel_time"] if "tarif" not in d else d["travel_time"] + l*d["tarif"]
128
+
129
+
130
+ def tariftimedf(Gc, orig_id, dest_id, weight="travel_time"):
131
+ ltup=[]
132
+
133
+ for l in np.arange(0, 250, 5):
134
+ tarif= partial(tariftime, l=l)
135
+ fastest=nx.shortest_path(Gc, orig_id, dest_id, weight=tarif)
136
+ gfast=ox.routing.route_to_gdf(Gc, fastest, weight= weight)
137
+ prix=float(gfast["tarif"].sum()) if "tarif" in gfast else 0
138
+ ltup.append((prix, float(gfast["travel_time"].sum()), fastest ))
139
+ if prix==0: break
140
+
141
+ df=pd.DataFrame(ltup, columns=["tarif", "time", "path"]).drop_duplicates(subset=["tarif", "time"]).reset_index()
142
+ df["time (mn)"]=df["time"]/60
143
+ return df
144
+
145
+
146
+ def download_graph(orig_coo, dest_coo, precision):
147
+ buf=get_shapely_ellipse(orig_coo , dest_coo)
148
+ cf=custom_filter_order(precision)
149
+ G=ox.graph.graph_from_polygon(buf, network_type='drive', custom_filter=cf, simplify=False)
150
+ G=ox.simplify_graph(G, node_attrs_include=["barrier"])
151
+ G = ox.add_edge_speeds(G)
152
+ G = ox.add_edge_travel_times(G)
153
+ G = ox.project_graph(G, to_crs=lamb93())
154
+ return G
155
+
156
+ def add_tarifs(G):
157
+ gdfn, gdfe=ox.graph_to_gdfs(G)
158
+ if "toll" not in gdfe.columns or "barrier" not in gdfn.columns:
159
+ return G
160
+ gae=gdfe[((gdfe.highway=="motorway") & (gdfe.toll == "yes")) | (gdfe.highway=="motorway_link") ]
161
+ gbn=gdfn.query("barrier=='toll_booth'")
162
+ u, v, k = zip(*gae.index)
163
+ uv = set(u).union(v)
164
+ gan=gdfn[gdfn.index.isin(uv)]
165
+ Ga=ox.convert.to_digraph(ox.convert.graph_from_gdfs(gan, gae))
166
+ toll_nodes=gbn.index.to_list()
167
+ ltup, nodes_to_remove = rebuild_highways(Ga, toll_nodes)
168
+ Gc=G.copy()
169
+ Gc.remove_nodes_from(nodes_to_remove)
170
+ Gc.add_edges_from(ltup)
171
+ return Gc
172
+
173
+
174
+
requirements.txt CHANGED
@@ -1,6 +1,10 @@
1
- panel
2
- jupyter
3
- transformers
4
- numpy
5
- torch
6
- aiohttp
 
 
 
 
 
1
+ osmnx>=2.0.0
2
+ colorcet>=3.1.0
3
+ geopandas>=1.0.0
4
+ panel>=1.5.4
5
+ param>=2.1.1
6
+ networkx>=3.4.2
7
+ pandas>=2.2.3
8
+ numpy>=2.2.2
9
+ shapely>=2.0.7
10
+ pyproj>=3.7.0
tarifs2025.csv ADDED
The diff for this file is too large to render. See raw diff