OSMnx: normalised measure of orientation order

Hi, I am trying to calculate the normalised orientation order of a street network.

  • First, calculate the bearing of every edge of the street network and sort the bearings into 36 equal-sized bins (covering 360 degrees, with each bin representing 10 degrees)

using:

import datetime
import matplotlib.pyplot as plt
import numpy as np
import osmnx as ox
import pandas as pd

ox.config(log_console=True, use_cache=True)
weight_by_length = False

places = {'Atlanta': 'Atlanta, GA, USA'}

# verify OSMnx geocodes each query to what you expect (a [multi]polygon geometry)
gdf = ox.gdf_from_places(places.values())
gdf

def reverse_bearing(x):
    return x + 180 if x < 180 else x - 180
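
Every undirected street should count in both travel directions, so each bearing is later paired with its reverse. A quick illustrative check of this helper:

print(reverse_bearing(10))   # 190: a bearing below 180 is flipped forward
print(reverse_bearing(350))  # 170: a bearing at or above 180 is flipped back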

bearings = {}
for place in sorted(places.keys()):
    print(datetime.datetime.now(), place)

    # get the graph
    query = places[place]
    G = ox.graph_from_place(query, network_type='drive')

    # calculate edge bearings
    Gu = ox.add_edge_bearings(ox.get_undirected(G))

    if weight_by_length:
        # weight bearings by length (meters)
        city_bearings = []
        for u, v, k, d in Gu.edges(keys=True, data=True):
            city_bearings.extend([d['bearing']] * int(d['length']))
        b = pd.Series(city_bearings)
        bearings[place] = pd.concat([b, b.map(reverse_bearing)]).reset_index(drop=True)
    else:
        # don't weight bearings, just take one value per street segment
        b = pd.Series([d['bearing'] for u, v, k, d in Gu.edges(keys=True, data=True)])
        bearings[place] = pd.concat([b, b.map(reverse_bearing)]).reset_index(drop=True)

def count_and_merge(n, bearings):
    # make twice as many bins as desired, then merge them in pairs
    # prevents bin-edge effects around common values like 0° and 90°
    n = n * 2
    bins = np.arange(n + 1) * 360 / n
    count, _ = np.histogram(bearings, bins=bins)

    # move the last bin to the front, so e.g. 0.01° and 359.99° will be binned together
    count = np.roll(count, 1)
    return count[::2] + count[1::2]
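
As a quick illustrative check of that wraparound handling (not part of the original notebook): bearings of 0.01° and 359.99° are both essentially due north, and the roll-and-merge step puts them into the same final bin:

demo = count_and_merge(36, [0.01, 359.99])
print(demo[0])     # 2: both bearings land in the first (north-facing) bin
print(demo.sum())  # 2: the merge loses no observations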

When I execute

count_and_merge(36, bearings['Atlanta'])

I get the following (I believe this is the number of edges in each bin; the first 18 values repeat as the last 18 because each bearing's reverse is also included):

array([4400,  768,  617,  631,  614,  601,  613,  672,  781, 4405,  813,
        577,  609,  602,  614,  591,  739,  814, 4400,  768,  617,  631,
        614,  601,  613,  672,  781, 4405,  813,  577,  609,  602,  614,
        591,  739,  814])

Then I calculate the entropy using

from scipy.stats import entropy

data = [4400,  768,  617,  631,  614,  601,  613,  672,  781, 4405,  813,
        577,  609,  602,  614,  591,  739,  814, 4400,  768,  617,  631,
        614,  601,  613,  672,  781, 4405,  813,  577,  609,  602,  614,
        591,  739,  814]
pd_series = pd.Series(data)
counts = pd_series.value_counts()
entropy(counts)

I got 2.8133554045006157

When I normalize it using

1 - ((2.81 - 1.38) / 2.198)^2

The answer is 0.57, which is noticeably larger than the roughly 0.3 stated in https://link.springer.com/content/pdf/10.1007/s41109-019-0189-1.pdf.
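
For reference, the constants come from the orientation-order equation in that article: 1.38 ≈ ln 4 is the entropy of a perfect four-way grid and 2.198 ≈ ln 36 − ln 4 is its distance from the maximum entropy ln 36. The same arithmetic in Python (a small sketch):

import numpy as np

H = 2.8133554045006157      # the entropy value computed above
H_min = np.log(4)           # ~1.386: a perfect grid occupies 4 of the 36 bins
H_max = np.log(36)          # ~3.584: bearings spread evenly over all 36 bins
print(1 - ((H - H_min) / (H_max - H_min)) ** 2)  # ~0.578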

Any suggestions?



Solution 1:

I'm the author of the journal article you linked to and the research code you are seeking to adapt. To use that code for the city of Atlanta, just adapt it from the notebook and add the orientation order equation from the journal article, like this:

import numpy as np
import osmnx as ox
import pandas as pd
from scipy import stats
ox.config(use_cache=True, log_console=True)

query = 'Atlanta, GA, USA'
entropy_bins = 36

def reverse_bearing(x):
    return x + 180 if x < 180 else x - 180

def count_and_merge(n, bearings):
    n = n * 2
    bins = np.arange(n + 1) * 360 / n
    count, _ = np.histogram(bearings, bins=bins)
    count = np.roll(count, 1)
    return count[::2] + count[1::2]

Gu = ox.add_edge_bearings(ox.get_undirected(ox.graph_from_place(query, network_type='drive')))
b = pd.Series([d['bearing'] for u, v, k, d in Gu.edges(keys=True, data=True)])
bearings = pd.concat([b, b.map(reverse_bearing)]).reset_index(drop=True)
bin_counts = count_and_merge(entropy_bins, bearings)
orientation_entropy = stats.entropy(bin_counts)

# a perfect grid concentrates all streets in just 4 of the 36 bins
perfect_grid = [1] * 4 + [0] * (entropy_bins - 4)
min_entropy = stats.entropy(perfect_grid)
max_entropy = np.log(entropy_bins)  # ln(36): bearings spread evenly across all bins
orientation_order = 1 - ((orientation_entropy - min_entropy) / (max_entropy - min_entropy)) ** 2
print(orientation_order)  # 0.3115
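
Note that stats.entropy is applied directly to the 36 bin counts here, not to value_counts() of them as in your snippet; value_counts() describes how often each count value repeats, which is a different distribution. A minimal sketch of the difference, reusing the bin counts posted in the question:

from scipy import stats
import pandas as pd

# the 36 bin counts printed in the question (the second 18 repeat the first 18)
bin_counts = pd.Series([4400, 768, 617, 631, 614, 601, 613, 672, 781, 4405, 813,
                        577, 609, 602, 614, 591, 739, 814] * 2)

print(stats.entropy(bin_counts.value_counts()))  # ~2.813: what the question computed
print(stats.entropy(bin_counts))                 # the quantity the formula actually needs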

Sources

Source: Stack Overflow. Solution 1 by gboeing. Licensed under CC BY-SA 3.0.