WIP different small python scripts to generate corresponding images

The final API will be derived from these scripts and moved into a separate
repository, which will then contain only the functions providing the
functionality described in the associated master thesis.
This commit is contained in:
This commit is contained in:
2026-03-28 15:04:38 +01:00
parent ead3d70c35
commit 7581966c88
8 changed files with 469 additions and 198 deletions

View File

@@ -0,0 +1,40 @@
# Standalone schematic figure of the piece-wise linear centrality model:
# C(d) = m*d + c0 for d <= b, constant m*b + c0 beyond the breakpoint b,
# with red guide lines marking the correction offset (labelled delta).
import matplotlib.pyplot as plt
from matplotlib.collections import LineCollection
import numpy as np

# hand-picked model parameters for the illustration
b = 0.7    # breakpoint distance
c0 = 0.2   # intercept
m = 0.85   # slope
d_curve = np.linspace(0.3, 0.9, 500)
C_curve = np.piecewise(
    d_curve,
    [d_curve <= b, d_curve > b],
    [lambda x: m * x + c0, lambda x: m * b + c0]
)
fig, ax = plt.subplots(figsize=(15, 12))
ax.set_xlabel('Distance to Bounding-Box')
ax.set_ylabel('Centrality Value')
# red guide lines from the sample point x up to the plateau level m*b + c0
ex = [0.5, 0.7, 0.5, 0.5]
ey = [m*b + c0, m*b + c0, m*0.5 + c0, m*b + c0]
ch = LineCollection([np.column_stack([ex, ey])], colors=['r'], linewidths=0.5)
ax.add_collection(ch)
# delta label: gap between the point's value and the plateau value
ax.annotate("$𝛿$", xy=(0.48, 0.71), xytext=(0.48, 0.71), fontsize=18)
# white (invisible) scatter points, presumably to widen the data limits
# so the annotations are not clipped -- TODO confirm
ax.scatter([0.5, 0.8, 0.85], [0.5, 0.75, 0.9], color='w', s=1)
ax.plot(d_curve, C_curve, color='k', linewidth=0.8)
# axis-free labels for the sample point x and the breakpoint b
ax.annotate("$x$", xy=(0.5, m*0.5 + c0 - 0.01), xytext=(0.5, m*0.5 + c0 - 0.01), fontsize=10)
ax.annotate("$b$", xy=(b, m*b + c0 + 0.005), xytext=(b, m*b + c0 + 0.005), fontsize=10)
# ax.annotate("$f(x) = m*x + c_0$", xy=(0.32, 0.58), xytext=(0.32, 0.58), fontsize=10)
# ax.annotate("$g(x) = m*b + c_0$", xy=(0.75, 0.8), xytext=(0.75, 0.8), fontsize=10)
ax.axis('off')
# ax.get_xaxis().set_visible(False)
# ax.get_yaxis().set_visible(False)
ax.set_aspect('equal')
fig.savefig("model_correction_visualization.svg", format='svg', bbox_inches='tight', pad_inches=0)

View File

@@ -108,8 +108,8 @@ def apply(g, seed, weight, convex_hull, ax, method, method_name):
vp.a = np.nan_to_num(vp.a) # correct floating point values
# normalization
min_val, max_val = vp.a.min(), vp.a.max()
vp.a = (vp.a - min_val) / (max_val - min_val)
# min_val, max_val = vp.a.min(), vp.a.max()
# vp.a = (vp.a - min_val) / (max_val - min_val)
# generate model based on convex hull and associated centrality values
quantification = plot.quantification_data(g, vp, convex_hull)
@@ -147,8 +147,8 @@ def apply_corrected(g, seed, weight, convex_hull, ax, method, method_name):
vp.a = np.nan_to_num(vp.a) # correct floating point values
# normalization
min_val, max_val = vp.a.min(), vp.a.max()
vp.a = (vp.a - min_val) / (max_val - min_val)
# min_val, max_val = vp.a.min(), vp.a.max()
# vp.a = (vp.a - min_val) / (max_val - min_val)
# generate model based on convex hull and associated centrality values
quantification = plot.quantification_data(g, vp, convex_hull)

73
distance_types.py Normal file
View File

@@ -0,0 +1,73 @@
import math
import matplotlib.pyplot as plt
import numpy as np
from graph_tool.all import *
from src import centrality
from src import plot
from src import fitting
def random_graph(n=5000, seed=None):
    """
    Uniformly random 2D point cloud generation.

    `n` [int] Number of points to generate. Default 5000 seems like a good
        starting point in point density and corresponding runtime for the
        subsequent calculations.
    `seed` [int | None] RNG seed; when None a fresh 128-bit seed is drawn
        so the run can be reproduced from the returned value.
    @return [tuple] (points, seed): `points` is a numpy.ndarray of shape
        (n, 2) with coordinates in [0, 1), `seed` is the seed actually used.
        (The previous docstring claimed a bare ndarray was returned.)
    """
    if seed is None:
        import secrets
        seed = secrets.randbits(128)
    rng = np.random.default_rng(seed=seed)
    return rng.random((n, 2)), seed
def spatial_graph(adata):
    """
    Generate the spatial graph using a Delaunay triangulation of the given
    point coordinates (e.g. `adata.obsm['spatial']` when `adata` comes from
    a *squidpy* dataset).

    @return [tuple] (g, weight): the graph-tool Graph with a "pos" vertex
        property, and an edge property map holding each edge's Euclidean
        length.
    """
    g, pos = graph_tool.generation.triangulation(adata, type="delaunay")
    g.vp["pos"] = pos
    weight = g.new_edge_property("double")
    for e in g.edges():
        # Euclidean edge length. The previous expression
        # math.sqrt(sum(map(abs, diff)))**2 cancels the sqrt with the square
        # and therefore computed the Manhattan distance, not the Euclidean one.
        weight[e] = float(np.linalg.norm(pos[e.source()].a - pos[e.target()].a))
    return g, weight
def apply(g, seed, weight, convex_hull, ax, ax2, method):
    """
    Run the centrality `method` on `g` and plot the centrality-vs-distance
    relationship twice: once against the Euclidean distance to the convex
    hull (on `ax`) and once against the weighted shortest-path distance
    (on `ax2`).
    """
    # centrality values; NaNs produced by the measure are mapped to 0
    vp, _ep = method(g, weight=weight)
    vp.a = np.nan_to_num(vp.a)
    # Euclidean distance to the convex hull
    euclid = plot.quantification_data(g, vp, convex_hull)
    plot.quantification_plot(ax, euclid, None, None, "Euklidian Distance", None)
    # weighted shortest-path distance to the hull vertices
    path_dist = plot.quantification_data_path_distance(g, weight, vp, convex_hull)
    plot.quantification_plot(ax2, path_dist, None, None, "Shortest Path Distance", None)
# Build a random point cloud, triangulate it and compare the Euclidean vs.
# shortest-path distance quantification for betweenness centrality.
points, seed = random_graph(n=5000)
g, weight = spatial_graph(points)
g = GraphView(g)
# calculate convex hull
convex_hull = centrality.convex_hull(g)
fig = plt.figure(figsize=(21, 5))
ax1, ax2, ax3 = fig.subplots(1, 3)
# plot graph with convex_hull, coloured by betweenness
vp, ep = betweenness(g, weight=weight)
vp.a = np.nan_to_num(vp.a)  # correct floating point values
plot.graph_plot(fig, ax1, g, vp, convex_hull, f"Pointcloud (seed: {seed})")
apply(g, seed, weight, convex_hull, ax2, ax3, betweenness)
# stray f-prefix removed (the file name has no placeholders)
fig.savefig("Distance_5000_betweenness_euklidian.svg", format='svg')

91
effected_visualization.py Normal file
View File

@@ -0,0 +1,91 @@
import math
import matplotlib.pyplot as plt
import numpy as np
from graph_tool.all import *
from src import centrality
from src import plot
from src import fitting
def random_graph(n=5000, seed=None):
    """
    Uniformly random point cloud generation.

    `n` [int] Number of points to generate. Default 5000 seems like a good
        starting point in point density and corresponding runtime for the
        subsequent calculations.
    `seed` [int | None] RNG seed; when None a fresh 128-bit seed is drawn.
    @return [tuple] (points, seed): `points` is a numpy.ndarray of shape
        (n, 2) containing the coordinates of the generated point cloud,
        `seed` is the seed actually used (so the run can be reproduced).
    """
    if seed is None:
        import secrets
        seed = secrets.randbits(128)
    rng = np.random.default_rng(seed=seed)
    return rng.random((n, 2)), seed
def spatial_graph(adata):
    """
    Generate the spatial graph using a Delaunay triangulation of the given
    point coordinates (e.g. `adata.obsm['spatial']` when `adata` comes from
    a *squidpy* dataset).

    @return [tuple] (g, weight): the graph-tool Graph with a "pos" vertex
        property, and an edge property map holding each edge's Euclidean
        length.
    """
    g, pos = graph_tool.generation.triangulation(adata, type="delaunay")
    g.vp["pos"] = pos
    weight = g.new_edge_property("double")
    for e in g.edges():
        # Euclidean edge length. The previous expression
        # math.sqrt(sum(map(abs, diff)))**2 cancels the sqrt with the square
        # and therefore computed the Manhattan distance, not the Euclidean one.
        weight[e] = float(np.linalg.norm(pos[e.source()].a - pos[e.target()].a))
    return g, weight
# Visualize effected vs. uneffected vertices: fit the piece-wise linear
# model on the closeness / hull-distance relationship and colour vertices
# by whether they lie within the fitted boundary distance b_opt.
points, seed = random_graph()
g, weight = spatial_graph(points)
g = GraphView(g)
# calculate centrality values
vp = closeness(g, weight=weight)
vp.a = np.nan_to_num(vp.a)  # correct floating point values
# calculate convex hull
convex_hull = centrality.convex_hull(g)
fig = plt.figure(figsize=(15, 12))
ax0 = fig.subplots(1, 1)
# relationship data: [distance to hull, centrality value] per vertex
quantification = plot.quantification_data(g, vp, convex_hull)
# optimize model's piece-wise linear function
d = quantification[:, 0]
C = quantification[:, 1]
m_opt, c0_opt, b_opt, aic_opt = fitting.fit_piece_wise_linear(d, C)
# NOTE(review): the fitted curve (d_curve/C_curve) was computed here but
# never passed to graph_plot_effected, so the dead computation and the
# commented-out regression block were dropped.
# plot graphs effected / uneffected nodes
plot.graph_plot_effected(fig, ax0, g, vp, convex_hull, b_opt, f"Random Graph (seed: {seed})")
# stray f-prefix removed (the file name has no placeholders)
fig.savefig("model_closeness_5000_effected_vs_uneffected.svg", format='svg')

View File

@@ -2,7 +2,7 @@ import math
import matplotlib.pyplot as plt
import numpy as np
import squidpy as sq
# import squidpy as sq
from graph_tool.all import *
from src import centrality
@@ -29,6 +29,7 @@ def mibitof():
adata = sq.datasets.mibitof()
return adata
def random_graph(n=5000, seed=None):
"""
Uniformly random point cloud generation.
@@ -96,77 +97,19 @@ def spatial_graph(adata):
weight[e] = math.sqrt(sum(map(abs, pos[e.source()].a - pos[e.target()].a)))**2
return g, weight
def merfish_example():
# generate spatial graph from a given dataset
g, weight = spatial_graph(merfish().obsm['spatial'])
g = GraphView(g)
x_spatial = []
for v in g.vertices():
x_spatial.append(g.vp["pos"][v][0])
# calculate centrality values
vp = closeness(g, weight=weight)
vp.a = np.nan_to_num(vp.a) # correct floating point values
# normalization
min_val, max_val = vp.a.min(), vp.a.max()
vp.a = (vp.a - min_val) / (max_val - min_val)
# calculate convex hull
convex_hull = centrality.convex_hull(g)
# plot graph with convex_hull
fig = plt.figure(figsize=(15, 5))
ax0, ax1 = fig.subplots(1, 2)
plot.graph_plot(fig, ax0, g, vp, convex_hull, f"Merfish\nCloseness")
# generate model based on convex hull and associated centrality values
quantification = plot.quantification_data(g, vp, convex_hull)
# optimize model's piece-wise linear function
d = quantification[:, 0]
C = quantification[:, 1]
m_opt, c0_opt, b_opt, aic_opt = fitting.fit_piece_wise_linear(d, C)
# TODO
# should this be part of the plotting function itself, it should not be necessary for me to do this
d_curve = np.linspace(min(d), max(d), 500)
C_curve = np.piecewise(
d_curve,
[d_curve <= b_opt, d_curve > b_opt],
[lambda x: m_opt * x + c0_opt, lambda x: m_opt * b_opt + c0_opt]
)
# plot model containing modeled piece-wise linear function
plot.quantification_plot(ax1, quantification, d_curve, C_curve, 'Models', aic_opt)
# linear regression model
m_reg, c_reg, aic_reg = fitting.fit_linear_regression(d, C)
x = np.linspace(min(d), max(d), 500)
y = m_reg * x + c_reg
ax1.plot(x, y, color='k', linewidth=1, label=f"Simple Linear Regression | AIC: {aic_reg}")
ax1.legend()
fig.savefig(f"Merfish_closeness.svg", format='svg')
for i in range(1, 6):
points, seed = random_graph()
g, weight = spatial_graph(points)
g = GraphView(g)
x_spatial = []
for v in g.vertices():
x_spatial.append(g.vp["pos"][v][0])
# calculate centrality values
vp = closeness(g, weight=weight)
vp.a = np.nan_to_num(vp.a) # correct floating point values
# ep.a = np.nan_to_num(ep.a) # correct floating point values
# normalization
min_val, max_val = vp.a.min(), vp.a.max()
vp.a = (vp.a - min_val) / (max_val - min_val)
# min_val, max_val = vp.a.min(), vp.a.max()
# vp.a = (vp.a - min_val) / (max_val - min_val)
# calculate convex hull
convex_hull = centrality.convex_hull(g)
@@ -174,7 +117,7 @@ for i in range(1, 6):
# plot graph with convex_hull
fig = plt.figure(figsize=(15, 5))
ax0, ax1 = fig.subplots(1, 2)
plot.graph_plot(fig, ax0, g, vp, convex_hull, f"Random Graph (seed: {seed})\nCloseness")
plot.graph_plot(fig, ax0, g, vp, convex_hull, f"Random Graph (seed: {seed})")
# generate model based on convex hull and associated centrality values
quantification = plot.quantification_data(g, vp, convex_hull)
@@ -193,110 +136,21 @@ for i in range(1, 6):
[lambda x: m_opt * x + c0_opt, lambda x: m_opt * b_opt + c0_opt]
)
# plot model containing modeled piece-wise linear function
plot.quantification_plot(ax1, quantification, d_curve, C_curve, 'Models', aic_opt)
plot.quantification_plot(ax1, quantification, d_curve, C_curve, 'Closeness', aic_opt)
# linear regression model
m_reg, c_reg, aic_reg = fitting.fit_linear_regression(d, C)
# # linear regression model
# m_reg, c0_reg, b_reg, aic_reg = fitting.fit_cut(d, C)
# print(f"m_reg = {m_reg}")
x = np.linspace(min(d), max(d), 500)
y = m_reg * x + c_reg
ax1.plot(x, y, color='k', linewidth=1, label=f"Simple Linear Regression | AIC: {aic_reg}")
ax1.legend()
# # TODO
# # should this be part of the plotting function itself, it should not be necessary for me to do this
# d_curve = np.linspace(min(d), max(d), 500)
# C_curve = np.piecewise(
# d_curve,
# [d_curve <= b_reg, d_curve > b_reg],
# [lambda x: m_reg * x + c0_reg, lambda x: m_reg * b_reg + c0_reg]
# )
# ax1.plot(d_curve, C_curve, color='k', linewidth=1, label=f"Top Cut | AIC: {aic_reg}")
# ax1.legend()
fig.savefig(f"uniform_random_point_clouds/{i}_closeness.svg", format='svg')
# ---------------------------------------------------------------------------------------------
# calculate centrality values
vp, ep = betweenness(g, weight=weight)
vp.a = np.nan_to_num(vp.a) # correct floating point values
# ep.a = np.nan_to_num(ep.a) # correct floating point values
# normalization
min_val, max_val = vp.a.min(), vp.a.max()
vp.a = (vp.a - min_val) / (max_val - min_val)
# calculate convex hull
convex_hull = centrality.convex_hull(g)
# plot graph with convex_hull
fig = plt.figure(figsize=(15, 5))
ax0, ax1 = fig.subplots(1, 2)
plot.graph_plot(fig, ax0, g, vp, convex_hull, f"Random Graph (seed: {seed})\nBetweenness")
# generate model based on convex hull and associated centrality values
quantification = plot.quantification_data(g, vp, convex_hull)
# optimize model's piece-wise linear function
d = quantification[:, 0]
C = quantification[:, 1]
m_opt, c0_opt, b_opt, aic_opt = fitting.fit_piece_wise_linear(d, C)
# TODO
# should this be part of the plotting function itself, it should not be necessary for me to do this
d_curve = np.linspace(min(d), max(d), 500)
C_curve = np.piecewise(
d_curve,
[d_curve <= b_opt, d_curve > b_opt],
[lambda x: m_opt * x + c0_opt, lambda x: m_opt * b_opt + c0_opt]
)
# plot model containing modeled piece-wise linear function
plot.quantification_plot(ax1, quantification, d_curve, C_curve, 'Models', aic_opt)
# linear regression model
m_reg, c_reg, aic_reg = fitting.fit_linear_regression(d, C)
x = np.linspace(min(d), max(d), 500)
y = m_reg * x + c_reg
ax1.plot(x, y, color='k', linewidth=1, label=f"Simple Linear Regression | AIC: {aic_reg}")
ax1.legend()
fig.savefig(f"uniform_random_point_clouds/{i}_betweenness.svg", format='svg')
# ---------------------------------------------------------------------------------------------
# calculate centrality values
vp = pagerank(g, weight=weight)
vp.a = np.nan_to_num(vp.a) # correct floating point values
# ep.a = np.nan_to_num(ep.a) # correct floating point values
# normalization
min_val, max_val = vp.a.min(), vp.a.max()
vp.a = (vp.a - min_val) / (max_val - min_val)
# calculate convex hull
convex_hull = centrality.convex_hull(g)
# plot graph with convex_hull
fig = plt.figure(figsize=(15, 5))
ax0, ax1 = fig.subplots(1, 2)
plot.graph_plot(fig, ax0, g, vp, convex_hull, f"Random Graph (seed: {seed})\nPageRank")
# generate model based on convex hull and associated centrality values
quantification = plot.quantification_data(g, vp, convex_hull)
# optimize model's piece-wise linear function
d = quantification[:, 0]
C = quantification[:, 1]
m_opt, c0_opt, b_opt, aic_opt = fitting.fit_piece_wise_linear(d, C)
# TODO
# should this be part of the plotting function itself, it should not be necessary for me to do this
d_curve = np.linspace(min(d), max(d), 500)
C_curve = np.piecewise(
d_curve,
[d_curve <= b_opt, d_curve > b_opt],
[lambda x: m_opt * x + c0_opt, lambda x: m_opt * b_opt + c0_opt]
)
# plot model containing modeled piece-wise linear function
plot.quantification_plot(ax1, quantification, d_curve, C_curve, 'Models', aic_opt)
# linear regression model
m_reg, c_reg, aic_reg = fitting.fit_linear_regression(d, C)
x = np.linspace(min(d), max(d), 500)
y = m_reg * x + c_reg
ax1.plot(x, y, color='k', linewidth=1, label=f"Simple Linear Regression | AIC: {aic_reg}")
ax1.legend()
fig.savefig(f"uniform_random_point_clouds/{i}_pagerank.svg", format='svg')
fig.savefig(f"model_closeness_5000_fitted.svg", format='svg')

61
model_based_correction.py Normal file
View File

@@ -0,0 +1,61 @@
import math
import matplotlib.pyplot as plt
import numpy as np
from graph_tool.all import *
from src import centrality
from src import plot
from src import fitting
def random_graph(n=5000, seed=None):
    """
    Uniformly random point cloud generation.

    `n` [int] Number of points to generate. Default 5000 seems like a good
        starting point in point density and corresponding runtime for the
        subsequent calculations.
    `seed` [int | None] RNG seed; when None a fresh 128-bit seed is drawn.
    @return [tuple] (points, seed): `points` is a numpy.ndarray of shape
        (n, 2) containing the coordinates of the generated point cloud,
        `seed` is the seed actually used (so the run can be reproduced).
    """
    if seed is None:
        import secrets
        seed = secrets.randbits(128)
    rng = np.random.default_rng(seed=seed)
    return rng.random((n, 2)), seed
def spatial_graph(adata):
    """
    Generate the spatial graph using a Delaunay triangulation of the given
    point coordinates.

    @return [tuple] (g, weight): the graph-tool Graph with a "pos" vertex
        property, and an edge property map holding each edge's Euclidean
        length.
    """
    g, pos = graph_tool.generation.triangulation(adata, type="delaunay")
    g.vp["pos"] = pos
    weight = g.new_edge_property("double")
    for e in g.edges():
        # Euclidean edge length. The previous expression
        # math.sqrt(sum(map(abs, diff)))**2 cancels the sqrt with the square
        # and therefore computed the Manhattan distance, not the Euclidean one.
        weight[e] = float(np.linalg.norm(pos[e.source()].a - pos[e.target()].a))
    return g, weight
# Compare closeness before and after the model-based correction: fit the
# piece-wise linear model, correct the centrality values with it and plot
# both versions side by side.
points, seed = random_graph()
g, weight = spatial_graph(points)
g = GraphView(g)
# calculate centrality values
vp = closeness(g, weight=weight)
vp.a = np.nan_to_num(vp.a)  # correct floating point values
# calculate convex hull
convex_hull = centrality.convex_hull(g)
# plot graph with convex_hull
fig = plt.figure(figsize=(15, 5))
ax0, ax1 = fig.subplots(1, 2)
plot.graph_plot(fig, ax0, g, vp, convex_hull, "Closeness without prediction")
# generate model based on convex hull and associated centrality values
quantification = plot.quantification_data(g, vp, convex_hull)
# optimize model's piece-wise linear function
d = quantification[:, 0]
C = quantification[:, 1]
m_opt, c0_opt, b_opt, aic_opt = fitting.fit_piece_wise_linear(d, C)
# correct the centrality values using the fitted model parameters
vp = centrality.correct(g, vp, m_opt, c0_opt, b_opt)
plot.graph_plot(fig, ax1, g, vp, convex_hull, "Closeness with model prediction")
# stray f-prefixes removed from the three placeholder-less strings above/below
fig.savefig("model_prediction_comparison.svg", format='svg')

76
point_cloud_example.py Normal file
View File

@@ -0,0 +1,76 @@
import math
import matplotlib.pyplot as plt
from matplotlib.collections import LineCollection
import numpy as np
from graph_tool.all import *
def random_graph(n=5000, seed=None):
    """
    Uniformly random point cloud generation.

    `n` [int] Number of points to generate. Default 5000 seems like a good
        starting point in point density and corresponding runtime for the
        subsequent calculations.
    `seed` [int | None] RNG seed; when None a fresh 128-bit seed is drawn.
    @return [tuple] (points, seed): `points` is a numpy.ndarray of shape
        (n, 2) containing the coordinates of the generated point cloud,
        `seed` is the seed actually used (so the run can be reproduced).
    """
    if seed is None:
        import secrets
        seed = secrets.randbits(128)
    rng = np.random.default_rng(seed=seed)
    return rng.random((n, 2)), seed
def spatial_graph(adata):
    """
    Generate the spatial graph using a Delaunay triangulation of the given
    point coordinates (e.g. `adata.obsm['spatial']` when `adata` comes from
    a *squidpy* dataset).

    @return [tuple] (g, weight): the graph-tool Graph with a "pos" vertex
        property, and an edge property map holding each edge's Euclidean
        length.
    """
    g, pos = graph_tool.generation.triangulation(adata, type="delaunay")
    g.vp["pos"] = pos
    weight = g.new_edge_property("double")
    for e in g.edges():
        # Euclidean edge length. The previous expression
        # math.sqrt(sum(map(abs, diff)))**2 cancels the sqrt with the square
        # and therefore computed the Manhattan distance, not the Euclidean one.
        weight[e] = float(np.linalg.norm(pos[e.source()].a - pos[e.target()].a))
    return g, weight
def draw_graph(G, ax, name):
    """
    Draw graph `G` onto `ax`: all edges as thin black lines and all
    vertices as a small scatter plot.

    `G` [Graph] graph-tool graph with a "pos" vertex property.
    `ax` [matplotlib.axes.Axes] target axes.
    `name` [str] axes title.
    """
    pos = G.vp["pos"]
    x = []
    y = []
    for v in G.vertices():
        ver = pos[v]
        x.append(ver[0])
        y.append(ver[1])
    # Collect every edge segment first and add them as ONE LineCollection
    # instead of one collection per edge -- identical output, but far fewer
    # matplotlib artists (the original created an artist per edge).
    segments = []
    for e in G.edges():
        ex = [pos[e.source()][0], pos[e.target()][0]]
        ey = [pos[e.source()][1], pos[e.target()][1]]
        segments.append(np.column_stack([ex, ey]))
    ax.add_collection(LineCollection(segments, colors=['k'], linewidths=0.1))
    ax.scatter(x, y, s=1)  # vertices
    ax.set_title(name)
#
# - Create a random point cloud and calculate a triangulation on it
# - For that graph calculate the convex hull
# - Draw the graph with the convex hull
# - For each centrality measure
#    - apply centrality measure to the next axis
# - Draw the corresponding resulting models into a grid
#
n = 3000
points, seed = random_graph(n=n)
g, weight = spatial_graph(points)
g = GraphView(g)
# plot graph (hull drawing is currently disabled inside draw_graph)
fig_graph, ax_graph = plt.subplots(figsize=(15, 12))
# fixed the title typo ("Pointcould") and the hard-coded "n: 500", which
# contradicted the actual point count of 3000
draw_graph(g, ax_graph, f"Pointcloud (seed: {seed} | n: {n})")
fig_graph.savefig("point_cloud_example.svg", format='svg')

View File

@@ -6,6 +6,7 @@ import matplotlib.colors as mcolors
from matplotlib.collections import LineCollection
from src import centrality
from graph_tool.all import *
class Vector:
"""
@@ -71,6 +72,49 @@ def graph_plot(fig, ax, G, measures, convex_hull, name, show_edges=False):
fig.colorbar(sc, ax=ax)
def graph_plot_effected(fig, ax, G, measures, convex_hull, b, name, show_edges=False):
    """
    Plot relationship data of effected vs uneffected nodes determined through model.

    Every vertex is coloured red when its minimal distance to the convex
    hull is <= `b` (within the fitted boundary region, i.e. "effected"),
    blue otherwise.

    `fig` [Figure] unused here; kept for signature symmetry with graph_plot.
    `measures` [PropertyMap] vertex centrality values, aligned with
        `G.vertices()` order.
    `convex_hull` [sequence] hull elements used as the distance reference
        (via the Vector helper -- presumably hull points; see Vector.vec).
    `b` [float] fitted boundary distance of the piece-wise linear model.
    """
    quantification = []
    pos = G.vp["pos"]
    x = []
    y = []
    for v in G.vertices():
        ver = pos[v]
        x.append(ver[0])
        y.append(ver[1])
    measures = measures.a
    keys = iter(measures)  # centrality values consumed in step with `points`
    points = np.stack((np.array(x), np.array(y)), axis=-1)
    for point in points:
        # minimal distance from this vertex to any hull element
        min_distance = math.inf
        key = next(keys)
        for edge in convex_hull:
            vector = Vector.vec(point, edge)
            distance = Vector.vec_len(vector)
            if distance < min_distance:
                min_distance = distance
        quantification.append([min_distance, key])
    # colour: red = within boundary b (effected), blue = beyond (uneffected)
    c = list(map(lambda q: 'b' if q[0] > b else 'r', quantification))
    if show_edges:
        for e in G.edges():
            ex = [pos[e.source()][0], pos[e.target()][0]]
            ey = [pos[e.source()][1], pos[e.target()][1]]
            ax.add_collection(LineCollection([np.column_stack([ex, ey])], colors=['k'], linewidths=0.1))
    # dropped the unused `sc =` assignment (return value was never used)
    ax.scatter(x, y, s=1, c=c)  # map effected/uneffected colours onto the vertices
    ax.set_title(name)
def normalize_dict(d):
    """
    Scale all values of `d` so the largest becomes 1.0.

    `d` [dict] non-empty mapping of keys to numeric values.
    @return [dict] new dict with every value divided by the maximum value.
    """
    max_val = np.max(list(d.values()))  # renamed: `max` shadowed the builtin
    return {k: (v / max_val) for k, v in d.items()}
@@ -112,6 +156,38 @@ def quantification_data(G, measures, convex_hull):
return np.array(quantification)
def quantification_data_path_distance(G, weights, measures, convex_hull):
    """
    For every vertex compute the weighted shortest-path distance to the
    nearest convex-hull vertex and pair it with the vertex's centrality
    value.

    `weights` [EdgePropertyMap] edge lengths used for the shortest paths.
    `measures` [PropertyMap] vertex centrality values, aligned with
        `G.vertices()` order.
    `convex_hull` [sequence] hull point coordinates; matched back to their
        graph vertices by exact coordinate comparison.
    @return [numpy.ndarray] shape (n, 2), rows of [distance, centrality],
        sorted by ascending distance.
    """
    quantification = []
    pos = G.vp["pos"]
    # map the hull coordinates back to their graph vertices
    convex_hull_verticies = []
    for v in G.vertices():
        ver = pos[v]
        for n in convex_hull:
            if np.equal(n, np.array([ver[0], ver[1]])).all():
                convex_hull_verticies.append(v)
    # NOTE(review): dropped the dead x/y lists and the `points` array built
    # from them -- they were never populated nor read.
    keys = iter(measures.a)
    for v in G.vertices():
        min_distance = math.inf
        key = next(keys)
        for h in convex_hull_verticies:
            vertices, edges = graph_tool.topology.shortest_path(G, v, h, weights=weights)
            # total weighted length of the path (resolves the old TODO)
            path_length = sum(weights[edge] for edge in edges)
            if path_length < min_distance:
                min_distance = path_length
        quantification.append([min_distance, key])
    # sort by distance
    quantification.sort(key=lambda entry: entry[0])
    return np.array(quantification)
def quantification_plot(ax, quantification, d_curve, C_curve, metric_name, aic_score):
"""
Plot relationship data.