WIP: assorted small Python scripts that generate the corresponding images.

The final API will be derived from these scripts and moved into a separate
repository, which will then contain only the functions that provide the
functionality described in the associated master's thesis.
This commit is contained in:
2026-03-28 15:04:38 +01:00
parent ead3d70c35
commit 7581966c88
8 changed files with 469 additions and 198 deletions

91
effected_visualization.py Normal file
View File

@@ -0,0 +1,91 @@
import math
import matplotlib.pyplot as plt
import numpy as np
from graph_tool.all import *
from src import centrality
from src import plot
from src import fitting
def random_graph(n=5000, seed=None):
    """
    Uniformly random point cloud generation.

    `n` [int] Number of points to generate. Default 5000 seems like a good
        starting point in point density and corresponding runtime for the
        subsequent calculations.
    `seed` [int | None] Seed for the random number generator. When `None`,
        a fresh 128-bit seed is drawn from the OS entropy pool so the run
        is reproducible via the returned seed.
    @return [(numpy.ndarray, int)] Tuple of the array of shape (n, 2)
        containing the coordinates for each point of the generated point
        cloud, and the seed actually used (useful for reproducing the run).
    """
    if seed is None:
        # Lazy import: only needed when no explicit seed is supplied.
        import secrets
        seed = secrets.randbits(128)
    rng = np.random.default_rng(seed=seed)
    return rng.random((n, 2)), seed
def spatial_graph(adata):
    """
    Generate the spatial graph using a Delaunay triangulation of the given points.

    `adata` [numpy.ndarray] Array of shape (n, 2) with the point coordinates
        (e.g. as produced by `random_graph`, or taken from
        `adata.obsm['spatial']` when working with a *squidpy* dataset).
    @return [(Graph, EdgePropertyMap)] The triangulated graph-tool graph
        (vertex positions stored in `g.vp['pos']`) and an edge property map
        holding the Euclidean length of each edge.
    """
    g, pos = graph_tool.generation.triangulation(adata, type="delaunay")
    g.vp["pos"] = pos
    weight = g.new_edge_property("double")
    for e in g.edges():
        # Euclidean distance between the edge's endpoints.
        # NOTE(review): the previous expression
        #   math.sqrt(sum(map(abs, a - b)))**2
        # cancels to the L1 (Manhattan) distance; `math.dist` gives the
        # intended L2 distance.
        weight[e] = math.dist(pos[e.source()].a, pos[e.target()].a)
    return g, weight
# --- Script: visualize affected vs. unaffected nodes on a random point cloud ---
points, seed = random_graph()
g, weight = spatial_graph(points)
g = GraphView(g)

# Calculate closeness centrality; replace NaN values (produced for
# unreachable/isolated vertices) with 0 so downstream math stays finite.
vp = closeness(g, weight=weight)
vp.a = np.nan_to_num(vp.a)

# Convex hull of the graph — the boundary used to classify nodes.
convex_hull = centrality.convex_hull(g)

# Plot graph with convex hull.
fig = plt.figure(figsize=(15, 12))
ax0 = fig.subplots(1, 1)

# Generate the model data based on the convex hull and the associated
# centrality values: column 0 is the distance to the hull, column 1 the
# centrality value.
quantification = plot.quantification_data(g, vp, convex_hull)
d = quantification[:, 0]
C = quantification[:, 1]

# Optimize the model's piece-wise linear function: rising linear segment up
# to the break point `b_opt`, constant afterwards.
m_opt, c0_opt, b_opt, aic_opt = fitting.fit_piece_wise_linear(d, C)

# TODO: building the fitted curve should arguably be part of the plotting
# function itself; it should not be necessary to do this here.
# NOTE(review): `C_curve` is currently not consumed by any active code below
# (it was only used by a since-removed alternative-model plot); kept pending
# the TODO above.
d_curve = np.linspace(min(d), max(d), 500)
C_curve = np.piecewise(
    d_curve,
    [d_curve <= b_opt, d_curve > b_opt],
    [lambda x: m_opt * x + c0_opt, lambda x: m_opt * b_opt + c0_opt]
)

# Plot the graph, marking nodes as affected/unaffected based on the break
# point `b_opt`.
plot.graph_plot_effected(fig, ax0, g, vp, convex_hull, b_opt, f"Random Graph (seed: {seed})")

# Plain string: the file name contains no placeholders (was a needless f-string).
fig.savefig("model_closeness_5000_effected_vs_uneffected.svg", format='svg')