Commit 63f52135 authored by Hippalectryon

group size & fit

fix polygon visualisation
parent c8aab711
@@ -5,6 +5,7 @@ from typing import List, Dict, Tuple
import numpy as np
import scipy.optimize
import scipy.spatial
import God.SaveAndLoad as SaveAndLoad
from God.Physics import Physics
@@ -16,7 +17,7 @@ log.setLevel(logging.INFO)
def get_group_size_occurences(groups: List[list]) -> Tuple[List[int], List[int]]:
    size_groups = [len(group) for group in groups]
    size_groups_curated = [i for i in size_groups if i > 0]
    max_size_group = np.max(size_groups_curated)
    size_x = range(1, max_size_group + 1)
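For reference, the bookkeeping above amounts to computing the size of every group and then counting how often each size occurs. A minimal standalone sketch (the groups value is made up and np.bincount is a shortcut used only for illustration, not the module's implementation):

# Illustrative sketch only: size of each group, and how often each size occurs.
import numpy as np

groups = [[0, 1, 2], [3, 4], [], [5, 6, 7]]      # hypothetical grouping of 8 birds
group_to_size = [len(g) for g in groups]         # [3, 2, 0, 3]
curated = [s for s in group_to_size if s > 0]    # drop empty groups
occurrences = np.bincount(curated)[1:]           # counts for sizes 1..max: [0, 1, 2]
print(group_to_size, list(occurrences))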
@@ -52,6 +53,7 @@ class Processor:
self.to_process["group_size_avg"] = "group_size_avg" in to_process
self.to_process["group_size_avg_fit"] = self.to_process["group_size_avg"] and (
"group_size_avg_fit" in to_process)
self.to_process["group_hulls"] = "group_hulls" in to_process
self.to_process["avg_speed"] = "avg_speed" in to_process
self.to_process["avg_angle"] = "avg_angle" in to_process
self.to_process["correlations"] = "correlations" in to_process
@@ -76,6 +78,12 @@ class Processor:
self.data_holders["group_size_combined"] = []
if self.to_process["group_size_avg_fit"]:
self.data_holders["group_size_avg_fit"] = []
if self.to_process["group_hulls"]:
self.data_holders["group_hulls"] = []
self.data_holders["group_hulls_dimensions"] = []
self.data_holders["group_hulls_dimensions_avg"] = []
self.data_holders["group_dimensions_combined"] = []
self.data_holders["group_hulls_dimensions_avg_fit"] = []
if self.to_process["correlations"]:
self.data_holders["correlations"] = []
if self.to_process["correlations_fit"]:
@@ -177,6 +185,88 @@ class Processor:
log.warning("Exception in group size fit: %s" % e)
self.data_holders["group_size_avg_fit"].append(None)

    def process_group_hulls(self, sky: Sky, group_to_size: List[int]) -> None:
        groups = self.data_holders["groups"][-1]
        positions = np.array([bird.pos for bird in sky.birds])
        indexes_of_bird_by_group = [np.array([i for i in range(len(groups)) if groups[i] == j]) for j in
                                    range(len(groups))]
        all_hulls = []
        all_hulls_dimensions = np.zeros(np.max(group_to_size) + 1)
        for group in range(len(group_to_size)):
            birds_indexes = indexes_of_bird_by_group[group]
            if len(birds_indexes) == 0:
                all_hulls.append((0, []))
                continue
            elif len(birds_indexes) == 1:
                all_hulls.append((0, [birds_indexes[0]]))
                continue
            positions_hull = positions[birds_indexes]
            if len(birds_indexes) == 2:
                hull_indexes = list(range(len(birds_indexes)))
                scipy_hull = None
                periodic_shift = np.array((0, 0))
            else:
                elements_shift_x = [pos[0] for pos in positions_hull if pos[0] > sky.L / 2]
                elements_shift_y = [pos[1] for pos in positions_hull if pos[1] > sky.L / 2]
                shift_x = (np.min(elements_shift_x) - sky.L / 2) if len(elements_shift_x) > 0 else 0
                shift_y = (np.min(elements_shift_y) - sky.L / 2) if len(elements_shift_y) > 0 else 0
                periodic_shift = np.array((shift_x, shift_y))
                positions_hull -= periodic_shift  # shift so a group straddling the periodic boundary becomes contiguous
                positions_hull %= sky.L
                scipy_hull = scipy.spatial.ConvexHull(positions_hull)
                hull_indexes = scipy_hull.vertices
                positions_hull += periodic_shift  # undo the shift
                positions_hull %= sky.L
            all_hulls.append((periodic_shift, birds_indexes[hull_indexes]))
            all_hulls_dimensions[group_to_size[group]] += (scipy_hull.volume if scipy_hull is not None else 0)
        self.data_holders["group_hulls"].append(all_hulls)
        self.data_holders["group_hulls_dimensions"].append(all_hulls_dimensions)
        # average
        group_dimensions_combined = self.data_holders["group_dimensions_combined"]
        # adjust size of group_dimensions_combined if "new sizes" have appeared
        size_diff = len(all_hulls_dimensions) - len(group_dimensions_combined)
        if size_diff > 0:
            for _ in range(size_diff):  # avoid shenanigans with mutable lists
                group_dimensions_combined += [[]]
        # register sizes observed this frame
        for size in range(len(all_hulls_dimensions)):
            if all_hulls_dimensions[size] > 0:
                group_dimensions_combined[size].append(all_hulls_dimensions[size])
        self.data_holders["group_hulls_dimensions_avg"].append(
            [np.mean(group_dimensions) for group_dimensions in group_dimensions_combined])

        # avg fit
        def fit(x, a1):
            return x ** a1

        group_dimension_avg = np.array(self.data_holders["group_hulls_dimensions_avg"][-1])
        size_x = np.array(range(len(group_dimension_avg)))
        try:
            nonzero_indexes = [i for i in range(len(group_dimension_avg)) if not np.isnan(group_dimension_avg[i])]
            group_dimension_avg = group_dimension_avg[nonzero_indexes]
            size_x = size_x[nonzero_indexes]
            popt, _ = scipy.optimize.curve_fit(fit, size_x, group_dimension_avg)
            a, = popt
            residuals = group_dimension_avg - fit(size_x, a)
            ss_res = np.sum(residuals ** 2)
            ss_tot = np.sum((group_dimension_avg - np.mean(group_dimension_avg)) ** 2)
            r_squared = 1 - (ss_res / ss_tot)
            self.data_holders["group_hulls_dimensions_avg_fit"].append([a, r_squared])
        except Exception as e:
            log.warning("Exception in group dimensions fit: %s" % e)
            self.data_holders["group_hulls_dimensions_avg_fit"].append(None)

    def process_correlations(self, sky: Sky, L: float) -> None:
        correlations_stochastic_points = self.options["correlations_stochastic_points"]
        dists, corrs = sky.get_angles_correlations(n=correlations_stochastic_points)
@@ -237,7 +327,8 @@ class Processor:
if self.to_process["avg_angle"]:
self.process_avg_angle(sky)
if self.to_process["groups"] or self.to_process["group_size"] or self.to_process["group_size_avg"]:
if self.to_process["groups"] or self.to_process["group_size"] or self.to_process["group_size_avg"] or \
self.to_process["group_hulls"]:
groups, bird_to_group = physics.get_groups()
group_to_size, size_occurences = get_group_size_occurences(groups)
if self.to_process["group_to_size"]:
@@ -250,6 +341,8 @@ class Processor:
                self.process_group_size_avg(size_occurences, frame_number)
            if self.to_process["group_size_avg_fit"]:
                self.process_group_size_avg_fit()
            if self.to_process["group_hulls"]:
                self.process_group_hulls(sky, group_to_size)
        if self.to_process["correlations"]:
            self.process_correlations(sky, L)
@@ -261,11 +354,16 @@ class Processor:
            SaveAndLoad.save_data_dirname(self.data_holders[prop_name], output_file, "%s.json" % prop_name)

        # save the actual data
        simple_propreties = ["avg_speed", "avg_angle", "groups", "group_size", "group_to_size", "group_size_avg",
                             "group_size_avg_fit", "group_hulls",
                             "correlations", "correlations_fit"]
        for property_name in simple_propreties:
            if self.to_process[property_name]:
                save_prop_name(property_name)
        if self.to_process["group_hulls"]:
            save_prop_name("group_hulls_dimensions")
            save_prop_name("group_hulls_dimensions_avg")
            save_prop_name("group_hulls_dimensions_avg_fit")

        # save the simulation's parameters
        self.simulation_params["processing_options"] = self.options
This diff is collapsed.
@@ -6,6 +6,8 @@ class NumpyEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, np.int64) or isinstance(obj, np.int32):
            return int(obj)
        return json.JSONEncoder.default(self, obj)
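For reference, a quick usage sketch of the encoder (the payload dict is made up): without the new np.int64/np.int32 branch, json.dumps raises TypeError on NumPy integer scalars such as values coming out of integer-array indexing, whereas np.float64 already serialises because it subclasses Python's float.

# Illustrative usage of NumpyEncoder; assumes the class above is in scope.
import json
import numpy as np

payload = {"group": np.int64(3), "vertices": np.array([0, 2, 5])}
print(json.dumps(payload, cls=NumpyEncoder))  # -> {"group": 3, "vertices": [0, 2, 5]}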
@@ -38,21 +38,23 @@ def launch_two_groups(output_file: str, L: float, n_birds_1: int, n_birds_2: int
    Life.simulate(physics, dt, total_time, verbose_prop=.1, output_file=output_file)


launch_two_groups("simulation_data/test.json", L=100, n_birds_1=200, n_birds_2=100, radius_1=5, radius_2=5,
                  total_time=100, center_1=[20, 50], center_2=[80, 50], angle_1=0, angle_2=np.pi)
# launch_simulation_random("simulation_data/test.json", L=100, n_birds=1000, eta=.3, total_time=50)  # , evolve=evolve)

to_process = ["avg_speed", "avg_angle", "group_size", "group_size_avg", "group_size_avg_fit", "groups", "correlations",
              "correlations_fit", "group_to_size"]
Processor().process("simulation_data/test.json", "processing_data/test", verbose_prop=.1, to_process=to_process,
                    options={"correlations_stochastic_points": 5000})

# launch_two_groups("simulation_data/test.json", L=100, n_birds_1=100, n_birds_2=0, radius_1=5, radius_2=5,
#                   total_time=60, center_1=[50, 50], center_2=[80, 50], angle_1=0, angle_2=np.pi, eta=.4)
# to_process = ["avg_speed", "avg_angle", "group_size", "group_size_avg", "group_size_avg_fit", "groups", "correlations",
#               "correlations_fit", "group_to_size", "group_hulls"]
# Processor().process("simulation_data/test.json", "processing_data/test", verbose_prop=.1, to_process=to_process,
#                     options={"correlations_stochastic_points": 5000})
to_draw = ["avg_speed", "avg_angle", "avg_polar", "correlations", "correlations_fit", "correlation_length",
"group_size", "group_size_avg", "group_size_avg_fit", "quiver", "evolution_group_size"]
"group_size", "group_size_avg", "group_size_avg_fit", "group_dimension", "group_dimension_avg", "quiver", "evolution_group_size"]
Visualiser("processing_data/test", "visualisations/test.mp4", simulation_data_file="simulation_data/test.json",
verbose_prop=.1,
to_draw=to_draw,
options={"quiver_color_by_group": False, "quiver_draw_by_group": False, "quiver_color_single": True, "max_group_size": 5000,
to_draw=to_draw, t_start=0,
options={"quiver_color_by_group": True, "quiver_draw_by_group": True, "quiver_color_single": True, "max_group_size": 50,
"max_num_groups": 500}).vizualize()
# N = [10000]