Commit f9fa10f1 authored by Hippalectryon

add remote

parent 962bb158
@@ -12,8 +12,11 @@ from God.Pandora import NumpyEncoder
 from God.Sky import Sky

 # init dropbox
-dbx = dropbox.Dropbox('token here')
-dbx.users_get_current_account()
+try:
+    dbx = dropbox.Dropbox('token here')
+    dbx.users_get_current_account()
+except Exception:
+    print("Dropbox loading failed")

 def read_in_chunks(file_object, chunk_size=1024 * 1000 * 100) -> Iterable:
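Only the signature of read_in_chunks survives in this hunk. For orientation, here is a minimal sketch of a generator matching that signature and of how it might be consumed; the body and the usage below are assumptions, not the file's actual implementation, and the path is a placeholder:

```python
from typing import Iterable

def read_in_chunks(file_object, chunk_size=1024 * 1000 * 100) -> Iterable:
    # Yield successive byte chunks until the file is exhausted, so large
    # simulation outputs never have to fit in memory at once.
    while True:
        data = file_object.read(chunk_size)
        if not data:
            break
        yield data

# Hypothetical usage: stream a local results file chunk by chunk,
# e.g. to feed a Dropbox upload session.
with open("simulation_data/test-machine.json", "rb") as f:
    for i, chunk in enumerate(read_in_chunks(f, chunk_size=4 * 1024 * 1024)):
        print(f"chunk {i}: {len(chunk)} bytes")
```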
...
+import logging
 from typing import Callable
 import numpy as np
+import paramiko
 import God.Life as Life
 from God.Bird import Bird
@@ -9,6 +11,10 @@ from God.DataVisualisation import Visualiser
 from God.Physics import Physics
 from God.Sky import Sky

+log = logging.getLogger('experiments')
+log.addHandler(logging.StreamHandler())
+log.setLevel(logging.INFO)
+
 def launch_simulation_random(output_file: str, L: float, n_birds: int, vel: float = 1, ang_vel: float = np.pi / 2,
                              interaction_radius: float = 1,
@@ -37,60 +43,52 @@ def launch_two_groups(output_file: str, L: float, n_birds_1: int, n_birds_2: int
     physics = Physics(sky, interaction_radius, eta)
     Life.simulate(physics, dt, total_time, verbose_prop=.1, output_file=output_file)
launch_simulation_random("simulation_data/test-machine.json", L=1000, n_birds=10000, eta=.2, total_time=10) # , evolve=evolve)
#
# # launch_two_groups("simulation_data/test.json", L=100, n_birds_1=100, n_birds_2=0, radius_1=5, radius_2=5,
# # total_time=60, center_1=[50, 50], center_2=[80, 50], angle_1=0, angle_2=np.pi, eta=.4)
to_process = ["avg_speed", "avg_angle", "group_size", "group_size_avg", "group_size_avg_fit", "groups", "correlations",
"correlations_fit", "group_to_size", "group_hulls"]
Processor().process("simulation_data/test-machine.json", "processing_data/test-machine", verbose_prop=.1, to_process=to_process,
options={"correlations_stochastic_points": 5000})
to_draw = ["avg_speed", "avg_angle", "avg_polar", "angle_pdf", "correlations", "correlations_fit", "correlation_length",
"group_size", "group_size_avg", "group_size_avg_fit", "group_dimension", "group_dimension_avg", "quiver", "evolution_group_size"]
Visualiser("processing_data/test-machine", "visualisations/test-machine.mp4", simulation_data_file="simulation_data/test-machine.json",
verbose_prop=.1,
to_draw=to_draw,
options={"quiver_color_by_group": True, "quiver_draw_by_group": True, "quiver_color_single": True, "max_group_size": 5000,
"max_num_groups": 50000}).vizualize()
-# N = [10000]
-#
-# L = [1000]
-#
-# Eta = [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
-#
-# angle_vel = 2 * np.pi
-#
-# T = 200
-#
-# for i in range(len(N)):
-#     for j in range(len(L)):
-#         for k in range(len(Eta)):
-#             name = "N_" + str(N[i]) + "_L_" + str(L[j]) + "_Eta_" + str(Eta[k]) + "_angle_vel_" + "pi" + "_T_" + str(T)
-#
-#             launch_simulation("simulation_data/04_11_18/" + name + ".json", L=L[j], n_birds=N[i], ang_vel=angle_vel,
-#                               eta=Eta[k], total_time=T)  # , evolve=evolve)
-#
-#             to_process = ["avg_speed", "avg_angle", "group_size", "group_size_avg", "group_size_avg_fit",
-#                           "correlations", "correlations_fit"]
-#
-#             Processor().process("simulation_data/04_11_18/" + name + ".json", "processing_data/04_11_18/" + name,
-#                                 verbose_prop=.1, to_process=to_process)
-#
-#             to_draw = ["avg_speed", "avg_angle", "avg_polar", "correlations", "correlations_fit", "correlation_length",
-#                        "group_size", "group_size_avg", "group_size_avg_fit", "quiver"]
-#
-#             Visualiser("processing_data/04_11_18/" + name, "visualisations/04_11_18/" + name + ".mp4",
-#                        simulation_data_file="simulation_data/04_11_18/" + name + ".json",
-#                        verbose_prop=.1,
-#                        to_draw=to_draw,
-#                        options={"quiver_color_by_group": False, "max_group_size": 60, "max_num_groups": 60}).vizualize()
+def start_experiment(output_name: str, L: float, n_birds: int, eta: float = .5, total_time: float = 100) -> None:
+    launch_simulation_random(f"simulation_data/{output_name}.json", L=L, n_birds=n_birds, eta=eta,
+                             total_time=total_time)
+
+    to_process = ["avg_speed", "avg_angle", "group_size", "group_size_avg", "group_size_avg_fit", "groups",
+                  "correlations", "correlations_fit", "group_to_size", "group_hulls"]
+    Processor().process(f"simulation_data/{output_name}.json", f"processing_data/{output_name}",
+                        verbose_prop=.1, to_process=to_process,
+                        options={"correlations_stochastic_points": 5000})
+
+    to_draw = ["avg_speed", "avg_angle", "avg_polar", "angle_pdf", "correlations", "correlations_fit",
+               "correlation_length", "group_size", "group_size_avg", "group_size_avg_fit", "group_dimension",
+               "group_dimension_avg", "quiver", "evolution_group_size"]
+    Visualiser(f"processing_data/{output_name}", f"visualisations/{output_name}.mp4",
+               simulation_data_file=f"simulation_data/{output_name}.json",
+               verbose_prop=.1,
+               to_draw=to_draw,
+               options={"quiver_color_by_group": True, "quiver_draw_by_group": True, "quiver_color_single": True,
+                        "max_group_size": 5000, "max_num_groups": 50000}).vizualize()
+
+
+def ssh_experiments():
+    # Placeholder address and credentials, filled in at run time.
+    addresses = ["0000.000.000.00"]
+    login, passw = "", ""
+    Ns = [10, 100]
+    ts = [10, 15]
+    l = 10
+    eta = .5
+    current_server = -1
+    for n in Ns:
+        for t in ts:
+            # Distribute the (n, t) jobs over the servers round-robin.
+            current_server += 1
+            current_server_ip = addresses[current_server % len(addresses)]
+            ssh = paramiko.SSHClient()
+            # Accept unknown host keys; connect() raises otherwise for hosts
+            # missing from known_hosts.
+            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+            ssh.connect(current_server_ip, username=login, password=passw)
+            ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
+                f"./python3.6 remote.py --n {n} --l {l} --eta {eta} --t {t}")
+            log.info(f"server: {current_server_ip}, stdin: {ssh_stdin}, stdout: {ssh_stdout}, stderr: {ssh_stderr}")
+
+
+if __name__ == "__main__":
+    ssh_experiments()
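Note that the ssh_stdout/ssh_stderr values returned by exec_command are file-like channels, so the log line above records their reprs rather than the remote program's output. A minimal sketch of collecting the actual output for a single job, assuming a reachable host and valid credentials (the address, login, and command below are placeholders taken from the snippet above):

```python
import paramiko

ssh = paramiko.SSHClient()
# Accept unknown host keys, as in ssh_experiments above.
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect("0000.000.000.00", username="user", password="secret")

stdin, stdout, stderr = ssh.exec_command("./python3.6 remote.py --n 10 --l 10 --eta 0.5 --t 10")
# read() blocks until the remote command exits, then returns all of its output.
print(stdout.read().decode())
print(stderr.read().decode())
ssh.close()
```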
...
+import logging
+
+import click
+
+from experiments import start_experiment
+
+log = logging.getLogger('remote')
+log.addHandler(logging.StreamHandler())
+log.setLevel(logging.INFO)
+
+
+@click.command()
+@click.option('--n', help="number of birds", type=int)
+@click.option('--l', help="size of sky", type=float)
+@click.option('--eta', help="noise", type=float)
+@click.option('--t', help="simulation time", type=float)
+def main(n, l, eta, t):
+    name = f"N_{n}_L_{l}_Eta_{eta}_T_{t}"
+    log.info(f"Starting remote experiment {name}")
+    start_experiment(name, l, n, eta, t)
+
+
+if __name__ == "__main__":
+    main()
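Because remote.py is a plain click command, it can be smoke-tested locally, without SSH, through click's public testing API. A minimal sketch, assuming remote.py and its experiments dependency are importable and using arbitrary parameter values:

```python
from click.testing import CliRunner

from remote import main

# Invoke the CLI in-process, exactly as
# "./python3.6 remote.py --n 10 --l 10 --eta 0.5 --t 10" would on a server.
runner = CliRunner()
result = runner.invoke(main, ["--n", "10", "--l", "10", "--eta", "0.5", "--t", "10"])
print(result.exit_code)
print(result.output)
```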