Skip to content

Commit

Permalink
Merge pull request #321 from gnn-tracking/graph-param-scan
Browse files Browse the repository at this point in the history
  • Loading branch information
klieret authored Apr 26, 2023
2 parents 0b57382 + d499aa4 commit 75cc602
Show file tree
Hide file tree
Showing 3 changed files with 75 additions and 1 deletion.
2 changes: 1 addition & 1 deletion src/gnn_tracking/graph_construction/build_graphs.slurm
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
# To submit jobs for every stream:
# for i in {1..9}; do export PART=$i; sbatch build_graphs.slurm; done

#SBATCH --job-name=build-point-clouds # create a short name for your job
#SBATCH --job-name=build-graphs # create a short name for your job
#SBATCH --nodes=1 # node count
#SBATCH --ntasks=1 # total number of tasks across all nodes
#SBATCH --cpus-per-task=1 # cpu-cores per task (>1 if multi-threaded tasks)
Expand Down
57 changes: 57 additions & 0 deletions src/gnn_tracking/graph_construction/build_graphs_hpo.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
from __future__ import annotations

import json
import random
import uuid
from datetime import datetime
from pathlib import Path
from tempfile import TemporaryDirectory

from graph_builder import GraphBuilder

from gnn_tracking.utils.log import logger
from gnn_tracking.utils.versioning import get_commit_hash

# Number of graphs built (and measured) per sampled parameter point.
N_GRAPHS_MEASURED = 10
# Number of random parameter points sampled per job invocation; results
# from parallel jobs are merged later via the per-job UUID-named files.
N_EXPERIMENTS = 1
# Input point clouds to build graphs from.
GRAPH_INDIR = Path(
    "/scratch/gpfs/IOJALVO/gnn-tracking/object_condensation/point_clouds_v2/part_1"
)
# Where the per-job JSON result files are written.
MEASUREMENT_OUTDIR = Path(
    "/scratch/gpfs/IOJALVO/gnn-tracking/object_condensation/measurements"
)

if __name__ == "__main__":
    # Random-search scan over graph-construction cuts: each experiment
    # samples (phi_slope_max, z0_max, dR_max), builds N_GRAPHS_MEASURED
    # graphs, and records the builder's measurements with the parameters.
    results = {
        "gnn_tracking_commit_hash": get_commit_hash(),
        "date": datetime.now().strftime("%Y-%m-%d, %H:%M:%S"),
        "results": [],
        "input_dir": str(GRAPH_INDIR),
        "n_graphs_measured": N_GRAPHS_MEASURED,
    }
    logger.debug(results)
    for _ in range(N_EXPERIMENTS):
        params = dict(
            phi_slope_max=random.uniform(0.002, 0.008),
            z0_max=random.uniform(150, 400),
            dR_max=random.uniform(1, 5.0),
        )
        logger.debug(params)
        # Keep the scratch directory alive for the whole run: the previous
        # `Path(TemporaryDirectory().name)` left the TemporaryDirectory
        # object unreferenced, so its finalizer could delete the directory
        # before (or while) the builder used it. The context manager also
        # guarantees cleanup of anything written there.
        with TemporaryDirectory() as tmpdir:
            graph_builder = GraphBuilder(
                indir=GRAPH_INDIR,
                outdir=Path(tmpdir),
                redo=True,
                measurement_mode=True,
                log_level=1,
                collect_data=False,  # only measurements are kept, not graphs
                **params,
            )
            graph_builder.process(stop=N_GRAPHS_MEASURED)
            measurements = graph_builder.get_measurements()
        logger.debug(measurements)
        # One record per experiment: measurement dict merged with the
        # sampled parameters (parameters win on key collisions).
        results["results"].append(measurements | params)

    # Unique per-job filename so concurrently running slurm jobs never
    # overwrite each other's results.
    random_id = uuid.uuid1()
    MEASUREMENT_OUTDIR.mkdir(parents=True, exist_ok=True)
    with (MEASUREMENT_OUTDIR / f"results-{random_id}.json").open("w") as outf:
        json.dump(results, outf)
17 changes: 17 additions & 0 deletions src/gnn_tracking/graph_construction/build_graphs_hpo.slurm
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#!/bin/bash
# Slurm batch script: runs one invocation of the random-search graph
# construction parameter scan (build_graphs_hpo.py). Submit multiple
# copies of this job to collect more samples; each job writes its own
# uniquely named JSON result file.

#SBATCH --job-name=build-graphs-hpo # create a short name for your job
#SBATCH --nodes=1 # node count
#SBATCH --ntasks=1 # total number of tasks across all nodes
#SBATCH --cpus-per-task=1 # cpu-cores per task (>1 if multi-threaded tasks)
#SBATCH --mem=32G # total memory per node (4 GB per cpu-core is default)
#SBATCH --time=1:02:00 # total run time limit (HH:MM:SS)
#SBATCH --output=build-graphs-hpo-%j.log

# bash strict mode
# -e: exit on error; -u: error on unset vars; -o pipefail: fail a pipeline
# if any stage fails. Restrictive IFS avoids word-splitting surprises.
set -euo pipefail
IFS=$'\n\t'

python build_graphs_hpo.py

echo "Finished"

0 comments on commit 75cc602

Please sign in to comment.