Add files via upload

David Rotermund 2023-02-06 09:56:18 +01:00 committed by GitHub
parent a537f3e356
commit 7c2a974e67
2 changed files with 35 additions and 31 deletions


@@ -1,47 +1,47 @@
 import os
 os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
 import matplotlib.pyplot as plt
+which_scalar = "Test Error"
 from tensorboard.backend.event_processing import event_accumulator
 import numpy as np
+import json
+from jsmin import jsmin
 import glob
-path_runs: str = "./Log/*"
-temp = glob.glob(path_runs)
-assert len(temp) == 1
-path = temp[0]
-acc = event_accumulator.EventAccumulator(path)
-acc.Reload()
-available_scalar = acc.Tags()["scalars"]
-available_histograms = acc.Tags()["histograms"]
-which_scalar = "Test Error"
-te = acc.Scalars(which_scalar)
-temp = []
-for te_item in te:
-    temp.append((te_item[1], te_item[2]))
-temp = np.array(temp)
-print(temp)
-np.save(f"test_error.npy", temp)
+# -------------------------------
+log_paths: str = "Log*"
+log_paths_list = glob.glob(log_paths)
+assert len(log_paths_list) > 0
+filename: str = "def.json"
+with open(filename) as json_file:
+    minified = jsmin(json_file.read())
+    data = json.loads(minified)
+for path in log_paths_list:
+    print(path)
+    temp = path.split("_")
+    if len(temp) == 2:
+        parameter: str | None = temp[-1]
+    else:
+        parameter = None
+    # ----------------------
+    temp = glob.glob(path)
+    assert len(temp) == 1
+    acc = event_accumulator.EventAccumulator(path)
+    acc.Reload()
+    # -------------------------------
+    # Check if the requested scalar exists
+    available_scalar = acc.Tags()["scalars"]
+    # available_histograms = acc.Tags()["histograms"]
+    available_scalar.index(which_scalar)
+    te = acc.Scalars(which_scalar)
+    np_temp = np.zeros((len(te), 2))
+    for id in range(0, len(te)):
+        np_temp[id, 0] = te[id][1]
+        np_temp[id, 1] = te[id][2]
+    print(np_temp)
+    if parameter is not None:
+        np.save(f"result_{parameter}.npy", np_temp)
+    else:
+        np.save(f"result.npy", np_temp)


@@ -30,8 +30,10 @@ from network.loop_train_test import (
     run_lr_scheduler,
     loop_test_reconstruction,
 )
+from network.SbSReconstruction import SbSReconstruction
+from network.InputSpikeImage import InputSpikeImage
 from network.SbSLayer import SbSLayer
 from torch.utils.tensorboard import SummaryWriter
@@ -155,6 +157,8 @@ if order_id is not None:
+if isinstance(network[0], InputSpikeImage) is True:
+    network[0].number_of_spikes = number_of_spikes_in_input_layer
 if isinstance(network[0], SbSLayer) is True:
     network[0]._number_of_spikes = number_of_spikes_in_input_layer
 last_test_performance: float = -1.0
 with torch.no_grad():
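
For context, a minimal sketch (not taken from this repository) of how a "Test Error" scalar could be written with torch.utils.tensorboard.SummaryWriter so that the extraction script above can find it; the Log_10 directory name, the loop, and the logged values are illustrative only.

from torch.utils.tensorboard import SummaryWriter

# Illustrative log directory: the extraction script globs for "Log*" and
# treats the text after "_" as the parameter used in result_<parameter>.npy.
tb = SummaryWriter(log_dir="Log_10")

for epoch in range(3):
    test_error = 100.0 / (epoch + 1)  # placeholder value, not a real measurement
    tb.add_scalar("Test Error", test_error, epoch)  # tag matches which_scalar above

tb.close()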