Changed metadata directory
parent 3b022b33ad · commit 1485ade538
@@ -231,26 +231,26 @@ class SimulationDeSerializer:
         self._save_dir = save_dir
         self._results_dir = results_dir
 
         Path(self._save_dir).mkdir(parents=True, exist_ok=True)
         Path(self._results_dir).mkdir(parents=True, exist_ok=True)
 
     def _get_savefile_path(self, sim_name):
-        return f"{self._save_dir}/{misc.slugify(sim_name)}.pickle"
+        return f"{self._save_dir}/{misc.slugify(sim_name)}_sim_state.pickle"
 
     def _get_metadata_path(self, sim_name):
-        return f"{self._save_dir}/{misc.slugify(sim_name)}.json"
+        return f"{self._results_dir}/{misc.slugify(sim_name)}_metadata.json"
 
     def _get_results_path(self, sim_name):
         return f"{self._results_dir}/{misc.slugify(sim_name)}.csv"
 
     def unfinished_sim_present(self, sim_name: str):
-        return os.path.isfile(self._get_savefile_path(sim_name)) \
-            and os.path.isfile(self._get_metadata_path(sim_name))
+        return os.path.isfile(self._get_savefile_path(sim_name))
 
     def remove_unfinished_sim(self, sim_name):
         os.remove(self._get_savefile_path(sim_name))
-        os.remove(self._get_metadata_path(sim_name))
+        # os.remove(self._get_metadata_path(sim_name))
 
     def save_state(self, simulator: typing.Any, sim_name: str, metadata: typing.Dict) -> None:
         Path(self._save_dir).mkdir(parents=True, exist_ok=True)
 
         # Save metadata
         with open(self._get_metadata_path(sim_name), 'w+', encoding='utf-8') as f:
             json.dump(metadata, f, ensure_ascii=False, indent=4)
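For orientation, here is a rough sketch of where the three path helpers point after this change. It assumes the constructor takes save_dir and results_dir as shown above and uses a stand-in for misc.slugify, whose exact output format is not part of this diff:

    # Illustration only; directory names and the slug format are assumptions.
    def slugify(name: str) -> str:
        # Stand-in for misc.slugify.
        return name.lower().replace(" ", "-")

    save_dir, results_dir = "saves", "results"
    sim_name = "My Sim"

    print(f"{save_dir}/{slugify(sim_name)}_sim_state.pickle")  # simulator checkpoint: stays in save_dir, new suffix
    print(f"{results_dir}/{slugify(sim_name)}_metadata.json")  # metadata: moved from save_dir to results_dir
    print(f"{results_dir}/{slugify(sim_name)}.csv")            # results CSV: unchanged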
@@ -259,9 +259,6 @@ class SimulationDeSerializer:
         with open(self._get_savefile_path(sim_name), "wb") as file:
             pickle.dump(simulator, file)
 
-        # Save results
-        self.save_results(simulator, sim_name, metadata)
-
     def read_state(self, sim_name: str) -> typing.Tuple[typing.Any, typing.Dict]:
         metadata = None
         simulator = None
@@ -277,8 +274,11 @@ class SimulationDeSerializer:
         return simulator, metadata
 
     def save_results(self, simulator: typing.Any, sim_name: str, metadata: typing.Dict) -> None:
         Path(self._save_dir).mkdir(parents=True, exist_ok=True)
+        # Save metadata
+        with open(self._get_metadata_path(sim_name), 'w+', encoding='utf-8') as f:
+            json.dump(metadata, f, ensure_ascii=False, indent=4)
 
         # Save results
         SNRs, BERs = simulator.SNRs_and_BERs
 
         data_dict = {"SNR": SNRs}
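Since save_state no longer invokes save_results, callers that want both the checkpoint and the CSV presumably have to make two explicit calls now. A rough sketch under that assumption (the serializer construction, the sim object, and the metadata contents are placeholders, not taken from this diff):

    # Hypothetical call pattern after this change; `sim` stands for a Simulator exposing SNRs_and_BERs.
    serializer = SimulationDeSerializer(save_dir="saves", results_dir="results")
    metadata = {"labels": ["BPSK", "QPSK"]}

    serializer.save_state(sim, "My Sim", metadata)    # checkpoint: pickle + metadata JSON
    serializer.save_results(sim, "My Sim", metadata)  # metadata JSON + results CSV, now called explicitly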
@@ -288,11 +288,15 @@ class SimulationDeSerializer:
         df = pd.DataFrame(data_dict)
         df.to_csv(self._get_results_path(sim_name))
 
-    # TODO: Read metadata
-    def read_results(self, sim_name: str) -> pd.DataFrame:
-        data = pd.read_csv(self._get_results_path(sim_name))
+    def read_results(self, sim_name: str) -> typing.Tuple[pd.DataFrame, typing.Dict]:
+        # Read metadata
+        with open(self._get_metadata_path(sim_name), 'r', encoding='utf-8') as f:
+            metadata = json.load(f)
 
-        return data
+        # Read results
+        results = pd.read_csv(self._get_results_path(sim_name))
+
+        return results, metadata
 
 
     # TODO: Fix typing.Any or Simulator
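Because read_results now returns a (DataFrame, metadata) tuple instead of a bare DataFrame, existing call sites presumably need to unpack both values. A minimal sketch, reusing the hypothetical serializer from the earlier example:

    # Hypothetical call-site update; variable names are placeholders.
    results, metadata = serializer.read_results("My Sim")  # previously: data = serializer.read_results("My Sim")
    print(metadata.get("labels"))                          # e.g. the column labels stored in the metadata JSON
    print(results.head())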
@@ -324,11 +328,11 @@ class SimulationManager:
             and (self._sim_name is not None)\
             and (self._metadata is not None)
 
-    def configure_simulation(self, simulator: typing.Any, name: str, column_names: typing.Sequence[str]) -> None:
+    def configure_simulation(self, simulator: typing.Any, name: str, column_labels: typing.Sequence[str]) -> None:
         """Configure a new simulation."""
         self._simulator = simulator
         self._sim_name = name
-        self._metadata["column_names"] = column_names
+        self._metadata["labels"] = column_labels
 
     def unfinished_simulation_present(self, sim_name: str) -> bool:
         """Check whether the savefile of a previously unfinished simulation is present."""
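The configure_simulation rename also moves the stored key from "column_names" to "labels", so code that reads the metadata JSON has to follow suit. A small sketch of a tolerant lookup, with a placeholder file path (the real path comes from _get_metadata_path):

    import json

    # Hypothetical migration for metadata consumers; the path is a placeholder.
    with open("results/my-sim_metadata.json", "r", encoding="utf-8") as f:
        metadata = json.load(f)

    labels = metadata.get("labels", metadata.get("column_names"))  # tolerate files written before this commit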