finished saving and loading of processed models
julrog committed Jun 9, 2020
1 parent c73188e commit 156f92f
Showing 6 changed files with 81 additions and 33 deletions.
10 changes: 10 additions & 0 deletions definitions.py
@@ -14,3 +14,13 @@ def pairwise(it, size: int):
         except StopIteration:
             # no more elements in the iterator
             return
+
+
+def vec4wise(it):
+    it = iter(it)
+    while True:
+        try:
+            yield next(it), next(it), next(it), next(it),
+        except StopIteration:
+            # no more elements in the iterator
+            return
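For context, a brief usage sketch (not part of the commit): vec4wise walks any flat iterable in steps of four and silently drops a trailing partial group.

from definitions import vec4wise

# ten values yield two complete 4-tuples; the trailing pair (8, 9) is dropped
print(list(vec4wise(range(10))))  # [(0, 1, 2, 3), (4, 5, 6, 7)]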
7 changes: 4 additions & 3 deletions gui/data_handler.py
@@ -15,7 +15,7 @@ def __init__(self, path: str):
 
 class ProcessedNNHandler:
     def __init__(self, path: str):
-        layer_data, node_data, edge_data, sample_data = np.load(path, allow_pickle=True)['arr_0']
+        layer_data, node_data, edge_data, sample_data, max_sample_points = np.load(path, allow_pickle=True)['arr_0']
         self.layer_data: List[int] = layer_data
 
         self.node_data: List[np.array] = []
@@ -25,5 +25,6 @@ def __init__(self, path: str):
             self.node_data.append(raw_node_data[node_data_offset:(node_data_offset + nodes)])
             node_data_offset += nodes
 
-        self.edge_data: List[List[List[float]]] = edge_data
-        self.sample_data: List[List[float]] = sample_data
+        self.edge_data: np.array = edge_data.reshape(-1, 28)
+
+        self.sample_data: np.array = sample_data.reshape(-1, max_sample_points * 4)
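As a rough sketch of what the two new reshape calls do (sizes made up): the flat buffers stored in the archive are viewed as one row per edge, 28 attribute floats per edge, matching the edge layout built in models/edge.py below (8 header values plus 10 values copied from each end node), and max_sample_points * 4 floats of padded sample positions per edge.

import numpy as np

num_edges = 3
max_sample_points = 10  # illustrative; the real value is read from the archive

flat_edges = np.zeros(num_edges * 28, dtype=np.float32)
flat_samples = np.zeros(num_edges * max_sample_points * 4, dtype=np.float32)

print(flat_edges.reshape(-1, 28).shape)                       # (3, 28): one attribute row per edge
print(flat_samples.reshape(-1, max_sample_points * 4).shape)  # (3, 40): one padded sample row per edge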
46 changes: 33 additions & 13 deletions models/edge.py
@@ -1,23 +1,43 @@
 import random
+import numpy as np
 from typing import List
 
-from pyrr import Vector4, Vector3
+from pyrr import Vector4
 
+from definitions import vec4wise
 from models.node import Node
 
 
 class Edge:
-    def __init__(self, start_node: Node, end_node: Node, importance: float = None):
-        self.start_node: Node = start_node
-        self.end_node: Node = end_node
-        self.start: Vector4 = Vector4([start_node.position.x, start_node.position.y, start_node.position.z, 1.0])
-        self.end: Vector4 = Vector4([end_node.position.x, end_node.position.y, end_node.position.z, 0.0])
-        self.initial_data: List[float] = [start_node.position.x, start_node.position.y, start_node.position.z, 1.0,
-                                          end_node.position.x, end_node.position.y, end_node.position.z, 0.0]
-        self.sample_points: List[Vector4] = [self.start, self.end]
-        if importance is None:
-            importance = 1.0  # random.random()
-        self.data: List[float] = [2.0, start_node.output_edges, end_node.input_edges, importance, start_node.data[15],
-                                  end_node.data[15], start_node.data[14], end_node.data[14]]
+    def __init__(self):
+        self.data: List[float] = []
+        self.sample_data: List[float] = []
+
+    def data_init(self, data: np.array, sample_data: np.array):
+        self.data = []
+        for d in data:
+            self.data.append(d)
+        self.sample_data = []
+        for sd in sample_data:
+            self.sample_data.append(sd)
+        return self
+
+    def importance_init(self, start_node: Node, end_node: Node, layer_id: int, layer_edge_id: int, importance: float):
+        self.data = []
+        self.data = [2.0, layer_id, layer_edge_id, importance, start_node.data[15], end_node.data[15],
+                     start_node.data[14], end_node.data[14]]
+        self.data.extend(start_node.data[4:14])
+        self.data.extend(end_node.data[4:14])
+        self.sample_data = [start_node.position.x, start_node.position.y, start_node.position.z, 1.0,
+                            end_node.position.x, end_node.position.y, end_node.position.z, 0.0]
+        return self
+
+    def random_importance_init(self, start_node: Node, end_node: Node, layer_id: int, layer_edge_id: int):
+        importance: float = random.random()
+        self.data = [2.0, layer_id, layer_edge_id, importance, start_node.data[15], end_node.data[15],
+                     start_node.data[14], end_node.data[14]]
+        self.data.extend(start_node.data[4:14])
+        self.data.extend(end_node.data[4:14])
+        self.sample_data = [start_node.position.x, start_node.position.y, start_node.position.z, 1.0,
+                            end_node.position.x, end_node.position.y, end_node.position.z, 0.0]
+        return self
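A short usage sketch of the reworked Edge class (array contents are dummies): the one-shot constructor is replaced by an empty __init__ plus chainable initializers; data_init is the path taken when a processed model is loaded, while the two *_importance_init variants build an edge from two Node objects.

import numpy as np
from models.edge import Edge

edge_row = np.zeros(28, dtype=np.float32)       # 28 attribute floats per edge
sample_row = np.zeros(6 * 4, dtype=np.float32)  # e.g. 6 sample points, 4 floats each

edge = Edge().data_init(edge_row, sample_row)
print(len(edge.data), len(edge.sample_data))    # 28 24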
33 changes: 22 additions & 11 deletions models/network.py
@@ -29,7 +29,9 @@ def __init__(self, layer: List[int], layer_width: float, layer_distance: float,
                                           [abs(self.bounding_range.x), abs(self.bounding_range.y), abs(self.bounding_range.z)])
 
         self.layer_nodes: List[List[Node]] = []
-        self.edge_data: List[np.array] = []
+        self.edge_data: np.array = []
+        self.sample_data: np.array = []
+        self.edge_importance_only: bool = False
 
         if importance_data is not None:
             self.layer_nodes: List[List[Node]] = create_nodes_with_importance(self.layer, self.bounding_mid,
@@ -41,9 +43,11 @@ def __init__(self, path: str):
                                                                                self.bounding_volume[1].z),
                                                                               importance_data.node_importance_data)
             self.edge_data = importance_data.edge_importance_data
+            self.edge_importance_only = True
         elif processed_nn is not None:
             self.layer_nodes: List[List[Node]] = create_nodes_from_data(self.layer, processed_nn.node_data)
             self.edge_data = processed_nn.edge_data
+            self.sample_data = processed_nn.sample_data
         else:
             self.layer_nodes: List[List[Node]] = create_random_nodes(self.layer, self.bounding_mid,
                                                                      (self.bounding_volume[0].x,
@@ -82,21 +86,29 @@ def generate_edges(self) -> List[Edge]:
             for i in range(len(self.layer) - 1):
                 for node_one_i, node_one in enumerate(self.layer_nodes[i]):
                     for node_two_i, node_two in enumerate(self.layer_nodes[i + 1]):
-                        new_edge: Edge = Edge(node_one, node_two)
+                        new_edge: Edge = Edge().random_importance_init(node_one, node_two, i, node_one_i * len(
+                            self.layer_nodes[i + 1]) + node_two_i)
+
                         if new_edge.data[3] * new_edge.data[4] > self.importance_prune_threshold:
                             edges.append(new_edge)
                         else:
                             self.pruned_edges += 1
         else:
             for i in range(len(self.layer) - 1):
-                for node_one_i, node_one in enumerate(self.layer_nodes[i]):
-                    for node_two_i, node_two in enumerate(self.layer_nodes[i + 1]):
-                        new_edge: Edge = Edge(node_one, node_two,
-                                              self.edge_data[i][node_one_i][node_two_i])
-                        if new_edge.data[3] * new_edge.data[6] > self.importance_prune_threshold:
-                            edges.append(new_edge)
-                        else:
-                            self.pruned_edges += 1
+                if self.edge_importance_only:
+                    for node_one_i, node_one in enumerate(self.layer_nodes[i]):
+                        for node_two_i, node_two in enumerate(self.layer_nodes[i + 1]):
+                            new_edge: Edge = Edge().importance_init(node_one, node_two, i, node_one_i * len(
+                                self.layer_nodes[i + 1]) + node_two_i, self.edge_data[i][node_one_i][node_two_i])
+
+                            if new_edge.data[3] * new_edge.data[6] > self.importance_prune_threshold:
+                                edges.append(new_edge)
+                            else:
+                                self.pruned_edges += 1
+                else:
+                    for edge_data, sample_data in zip(self.edge_data, self.sample_data):
+                        new_edge: Edge = Edge().data_init(edge_data, sample_data)
+                        edges.append(new_edge)
         return edges
 
     def generate_max_distance(self) -> float:
@@ -136,7 +148,6 @@ def get_average_node_distance(self) -> float:
             distance_value_count += len(self.layer_nodes[i]) * (len(self.layer_nodes[i]) - 1)
         for i in range(len(self.layer)):
             layer_distance_sum: float = 0.0
-            nodes: List[Node] = self.layer_nodes[i]
             for node_one in self.layer_nodes[i]:
                 for node_two in self.layer_nodes[i]:
                     layer_distance_sum += math.sqrt(
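As a minimal sketch of the pruning rule in the two importance branches above (keep_edge is a hypothetical helper and the threshold value is made up): an edge survives when its importance, data[3], multiplied by a node attribute copied from the start node, data[4] in the random branch or data[6] in the importance-only branch, exceeds importance_prune_threshold; the data_init branch adds every loaded edge without pruning.

importance_prune_threshold = 0.1  # illustrative value


def keep_edge(edge_data, importance_only: bool) -> bool:
    # data[3] holds the edge importance; data[4] and data[6] hold attributes
    # copied from the start node when the edge was built
    node_factor = edge_data[6] if importance_only else edge_data[4]
    return edge_data[3] * node_factor > importance_prune_threshold


print(keep_edge([2.0, 0, 0, 0.8, 0.5, 0.0, 0.2, 0.0], importance_only=False))   # True  (0.8 * 0.5 > 0.1)
print(keep_edge([2.0, 0, 0, 0.8, 0.5, 0.0, 0.05, 0.0], importance_only=True))   # False (0.8 * 0.05 < 0.1)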
11 changes: 7 additions & 4 deletions processing/edge_processing.py
@@ -49,13 +49,17 @@ def set_data(self, network: NetworkModel):
         # estimate a suitable sample size for buffer objects
         max_distance: float = network.generate_max_distance()
         self.max_sample_points = int((max_distance * 2.0) / self.sample_length) + 2
-        self.smooth_radius = (self.max_sample_points * 8.0)/100.0
+        self.smooth_radius = (self.max_sample_points * 8.0) / 100.0
 
         # generate and load initial data for the buffer
         initial_data: List[float] = []
+        if len(self.edges[0].sample_data) > 8:
+            self.sampled = True
+            self.max_sample_points = int(len(self.edges[0].sample_data) / 4)
         for edge in self.edges:
-            initial_data.extend(edge.initial_data)
-            initial_data.extend([0] * (self.max_sample_points * 4 - len(edge.initial_data)))
+            initial_data.extend(edge.sample_data)
+            if self.max_sample_points * 4 - len(edge.sample_data) > 0:
+                initial_data.extend([0] * (self.max_sample_points * 4 - len(edge.sample_data)))
         transfer_data = np.array(initial_data, dtype=np.float32)
         self.sample_buffer.load(transfer_data)
         self.sample_buffer.swap()
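A small sketch of the buffer layout set_data now produces (numbers illustrative): every edge occupies a fixed slot of max_sample_points * 4 floats in the sample buffer, its own sample data first and zero padding after, so a freshly generated edge (start and end point only) and a fully pre-sampled edge from a loaded model end up in equally sized slots.

import numpy as np

max_sample_points = 6            # illustrative
slot = max_sample_points * 4     # floats reserved per edge in the sample buffer

edge_samples = [
    np.arange(8, dtype=np.float32),   # unsampled edge: start and end point only
    np.arange(24, dtype=np.float32),  # pre-sampled edge: already 6 points of 4 floats
]

initial_data = []
for sample_data in edge_samples:
    initial_data.extend(sample_data)
    if slot - len(sample_data) > 0:
        initial_data.extend([0] * (slot - len(sample_data)))

print(np.array(initial_data, dtype=np.float32).shape)  # (48,): two slots of 24 floats each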
@@ -203,4 +207,3 @@ def delete(self):
         self.sample_buffer.delete()
         self.edge_buffer.delete()
         self.ssbo_handler.delete()
-
7 changes: 5 additions & 2 deletions processing/network_processing.py
@@ -51,7 +51,8 @@ def __init__(self, layer_nodes: List[int],
         print("[%s] Prepare edge processing..." % LOG_SOURCE)
         self.edge_processor: EdgeProcessor = EdgeProcessor(self.sample_length)
         self.edge_processor.set_data(self.network)
-        self.edge_processor.init_sample_edge()
+        if not self.edge_processor.sampled:
+            self.edge_processor.init_sample_edge()
         self.edge_renderer: EdgeRenderer = EdgeRenderer(self.edge_processor, self.grid)
 
         print("[%s] Prepare grid processing..." % LOG_SOURCE)
@@ -185,7 +186,9 @@ def save_model(self, file_path: str):
         edge_data: List[float] = self.edge_processor.read_edges_from_buffer(raw=True)
         sample_data: List[float] = self.edge_processor.read_samples_from_sample_storage(raw=True,
                                                                                         auto_resize_enabled=False)
-        np.savez(file_path, (layer_data, node_data, edge_data, sample_data))
+        max_sample_points: int = self.edge_processor.max_sample_points
+        print(max_sample_points)
+        np.savez(file_path, (layer_data, node_data, edge_data, sample_data, max_sample_points))
 
     def delete(self):
         self.node_processor.delete()
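Putting both sides of the commit together, a condensed round-trip sketch (file name and shapes are made up): save_model packs the five pieces into a single object array inside an .npz archive, and ProcessedNNHandler unpacks and reshapes them on load. The commit passes a plain tuple to np.savez, which older numpy versions coerce to an object array implicitly; the sketch builds the object array explicitly to keep it version-proof.

import numpy as np

# saving side (mirrors save_model)
layer_data = [4, 3]
node_data = np.zeros(7 * 16, dtype=np.float32)        # illustrative flat node buffer
edge_data = np.zeros(12 * 28, dtype=np.float32)       # 12 edges, 28 attribute floats each
sample_data = np.zeros(12 * 6 * 4, dtype=np.float32)  # 12 edges, 6 sample points each
max_sample_points = 6

packed = np.empty(5, dtype=object)
for i, part in enumerate([layer_data, node_data, edge_data, sample_data, max_sample_points]):
    packed[i] = part
np.savez('processed_model.npz', packed)

# loading side (mirrors ProcessedNNHandler.__init__)
loaded = np.load('processed_model.npz', allow_pickle=True)['arr_0']
layer_data, node_data, edge_data, sample_data, max_sample_points = loaded
print(edge_data.reshape(-1, 28).shape)                       # (12, 28)
print(sample_data.reshape(-1, max_sample_points * 4).shape)  # (12, 24)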
