Skip to content

Commit

Permalink
improved visualization
Browse files Browse the repository at this point in the history
  • Loading branch information
julrog committed Jun 24, 2020
1 parent 1caa5a4 commit 23aa733
Show file tree
Hide file tree
Showing 21 changed files with 353 additions and 208 deletions.
7 changes: 4 additions & 3 deletions automation/create_processed_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def process_loop(processor: NetworkProcessor):
processor.process(4, True)


def process_network(network_name: str, importance_type: str, prune_rate: float = 0.9):
def process_network(network_name: str, importance_type: str, prune_rate: float = 0.9, edge_importance_type: int = 0):
window_handler: WindowHandler = WindowHandler()
window: Window = window_handler.create_window("Testing", 1, 1, 1)
window.set_position(0, 0)
Expand All @@ -38,11 +38,12 @@ def process_network(network_name: str, importance_type: str, prune_rate: float =
prune_percentage=prune_rate,
node_bandwidth_reduction=0.98,
edge_bandwidth_reduction=0.9,
edge_importance_type=0)
edge_importance_type=edge_importance_type)

process_loop(network_processor)

network_processor.save_model(DATA_PATH + "model/%s/%s_processed.npz" % (network_name, importance_type))
network_processor.save_model(
DATA_PATH + "model/%s/%s_processed_eit%i.npz" % (network_name, importance_type, edge_importance_type))

network_processor.delete()
window_handler.destroy()
6 changes: 3 additions & 3 deletions gui/window.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,13 +238,13 @@ def __init__(self):

self.grid_render_settings: RenderSettings = RenderSettings(self.render_frame, "Grid", self.change_setting,
["None", "Cube", "Point"], 0, row=0, column=0)
edge_shader_settings: Dict[str, any] = {"Size": 0.05, "Base Opacity": 0.0, "Base Density Opacity": 0.0,
edge_shader_settings: Dict[str, any] = {"Size": 0.1, "Base Opacity": 0.0, "Importance Opacity": 1.0,
"Density Exponent": 0.1, "Importance Threshold": 0.01}
self.edge_render_settings: RenderSettings = RenderSettings(self.render_frame, "Edge", self.change_setting,
["None", "Sphere", "Sphere_Transparent",
"Ellipsoid_Transparent", "Line", "Point"],
4, edge_shader_settings, row=1, column=0)
node_shader_settings: Dict[str, any] = {"Size": 0.05, "Base Opacity": 0.0, "Base Density Opacity": 0.0,
3, edge_shader_settings, row=1, column=0)
node_shader_settings: Dict[str, any] = {"Size": 0.05, "Base Opacity": 0.0, "Importance Opacity": 1.0,
"Density Exponent": 0.1, "Importance Threshold": 0.01}
self.node_render_settings: RenderSettings = RenderSettings(self.render_frame, "Node", self.change_setting,
["None", "Sphere", "Sphere_Transparent", "Point"], 2,
Expand Down
64 changes: 63 additions & 1 deletion models/edge.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import random
import numpy as np
from typing import List
from typing import List, Tuple

from pyrr import Vector4

Expand Down Expand Up @@ -64,3 +64,65 @@ def split_edges_for_buffer(edges: List[List[Edge]], edge_container_size: int = 1
split_edges.append(split_layer_edge_container)

return split_edges


def create_edges_random(layer_nodes: List[List[Node]]) -> List[List[Edge]]:
    """Fully connect each pair of consecutive node layers with random edges.

    Returns one list of Edge objects per layer gap; each edge is randomly
    initialized and carries a flat index of source_id * fan_out + target_id.
    """
    edges: List[List[Edge]] = []
    for layer_id in range(len(layer_nodes) - 1):
        source_layer = layer_nodes[layer_id]
        target_layer = layer_nodes[layer_id + 1]
        fan_out: int = len(target_layer)
        current_layer_edges: List[Edge] = [
            Edge().random_importance_init(source_node, target_node, layer_id,
                                          source_id * fan_out + target_id)
            for source_id, source_node in enumerate(source_layer)
            for target_id, target_node in enumerate(target_layer)
        ]
        edges.append(current_layer_edges)
    return edges


def create_edges_importance(layer_nodes: List[List[Node]], edge_data: np.array) -> List[List[Edge]]:
    """Fully connect consecutive node layers, initializing edges from importance data.

    edge_data[i][a][b] is the importance value of the edge from node a in
    layer i to node b in layer i + 1. Returns one edge list per layer gap.
    """
    edges: List[List[Edge]] = []
    for layer_id in range(len(layer_nodes) - 1):
        source_layer = layer_nodes[layer_id]
        target_layer = layer_nodes[layer_id + 1]
        fan_out: int = len(target_layer)
        current_layer_edges: List[Edge] = [
            Edge().importance_init(source_node, target_node, layer_id,
                                   source_id * fan_out + target_id,
                                   edge_data[layer_id][source_id][target_id])
            for source_id, source_node in enumerate(source_layer)
            for target_id, target_node in enumerate(target_layer)
        ]
        edges.append(current_layer_edges)
    return edges


def create_edges_processed(edge_data: np.array, sample_data: np.array) -> List[List[Edge]]:
    """Rebuild Edge objects from previously processed (saved) edge and sample data.

    edge_data and sample_data are parallel nested sequences indexed as
    [layer][container][edge]. Containers are flattened so the result holds
    exactly one edge list per layer.

    :param edge_data: per-edge processed data, grouped by layer and container
    :param sample_data: per-edge sample data, parallel to edge_data
    :return: one flat list of Edge objects per layer
    """
    edges: List[List[Edge]] = []
    for layer_edge_data, layer_sample_data in zip(edge_data, sample_data):
        layer_edges: List[Edge] = []
        for container_edge_data, container_sample_data in zip(layer_edge_data, layer_sample_data):
            # Loop variables renamed: the originals shadowed the function
            # parameters edge_data/sample_data, which is confusing and
            # error-prone for future edits.
            for single_edge_data, single_sample_data in zip(container_edge_data, container_sample_data):
                edge: Edge = Edge().data_init(single_edge_data, single_sample_data)
                layer_edges.append(edge)
        edges.append(layer_edges)
    return edges


# NOTE(review): dead code — this module-level string literal is a commented-out
# alternate version of create_edges_processed. It references an undefined name
# `self` inside a free function and would not run as-is. Consider deleting it;
# version control already preserves the history.
'''def create_edges_processed(edge_data: np.array, sample_data: np.array) -> Tuple[
    List[List[List[Edge]]], int, float, float]:
    edge_count: int = 0
    edge_min_importance: float = 10000.0
    edge_max_importance: float = 0.0
    edges: List[List[List[Edge]]] = []
    for layer_edge_data, layer_sample_data in zip(self.edge_data, self.sample_data):
        layer_edges: List[List[Edge]] = []
        for container_edge_data, container_sample_data in zip(layer_edge_data, layer_sample_data):
            container_edges: List[Edge] = []
            for edge_data, sample_data in zip(container_edge_data, container_sample_data):
                edge: Edge = Edge().data_init(edge_data, sample_data)
                container_edges.append(edge)
                if edge_min_importance > edge.data[3] * edge.data[6]:
                    edge_min_importance = edge.data[3] * edge.data[6]
                if edge_max_importance < edge.data[3] * edge.data[6]:
                    edge_max_importance = edge.data[3] * edge.data[6]
            edge_count += len(container_edges)
            layer_edges.append(container_edges)
        edges.append(layer_edges)
    return edges, edge_count, edge_min_importance, edge_max_importance'''
187 changes: 118 additions & 69 deletions models/network.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
from pyrr import Vector3

from data.data_handler import ImportanceDataHandler, ProcessedNNHandler
from models.edge import Edge, split_edges_for_buffer
from models.edge import Edge, split_edges_for_buffer, create_edges_processed, create_edges_random, \
create_edges_importance
from models.node import Node, create_random_nodes, create_nodes_from_data, create_nodes_with_importance

LOG_SOURCE: str = "NETWORK_MODEL"
Expand Down Expand Up @@ -65,6 +66,11 @@ def __init__(self, layer: List[int], layer_width: float, layer_distance: float,
self.average_node_distance: float = layer_width / 2.0 # self.get_average_node_distance()
self.average_edge_distance: float = layer_width / 2.0 # self.get_average_edge_distance()

self.node_min_importance: float = self.read_node_min_importance()
self.node_max_importance: float = self.read_node_max_importance()
self.edge_min_importance: float = 0.0
self.edge_max_importance: float = 1.0

def get_nodes(self) -> List[Node]:
node_data: List[Node] = []
for layer in self.layer_nodes:
Expand All @@ -81,75 +87,92 @@ def set_nodes(self, node_data: List[Node]):
read_node_index += 1
self.layer_nodes[i] = new_nodes

def generate_edges(self, edge_container_size: int = 500) -> List[List[List[Edge]]]:
self.pruned_edges = 0
edge_importance_values: List[float] = []
if len(self.edge_data) == 0:
for i in range(len(self.layer) - 1):
for node_one_i, node_one in enumerate(self.layer_nodes[i]):
for node_two_i, node_two in enumerate(self.layer_nodes[i + 1]):
new_edge: Edge = Edge().random_importance_init(node_one, node_two, i, node_one_i * len(
self.layer_nodes[i + 1]) + node_two_i)
edge_importance_values.append(new_edge.data[3] * new_edge.data[6])

min_importance_value: float = -1.0
if self.prune_percentage > 0.0:
min_importance_value: float = np.sort(np.array(edge_importance_values))[
int(len(edge_importance_values) * self.prune_percentage)]

edges: List[List[Edge]] = []
for i in range(len(self.layer) - 1):
layer_edge: List[Edge] = []
for node_one_i, node_one in enumerate(self.layer_nodes[i]):
for node_two_i, node_two in enumerate(self.layer_nodes[i + 1]):
self.edge_count = 0
for i in range(len(self.layer) - 1):
self.edge_count += len(self.layer_nodes[i]) * len(self.layer_nodes[i + 1])

self.node_min_importance = self.read_node_min_importance()
self.node_max_importance = self.read_node_max_importance()

def create_edges(self) -> List[List[Edge]]:
edges: List[List[Edge]] = []
for i in range(len(self.layer) - 1):
layer_edge: List[Edge] = []
for node_one_i, node_one in enumerate(self.layer_nodes[i]):
for node_two_i, node_two in enumerate(self.layer_nodes[i + 1]):
if len(self.edge_data) == 0:
new_edge: Edge = Edge().random_importance_init(node_one, node_two, i, node_one_i * len(
self.layer_nodes[i + 1]) + node_two_i)
else:
new_edge: Edge = Edge().importance_init(node_one, node_two, i, node_one_i * len(
self.layer_nodes[i + 1]) + node_two_i, self.edge_data[i][node_one_i][node_two_i])
layer_edge.append(new_edge)
edges.append(layer_edge)
return edges

if new_edge.data[3] * new_edge.data[6] > min_importance_value:
layer_edge.append(new_edge)
else:
self.pruned_edges += 1
edges.append(layer_edge)
return split_edges_for_buffer(edges, edge_container_size)
else:
if self.edge_importance_only:
for i in range(len(self.layer) - 1):
for node_one_i, node_one in enumerate(self.layer_nodes[i]):
for node_two_i, node_two in enumerate(self.layer_nodes[i + 1]):
new_edge: Edge = Edge().importance_init(node_one, node_two, i, node_one_i * len(
self.layer_nodes[i + 1]) + node_two_i, self.edge_data[i][node_one_i][node_two_i])
edge_importance_values.append(new_edge.data[3] * new_edge.data[6])

min_importance_value: float = -1.0
if self.prune_percentage > 0.0:
min_importance_value: float = np.sort(np.array(edge_importance_values))[
int(len(edge_importance_values) * self.prune_percentage)]

edges: List[List[Edge]] = []
for i in range(len(self.layer) - 1):
layer_edge: List[Edge] = []
for node_one_i, node_one in enumerate(self.layer_nodes[i]):
for node_two_i, node_two in enumerate(self.layer_nodes[i + 1]):
new_edge: Edge = Edge().importance_init(node_one, node_two, i, node_one_i * len(
self.layer_nodes[i + 1]) + node_two_i, self.edge_data[i][node_one_i][node_two_i])

if new_edge.data[3] * new_edge.data[6] > min_importance_value:
layer_edge.append(new_edge)
else:
self.pruned_edges += 1
edges.append(layer_edge)
return split_edges_for_buffer(edges, edge_container_size)
else:
edges: List[List[List[Edge]]] = []
for layer_edge_data, layer_sample_data in zip(self.edge_data, self.sample_data):
layer_edges: List[List[Edge]] = []
for container_edge_data, container_sample_data in zip(layer_edge_data, layer_sample_data):
container_edges: List[Edge] = []
for edge_data, sample_data in zip(container_edge_data, container_sample_data):
container_edges.append(Edge().data_init(edge_data, sample_data))
layer_edges.append(container_edges)
edges.append(layer_edges)
return edges
def set_edges(self) -> List[List[List[Edge]]]:
self.edge_count = 0
edges: List[List[List[Edge]]] = []
for layer_edge_data, layer_sample_data in zip(self.edge_data, self.sample_data):
layer_edges: List[List[Edge]] = []
for container_edge_data, container_sample_data in zip(layer_edge_data, layer_sample_data):
container_edges: List[Edge] = []
for edge_data, sample_data in zip(container_edge_data, container_sample_data):
edge: Edge = Edge().data_init(edge_data, sample_data)
container_edges.append(edge)
if self.edge_min_importance > edge.data[3] * edge.data[6]:
self.edge_min_importance = edge.data[3] * edge.data[6]
if self.edge_max_importance < edge.data[3] * edge.data[6]:
self.edge_max_importance = edge.data[3] * edge.data[6]
self.edge_count += len(container_edges)
layer_edges.append(container_edges)
edges.append(layer_edges)

max_edge_count: int = 0
for i in range(len(self.layer) - 1):
max_edge_count += self.layer[i] * self.layer[i + 1]

self.pruned_edges = max_edge_count - self.edge_count
return edges

    def generate_filtered_edges(self, edge_container_size: int = 500) -> List[List[List[Edge]]]:
        """Create edges, prune the least important ones and split them into buffers.

        Edge importance is edge.data[3] * edge.data[6]. Edges whose importance
        is not strictly above the self.prune_percentage quantile are dropped
        and counted in self.pruned_edges. Also updates
        self.edge_min_importance / self.edge_max_importance over the edges
        that survive pruning.

        :param edge_container_size: maximum number of edges per buffer container
        :return: surviving edges grouped as [layer][container][edge]
        """
        self.pruned_edges = 0
        # Sentinels: any real importance value will be below/above these.
        self.edge_min_importance = 10000.0
        self.edge_max_importance = 0.0

        # Pick the edge source: random init when no edge data is loaded,
        # importance init when only importance values are available, otherwise
        # fully processed edge/sample data.
        edges: List[List[Edge]] = create_edges_random(self.layer_nodes) if len(self.edge_data) == 0 \
            else create_edges_importance(self.layer_nodes, self.edge_data) if self.edge_importance_only \
            else create_edges_processed(self.edge_data, self.sample_data)

        # Edges already missing relative to self.edge_count (the fully
        # connected total) count as pruned before thresholding starts.
        existing_edges: int = 0
        for layer_edge in edges:
            existing_edges += len(layer_edge)
        self.pruned_edges = self.edge_count - existing_edges

        edge_importance_values: List[float] = []
        for layer_edge in edges:
            for edge in layer_edge:
                edge_importance_values.append(edge.data[3] * edge.data[6])

        importance_prune_threshold: float = -1.0
        if self.prune_percentage > 0.0:
            # Threshold = importance value at the prune_percentage quantile of
            # the sorted importances.
            # NOTE(review): assumes edge_importance_values is non-empty and
            # prune_percentage < 1.0; otherwise this indexes out of range —
            # confirm against callers.
            importance_prune_threshold: float = np.sort(np.array(edge_importance_values))[
                int(len(edge_importance_values) * self.prune_percentage)]

        filtered_edges: List[List[Edge]] = []
        for layer_edge in edges:
            filtered_layer_edges: List[Edge] = []
            for edge in layer_edge:
                if edge.data[3] * edge.data[6] > importance_prune_threshold:
                    # Track the min/max importance over surviving edges only.
                    if self.edge_min_importance > edge.data[3] * edge.data[6]:
                        self.edge_min_importance = edge.data[3] * edge.data[6]
                    if self.edge_max_importance < edge.data[3] * edge.data[6]:
                        self.edge_max_importance = edge.data[3] * edge.data[6]
                    filtered_layer_edges.append(edge)
                else:
                    self.pruned_edges += 1
            filtered_edges.append(filtered_layer_edges)
        return split_edges_for_buffer(filtered_edges, edge_container_size)

def generate_max_distance(self) -> float:
max_distance: float = 0.0
Expand Down Expand Up @@ -198,14 +221,40 @@ def get_average_node_distance(self) -> float:
return distance_sum

def get_node_mid(self) -> Vector3:
node_position_min_x: float = 0.0
node_position_max_x: float = 0.0
mid_position_x: float = 0.0
node_position_min_y: float = 0.0
node_position_max_y: float = 0.0
mid_position_y: float = 0.0
position_count: int = 0
for i in range(len(self.layer)):
position_count += len(self.layer_nodes[i])
for i in range(len(self.layer)):
for node_one in self.layer_nodes[i]:
mid_position_x += node_one.position.x
mid_position_y += node_one.position.x
mid_position_y += node_one.position.y
node_position_min_x = node_one.position.x if node_one.position.x < node_position_min_x else node_position_min_x
node_position_min_y = node_one.position.y if node_one.position.y < node_position_min_y else node_position_min_y
node_position_max_x = node_one.position.x if node_one.position.x > node_position_max_x else node_position_max_x
node_position_max_y = node_one.position.y if node_one.position.y > node_position_max_y else node_position_max_y
return Vector3(
[mid_position_x / position_count, mid_position_y / position_count, 0.0])
[(node_position_min_x + node_position_max_x) * 0.5,
(node_position_min_y + node_position_max_y) * 0.5,
0.0])

def read_node_min_importance(self) -> float:
min_importance: float = 10000.0
for nodes in self.layer_nodes:
for node in nodes:
if node.data[14] < min_importance:
min_importance = node.data[14]
return min_importance

def read_node_max_importance(self) -> float:
max_importance: float = 0.0
for nodes in self.layer_nodes:
for node in nodes:
if node.data[14] > max_importance:
max_importance = node.data[14]
return max_importance
4 changes: 2 additions & 2 deletions models/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,12 +50,12 @@ def random_importance_init(self, position: Vector3):
position_max_0: int = int(random.random() * 10)
for i in range(10):
if i == position_max_0:
random_value: float = random.random() / 2.0 + 0.5
random_value: float = random.random()
importance_sum += random_value
importance_squared_sum += random_value * random_value
self.data.append(random_value)
else:
random_value: float = random.random() / 4.0
random_value: float = random.random() / 5.0
importance_sum += random_value
importance_squared_sum += random_value * random_value
self.data.append(random_value)
Expand Down
3 changes: 3 additions & 0 deletions processing/advection_process.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,3 +28,6 @@ def iterate(self):

    def get_advection_strength(self) -> float:
        """Return the signed advection strength: current bandwidth scaled by direction."""
        return self.current_bandwidth * self.advection_direction

def get_bandwidth_reduction(self):
return math.pow(self.bandwidth_reduction, self.iteration)
Loading

0 comments on commit 23aa733

Please sign in to comment.