Skip to content

Commit

Permalink
Feature/fix tests (#104)
Browse files Browse the repository at this point in the history
* Updated version checking

* Removed duplicated code
  • Loading branch information
jernsting authored Nov 24, 2022
1 parent a500874 commit 9ac4336
Show file tree
Hide file tree
Showing 4 changed files with 38 additions and 183 deletions.
62 changes: 4 additions & 58 deletions photonai_graph/NeuralNets/GATModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,20 +53,7 @@ def __init__(self,
hidden_dim: int = 256,
heads: List = None,
agg_mode="mean",
nn_epochs: int = 200,
learning_rate: float = 0.001,
batch_size: int = 32,
adjacency_axis: int = 0,
feature_axis: int = 1,
add_self_loops: bool = True,
allow_zero_in_degree: bool = False,
validation_score: bool = False,
early_stopping: bool = False,
es_patience: int = 10,
es_tolerance: int = 9,
es_delta: float = 0,
verbose: bool = False,
logs: str = '',
*args,
**kwargs):
"""
Graph Attention Network for graph classification. GAT Layers
Expand All @@ -93,21 +80,7 @@ def __init__(self,
aggregation mode for the graph convolutional layers
"""
super(GATClassifierModel, self).__init__(nn_epochs=nn_epochs,
learning_rate=learning_rate,
batch_size=batch_size,
adjacency_axis=adjacency_axis,
feature_axis=feature_axis,
add_self_loops=add_self_loops,
allow_zero_in_degree=allow_zero_in_degree,
validation_score=validation_score,
early_stopping=early_stopping,
es_patience=es_patience,
es_tolerance=es_tolerance,
es_delta=es_delta,
verbose=verbose,
logs=logs,
**kwargs)
super(GATClassifierModel, self).__init__(*args, **kwargs)
if heads is None:
heads = [2, 2]
# Todo: if heads is not length of hidden layers +1 (bc of the first layer)
Expand All @@ -130,21 +103,8 @@ def __init__(self,
hidden_layers: int = 2,
hidden_dim: int = 256,
heads: List = None,
nn_epochs: int = 200,
learning_rate: float = 0.001,
batch_size: int = 32,
adjacency_axis: int = 0,
feature_axis: int = 1,
add_self_loops: bool = True,
allow_zero_in_degree: bool = False,
logs: str = None,
validation_score: bool = False,
early_stopping: bool = False,
es_patience: int = 10,
es_tolerance: int = 9,
es_delta: float = 0,
verbose: bool = False,
agg_mode: str = None,
*args,
**kwargs):
"""
Graph Attention Network for graph regression. GAT Layers
Expand All @@ -168,21 +128,7 @@ def __init__(self,
verbose: bool,default=False
If true verbose output is generated
"""
super(GATRegressorModel, self).__init__(nn_epochs=nn_epochs,
learning_rate=learning_rate,
batch_size=batch_size,
adjacency_axis=adjacency_axis,
feature_axis=feature_axis,
add_self_loops=add_self_loops,
allow_zero_in_degree=allow_zero_in_degree,
validation_score=validation_score,
early_stopping=early_stopping,
es_patience=es_patience,
es_tolerance=es_tolerance,
es_delta=es_delta,
verbose=verbose,
logs=logs,
**kwargs)
super(GATRegressorModel, self).__init__(*args, **kwargs)
if heads is None:
heads = [2, 2]
# Todo: if heads is not length of hidden layers +1 (bc of the first layer)
Expand Down
62 changes: 4 additions & 58 deletions photonai_graph/NeuralNets/GCNModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,20 +40,7 @@ def __init__(self,
in_dim: int = 1,
hidden_layers: int = 2,
hidden_dim: int = 256,
nn_epochs: int = 200,
learning_rate: float = 0.001,
batch_size: int = 32,
adjacency_axis: int = 0,
feature_axis: int = 1,
add_self_loops: bool = True,
allow_zero_in_degree: bool = False,
validation_score: bool = False,
early_stopping: bool = False,
es_patience: int = 10,
es_tolerance: int = 9,
es_delta: float = 0,
verbose: bool = False,
logs: str = '',
*args,
**kwargs):
"""
Graph Attention Network for graph classification. GCN Layers
Expand All @@ -75,21 +62,7 @@ def __init__(self,
verbose: bool,default=False
If true verbose output is generated
"""
super(GCNClassifierModel, self).__init__(nn_epochs=nn_epochs,
learning_rate=learning_rate,
batch_size=batch_size,
adjacency_axis=adjacency_axis,
feature_axis=feature_axis,
add_self_loops=add_self_loops,
allow_zero_in_degree=allow_zero_in_degree,
validation_score=validation_score,
early_stopping=early_stopping,
es_patience=es_patience,
es_tolerance=es_tolerance,
es_delta=es_delta,
verbose=verbose,
logs=logs,
**kwargs)
super(GCNClassifierModel, self).__init__(*args, **kwargs)
self.in_dim = in_dim
self.hidden_dim = hidden_dim
self.hidden_layers = hidden_layers
Expand All @@ -108,20 +81,7 @@ def __init__(self,
in_dim: int = 1,
hidden_layers: int = 2,
hidden_dim: int = 256,
nn_epochs: int = 200,
learning_rate: float = 0.001,
batch_size: int = 32,
adjacency_axis: int = 0,
feature_axis: int = 1,
add_self_loops: bool = True,
allow_zero_in_degree: bool = False,
validation_score: bool = False,
early_stopping: bool = False,
es_patience: int = 10,
es_tolerance: int = 9,
es_delta: float = 0,
verbose: bool = False,
logs: str = '',
*args,
**kwargs):
"""
Graph convolutional Network for graph regression. GCN Layers
Expand All @@ -142,21 +102,7 @@ def __init__(self,
verbose: bool,default=False
If true verbose output is generated
"""
super(GCNRegressorModel, self).__init__(nn_epochs=nn_epochs,
learning_rate=learning_rate,
batch_size=batch_size,
adjacency_axis=adjacency_axis,
feature_axis=feature_axis,
add_self_loops=add_self_loops,
allow_zero_in_degree=allow_zero_in_degree,
validation_score=validation_score,
early_stopping=early_stopping,
es_patience=es_patience,
es_tolerance=es_tolerance,
es_delta=es_delta,
verbose=verbose,
logs=logs,
**kwargs)
super(GCNRegressorModel, self).__init__(*args, **kwargs)
self.in_dim = in_dim
self.hidden_dim = hidden_dim
self.hidden_layers = hidden_layers
Expand Down
62 changes: 4 additions & 58 deletions photonai_graph/NeuralNets/SGCModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,20 +43,7 @@ def __init__(self,
in_dim: int = 1,
hidden_layers: int = 2,
hidden_dim: int = 256,
nn_epochs: int = 200,
learning_rate: float = 0.001,
batch_size: int = 32,
adjacency_axis: int = 0,
feature_axis: int = 1,
add_self_loops: bool = True,
allow_zero_in_degree: bool = False,
validation_score: bool = False,
early_stopping: bool = False,
es_patience: int = 10,
es_tolerance: int = 9,
es_delta: float = 0,
verbose: bool = False,
logs: str = '',
*args,
**kwargs):
"""
Graph convolutional network for graph classification. Simple Graph
Expand All @@ -78,21 +65,7 @@ def __init__(self,
verbose: bool,default=False
If true verbose output is generated
"""
super(SGConvClassifierModel, self).__init__(nn_epochs=nn_epochs,
learning_rate=learning_rate,
batch_size=batch_size,
adjacency_axis=adjacency_axis,
feature_axis=feature_axis,
add_self_loops=add_self_loops,
allow_zero_in_degree=allow_zero_in_degree,
validation_score=validation_score,
early_stopping=early_stopping,
es_patience=es_patience,
es_tolerance=es_tolerance,
es_delta=es_delta,
verbose=verbose,
logs=logs,
**kwargs)
super(SGConvClassifierModel, self).__init__(*args, **kwargs)
self.in_dim = in_dim
self.hidden_layers = hidden_layers
self.hidden_dim = hidden_dim
Expand All @@ -109,20 +82,7 @@ def __init__(self,
in_dim: int = 1,
hidden_layers: int = 2,
hidden_dim: int = 256,
nn_epochs: int = 200,
learning_rate: float = 0.001,
batch_size: int = 32,
adjacency_axis: int = 0,
feature_axis: int = 1,
add_self_loops: bool = True,
allow_zero_in_degree: bool = False,
validation_score: bool = False,
early_stopping: bool = False,
es_patience: int = 10,
es_tolerance: int = 9,
es_delta: float = 0,
verbose: bool = False,
logs: str = '',
*args,
**kwargs):
"""
Graph convolutional network for graph regression. Simple Graph
Expand All @@ -144,21 +104,7 @@ def __init__(self,
verbose: bool,default=False
If true verbose output is generated
"""
super(SGConvRegressorModel, self).__init__(nn_epochs=nn_epochs,
learning_rate=learning_rate,
batch_size=batch_size,
adjacency_axis=adjacency_axis,
feature_axis=feature_axis,
add_self_loops=add_self_loops,
allow_zero_in_degree=allow_zero_in_degree,
validation_score=validation_score,
early_stopping=early_stopping,
es_patience=es_patience,
es_tolerance=es_tolerance,
es_delta=es_delta,
verbose=verbose,
logs=logs,
**kwargs)
super(SGConvRegressorModel, self).__init__(*args, **kwargs)
self.in_dim = in_dim
self.hidden_layers = hidden_layers
self.hidden_dim = hidden_dim
Expand Down
35 changes: 26 additions & 9 deletions photonai_graph/__init__.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,32 @@
import os
from datetime import datetime
from photonai.base import PhotonRegistry
from photonai.photonlogger import logger
from photonai_graph.version import __version__

current_path = os.path.dirname(os.path.abspath(__file__))
registered_file = os.path.join(current_path, "registered")
logger.info("Checking Graph Module Registration")
if not os.path.isfile(registered_file): # pragma: no cover
logger.info("Registering Graph Module")
from photonai.base import PhotonRegistry

from .version import __version__


def do_register(current_path, registered_file):
    """Register the photonai_graph module with PHOTONAI and record the installed version.

    Parameters
    ----------
    current_path : str
        Directory containing ``photonai_graph.json`` (the module manifest).
    registered_file : str
        Path of the marker file that records which version was registered.
    """
    reg = PhotonRegistry()
    reg.add_module(os.path.join(current_path, "photonai_graph.json"))
    # Persist ONLY the version string. register() compares the file's full
    # contents against __version__, so any extra text (the previous code also
    # wrote a datetime timestamp first) defeats that check and forces a
    # re-registration on every import.
    with open(registered_file, "w") as f:
        f.write(str(__version__))

def register():
    """Ensure the current version of the graph module is registered with PHOTONAI.

    Registers the module when no marker file exists, and re-registers when the
    marker file records a different (older) version. Runs once at import time.
    """
    current_path = os.path.dirname(os.path.abspath(__file__))
    registered_file = os.path.join(current_path, "registered")
    logger.info("Checking PHOTONAI Graph Module Registration")
    if not os.path.isfile(registered_file):  # pragma: no cover
        logger.info("Registering Graph Module")
        do_register(current_path=current_path, registered_file=registered_file)
    else:
        with open(registered_file, "r") as f:
            # endswith() instead of == so that legacy marker files, which
            # prepended a datetime timestamp to the version string, are still
            # recognized as up to date instead of triggering a needless
            # re-registration on every import.
            if f.read().endswith(str(__version__)):
                logger.info("Current version already registered")
            else:
                logger.info("Updating Graph Module")
                do_register(current_path=current_path, registered_file=registered_file)


register()

0 comments on commit 9ac4336

Please sign in to comment.