diff --git a/photonai_graph/NeuralNets/GATModel.py b/photonai_graph/NeuralNets/GATModel.py
index 33a0d0b..b479f62 100644
--- a/photonai_graph/NeuralNets/GATModel.py
+++ b/photonai_graph/NeuralNets/GATModel.py
@@ -53,20 +53,7 @@ def __init__(self,
                  hidden_dim: int = 256,
                  heads: List = None,
                  agg_mode="mean",
-                 nn_epochs: int = 200,
-                 learning_rate: float = 0.001,
-                 batch_size: int = 32,
-                 adjacency_axis: int = 0,
-                 feature_axis: int = 1,
-                 add_self_loops: bool = True,
-                 allow_zero_in_degree: bool = False,
-                 validation_score: bool = False,
-                 early_stopping: bool = False,
-                 es_patience: int = 10,
-                 es_tolerance: int = 9,
-                 es_delta: float = 0,
-                 verbose: bool = False,
-                 logs: str = '',
+                 *args,
                  **kwargs):
         """
         Graph Attention Network for graph classification. GAT Layers
@@ -93,21 +80,7 @@ def __init__(self,
             aggregation mode for the graph convolutional layers
 
         """
-        super(GATClassifierModel, self).__init__(nn_epochs=nn_epochs,
-                                                 learning_rate=learning_rate,
-                                                 batch_size=batch_size,
-                                                 adjacency_axis=adjacency_axis,
-                                                 feature_axis=feature_axis,
-                                                 add_self_loops=add_self_loops,
-                                                 allow_zero_in_degree=allow_zero_in_degree,
-                                                 validation_score=validation_score,
-                                                 early_stopping=early_stopping,
-                                                 es_patience=es_patience,
-                                                 es_tolerance=es_tolerance,
-                                                 es_delta=es_delta,
-                                                 verbose=verbose,
-                                                 logs=logs,
-                                                 **kwargs)
+        super(GATClassifierModel, self).__init__(*args, **kwargs)
         if heads is None:
             heads = [2, 2]
         # Todo: if heads is not length of hidden layers +1 (bc of the first layer)
@@ -130,21 +103,8 @@ def __init__(self,
                  hidden_layers: int = 2,
                  hidden_dim: int = 256,
                  heads: List = None,
-                 nn_epochs: int = 200,
-                 learning_rate: float = 0.001,
-                 batch_size: int = 32,
-                 adjacency_axis: int = 0,
-                 feature_axis: int = 1,
-                 add_self_loops: bool = True,
-                 allow_zero_in_degree: bool = False,
-                 logs: str = None,
-                 validation_score: bool = False,
-                 early_stopping: bool = False,
-                 es_patience: int = 10,
-                 es_tolerance: int = 9,
-                 es_delta: float = 0,
-                 verbose: bool = False,
                  agg_mode: str = None,
+                 *args,
                  **kwargs):
         """
         Graph Attention Network for graph regression. GAT Layers
@@ -168,21 +128,7 @@ def __init__(self,
         verbose: bool,default=False
             If true verbose output is generated
         """
-        super(GATRegressorModel, self).__init__(nn_epochs=nn_epochs,
-                                                learning_rate=learning_rate,
-                                                batch_size=batch_size,
-                                                adjacency_axis=adjacency_axis,
-                                                feature_axis=feature_axis,
-                                                add_self_loops=add_self_loops,
-                                                allow_zero_in_degree=allow_zero_in_degree,
-                                                validation_score=validation_score,
-                                                early_stopping=early_stopping,
-                                                es_patience=es_patience,
-                                                es_tolerance=es_tolerance,
-                                                es_delta=es_delta,
-                                                verbose=verbose,
-                                                logs=logs,
-                                                **kwargs)
+        super(GATRegressorModel, self).__init__(*args, **kwargs)
         if heads is None:
             heads = [2, 2]
         # Todo: if heads is not length of hidden layers +1 (bc of the first layer)
diff --git a/photonai_graph/NeuralNets/GCNModel.py b/photonai_graph/NeuralNets/GCNModel.py
index 5cf8fe0..dcb07f7 100644
--- a/photonai_graph/NeuralNets/GCNModel.py
+++ b/photonai_graph/NeuralNets/GCNModel.py
@@ -40,20 +40,7 @@ def __init__(self,
                  in_dim: int = 1,
                  hidden_layers: int = 2,
                  hidden_dim: int = 256,
-                 nn_epochs: int = 200,
-                 learning_rate: float = 0.001,
-                 batch_size: int = 32,
-                 adjacency_axis: int = 0,
-                 feature_axis: int = 1,
-                 add_self_loops: bool = True,
-                 allow_zero_in_degree: bool = False,
-                 validation_score: bool = False,
-                 early_stopping: bool = False,
-                 es_patience: int = 10,
-                 es_tolerance: int = 9,
-                 es_delta: float = 0,
-                 verbose: bool = False,
-                 logs: str = '',
+                 *args,
                  **kwargs):
         """
         Graph Attention Network for graph classification. GCN Layers
@@ -75,21 +62,7 @@ def __init__(self,
         verbose: bool,default=False
             If true verbose output is generated
         """
-        super(GCNClassifierModel, self).__init__(nn_epochs=nn_epochs,
-                                                 learning_rate=learning_rate,
-                                                 batch_size=batch_size,
-                                                 adjacency_axis=adjacency_axis,
-                                                 feature_axis=feature_axis,
-                                                 add_self_loops=add_self_loops,
-                                                 allow_zero_in_degree=allow_zero_in_degree,
-                                                 validation_score=validation_score,
-                                                 early_stopping=early_stopping,
-                                                 es_patience=es_patience,
-                                                 es_tolerance=es_tolerance,
-                                                 es_delta=es_delta,
-                                                 verbose=verbose,
-                                                 logs=logs,
-                                                 **kwargs)
+        super(GCNClassifierModel, self).__init__(*args, **kwargs)
         self.in_dim = in_dim
         self.hidden_dim = hidden_dim
         self.hidden_layers = hidden_layers
@@ -108,20 +81,7 @@ def __init__(self,
                  in_dim: int = 1,
                  hidden_layers: int = 2,
                  hidden_dim: int = 256,
-                 nn_epochs: int = 200,
-                 learning_rate: float = 0.001,
-                 batch_size: int = 32,
-                 adjacency_axis: int = 0,
-                 feature_axis: int = 1,
-                 add_self_loops: bool = True,
-                 allow_zero_in_degree: bool = False,
-                 validation_score: bool = False,
-                 early_stopping: bool = False,
-                 es_patience: int = 10,
-                 es_tolerance: int = 9,
-                 es_delta: float = 0,
-                 verbose: bool = False,
-                 logs: str = '',
+                 *args,
                  **kwargs):
         """
         Graph convolutional Network for graph regression. GCN Layers
@@ -142,21 +102,7 @@ def __init__(self,
         verbose: bool,default=False
             If true verbose output is generated
         """
-        super(GCNRegressorModel, self).__init__(nn_epochs=nn_epochs,
-                                                learning_rate=learning_rate,
-                                                batch_size=batch_size,
-                                                adjacency_axis=adjacency_axis,
-                                                feature_axis=feature_axis,
-                                                add_self_loops=add_self_loops,
-                                                allow_zero_in_degree=allow_zero_in_degree,
-                                                validation_score=validation_score,
-                                                early_stopping=early_stopping,
-                                                es_patience=es_patience,
-                                                es_tolerance=es_tolerance,
-                                                es_delta=es_delta,
-                                                verbose=verbose,
-                                                logs=logs,
-                                                **kwargs)
+        super(GCNRegressorModel, self).__init__(*args, **kwargs)
         self.in_dim = in_dim
         self.hidden_dim = hidden_dim
         self.hidden_layers = hidden_layers
diff --git a/photonai_graph/NeuralNets/SGCModel.py b/photonai_graph/NeuralNets/SGCModel.py
index b71bcf3..c8bddbc 100644
--- a/photonai_graph/NeuralNets/SGCModel.py
+++ b/photonai_graph/NeuralNets/SGCModel.py
@@ -43,20 +43,7 @@ def __init__(self,
                  in_dim: int = 1,
                  hidden_layers: int = 2,
                  hidden_dim: int = 256,
-                 nn_epochs: int = 200,
-                 learning_rate: float = 0.001,
-                 batch_size: int = 32,
-                 adjacency_axis: int = 0,
-                 feature_axis: int = 1,
-                 add_self_loops: bool = True,
-                 allow_zero_in_degree: bool = False,
-                 validation_score: bool = False,
-                 early_stopping: bool = False,
-                 es_patience: int = 10,
-                 es_tolerance: int = 9,
-                 es_delta: float = 0,
-                 verbose: bool = False,
-                 logs: str = '',
+                 *args,
                  **kwargs):
         """
         Graph convolutional network for graph classification. Simple Graph
@@ -78,21 +65,7 @@ def __init__(self,
         verbose: bool,default=False
             If true verbose output is generated
         """
-        super(SGConvClassifierModel, self).__init__(nn_epochs=nn_epochs,
-                                                    learning_rate=learning_rate,
-                                                    batch_size=batch_size,
-                                                    adjacency_axis=adjacency_axis,
-                                                    feature_axis=feature_axis,
-                                                    add_self_loops=add_self_loops,
-                                                    allow_zero_in_degree=allow_zero_in_degree,
-                                                    validation_score=validation_score,
-                                                    early_stopping=early_stopping,
-                                                    es_patience=es_patience,
-                                                    es_tolerance=es_tolerance,
-                                                    es_delta=es_delta,
-                                                    verbose=verbose,
-                                                    logs=logs,
-                                                    **kwargs)
+        super(SGConvClassifierModel, self).__init__(*args, **kwargs)
         self.in_dim = in_dim
         self.hidden_layers = hidden_layers
         self.hidden_dim = hidden_dim
@@ -109,20 +82,7 @@ def __init__(self,
                  in_dim: int = 1,
                  hidden_layers: int = 2,
                  hidden_dim: int = 256,
-                 nn_epochs: int = 200,
-                 learning_rate: float = 0.001,
-                 batch_size: int = 32,
-                 adjacency_axis: int = 0,
-                 feature_axis: int = 1,
-                 add_self_loops: bool = True,
-                 allow_zero_in_degree: bool = False,
-                 validation_score: bool = False,
-                 early_stopping: bool = False,
-                 es_patience: int = 10,
-                 es_tolerance: int = 9,
-                 es_delta: float = 0,
-                 verbose: bool = False,
-                 logs: str = '',
+                 *args,
                  **kwargs):
         """
         Graph convolutional network for graph regression. Simple Graph
@@ -144,21 +104,7 @@ def __init__(self,
         verbose: bool,default=False
             If true verbose output is generated
         """
-        super(SGConvRegressorModel, self).__init__(nn_epochs=nn_epochs,
-                                                   learning_rate=learning_rate,
-                                                   batch_size=batch_size,
-                                                   adjacency_axis=adjacency_axis,
-                                                   feature_axis=feature_axis,
-                                                   add_self_loops=add_self_loops,
-                                                   allow_zero_in_degree=allow_zero_in_degree,
-                                                   validation_score=validation_score,
-                                                   early_stopping=early_stopping,
-                                                   es_patience=es_patience,
-                                                   es_tolerance=es_tolerance,
-                                                   es_delta=es_delta,
-                                                   verbose=verbose,
-                                                   logs=logs,
-                                                   **kwargs)
+        super(SGConvRegressorModel, self).__init__(*args, **kwargs)
         self.in_dim = in_dim
         self.hidden_layers = hidden_layers
         self.hidden_dim = hidden_dim
diff --git a/photonai_graph/__init__.py b/photonai_graph/__init__.py
index 379bb06..1aa07a3 100644
--- a/photonai_graph/__init__.py
+++ b/photonai_graph/__init__.py
@@ -1,15 +1,32 @@
 import os
 from datetime import datetime
+from photonai.base import PhotonRegistry
 from photonai.photonlogger import logger
-from photonai_graph.version import __version__
-
-current_path = os.path.dirname(os.path.abspath(__file__))
-registered_file = os.path.join(current_path, "registered")
-logger.info("Checking Graph Module Registration")
-if not os.path.isfile(registered_file):  # pragma: no cover
-    logger.info("Registering Graph Module")
-    from photonai.base import PhotonRegistry
+
+from .version import __version__
+
+
+def do_register(current_path, registered_file):
     reg = PhotonRegistry()
     reg.add_module(os.path.join(current_path, "photonai_graph.json"))
     with open(os.path.join(registered_file), "w") as f:
-        f.write(str(datetime.now()))
+        f.write(str(__version__))
+
+
+def register():
+    current_path = os.path.dirname(os.path.abspath(__file__))
+    registered_file = os.path.join(current_path, "registered")
+    logger.info("Checking PHOTONAI Graph Module Registration")
+    if not os.path.isfile(registered_file):  # pragma: no cover
+        logger.info("Registering Graph Module")
+        do_register(current_path=current_path, registered_file=registered_file)
+    else:
+        with open(os.path.join(registered_file), "r") as f:
+            if f.read() == __version__:
+                logger.info("Current version already registered")
+            else:
+                logger.info("Updating Graph Module")
+                do_register(current_path=current_path, registered_file=registered_file)
+
+
+register()
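
Note on the model changes: each of the six constructors above previously re-declared fourteen shared training parameters (nn_epochs, learning_rate, batch_size, the early-stopping options, ...) only to pass them through to the base class one by one. They now accept *args, **kwargs and forward them unchanged, so the shared options and their defaults live in a single place. A minimal sketch of the pattern follows, using hypothetical names; TrainerBase stands in for the real photonai_graph base class, whose signature this diff does not show.

    class TrainerBase:
        def __init__(self, nn_epochs: int = 200, learning_rate: float = 0.001,
                     batch_size: int = 32, verbose: bool = False):
            # Shared training hyperparameters are declared once, here.
            self.nn_epochs = nn_epochs
            self.learning_rate = learning_rate
            self.batch_size = batch_size
            self.verbose = verbose


    class GCNLikeClassifier(TrainerBase):
        def __init__(self, in_dim: int = 1, hidden_layers: int = 2,
                     hidden_dim: int = 256, *args, **kwargs):
            # Anything not named in this signature is forwarded unchanged.
            super().__init__(*args, **kwargs)
            self.in_dim = in_dim
            self.hidden_layers = hidden_layers
            self.hidden_dim = hidden_dim


    # Base-class options still reach TrainerBase, now via **kwargs:
    model = GCNLikeClassifier(hidden_dim=128, nn_epochs=50, verbose=True)
    assert (model.nn_epochs, model.hidden_dim, model.verbose) == (50, 128, True)

One trade-off: the base-class options and their defaults no longer appear in the subclass signatures, so keyword arguments are the intended calling convention; positional arguments after the subclass-specific ones would be swallowed by *args.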
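
Note on the __init__.py changes: the "registered" marker file now stores the package version instead of a timestamp, and the inline top-level registration code is factored into register()/do_register(), still run at import time via the trailing register() call. The effect is that a fresh install registers the module, an unchanged version is a no-op, and a version bump re-registers on the next import. Below is a standalone sketch of that control flow with PhotonRegistry's side effects stubbed out; the stubbed bodies are illustrative, not the real implementation.

    import os
    import tempfile

    __version__ = "0.2.0"  # stand-in for photonai_graph.version.__version__


    def do_register(registered_file):
        # The real do_register() also calls PhotonRegistry().add_module(...).
        with open(registered_file, "w") as f:
            f.write(str(__version__))


    def register(current_path):
        registered_file = os.path.join(current_path, "registered")
        if not os.path.isfile(registered_file):
            print("Registering Graph Module")            # fresh install
            do_register(registered_file)
        else:
            with open(registered_file, "r") as f:
                if f.read() == __version__:
                    print("Current version already registered")
                else:
                    print("Updating Graph Module")       # version bump
                    do_register(registered_file)


    with tempfile.TemporaryDirectory() as d:
        register(d)  # -> Registering Graph Module
        register(d)  # -> Current version already registered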