Crossover implementation#
- class DAGTwoPoint(size=10)[source]
Bases:
object
Implementation of a two-point crossover on an array. A DAG-based crossover is used if some of the exchanged values are DAGs.
- Parameters:
size (int, default=10) – Maximum size a DAG may have.
Examples
>>> from dragon.search_operators.crossover import DAGTwoPoint >>> from dragon.search_space.base_variables import Constant, FloatVar, IntVar, CatVar, ArrayVar >>> arr = ArrayVar(IntVar("Number of features", 1, 10), CatVar("Optimizer", features=['Adam', 'SGD', 'AdamMax']), FloatVar('learning rate', 0.001, 0.5), Constant('Seed', value=0)) >>> parent_1 = arr.random() >>> parent_2 = arr.random() >>> crossover = DAGTwoPoint(arr) >>> print(parent_1) [5, 'AdamMax', 0.16718361674068502, 0] >>> print(parent_2) [9, 'SGD', 0.28364322926906005, 0] >>> print(crossover(parent_1, parent_2)) ([5, 'AdamMax', 0.28364322926906005, 0], [9, 'SGD', 0.16718361674068502, 0])
>>> from dragon.search_space.dag_variables import HpVar, NodeVariable, EvoDagVariable >>> from dragon.search_space.bricks import MLP, MaxPooling1D, AVGPooling1D >>> from dragon.search_space.base_variables import DynamicBlock >>> from dragon.search_space.bricks_variables import activation_var >>> mlp = HpVar("Operation", Constant("MLP operation", MLP), hyperparameters={"out_channels": IntVar("out_channels", 1, 10)}) >>> pooling = HpVar("Operation", CatVar("Pooling operation", [MaxPooling1D, AVGPooling1D]), hyperparameters={"pool_size": IntVar("pool_size", 1, 5)}) >>> candidates = NodeVariable(label = "Candidates", ... combiner=CatVar("Combiner", features=['add', 'concat']), ... operation=CatVar("Candidates", [mlp, pooling]), ... activation_function=activation_var("Activation")) >>> operations = DynamicBlock("Operations", candidates, repeat=3) >>> dag = EvoDagVariable(label="DAG", operations=operations) >>> arr = ArrayVar(dag, dag, Constant('Seed', value=0)) >>> parent_1 = arr.random() >>> parent_2 = arr.random() >>> crossover = DAGTwoPoint(arr) >>> print(parent_1) [NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Identity() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.pooling.AVGPooling1D'> -- (hp) {'pool_size': 3} -- (activation) ELU(alpha=1.0) -- ] | MATRIX:[[0, 1], [0, 0]], NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Identity() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.basics.MLP'> -- (hp) {'out_channels': 10} -- (activation) SiLU() -- , (combiner) add -- (name) <class 'dragon.search_space.bricks.pooling.MaxPooling1D'> -- (hp) {'pool_size': 4} -- (activation) Sigmoid() -- ] | MATRIX:[[0, 1, 1], [0, 0, 1], [0, 0, 0]], 0] >>> print(parent_2) [NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Identity() -- , (combiner) 
concat -- (name) <class 'dragon.search_space.bricks.pooling.AVGPooling1D'> -- (hp) {'pool_size': 1} -- (activation) LeakyReLU(negative_slope=0.01) -- ] | MATRIX:[[0, 1], [0, 0]], NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Identity() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.basics.MLP'> -- (hp) {'out_channels': 9} -- (activation) ELU(alpha=1.0) -- ] | MATRIX:[[0, 1], [0, 0]], 0] >>> print(crossover(parent_1, parent_2)) ([NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Identity() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.pooling.AVGPooling1D'> -- (hp) {'pool_size': 3} -- (activation) ELU(alpha=1.0) -- ] | MATRIX:[[0, 1], [0, 0]], NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Identity() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.basics.MLP'> -- (hp) {'out_channels': 10} -- (activation) SiLU() -- , (combiner) add -- (name) <class 'dragon.search_space.bricks.pooling.MaxPooling1D'> -- (hp) {'pool_size': 4} -- (activation) Sigmoid() -- ] | MATRIX:[[0, 1, 1], [0, 0, 1], [0, 0, 0]], 0], [NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Identity() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.pooling.AVGPooling1D'> -- (hp) {'pool_size': 1} -- (activation) LeakyReLU(negative_slope=0.01) -- ] | MATRIX:[[0, 1], [0, 0]], NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Identity() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.basics.MLP'> -- (hp) {'out_channels': 9} -- (activation) ELU(alpha=1.0) -- ] | MATRIX:[[0, 1], [0, 0]], 0])
- adj_matrix_crossover(p1, p2)[source]
- DAG-based crossover
Select the indices of the operations that will be exchanged in each graph.
Remove the corresponding lines and columns from the adjacency matrices
Compute the index where the new nodes will be inserted
Insert the new rows and columns within the adjacency matrices
Make sure no nodes without incoming or outgoing connections remain within the matrices
Make sure the new matrices are upper-triangular
Recreate the list of nodes
Create new AdjMatrix variables with the new nodes and matrices
- Parameters:
p1 (AdjMatrix) – First parent
p2 (AdjMatrix) – Second parent
size (int, default=10) – Maximum number of nodes that an offspring graph could have
- Returns:
f1 (AdjMatrix) – First offspring
f2 (AdjMatrix) – Second offspring
Examples
>>> import numpy as np >>> from dragon.search_space.dag_variables import AdjMatrix >>> import torch.nn as nn >>> from dragon.search_space.bricks import MLP, Identity >>> from dragon.search_space.dag_variables import Node >>> node_1 = Node(combiner="add", operation=MLP, hp={"out_channels": 10}, activation=nn.ReLU()) >>> node_2 = Node(combiner="add", operation=MLP, hp={"out_channels": 5}, activation=nn.ReLU()) >>> node_3 = Node(combiner="concat", operation=Identity, hp={}, activation=nn.Softmax()) >>> op1 = [node_1, node_2, node_3] >>> op2 = [node_1, node_3] >>> m1 = np.array([[0, 1, 1], ... [0, 0, 1], ... [0, 0, 0]]) >>> m2 = np.array([[0, 1], ... [0, 0]]) >>> print(AdjMatrix(op1, m1)) NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.MLP'> -- (hp) {'out_channels': 10} -- (activation) ReLU() -- , (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.MLP'> -- (hp) {'out_channels': 5} -- (activation) ReLU() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Softmax(dim=None) -- ] | MATRIX:[[0, 1, 1], [0, 0, 1], [0, 0, 0]] >>> print(AdjMatrix(op2, m2)) NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.MLP'> -- (hp) {'out_channels': 10} -- (activation) ReLU() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Softmax(dim=None) -- ] | MATRIX:[[0, 1], [0, 0]] >>> f1, f2 = adj_matrix_crossover(AdjMatrix(op1, m1), AdjMatrix(op2, m2)) >>> print(f1) NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.MLP'> -- (hp) {'out_channels': 10} -- (activation) ReLU() -- , (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.MLP'> -- (hp) {'out_channels': 5} -- (activation) ReLU() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Softmax(dim=None) -- ] | MATRIX:[[0, 1, 0], [0, 0, 1], [0, 0, 0]] >>> 
print(f2) NODES: [ (combiner) add -- (name) <class 'dragon.search_space.bricks.basics.MLP'> -- (hp) {'out_channels': 10} -- (activation) ReLU() -- , (combiner) concat -- (name) <class 'dragon.search_space.bricks.basics.Identity'> -- (hp) {} -- (activation) Softmax(dim=None) -- ] | MATRIX:[[0, 1], [0, 0]]