# linkpred_cora_sage.yaml
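# Link prediction on the Cora dataset: a GraphSAGE node encoder paired with the
# "ele" edge scorer, trained with per-source negative sampling and a BCE loss.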
version: 0.0.2
pipeline_name: linkpred
pipeline_mode: train
device: cuda
data:
  name: cora
  split_ratio: [0.8, 0.1, 0.1]               # List of float, e.g. [0.8, 0.1, 0.1]. Split ratios for the training, validation and test sets; must sum to one. Leave blank to use the builtin split of the original dataset
  neg_ratio: 3                 # Int, e.g. 2. Number of negative samples drawn per positive sample. Leave blank to use the builtin split of the original dataset
node_model:
  name: sage
  embed_size: -1              # The dimension of the created embedding table. -1 means using the original node embedding
  hidden_size: 32             # Hidden size.
  num_layers: 2               # Number of hidden layers.
  activation: relu
  dropout: 0.5                # Dropout rate.
  aggregator_type: gcn        # Aggregator type to use (``mean``, ``gcn``, ``pool``, ``lstm``).
edge_model:
  name: ele
  hidden_size: 64             # Hidden size.
  num_layers: 2               # Number of hidden layers.
  bias: true                  # Whether to use bias in the linear layer.
neg_sampler:
  name: persource
  k: 3                        # The number of negative samples per edge.
general_pipeline:
  hidden_size: 256            # The intermediate hidden size between node model and edge model
  eval_batch_size: 32769      # Edge batch size when evaluating
  train_batch_size: 32769     # Edge batch size when training
  num_epochs: 200             # Number of training epochs
  eval_period: 5              # Interval epochs between evaluations
  optimizer:
    name: Adam
    lr: 0.005
  loss: BCELoss
  save_path: "results"        # Directory to save the experiment results
  num_runs: 1                 # Number of experiments to run
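
# Usage sketch (an assumption, not part of this config): files in this format are
# typically consumed by the DGL-Go command-line tool, e.g.
#   dgl train --cfg linkpred_cora_sage.yaml
# which runs the link-prediction training pipeline configured above.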