MetaLink code release #46

Open · wants to merge 1 commit into master
3 changes: 3 additions & 0 deletions graphgym/config.py
@@ -94,6 +94,9 @@ def set_cfg(cfg):
# Number of dataset splits: train/val/test
cfg.share.num_splits = 1

# Number of tasks in the targets. For multi-label tasks, this number is > 1
cfg.share.num_task = 1

# ----------------------------------------------------------------------- #
# Dataset options
# ----------------------------------------------------------------------- #
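For context on `cfg.share.num_task`: a data loader would typically fill it from the label matrix. A minimal sketch, assuming a dataset object whose `y` holds labels of shape [num_graphs, num_tasks] (both names are illustrative, not part of this diff):

# Illustrative only: infer the number of prediction targets from the labels.
# For single-task data y is 1-D, so num_task stays at the default of 1.
cfg.share.num_task = dataset.y.shape[1] if dataset.y.ndim > 1 else 1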
4 changes: 3 additions & 1 deletion graphgym/contrib/__init__.py
@@ -6,9 +6,11 @@
from .layer import * # noqa
from .loader import * # noqa
from .loss import * # noqa
from .network import * # noqa
from .optimizer import * # noqa
from .pooling import * # noqa
from .stage import * # noqa
from .train import * # noqa
from .transform import * # noqa

# import at the end (network may depend on the modules above)
from .network import * # noqa
101 changes: 101 additions & 0 deletions graphgym/contrib/config/metalink.py
@@ -0,0 +1,101 @@
from yacs.config import CfgNode as CN

from graphgym.register import register_config


def set_cfg_metalink(cfg):
r'''
This function sets the default config values for the MetaLink options
:return: customized configuration used by the experiment.
'''

# ----------------------------------------------------------------------- #
# MetaLink KG options
# ----------------------------------------------------------------------- #

cfg.kg = CN()

cfg.kg.kg_mode = True

cfg.kg.dim_emb = 64

# type of prediction head: direct, mp, bilinear
cfg.kg.head = 'direct'

# how to decode predictions
cfg.kg.decode = 'dot'

cfg.kg.layer_type = 'kgheteconv'

# normalize embedding
cfg.kg.norm_emb = False

# whether to fine-tune after training
cfg.kg.fine_tune = False

# number of KG message-passing layers
cfg.kg.layers_mp = 0

# kg aggregation
cfg.kg.agg = 'mean'

# kg message direction
cfg.kg.msg_direction = 'single'

# kg gate function
cfg.kg.gate_self = True
cfg.kg.gate_msg = True

# kg self transform
cfg.kg.self_trans = True

# self message passing mode ('none' disables it)
cfg.kg.self_msg = 'none'

cfg.kg.has_act = True # not sure

cfg.kg.has_bn = False # picked

cfg.kg.hete = True # picked

# last, every
cfg.kg.pred = 'every' # picked

# no, every, last
cfg.kg.has_l2norm = 'no' # picked, every if needed

# positioning l2 norm: pre, post
cfg.kg.pos_l2norm = 'post'

cfg.kg.gate_bias = False

# raw, loss, both
cfg.kg.edge_feature = 'raw'

# pertask, standard
cfg.kg.split = 'pertask'

# new, standard
cfg.kg.experiment = 'new'

# standard, relation
cfg.kg.setting = 'standard'

# Whether to do meta inductive learning
cfg.kg.meta = False

# cfg.kg.meta_num = None

cfg.kg.meta_ratio = 0.2

# whether to add auxiliary targets (logP, QED)
cfg.kg.aux = True

# fraction of edges to keep
cfg.kg.setting_ratio = 0.5

# number of repeats for evaluation
cfg.kg.repeat = 1


register_config('metalink', set_cfg_metalink)
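To illustrate how these options attach to a config, here is a minimal sketch that calls the registered function directly; in a full GraphGym run the registered config functions are applied while building the global cfg, so the direct call and the override values below are illustrative only:

from yacs.config import CfgNode as CN
from graphgym.contrib.config.metalink import set_cfg_metalink

# Attach the MetaLink option group to a bare config node.
cfg = CN()
set_cfg_metalink(cfg)

# Override a few defaults for a hypothetical run; names mirror the file above.
cfg.kg.layers_mp = 2
cfg.kg.head = 'mp'
print(cfg.kg.dim_emb)  # -> 64, the default set above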