This repository has been archived by the owner on Apr 27, 2023. It is now read-only.

Commit

Add demo notebook.
shyuep committed Jul 23, 2022
1 parent 8306299 commit 0d7f75e
Showing 1 changed file with 155 additions and 0 deletions.
notebooks/MEGNet_demo.ipynb (+155, -0)
@@ -0,0 +1,155 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "RUDLgYFjpgK2",
"outputId": "8c5b1c1f-edb2-4e2f-92dd-dfb550659eec"
},
"source": [
"# Introduction\n",
"\n",
"This notebook is written as part of the LLNL CCMS Summer Institute Seminar 2022."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"# !pip install megnet"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "kcBP6XS7phgR",
"outputId": "854a7e34-2b81-4d17-a41c-7e84bfdbc5fc"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['Eform_MP_2019', 'Eform_MP_2018', 'Efermi_MP_2019', 'Bandgap_classifier_MP_2018', 'Bandgap_MP_2018', 'logK_MP_2018', 'logG_MP_2018', 'logK_MP_2019', 'logG_MP_2019', 'QM9_omega1_2018', 'QM9_alpha_2018', 'QM9_H_2018', 'QM9_gap_2018', 'QM9_ZPVE_2018', 'QM9_HOMO_2018', 'QM9_R2_2018', 'QM9_U_2018', 'QM9_LUMO_2018', 'QM9_Cv_2018', 'QM9_mu_2018', 'QM9_U0_2018', 'QM9_G_2018']\n"
]
}
],
"source": [
"from megnet.utils.models import load_model, AVAILABLE_MODELS\n",
"from pymatgen.core import Structure, Lattice\n",
"from pymatgen.ext.matproj import MPRester\n",
"print(AVAILABLE_MODELS)"
]
},
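{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sketch: each pretrained model listed above can be loaded by name with `load_model`, and the returned model carries a `metadata` attribute that is used later to look up the unit of the predicted property. The `Eform_MP_2019` name is taken from the `AVAILABLE_MODELS` list printed above."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load a single pretrained model and inspect its metadata.\n",
"# \"Eform_MP_2019\" is one of the names printed by AVAILABLE_MODELS above.\n",
"eform_model = load_model(\"Eform_MP_2019\")\n",
"print(eform_model.metadata)"
]
},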
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"id": "l7Un9g1LrPuT"
},
"outputs": [],
"source": [
"mpr = MPRester()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"id": "z7dt0j1fpyNW"
},
"outputs": [],
"source": [
"# Mo: mp-129\n",
"# Li10GeP2S12: mp-696128\n",
"\n",
"structures = {}\n",
"\n",
"structures[\"Mo\"] = mpr.get_structure_by_material_id(\"mp-129\")\n",
"structures[\"LGPS\"] = mpr.get_structure_by_material_id(\"mp-696128\")"
]
},
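{
"cell_type": "markdown",
"metadata": {},
"source": [
"An optional sanity check: summarize the structures that were just retrieved, using standard `pymatgen` `Structure` attributes (`composition.reduced_formula` and the number of sites)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Print the reduced formula and number of sites for each retrieved structure.\n",
"for name, structure in structures.items():\n",
"    print(name, structure.composition.reduced_formula, len(structure), \"sites\")"
]
},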
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "rRPndHDArtdl",
"outputId": "aa6c40ba-7e6c-4405-d831-1d218af7791f"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1/1 [==============================] - 1s 1s/step\n",
"The predicted Eform for Mo is -0.003 eV/atom.\n",
"1/1 [==============================] - 1s 1s/step\n",
"The predicted Eform for LGPS is -1.278 eV/atom.\n",
"1/1 [==============================] - 1s 1s/step\n",
"The predicted Efermi for Mo is 8.401 eV.\n",
"1/1 [==============================] - 1s 1s/step\n",
"The predicted Efermi for LGPS is 1.467 eV.\n"
]
}
],
"source": [
"for model_name in AVAILABLE_MODELS:\n",
" if model_name.endswith(\"_2019\"):\n",
" model = load_model(model_name)\n",
" model.metadata\n",
" for name, structure in structures.items():\n",
" if model_name.startswith(\"log\"):\n",
" prediction = 10 ** model.predict_structure(structure).ravel()[0]\n",
" else:\n",
" prediction = model.predict_structure(structure).ravel()[0]\n",
" prop_name = model_name.split(\"_\")[0].removeprefix(\"log\")\n",
" print(f'The predicted {prop_name} for {name} is {prediction:.3f} {model.metadata[\"unit\"]}.')"
]
},
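{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a final optional sketch, a structure can also be built directly with the `Lattice` and `Structure` classes imported earlier, without querying the Materials Project. Here a bcc Mo cell is constructed with an assumed lattice parameter of roughly 3.17 angstroms and passed to the `Eform_MP_2019` model."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Build a conventional bcc Mo cell by hand (a ~ 3.17 angstroms is an assumed approximate value).\n",
"mo_bcc = Structure(Lattice.cubic(3.17), [\"Mo\", \"Mo\"], [[0, 0, 0], [0.5, 0.5, 0.5]])\n",
"\n",
"# Predict its formation energy with a single pretrained model.\n",
"eform_model = load_model(\"Eform_MP_2019\")\n",
"prediction = eform_model.predict_structure(mo_bcc).ravel()[0]\n",
"print(f'Predicted Eform for hand-built bcc Mo: {prediction:.3f} {eform_model.metadata[\"unit\"]}')"
]
},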
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"colab": {
"collapsed_sections": [],
"name": "MEGNet demo.ipynb",
"provenance": []
},
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.12"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
