Skip to content

Commit

Permalink
Merge pull request #167 from Aydinhamedi:Alpha-b
Browse files Browse the repository at this point in the history
	modified:   BETA_E_Model_T&T.ipynb
  • Loading branch information
Aydinhamedi authored Feb 21, 2024
2 parents dd4fb1e + 440038e commit 5edecac
Show file tree
Hide file tree
Showing 8 changed files with 7,868 additions and 491 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/dependency-review.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
# Source repository: https://github.com/actions/dependency-review-action
# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
name: 'Dependency Review'
on: pull_request
on: [pull_request, push]

permissions:
contents: read
Expand Down
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -89,4 +89,5 @@ Samples/*
/scc.exe
/SCC_Auto.cmd
/Microsoft.PowerShell_profile_Nvidia_smi.ps1
/Data/image_SUB_generator.pkl
/Data/image_SUB_generator.pkl
/GPU_Info.txt
5,373 changes: 5,076 additions & 297 deletions BETA_E_Model_T&T.ipynb

Large diffs are not rendered by default.

Binary file added Data/image_SUB_generator.pkl
Binary file not shown.
2,910 changes: 2,726 additions & 184 deletions Model_T&T.ipynb

Large diffs are not rendered by default.

25 changes: 25 additions & 0 deletions Utils/Other.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,35 @@
from Utils.print_color_V2_NEW import print_Color_V2
from Utils.print_color_V1_OLD import print_Color
from tabulate import tabulate
from numba import cuda
import numpy as np
import pickle
import gzip

def GPU_memUsage(Print=True):
    """Report CUDA GPU memory usage.

    Iterates over every CUDA device visible to numba and, for each one,
    queries its memory info from the device's current context.

    Args:
        Print (bool): If True, print a colored usage line per GPU via
            ``print_Color`` and return ``None``.
            If False, return the memory info of the *first* GPU only
            (the loop exits on the first iteration).

    Returns:
        tuple | None: ``(free, total)`` in bytes for the first GPU when
        ``Print`` is False; otherwise ``None``.
    """
    gpus = cuda.gpus.lst
    for gpu in gpus:
        with gpu:
            meminfo = cuda.current_context().get_memory_info()
            if Print:
                # used = total - free; all values shown in GiB (1024**3 bytes).
                # NOTE: field label fixed from 'total,' to 'total:' to match
                # the 'free:'/'used:' labels.
                print_Color(
                    f'~*(GPU-MEM)~*--{gpu}--[free: {meminfo.free / (1024 ** 3):.2f}GB, used: {(meminfo.total - meminfo.free) / (1024 ** 3):.2f}GB, total: {meminfo.total / (1024 ** 3):.2f}GB]',
                    ['green', 'cyan'],
                    advanced_mode=True)
            else:
                return meminfo.free, meminfo.total

def save_list(history, filename, compress=True):
"""Saves a list to a file.
Expand Down
45 changes: 37 additions & 8 deletions env/Test_ENV3.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 2,
"metadata": {},
"outputs": [
{
Expand Down Expand Up @@ -108,24 +108,53 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[0m\u001b[0m\u001b[0;33mImproved model accuracy from\u001b[0m\u001b[0;32m 0.458953\u001b[0m\u001b[0;33m to \u001b[0m\u001b[0;32m 0.458953\u001b[0m\u001b[0;33m. \u001b[0m\u001b[0;96mSaving model.\u001b[0m\n"
"\u001b[0m\u001b[0m\u001b[0;32m(GPU-MEM)\u001b[0m\u001b[0;36m--<Managed Device 0>--[free: 22.77GB, used: 1.23GB, total, 24.00GB]\u001b[0m\n",
"Realising all memory...\n",
"\u001b[0m\u001b[0m\u001b[0;32m(GPU-MEM)\u001b[0m\u001b[0;36m--<Managed Device 0>--[free: 22.77GB, used: 1.23GB, total, 24.00GB]\u001b[0m\n",
"done.\n"
]
}
],
"source": [
"# Copyright (c) 2024 Aydin Hamedi\n",
"# \n",
"# This software is released under the MIT License.\n",
"# https://opensource.org/licenses/MIT\n",
"from numba import cuda\n",
" \n",
"def GPU_memUsage(Print=True):\n",
" \"\"\"Prints GPU memory usage for each GPU.\n",
"\n",
"print_Color_V2(f'<yellow>Improved model accuracy from<green>{0.4589532546:10f}<yellow> to <green>{0.4589532546:10f}<yellow>. <light_cyan>Saving model.')"
" Args:\n",
" Print (bool): Whether to print the memory usage. \n",
" If True, prints the memory usage. \n",
" If False, returns the free and total memory as a tuple.\n",
"\n",
" Returns:\n",
" If Print is False, returns a tuple (free, total) with the free \n",
" and total memory in bytes for the GPU.\n",
" \"\"\"\n",
" gpus = cuda.gpus.lst\n",
" for gpu in gpus:\n",
" with gpu:\n",
" meminfo = cuda.current_context().get_memory_info()\n",
" if Print:\n",
" print_Color(\n",
" f'~*(GPU-MEM)~*--{gpu}--[free: {meminfo.free / (1024 ** 3):.2f}GB, used: {meminfo.total / (1024 ** 3) - meminfo.free / (1024 ** 3):.2f}GB, total, {meminfo.total / (1024 ** 3):.2f}GB]',\n",
" ['green', 'cyan'],\n",
" advanced_mode=True)\n",
" else:\n",
" return meminfo.free, meminfo.total\n",
" \n",
"device = cuda.get_current_device()\n",
"GPU_memUsage()\n",
"print('Realising all memory...')\n",
"device.reset()\n",
"GPU_memUsage()\n",
"print('done.')"
]
}
],
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -28,3 +28,4 @@ tensorflow==2.10.1
tensorflow-addons==0.22.0
tensorflow-model-optimization==0.7.5
tqdm==4.66.1
numba==0.59.0

0 comments on commit 5edecac

Please sign in to comment.