Merge pull request #44 from the-database/dev
optimizations - pyvips, bf16, multithread, update deps
the-database authored Nov 28, 2024
2 parents b05080b + 3ae010d commit 6a461d0
Showing 10 changed files with 138 additions and 53 deletions.
3 changes: 3 additions & 0 deletions MangaJaNaiConverterGui/Services/IPythonService.cs
@@ -1,10 +1,13 @@
using Avalonia.Collections;
+ using System.Threading.Tasks;

namespace MangaJaNaiConverterGui.Services
{
public interface IPythonService
{
bool IsPythonInstalled();

+ Task<bool> IsPythonUpdated();
bool AreModelsInstalled();
string BackendDirectory { get; }
string PythonDirectory { get; }
76 changes: 67 additions & 9 deletions MangaJaNaiConverterGui/Services/PythonService.cs
@@ -10,6 +10,7 @@
using System.IO;
using System.Linq;
using System.Text;
+ using System.Threading.Tasks;

namespace MangaJaNaiConverterGui.Services
{
@@ -26,9 +27,9 @@ public class PythonService : IPythonService
"win32",
new PythonDownload
{
Url = "https://github.com/indygreg/python-build-standalone/releases/download/20240415/cpython-3.11.9+20240415-x86_64-pc-windows-msvc-shared-install_only.tar.gz",
Url = "https://github.com/indygreg/python-build-standalone/releases/download/20241016/cpython-3.12.7+20241016-x86_64-pc-windows-msvc-shared-install_only.tar.gz",
Path = "python/python.exe",
Version = "3.11.9",
Version = "3.12.7",
Filename = "Python.tar.gz"
}
},
@@ -46,6 +47,62 @@ public PythonService(IUpdateManagerService? updateManagerService = null)
public string PythonPath => Path.GetFullPath(Path.Join(PythonDirectory, PYTHON_DOWNLOADS["win32"].Path));

public bool IsPythonInstalled() => File.Exists(PythonPath);
+
+ public async Task<bool> IsPythonUpdated()
+ {
+ var relPythonPath = @".\python\python\python.exe";
+
+ var cmd = $@"{relPythonPath} -V";
+
+ // Create a new process to run the CMD command
+ using (var process = new Process())
+ {
+ process.StartInfo.FileName = "cmd.exe";
+ process.StartInfo.Arguments = @$"/C {cmd}";
+ process.StartInfo.RedirectStandardOutput = true;
+ process.StartInfo.RedirectStandardError = true;
+ process.StartInfo.UseShellExecute = false;
+ process.StartInfo.CreateNoWindow = true;
+ process.StartInfo.StandardOutputEncoding = Encoding.UTF8;
+ process.StartInfo.StandardErrorEncoding = Encoding.UTF8;
+ process.StartInfo.WorkingDirectory = BackendDirectory;
+
+ Version? result = null;
+
+ // Create a StreamWriter to write the output to a log file
+ try
+ {
+ process.ErrorDataReceived += (sender, e) =>
+ {
+ if (!string.IsNullOrEmpty(e.Data))
+ {
+ // ignore
+ }
+ };
+
+ process.OutputDataReceived += (sender, e) =>
+ {
+ if (!string.IsNullOrEmpty(e.Data))
+ {
+ result = new Version(e.Data.Replace("Python ", ""));
+ }
+ };
+
+ process.Start();
+ process.BeginOutputReadLine();
+ process.BeginErrorReadLine(); // Start asynchronous reading of the output
+ await process.WaitForExitAsync();
+ }
+ catch (IOException) { }
+
+ if (result == null || result.CompareTo(new Version(PYTHON_DOWNLOADS["win32"].Version)) < 0)
+ {
+ return false;
+ }
+ }
+
+ return true;
+ }
public bool AreModelsInstalled() => Directory.Exists(ModelsDirectory) && Directory.GetFiles(ModelsDirectory).Length > 0;

public class PythonDownload
@@ -120,8 +177,8 @@ public void ExtractZip(string archivePath, string outFolder, ProgressChanged pro

public void AddPythonPth(string destFolder)
{
string[] lines = { "python311.zip", "DLLs", "Lib", ".", "Lib/site-packages" };
var filename = "python311._pth";
string[] lines = { "python312.zip", "DLLs", "Lib", ".", "Lib/site-packages" };
var filename = "python312._pth";

using var outputFile = new StreamWriter(Path.Combine(destFolder, filename));

@@ -134,21 +191,22 @@ public string InstallUpdatePythonDependenciesCommand
get
{
string[] dependencies = {
"spandrel==0.3.4",
"spandrel_extra_arches==0.1.1",
"spandrel==0.4.0",
"spandrel_extra_arches==0.2.0",
"opencv-python==4.10.0.84",
"pillow-avif-plugin==1.4.6",
"rarfile==4.2",
"multiprocess==0.70.16",
"chainner_ext==0.3.10",
"sanic==24.6.0",
"pynvml==11.5.3",
"psutil==6.0.0"
"psutil==6.1.0",
"pyvips==2.2.3",
"pyvips-binary==8.16.0"
};

var relPythonPath = @".\python\python\python.exe";

return $@"{relPythonPath} -m pip install torch==2.1.2 torchvision==0.16.2 --index-url https://download.pytorch.org/whl/cu121 && {relPythonPath} -m pip install {string.Join(" ", dependencies)}";
return $@"{relPythonPath} -m pip install torch==2.5.1 torchvision --index-url https://download.pytorch.org/whl/cu124 && {relPythonPath} -m pip install {string.Join(" ", dependencies)}";
}
}

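The dependency list above adds pyvips and pyvips-binary (a wheel that ships a prebuilt libvips), which the commit title ties to the image I/O optimizations; the backend change that presumably switches to pyvips is not among the diffs loaded on this page. As a rough, hypothetical sketch only (file names and options invented for illustration), pyvips-based loading and saving around the upscaler might look roughly like this:

# Illustrative only: not taken from this commit's diffs.
import numpy as np
import pyvips

# Decode with libvips; sequential access streams the file rather than
# loading it eagerly.
page = pyvips.Image.new_from_file("page_001.png", access="sequential")

# Hand the pixels to the PyTorch pipeline as a NumPy array
# (height x width x bands, uint8).
arr = np.asarray(page)

# ... run the upscaling model on `arr` here ...

# Wrap the result back into a vips image and encode it.
out = pyvips.Image.new_from_array(arr)
out.write_to_file("page_001_upscaled.png")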
2 changes: 1 addition & 1 deletion MangaJaNaiConverterGui/ViewModels/MainWindowViewModel.cs
@@ -1259,7 +1259,7 @@ await Task.Run(async () =>
await DownloadModels();
}

- if (!_pythonService.IsPythonInstalled())
+ if (!_pythonService.IsPythonInstalled() || !(await _pythonService.IsPythonUpdated()))
{
// Download Python tgz
BackendSetupMainStatus = "Downloading Python...";
2 changes: 1 addition & 1 deletion MangaJaNaiConverterGui/backend/src/gpu.py
@@ -132,4 +132,4 @@ def _get_nvidia_info() -> NvInfo:
nvidia = _get_nvidia_info()


__all__ = ["nvidia", "NvInfo", "NvDevice", "MemoryUsage"]
__all__ = ["MemoryUsage", "NvDevice", "NvInfo", "nvidia"]
(additional changed file; file name not captured in this view)
@@ -4,13 +4,13 @@

import numpy as np
import torch
from nodes.utils.utils import get_h_w_c
from spandrel import ImageModelDescriptor

from api import Progress

from ..upscale.auto_split import Split, Tiler, auto_split
from .utils import safe_cuda_cache_empty
from nodes.utils.utils import get_h_w_c


def _into_standard_image_form(t: torch.Tensor) -> torch.Tensor:
@@ -64,7 +64,9 @@ def _into_tensor(
except Exception:
# Some arrays cannot be made writeable, and we need to copy them
img = np.copy(img)
- input_tensor = torch.from_numpy(img).to(device, dtype)
+ input_tensor = (
+ torch.from_numpy(img).pin_memory().to(device, dtype, non_blocking=True)
+ )
return input_tensor
finally:
img.flags.writeable = writeable
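The `_into_tensor` change above stages the NumPy image in pinned (page-locked) host memory and passes `non_blocking=True`, so the host-to-GPU copy can run asynchronously instead of blocking the Python thread. A minimal self-contained sketch of the same pattern, with an arbitrary image size standing in for the real input:

import numpy as np
import torch

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
img = np.random.rand(1080, 1920, 3).astype(np.float32)  # placeholder image

tensor = torch.from_numpy(img)
if device.type == "cuda":
    # Pinned host memory is required for a truly asynchronous H2D copy.
    tensor = tensor.pin_memory()
# non_blocking only helps when the source is pinned; otherwise it degrades
# to an ordinary synchronous copy.
input_tensor = tensor.to(device, torch.float16, non_blocking=True)

The gain depends on being able to overlap the copy with GPU compute; for a single small image the difference may be negligible.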
@@ -79,9 +81,16 @@ def pytorch_auto_split(
tiler: Tiler,
progress: Progress,
) -> np.ndarray:
- dtype = torch.float16 if use_fp16 else torch.float32
+ dtype = torch.float32
+ if use_fp16:
+ if model.supports_half:
+ dtype = torch.float16
+ elif torch.cuda.is_bf16_supported():
+ dtype = torch.bfloat16
+ # print("dtype", dtype, use_fp16, flush=True)
if model.dtype != dtype or model.device != device:
- model = model.to(device, dtype)
+ # print("move model", flush=True)
+ model = model.to(device, dtype, memory_format=torch.channels_last)

def upscale(img: np.ndarray, _: object):
progress.check_aborted()
@@ -102,16 +111,25 @@ def upscale(img: np.ndarray, _: object):
else:
input_tensor = _rgb_to_bgr(input_tensor)
input_tensor = _into_batched_form(input_tensor)
+ input_tensor = input_tensor.to(
+ memory_format=torch.channels_last
+ ) # TODO refactor
# inference
- output_tensor = model(input_tensor)
+ with torch.autocast(device_type="cuda", dtype=dtype, enabled=True):
+ output_tensor = model(input_tensor)

# convert back to numpy
output_tensor = _into_standard_image_form(output_tensor)
if input_channels == 1:
output_tensor = output_tensor[:, :, 0].unsqueeze(-1)
else:
output_tensor = _rgb_to_bgr(output_tensor)
- result = output_tensor.detach().cpu().detach().float().numpy()
+ print("out dtype", output_tensor.dtype, flush=True)
+ # result = output_tensor.detach().cpu().detach().float().numpy()
+ result = output_tensor.detach().cpu().detach()
+ if result.dtype == torch.bfloat16:
+ result = result.float()
+ result = result.numpy()

return result
except RuntimeError as e:
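Together, the hunks above are the bf16 part of the commit title: use fp16 when the architecture supports half precision, otherwise fall back to bfloat16 on GPUs that support it, move the model to channels-last memory format, and run inference under torch.autocast, casting back to float32 before converting to NumPy (bfloat16 tensors cannot be converted directly). A condensed sketch of that flow, with a stand-in convolution instead of the project's spandrel model descriptor:

import torch
from torch import nn

def pick_dtype(use_fp16: bool, supports_half: bool) -> torch.dtype:
    # fp16 if the model supports it, else bf16 if the GPU does, else fp32.
    if use_fp16:
        if supports_half:
            return torch.float16
        if torch.cuda.is_available() and torch.cuda.is_bf16_supported():
            return torch.bfloat16
    return torch.float32

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
dtype = pick_dtype(use_fp16=True, supports_half=False)

model = nn.Conv2d(3, 3, 3, padding=1)  # stand-in for the upscaling model
model = model.to(device, dtype, memory_format=torch.channels_last).eval()

x = torch.rand(1, 3, 64, 64, device=device, dtype=dtype)
x = x.to(memory_format=torch.channels_last)

with torch.inference_mode(), torch.autocast(
    device_type=device.type, dtype=dtype, enabled=device.type == "cuda"
):
    y = model(x)

# bfloat16 has no NumPy equivalent, so cast to float32 before .numpy().
result = y.detach().cpu().float().numpy()

bfloat16 keeps float32's exponent range, which is typically why it is a safer fallback than fp16 for architectures flagged as not supporting half precision.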
(additional changed file; file name not captured in this view)
@@ -4,9 +4,9 @@

import numpy as np
from nodes.impl.image_op import ImageOp
- from nodes.impl.image_utils import BorderType, create_border
+ from nodes.impl.image_utils import BorderType
from nodes.impl.resize import ResizeFilter, resize
- from nodes.utils.utils import Padding, get_h_w_c
+ from nodes.utils.utils import get_h_w_c

from .convenient_upscale import convenient_upscale

(additional changed file; file name not captured in this view)
@@ -1,5 +1,4 @@
import navi
- from nodes.impl.upscale.basic_upscale import PaddingType

from ...impl.color.convert_data import (
color_spaces,
(additional changed file; file name not captured in this view)
@@ -3,6 +3,7 @@
import os
from pathlib import Path

+ import torch
from nodes.properties.inputs import PthFileInput
from nodes.properties.outputs import DirectoryOutput, FileNameOutput, ModelOutput
from nodes.utils.utils import split_file_path
@@ -86,9 +87,17 @@ def load_model_node(
v.requires_grad = False
model_descriptor.model.eval()
model_descriptor = model_descriptor.to(pytorch_device)
- should_use_fp16 = exec_options.use_fp16 and model_descriptor.supports_half
- if should_use_fp16:
- model_descriptor.model.half()
+ # if should_use_fp16:
+ # model_descriptor.model.half()
+ # else:
+ # model_descriptor.model.float()
+ if exec_options.use_fp16:
+ if model_descriptor.supports_half:
+ model_descriptor.model.half()
+ elif torch.cuda.is_bf16_supported():
+ model_descriptor.model.bfloat16()
+ else:
+ model_descriptor.model.float()
else:
model_descriptor.model.float()
except Exception as e:
(additional changed file; file name not captured in this view)
@@ -16,7 +16,7 @@
estimate_tile_size,
parse_tile_size_input,
)
- from nodes.impl.upscale.basic_upscale import PaddingType, UpscaleInfo, basic_upscale
+ from nodes.impl.upscale.basic_upscale import UpscaleInfo, basic_upscale
from nodes.impl.upscale.tiler import MaxTileSize
from nodes.properties.inputs import (
BoolInput,
@@ -49,7 +49,7 @@ def upscale(
logger.debug("Upscaling image")

# TODO: use bfloat16 if RTX
- use_fp16 = options.use_fp16 and model.supports_half
+ use_fp16 = options.use_fp16 # and model.supports_half
device = options.device

if model.tiling == ModelTiling.INTERNAL:
(1 more changed file was not loaded in this view)
