"git@developer.sourcefind.cn:OpenDAS/mmcv.git" did not exist on "31b8829de88c9144040ffca42a14441dfe9b7b34"
Unverified Commit 042c60fb authored by Yang Chen, committed by GitHub

[Enhancement] Output cache-file-related messages with verbose=True (#683)

This is a minor enhancement that outputs verbose messages indicating where
cache files are saved and loaded. These messages are useful for inspecting
the relevant intermediate files.
parent eb026b79
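
For context, the pattern this change introduces is a verbose-gated `logger.debug` call placed just before each cache file is written or read. The sketch below illustrates that pattern in isolation; the logger name, file name, and `save_params` helper are illustrative placeholders, not identifiers from this repository.

import logging
import os

logger = logging.getLogger("kernel_cache")  # hypothetical logger name, not the repo's

def save_params(cache_path: str, data: bytes, verbose: bool = False) -> None:
    # Stand-in for PARAMS_PATH; the real constant lives in the cache module.
    params_path = os.path.join(cache_path, "params.pkl")
    if verbose:
        # Emitted only when the caller opts in with verbose=True,
        # mirroring the checks added in _save_kernel_to_disk below.
        logger.debug(f"Saving kernel parameters to disk: {params_path}")
    with open(params_path, "wb") as f:
        f.write(data)

if __name__ == "__main__":
    # Debug messages are only visible if the logging level admits them.
    logging.basicConfig(level=logging.DEBUG)
    save_params("/tmp", b"", verbose=True)
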
@@ -149,14 +149,14 @@ class AutotuneResult:
     func: Optional[Callable] = None
     kernel: Optional[Callable] = None
 
-    def _save_kernel_to_disk(self, cache_path: Path, kernel: JITKernel):
+    def _save_kernel_to_disk(self, cache_path: Path, kernel: JITKernel, verbose: bool = False):
         """
         Persists a compiled kernel to disk cache.
 
         Args:
-            key (str): The hash key identifying the kernel.
+            cache_path (Path): The root path for the cache files.
             kernel (JITKernel): The compiled kernel to be saved.
-            func (Callable, optional): The original function.
+            verbose (bool): Enable verbose log messages.
 
         Note:
             Saves the following files:
@@ -170,6 +170,8 @@ class AutotuneResult:
         # Save kernel source code
         try:
             kernel_path = os.path.join(cache_path, KERNEL_PATH)
+            if verbose:
+                logger.debug(f"Saving kernel source code to file: {kernel_path}")
             if kernel.artifact.kernel_source is not None:
                 with open(kernel_path, "w") as f:
                     f.write(kernel.artifact.kernel_source)
@@ -179,6 +181,8 @@ class AutotuneResult:
         # Save wrapped kernel source code
         try:
             wrapped_kernel_path = os.path.join(cache_path, WRAPPED_KERNEL_PATH)
+            if verbose:
+                logger.debug(f"Saving wrapped kernel source code to file: {wrapped_kernel_path}")
             with open(wrapped_kernel_path, "w") as f:
                 f.write(kernel.get_kernel_source())
         except Exception as e:
@@ -188,6 +192,8 @@ class AutotuneResult:
         try:
             kernel_lib_path = os.path.join(cache_path, KERNEL_LIB_PATH)
             src_lib_path = kernel.adapter.libpath
+            if verbose:
+                logger.debug(f"Saving kernel library to file: {kernel_lib_path}")
             shutil.copy(src_lib_path, kernel_lib_path)
         except Exception as e:
             logger.error(f"Error saving kernel library to disk: {e}")
@@ -195,6 +201,8 @@ class AutotuneResult:
         # Save kernel parameters
         try:
             params_path = os.path.join(cache_path, PARAMS_PATH)
+            if verbose:
+                logger.debug(f"Saving kernel parameters to disk: {params_path}")
             with open(params_path, "wb") as f:
                 cloudpickle.dump(kernel.params, f)
         except Exception as e:
@@ -209,6 +217,7 @@ class AutotuneResult:
         execution_backend: Literal["dlpack", "ctypes", "cython"] = "cython",
         pass_configs: dict = None,
         func: Callable = None,
+        verbose: bool = False,
     ) -> JITKernel:
         """
         Loads a previously compiled kernel from disk cache.
@@ -221,6 +230,7 @@ class AutotuneResult:
             execution_backend (Literal): Backend type for execution. Defaults to "cython".
             pass_configs (dict, optional): Configuration for compiler passes.
            func (Callable, optional): The original function.
+            verbose (bool): Enable verbose log messages.
 
         Returns:
             JITKernel: The loaded kernel if found, None otherwise.
@@ -234,6 +244,8 @@ class AutotuneResult:
         try:
             wrapped_kernel_path = os.path.join(cache_path, WRAPPED_KERNEL_PATH)
+            if verbose:
+                logger.debug(f"Loading wrapped kernel source code from file: {wrapped_kernel_path}")
             with open(wrapped_kernel_path, "r") as f:
                 kernel_global_source = f.read()
         except Exception as e:
@@ -244,6 +256,8 @@ class AutotuneResult:
         # Load kernel parameters
         try:
             params_path = os.path.join(cache_path, PARAMS_PATH)
+            if verbose:
+                logger.debug(f"Loading kernel parameters from file: {params_path}")
             with open(params_path, "rb") as f:
                 kernel_params = cloudpickle.load(f)
         except Exception as e:
@@ -264,19 +278,25 @@ class AutotuneResult:
         else:
             return None
 
-    def save_to_disk(self, path: Path):
+    def save_to_disk(self, path: Path, verbose: bool = False):
         if not os.path.exists(path):
             os.makedirs(path)
 
         # save best config
+        if verbose:
+            logger.debug(f"Saving best config to file: {path / BEST_CONFIG_PATH}")
         with open(path / BEST_CONFIG_PATH, "w") as f:
             json.dump(self.config, f)
 
         # save function
+        if verbose:
+            logger.debug(f"Saving function to file: {path / FUNCTION_PATH}")
         with open(path / FUNCTION_PATH, "wb") as f:
             cloudpickle.dump(self.func, f)
 
         # save ref latency
+        if verbose:
+            logger.debug(f"Saving latency to file: {path / LATENCY_PATH}")
         with open(path / LATENCY_PATH, "w") as f:
             json.dump({
                 "latency": self.latency,
@@ -291,15 +311,22 @@ class AutotuneResult:
         if not os.path.exists(path):
             return None
 
+        verbose = compile_args.verbose
         # load best config
+        if verbose:
+            logger.debug(f"Loading best config from file: {path / BEST_CONFIG_PATH}")
         with open(path / BEST_CONFIG_PATH, "r") as f:
             config = json.load(f)
 
         # load function
+        if verbose:
+            logger.debug(f"Loading function from file: {path / FUNCTION_PATH}")
         with open(path / FUNCTION_PATH, "rb") as f:
             func = cloudpickle.load(f)
 
         # load latency
+        if verbose:
+            logger.debug(f"Loading latency from file: {path / LATENCY_PATH}")
         with open(path / LATENCY_PATH, "r") as f:
             latency = json.load(f)
         latency, ref_latency = latency["latency"], latency["ref_latency"]
......
@@ -257,7 +257,7 @@ class AutoTuner:
         return hashlib.sha256(key_string.encode()).hexdigest()
 
     def _save_result_to_disk(self, key, result: AutotuneResult):
-        result.save_to_disk(self.cache_dir / key)
+        result.save_to_disk(self.cache_dir / key, self.compile_args.verbose)
 
     def _load_result_from_disk(self, key) -> AutotuneResult:
         result = AutotuneResult.load_from_disk(self.cache_dir / key, self.compile_args)
......
@@ -165,7 +165,7 @@ class KernelCache:
             # Then check disk cache
             kernel = self._load_kernel_from_disk(key, target, target_host, out_idx,
-                                                 execution_backend, pass_configs, func)
+                                                 execution_backend, pass_configs, func, verbose)
             if kernel is not None:
                 if verbose:
                     self.logger.debug(
@@ -174,6 +174,8 @@ class KernelCache:
                 self._memory_cache[key] = kernel
                 return kernel
 
+        if verbose:
+            self.logger.debug(f"No cached kernel for {func.attrs['global_symbol']}")
         # Compile kernel if cache miss; leave critical section
         kernel = JITKernel(
             func,
@@ -189,7 +191,7 @@ class KernelCache:
         else:
             with self._lock:
                 if is_cache_enabled():
-                    self._save_kernel_to_disk(key, kernel, func)
+                    self._save_kernel_to_disk(key, kernel, func, verbose)
 
                 # Store in memory cache after compilation
                 self._memory_cache[key] = kernel
@@ -231,7 +233,11 @@ class KernelCache:
         # Use atomic POSIX replace, so other processes cannot see a partial write
         os.replace(temp_path, path)
 
-    def _save_kernel_to_disk(self, key: str, kernel: JITKernel, func: Callable = None):
+    def _save_kernel_to_disk(self,
+                             key: str,
+                             kernel: JITKernel,
+                             func: Callable = None,
+                             verbose: bool = False):
         """
         Persists a compiled kernel to disk cache.
 
@@ -239,6 +245,7 @@ class KernelCache:
             key (str): The hash key identifying the kernel.
             kernel (JITKernel): The compiled kernel to be saved.
             func (Callable, optional): The original function.
+            verbose (bool): Enable verbose log messages.
 
         Note:
             Saves the following files:
@@ -253,6 +260,8 @@ class KernelCache:
         # Save kernel source code
         try:
             kernel_path = os.path.join(cache_path, KERNEL_PATH)
+            if verbose:
+                self.logger.debug(f"Saving kernel source code to file: {kernel_path}")
             if kernel.artifact.kernel_source is not None:
                 KernelCache._safe_write_file(kernel_path, "w",
                                              lambda file: file.write(kernel.artifact.kernel_source))
@@ -262,6 +271,9 @@ class KernelCache:
         # Save wrapped kernel source code
         try:
             wrapped_kernel_path = os.path.join(cache_path, WRAPPED_KERNEL_PATH)
+            if verbose:
+                self.logger.debug(
+                    f"Saving wrapped kernel source code to file: {wrapped_kernel_path}")
             KernelCache._safe_write_file(
                 wrapped_kernel_path, "w",
                 lambda file: file.write(kernel.adapter.get_kernel_source()))
@@ -274,6 +286,8 @@ class KernelCache:
             kernel_lib_path = KERNEL_CUBIN_PATH if self.execution_backend == "nvrtc" else KERNEL_LIB_PATH
             kernel_lib_path = os.path.join(cache_path, kernel_lib_path)
             src_lib_path = kernel.adapter.libpath
+            if verbose:
+                self.logger.debug(f"Saving kernel library to file: {kernel_lib_path}")
             KernelCache._safe_write_file(
                 kernel_lib_path, "wb",
                 lambda file: file.write(KernelCache._load_binary(src_lib_path)))
@@ -282,6 +296,8 @@ class KernelCache:
             if self.execution_backend == "nvrtc":
                 kernel_py_path = os.path.join(cache_path, KERNEL_PY_PATH)
                 src_lib_path = src_lib_path.replace(".cubin", ".py")
+                if verbose:
+                    self.logger.debug(f"Saving kernel nvrtc python code to file: {kernel_py_path}")
                 KernelCache._safe_write_file(
                     kernel_py_path, "wb",
                     lambda file: file.write(KernelCache._load_binary(src_lib_path)))
@@ -291,6 +307,8 @@ class KernelCache:
         # Save kernel parameters
         try:
             params_path = os.path.join(cache_path, PARAMS_PATH)
+            if verbose:
+                self.logger.debug(f"Saving kernel parameters to disk: {params_path}")
             KernelCache._safe_write_file(params_path, "wb",
                                          lambda file: cloudpickle.dump(kernel.params, file))
         except Exception as e:
@@ -305,6 +323,7 @@ class KernelCache:
         execution_backend: Literal["dlpack", "ctypes", "cython", "nvrtc"] = "cython",
         pass_configs: dict = None,
         func: Callable = None,
+        verbose: bool = False,
    ) -> Optional[JITKernel]:
         """
         Loads a previously compiled kernel from disk cache.
@@ -317,6 +336,7 @@ class KernelCache:
             execution_backend (Literal): Backend type for execution. Defaults to "cython".
             pass_configs (dict, optional): Configuration for compiler passes.
             func (Callable, optional): The original function.
+            verbose (bool): Enable verbose log messages.
 
         Returns:
             JITKernel: The loaded kernel if found, None otherwise.
@@ -334,6 +354,9 @@ class KernelCache:
         # Load the kernel source file (optional)
         try:
+            if verbose:
+                self.logger.debug(
+                    f"Loading wrapped kernel source code from file: {wrapped_kernel_path}")
             with open(wrapped_kernel_path, "r") as f:
                 kernel_global_source = f.read()
         except Exception as e:
@@ -341,6 +364,8 @@ class KernelCache:
         # Load kernel parameters
         try:
+            if verbose:
+                self.logger.debug(f"Loading kernel parameters from file: {params_path}")
             with open(params_path, "rb") as f:
                 kernel_params = cloudpickle.load(f)
         except Exception as e:
......