From: Joel Rosdahl
Date: Tue, 20 Oct 2020 18:49:50 +0000 (+0200)
Subject: Detect errors in log strings at compile time
X-Git-Tag: v4.1~46
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=60904e7b571642ec67ec750edfde724f735a0d2e;p=thirdparty%2Fccache.git

Detect errors in log strings at compile time

fmtlib can detect format string errors at compile time if (1) FMT_STRING is applied to the format string literal and (2) the code is compiled as C++14 or higher.

Requirement 1 is implemented by introducing a LOG macro which applies FMT_STRING to its first argument and calls Logging::log (if logging is enabled). Also added are a companion LOG_RAW macro (needed since C++11 requires at least one argument for the “...” part of a variadic macro) and a BULK_LOG macro which calls Logging::bulk_log (if logging is enabled).

Requirement 2 is implemented by setting CMAKE_CXX_STANDARD to 14 for one CI build that uses a known C++14-capable compiler. We can’t set it to 14 by default since we still want the code to be buildable with C++11 compilers.

This will catch errors such as the one fixed by PR #691.
---
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 5bd2fec05..b60ab4e0f 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -58,14 +58,14 @@ jobs: fail-fast: false matrix: config: - - name: Linux GCC debug + in source + tracing + - name: Linux GCC debug + C++14 + in source + tracing os: ubuntu-18.04 CC: gcc CXX: g++ ENABLE_CACHE_CLEANUP_TESTS: 1 BUILDDIR: . CCACHE_LOC: . - CMAKE_PARAMS: -DCMAKE_BUILD_TYPE=Debug -DENABLE_TRACING=1 + CMAKE_PARAMS: -DCMAKE_BUILD_TYPE=Debug -DENABLE_TRACING=1 -DCMAKE_CXX_STANDARD=14 apt_get: elfutils libzstd1-dev - name: Linux GCC 32-bit diff --git a/CMakeLists.txt b/CMakeLists.txt index ebecf441a..14ff96905 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -3,7 +3,9 @@ cmake_minimum_required(VERSION 3.4.3) project(ccache LANGUAGES C CXX ASM) set(CMAKE_PROJECT_DESCRIPTION "a fast C/C++ compiler cache") -set(CMAKE_CXX_STANDARD 11) +if(NOT "${CMAKE_CXX_STANDARD}") + set(CMAKE_CXX_STANDARD 11) +endif() set(CMAKE_CXX_STANDARD_REQUIRED YES) set(CMAKE_CXX_EXTENSIONS NO) diff --git a/cmake/StandardWarnings.cmake b/cmake/StandardWarnings.cmake index dbb45f16d..350773767 100644 --- a/cmake/StandardWarnings.cmake +++ b/cmake/StandardWarnings.cmake @@ -95,8 +95,10 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") -Wno-global-constructors -Wno-implicit-fallthrough -Wno-padded + -Wno-shadow # Warnings in fmtlib -Wno-shorten-64-to-32 -Wno-sign-conversion + -Wno-signed-enum-bitfield # Warnings in fmtlib -Wno-weak-vtables -Wno-old-style-cast) diff --git a/src/.clang-tidy b/src/.clang-tidy index c24085009..0f3fa7534 100644 --- a/src/.clang-tidy +++ b/src/.clang-tidy @@ -65,13 +65,13 @@ CheckOptions: - key: readability-function-size.LineThreshold value: 700 - key: readability-function-size.StatementThreshold - value: 500 + value: 999999 - key: readability-function-size.BranchThreshold value: 170 - key: readability-function-size.ParameterThreshold value: 6 - key: readability-function-size.NestingThreshold - value: 6 + value: 999999 - key: readability-function-size.VariableThreshold value: 80 ... 
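As background for the change above: the compile-time check that FMT_STRING enables can be demonstrated with a small stand-alone program. The sketch below is illustrative only and is not part of the commit. It assumes fmtlib is installed as <fmt/format.h> (ccache bundles it under third_party/fmt) and it prints to stderr instead of calling ccache's Logging::log, but the LOG shape mirrors the macro introduced in src/Logging.hpp further down.

// Minimal sketch of the FMT_STRING-based checking described above.
// Assumptions: fmtlib available as <fmt/format.h>; built with -std=c++14 or
// newer. The real LOG macro forwards the formatted text to Logging::log and
// is a no-op when logging is disabled; this stand-in just writes to stderr.
#include <fmt/format.h>

#include <cstdio>

#define LOG(format_, ...) \
  std::fprintf(stderr, "%s\n", \
               fmt::format(FMT_STRING(format_), __VA_ARGS__).c_str())

int main()
{
  const char* path = "ccache.conf";

  // OK: two placeholders, two matching arguments.
  LOG("Failed to open {}: {}", path, "No such file or directory");

  // With FMT_STRING under C++14 these no longer compile; without FMT_STRING
  // they would only fail at runtime with fmt::format_error:
  //   LOG("Failed to open {}: {}", path);  // more placeholders than arguments
  //   LOG("Failed to open {:d}", path);    // {:d} does not accept a string
  return 0;
}

Note that, as the commit message explains, this LOG shape requires at least one argument after the format string in C++11/14, which is why the commit also adds a separate LOG_RAW macro for messages without placeholders.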
diff --git a/src/Context.cpp b/src/Context.cpp index bb639462e..7706b7d94 100644 --- a/src/Context.cpp +++ b/src/Context.cpp @@ -28,7 +28,6 @@ #include #include -using Logging::log; using nonstd::string_view; Context::Context() @@ -83,7 +82,7 @@ Context::set_ignore_options(const std::vector& options) if (n_wildcards == 0 || (n_wildcards == 1 && option.back() == '*')) { m_ignore_options.push_back(option); } else { - log("Skipping malformed ignore_options item: {}", option); + LOG("Skipping malformed ignore_options item: {}", option); continue; } } diff --git a/src/Hash.cpp b/src/Hash.cpp index bb186b70e..3db2b54c3 100644 --- a/src/Hash.cpp +++ b/src/Hash.cpp @@ -21,7 +21,6 @@ #include "Fd.hpp" #include "Logging.hpp" -using Logging::log; using nonstd::string_view; const string_view HASH_DELIMITER("\000cCaChE\000", 8); @@ -114,7 +113,7 @@ Hash::hash_file(const std::string& path) { Fd fd(open(path.c_str(), O_RDONLY | O_BINARY)); if (!fd) { - log("Failed to open {}: {}", path, strerror(errno)); + LOG("Failed to open {}: {}", path, strerror(errno)); return false; } diff --git a/src/InodeCache.cpp b/src/InodeCache.cpp index eeef9ac88..a0f6f76c5 100644 --- a/src/InodeCache.cpp +++ b/src/InodeCache.cpp @@ -32,8 +32,6 @@ #include #include -using Logging::log; - // The inode cache resides on a file that is mapped into shared memory by // running processes. It is implemented as a two level structure, where the top // level is a hash table consisting of buckets. Each bucket contains entries @@ -132,12 +130,12 @@ InodeCache::mmap_file(const std::string& inode_cache_file) } Fd fd(open(inode_cache_file.c_str(), O_RDWR)); if (!fd) { - log("Failed to open inode cache {}: {}", inode_cache_file, strerror(errno)); + LOG("Failed to open inode cache {}: {}", inode_cache_file, strerror(errno)); return false; } bool is_nfs; if (Util::is_nfs_fd(*fd, &is_nfs) == 0 && is_nfs) { - log( + LOG( "Inode cache not supported because the cache file is located on nfs: {}", inode_cache_file); return false; @@ -146,13 +144,13 @@ InodeCache::mmap_file(const std::string& inode_cache_file) nullptr, sizeof(SharedRegion), PROT_READ | PROT_WRITE, MAP_SHARED, *fd, 0)); fd.close(); if (sr == reinterpret_cast(-1)) { - log("Failed to mmap {}: {}", inode_cache_file, strerror(errno)); + LOG("Failed to mmap {}: {}", inode_cache_file, strerror(errno)); return false; } // Drop the file from disk if the found version is not matching. This will // allow a new file to be generated. 
if (sr->version != k_version) { - log( + LOG( "Dropping inode cache because found version {} does not match expected" " version {}", sr->version, @@ -163,7 +161,7 @@ InodeCache::mmap_file(const std::string& inode_cache_file) } m_sr = sr; if (m_config.debug()) { - log("inode cache file loaded: {}", inode_cache_file); + LOG("inode cache file loaded: {}", inode_cache_file); } return true; } @@ -175,7 +173,7 @@ InodeCache::hash_inode(const std::string& path, { Stat stat = Stat::stat(path); if (!stat) { - log("Could not stat {}: {}", path, strerror(stat.error_number())); + LOG("Could not stat {}: {}", path, strerror(stat.error_number())); return false; } @@ -215,18 +213,18 @@ InodeCache::acquire_bucket(uint32_t index) } err = pthread_mutex_consistent(&bucket->mt); if (err) { - log( + LOG( "Can't consolidate stale mutex at index {}: {}", index, strerror(err)); - log("Consider removing the inode cache file if the problem persists"); + LOG_RAW("Consider removing the inode cache file if the problem persists"); return nullptr; } - log("Wiping bucket at index {} because of stale mutex", index); + LOG("Wiping bucket at index {} because of stale mutex", index); memset(bucket->entries, 0, sizeof(Bucket::entries)); } else { #endif if (err) { - log("Failed to lock mutex at index {}: {}", index, strerror(err)); - log("Consider removing the inode cache file if problem persists"); + LOG("Failed to lock mutex at index {}: {}", index, strerror(err)); + LOG_RAW("Consider removing the inode cache file if problem persists"); ++m_sr->errors; return nullptr; } @@ -253,7 +251,7 @@ InodeCache::release_bucket(Bucket* bucket) bool InodeCache::create_new_file(const std::string& filename) { - log("Creating a new inode cache"); + LOG_RAW("Creating a new inode cache"); // Create the new file to a temporary name to prevent other processes from // mapping it before it is fully initialized. @@ -263,7 +261,7 @@ InodeCache::create_new_file(const std::string& filename) bool is_nfs; if (Util::is_nfs_fd(*tmp_file.fd, &is_nfs) == 0 && is_nfs) { - log( + LOG( "Inode cache not supported because the cache file would be located on" " nfs: {}", filename); @@ -271,7 +269,7 @@ InodeCache::create_new_file(const std::string& filename) } int err = Util::fallocate(*tmp_file.fd, sizeof(SharedRegion)); if (err) { - log("Failed to allocate file space for inode cache: {}", strerror(err)); + LOG("Failed to allocate file space for inode cache: {}", strerror(err)); return false; } SharedRegion* sr = @@ -282,7 +280,7 @@ InodeCache::create_new_file(const std::string& filename) *tmp_file.fd, 0)); if (sr == reinterpret_cast(-1)) { - log("Failed to mmap new inode cache: {}", strerror(errno)); + LOG("Failed to mmap new inode cache: {}", strerror(errno)); return false; } @@ -307,7 +305,7 @@ InodeCache::create_new_file(const std::string& filename) // which will make us use the first created file even if we didn't win the // race. if (link(tmp_file.path.c_str(), filename.c_str()) != 0) { - log("Failed to link new inode cache: {}", strerror(errno)); + LOG("Failed to link new inode cache: {}", strerror(errno)); return false; } @@ -397,7 +395,7 @@ InodeCache::get(const std::string& path, } release_bucket(bucket); - log("inode cache {}: {}", found ? "hit" : "miss", path); + LOG("inode cache {}: {}", found ? 
"hit" : "miss", path); if (m_config.debug()) { if (found) { @@ -405,7 +403,7 @@ InodeCache::get(const std::string& path, } else { ++m_sr->misses; } - log("accumulated stats for inode cache: hits={}, misses={}, errors={}", + LOG("accumulated stats for inode cache: hits={}, misses={}, errors={}", m_sr->hits.load(), m_sr->misses.load(), m_sr->errors.load()); @@ -444,7 +442,7 @@ InodeCache::put(const std::string& path, release_bucket(bucket); - log("inode cache insert: {}", path); + LOG("inode cache insert: {}", path); return true; } diff --git a/src/Lockfile.cpp b/src/Lockfile.cpp index cd7290008..e37c8dc71 100644 --- a/src/Lockfile.cpp +++ b/src/Lockfile.cpp @@ -31,8 +31,6 @@ #include #include -using Logging::log; - namespace { #ifndef _WIN32 @@ -59,7 +57,7 @@ do_acquire_posix(const std::string& lockfile, uint32_t staleness_limit) } int saved_errno = errno; - log("lockfile_acquire: symlink {}: {}", lockfile, strerror(saved_errno)); + LOG("lockfile_acquire: symlink {}: {}", lockfile, strerror(saved_errno)); if (saved_errno == ENOENT) { // Directory doesn't exist? if (Util::create_dir(Util::dir_name(lockfile))) { @@ -86,41 +84,41 @@ do_acquire_posix(const std::string& lockfile, uint32_t staleness_limit) // acquiring it. continue; } else { - log("lockfile_acquire: readlink {}: {}", lockfile, strerror(errno)); + LOG("lockfile_acquire: readlink {}: {}", lockfile, strerror(errno)); return false; } } if (content == my_content) { // Lost NFS reply? - log("lockfile_acquire: symlink {} failed but we got the lock anyway", + LOG("lockfile_acquire: symlink {} failed but we got the lock anyway", lockfile); return true; } // A possible improvement here would be to check if the process holding the // lock is still alive and break the lock early if it isn't. - log("lockfile_acquire: lock info for {}: {}", lockfile, content); + LOG("lockfile_acquire: lock info for {}: {}", lockfile, content); if (initial_content.empty()) { initial_content = content; } if (slept <= staleness_limit) { - log("lockfile_acquire: failed to acquire {}; sleeping {} microseconds", + LOG("lockfile_acquire: failed to acquire {}; sleeping {} microseconds", lockfile, to_sleep); usleep(to_sleep); slept += to_sleep; to_sleep = std::min(max_to_sleep, 2 * to_sleep); } else if (content != initial_content) { - log("lockfile_acquire: gave up acquiring {}", lockfile); + LOG("lockfile_acquire: gave up acquiring {}", lockfile); return false; } else { // The lock seems to be stale -- break it and try again. 
- log("lockfile_acquire: breaking {}", lockfile); + LOG("lockfile_acquire: breaking {}", lockfile); if (!Util::unlink_tmp(lockfile)) { - log("Failed to unlink {}: {}", lockfile, strerror(errno)); + LOG("Failed to unlink {}: {}", lockfile, strerror(errno)); return false; } to_sleep = 1000; @@ -155,7 +153,7 @@ do_acquire_win32(const std::string& lockfile, uint32_t staleness_limit) } DWORD error = GetLastError(); - log("lockfile_acquire: CreateFile {}: {} ({})", + LOG("lockfile_acquire: CreateFile {}: {} ({})", lockfile, Win32Util::error_message(error), error); @@ -175,11 +173,11 @@ do_acquire_win32(const std::string& lockfile, uint32_t staleness_limit) } if (slept > staleness_limit) { - log("lockfile_acquire: gave up acquiring {}", lockfile); + LOG("lockfile_acquire: gave up acquiring {}", lockfile); break; } - log("lockfile_acquire: failed to acquire {}; sleeping {} microseconds", + LOG("lockfile_acquire: failed to acquire {}; sleeping {} microseconds", lockfile, to_sleep); usleep(to_sleep); @@ -203,19 +201,19 @@ Lockfile::Lockfile(const std::string& path, uint32_t staleness_limit) m_handle = do_acquire_win32(m_lockfile, staleness_limit); #endif if (acquired()) { - log("Acquired lock {}", m_lockfile); + LOG("Acquired lock {}", m_lockfile); } else { - log("Failed to acquire lock {}", m_lockfile); + LOG("Failed to acquire lock {}", m_lockfile); } } Lockfile::~Lockfile() { if (acquired()) { - log("Releasing lock {}", m_lockfile); + LOG("Releasing lock {}", m_lockfile); #ifndef _WIN32 if (!Util::unlink_tmp(m_lockfile)) { - log("Failed to unlink {}: {}", m_lockfile, strerror(errno)); + LOG("Failed to unlink {}: {}", m_lockfile, strerror(errno)); } #else CloseHandle(m_handle); diff --git a/src/Logging.cpp b/src/Logging.cpp index e2057b21e..cf8058447 100644 --- a/src/Logging.cpp +++ b/src/Logging.cpp @@ -182,7 +182,7 @@ dump_log(const std::string& path) if (file) { (void)fwrite(debug_log_buffer.data(), debug_log_buffer.length(), 1, *file); } else { - log("Failed to open {}: {}", path, strerror(errno)); + LOG("Failed to open {}: {}", path, strerror(errno)); } } diff --git a/src/Logging.hpp b/src/Logging.hpp index 278812bd5..38553f25f 100644 --- a/src/Logging.hpp +++ b/src/Logging.hpp @@ -23,12 +23,36 @@ #include "FormatNonstdStringView.hpp" #include "third_party/fmt/core.h" +#include "third_party/fmt/format.h" #include "third_party/nonstd/optional.hpp" #include "third_party/nonstd/string_view.hpp" #include #include +// Log a raw message (plus a newline character). +#define LOG_RAW(message_) \ + do { \ + if (Logging::enabled()) { \ + Logging::log(nonstd::string_view(message_)); \ + } \ + } while (false) + +// Log a message (plus a newline character) described by a format string with at +// least one placeholder. `format` is compile-time checked if CMAKE_CXX_STANDARD +// >= 14. +#define LOG(format_, ...) LOG_RAW(fmt::format(FMT_STRING(format_), __VA_ARGS__)) + +// Log a message (plus a newline character) described by a format string with at +// least one placeholder without flushing and with a reused timestamp. `format` +// is compile-time checked if CMAKE_CXX_STANDARD >= 14. +#define BULK_LOG(format_, ...) \ + do { \ + if (Logging::enabled()) { \ + Logging::bulk_log(fmt::format(FMT_STRING(format_), __VA_ARGS__)); \ + } \ + } while (false) + class Config; namespace Logging { @@ -50,28 +74,4 @@ void bulk_log(nonstd::string_view message); // Write the current log memory buffer `path`. void dump_log(const std::string& path); -// Log a message (plus a newline character). 
`args` are forwarded to -// `fmt::format`. -template -inline void -log(T&&... args) -{ - if (!enabled()) { - return; - } - log(nonstd::string_view(fmt::format(std::forward(args)...))); -} - -// Log a message (plus a newline character) without flushing and with a reused -// timestamp. `args` are forwarded to `fmt::format`. -template -inline void -bulk_log(T&&... args) -{ - if (!enabled()) { - return; - } - bulk_log(nonstd::string_view(fmt::format(std::forward(args)...))); -} - } // namespace Logging diff --git a/src/Manifest.cpp b/src/Manifest.cpp index 64ceae197..14ba23bbb 100644 --- a/src/Manifest.cpp +++ b/src/Manifest.cpp @@ -107,7 +107,6 @@ // 1: Introduced in ccache 3.0. (Files are always compressed with gzip.) // 2: Introduced in ccache 4.0. -using Logging::log; using nonstd::nullopt; using nonstd::optional; @@ -427,24 +426,24 @@ verify_result(const Context& ctx, || ctx.guessed_compiler == GuessedCompiler::unknown) && ctx.args_info.output_is_precompiled_header && !ctx.args_info.fno_pch_timestamp && fi.mtime != fs.mtime) { - log("Precompiled header includes {}, which has a new mtime", path); + LOG("Precompiled header includes {}, which has a new mtime", path); return false; } if (ctx.config.sloppiness() & SLOPPY_FILE_STAT_MATCHES) { if (!(ctx.config.sloppiness() & SLOPPY_FILE_STAT_MATCHES_CTIME)) { if (fi.mtime == fs.mtime && fi.ctime == fs.ctime) { - log("mtime/ctime hit for {}", path); + LOG("mtime/ctime hit for {}", path); continue; } else { - log("mtime/ctime miss for {}", path); + LOG("mtime/ctime miss for {}", path); } } else { if (fi.mtime == fs.mtime) { - log("mtime hit for {}", path); + LOG("mtime hit for {}", path); continue; } else { - log("mtime miss for {}", path); + LOG("mtime miss for {}", path); } } } @@ -454,7 +453,7 @@ verify_result(const Context& ctx, Hash hash; int ret = hash_source_code_file(ctx, hash, path, fs.size); if (ret & HASH_SOURCE_CODE_ERROR) { - log("Failed hashing {}", path); + LOG("Failed hashing {}", path); return false; } if (ret & HASH_SOURCE_CODE_FOUND_TIME) { @@ -492,11 +491,11 @@ get(const Context& ctx, const std::string& path) // Update modification timestamp to save files from LRU cleanup. Util::update_mtime(path); } else { - log("No such manifest file"); + LOG_RAW("No such manifest file"); return nullopt; } } catch (const Error& e) { - log("Error: {}", e.what()); + LOG("Error: {}", e.what()); return nullopt; } @@ -537,7 +536,7 @@ put(const Config& config, mf = std::make_unique(); } } catch (const Error& e) { - log("Error: {}", e.what()); + LOG("Error: {}", e.what()); // Manifest file was corrupt, ignore. mf = std::make_unique(); } @@ -553,14 +552,14 @@ put(const Config& config, // A good way of solving this would be to maintain the result entries in // LRU order and discarding the old ones. An easy way is to throw away all // entries when there are too many. Let's do that for now. - log("More than {} entries in manifest file; discarding", + LOG("More than {} entries in manifest file; discarding", k_max_manifest_entries); mf = std::make_unique(); } else if (mf->file_infos.size() > k_max_manifest_file_info_entries) { // Rarely, FileInfo entries can grow large in pathological cases where // many included files change, but the main file does not. This also puts // an upper bound on the number of FileInfo entries. 
- log("More than {} FileInfo entries in manifest file; discarding", + LOG("More than {} FileInfo entries in manifest file; discarding", k_max_manifest_file_info_entries); mf = std::make_unique(); } @@ -572,7 +571,7 @@ put(const Config& config, write_manifest(config, path, *mf); return true; } catch (const Error& e) { - log("Error: {}", e.what()); + LOG("Error: {}", e.what()); return false; } } diff --git a/src/Result.cpp b/src/Result.cpp index 73a2680c6..09d704c0b 100644 --- a/src/Result.cpp +++ b/src/Result.cpp @@ -89,7 +89,6 @@ // // 1: Introduced in ccache 4.0. -using Logging::log; using nonstd::nullopt; using nonstd::optional; using nonstd::string_view; @@ -204,7 +203,7 @@ Result::Reader::Reader(const std::string& result_path) optional Result::Reader::read(Consumer& consumer) { - log("Reading result {}", m_result_path); + LOG("Reading result {}", m_result_path); try { if (read_result(consumer)) { @@ -350,12 +349,12 @@ Writer::do_finalize() for (const auto& pair : m_entries_to_write) { const auto file_type = pair.first; const auto& path = pair.second; - log("Storing result {}", path); + LOG("Storing result {}", path); const bool store_raw = should_store_raw_file(m_ctx.config, file_type); uint64_t file_size = Stat::stat(path, Stat::OnError::throw_error).size(); - log("Storing {} file #{} {} ({} bytes) from {}", + LOG("Storing {} file #{} {} ({} bytes) from {}", store_raw ? "raw" : "embedded", entry_number, file_type_to_string(file_type), diff --git a/src/ResultRetriever.cpp b/src/ResultRetriever.cpp index 263f4948b..04deaaf58 100644 --- a/src/ResultRetriever.cpp +++ b/src/ResultRetriever.cpp @@ -21,7 +21,6 @@ #include "Context.hpp" #include "Logging.hpp" -using Logging::log; using Result::FileType; ResultRetriever::ResultRetriever(Context& ctx, bool rewrite_dependency_target) @@ -93,11 +92,11 @@ ResultRetriever::on_entry_start(uint32_t entry_number, } if (dest_path.empty()) { - log("Not copying"); + LOG_RAW("Not copying"); } else if (dest_path == "/dev/null") { - log("Not copying to /dev/null"); + LOG_RAW("Not copying to /dev/null"); } else { - log("Retrieving {} file #{} {} ({} bytes)", + LOG("Retrieving {} file #{} {} ({} bytes)", raw_file ? "raw" : "embedded", entry_number, Result::file_type_to_string(file_type), @@ -110,7 +109,7 @@ ResultRetriever::on_entry_start(uint32_t entry_number, // if hard-linked, to make the object file newer than the source file). Util::update_mtime(*raw_file); } else { - log("Copying to {}", dest_path); + LOG("Copying to {}", dest_path); m_dest_fd = Fd( open(dest_path.c_str(), O_WRONLY | O_CREAT | O_TRUNC | O_BINARY, 0666)); if (!m_dest_fd) { diff --git a/src/Stat.cpp b/src/Stat.cpp index 80805bb9d..ece4cc4c0 100644 --- a/src/Stat.cpp +++ b/src/Stat.cpp @@ -20,8 +20,6 @@ #include "Logging.hpp" -using Logging::log; - Stat::Stat(StatFunction stat_function, const std::string& path, Stat::OnError on_error) @@ -35,7 +33,7 @@ Stat::Stat(StatFunction stat_function, throw Error("failed to stat {}: {}", path, strerror(errno)); } if (on_error == OnError::log) { - log("Failed to stat {}: {}", path, strerror(errno)); + LOG("Failed to stat {}: {}", path, strerror(errno)); } // The file is missing, so just zero fill the stat structure. 
This will diff --git a/src/Statistics.cpp b/src/Statistics.cpp index 0861de63b..b8dccf873 100644 --- a/src/Statistics.cpp +++ b/src/Statistics.cpp @@ -25,15 +25,10 @@ #include "Util.hpp" #include "exceptions.hpp" -using Logging::log; -using nonstd::nullopt; -using nonstd::optional; - const unsigned FLAG_NOZERO = 1; // don't zero with the -z option const unsigned FLAG_ALWAYS = 2; // always show, even if zero const unsigned FLAG_NEVER = 4; // never show -using Logging::log; using nonstd::nullopt; using nonstd::optional; @@ -225,7 +220,7 @@ update(const std::string& path, { Lockfile lock(path); if (!lock.acquired()) { - log("failed to acquire lock for {}", path); + LOG("Failed to acquire lock for {}", path); return nullopt; } @@ -242,7 +237,7 @@ update(const std::string& path, // Make failure to write a stats file a soft error since it's not // important enough to fail whole the process and also because it is // called in the Context destructor. - log("Error: {}", e.what()); + LOG("Error: {}", e.what()); } return counters; diff --git a/src/Util.cpp b/src/Util.cpp index d95806d18..cd2966643 100644 --- a/src/Util.cpp +++ b/src/Util.cpp @@ -78,7 +78,6 @@ extern "C" { # endif #endif -using Logging::log; using nonstd::nullopt; using nonstd::optional; using nonstd::string_view; @@ -278,31 +277,31 @@ clone_hard_link_or_copy_file(const Context& ctx, { if (ctx.config.file_clone()) { #ifdef FILE_CLONING_SUPPORTED - log("Cloning {} to {}", source, dest); + LOG("Cloning {} to {}", source, dest); try { clone_file(source, dest, via_tmp_file); return; } catch (Error& e) { - log("Failed to clone: {}", e.what()); + LOG("Failed to clone: {}", e.what()); } #else - log("Not cloning {} to {} since it's unsupported", source, dest); + LOG("Not cloning {} to {} since it's unsupported", source, dest); #endif } if (ctx.config.hard_link()) { unlink(dest.c_str()); - log("Hard linking {} to {}", source, dest); + LOG("Hard linking {} to {}", source, dest); int ret = link(source.c_str(), dest.c_str()); if (ret == 0) { if (chmod(dest.c_str(), 0444) != 0) { - log("Failed to chmod: {}", strerror(errno)); + LOG("Failed to chmod: {}", strerror(errno)); } return; } - log("Failed to hard link: {}", strerror(errno)); + LOG("Failed to hard link: {}", strerror(errno)); } - log("Copying {} to {}", source, dest); + LOG("Copying {} to {}", source, dest); copy_file(source, dest, via_tmp_file); } @@ -1143,7 +1142,7 @@ read_file(const std::string& path, size_t size_hint) } if (ret == -1) { - log("Failed reading {}", path); + LOG("Failed reading {}", path); throw Error(strerror(errno)); } @@ -1459,9 +1458,9 @@ unlink_safe(const std::string& path, UnlinkLog unlink_log) } } if (success || unlink_log == UnlinkLog::log_failure) { - log("Unlink {} via {}", path, tmp_name); + LOG("Unlink {} via {}", path, tmp_name); if (!success) { - log("Unlink failed: {}", strerror(saved_errno)); + LOG("Unlink failed: {}", strerror(saved_errno)); } } @@ -1478,9 +1477,9 @@ unlink_tmp(const std::string& path, UnlinkLog unlink_log) unlink(path.c_str()) == 0 || (errno == ENOENT || errno == ESTALE); saved_errno = errno; if (success || unlink_log == UnlinkLog::log_failure) { - log("Unlink {}", path); + LOG("Unlink {}", path); if (!success) { - log("Unlink failed: {}", strerror(saved_errno)); + LOG("Unlink failed: {}", strerror(saved_errno)); } } diff --git a/src/Util.hpp b/src/Util.hpp index b3ab9e9bc..d367b9dcd 100644 --- a/src/Util.hpp +++ b/src/Util.hpp @@ -22,6 +22,8 @@ #include "CacheFile.hpp" +#include "third_party/fmt/core.h" +#include 
"third_party/fmt/format.h" #include "third_party/nonstd/optional.hpp" #include "third_party/nonstd/string_view.hpp" diff --git a/src/ZstdCompressor.cpp b/src/ZstdCompressor.cpp index 8b2c518b9..a53cfa2ad 100644 --- a/src/ZstdCompressor.cpp +++ b/src/ZstdCompressor.cpp @@ -24,21 +24,19 @@ #include -using Logging::log; - ZstdCompressor::ZstdCompressor(FILE* stream, int8_t compression_level) : m_stream(stream), m_zstd_stream(ZSTD_createCStream()) { if (compression_level == 0) { compression_level = default_compression_level; - log("Using default compression level {}", compression_level); + LOG("Using default compression level {}", compression_level); } // libzstd 1.3.4 and newer support negative levels. However, the query // function ZSTD_minCLevel did not appear until 1.3.6, so perform detection // based on version instead. if (ZSTD_versionNumber() < 10304 && compression_level < 1) { - log( + LOG( "Using compression level 1 (minimum level supported by libzstd) instead" " of {}", compression_level); @@ -47,7 +45,7 @@ ZstdCompressor::ZstdCompressor(FILE* stream, int8_t compression_level) m_compression_level = std::min(compression_level, ZSTD_maxCLevel()); if (m_compression_level != compression_level) { - log("Using compression level {} (max libzstd level) instead of {}", + LOG("Using compression level {} (max libzstd level) instead of {}", m_compression_level, compression_level); } diff --git a/src/argprocessing.cpp b/src/argprocessing.cpp index bda801dfb..ccb7d8e86 100644 --- a/src/argprocessing.cpp +++ b/src/argprocessing.cpp @@ -27,7 +27,6 @@ #include -using Logging::log; using nonstd::nullopt; using nonstd::optional; using nonstd::string_view; @@ -105,14 +104,14 @@ detect_pch(Context& ctx, std::string pch_file; if (option == "-include-pch" || option == "-include-pth") { if (Stat::stat(arg)) { - log("Detected use of precompiled header: {}", arg); + LOG("Detected use of precompiled header: {}", arg); pch_file = arg; } } else if (!is_cc1_option) { for (const auto& extension : {".gch", ".pch", ".pth"}) { std::string path = arg + extension; if (Stat::stat(path)) { - log("Detected use of precompiled header: {}", path); + LOG("Detected use of precompiled header: {}", path); pch_file = path; } } @@ -120,7 +119,7 @@ detect_pch(Context& ctx, if (!pch_file.empty()) { if (!ctx.included_pch_file.empty()) { - log("Multiple precompiled headers used: {} and {}", + LOG("Multiple precompiled headers used: {} and {}", ctx.included_pch_file, pch_file); return false; @@ -177,13 +176,13 @@ process_profiling_option(Context& ctx, const std::string& arg) new_profile_use = true; new_profile_path = arg.substr(arg.find('=') + 1); } else { - log("Unknown profiling option: {}", arg); + LOG("Unknown profiling option: {}", arg); return false; } if (new_profile_use) { if (ctx.args_info.profile_use) { - log("Multiple profiling options not supported"); + LOG_RAW("Multiple profiling options not supported"); return false; } ctx.args_info.profile_use = true; @@ -191,12 +190,12 @@ process_profiling_option(Context& ctx, const std::string& arg) if (!new_profile_path.empty()) { ctx.args_info.profile_path = new_profile_path; - log("Set profile directory to {}", ctx.args_info.profile_path); + LOG("Set profile directory to {}", ctx.args_info.profile_path); } if (ctx.args_info.profile_generate && ctx.args_info.profile_use) { // Too hard to figure out what the compiler will do. 
- log("Both generating and using profile info, giving up"); + LOG_RAW("Both generating and using profile info, giving up"); return false; } @@ -228,7 +227,7 @@ process_arg(Context& ctx, if (args[i] == "--ccache-skip") { i++; if (i == args.size()) { - log("--ccache-skip lacks an argument"); + LOG_RAW("--ccache-skip lacks an argument"); return Statistic::bad_compiler_arguments; } state.common_args.push_back(args[i]); @@ -249,7 +248,7 @@ process_arg(Context& ctx, } auto file_args = Args::from_gcc_atfile(argpath); if (!file_args) { - log("Couldn't read arg file {}", argpath); + LOG("Couldn't read arg file {}", argpath); return Statistic::bad_compiler_arguments; } @@ -262,7 +261,7 @@ process_arg(Context& ctx, if (ctx.guessed_compiler == GuessedCompiler::nvcc && (args[i] == "-optf" || args[i] == "--options-file")) { if (i == args.size() - 1) { - log("Expected argument after {}", args[i]); + LOG("Expected argument after {}", args[i]); return Statistic::bad_compiler_arguments; } ++i; @@ -272,7 +271,7 @@ process_arg(Context& ctx, for (auto it = paths.rbegin(); it != paths.rend(); ++it) { auto file_args = Args::from_gcc_atfile(*it); if (!file_args) { - log("Couldn't read CUDA options file {}", *it); + LOG("Couldn't read CUDA options file {}", *it); return Statistic::bad_compiler_arguments; } @@ -285,19 +284,19 @@ process_arg(Context& ctx, // These are always too hard. if (compopt_too_hard(args[i]) || Util::starts_with(args[i], "-fdump-") || Util::starts_with(args[i], "-MJ")) { - log("Compiler option {} is unsupported", args[i]); + LOG("Compiler option {} is unsupported", args[i]); return Statistic::unsupported_compiler_option; } // These are too hard in direct mode. if (config.direct_mode() && compopt_too_hard_for_direct_mode(args[i])) { - log("Unsupported compiler option for direct mode: {}", args[i]); + LOG("Unsupported compiler option for direct mode: {}", args[i]); config.set_direct_mode(false); } // -Xarch_* options are too hard. if (Util::starts_with(args[i], "-Xarch_")) { - log("Unsupported compiler option: {}", args[i]); + LOG("Unsupported compiler option: {}", args[i]); return Statistic::unsupported_compiler_option; } @@ -335,7 +334,7 @@ process_arg(Context& ctx, || (ctx.guessed_compiler == GuessedCompiler::nvcc && args[i] == "-Werror")) { if (i == args.size() - 1) { - log("Missing argument to {}", args[i]); + LOG("Missing argument to {}", args[i]); return Statistic::bad_compiler_arguments; } state.compiler_only_args.push_back(args[i + 1]); @@ -358,11 +357,11 @@ process_arg(Context& ctx, // flag. if (args[i] == "-fmodules") { if (!config.depend_mode() || !config.direct_mode()) { - log("Compiler option {} is unsupported without direct depend mode", + LOG("Compiler option {} is unsupported without direct depend mode", args[i]); return Statistic::could_not_use_modules; } else if (!(config.sloppiness() & SLOPPY_MODULES)) { - log( + LOG_RAW( "You have to specify \"modules\" sloppiness when using" " -fmodules to get hits"); return Statistic::could_not_use_modules; @@ -403,7 +402,7 @@ process_arg(Context& ctx, // input file and strip all -x options from the arguments. if (args[i].length() == 2) { if (i == args.size() - 1) { - log("Missing argument to {}", args[i]); + LOG("Missing argument to {}", args[i]); return Statistic::bad_compiler_arguments; } if (args_info.input_file.empty()) { @@ -423,7 +422,7 @@ process_arg(Context& ctx, // We need to work out where the output was meant to go. 
if (args[i] == "-o") { if (i == args.size() - 1) { - log("Missing argument to {}", args[i]); + LOG("Missing argument to {}", args[i]); return Statistic::bad_compiler_arguments; } args_info.output_obj = Util::make_relative_path(ctx, args[i + 1]); @@ -498,7 +497,7 @@ process_arg(Context& ctx, if (separate_argument) { // -MF arg if (i == args.size() - 1) { - log("Missing argument to {}", args[i]); + LOG("Missing argument to {}", args[i]); return Statistic::bad_compiler_arguments; } dep_file = args[i + 1]; @@ -524,7 +523,7 @@ process_arg(Context& ctx, if (args[i].size() == 3) { // -MQ arg or -MT arg if (i == args.size() - 1) { - log("Missing argument to {}", args[i]); + LOG("Missing argument to {}", args[i]); return Statistic::bad_compiler_arguments; } state.dep_args.push_back(args[i]); @@ -593,7 +592,7 @@ process_arg(Context& ctx, // Alternate form of specifying sysroot without = if (args[i] == "--sysroot") { if (i == args.size() - 1) { - log("Missing argument to {}", args[i]); + LOG("Missing argument to {}", args[i]); return Statistic::bad_compiler_arguments; } state.common_args.push_back(args[i]); @@ -606,7 +605,7 @@ process_arg(Context& ctx, // Alternate form of specifying target without = if (args[i] == "-target") { if (i == args.size() - 1) { - log("Missing argument to {}", args[i]); + LOG("Missing argument to {}", args[i]); return Statistic::bad_compiler_arguments; } state.common_args.push_back(args[i]); @@ -621,7 +620,7 @@ process_arg(Context& ctx, // -P removes preprocessor information in such a way that the object file // from compiling the preprocessed file will not be equal to the object // file produced when compiling without ccache. - log("Too hard option -Wp,-P detected"); + LOG_RAW("Too hard option -Wp,-P detected"); return Statistic::unsupported_compiler_option; } else if (Util::starts_with(args[i], "-Wp,-MD,") && args[i].find(',', 8) == std::string::npos) { @@ -656,7 +655,7 @@ process_arg(Context& ctx, } else if (config.direct_mode()) { // -Wp, can be used to pass too hard options to the preprocessor. // Hence, disable direct mode. - log("Unsupported compiler option for direct mode: {}", args[i]); + LOG("Unsupported compiler option for direct mode: {}", args[i]); config.set_direct_mode(false); } @@ -678,7 +677,7 @@ process_arg(Context& ctx, if (args[i] == "--serialize-diagnostics") { if (i == args.size() - 1) { - log("Missing argument to {}", args[i]); + LOG("Missing argument to {}", args[i]); return Statistic::bad_compiler_arguments; } args_info.generating_diagnostics = true; @@ -733,7 +732,7 @@ process_arg(Context& ctx, // among multiple users. i++; if (i <= args.size() - 1) { - log("Skipping argument -index-store-path {}", args[i]); + LOG("Skipping argument -index-store-path {}", args[i]); } return nullopt; } @@ -743,7 +742,7 @@ process_arg(Context& ctx, // output produced by the compiler will be normalized. if (compopt_takes_path(args[i])) { if (i == args.size() - 1) { - log("Missing argument to {}", args[i]); + LOG("Missing argument to {}", args[i]); return Statistic::bad_compiler_arguments; } @@ -795,7 +794,7 @@ process_arg(Context& ctx, // Options that take an argument. 
if (compopt_takes_arg(args[i])) { if (i == args.size() - 1) { - log("Missing argument to {}", args[i]); + LOG("Missing argument to {}", args[i]); return Statistic::bad_compiler_arguments; } @@ -830,7 +829,7 @@ process_arg(Context& ctx, if (args[i] != "/dev/null") { auto st = Stat::stat(args[i]); if (!st || !st.is_regular()) { - log("{} is not a regular file, not considering as input file", args[i]); + LOG("{} is not a regular file, not considering as input file", args[i]); state.common_args.push_back(args[i]); return nullopt; } @@ -838,17 +837,17 @@ process_arg(Context& ctx, if (!args_info.input_file.empty()) { if (!language_for_file(args[i]).empty()) { - log("Multiple input files: {} and {}", args_info.input_file, args[i]); + LOG("Multiple input files: {} and {}", args_info.input_file, args[i]); return Statistic::multiple_source_files; } else if (!state.found_c_opt && !state.found_dc_opt) { - log("Called for link with {}", args[i]); + LOG("Called for link with {}", args[i]); if (args[i].find("conftest.") != std::string::npos) { return Statistic::autoconf_test; } else { return Statistic::called_for_link; } } else { - log("Unsupported source extension: {}", args[i]); + LOG("Unsupported source extension: {}", args[i]); return Statistic::unsupported_source_language; } } @@ -948,24 +947,24 @@ process_args(Context& ctx) } if (state.generating_debuginfo_level_3 && !config.run_second_cpp()) { - log("Generating debug info level 3; not compiling preprocessed code"); + LOG_RAW("Generating debug info level 3; not compiling preprocessed code"); config.set_run_second_cpp(true); } handle_dependency_environment_variables(ctx, state); if (args_info.input_file.empty()) { - log("No input file found"); + LOG_RAW("No input file found"); return Statistic::no_input_file; } if (state.found_pch || state.found_fpch_preprocess) { args_info.using_precompiled_header = true; if (!(config.sloppiness() & SLOPPY_TIME_MACROS)) { - log( + LOG_RAW( "You have to specify \"time_macros\" sloppiness when using" " precompiled headers to get direct hits"); - log("Disabling direct mode"); + LOG_RAW("Disabling direct mode"); return Statistic::could_not_use_precompiled_header; } } @@ -980,7 +979,7 @@ process_args(Context& ctx) state.file_language = language_for_file(args_info.input_file); if (!state.explicit_language.empty()) { if (!language_is_supported(state.explicit_language)) { - log("Unsupported language: {}", state.explicit_language); + LOG("Unsupported language: {}", state.explicit_language); return Statistic::unsupported_source_language; } args_info.actual_language = state.explicit_language; @@ -994,7 +993,7 @@ process_args(Context& ctx) if (args_info.output_is_precompiled_header && !(config.sloppiness() & SLOPPY_PCH_DEFINES)) { - log( + LOG_RAW( "You have to specify \"pch_defines,time_macros\" sloppiness when" " creating precompiled headers"); return Statistic::could_not_use_precompiled_header; @@ -1004,7 +1003,7 @@ process_args(Context& ctx) if (args_info.output_is_precompiled_header) { state.common_args.push_back("-c"); } else { - log("No -c option found"); + LOG_RAW("No -c option found"); // Having a separate statistic for autoconf tests is useful, as they are // the dominant form of "called for link" in many cases. 
return args_info.input_file.find("conftest.") != std::string::npos @@ -1014,12 +1013,12 @@ process_args(Context& ctx) } if (args_info.actual_language.empty()) { - log("Unsupported source extension: {}", args_info.input_file); + LOG("Unsupported source extension: {}", args_info.input_file); return Statistic::unsupported_source_language; } if (!config.run_second_cpp() && args_info.actual_language == "cu") { - log("Using CUDA compiler; not compiling preprocessed code"); + LOG_RAW("Using CUDA compiler; not compiling preprocessed code"); config.set_run_second_cpp(true); } @@ -1027,7 +1026,7 @@ process_args(Context& ctx) if (args_info.output_is_precompiled_header && !config.run_second_cpp()) { // It doesn't work to create the .gch from preprocessed source. - log("Creating precompiled header; not compiling preprocessed code"); + LOG_RAW("Creating precompiled header; not compiling preprocessed code"); config.set_run_second_cpp(true); } @@ -1038,7 +1037,7 @@ process_args(Context& ctx) // Don't try to second guess the compilers heuristics for stdout handling. if (args_info.output_obj == "-") { - log("Output file is -"); + LOG_RAW("Output file is -"); return Statistic::output_to_stdout; } @@ -1055,7 +1054,7 @@ process_args(Context& ctx) if (args_info.seen_split_dwarf) { size_t pos = args_info.output_obj.rfind('.'); if (pos == std::string::npos || pos == args_info.output_obj.size() - 1) { - log("Badly formed object filename"); + LOG_RAW("Badly formed object filename"); return Statistic::bad_compiler_arguments; } @@ -1066,7 +1065,7 @@ process_args(Context& ctx) if (args_info.output_obj != "/dev/null") { auto st = Stat::stat(args_info.output_obj); if (st && !st.is_regular()) { - log("Not a regular file: {}", args_info.output_obj); + LOG("Not a regular file: {}", args_info.output_obj); return Statistic::bad_output_file; } } @@ -1074,7 +1073,7 @@ process_args(Context& ctx) auto output_dir = std::string(Util::dir_name(args_info.output_obj)); auto st = Stat::stat(output_dir); if (!st || !st.is_directory()) { - log("Directory does not exist: {}", output_dir); + LOG("Directory does not exist: {}", output_dir); return Statistic::bad_output_file; } diff --git a/src/ccache.cpp b/src/ccache.cpp index 4bcb9689e..f2664a0d8 100644 --- a/src/ccache.cpp +++ b/src/ccache.cpp @@ -77,12 +77,11 @@ #endif const char CCACHE_NAME[] = MYNAME; -using Logging::log; using nonstd::nullopt; using nonstd::optional; using nonstd::string_view; -const char VERSION_TEXT[] = +constexpr const char VERSION_TEXT[] = R"({} version {} Copyright (C) 2002-2007 Andrew Tridgell @@ -96,7 +95,7 @@ Foundation; either version 3 of the License, or (at your option) any later version. )"; -const char USAGE_TEXT[] = +constexpr const char USAGE_TEXT[] = R"(Usage: {} [options] {} compiler [compiler options] @@ -194,7 +193,7 @@ add_prefix(const Context& ctx, Args& args, const std::string& prefix_command) prefix.push_back(path); } - log("Using command-line prefix {}", prefix_command); + LOG("Using command-line prefix {}", prefix_command); for (size_t i = prefix.size(); i != 0; i--) { args.push_front(prefix[i - 1]); } @@ -246,7 +245,7 @@ init_hash_debug(Context& ctx, hash.enable_debug(section_name, debug_binary_file.get(), debug_text_file); ctx.hash_debug_files.push_back(std::move(debug_binary_file)); } else { - log("Failed to open {}: {}", path, strerror(errno)); + LOG("Failed to open {}: {}", path, strerror(errno)); } } @@ -323,7 +322,7 @@ do_remember_include_file(Context& ctx, } if (!st.is_regular()) { // Device, pipe, socket or other strange creature. 
- log("Non-regular include file {}", path); + LOG("Non-regular include file {}", path); return false; } @@ -338,14 +337,14 @@ do_remember_include_file(Context& ctx, // under "Performance" in doc/MANUAL.adoc. if (!(ctx.config.sloppiness() & SLOPPY_INCLUDE_FILE_MTIME) && st.mtime() >= ctx.time_of_compilation) { - log("Include file {} too new", path); + LOG("Include file {} too new", path); return false; } // The same >= logic as above applies to the change time of the file. if (!(ctx.config.sloppiness() & SLOPPY_INCLUDE_FILE_CTIME) && st.ctime() >= ctx.time_of_compilation) { - log("Include file {} ctime too new", path); + LOG("Include file {} ctime too new", path); return false; } @@ -355,7 +354,7 @@ do_remember_include_file(Context& ctx, is_pch = Util::is_precompiled_header(path); if (is_pch) { if (ctx.included_pch_file.empty()) { - log("Detected use of precompiled header: {}", path); + LOG("Detected use of precompiled header: {}", path); } bool using_pch_sum = false; if (ctx.config.pch_external_checksum()) { @@ -365,7 +364,7 @@ do_remember_include_file(Context& ctx, if (Stat::stat(pch_sum_path, Stat::OnError::log)) { path = std::move(pch_sum_path); using_pch_sum = true; - log("Using pch.sum file {}", path); + LOG("Using pch.sum file {}", path); } } @@ -408,7 +407,7 @@ remember_include_file(Context& ctx, { if (!do_remember_include_file(ctx, path, cpp_hash, system, depend_mode_hash) && ctx.config.direct_mode()) { - log("Disabling direct mode"); + LOG_RAW("Disabling direct mode"); ctx.config.set_direct_mode(false); } } @@ -517,7 +516,7 @@ process_preprocessed_file(Context& ctx, } q++; if (q >= end) { - log("Failed to parse included file path"); + LOG_RAW("Failed to parse included file path"); return false; } // q points to the beginning of an include file path @@ -568,7 +567,7 @@ process_preprocessed_file(Context& ctx, // part of inline assembly, refers to an external file. If the file // changes, the hash should change as well, but finding out what file to // hash is too hard for ccache, so just bail out. 
- log( + LOG_RAW( "Found unsupported .inc" "bin directive in source code"); throw Failure(Statistic::unsupported_code_directive); @@ -661,11 +660,12 @@ static void use_relative_paths_in_depfile(const Context& ctx) { if (ctx.config.base_dir().empty()) { - log("Base dir not set, skip using relative paths"); + LOG_RAW("Base dir not set, skip using relative paths"); return; // nothing to do } if (!ctx.has_absolute_include_headers) { - log("No absolute path for included files found, skip using relative paths"); + LOG_RAW( + "No absolute path for included files found, skip using relative paths"); return; // nothing to do } @@ -674,14 +674,14 @@ use_relative_paths_in_depfile(const Context& ctx) try { file_content = Util::read_file(output_dep); } catch (const Error& e) { - log("Cannot open dependency file {}: {}", output_dep, e.what()); + LOG("Cannot open dependency file {}: {}", output_dep, e.what()); return; } const auto new_content = rewrite_dep_file_paths(ctx, file_content); if (new_content) { Util::write_file(output_dep, *new_content); } else { - log("No paths in dependency file {} made relative", output_dep); + LOG("No paths in dependency file {} made relative", output_dep); } } @@ -694,7 +694,7 @@ result_name_from_depfile(Context& ctx, Hash& hash) try { file_content = Util::read_file(ctx.args_info.output_dep); } catch (const Error& e) { - log( + LOG( "Cannot open dependency file {}: {}", ctx.args_info.output_dep, e.what()); return nullopt; } @@ -750,19 +750,19 @@ do_execute(Context& ctx, if (errors.find("unrecognized command line option") != std::string::npos && errors.find("-fdiagnostics-color") != std::string::npos) { // Old versions of GCC do not support colored diagnostics. - log("-fdiagnostics-color is unsupported; trying again without it"); + LOG_RAW("-fdiagnostics-color is unsupported; trying again without it"); tmp_stdout.fd = Fd(open( tmp_stdout.path.c_str(), O_RDWR | O_CREAT | O_TRUNC | O_BINARY, 0600)); if (!tmp_stdout.fd) { - log("Failed to truncate {}: {}", tmp_stdout.path, strerror(errno)); + LOG("Failed to truncate {}: {}", tmp_stdout.path, strerror(errno)); throw Failure(Statistic::internal_error); } tmp_stderr.fd = Fd(open( tmp_stderr.path.c_str(), O_RDWR | O_CREAT | O_TRUNC | O_BINARY, 0600)); if (!tmp_stderr.fd) { - log("Failed to truncate {}: {}", tmp_stderr.path, strerror(errno)); + LOG("Failed to truncate {}: {}", tmp_stderr.path, strerror(errno)); throw Failure(Statistic::internal_error); } @@ -824,14 +824,14 @@ update_manifest_file(Context& ctx) (ctx.config.sloppiness() & SLOPPY_FILE_STAT_MATCHES) || ctx.args_info.output_is_precompiled_header; - log("Adding result name to {}", *ctx.manifest_path()); + LOG("Adding result name to {}", *ctx.manifest_path()); if (!Manifest::put(ctx.config, *ctx.manifest_path(), *ctx.result_name(), ctx.included_files, ctx.time_of_compilation, save_timestamp)) { - log("Failed to add result name to {}", *ctx.manifest_path()); + LOG("Failed to add result name to {}", *ctx.manifest_path()); } else { const auto new_stat = Stat::stat(*ctx.manifest_path(), Stat::OnError::log); ctx.manifest_counter_updates.increment( @@ -862,7 +862,7 @@ create_cachedir_tag(const Context& ctx) try { Util::write_file(path, cachedir_tag); } catch (const Error& e) { - log("Failed to create {}: {}", path, e.what()); + LOG("Failed to create {}: {}", path, e.what()); } } @@ -884,19 +884,19 @@ find_coverage_file(const Context& ctx) std::string unmangled_form = Result::gcno_file_in_unmangled_form(ctx); std::string found_file; if (Stat::stat(mangled_form)) { - log("Found 
coverage file {}", mangled_form); + LOG("Found coverage file {}", mangled_form); found_file = mangled_form; } if (Stat::stat(unmangled_form)) { - log("Found coverage file {}", unmangled_form); + LOG("Found coverage file {}", unmangled_form); if (!found_file.empty()) { - log("Found two coverage files, cannot continue"); + LOG_RAW("Found two coverage files, cannot continue"); return {}; } found_file = unmangled_form; } if (found_file.empty()) { - log("No coverage file found (tried {} and {}), cannot continue", + LOG("No coverage file found (tried {} and {}), cannot continue", unmangled_form, mangled_form); return {}; @@ -946,12 +946,12 @@ to_cache(Context& ctx, // non-existent .dwo files. if (unlink(ctx.args_info.output_dwo.c_str()) != 0 && errno != ENOENT && errno != ESTALE) { - log("Failed to unlink {}: {}", ctx.args_info.output_dwo, strerror(errno)); + LOG("Failed to unlink {}: {}", ctx.args_info.output_dwo, strerror(errno)); throw Failure(Statistic::bad_output_file); } } - log("Running real compiler"); + LOG_RAW("Running real compiler"); MTR_BEGIN("execute", "compiler"); TemporaryFile tmp_stdout( @@ -992,7 +992,7 @@ to_cache(Context& ctx, // distcc-pump outputs lines like this: // __________Using # distcc servers in pump mode if (st.size() != 0 && ctx.guessed_compiler != GuessedCompiler::pump) { - log("Compiler produced stdout"); + LOG_RAW("Compiler produced stdout"); throw Failure(Statistic::compiler_produced_stdout); } @@ -1005,7 +1005,7 @@ to_cache(Context& ctx, } if (status != 0) { - log("Compiler gave exit status {}", status); + LOG("Compiler gave exit status {}", status); // We can output stderr immediately instead of rerunning the compiler. Util::send_to_stderr(ctx, Util::read_file(tmp_stderr_path)); @@ -1031,11 +1031,11 @@ to_cache(Context& ctx, const auto obj_stat = Stat::stat(ctx.args_info.output_obj); if (!obj_stat) { - log("Compiler didn't produce an object file"); + LOG_RAW("Compiler didn't produce an object file"); throw Failure(Statistic::compiler_produced_no_output); } if (obj_stat.size() == 0) { - log("Compiler produced an empty object file"); + LOG_RAW("Compiler produced an empty object file"); throw Failure(Statistic::compiler_produced_empty_output); } @@ -1081,9 +1081,9 @@ to_cache(Context& ctx, auto error = result_writer.finalize(); if (error) { - log("Error: {}", *error); + LOG("Error: {}", *error); } else { - log("Stored in cache: {}", result_file.path); + LOG("Stored in cache: {}", result_file.path); } auto new_result_stat = Stat::stat(result_file.path, Stat::OnError::log); @@ -1148,7 +1148,7 @@ get_result_name_from_cpp(Context& ctx, Args& args, Hash& hash) } args.push_back(ctx.args_info.input_file); add_prefix(ctx, args, ctx.config.prefix_command_cpp()); - log("Running preprocessor"); + LOG_RAW("Running preprocessor"); MTR_BEGIN("execute", "preprocessor"); status = do_execute(ctx, args, std::move(tmp_stdout), std::move(tmp_stderr)); @@ -1157,7 +1157,7 @@ get_result_name_from_cpp(Context& ctx, Args& args, Hash& hash) } if (status != 0) { - log("Preprocessor gave exit status {}", status); + LOG("Preprocessor gave exit status {}", status); throw Failure(Statistic::preprocessor_error); } @@ -1170,7 +1170,7 @@ get_result_name_from_cpp(Context& ctx, Args& args, Hash& hash) hash.hash_delimiter("cppstderr"); if (!ctx.args_info.direct_i_file && !hash.hash_file(stderr_path)) { // Somebody removed the temporary file? 
- log("Failed to open {}: {}", stderr_path, strerror(errno)); + LOG("Failed to open {}: {}", stderr_path, strerror(errno)); throw Failure(Statistic::internal_error); } @@ -1220,7 +1220,7 @@ hash_compiler(const Context& ctx, } else { // command string if (!hash_multicommand_output( hash, ctx.config.compiler_check(), ctx.orig_args[0])) { - log("Failure running compiler check command: {}", + LOG("Failure running compiler check command: {}", ctx.config.compiler_check()); throw Failure(Statistic::compiler_check_failed); } @@ -1355,7 +1355,7 @@ hash_common_info(const Context& ctx, if (sep_pos != std::string::npos) { std::string old_path = map.substr(0, sep_pos); std::string new_path = map.substr(sep_pos + 1); - log("Relocating debuginfo from {} to {} (CWD: {})", + LOG("Relocating debuginfo from {} to {} (CWD: {})", old_path, new_path, ctx.apparent_cwd); @@ -1364,7 +1364,7 @@ hash_common_info(const Context& ctx, } } } - log("Hashing CWD {}", dir_to_hash); + LOG("Hashing CWD {}", dir_to_hash); hash.hash_delimiter("cwd"); hash.hash(dir_to_hash); } @@ -1395,14 +1395,14 @@ hash_common_info(const Context& ctx, string_view stem = Util::remove_extension(Util::base_name(ctx.args_info.output_obj)); std::string gcda_path = fmt::format("{}/{}.gcda", dir, stem); - log("Hashing coverage path {}", gcda_path); + LOG("Hashing coverage path {}", gcda_path); hash.hash_delimiter("gcda"); hash.hash(gcda_path); } // Possibly hash the sanitize blacklist file path. for (const auto& sanitize_blacklist : args_info.sanitize_blacklists) { - log("Hashing sanitize blacklist {}", sanitize_blacklist); + LOG("Hashing sanitize blacklist {}", sanitize_blacklist); hash.hash("sanitizeblacklist"); if (!hash_binary_file(ctx, hash, sanitize_blacklist)) { throw Failure(Statistic::error_hashing_extra_file); @@ -1412,7 +1412,7 @@ hash_common_info(const Context& ctx, if (!ctx.config.extra_files_to_hash().empty()) { for (const std::string& path : Util::split_into_strings( ctx.config.extra_files_to_hash(), PATH_DELIM)) { - log("Hashing extra file {}", path); + LOG("Hashing extra file {}", path); hash.hash_delimiter("extrafile"); if (!hash_binary_file(ctx, hash, path)) { throw Failure(Statistic::error_hashing_extra_file); @@ -1453,10 +1453,10 @@ hash_profile_data_file(const Context& ctx, Hash& hash) bool found = false; for (const std::string& p : paths_to_try) { - log("Checking for profile data file {}", p); + LOG("Checking for profile data file {}", p); auto st = Stat::stat(p); if (st && !st.is_directory()) { - log("Adding profile data {} to the hash", p); + LOG("Adding profile data {} to the hash", p); hash.hash_delimiter("-fprofile-use"); if (hash_binary_file(ctx, hash, p)) { found = true; @@ -1509,10 +1509,10 @@ calculate_result_name(Context& ctx, for (size_t i = 1; i < args.size(); i++) { // Trust the user if they've said we should not hash a given option. 
if (option_should_be_ignored(args[i], ctx.ignore_options())) { - log("Not hashing ignored option: {}", args[i]); + LOG("Not hashing ignored option: {}", args[i]); if (i + 1 < args.size() && compopt_takes_arg(args[i])) { i++; - log("Not hashing argument of ignored option: {}", args[i]); + LOG("Not hashing argument of ignored option: {}", args[i]); } continue; } @@ -1678,13 +1678,13 @@ calculate_result_name(Context& ctx, if (ctx.args_info.profile_generate) { ASSERT(!ctx.args_info.profile_path.empty()); - log("Adding profile directory {} to our hash", ctx.args_info.profile_path); + LOG("Adding profile directory {} to our hash", ctx.args_info.profile_path); hash.hash_delimiter("-fprofile-dir"); hash.hash(ctx.args_info.profile_path); } if (ctx.args_info.profile_use && !hash_profile_data_file(ctx, hash)) { - log("No profile data file found"); + LOG_RAW("No profile data file found"); throw Failure(Statistic::no_input_file); } @@ -1731,7 +1731,7 @@ calculate_result_name(Context& ctx, throw Failure(Statistic::internal_error); } if (result & HASH_SOURCE_CODE_FOUND_TIME) { - log("Disabling direct mode"); + LOG_RAW("Disabling direct mode"); ctx.config.set_direct_mode(false); return nullopt; } @@ -1744,28 +1744,28 @@ calculate_result_name(Context& ctx, ctx.set_manifest_path(manifest_file.path); if (manifest_file.stat) { - log("Looking for result name in {}", manifest_file.path); + LOG("Looking for result name in {}", manifest_file.path); MTR_BEGIN("manifest", "manifest_get"); result_name = Manifest::get(ctx, manifest_file.path); MTR_END("manifest", "manifest_get"); if (result_name) { - log("Got result name from manifest"); + LOG_RAW("Got result name from manifest"); } else { - log("Did not find result name in manifest"); + LOG_RAW("Did not find result name in manifest"); } } else { - log("No manifest with name {} in the cache", manifest_name.to_string()); + LOG("No manifest with name {} in the cache", manifest_name.to_string()); } } else { if (ctx.args_info.arch_args.empty()) { result_name = get_result_name_from_cpp(ctx, preprocessor_args, hash); - log("Got result name from preprocessor"); + LOG_RAW("Got result name from preprocessor"); } else { preprocessor_args.push_back("-arch"); for (size_t i = 0; i < ctx.args_info.arch_args.size(); ++i) { preprocessor_args.push_back(ctx.args_info.arch_args[i]); result_name = get_result_name_from_cpp(ctx, preprocessor_args, hash); - log("Got result name from preprocessor with -arch {}", + LOG("Got result name from preprocessor with -arch {}", ctx.args_info.arch_args[i]); if (i != ctx.args_info.arch_args.size() - 1) { result_name = nullopt; @@ -1803,7 +1803,7 @@ from_cache(Context& ctx, FromCacheCallMode mode) || ctx.guessed_compiler == GuessedCompiler::unknown) && ctx.args_info.output_is_precompiled_header && !ctx.args_info.fno_pch_timestamp && mode == FromCacheCallMode::cpp) { - log("Not considering cached precompiled header in preprocessor mode"); + LOG_RAW("Not considering cached precompiled header in preprocessor mode"); return nullopt; } @@ -1813,7 +1813,7 @@ from_cache(Context& ctx, FromCacheCallMode mode) const auto result_file = look_up_cache_file( ctx.config.cache_dir(), *ctx.result_name(), Result::k_file_suffix); if (!result_file.stat) { - log("No result with name {} in the cache", ctx.result_name()->to_string()); + LOG("No result with name {} in the cache", ctx.result_name()->to_string()); return nullopt; } ctx.set_result_path(result_file.path); @@ -1824,14 +1824,14 @@ from_cache(Context& ctx, FromCacheCallMode mode) auto error = 
result_reader.read(result_retriever); MTR_END("cache", "from_cache"); if (error) { - log("Failed to get result from cache: {}", *error); + LOG("Failed to get result from cache: {}", *error); return nullopt; } // Update modification timestamp to save file from LRU cleanup. Util::update_mtime(*ctx.result_path()); - log("Succeeded getting cached result"); + LOG_RAW("Succeeded getting cached result"); return mode == FromCacheCallMode::direct ? Statistic::direct_cache_hit : Statistic::preprocessed_cache_hit; @@ -2007,14 +2007,14 @@ initialize(Context& ctx, int argc, const char* const* argv) ctx.original_umask = umask(ctx.config.umask()); } - log("=== CCACHE {} STARTED =========================================", + LOG("=== CCACHE {} STARTED =========================================", CCACHE_VERSION); if (getenv("CCACHE_INTERNAL_TRACE")) { #ifdef MTR_ENABLED ctx.mini_trace = std::make_unique(ctx.args_info); #else - log("Error: tracing is not enabled!"); + LOG_RAW("Error: tracing is not enabled!"); #endif } } @@ -2027,7 +2027,7 @@ set_up_uncached_err() int uncached_fd = dup(STDERR_FILENO); // The file descriptor is intentionally leaked. if (uncached_fd == -1) { - log("dup(2) failed: {}", strerror(errno)); + LOG("dup(2) failed: {}", strerror(errno)); throw Failure(Statistic::internal_error); } @@ -2039,7 +2039,7 @@ configuration_logger(const std::string& key, const std::string& value, const std::string& origin) { - Logging::bulk_log("Config: ({}) {} = {}", origin, key, value); + BULK_LOG("Config: ({}) {} = {}", origin, key, value); } static void @@ -2108,7 +2108,7 @@ update_stats_and_maybe_move_cache_file(const Context& ctx, ctx.config.cache_dir(), wanted_level, name.to_string() + file_suffix); if (current_path != wanted_path) { Util::ensure_dir_exists(Util::dir_name(wanted_path)); - log("Moving {} to {}", current_path, wanted_path); + LOG("Moving {} to {}", current_path, wanted_path); try { Util::rename(current_path, wanted_path); } catch (const Error&) { @@ -2127,14 +2127,14 @@ finalize_stats_and_trigger_cleanup(Context& ctx) if (config.disable()) { // Just log result, don't update statistics. - log("Result: disabled"); + LOG_RAW("Result: disabled"); return; } if (!config.log_file().empty() || config.debug()) { const auto result = Statistics::get_result(ctx.counter_updates); if (result) { - log("Result: {}", *result); + LOG("Result: {}", *result); } } @@ -2180,7 +2180,7 @@ finalize_stats_and_trigger_cleanup(Context& ctx) if (config.max_files() != 0 && counters->get(Statistic::files_in_cache) > config.max_files() / 16) { - log("Need to clean up {} since it holds {} files (limit: {} files)", + LOG("Need to clean up {} since it holds {} files (limit: {} files)", subdir, counters->get(Statistic::files_in_cache), config.max_files() / 16); @@ -2189,7 +2189,7 @@ finalize_stats_and_trigger_cleanup(Context& ctx) if (config.max_size() != 0 && counters->get(Statistic::cache_size_kibibyte) > config.max_size() / 1024 / 16) { - log("Need to clean up {} since it holds {} KiB (limit: {} KiB)", + LOG("Need to clean up {} since it holds {} KiB (limit: {} KiB)", subdir, counters->get(Statistic::cache_size_kibibyte), config.max_size() / 1024 / 16); @@ -2213,7 +2213,7 @@ finalize_at_exit(Context& ctx) finalize_stats_and_trigger_cleanup(ctx); } catch (const ErrorBase& e) { // finalize_at_exit must not throw since it's called by a destructor. - log("Error while finalizing stats: {}", e.what()); + LOG("Error while finalizing stats: {}", e.what()); } // Dump log buffer last to not lose any logs. 
@@ -2266,11 +2266,11 @@ cache_compilation(int argc, const char* const* argv) ctx.orig_args.erase_with_prefix("--ccache-"); add_prefix(ctx, ctx.orig_args, ctx.config.prefix_command()); - log("Failed; falling back to running the real compiler"); + LOG_RAW("Failed; falling back to running the real compiler"); saved_orig_args = std::move(ctx.orig_args); auto execv_argv = saved_orig_args.to_argv(); - log("Executing {}", Util::format_argv_for_logging(execv_argv.data())); + LOG("Executing {}", Util::format_argv_for_logging(execv_argv.data())); // Run execv below after ctx and finalizer have been destructed. } } @@ -2288,7 +2288,7 @@ static Statistic do_cache_compilation(Context& ctx, const char* const* argv) { if (ctx.actual_cwd.empty()) { - log("Unable to determine current working directory: {}", strerror(errno)); + LOG("Unable to determine current working directory: {}", strerror(errno)); throw Failure(Statistic::internal_error); } @@ -2303,7 +2303,7 @@ do_cache_compilation(Context& ctx, const char* const* argv) } if (ctx.config.disable()) { - log("ccache is disabled"); + LOG_RAW("ccache is disabled"); // Statistic::cache_miss is a dummy to trigger stats_flush. throw Failure(Statistic::cache_miss); } @@ -2312,11 +2312,11 @@ do_cache_compilation(Context& ctx, const char* const* argv) set_up_uncached_err(); MTR_END("main", "set_up_uncached_err"); - log("Command line: {}", Util::format_argv_for_logging(argv)); - log("Hostname: {}", Util::get_hostname()); - log("Working directory: {}", ctx.actual_cwd); + LOG("Command line: {}", Util::format_argv_for_logging(argv)); + LOG("Hostname: {}", Util::get_hostname()); + LOG("Working directory: {}", ctx.actual_cwd); if (ctx.apparent_cwd != ctx.actual_cwd) { - log("Apparent working directory: {}", ctx.apparent_cwd); + LOG("Apparent working directory: {}", ctx.apparent_cwd); } ctx.config.set_limit_multiple( @@ -2338,28 +2338,28 @@ do_cache_compilation(Context& ctx, const char* const* argv) && (!ctx.args_info.generating_dependencies || ctx.args_info.output_dep == "/dev/null" || !ctx.config.run_second_cpp())) { - log("Disabling depend mode"); + LOG_RAW("Disabling depend mode"); ctx.config.set_depend_mode(false); } - log("Source file: {}", ctx.args_info.input_file); + LOG("Source file: {}", ctx.args_info.input_file); if (ctx.args_info.generating_dependencies) { - log("Dependency file: {}", ctx.args_info.output_dep); + LOG("Dependency file: {}", ctx.args_info.output_dep); } if (ctx.args_info.generating_coverage) { - log("Coverage file is being generated"); + LOG_RAW("Coverage file is being generated"); } if (ctx.args_info.generating_stackusage) { - log("Stack usage file: {}", ctx.args_info.output_su); + LOG("Stack usage file: {}", ctx.args_info.output_su); } if (ctx.args_info.generating_diagnostics) { - log("Diagnostics file: {}", ctx.args_info.output_dia); + LOG("Diagnostics file: {}", ctx.args_info.output_dia); } if (!ctx.args_info.output_dwo.empty()) { - log("Split dwarf file: {}", ctx.args_info.output_dwo); + LOG("Split dwarf file: {}", ctx.args_info.output_dwo); } - log("Object file: {}", ctx.args_info.output_obj); + LOG("Object file: {}", ctx.args_info.output_obj); MTR_META_THREAD_NAME(ctx.args_info.output_obj.c_str()); if (ctx.config.debug()) { @@ -2369,7 +2369,7 @@ do_cache_compilation(Context& ctx, const char* const* argv) if (debug_text_file) { ctx.hash_debug_files.push_back(std::move(debug_text_file)); } else { - log("Failed to open {}: {}", path, strerror(errno)); + LOG("Failed to open {}: {}", path, strerror(errno)); } } @@ -2402,7 +2402,7 @@ 
do_cache_compilation(Context& ctx, const char* const* argv) optional result_name; optional result_name_from_manifest; if (ctx.config.direct_mode()) { - log("Trying direct lookup"); + LOG_RAW("Trying direct lookup"); MTR_BEGIN("hash", "direct_hash"); Args dummy_args; result_name = @@ -2429,7 +2429,7 @@ do_cache_compilation(Context& ctx, const char* const* argv) } if (ctx.config.read_only_direct()) { - log("Read-only direct mode; running real compiler"); + LOG_RAW("Read-only direct mode; running real compiler"); throw Failure(Statistic::cache_miss); } @@ -2472,9 +2472,9 @@ do_cache_compilation(Context& ctx, const char* const* argv) // The best thing here would probably be to remove the hash entry from // the manifest. For now, we use a simpler method: just remove the // manifest file. - log("Hash from manifest doesn't match preprocessor output"); - log("Likely reason: different CCACHE_BASEDIRs used"); - log("Removing manifest as a safety measure"); + LOG_RAW("Hash from manifest doesn't match preprocessor output"); + LOG_RAW("Likely reason: different CCACHE_BASEDIRs used"); + LOG_RAW("Removing manifest as a safety measure"); Util::unlink_safe(*ctx.manifest_path()); put_result_in_manifest = true; @@ -2491,7 +2491,7 @@ do_cache_compilation(Context& ctx, const char* const* argv) } if (ctx.config.read_only()) { - log("Read-only mode; running real compiler"); + LOG_RAW("Read-only mode; running real compiler"); throw Failure(Statistic::cache_miss); } diff --git a/src/cleanup.cpp b/src/cleanup.cpp index 3754c327a..6d91d777d 100644 --- a/src/cleanup.cpp +++ b/src/cleanup.cpp @@ -31,8 +31,6 @@ #include -using Logging::log; - static void delete_file(const std::string& path, uint64_t size, @@ -41,7 +39,7 @@ delete_file(const std::string& path, { bool deleted = Util::unlink_safe(path, Util::UnlinkLog::ignore_failure); if (!deleted && errno != ENOENT && errno != ESTALE) { - log("Failed to unlink {} ({})", path, strerror(errno)); + LOG("Failed to unlink {} ({})", path, strerror(errno)); } else if (cache_size && files_in_cache) { // The counters are intentionally subtracted even if there was no file to // delete since the final cache size calculation will be incorrect if they @@ -90,7 +88,7 @@ clean_up_dir(const std::string& subdir, uint64_t max_age, const Util::ProgressReceiver& progress_receiver) { - log("Cleaning up cache directory {}", subdir); + LOG("Cleaning up cache directory {}", subdir); std::vector> files; Util::get_level_1_files( @@ -128,7 +126,7 @@ clean_up_dir(const std::string& subdir, return f1->lstat().mtime() < f2->lstat().mtime(); }); - log("Before cleanup: {:.0f} KiB, {:.0f} files", + LOG("Before cleanup: {:.0f} KiB, {:.0f} files", static_cast(cache_size) / 1024, static_cast(files_in_cache)); @@ -173,12 +171,12 @@ clean_up_dir(const std::string& subdir, cleaned = true; } - log("After cleanup: {:.0f} KiB, {:.0f} files", + LOG("After cleanup: {:.0f} KiB, {:.0f} files", static_cast(cache_size) / 1024, static_cast(files_in_cache)); if (cleaned) { - log("Cleaned up cache directory {}", subdir); + LOG("Cleaned up cache directory {}", subdir); } update_counters(subdir, files_in_cache, cache_size, cleaned); @@ -207,7 +205,7 @@ static void wipe_dir(const std::string& subdir, const Util::ProgressReceiver& progress_receiver) { - log("Clearing out cache directory {}", subdir); + LOG("Clearing out cache directory {}", subdir); std::vector> files; Util::get_level_1_files( @@ -220,7 +218,7 @@ wipe_dir(const std::string& subdir, const bool cleared = !files.empty(); if (cleared) { - log("Cleared out cache 
directory {}", subdir); + LOG("Cleared out cache directory {}", subdir); } update_counters(subdir, 0, 0, cleared); } diff --git a/src/compress.cpp b/src/compress.cpp index 1e0982fb4..36a490984 100644 --- a/src/compress.cpp +++ b/src/compress.cpp @@ -37,7 +37,6 @@ #include #include -using Logging::log; using nonstd::optional; namespace { @@ -170,7 +169,7 @@ recompress_file(RecompressionStatistics& statistics, return; } - log("Recompressing {} to {}", + LOG("Recompressing {} to {}", cache_file.path(), level ? fmt::format("level {}", wanted_level) : "uncompressed"); AtomicFile atomic_new_file(cache_file.path(), AtomicFile::Mode::binary); @@ -203,7 +202,7 @@ recompress_file(RecompressionStatistics& statistics, statistics.update(content_size, old_stat.size(), new_stat.size(), 0); - log("Recompression of {} done", cache_file.path()); + LOG("Recompression of {} done", cache_file.path()); } } // namespace diff --git a/src/execute.cpp b/src/execute.cpp index 211d4ba53..32164929a 100644 --- a/src/execute.cpp +++ b/src/execute.cpp @@ -32,7 +32,6 @@ # include "Win32Util.hpp" #endif -using Logging::log; using nonstd::string_view; #ifdef _WIN32 @@ -133,7 +132,7 @@ win32execute(const char* path, } if (ret == 0) { DWORD error = GetLastError(); - log("failed to execute {}: {} ({})", + LOG("failed to execute {}: {} ({})", full_path, Win32Util::error_message(error), error); @@ -158,7 +157,7 @@ win32execute(const char* path, int execute(const char* const* argv, Fd&& fd_out, Fd&& fd_err, pid_t* pid) { - log("Executing {}", Util::format_argv_for_logging(argv)); + LOG("Executing {}", Util::format_argv_for_logging(argv)); { SignalHandlerBlocker signal_handler_blocker; @@ -218,7 +217,7 @@ find_executable(const Context& ctx, path = getenv("PATH"); } if (path.empty()) { - log("No PATH variable"); + LOG_RAW("No PATH variable"); return {}; } diff --git a/src/hashutil.cpp b/src/hashutil.cpp index 1aa4b90ea..db5ad3723 100644 --- a/src/hashutil.cpp +++ b/src/hashutil.cpp @@ -42,7 +42,6 @@ # include #endif -using Logging::log; using nonstd::string_view; namespace { @@ -233,7 +232,7 @@ hash_source_code_string(const Context& ctx, hash.hash(str); if (result & HASH_SOURCE_CODE_FOUND_DATE) { - log("Found __DATE__ in {}", path); + LOG("Found __DATE__ in {}", path); // Make sure that the hash sum changes if the (potential) expansion of // __DATE__ changes. @@ -252,10 +251,10 @@ hash_source_code_string(const Context& ctx, // not very useful since the chance that we get a cache hit later the same // second should be quite slim... So, just signal back to the caller that // __TIME__ has been found so that the direct mode can be disabled. - log("Found __TIME__ in {}", path); + LOG("Found __TIME__ in {}", path); } if (result & HASH_SOURCE_CODE_FOUND_TIMESTAMP) { - log("Found __TIMESTAMP__ in {}", path); + LOG("Found __TIMESTAMP__ in {}", path); // Make sure that the hash sum changes if the (potential) expansion of // __TIMESTAMP__ changes. 
@@ -384,7 +383,7 @@ hash_command_output(Hash& hash, } auto argv = args.to_argv(); - log("Executing compiler check command {}", + LOG("Executing compiler check command {}", Util::format_argv_for_logging(argv.data())); #ifdef _WIN32 @@ -436,7 +435,7 @@ hash_command_output(Hash& hash, int fd = _open_osfhandle((intptr_t)pipe_out[0], O_BINARY); bool ok = hash.hash_fd(fd); if (!ok) { - log("Error hashing compiler check command output: {}", strerror(errno)); + LOG("Error hashing compiler check command output: {}", strerror(errno)); } WaitForSingleObject(pi.hProcess, INFINITE); DWORD exitcode; @@ -445,7 +444,7 @@ hash_command_output(Hash& hash, CloseHandle(pi.hProcess); CloseHandle(pi.hThread); if (exitcode != 0) { - log("Compiler check command returned {}", exitcode); + LOG("Compiler check command returned {}", exitcode); return false; } return ok; @@ -473,7 +472,7 @@ hash_command_output(Hash& hash, close(pipefd[1]); bool ok = hash.hash_fd(pipefd[0]); if (!ok) { - log("Error hashing compiler check command output: {}", strerror(errno)); + LOG("Error hashing compiler check command output: {}", strerror(errno)); } close(pipefd[0]); @@ -483,11 +482,11 @@ hash_command_output(Hash& hash, if (result == -1 && errno == EINTR) { continue; } - log("waitpid failed: {}", strerror(errno)); + LOG("waitpid failed: {}", strerror(errno)); return false; } if (!WIFEXITED(status) || WEXITSTATUS(status) != 0) { - log("Compiler check command returned {}", WEXITSTATUS(status)); + LOG("Compiler check command returned {}", WEXITSTATUS(status)); return false; } return ok; diff --git a/unittest/.clang-tidy b/unittest/.clang-tidy index 79cbdb273..d311fb572 100644 --- a/unittest/.clang-tidy +++ b/unittest/.clang-tidy @@ -17,8 +17,6 @@ CheckOptions: value: 999999 - key: readability-function-size.ParameterThreshold value: 7 - - key: readability-function-size.NestingThreshold - value: 6 - key: readability-function-size.NestingThreshold value: 999999 - key: readability-function-size.VariableThreshold