Skip to content

Commit 94cb95a

Browse files
committed
Enhance scraper log archiving
This commit implements putting the log archive files in the logarchive subdirectory to prevent clutter. It also implements deleting old log files per the following configuration file (or command line) parameters: deleteoldlogarchives=0|1 (default true), and logarchiveretainnumfiles=x (default 14). deleteoldlogarchives defaults to true, which means that every time the log archiver is triggered (typically at midnight UTC), it will archive the current scraper log file into the logarchive directory and then apply the retention rule, keeping only the x most recent files, where x is specified by logarchiveretainnumfiles. An immediate archive (and directory cleanup) can be triggered by issuing the rpc command archivescraperlog. If deleteoldlogarchives is set to false, then the scraper.log files are archived into the logarchive directory but never deleted.
1 parent ad8bfc6 commit 94cb95a

File tree

1 file changed

+53
-7
lines changed

1 file changed

+53
-7
lines changed

src/scraper/scraper.cpp

Lines changed: 53 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -208,6 +208,23 @@ class logger
208208

209209
if (fDebug) LogPrintf("INFO: Scraper: Logger: ArchiveCheckDate %s, PrevArchiveCheckDate %s", ssArchiveCheckDate.str(), ssPrevArchiveCheckDate.str());
210210

211+
fs::path LogArchiveDir = pathDataDir / "logarchive";
212+
213+
// Check to see if the log archive directory exists and is a directory. If not create it.
214+
if (fs::exists(LogArchiveDir))
215+
{
216+
// If it is a normal file, this is not right. Remove the file and replace with the log archive directory.
217+
if (fs::is_regular_file(LogArchiveDir))
218+
{
219+
fs::remove(LogArchiveDir);
220+
fs::create_directory(LogArchiveDir);
221+
}
222+
}
223+
else
224+
{
225+
fs::create_directory(LogArchiveDir);
226+
}
227+
211228
if (fImmediate || (fArchiveDaily && ArchiveCheckDate > PrevArchiveCheckDate))
212229
{
213230
{
@@ -221,7 +238,7 @@ class logger
221238

222239
plogfile = pathDataDir / "scraper.log";
223240
pfile_temp = pathDataDir / ("scraper-" + DateTimeStrFormat("%Y%m%d%H%M%S", nTime) + ".log");
224-
pfile_out = pathDataDir / ("scraper-" + DateTimeStrFormat("%Y%m%d%H%M%S", nTime) + ".log.gz");
241+
pfile_out = LogArchiveDir / ("scraper-" + DateTimeStrFormat("%Y%m%d%H%M%S", nTime) + ".log.gz");
225242

226243
try
227244
{
@@ -264,6 +281,40 @@ class logger
264281

265282
fs::remove(pfile_temp);
266283

284+
bool fDeleteOldLogArchives = GetBoolArg("-deleteoldlogarchives", true);
285+
286+
if (fDeleteOldLogArchives)
287+
{
288+
unsigned int nRetention = (unsigned int)GetArg("-logarchiveretainnumfiles", 14);
289+
290+
std::set<fs::directory_entry, std::greater <fs::directory_entry>> SortedDirEntries;
291+
292+
// Iterate through the log archive directory and delete the oldest files beyond the retention rule
293+
// The names are in format scraper-YYYYMMDDHHMMSS for the scraper logs, so filter by containing scraper
294+
// The greater than sort in the set should then return descending order by datetime.
295+
for (fs::directory_entry& DirEntry : fs::directory_iterator(LogArchiveDir))
296+
{
297+
std::string sFilename = DirEntry.path().filename().string();
298+
size_t FoundPos = sFilename.find("scraper");
299+
300+
if (FoundPos != string::npos) SortedDirEntries.insert(DirEntry);
301+
}
302+
303+
// Now iterate through set of filtered filenames. Delete all files greater than retention count.
304+
unsigned int i = 0;
305+
for (auto const& iter : SortedDirEntries)
306+
{
307+
if (i >= nRetention)
308+
{
309+
fs::remove(iter.path());
310+
311+
LogPrintf("INFO: logger: Removed old archive gzip file %s.", iter.path().filename().string());
312+
}
313+
314+
++i;
315+
}
316+
}
317+
267318
return true;
268319
}
269320
else
@@ -718,7 +769,7 @@ void Scraper(bool bSingleShot)
718769
fs::path plogfile_out;
719770

720771
if (log.archive(false, plogfile_out))
721-
_log(logattribute::INFO, "Scraper", "Archived scraper.log to " + plogfile_out.string());
772+
_log(logattribute::INFO, "Scraper", "Archived scraper.log to " + plogfile_out.filename().string());
722773

723774

724775
sbage = SuperblockAge();
@@ -4184,11 +4235,6 @@ UniValue archivescraperlog(const UniValue& params, bool fHelp)
41844235
fs::path pfile_out;
41854236
bool ret = log.archive(true, pfile_out);
41864237

4187-
if(!ret)
4188-
return UniValue(ret);
4189-
else
4190-
return UniValue(pfile_out.c_str());
4191-
41924238
return UniValue(ret);
41934239
}
41944240

0 commit comments

Comments
 (0)