import getpass
import glob
import gzip
import logging
import logging.config
import logging.handlers
import os
import shutil
import sys
import time
import traceback
import uuid
from contextlib import contextmanager, suppress
from functools import lru_cache
import sentry_sdk
import yaml
from defence360agent.contracts import config, sentry
from defence360agent.contracts.config import AcronisBackup
from defence360agent.contracts.config import Logger as Config
from defence360agent.contracts.config import Sentry
from defence360agent.utils import antivirus_mode, is_root_user
PREFIX = os.environ.get("IMUNIFY360_LOGGING_PREFIX", "")
logger = logging.getLogger(__name__)
class RotatingCompressionFileHandler(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler that gzip-compresses rotated log files.

    Rotation is "copytruncate"-style: the live log file is copied aside
    and truncated in place (so any other process holding the file open
    keeps a valid fd), then the copy is gzipped.  Backups are named
    ``<base>.1.gz`` .. ``<base>.<backupCount>.gz``, ``.1.gz`` newest.
    """
    def shouldRollover(self, record):
        # Be robust against a stale/closed stream: on any failure, reopen
        # the underlying file, log the problem, and skip rotation for this
        # record instead of letting the exception escape into logging.
        try:
            return super().shouldRollover(record)
        except Exception:
            self.stream = self._open()
            logger.exception("Failed on shouldRollover to rollover log file")
            return False
    def doRollover(self):
        if self.stream:
            self.stream.close()
        if self.backupCount > 0:
            try:
                # Shift existing compressed backups up by one index
                # (.1.gz -> .2.gz, ...), dropping the oldest one first.
                for i in range(self.backupCount - 1, 0, -1):
                    sfn = "%s.%d.gz" % (self.baseFilename, i)
                    dfn = "%s.%d.gz" % (self.baseFilename, i + 1)
                    if os.path.exists(sfn):
                        if os.path.exists(dfn):
                            os.remove(dfn)
                        os.rename(sfn, dfn)
                dfn = self.baseFilename + ".1"
                if os.path.exists(dfn):
                    os.remove(dfn)
                # Copy + truncate (rather than rename) so writers that hold
                # the live file open keep writing to the now-empty log.
                shutil.copy2(self.baseFilename, dfn)
                os.truncate(self.baseFilename, 0)
                # Compress the copied snapshot, then drop the uncompressed
                # intermediate file.
                with open(dfn, "rb") as f_in, gzip.open(
                    "%s.gz" % dfn, "wb"
                ) as f_out:
                    shutil.copyfileobj(f_in, f_out)
                os.remove(dfn)
            except OSError:
                logger.exception("Failed on doRollover to rollover log file")
        # Reopen the (truncated) base file; "w" mode is safe because its
        # content has already been preserved in the .1.gz backup.
        self.mode = "w"
        self.stream = self._open()
def _sentry_init(debug=False):
    """Initialize the Sentry SDK and return a dictConfig handler entry.

    Returns a SentryHandler entry (level ERROR) when error reporting is
    enabled, otherwise a NullHandler entry.  If the config cannot be read
    at all, error reporting defaults to enabled.
    """
    # if config invalid, we still need to be able to configure logging
    try:
        error_reporting = Sentry.ENABLE
    except (KeyError, AssertionError):
        error_reporting = True
    if not error_reporting:
        return {
            "level": "NOTSET",
            "class": "logging.NullHandler",
        }
    sentry_sdk.init(
        dsn=Sentry.DSN,
        debug=debug,
        release=config.Core.VERSION,
        attach_stacktrace="on",
    )
    # Tag every event with agent-wide metadata (server id, etc.).
    with sentry_sdk.configure_scope() as scope:
        for tag, value in sentry.tags().items():
            scope.set_tag(tag, value)
        scope.user = {"id": sentry.tag("server_id")}
    return {
        "level": "ERROR",
        "class": "sentry_sdk.integrations.logging.SentryHandler",
    }
class _LoggerDynConfig:
    """Holds the mutable logging dictConfig plus the resolved log directory.

    Built once via _late_init() and then mutated in place by
    setLogLevel(), setConsoleLogLevel() and
    update_logging_config_from_file() before being (re)applied through
    reconfigure() / logging.config.dictConfig().
    """
    # Log directory used when the agent runs as root.
    _ROOT_LOG_DIR = "/var/log/%s" % config.Core.PRODUCT
    @staticmethod
    def _user_log_dir():
        # Per-user log directory for non-root runs; falls back to the
        # numeric uid if the user name resolves to an empty string.
        return "/var/log/%s_user_logs/%s" % (
            config.Core.PRODUCT,
            getpass.getuser() or os.getuid(),
        )
    def __init__(self):
        self.log_dir = (
            self._ROOT_LOG_DIR if is_root_user() else self._user_log_dir()
        )
        self.mutableDictConfig = {
            # Named loggers whose handler lists start empty and get
            # populated by setLogLevel() based on verbosity.
            "loggers": {
                "network": {
                    "level": "DEBUG",
                    # network_log is disabled by default'
                    "handlers": [],
                },
                "defence360agent.internals.the_sink": {
                    "level": "DEBUG",
                    # process_message_log is disabled by default'
                    "handlers": [],
                },
                "event_hook": {
                    "level": "INFO",
                    "handlers": [],
                },
            },
            "version": 1,
            # All file handlers use RotatingCompressionFileHandler so
            # rotated logs are gzip-compressed (see class above).
            "handlers": {
                "sentry": _sentry_init(),
                "error_log": {
                    "level": "WARNING",
                    "formatter": "abstimestamp",
                    "filename": "%s/error.log" % self.log_dir,
                    "class": (
                        "defence360agent.internals.logger."
                        "RotatingCompressionFileHandler"
                    ),
                    "maxBytes": Config.MAX_LOG_FILE_SIZE,
                    "backupCount": Config.BACKUP_COUNT,
                    "encoding": "utf8",
                },
                "network_log": {
                    "level": "DEBUG",
                    "formatter": "abstimestamp",
                    "filename": "%s/network.log" % self.log_dir,
                    "class": (
                        "defence360agent.internals.logger."
                        "RotatingCompressionFileHandler"
                    ),
                    "maxBytes": Config.MAX_LOG_FILE_SIZE,
                    "backupCount": Config.BACKUP_COUNT,
                    "encoding": "utf8",
                },
                "debug_log": {
                    "level": "DEBUG",
                    "formatter": "abstimestamp",
                    "filename": "%s/debug.log" % self.log_dir,
                    "class": (
                        "defence360agent.internals.logger."
                        "RotatingCompressionFileHandler"
                    ),
                    "maxBytes": Config.MAX_LOG_FILE_SIZE,
                    "backupCount": Config.BACKUP_COUNT,
                    "encoding": "utf8",
                },
                "console_log": {
                    "level": "INFO",
                    "formatter": "abstimestamp",
                    "filename": "%s/console.log" % self.log_dir,
                    "class": (
                        "defence360agent.internals.logger."
                        "RotatingCompressionFileHandler"
                    ),
                    "maxBytes": Config.MAX_LOG_FILE_SIZE,
                    "backupCount": Config.BACKUP_COUNT,
                    "encoding": "utf8",
                },
                "hook_log": {
                    "level": "INFO",
                    "formatter": "eventhook",
                    "filename": "%s/hook.log" % self.log_dir,
                    "class": (
                        "defence360agent.internals.logger."
                        "RotatingCompressionFileHandler"
                    ),
                    "maxBytes": Config.MAX_LOG_FILE_SIZE,
                    "backupCount": Config.BACKUP_COUNT,
                    "encoding": "utf8",
                },
                "console": {
                    "formatter": "abstimestamp",
                    "class": "logging.StreamHandler",
                    "stream": "ext://sys.stderr",
                    "level": "INFO",
                },
                "process_message_log": {
                    "formatter": "reltimestamp",
                    "mode": "w",
                    "level": "DEBUG",
                    "filename": "%s/process_message.log" % self.log_dir,
                    "class": (
                        "defence360agent.internals.logger."
                        "RotatingCompressionFileHandler"
                    ),
                    "maxBytes": Config.MAX_LOG_FILE_SIZE,
                    "backupCount": Config.BACKUP_COUNT,
                    "encoding": "utf8",
                },
            },
            "root": {
                "level": "NOTSET",
                "handlers": [
                    "console_log",
                    # 'debug_log' is disabled by default,
                    "error_log",
                    "sentry",
                ],
            },
            # NOTE(review): "mkdir" is not a standard dictConfig key —
            # presumably consumed elsewhere (or ignored); confirm.
            "mkdir": "logs",
            "formatters": {
                "reltimestamp": {
                    "format": (
                        "%(levelname)-7s [+%(relativeCreated)5dms] "
                        f"{PREFIX}%(name)50s|%(message)s"
                    )
                },
                "abstimestamp": {
                    "format": (
                        f"%(levelname)-7s [%(asctime)s] {PREFIX}%(name)s:"
                        " %(message)s"
                    )
                },
                "eventhook": {"format": "%(created)d : %(message)s"},
            },
            "disable_existing_loggers": False,
        }
        # Acronis backup installer gets its own logger/handler; the handler
        # is attached by setLogLevel() only when antivirus mode is disabled.
        self.mutableDictConfig["loggers"]["AcronisClientInstaller"] = {
            "level": "INFO",
            "handlers": [],
        }
        self.mutableDictConfig["handlers"]["acronis_installer_log"] = {
            "formatter": "abstimestamp",
            "mode": "w",
            "level": "INFO",
            "filename": os.path.join(self.log_dir, AcronisBackup.LOG_NAME),
            "class": (
                "defence360agent.internals.logger."
                "RotatingCompressionFileHandler"
            ),
            "maxBytes": Config.MAX_LOG_FILE_SIZE,
            "backupCount": Config.BACKUP_COUNT,
            "encoding": "utf8",
        }
@lru_cache(maxsize=1)
def _late_init():
    """Build the dynamic logging configuration lazily, exactly once."""
    return _LoggerDynConfig()
def _we_are_in_cagefs():
"""
:return bool: True if python interpreter is being run in CageFS container,
otherwise False
:raise: never
Current implementation simply checks "/var/.cagefs" presence, as
Anton Volkov consulted us to do.
Placing this function not in 'subsys' package, because 'logger' module
is one of cornerstones dependency for 'subsys' package as well.
"""
with suppress(OSError):
return os.path.exists("/var/.cagefs")
def _chmod_log_dirs(dirname, dir_perm, file_perm):
"""Change file/dir modes recursively.
Starting at dirname, change all inner directory permissions to dir_perm,
file permissions to file_perm
Permission errors are logged to stderr and are ignored in any case.
"""
def _os_chmod(file_dir_path, permission):
try:
os.chmod(file_dir_path, permission)
except PermissionError as e:
sys.stderr.write(
"[WARNING] cannot chmod on {}: {}".format(file_dir_path, e)
)
_os_chmod(dirname, dir_perm)
for path, dirs, files in os.walk(dirname):
for directory in dirs:
_os_chmod(os.path.join(path, directory), dir_perm)
for name in files:
_os_chmod(os.path.join(path, name), file_perm)
def reconfigure():
    """
    Re-read _LoggerDynConfig and re-open log files: (re)create the log
    directory, fix permissions, then apply the dictConfig.
    """
    if os.getenv("IMUNIFY360_DISABLE_LOGGING"):
        return
    try:
        dyn_config = _late_init()
        os.makedirs(
            dyn_config.log_dir, Config.LOG_DIR_PERM, exist_ok=True
        )
        _chmod_log_dirs(
            dyn_config.log_dir, Config.LOG_DIR_PERM, Config.LOG_FILE_PERM
        )
        logging.config.dictConfig(dyn_config.mutableDictConfig)
    except OSError:
        # We do not create user logs to keep user isolation
        # level high.
        #
        # Another alternative is
        # cagefs.mp:%/var/log/imunify360_user_log
        # but it is not working for some reason, we need to find out
        # later why.
        if not _we_are_in_cagefs():
            traceback.print_exc(file=sys.stderr)
            sys.stderr.write(
                "%s logger is not available.\n" % config.Core.PRODUCT
            )
    except Exception:
        # be robust: do not die if dictConfig fails
        traceback.print_exc(file=sys.stderr)
        sys.stderr.write(
            "%s logger is not available.\n" % config.Core.PRODUCT
        )
    else:
        # logging is configured successfully: route uncaught exceptions
        # through the logger as well.
        sys.excepthook = _log_uncaught_exceptions
def _log_uncaught_exceptions(exc_type, exc_value, exc_traceback):
if issubclass(exc_type, KeyboardInterrupt):
sys.__excepthook__(exc_type, exc_value, exc_traceback)
return
logger.critical(
"uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)
)
def update_logging_config_from_file(filename):
    """Merge a YAML mapping from *filename* into the logging dictConfig
    and re-apply it.

    Top-level keys of the YAML document replace the corresponding keys of
    the current configuration (shallow ``dict.update`` semantics).
    """
    with open(filename) as config_file:
        # Renamed from `config`: the old local shadowed the
        # defence360agent.contracts.config module imported at file top.
        overrides = yaml.safe_load(config_file)
    _late_init().mutableDictConfig.update(overrides)
    reconfigure()
def get_fds():
    """Return the open stream objects of all configured log handlers.

    Streams bound to stderr or without a fileno are excluded, so callers
    can e.g. preserve log file descriptors across a daemonize/exec.
    """
    # Copy the root handler list: the previous code extended
    # logging.root.handlers IN PLACE, permanently attaching every child
    # logger's handlers to the root logger on each call (duplicated log
    # lines after repeated calls).
    handlers = list(logging.root.handlers)
    for _logger in _late_init().mutableDictConfig["loggers"].keys():
        handlers.extend(logging.getLogger(_logger).handlers)
    return [
        h.stream
        for h in handlers
        if hasattr(h, "stream")
        and hasattr(h.stream, "fileno")
        and h.stream != sys.stderr
    ]
def get_log_file_names():
    """Return the filenames of all file-based handlers in the config."""
    return [
        # .values() instead of .items(): the keys were discarded anyway.
        values["filename"]
        for values in _late_init().mutableDictConfig["handlers"].values()
        if "filename" in values
    ]
def getNetworkLogger(name):
    """Return a child of the "network" logger for *name*.

    If *name* is a key in sys.modules, the module's __name__ is used so
    the logger follows the canonical module path.
    """
    if name not in sys.modules:
        return logging.getLogger("network." + name)
    return logging.getLogger("network." + sys.modules[name].__name__)
def setLogLevel(verbose):
    """Attach optional handlers according to *verbose*, then reconfigure.

    verbose >= 2 enables network.log, >= 3 process_message.log,
    >= 4 debug.log; hook.log is always enabled.  The Acronis installer
    log is enabled only when antivirus mode is disabled.
    """
    # FIXME
    cfg = _late_init().mutableDictConfig
    loggers = cfg["loggers"]
    if antivirus_mode.disabled:
        loggers["AcronisClientInstaller"]["handlers"].append(
            "acronis_installer_log"
        )
    if verbose >= 2:
        loggers["network"]["handlers"].append("network_log")
    if verbose >= 3:
        loggers["defence360agent.internals.the_sink"]["handlers"].append(
            "process_message_log"
        )
    if verbose >= 4:
        cfg["root"]["handlers"].append("debug_log")
    loggers["event_hook"]["handlers"].append("hook_log")
    reconfigure()
def setConsoleLogLevel(newloglevel):
    """
    Set the console handler's log level; also results in reconfigure()
    """
    console_handler = _late_init().mutableDictConfig["handlers"]["console"]
    console_handler["level"] = newloglevel
    reconfigure()
def _runMalwareScanLogGC():
    """Delete old malware_scan_*.log files from the log directory.

    Keeps at most Config.BACKUP_COUNT of the most recent files; the
    timestamped filenames sort lexicographically in chronological order.
    """
    pattern = "%s/%s" % (
        _late_init().log_dir,
        "malware_scan_*.log",
    )
    scan_logs = sorted(glob.glob(pattern))
    keep = Config.BACKUP_COUNT
    # Guard the slice: `scan_logs[:-0]` would be empty, silently keeping
    # every file when BACKUP_COUNT == 0 instead of removing them all.
    stale = scan_logs[:-keep] if keep > 0 else scan_logs
    for oldlogfile in stale:
        with suppress(OSError):
            os.unlink(oldlogfile)
@contextmanager
def openMalwareScanLog():
    """Context manager yielding a fresh timestamped malware scan log file.

    Old malware_scan_*.log files are garbage-collected first (see
    _runMalwareScanLogGC).
    """
    _runMalwareScanLogGC()
    filename = time.strftime("malware_scan_%y%m%d%H%M.log")
    path = "%s/%s" % (_late_init().log_dir, filename)
    with open(path, "w") as log_file:
        yield log_file
class EventHookLogger:
    """Structured logger for event-hook script execution.

    Usage (as seen from this code): an EventHookLogger instance is called
    with (event, subtype) to get an _EventLogger; that is called with a
    hook path to get a _HookLogger, whose begin()/finish() write lines to
    the "event_hook" logger.  All hook runs for one event share a uuid.
    """
    class _EventLogger:
        class _HookLogger:
            # Log-line template: "<uuid> : <action> [native ]:
            # <event> : <subtype> : <path>".
            tpl = (
                "{uuid:s} : {action:s} {native:s}: "
                "{event:s} : {subtype:s} : {path:s}"
            )
            def __init__(self, parent, path, native):
                self.path = path
                # Inherit event/subtype/uuid/log sink from the parent
                # _EventLogger so all hooks of one event correlate.
                self.event = parent.event
                self.subtype = parent.subtype
                self.uuid = parent.uuid
                self.log = parent.log
            def __enter__(self):
                return self
            def __exit__(self, exc_type, exc_val, exc_tb):
                # No cleanup needed; context-manager form is for symmetry.
                pass
            def _log(self, action, message=""):
                # Render one log line from the template; an optional extra
                # message is appended after " : ".
                data = {
                    "uuid": str(self.uuid),
                    "action": action,
                    "native": "native " if self.native else "",
                    "event": self.event,
                    "subtype": self.subtype,
                    "path": self.path,
                }
                msg = self.tpl.format(**data)
                if message:
                    msg = " : ".join([msg, message])
                self.log(msg)
            def begin(self):
                # Mark the start of a hook run.
                self._log("started")
            def finish(self, exit_code, err):
                # Mark completion: "OK" on exit code 0, otherwise
                # "ERROR:<code>", with captured stderr appended if any.
                message = "OK" if exit_code == 0 else "ERROR"
                if exit_code:
                    message = ":".join([message, str(exit_code)])
                if err:
                    if isinstance(err, bytes):
                        err = err.decode(errors="backslashreplace")
                    message = "\n".join([message, err])
                self._log("done", message)
        def __init__(self, parent, event, subtype):
            self.event = event
            self.subtype = subtype
            # Fresh uuid per event occurrence; shared by all its hooks.
            self.uuid = uuid.uuid4()
            self.log = parent.log
        def __call__(self, path, native=False):
            return self._HookLogger(self, path, native=native)
        def __enter__(self):
            return self
        def __exit__(self, exc_type, exc_val, exc_tb):
            pass
    def __init__(self):
        logger = logging.getLogger("event_hook")
        # All hook lines go out at INFO level on the "event_hook" logger.
        self.log = logger.info
    def __call__(self, event, subtype):
        return self._EventLogger(self, event, subtype)