import os
import time
from sys import stdout
from zope.interface import implementer
from twisted.python.log import startLoggingWithObserver, textFromEventDict, msg, err, ILogObserver  # NOQA
from twisted.python.syslog import SyslogObserver
from twisted.python.logfile import DailyLogFile


class CarbonLogFile(DailyLogFile):
  """Overridden to support logrotate.d"""
  def __init__(self, *args, **kwargs):
    DailyLogFile.__init__(self, *args, **kwargs)
    # avoid circular dependencies
    from carbon.conf import settings
    self.enableRotation = settings.ENABLE_LOGROTATION

  def _openFile(self):
    """
    Fix Umask Issue https://twistedmatrix.com/trac/ticket/7026
    """
    openMode = self.defaultMode or 0o777
    self._file = os.fdopen(os.open(
      self.path, os.O_CREAT | os.O_RDWR, openMode), 'rb+', 1)
    self.closed = False
    # Try our best to update permissions for files which already exist.
    if self.defaultMode:
      try:
        os.chmod(self.path, self.defaultMode)
      except OSError:
        pass
    # Seek is needed for uniformity of stream positioning
    # for read and write between Linux and BSD systems due
    # to differences in fopen() between operating systems.
    self._file.seek(0, os.SEEK_END)
    # stat()[8] is st_mtime: start the rotation clock from the file's
    # last modification time rather than from the time it was opened.
    self.lastDate = self.toDate(os.stat(self.path)[8])

  def shouldRotate(self):
    # Only rotate from within Carbon when log rotation is enabled;
    # otherwise defer entirely to external tools such as logrotate.d.
    if self.enableRotation:
      return DailyLogFile.shouldRotate(self)
    else:
      return False

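  # When Carbon does not rotate its own logs, an external tool (e.g.
  # logrotate.d) may move or remove the file behind our back. Before each
  # write, check that self.path still exists and still refers to the same
  # inode/device as the open file descriptor; if not, reopen the file.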
  def write(self, data):
    if not self.enableRotation:
      if not os.path.exists(self.path):
        self.reopen()
      else:
        path_stat = os.stat(self.path)
        fd_stat = os.fstat(self._file.fileno())
        if not (path_stat.st_ino == fd_stat.st_ino and path_stat.st_dev == fd_stat.st_dev):
          self.reopen()
    DailyLogFile.write(self, data)

  # Backport from twisted >= 10
  def reopen(self):
    self.close()
    self._openFile()


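# Legacy-style Twisted log observer: events arrive as plain dicts. Error
# events carrying a Failure are forwarded to Sentry when raven is configured,
# and every event is then routed to stdout, to per-type files in a log
# directory, or to syslog, depending on how logging was set up.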
@implementer(ILogObserver)
class CarbonLogObserver(object):

  def __init__(self):
    self._raven_client = None

  def raven_client(self):
    if self._raven_client is not None:
      return self._raven_client

    # Import here to avoid dependency hell.
    try:
      import raven
    except ImportError:
      return None
    from carbon.conf import settings

    if settings.RAVEN_DSN is None:
      return None
    self._raven_client = raven.Client(dsn=settings.RAVEN_DSN)
    return self._raven_client

  def log_to_raven(self, event):
    if not event.get('isError') or 'failure' not in event:
      return
    client = self.raven_client()
    if client is None:
      return
    f = event['failure']
    client.captureException(
      (f.type, f.value, f.getTracebackObject())
    )

  def log_to_dir(self, logdir):
    self.logdir = logdir
    self.console_logfile = CarbonLogFile('console.log', logdir)
    self.custom_logs = {}
    self.observer = self.logdir_observer

  def log_to_syslog(self, prefix):
    observer = SyslogObserver(prefix).emit

    def syslog_observer(event):
      event["system"] = event.get("type", "console")
      observer(event)
    self.observer = syslog_observer

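  # Called by Twisted for every log event: report errors to Sentry first,
  # then hand the event to whichever observer is currently active.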
  def __call__(self, event):
    self.log_to_raven(event)
    return self.observer(event)

  @staticmethod
  def stdout_observer(event):
    stdout.write(formatEvent(event, includeType=True) + '\n')
    stdout.flush()

  def logdir_observer(self, event):
    message = formatEvent(event)
    log_type = event.get('type')

    # Lazily open one log file per event type (e.g. creates.log, updates.log);
    # events without a type fall through to console.log.
    if log_type is not None and log_type not in self.custom_logs:
      self.custom_logs[log_type] = CarbonLogFile(log_type + '.log', self.logdir)

    logfile = self.custom_logs.get(log_type, self.console_logfile)
    logfile.write(message + '\n')
    logfile.flush()

  # Default to stdout
  observer = stdout_observer


carbonLogObserver = CarbonLogObserver()


def formatEvent(event, includeType=False):
  event['isError'] = 'failure' in event
  message = textFromEventDict(event)

  if includeType:
    typeTag = '[%s] ' % event.get('type', 'console')
  else:
    typeTag = ''

  timestamp = time.strftime("%d/%m/%Y %H:%M:%S")
  return "%s :: %s%s" % (timestamp, typeTag, message)
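# With includeType=True the formatted line looks like (illustrative values):
#   04/03/2024 16:20:01 :: [cache] <message text>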


logToDir = carbonLogObserver.log_to_dir

logToSyslog = carbonLogObserver.log_to_syslog


def logToStdout():
  startLoggingWithObserver(carbonLogObserver)


# Typed logging helpers: each tags the message with a 'type' so that
# logdir_observer writes it to its own file (cache.log, creates.log, ...)
# and the stdout/syslog observers can label it accordingly.
def cache(message, **context):
  context['type'] = 'cache'
  msg(message, **context)


def clients(message, **context):
  context['type'] = 'clients'
  msg(message, **context)


def creates(message, **context):
  context['type'] = 'creates'
  msg(message, **context)


def updates(message, **context):
  context['type'] = 'updates'
  msg(message, **context)


def listener(message, **context):
  context['type'] = 'listener'
  msg(message, **context)


def relay(message, **context):
  context['type'] = 'relay'
  msg(message, **context)


def aggregator(message, **context):
  context['type'] = 'aggregator'
  msg(message, **context)


def query(message, **context):
  context['type'] = 'query'
  msg(message, **context)


def debug(message, **context):
  if debugEnabled:
    msg(message, **context)


debugEnabled = False


def setDebugEnabled(enabled):
  global debugEnabled
  debugEnabled = enabled
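
if __name__ == '__main__':
  # Minimal smoke test (illustrative only; not part of carbon's normal
  # startup, which configures logging from its own service code). Assumes
  # Twisted and zope.interface are installed; log lines go to stdout.
  logToStdout()
  setDebugEnabled(True)
  cache('example cache message')
  creates('example creates message')
  debug('debug logging is enabled')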