 Dockerfile                                     |  11
 conf/nginx.conf                                |   5
 conf/supervisord.conf                          |   5
 deploy/settings.yml.example                    |  12
 omaha_server/crash/utils.py                    |   2
 omaha_server/omaha/limitation.py               |  34
 omaha_server/omaha/tasks.py                    |  72
 omaha_server/omaha/tests/test_forms.py         |   2
 omaha_server/omaha/tests/test_limitation.py    |  63
 omaha_server/omaha/tests/test_s3.py            |   5
 omaha_server/omaha/tests/test_tasks.py         | 318
 omaha_server/omaha_server/settings_prod.py     |  27
 omaha_server/omaha_server/settings_test.py     |   1
 omaha_server/omaha_server/tests/test_utils.py  |  16
 omaha_server/omaha_server/utils.py             |  11
 pavement.py                                    |   6
 requirements/base.txt                          |   3
 17 files changed, 511 insertions(+), 82 deletions(-)
@@ -17,7 +17,8 @@ RUN \
     ./configure --prefix=/usr && \
     make && \
     make install && \
-    mkdir /srv/omaha_s3
+    mkdir /srv/omaha_s3 && \
+    rm /usr/src/v1.78.tar.gz

 RUN mkdir -p $omaha/requirements

@@ -39,5 +40,13 @@ RUN \
     ln -s /srv/omaha/conf/nginx-app.conf /etc/nginx/sites-enabled/ && \
     ln -s /srv/omaha/conf/supervisord.conf /etc/supervisor/conf.d/

+RUN \
+    wget -O /tmp/splunkforwarder-6.3.1-f3e41e4b37b2-linux-2.6-amd64.deb 'http://www.splunk.com/bin/splunk/DownloadActivityServlet?architecture=x86_64&platform=linux&version=6.3.1&product=universalforwarder&filename=splunkforwarder-6.3.1-f3e41e4b37b2-linux-2.6-amd64.deb&wget=true' && \
+    dpkg -i /tmp/splunkforwarder-6.3.1-f3e41e4b37b2-linux-2.6-amd64.deb && \
+    /opt/splunkforwarder/bin/splunk start --accept-license && \
+    /opt/splunkforwarder/bin/splunk add forward-server splunk.viasat.omaha-server.com:9997 -auth admin:changeme && \
+    /opt/splunkforwarder/bin/splunk add monitor /var/log/nginx -index main -sourcetype Nginx && \
+    rm /tmp/splunkforwarder-6.3.1-f3e41e4b37b2-linux-2.6-amd64.deb
+
 EXPOSE 80

 CMD ["paver", "docker_run"]
diff --git a/conf/nginx.conf b/conf/nginx.conf
index 117bb9d..b1faf6d 100644
--- a/conf/nginx.conf
+++ b/conf/nginx.conf
@@ -9,7 +9,10 @@ events {
 }

 http {
-
+    real_ip_header X-Forwarded-For;
+    set_real_ip_from 172.17.42.1;
+    set_real_ip_from 10.0.0.0/8;
+    real_ip_recursive on;
     ##
     # Basic Settings
     ##
diff --git a/conf/supervisord.conf b/conf/supervisord.conf
index f8915a7..117403a 100644
--- a/conf/supervisord.conf
+++ b/conf/supervisord.conf
@@ -27,3 +27,8 @@ killasgroup=true
 command=/usr/bin/s3fs %(ENV_AWS_STORAGE_BUCKET_NAME)s /srv/omaha_s3 -f -ouse_cache=/tmp -oiam_role=%(ENV_AWS_ROLE)s
 autostart=true
 autorestart=true
+
+[program:splunk-forwarder]
+command=/opt/splunkforwarder/bin/splunk start
+autostart=true
+autorestart=true
\ No newline at end of file
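
A note on the three config changes above: the Dockerfile bakes the Splunk universal forwarder into the image (license accepted, forward-server pointed at splunk.viasat.omaha-server.com:9997, /var/log/nginx monitored into the main index), supervisord keeps the forwarder running alongside the app, and the nginx real_ip directives make access-log entries carry the real client address from X-Forwarded-For instead of the trusted-proxy addresses (the Docker bridge IP and the internal 10.0.0.0/8 range). A hypothetical smoke test for the forwarder wiring, assuming only the host and port from the Dockerfile; this helper is not part of the commit:

import socket

def forwarder_endpoint_reachable(host='splunk.viasat.omaha-server.com', port=9997, timeout=5):
    """Return True if the Splunk receiving port accepts TCP connections."""
    try:
        sock = socket.create_connection((host, port), timeout=timeout)
    except socket.error:
        return False
    sock.close()
    return True
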
diff --git a/deploy/settings.yml.example b/deploy/settings.yml.example
index c07a0e4..cf3fb94 100644
--- a/deploy/settings.yml.example
+++ b/deploy/settings.yml.example
@@ -43,7 +43,12 @@ app:
       DB_PUBLIC_USER: 'omaha_public'
       DB_PUBLIC_PASSWORD: 'omaha_public_password'
       AWS_ROLE: 'omaha-private'
-
+      SPLUNK_LOGGING_LEVEL: 'INFO'
+      SPLUNK_HOST: 'splunk.example.com'
+      SPLUNK_PORT: 8089
+      SPLUNK_USERNAME: 'admin'
+      SPLUNK_PASSWORD: 'password'
+
   omaha-server-public:
     option_settings:
       'aws:autoscaling:launchconfiguration':
@@ -62,3 +67,8 @@ app:
       REDIS_HOST: 'redis.example.com'
       AWS_ROLE: 'omaha-public'
       DJANGO_SETTINGS_MODULE: 'omaha_server.settings_prod'
+      SPLUNK_LOGGING_LEVEL: 'INFO'
+      SPLUNK_HOST: 'splunk.example.com'
+      SPLUNK_PORT: 8089
+      SPLUNK_USERNAME: 'admin'
+      SPLUNK_PASSWORD: 'password'
\ No newline at end of file
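
The five SPLUNK_* options above are plain Elastic Beanstalk environment settings; nothing reads them at deploy time. They only become os.environ entries inside the container, where settings_prod.py (later in this diff) uses them to configure the splunk_handler logging handler. Note the two distinct ports in play: 8089 is Splunk's management/REST port, which splunk_handler posts events to, while the 9997 in the Dockerfile is the indexer's receiving port for the forwarder. A minimal sketch of the consuming side, with defaults mirroring the handler config shown below:

import os

# Mirrors the reads in settings_prod.py later in this diff.
# Splunk logging stays disabled while SPLUNK_HOST is unset.
SPLUNK_HOST = os.environ.get('SPLUNK_HOST')
SPLUNK_PORT = os.environ.get('SPLUNK_PORT', 8089)  # REST port, not the 9997 forwarder port
SPLUNK_LOGGING_LEVEL = os.environ.get('SPLUNK_LOGGING_LEVEL', 'INFO')
SPLUNK_USERNAME = os.environ.get('SPLUNK_USERNAME', 'admin')
SPLUNK_PASSWORD = os.environ.get('SPLUNK_PASSWORD', 'changeme')
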
diff --git a/omaha_server/crash/utils.py b/omaha_server/crash/utils.py
index 8d6664d..24e7483 100644
--- a/omaha_server/crash/utils.py
+++ b/omaha_server/crash/utils.py
@@ -32,7 +32,7 @@ from crash.settings import MINIDUMP_STACKWALK_PATH, SYMBOLS_PATH
 from crash.stacktrace_to_json import pipe_dump_to_json_dump

-client = Client(getattr(settings, 'RAVEN_DSN_STACKTRACE', None))
+client = Client(getattr(settings, 'RAVEN_DSN_STACKTRACE', None), name=getattr(settings, 'HOST_NAME', None))

 class FileNotFoundError(Exception):
diff --git a/omaha_server/omaha/limitation.py b/omaha_server/omaha/limitation.py
index 8341300..52ec0ee 100644
--- a/omaha_server/omaha/limitation.py
+++ b/omaha_server/omaha/limitation.py
@@ -22,8 +22,7 @@ from dynamic_preferences_registry import global_preferences_manager as gpm
 dsn = getattr(settings, 'RAVEN_CONFIG', None)
 if dsn:
     dsn = dsn['dsn']
-raven = Client(dsn, list_max_length=1000)
-
+raven = Client(dsn, name=getattr(settings, 'HOST_NAME', None))

 @valuedispatch
 def bulk_delete(cls, qs):
@@ -32,17 +31,16 @@ def bulk_delete(cls, qs):

 @bulk_delete.register(Crash)
 def _(cls, qs):
-    if settings.DEFAULT_FILE_STORAGE == 'storages.backends.s3boto.S3BotoStorage':
+    if settings.DEFAULT_FILE_STORAGE == 'omaha_server.s3utils.S3Storage':
         qs = s3_bulk_delete(qs, file_fields=['archive', 'upload_file_minidump'],
                             s3_fields=['minidump_archive', 'minidump'])

     result = dict()
     result['count'] = qs.count()
     result['size'] = qs.get_size()
-    result['elements'] = list(qs.values_list('id', 'created', 'signature'))
-    created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p")
-    signature_to_string = lambda x: 'Signature: %s' % x
-    result['elements'] = map(lambda x: (x[0], created_to_string(x[1]), signature_to_string(x[2])), result['elements'])
+    elements = list(qs.values_list('id', 'created', 'signature', 'userid', 'appid'))
+    result['elements'] = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p"), signature=x[2],
+                                            userid=x[3], appid=x[4]), elements)
     qs.delete()
     return result

@@ -56,9 +54,8 @@ def _(cls, qs):
     result = dict()
     result['count'] = qs.count()
     result['size'] = qs.get_size()
-    result['elements'] = list(qs.values_list('id', 'created'))
-    created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p")
-    result['elements'] = map(lambda x: (x[0], created_to_string(x[1])), result['elements'])
+    elements = list(qs.values_list('id', 'created'))
+    result['elements'] = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p")), elements)
     qs.delete()
     return result

@@ -71,9 +68,8 @@ def _(cls, qs):
     result = dict()
     result['count'] = qs.count()
     result['size'] = qs.get_size()
-    result['elements'] = list(qs.values_list('id', 'created'))
-    created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p")
-    result['elements'] = map(lambda x: (x[0], created_to_string(x[1])), result['elements'])
+    elements = list(qs.values_list('id', 'created'))
+    result['elements'] = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p")), elements)
     qs.delete()
     return result

@@ -86,9 +82,8 @@ def _(cls, qs):
     result = dict()
     result['count'] = qs.count()
     result['size'] = qs.get_size()
-    result['elements'] = list(qs.values_list('id', 'created'))
-    created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p")
-    result['elements'] = map(lambda x: (x[0], created_to_string(x[1])), result['elements'])
+    elements = list(qs.values_list('id', 'created'))
+    result['elements'] = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p")), elements)
     qs.delete()
     return result

@@ -102,8 +97,8 @@ def _(cls, qs):
     result['count'] = qs.count()
     result['size'] = qs.get_size()
     result['elements'] = list(qs.values_list('id', 'created'))
-    created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p")
-    result['elements'] = map(lambda x: (x[0], created_to_string(x[1])), result['elements'])
+    elements = list(qs.values_list('id', 'created'))
+    result['elements'] = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p")), elements)
     qs.delete()
     return result

@@ -141,7 +136,7 @@ def delete_older_than(app, model_name, limit=None):

 def delete_duplicate_crashes(limit=None):
-    full_result = dict(count=0, size=0, signatures=dict(), elements=[])
+    full_result = dict(count=0, size=0, elements=[])
     if not limit:
         preference_key = '__'.join(['Crash', 'duplicate_number'])
         limit = gpm[preference_key]
@@ -161,7 +156,6 @@ def delete_duplicate_crashes(limit=None):
             full_result['elements'] += result['elements']
             dup_elements += result['elements']
             dup_count -= bulk_size
-        full_result['signatures'].update({'%s' % (group['signature'],): dup_elements})
     return full_result
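
The limitation.py refactor drops the pre-rendered 'Created:'/'Signature:' display strings (and the signatures aggregation) in favor of flat dicts, so each deleted element can later be logged as machine-parseable key=value pairs. The shape every bulk_delete handler now returns, sketched with illustrative values; only Crash rows carry signature/userid/appid, the other models return just id and element_created:

# Illustrative return value of a bulk_delete handler after this change.
result = {
    'count': 2,            # rows deleted
    'size': 20951040,      # bytes freed; callers divide by 1024 * 1024 for Mb
    'elements': [
        {'id': 1, 'element_created': '21. December 2012 12:00PM',
         'signature': 'test', 'userid': 'user-guid-1', 'appid': 'app-guid-1'},
        {'id': 2, 'element_created': '21. December 2012 12:00PM',
         'signature': 'test', 'userid': 'user-guid-2', 'appid': 'app-guid-1'},
    ],
}
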
%B %Y %I:%M%p") - result['elements'] = map(lambda x: (x[0], created_to_string(x[1])), result['elements']) + elements = list(qs.values_list('id', 'created')) + result['elements'] = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p")), elements) qs.delete() return result @@ -102,8 +97,8 @@ def _(cls, qs): result['count'] = qs.count() result['size'] = qs.get_size() result['elements'] = list(qs.values_list('id', 'created')) - created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p") - result['elements'] = map(lambda x: (x[0], created_to_string(x[1])), result['elements']) + elements = list(qs.values_list('id', 'created')) + result['elements'] = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p")), elements) qs.delete() return result @@ -141,7 +136,7 @@ def delete_older_than(app, model_name, limit=None): def delete_duplicate_crashes(limit=None): - full_result = dict(count=0, size=0, signatures=dict(), elements=[]) + full_result = dict(count=0, size=0, elements=[]) if not limit: preference_key = '__'.join(['Crash', 'duplicate_number']) limit = gpm[preference_key] @@ -161,7 +156,6 @@ def delete_duplicate_crashes(limit=None): full_result['elements'] += result['elements'] dup_elements += result['elements'] dup_count -= bulk_size - full_result['signatures'].update({'%s' % (group['signature'],): dup_elements}) return full_result diff --git a/omaha_server/omaha/tasks.py b/omaha_server/omaha/tasks.py index a9dde82..95b0b8f 100644 --- a/omaha_server/omaha/tasks.py +++ b/omaha_server/omaha/tasks.py @@ -18,12 +18,16 @@ License for the specific language governing permissions and limitations under the License. """ import time +import logging +import uuid from omaha_server.celery import app +from omaha_server.utils import add_extra_to_log_message, get_splunk_url from omaha import statistics from omaha.parser import parse_request from omaha.limitation import delete_older_than, delete_size_is_exceeded, delete_duplicate_crashes, monitoring_size, raven +logger = logging.getLogger(__name__) @app.task(ignore_result=True) def collect_statistics(request, ip=None): @@ -32,6 +36,7 @@ def collect_statistics(request, ip=None): @app.task(name='tasks.auto_delete_older_then', ignore_result=True) def auto_delete_older_than(): + logger = logging.getLogger('limitation') model_list = [ ('crash', 'Crash'), ('feedback', 'Feedback') @@ -40,15 +45,23 @@ def auto_delete_older_than(): result = delete_older_than(*model) if result.get('count', 0): result['size'] /= 1024.0 * 1024 - extra = dict(elements=result['elements']) - + log_id = str(uuid.uuid4()) + params = dict(log_id=log_id) + splunk_url = get_splunk_url(params) + splunk_filter = 'log_id=%s' % log_id if splunk_url else None + raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter) raven.captureMessage("[Limitation]Periodic task 'Older than' cleaned up %d %s, total size of cleaned space is %.2f Mb[%d]" % (result['count'], model[1], result['size'], time.time()), - data=dict(level=20, logger='limitation'), extra=extra) - + data=dict(level=20, logger='limitation'), extra=raven_extra) + extra = dict(log_id=log_id, meta=True, count=result['count'], size=result['size'], model=model[1], reason='old') + logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra)) + for element in result['elements']: + element.update(dict(log_id=log_id)) + logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element)) @app.task(name='tasks.auto_delete_size_is_exceeded', 
 def auto_delete_size_is_exceeded():
+    logger = logging.getLogger('limitation')
     model_list = [
         ('crash', 'Crash'),
         ('feedback', 'Feedback')
@@ -56,33 +69,52 @@ def auto_delete_size_is_exceeded():
     for model in model_list:
         result = delete_size_is_exceeded(*model)
         if result.get('count', 0):
-            extra = dict(elements=result['elements'])
             result['size'] /= 1024.0 * 1024
+            log_id = str(uuid.uuid4())
+            params = dict(log_id=log_id)
+            splunk_url = get_splunk_url(params)
+            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
+            raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
             raven.captureMessage("[Limitation]Periodic task 'Size is exceeded' cleaned up %d %s, total size of cleaned space is %.2f Mb[%d]" %
                                  (result['count'], model[1], result['size'], time.time()),
-                                 data=dict(level=20, logger='limitation'), extra=extra)
+                                 data=dict(level=20, logger='limitation'), extra=raven_extra)
+            extra = dict(log_id=log_id, meta=True, count=result['count'], size=result['size'], model=model[1], reason='size_is_exceeded')
+            logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
+            for element in result['elements']:
+                element.update(dict(log_id=log_id))
+                logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))

 @app.task(name='tasks.auto_delete_duplicate_crashes', ignore_result=True)
 def auto_delete_duplicate_crashes():
+    logger = logging.getLogger('limitation')
     result = delete_duplicate_crashes()
     if result.get('count', 0):
         result['size'] /= 1024.0 * 1024
+        log_id = str(uuid.uuid4())
+        params = dict(log_id=log_id)
+        splunk_url = get_splunk_url(params)
+        splunk_filter = 'log_id=%s' % log_id if splunk_url else None
+        raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
         raven.captureMessage("[Limitation]Periodic task 'Duplicated' cleaned up %d crashes, total size of cleaned space is %.2f Mb[%d]" %
                              (result['count'], result['size'], time.time()),
-                             data=dict(level=20, logger='limitation'), extra=result['signatures'])
-
+                             data=dict(level=20, logger='limitation'), extra=raven_extra)
+        extra = dict(log_id=log_id, meta=True, count=result['count'], size=result['size'], reason='duplicated', model='Crash')
+        logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
+        for element in result['elements']:
+            element.update(dict(log_id=log_id))
+            logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))

 @app.task(name='tasks.deferred_manual_cleanup')
 def deferred_manual_cleanup(model, limit_size=None, limit_days=None, limit_duplicated=None):
-    full_result = dict(count=0, size=0, elements=[], signatures={})
+    logger = logging.getLogger('limitation')
+    full_result = dict(count=0, size=0, elements=[])
     if limit_duplicated:
         result = delete_duplicate_crashes(limit=limit_duplicated)
         if result.get('count', 0):
             full_result['count'] += result['count']
             full_result['size'] += result['size']
             full_result['elements'] += result['elements']
-            full_result['signatures'].update(result['signatures'])

     if limit_days:
         result = delete_older_than(*model, limit=limit_days)
@@ -99,11 +131,21 @@ def deferred_manual_cleanup(model, limit_size=None, limit_days=None, limit_duplicated=None):
         full_result['elements'] += result['elements']

     full_result['size'] /= 1024.0 * 1024
-    extra = dict(elements=full_result['elements'])
-    extra.update(full_result['signatures'])
-    raven.captureMessage("[Limitation]Manual cleanup freed %d %s, total size of cleaned space is %.2f Mb[%d]" %
-                         (full_result['count'], model[1], full_result['size'], time.time()),
-                         data=dict(level=20, logger='limitation'), extra=extra)
+    log_id = str(uuid.uuid4())
+    params = dict(log_id=log_id)
+    splunk_url = get_splunk_url(params)
+    splunk_filter = 'log_id=%s' % log_id if splunk_url else None
+    raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
+    raven.captureMessage("[Limitation]Manual cleanup freed %d %s, total size of cleaned space is %.2f Mb[%s]" %
+                         (full_result['count'], model[1], full_result['size'], log_id),
+                         data=dict(level=20, logger='limitation'), extra=raven_extra)
+
+    extra = dict(log_id=log_id, meta=True, count=full_result['count'], size=full_result['size'], model=model[1],
+                 limit_duplicated=limit_duplicated, limit_size=limit_size, limit_days=limit_days, reason='manual')
+    logger.info(add_extra_to_log_message('Manual cleanup', extra=extra))
+    for element in full_result['elements']:
+        element.update(dict(log_id=log_id))
+        logger.info(add_extra_to_log_message('Manual cleanup element', extra=element))

 @app.task(name='tasks.auto_monitoring_size', ignore_result=True)
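
All four tasks above share one correlation pattern: mint a single uuid4 per cleanup run, stamp it as log_id on a meta line and on one line per deleted element (so Splunk can reassemble the whole run from one search), and hand Sentry the matching Splunk search URL and filter instead of the raw element dump. Distilled into one sketch; the two helpers are real and appear in omaha_server/utils.py later in this diff, but this function itself is illustrative, not code from the commit:

import logging
import uuid

from omaha_server.utils import add_extra_to_log_message, get_splunk_url

logger = logging.getLogger('limitation')

def log_cleanup_run(result, model_name, reason):
    """Emit one meta line plus one line per element, all sharing a log_id."""
    log_id = str(uuid.uuid4())
    splunk_url = get_splunk_url(dict(log_id=log_id))  # None when SPLUNK_HOST is unset
    splunk_filter = 'log_id=%s' % log_id if splunk_url else None
    meta = dict(log_id=log_id, meta=True, count=result['count'],
                size=result['size'], model=model_name, reason=reason)
    logger.info(add_extra_to_log_message('Automatic cleanup', extra=meta))
    for element in result['elements']:
        element.update(dict(log_id=log_id))
        logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))
    # splunk_url and splunk_filter travel to Sentry via raven.captureMessage(..., extra=...)
    return log_id, splunk_url, splunk_filter
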
diff --git a/omaha_server/omaha/tests/test_forms.py b/omaha_server/omaha/tests/test_forms.py
index 724eb40..9cdb6ce 100644
--- a/omaha_server/omaha/tests/test_forms.py
+++ b/omaha_server/omaha/tests/test_forms.py
@@ -20,7 +20,7 @@ the License.

 from collections import OrderedDict

-from django.test import TestCase, override_settings
+from django.test import TestCase

 from omaha.forms import ApplicationAdminForm, ManualCleanupForm, CrashManualCleanupForm
diff --git a/omaha_server/omaha/tests/test_limitation.py b/omaha_server/omaha/tests/test_limitation.py
index 3689330..d6f7aa8 100644
--- a/omaha_server/omaha/tests/test_limitation.py
+++ b/omaha_server/omaha/tests/test_limitation.py
@@ -22,18 +22,15 @@ from django.test import TestCase, override_settings
 from django.utils import timezone
 from django.core.cache import cache

-import moto
-
 from crash.factories import CrashFactory
+from crash.models import Crash, Symbols
 from feedback.factories import FeedbackFactory
+from feedback.models import Feedback
 from omaha.dynamic_preferences_registry import global_preferences_manager as gpm
 from omaha.limitation import delete_older_than, delete_size_is_exceeded, delete_duplicate_crashes
 from omaha.limitation import monitoring_size
-from omaha_server.utils import is_private
-from crash.models import Crash, Symbols
-from feedback.models import Feedback
-
 from omaha.factories import VersionFactory
+from omaha_server.utils import is_private
 from sparkle.factories import SparkleVersionFactory

 class DeleteOldTest(TestCase):
@@ -45,12 +42,12 @@ class DeleteOldTest(TestCase):
         Crash.objects.update(created=old_date)
         self.assertEqual(Crash.objects.all().count(), 10)

-        created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p")
-        signature_to_string = lambda x: 'Signature: %s' % x
-        deleted = list(Crash.objects.values_list('id', 'created', 'signature'))
-        deleted = map(lambda x: (x[0], created_to_string(x[1]), signature_to_string(x[2])), deleted)
+        deleted = list(Crash.objects.values_list('id', 'created', 'signature', 'userid', 'appid'))
+        deleted = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p"), signature=x[2],
+                                     userid=x[3], appid=x[4]), deleted)

         result = delete_older_than('crash', 'Crash')
+
         self.assertDictEqual(result, dict(count=10, size=0, elements=deleted))
         self.assertEqual(Crash.objects.all().count(), 0)

@@ -62,11 +59,11 @@ class DeleteOldTest(TestCase):
         Feedback.objects.update(created=old_date)
         self.assertEqual(Feedback.objects.all().count(), 10)

-        created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p")
         deleted = list(Feedback.objects.values_list('id', 'created'))
-        deleted = map(lambda x: (x[0], created_to_string(x[1])), deleted)
+        deleted = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p")), deleted)

         result = delete_older_than('feedback', 'Feedback')
+
         self.assertDictEqual(result, dict(count=10, size=0, elements=deleted))
         self.assertEqual(Feedback.objects.all().count(), 0)

@@ -76,14 +73,14 @@ class SizeExceedTest(TestCase):
     def test_crashes(self):
         gpm['Crash__limit_size'] = 1
         crash_size = 10*1024*1023
-        CrashFactory.create_batch(500, archive_size=crash_size, minidump_size=0)
-        self.assertEqual(Crash.objects.all().count(), 500)
-
-        del_count = 398
-        created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p")
-        signature_to_string = lambda x: 'Signature: %s' % x
-        deleted = list(Crash.objects.values_list('id', 'created', 'signature'))[:del_count]
-        deleted = map(lambda x: (x[0], created_to_string(x[1]), signature_to_string(x[2])), deleted)
+        CrashFactory.create_batch(200, archive_size=crash_size, minidump_size=0)
+        self.assertEqual(Crash.objects.all().count(), 200)
+
+        del_count = 98
+        deleted = list(Crash.objects.values_list('id', 'created', 'signature', 'userid', 'appid'))[:del_count]
+        deleted = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p"), signature=x[2],
+                                     userid=x[3], appid=x[4]), deleted)
+
         result = delete_size_is_exceeded('crash', 'Crash')
         self.assertDictEqual(result, dict(count=del_count, size=del_count * crash_size, elements=deleted))

@@ -93,13 +90,12 @@ class SizeExceedTest(TestCase):
     def test_feedbacks(self):
         gpm['Feedback__limit_size'] = 1
         feedback_size = 10*1024*1023
-        FeedbackFactory.create_batch(500, screenshot_size=feedback_size, system_logs_size=0, attached_file_size=0, blackbox_size=0)
-        self.assertEqual(Feedback.objects.all().count(), 500)
+        FeedbackFactory.create_batch(200, screenshot_size=feedback_size, system_logs_size=0, attached_file_size=0, blackbox_size=0)
+        self.assertEqual(Feedback.objects.all().count(), 200)

-        del_count = 398
-        created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p")
-        deleted = list(Feedback.objects.values_list('id', 'created'))[:del_count]
-        deleted = map(lambda x: (x[0], created_to_string(x[1])), deleted)
+        del_count = 98
+        deleted = list(Feedback.objects.values_list('id', 'created'))
+        deleted = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p")), deleted)[:del_count]

         result = delete_size_is_exceeded('feedback', 'Feedback')
         self.assertDictEqual(result, dict(count=del_count, size=del_count * feedback_size, elements=deleted))

@@ -110,19 +106,18 @@ class DeleteDuplicateTest(TestCase):
     @is_private()
     def test_crashes(self):
         gpm['Crash__duplicate_number'] = 10
-        CrashFactory.create_batch(20, signature='test1')
-        self.assertEqual(Crash.objects.filter(signature='test1').count(), 20)
+        CrashFactory.create_batch(25, signature='test1')
+        self.assertEqual(Crash.objects.filter(signature='test1').count(), 25)
         CrashFactory.create_batch(9, signature='test2')
         self.assertEqual(Crash.objects.filter(signature='test2').count(), 9)

-        created_to_string = lambda x: 'Created: %s' % x.strftime("%d. %B %Y %I:%M%p")
-        signature_to_string = lambda x: 'Signature: %s' % x
-        deleted = list(Crash.objects.filter(signature='test1').values_list('id', 'created', 'signature'))[:10]
-        deleted = map(lambda x: (x[0], created_to_string(x[1]), signature_to_string(x[2])), deleted)
-        signatures = dict(test1=deleted)
+        deleted = list(Crash.objects.filter(signature='test1').values_list('id', 'created', 'signature', 'userid', 'appid'))[:15]
+        deleted = map(lambda x: dict(id=x[0], element_created=x[1].strftime("%d. %B %Y %I:%M%p"), signature=x[2],
+                                     userid=x[3], appid=x[4]), deleted)

         result = delete_duplicate_crashes()
-        self.assertDictEqual(result, dict(count=10, size=0, elements=deleted, signatures=signatures))
+
+        self.assertDictEqual(result, dict(count=15, size=0, elements=deleted))
         self.assertEqual(Crash.objects.filter(signature='test1').count(), gpm['Crash__duplicate_number'])
         self.assertEqual(Crash.objects.filter(signature='test2').count(), 9)
diff --git a/omaha_server/omaha/tests/test_s3.py b/omaha_server/omaha/tests/test_s3.py
index b589de8..3c62899 100644
--- a/omaha_server/omaha/tests/test_s3.py
+++ b/omaha_server/omaha/tests/test_s3.py
@@ -1,8 +1,5 @@
-import operator
-
 from django.test import TestCase, override_settings
-from django.db import transaction
-from factory.django import *
+
 import moto
 import boto
 import mock
diff --git a/omaha_server/omaha/tests/test_tasks.py b/omaha_server/omaha/tests/test_tasks.py
new file mode 100644
index 0000000..b86530a
--- /dev/null
+++ b/omaha_server/omaha/tests/test_tasks.py
@@ -0,0 +1,318 @@
+# coding: utf8
+
+"""
+This software is licensed under the Apache 2 license, quoted below.
+
+Copyright 2014 Crystalnix Limited
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
+"""
+
+import uuid
+
+from django.test import TestCase
+
+from mock import patch
+from freezegun import freeze_time
+
+from crash.models import Crash, Symbols
+from crash.factories import CrashFactory, SymbolsFactory
+from feedback.models import Feedback
+from feedback.factories import FeedbackFactory
+from omaha.dynamic_preferences_registry import global_preferences_manager as gpm
+from omaha_server.utils import is_private
+from omaha.models import Version
+from omaha.factories import VersionFactory
+from omaha.tasks import (
+    auto_delete_duplicate_crashes,
+    auto_delete_older_than,
+    auto_delete_size_is_exceeded,
+    deferred_manual_cleanup
+)
+from omaha_server.utils import add_extra_to_log_message
+from sparkle.models import SparkleVersion
+from sparkle.factories import SparkleVersionFactory
+
+
+class DuplicatedCrashesTest(TestCase):
+    @freeze_time("2012-12-21 12:00:00")
+    @patch('logging.getLogger')
+    @is_private()
+    def test_crashes(self, mocked_get_logger):
+        gpm['Crash__duplicate_number'] = 2
+        crashes = CrashFactory.create_batch(10, signature='test')
+        deleted_crash = crashes[7]
+        self.assertEqual(Crash.objects.all().count(), 10)
+
+        extra_meta = dict(count=8, reason='duplicated', meta=True, log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00',
+                          model='Crash', size=0.0)
+        log_extra_msg = add_extra_to_log_message('Automatic cleanup', extra=extra_meta)
+
+        extra = dict(id=deleted_crash.id, element_created=deleted_crash.created.strftime("%d. %B %Y %I:%M%p"),
+                     signature=deleted_crash.signature, userid=deleted_crash.userid, appid=deleted_crash.appid,
+                     log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00')
+        log_msg = add_extra_to_log_message('Automatic cleanup element', extra=extra)
+
+        mocked_logger = mocked_get_logger.return_value
+        with patch('uuid.uuid4') as mocked_uuid4:
+            mocked_uuid4.side_effect = (uuid.UUID('36446dc3-ae7c-42ad-ae4e-6a826dcf0a%02d' % x) for x in range(100))
+            auto_delete_duplicate_crashes()
+
+        self.assertEqual(mocked_logger.info.call_count, 9)
+        mocked_logger.info.assert_any_call(log_extra_msg)
+        mocked_logger.info.assert_any_call(log_msg)
+
+
+class OldObjectsTest(TestCase):
+    @patch('logging.getLogger')
+    @is_private()
+    def test_crashes(self, mocked_get_logger):
+        gpm['Crash__limit_storage_days'] = 2
+        with freeze_time("2012-12-21 12:00:00"):
+            crashes = CrashFactory.create_batch(10, signature='test')
+        deleted_crash = crashes[-1]
+        self.assertEqual(Crash.objects.all().count(), 10)
+
+        extra_meta = dict(count=10, reason='old', meta=True, log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00',
+                          model='Crash', size=0.0)
+        log_extra_msg = add_extra_to_log_message('Automatic cleanup', extra=extra_meta)
+
+        extra = dict(id=deleted_crash.id, element_created=deleted_crash.created.strftime("%d. %B %Y %I:%M%p"),
+                     signature=deleted_crash.signature, userid=deleted_crash.userid, appid=deleted_crash.appid,
+                     log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00')
+        log_msg = add_extra_to_log_message('Automatic cleanup element', extra=extra)
+
+        mocked_logger = mocked_get_logger.return_value
+        with patch('uuid.uuid4') as mocked_uuid4:
+            mocked_uuid4.side_effect = (uuid.UUID('36446dc3-ae7c-42ad-ae4e-6a826dcf0a%02d' % x) for x in range(100))
+            auto_delete_older_than()
+
+        self.assertEqual(mocked_logger.info.call_count, 11)
+        mocked_logger.info.assert_any_call(log_extra_msg)
+        mocked_logger.info.assert_any_call(log_msg)
+
+    @patch('logging.getLogger')
+    @is_private()
+    def test_feedbacks(self, mocked_get_logger):
+        gpm['Feedback__limit_storage_days'] = 2
+        with freeze_time("2012-12-21 12:00:00"):
+            feedbacks = FeedbackFactory.create_batch(10)
+        deleted_feedback = feedbacks[-1]
+        self.assertEqual(Feedback.objects.all().count(), 10)
+
+        extra_meta = dict(count=10, reason='old', meta=True, log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00',
+                          model='Feedback', size=0.0)
+        log_extra_msg = add_extra_to_log_message('Automatic cleanup', extra=extra_meta)
+
+        extra = dict(id=deleted_feedback.id, element_created=deleted_feedback.created.strftime("%d. %B %Y %I:%M%p"),
+                     log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00')
+        log_msg = add_extra_to_log_message('Automatic cleanup element', extra=extra)
+
+        mocked_logger = mocked_get_logger.return_value
+        with patch('uuid.uuid4') as mocked_uuid4:
+            mocked_uuid4.side_effect = (uuid.UUID('36446dc3-ae7c-42ad-ae4e-6a826dcf0a%02d' % x) for x in range(100))
+            auto_delete_older_than()
+
+        self.assertEqual(mocked_logger.info.call_count, 11)
+        mocked_logger.info.assert_any_call(log_extra_msg)
+        mocked_logger.info.assert_any_call(log_msg)
+
+
+class SizeExceedTest(TestCase):
+    @freeze_time("2012-12-21 12:00:00")
+    @patch('logging.getLogger')
+    @is_private()
+    def test_crashes(self, mocked_get_logger):
+        gpm['Crash__limit_size'] = 1
+        crash_size = 10*1024*1023
+        crashes = CrashFactory.create_batch(200, archive_size=crash_size, minidump_size=0)
+        deleted_crash = crashes[97]
+        self.assertEqual(Crash.objects.all().count(), 200)
+
+        extra_meta = dict(count=98, reason='size_is_exceeded', meta=True, log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00',
+                          model='Crash', size=979.04296875)
+        log_extra_msg = add_extra_to_log_message('Automatic cleanup', extra=extra_meta)
+
+        extra = dict(id=deleted_crash.id, element_created=deleted_crash.created.strftime("%d. %B %Y %I:%M%p"),
+                     signature=deleted_crash.signature, userid=deleted_crash.userid, appid=deleted_crash.appid,
+                     log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00')
+        log_msg = add_extra_to_log_message('Automatic cleanup element', extra=extra)
+
+        mocked_logger = mocked_get_logger.return_value
+        with patch('uuid.uuid4') as mocked_uuid4:
+            mocked_uuid4.side_effect = (uuid.UUID('36446dc3-ae7c-42ad-ae4e-6a826dcf0a%02d' % x) for x in range(100))
+            auto_delete_size_is_exceeded()
+
+        self.assertEqual(mocked_logger.info.call_count, 99)
+        mocked_logger.info.assert_any_call(log_extra_msg)
+        mocked_logger.info.assert_any_call(log_msg)
+
+    @freeze_time("2012-12-21 12:00:00")
+    @patch('logging.getLogger')
+    @is_private()
+    def test_feedbacks(self, mocked_get_logger):
+        gpm['Feedback__limit_size'] = 1
+        feedback_size = 10*1024*1023
+        feedbacks = FeedbackFactory.create_batch(200, screenshot_size=feedback_size, system_logs_size=0, attached_file_size=0,
+                                                 blackbox_size=0)
+        deleted_feedback = feedbacks[97]
+        self.assertEqual(Feedback.objects.all().count(), 200)
+
+        extra_meta = dict(count=98, reason='size_is_exceeded', meta=True, log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00',
+                          model='Feedback', size=979.04296875)
+        log_extra_msg = add_extra_to_log_message('Automatic cleanup', extra=extra_meta)
+
+        extra = dict(id=deleted_feedback.id, element_created=deleted_feedback.created.strftime("%d. %B %Y %I:%M%p"),
+                     log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00')
+        log_msg = add_extra_to_log_message('Automatic cleanup element', extra=extra)
+
+        mocked_logger = mocked_get_logger.return_value
+        with patch('uuid.uuid4') as mocked_uuid4:
+            mocked_uuid4.side_effect = (uuid.UUID('36446dc3-ae7c-42ad-ae4e-6a826dcf0a%02d' % x) for x in range(100))
+            auto_delete_size_is_exceeded()
+        self.assertEqual(mocked_logger.info.call_count, 99)
+        mocked_logger.info.assert_any_call(log_extra_msg)
+        mocked_logger.info.assert_any_call(log_msg)
+
+
+class ManualCleanupTest(TestCase):
+    @freeze_time("2012-12-21 12:00:00")
+    @patch('logging.getLogger')
+    @is_private()
+    def test_crashes(self, mocked_get_logger):
+        gpm['Crash__duplicate_number'] = 2
+        crashes = CrashFactory.create_batch(10, signature='test')
+        deleted_crash = crashes[7]
+        self.assertEqual(Crash.objects.count(), 10)
+
+        extra_meta = dict(count=8, reason='manual', meta=True, log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00',
+                          model='Crash', limit_duplicated=2, limit_size=None, limit_days=None, size=0.0)
+        log_extra_msg = add_extra_to_log_message('Manual cleanup', extra=extra_meta)
+
+        extra = dict(id=deleted_crash.id, element_created=deleted_crash.created.strftime("%d. %B %Y %I:%M%p"),
+                     signature=deleted_crash.signature, userid=deleted_crash.userid, appid=deleted_crash.appid,
+                     log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00')
+        log_msg = add_extra_to_log_message('Manual cleanup element', extra=extra)
+        mocked_logger = mocked_get_logger.return_value
+
+        with patch('uuid.uuid4') as mocked_uuid4:
+            mocked_uuid4.side_effect = (uuid.UUID('36446dc3-ae7c-42ad-ae4e-6a826dcf0a%02d' % x) for x in range(100))
+            deferred_manual_cleanup(['crash', 'Crash'], limit_duplicated=2)
+
+        self.assertEqual(mocked_logger.info.call_count, 9)
+        mocked_logger.info.assert_any_call(log_extra_msg)
+        mocked_logger.info.assert_any_call(log_msg)
+
+    @freeze_time("2012-12-21 12:00:00")
+    @patch('logging.getLogger')
+    @is_private()
+    def test_feedbacks(self, mocked_get_logger):
+        gpm['Feedback__limit_size'] = 1
+        feedback_size = 100*1024*1023
+        feedbacks = FeedbackFactory.create_batch(20, screenshot_size=feedback_size, system_logs_size=0, attached_file_size=0,
+                                                 blackbox_size=0)
+        deleted_feedback = feedbacks[7]
+        self.assertEqual(Feedback.objects.count(), 20)
+
+        extra_meta = dict(count=10, reason='manual', meta=True, log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00',
+                          model='Feedback', limit_duplicated=None, limit_size=1, limit_days=None, size=999.0234375)
+        log_extra_msg = add_extra_to_log_message('Manual cleanup', extra=extra_meta)
+
+        extra = dict(id=deleted_feedback.id, element_created=deleted_feedback.created.strftime("%d. %B %Y %I:%M%p"),
+                     log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00')
+        log_msg = add_extra_to_log_message('Manual cleanup element', extra=extra)
+        mocked_logger = mocked_get_logger.return_value
+
+        with patch('uuid.uuid4') as mocked_uuid4:
+            mocked_uuid4.side_effect = (uuid.UUID('36446dc3-ae7c-42ad-ae4e-6a826dcf0a%02d' % x) for x in range(100))
+            deferred_manual_cleanup(['feedback', 'Feedback'], limit_size=1)
+        self.assertEqual(mocked_logger.info.call_count, 11)
+        mocked_logger.info.assert_any_call(log_extra_msg)
+        mocked_logger.info.assert_any_call(log_msg)
+
+    @freeze_time("2012-12-21 12:00:00")
+    @patch('logging.getLogger')
+    @is_private()
+    def test_symbols(self, mocked_get_logger):
+        gpm['Feedback__limit_size'] = 1
+        symbols_size = 100*1024*1023
+        symbols = SymbolsFactory.create_batch(20, file_size=symbols_size)
+        deleted_symbols = symbols[7]
+        self.assertEqual(Symbols.objects.count(), 20)
+
+        extra_meta = dict(count=10, reason='manual', meta=True, log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00',
+                          model='Symbols', limit_duplicated=None, limit_size=1, limit_days=None, size=999.0234375)
+        log_extra_msg = add_extra_to_log_message('Manual cleanup', extra=extra_meta)
+
+        extra = dict(id=deleted_symbols.id, element_created=deleted_symbols.created.strftime("%d. %B %Y %I:%M%p"),
+                     log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00')
+        log_msg = add_extra_to_log_message('Manual cleanup element', extra=extra)
+        mocked_logger = mocked_get_logger.return_value
+
+        with patch('uuid.uuid4') as mocked_uuid4:
+            mocked_uuid4.side_effect = (uuid.UUID('36446dc3-ae7c-42ad-ae4e-6a826dcf0a%02d' % x) for x in range(100))
+            deferred_manual_cleanup(['crash', 'Symbols'], limit_size=1)
+        self.assertEqual(mocked_logger.info.call_count, 11)
+        mocked_logger.info.assert_any_call(log_extra_msg)
+        mocked_logger.info.assert_any_call(log_msg)
+
+    @freeze_time("2012-12-21 12:00:00")
+    @patch('logging.getLogger')
+    @is_private()
+    def test_omaha_versions(self, mocked_get_logger):
+        gpm['Version__limit_size'] = 1
+        version_size = 1000*1024*1023
+        versions = VersionFactory.create_batch(2, file_size=version_size)
+        deleted_version = versions[0]
+        self.assertEqual(Version.objects.count(), 2)
+
+        extra_meta = dict(count=1, reason='manual', meta=True, log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00',
+                          model='Version', limit_duplicated=None, limit_size=1, limit_days=None, size=999.0234375)
+        log_extra_msg = add_extra_to_log_message('Manual cleanup', extra=extra_meta)
+
+        extra = dict(id=deleted_version.id, element_created=deleted_version.created.strftime("%d. %B %Y %I:%M%p"),
+                     log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00')
+        log_msg = add_extra_to_log_message('Manual cleanup element', extra=extra)
+        mocked_logger = mocked_get_logger.return_value
+
+        with patch('uuid.uuid4') as mocked_uuid4:
+            mocked_uuid4.side_effect = (uuid.UUID('36446dc3-ae7c-42ad-ae4e-6a826dcf0a%02d' % x) for x in range(100))
+            deferred_manual_cleanup(['omaha', 'Version'], limit_size=1)
+        self.assertEqual(mocked_logger.info.call_count, 2)
+        mocked_logger.info.assert_any_call(log_extra_msg)
+        mocked_logger.info.assert_any_call(log_msg)
+
+    @freeze_time("2012-12-21 12:00:00")
+    @patch('logging.getLogger')
+    @is_private()
+    def test_sparkle_versions(self, mocked_get_logger):
+        gpm['SparkleVersion__limit_size'] = 1
+        version_size = 1000*1024*1023
+        versions = SparkleVersionFactory.create_batch(2, file_size=version_size)
+        deleted_version = versions[0]
+        self.assertEqual(SparkleVersion.objects.count(), 2)
+
+        extra_meta = dict(count=1, reason='manual', meta=True, log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00',
+                          model='SparkleVersion', limit_duplicated=None, limit_size=1, limit_days=None, size=999.0234375)
+        log_extra_msg = add_extra_to_log_message('Manual cleanup', extra=extra_meta)
+
+        extra = dict(id=deleted_version.id, element_created=deleted_version.created.strftime("%d. %B %Y %I:%M%p"),
%B %Y %I:%M%p"), + log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00') + log_msg = add_extra_to_log_message('Manual cleanup element', extra=extra) + mocked_logger = mocked_get_logger.return_value + + with patch('uuid.uuid4') as mocked_uuid4: + mocked_uuid4.side_effect = (uuid.UUID('36446dc3-ae7c-42ad-ae4e-6a826dcf0a%02d' % x) for x in range(100)) + deferred_manual_cleanup(['sparkle', 'SparkleVersion'], limit_size=1) + self.assertEqual(mocked_logger.info.call_count, 2) + mocked_logger.info.assert_any_call(log_extra_msg) + mocked_logger.info.assert_any_call(log_msg) diff --git a/omaha_server/omaha_server/settings_prod.py b/omaha_server/omaha_server/settings_prod.py index 44e1483..799debd 100644 --- a/omaha_server/omaha_server/settings_prod.py +++ b/omaha_server/omaha_server/settings_prod.py @@ -26,10 +26,13 @@ AWS_IS_GZIPPED = True RAVEN_CONFIG = { 'dsn': os.environ.get('RAVEN_DNS'), + 'name': HOST_NAME, } RAVEN_DSN_STACKTRACE = os.environ.get('RAVEN_DSN_STACKTRACE', RAVEN_CONFIG['dsn']) +SPLUNK_HOST = os.environ.get('SPLUNK_HOST') + INSTALLED_APPS = INSTALLED_APPS + ( 'raven.contrib.django.raven_compat', ) @@ -38,15 +41,18 @@ CELERYD_HIJACK_ROOT_LOGGER = False LOGGING = { 'version': 1, - 'disable_existing_loggers': True, + 'disable_existing_loggers': False, 'root': { - 'level': 'WARNING', + 'level': 'INFO', 'handlers': ['sentry', 'console'], }, 'formatters': { 'verbose': { 'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s' }, + 'splunk_format':{ + 'format': 'level=%(levelname)s logger=%(name)s timestamp=%(asctime)s module=%(module)s process=%(process)d thread=%(thread)d message=%(message)s\n\r' + } }, 'handlers': { 'sentry': { @@ -77,3 +83,20 @@ LOGGING = { }, }, } + +if SPLUNK_HOST: + LOGGING['handlers']['splunk'] = { + 'level': os.environ.get('SPLUNK_LOGGING_LEVEL', 'INFO'), + 'class': 'splunk_handler.SplunkHandler', + 'formatter': 'splunk_format', + 'host': SPLUNK_HOST, + 'port': os.environ.get('SPLUNK_PORT', 8089), + 'username': os.environ.get('SPLUNK_USERNAME', 'admin'), + 'password': os.environ.get('SPLUNK_PASSWORD', 'changeme'), + 'hostname': HOST_NAME or 'Unknown', + 'index': 'main', + 'source': 'omaha', + 'sourcetype': 'omaha-server', + 'verify': False, + } + LOGGING['root']['handlers'].append('splunk') diff --git a/omaha_server/omaha_server/settings_test.py b/omaha_server/omaha_server/settings_test.py index dd68077..cc12d6d 100644 --- a/omaha_server/omaha_server/settings_test.py +++ b/omaha_server/omaha_server/settings_test.py @@ -30,6 +30,7 @@ NOSE_ARGS = [ '--cover-package=omaha_server,omaha,crash,feedback,sparkle,healthcheck', '--cover-inclusive', '--nologcapture', + '-s' ] MIGRATION_MODULES = DisableMigrations() diff --git a/omaha_server/omaha_server/tests/test_utils.py b/omaha_server/omaha_server/tests/test_utils.py index f3dc284..9985e88 100644 --- a/omaha_server/omaha_server/tests/test_utils.py +++ b/omaha_server/omaha_server/tests/test_utils.py @@ -22,7 +22,7 @@ from django.test import TestCase from django.test import override_settings from mock import Mock -from omaha_server.utils import show_toolbar +from omaha_server.utils import show_toolbar, add_extra_to_log_message, get_splunk_url class UtilsTest(TestCase): @@ -42,3 +42,17 @@ class UtilsTest(TestCase): def test_show_toolbar_debug_false(self): self.request.is_ajax = lambda: False self.assertFalse(show_toolbar(self.request)) + + def test_add_extra_to_log_message(self): + msg = 'test' + extra = dict(a=1, c=3, b=2, d=4) + expected_msg = 'test, a=1 , b=2 , c=3 , d=4' + actual_msg = 
diff --git a/omaha_server/omaha_server/settings_test.py b/omaha_server/omaha_server/settings_test.py
index dd68077..cc12d6d 100644
--- a/omaha_server/omaha_server/settings_test.py
+++ b/omaha_server/omaha_server/settings_test.py
@@ -30,6 +30,7 @@ NOSE_ARGS = [
     '--cover-package=omaha_server,omaha,crash,feedback,sparkle,healthcheck',
     '--cover-inclusive',
     '--nologcapture',
+    '-s'
 ]

 MIGRATION_MODULES = DisableMigrations()
diff --git a/omaha_server/omaha_server/tests/test_utils.py b/omaha_server/omaha_server/tests/test_utils.py
index f3dc284..9985e88 100644
--- a/omaha_server/omaha_server/tests/test_utils.py
+++ b/omaha_server/omaha_server/tests/test_utils.py
@@ -22,7 +22,7 @@ from django.test import TestCase
 from django.test import override_settings

 from mock import Mock
-from omaha_server.utils import show_toolbar
+from omaha_server.utils import show_toolbar, add_extra_to_log_message, get_splunk_url

 class UtilsTest(TestCase):
@@ -42,3 +42,17 @@ class UtilsTest(TestCase):
     def test_show_toolbar_debug_false(self):
         self.request.is_ajax = lambda: False
         self.assertFalse(show_toolbar(self.request))
+
+    def test_add_extra_to_log_message(self):
+        msg = 'test'
+        extra = dict(a=1, c=3, b=2, d=4)
+        expected_msg = 'test, a=1 , b=2 , c=3 , d=4'
+        actual_msg = add_extra_to_log_message(msg, extra)
+        self.assertEqual(actual_msg, expected_msg)
+
+    @override_settings(SPLUNK_HOST='splunk.example.com')
+    def test_get_splunk_url(self):
+        params = dict(a=1, c=3, b=2, d=4)
+        actual_msg = get_splunk_url(params)
+        expected_msg = 'http://splunk.example.com/en-US/app/search/search?q=search a=1 b=2 c=3 d=4'
+        self.assertEqual(actual_msg, expected_msg)
diff --git a/omaha_server/omaha_server/utils.py b/omaha_server/omaha_server/utils.py
index 9a831d8..ab6f851 100644
--- a/omaha_server/omaha_server/utils.py
+++ b/omaha_server/omaha_server/utils.py
@@ -1,6 +1,7 @@
 # coding: utf8

 from functools import wraps
+
 from django.conf import settings

@@ -41,3 +42,13 @@ def get_client_ip(request):
     else:
         ip = request.META.get('REMOTE_ADDR')
     return ip
+
+
+def add_extra_to_log_message(msg, extra):
+    return msg + ' '.join(", %s=%s" % (key, val) for (key, val) in sorted(extra.items()))
+
+
+def get_splunk_url(params):
+    SEARCH_TEMPLATE = 'http://%s/en-US/app/search/search?q=search %s'
+    splunk_host = getattr(settings, 'SPLUNK_HOST', None)
+    string_params = ' '.join("%s=%s" % (key, val) for (key, val) in sorted(params.items()))
+    return SEARCH_TEMPLATE % (splunk_host, string_params) if splunk_host else None
diff --git a/pavement.py b/pavement.py
index 0c5c6d0..592ade1 100644
--- a/pavement.py
+++ b/pavement.py
@@ -91,6 +91,11 @@ def create_admin():
     sh('./createadmin.py', cwd='omaha_server')

 @task
+def configure_splunk_forwarder():
+    hostname = os.environ.get('HOST_NAME')
+    sh('echo "[default] \nhost = %s \n" > /opt/splunkforwarder/etc/system/local/inputs.conf' % hostname)
+
+@task
 def docker_run():
     try:
         is_private = True if os.environ.get('OMAHA_SERVER_PRIVATE', '').title() == 'True' else False
@@ -101,6 +106,7 @@ def docker_run():

         create_admin()
         collectstatic()
+        configure_splunk_forwarder()

         sh('/usr/bin/supervisord')
     except:
         client.captureException()
diff --git a/requirements/base.txt b/requirements/base.txt
index 6f336a9..785ba1a 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -31,7 +31,8 @@ django-bootstrap3==6.2.2
 protobuf==3.0.0a3
 protobuf-to-dict==0.1.0
 django-dynamic-preferences==0.6.1
+splunk-handler==1.1.3

 # Only dev
 #django-httplog==0.2.3
-https://github.com/Crystalnix/django-httplog/archive/5a148dae1d6608aeccb90f77aa7a2b4a2015f52f.zip
+https://github.com/Crystalnix/django-httplog/archive/6ed2a8a4e3d606443492998cbac93a71f4cee7d1.zip
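
Taken together, the two helpers in omaha_server/utils.py behave exactly as the new tests assert; a short usage example, with output strings taken from test_utils.py:

from omaha_server.utils import add_extra_to_log_message, get_splunk_url

# Keys are emitted in sorted order, one ", key=value" fragment per item.
add_extra_to_log_message('test', dict(a=1, c=3, b=2, d=4))
# -> 'test, a=1 , b=2 , c=3 , d=4'

# With SPLUNK_HOST = 'splunk.example.com' in Django settings; returns None when unset.
get_splunk_url(dict(log_id='36446dc3-ae7c-42ad-ae4e-6a826dcf0a00'))
# -> 'http://splunk.example.com/en-US/app/search/search?q=search log_id=36446dc3-ae7c-42ad-ae4e-6a826dcf0a00'
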