Diffstat (limited to 'gitautodeploy')
-rw-r--r--  gitautodeploy/cli/config.py           2
-rw-r--r--  gitautodeploy/events.py             101
-rw-r--r--  gitautodeploy/gitautodeploy.py       14
-rw-r--r--  gitautodeploy/httpserver.py         564
-rw-r--r--  gitautodeploy/parsers/__init__.py     3
-rw-r--r--  gitautodeploy/parsers/bitbucket.py   16
-rw-r--r--  gitautodeploy/parsers/coding.py      26
-rw-r--r--  gitautodeploy/parsers/common.py       7
-rw-r--r--  gitautodeploy/parsers/generic.py     16
-rw-r--r--  gitautodeploy/parsers/github.py      36
-rw-r--r--  gitautodeploy/parsers/gitlab.py      73
-rw-r--r--  gitautodeploy/parsers/gitlabci.py    28
12 files changed, 491 insertions, 395 deletions
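The central addition in this commit is gitautodeploy/events.py: every incoming webhook request is wrapped in a WebhookAction, registered with an EventStore, and each log call on the action is broadcast to registered observers (GitAutoDeploy.setup() registers the GAD instance itself, and WebhookRequestHandlerFactory now receives the store as a second argument). The sketch below is not part of the commit; it is a minimal illustration of how those classes fit together, assuming the import path implied by the diff, a Python 2 runtime, and a purely hypothetical ConsoleObserver.

# Illustrative wiring of the new EventStore (assumed import path, Python 2).
from gitautodeploy.events import EventStore, WebhookAction

class ConsoleObserver(object):
    """Hypothetical observer: any object with an update(action, message=None) method works."""
    def update(self, action, message=None):
        print("%r updated: %s" % (action, message))

store = EventStore()
store.register_observer(ConsoleObserver())

# The HTTP handler performs the equivalent of this for every POST it receives.
action = WebhookAction(('127.0.0.1', 52000),
                       request_headers={'x-github-event': 'push'},
                       request_body='{}')
store.register_action(action)        # attaches the hub and notifies observers
action.log_info('Incoming request')  # logged and re-broadcast via EventStore.update_action()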
diff --git a/gitautodeploy/cli/config.py b/gitautodeploy/cli/config.py index 17ab683..69491c5 100644 --- a/gitautodeploy/cli/config.py +++ b/gitautodeploy/cli/config.py @@ -27,6 +27,8 @@ def get_config_defaults(): config['log-test-case'] = False config['log-test-case-dir'] = None + config['remote-whitelist'] = ['127.0.0.1'] + return config def get_config_from_environment(): diff --git a/gitautodeploy/events.py b/gitautodeploy/events.py new file mode 100644 index 0000000..b021165 --- /dev/null +++ b/gitautodeploy/events.py @@ -0,0 +1,101 @@ +class Action(object): + + def __init__(self, name=None): + import logging + + self.logger = logging.getLogger() + self.hub = None + self.messages = [] + self.name = name + + def __repr__(self): + if self.name: + return "<Action:%s>" % self.name + else: + return "<Action>" + + def dict_repr(self): + return { + "messages": self.messages + } + + def register_hub(self, hub): + self.hub = hub + + def register_message(self, message, level="INFO"): + self.messages.append(message) + self.hub.update_action(self, message) + + def log_debug(self, message): + self.logger.debug(message) + self.register_message(message, "DEBUG") + + def log_info(self, message): + self.logger.info(message) + self.register_message(message, "INFO") + + def log_warning(self, message): + self.logger.warning(message) + self.register_message(message, "WARNING") + + def log_error(self, message): + self.logger.error(message) + self.register_message(message, "ERROR") + + def log_critical(self, message): + self.register_message(message, "CRITICAL") + + def update(self): + self.hub.update_action(self) + + +class WebhookAction(Action): + """Represents a webhook request event and keeps a copy of all incoming and outgoing data for monitoring purposes.""" + + def __init__(self, client_address, request_headers, request_body): + self.client_address = client_address + self.request_headers = request_headers + self.request_body = request_body + super(WebhookAction, self).__init__() + + def __repr__(self): + return "<WebhookAction>" + + def dict_repr(self): + data = super(WebhookAction, self).dict_repr() + data['request_headers'] = self.request_headers + data['request_body'] = self.request_body + return data + + +class EventStore(object): + + def __init__(self): + self.actions = [] + self.observers = [] + + def register_observer(self, observer): + self.observers.append(observer) + + def unregister_observer(self, observer): + if observer in self.observers: + self.observers.remove(observer) + + def update_observers(self, *args, **kwargs): + for observer in self.observers: + observer.update(*args, **kwargs) + + def register_action(self, action): + action.register_hub(self) + self.actions.append(action) + self.update_observers(action) + + # Store max 100 actions + if len(self.actions) > 100: + self.actions.pop(0) + + def update_action(self, action, message=None): + self.update_observers(action, message) + + def dict_repr(self): + return map(lambda x: x.dict_repr(), self.actions) diff --git a/gitautodeploy/gitautodeploy.py b/gitautodeploy/gitautodeploy.py index 829d76c..167fae8 100644 --- a/gitautodeploy/gitautodeploy.py +++ b/gitautodeploy/gitautodeploy.py @@ -227,6 +227,12 @@ class GitAutoDeploy(object): return 0 + def update(self, action, message=None): + pass + #print "%s was updated" % action, +# if message: +# print "Message: %s" % message + def setup(self, config): """Setup an instance of GAD based on the provided config object.""" import sys @@ -236,6 +242,7 @@ class GitAutoDeploy(object): import 
logging from lock import Lock from httpserver import WebhookRequestHandlerFactory + from events import EventStore # This solves https://github.com/olipo186/Git-Auto-Deploy/issues/118 try: @@ -247,7 +254,6 @@ class GitAutoDeploy(object): def emit(self, record): pass - # Attatch config values to this instance self._config = config @@ -320,8 +326,12 @@ class GitAutoDeploy(object): Lock(os.path.join(repo_config['path'], 'status_waiting')).clear() try: + + event_store = EventStore() + event_store.register_observer(self) + # Create web hook request handler class - WebhookRequestHandler = WebhookRequestHandlerFactory(self._config) + WebhookRequestHandler = WebhookRequestHandlerFactory(self._config, event_store) self._server = HTTPServer((self._config['host'], self._config['port']), diff --git a/gitautodeploy/httpserver.py b/gitautodeploy/httpserver.py index d10097c..49a2342 100644 --- a/gitautodeploy/httpserver.py +++ b/gitautodeploy/httpserver.py @@ -1,356 +1,407 @@ from BaseHTTPServer import BaseHTTPRequestHandler -class FilterMatchError(Exception): - """Used to describe when a filter does not match a request.""" - pass +class WebbhookRequestProcessor(object): -def WebhookRequestHandlerFactory(config): - class WebhookRequestHandler(BaseHTTPRequestHandler, object): - """Extends the BaseHTTPRequestHandler class and handles the incoming - HTTP requests.""" + def get_service_handler(self, request_headers, request_body, action): + """Parses the incoming request and attempts to determine whether + it originates from GitHub, GitLab or any other known service.""" + import json + import logging + import parsers - def __init__(self, *args, **kwargs): - #do_stuff_with(self, init_args) - self._config = config - super(WebhookRequestHandler, self).__init__(*args, **kwargs) + logger = logging.getLogger() + payload = json.loads(request_body) - def do_POST(self): - """Invoked on incoming POST requests""" - from threading import Timer - import logging - import json + if not isinstance(payload, dict): + raise ValueError("Invalid JSON object") - logger = logging.getLogger() - logger.info('Incoming request from %s:%s' % (self.client_address[0], self.client_address[1])) + user_agent = 'user-agent' in request_headers and request_headers['user-agent'] + content_type = 'content-type' in request_headers and request_headers['content-type'] - content_type = self.headers.getheader('content-type') - content_length = int(self.headers.getheader('content-length')) - request_body = self.rfile.read(content_length) + # Assume Coding if the X-Coding-Event HTTP header is set + if 'x-coding-event' in request_headers: + return parsers.CodingRequestParser - # Test case debug data - test_case = { - 'headers': dict(self.headers), - 'payload': json.loads(request_body), - 'config': {}, - 'expected': {'status': 200, 'data': [{'deploy': 0}]} - } + # Assume GitLab if the X-Gitlab-Event HTTP header is set + elif 'x-gitlab-event' in request_headers: - # Extract request headers and make all keys to lowercase (makes them easier to compare) - request_headers = dict(self.headers) - request_headers = dict((k.lower(), v) for k, v in request_headers.iteritems()) + # Special Case for Gitlab CI + if content_type == "application/json" and "build_status" in payload: + return parsers.GitLabCIRequestParser + else: + return parsers.GitLabRequestParser - try: + # Assume GitHub if the X-GitHub-Event HTTP header is set + elif 'x-github-event' in request_headers: - # Will raise a ValueError exception if it fails - ServiceRequestParser = 
self.figure_out_service_from_request(request_headers, request_body) + return parsers.GitHubRequestParser - # Unable to identify the source of the request - if not ServiceRequestParser: - self.send_error(400, 'Unrecognized service') - logger.error('Unable to find appropriate handler for request. The source service is not supported.') - test_case['expected']['status'] = 400 - return + # Assume BitBucket if the User-Agent HTTP header is set to + # 'Bitbucket-Webhooks/2.0' (or something similar) + elif user_agent and user_agent.lower().find('bitbucket') != -1: - # Send HTTP response before the git pull and/or deploy commands? - if not 'detailed-response' in self._config or not self._config['detailed-response']: - self.send_response(200, 'OK') - self.send_header('Content-type', 'text/plain') - self.end_headers() + return parsers.BitBucketRequestParser - logger.info('Handling the request with %s' % ServiceRequestParser.__name__) + # This handles old GitLab requests and Gogs requests for example. + elif content_type == "application/json": - # Could be GitHubParser, GitLabParser or other - repo_configs, ref, action, webhook_urls = ServiceRequestParser(self._config).get_repo_params_from_request(request_headers, request_body) - logger.debug("Event details - ref: %s; action: %s" % (ref or "master", action)) + action.log_info("Received event from unknown origin.") + return parsers.GenericRequestParser - if not ServiceRequestParser(self._config).validate_request(request_headers, repo_configs): - self.send_error(400, 'Bad request') - test_case['expected']['status'] = 400 - return + action.log_error("Unable to recognize request origin. Don't know how to handle the request.") + return - if len(repo_configs) == 0: - self.send_error(400, 'Bad request') - logger.warning('The URLs references in the webhook did not match any repository entry in the config. For this webhook to work, make sure you have at least one repository configured with one of the following URLs; %s' % ', '.join(webhook_urls)) - test_case['expected']['status'] = 400 - return + def execute_webhook(self, repo_configs, request_headers, request_body, action): + """Verify that the suggested repositories has matching settings and + issue git pull and/or deploy commands.""" + import os + import time + import logging + from wrappers import GitWrapper + from lock import Lock + import json - # Make git pulls and trigger deploy commands - res = self.process_repositories(repo_configs, ref, action, request_body, request_headers) + logger = logging.getLogger() + payload = json.loads(request_body) - if 'detailed-response' in self._config and self._config['detailed-response']: - self.send_response(200, 'OK') - self.send_header('Content-type', 'application/json') - self.end_headers() - self.wfile.write(json.dumps(res)) - self.wfile.close() + result = [] - # Add additional test case data - test_case['config'] = { - 'url': 'url' in repo_configs[0] and repo_configs[0]['url'], - 'branch': 'branch' in repo_configs[0] and repo_configs[0]['branch'], - 'remote': 'remote' in repo_configs[0] and repo_configs[0]['remote'], - 'deploy': 'echo test!' 
- } + # Process each matching repository + for repo_config in repo_configs: - except ValueError, e: - self.send_error(400, 'Unprocessable request') - logger.warning('Unable to process incoming request from %s:%s' % (self.client_address[0], self.client_address[1])) - test_case['expected']['status'] = 400 - return + repo_result = {} - except Exception, e: + # In case there is no path configured for the repository, no pull will + # be made. + if not 'path' in repo_config: + res = GitWrapper.deploy(repo_config) + repo_result['deploy'] = res + result.append(repo_result) + continue - if 'detailed-response' in self._config and self._config['detailed-response']: - self.send_error(500, 'Unable to process request') + # If the path does not exist, a warning will be raised and no pull or + # deploy will be made. + if not os.path.isdir(repo_config['path']): + action.log_error("The repository '%s' does not exist locally. Make sure it was pulled properly without errors by reviewing the log." % repo_config['path']) + result.append(repo_result) + continue - test_case['expected']['status'] = 500 + # If the path is not writable, a warning will be raised and no pull or + # deploy will be made. + if not os.access(repo_config['path'], os.W_OK): + action.log_error("The path '%s' is not writable. Make sure that GAD has write access to that path." % repo_config['path']) + result.append(repo_result) + continue - raise e + running_lock = Lock(os.path.join(repo_config['path'], 'status_running')) + waiting_lock = Lock(os.path.join(repo_config['path'], 'status_waiting')) + try: + + # Attempt to obtain the status_running lock + while not running_lock.obtain(): + + # If we're unable, try once to obtain the status_waiting lock + if not waiting_lock.has_lock() and not waiting_lock.obtain(): + action.log_error("Unable to obtain the status_running lock nor the status_waiting lock. Another process is already waiting, so we'll ignore the request.") + + # If we're unable to obtain the waiting lock, ignore the request + break + + # Keep on attempting to obtain the status_running lock until we succeed + time.sleep(5) + + n = 4 + res = None + while n > 0: + + # Attempt to pull up a maximum of 4 times + res = GitWrapper.pull(repo_config) + repo_result['git pull'] = res + + # Return code indicating success? 
+ if res == 0: + break + + n -= 1 + + if 0 < n: + res = GitWrapper.deploy(repo_config) + repo_result['deploy'] = res + + #except Exception as e: + # logger.error('Error during \'pull\' or \'deploy\' operation on path: %s' % repo_config['path']) + # logger.error(e) + # raise e finally: - # Save the request as a test case - if 'log-test-case' in self._config and self._config['log-test-case']: - self.save_test_case(test_case) + # Release the lock if it's ours + if running_lock.has_lock(): + running_lock.release() - def log_message(self, format, *args): - """Overloads the default message logging method to allow messages to - go through our custom logger instead.""" - import logging - logger = logging.getLogger() - logger.info("%s - %s" % (self.client_address[0], - format%args)) + # Release the lock if it's ours + if waiting_lock.has_lock(): + waiting_lock.release() - def figure_out_service_from_request(self, request_headers, request_body): - """Parses the incoming request and attempts to determine whether - it originates from GitHub, GitLab or any other known service.""" - import json - import logging - import parsers + result.append(repo_result) - logger = logging.getLogger() - data = json.loads(request_body) + return result - if not isinstance(data, dict): - raise ValueError("Invalid JSON object") - user_agent = 'user-agent' in request_headers and request_headers['user-agent'] - content_type = 'content-type' in request_headers and request_headers['content-type'] +class WebhookRequestFilter(object): - # Assume Coding if the X-Coding-Event HTTP header is set - if 'x-coding-event' in request_headers: - return parsers.CodingRequestParser + def passes_payload_filter(self, payload_filters, payload, action): + import logging - # Assume GitLab if the X-Gitlab-Event HTTP header is set - elif 'x-gitlab-event' in request_headers: + logger = logging.getLogger() - # Special Case for Gitlab CI - if content_type == "application/json" and "build_status" in data: - return parsers.GitLabCIRequestParser - else: - return parsers.GitLabRequestParser + # At least one filter must match + for filter in payload_filters: - # Assume GitHub if the X-GitHub-Event HTTP header is set - elif 'x-github-event' in request_headers: + # All options specified in the filter must match + for filter_key, filter_value in filter.iteritems(): - return parsers.GitHubRequestParser + # Ignore filters with value None (let them pass) + if filter_value == None: + continue - # Assume BitBucket if the User-Agent HTTP header is set to - # 'Bitbucket-Webhooks/2.0' (or something similar) - elif user_agent and user_agent.lower().find('bitbucket') != -1: + # Interpret dots in filter name as path notations + node_value = payload + for node_key in filter_key.split('.'): - return parsers.BitBucketRequestParser + # If the path is not valid the filter does not match + if not node_key in node_value: + action.log_info("Filter '%s' does not match since the path is invalid" % (filter_key)) - # This handles old GitLab requests and Gogs requests for example. - elif content_type == "application/json": + # Filter does not match, do not process this repo config + return False - logger.info("Received event from unknown origin.") - return parsers.GenericRequestParser + node_value = node_value[node_key] - logger.error("Unable to recognize request origin. 
Don't know how to handle the request.") - return + if filter_value == node_value: + continue - def passes_payload_filter(self, payload_filters, data, action): - import logging + # If the filter value is set to True. the filter + # will pass regardless of the actual value + if filter_value == True: + continue - logger = logging.getLogger() + action.log_debug("Filter '%s' does not match ('%s' != '%s')" % (filter_key, filter_value, (str(node_value)[:75] + '..') if len(str(node_value)) > 75 else str(node_value))) - # At least one filter must match - for filter in payload_filters: + # Filter does not match, do not process this repo config + return False - # All options specified in the filter must match - for filter_key, filter_value in filter.iteritems(): + # Filter does match, proceed + return True - # Ignore filters with value None (let them pass) - if filter_value == None: - continue + def passes_header_filter(self, header_filter, request_headers): + import logging - # Support for earlier version so it's non-breaking functionality - if filter_key == 'action' and filter_value == action: - continue + logger = logging.getLogger() - # Interpret dots in filter name as path notations - node_value = data - for node_key in filter_key.split('.'): + # At least one filter must match + for key in header_filter: - # If the path is not valid the filter does not match - if not node_key in node_value: - logger.info("Filter '%s' does not match since the path is invalid" % (filter_key)) + # Verify that the request has the required header attribute + if key.lower() not in request_headers: + return False - # Filter does not match, do not process this repo config - return False + # "True" indicates that any header value is accepted + if header_filter[key] is True: + continue - node_value = node_value[node_key] + # Verify that the request has the required header value + if header_filter[key] != request_headers[key.lower()]: + return False - if filter_value == node_value: - continue + # Filter does match, proceed + return True - # If the filter value is set to True. 
the filter - # will pass regardless of the actual value - if filter_value == True: - continue + def apply_filters(self, repo_configs, request_headers, request_body, action): + """Verify that the suggested repositories has matching settings and + issue git pull and/or deploy commands.""" + import os + import time + import logging + from wrappers import GitWrapper + from lock import Lock + import json - logger.info("Filter '%s'' does not match ('%s' != '%s')" % (filter_key, filter_value, (str(node_value)[:75] + '..') if len(str(node_value)) > 75 else str(node_value))) + logger = logging.getLogger() + payload = json.loads(request_body) - # Filter does not match, do not process this repo config - return False + matches = [] - # Filter does match, proceed - return True + # Process each matching repository + for repo_config in repo_configs: - def passes_header_filter(self, header_filter, request_headers): - import logging + # Verify that all payload filters matches the request (if any payload filters are specified) + if 'payload-filter' in repo_config and not self.passes_payload_filter(repo_config['payload-filter'], payload, action): - logger = logging.getLogger() + # Filter does not match, do not process this repo config + continue - # At least one filter must match - for key in header_filter: + # Verify that all header filters matches the request (if any header filters are specified) + if 'header-filter' in repo_config and not self.passes_header_filter(repo_config['header-filter'], request_headers): - # Verify that the request has the required header attribute - if key.lower() not in request_headers: - return False + # Filter does not match, do not process this repo config + continue - # "True" indicates that any header value is accepted - if header_filter[key] is True: - continue + matches.append(repo_config) - # Verify that the request has the required header value - if header_filter[key] != request_headers[key.lower()]: - return False + return matches - # Filter does match, proceed - return True - def process_repositories(self, repo_configs, ref, action, request_body, request_headers): - """Verify that the suggested repositories has matching settings and - issue git pull and/or deploy commands.""" - import os - import time +def WebhookRequestHandlerFactory(config, event_store): + """Factory method for webhook request handler class""" + + class WebhookRequestHandler(BaseHTTPRequestHandler, object): + """Extends the BaseHTTPRequestHandler class and handles the incoming + HTTP requests.""" + + def __init__(self, *args, **kwargs): + self._config = config + self.event_store = event_store + super(WebhookRequestHandler, self).__init__(*args, **kwargs) + + def do_GET(self): + import json + + if not self.client_address[0] in self._config['remote-whitelist']: + self.send_error(403) + return + + data = self.event_store.dict_repr() + + self.send_response(200, 'OK') + self.send_header('Content-type', 'application/json') + self.end_headers() + self.wfile.write(json.dumps(data)) + self.wfile.close() + + def do_POST(self): + """Invoked on incoming POST requests""" + from threading import Timer import logging - from wrappers import GitWrapper - from lock import Lock import json + from events import WebhookAction logger = logging.getLogger() - data = json.loads(request_body) - result = [] + content_length = int(self.headers.getheader('content-length')) + request_body = self.rfile.read(content_length) - # Process each matching repository - for repo_config in repo_configs: + # Extract request headers and make 
all keys to lowercase (makes them easier to compare) + request_headers = dict(self.headers) + request_headers = dict((k.lower(), v) for k, v in request_headers.iteritems()) - repo_result = {} + action = WebhookAction(self.client_address, request_body, request_headers) + event_store.register_action(action) - # Verify that all payload filters matches the request (if any payload filters are specified) - if 'payload-filter' in repo_config and not self.passes_payload_filter(repo_config['payload-filter'], data, action): + action.log_info('Incoming request from %s:%s' % (self.client_address[0], self.client_address[1])) - # Filter does not match, do not process this repo config - continue + # Test case debug data + test_case = { + 'headers': dict(self.headers), + 'payload': json.loads(request_body), + 'config': {}, + 'expected': {'status': 200, 'data': [{'deploy': 0}]} + } - # Verify that all header filters matches the request (if any header filters are specified) - if 'header-filter' in repo_config and not self.passes_header_filter(repo_config['header-filter'], request_headers): + try: - # Filter does not match, do not process this repo config - continue + request_processor = WebbhookRequestProcessor() - # In case there is no path configured for the repository, no pull will - # be made. - if not 'path' in repo_config: - res = GitWrapper.deploy(repo_config) - repo_result['deploy'] = res - result.append(repo_result) - continue + # Will raise a ValueError exception if it fails + ServiceRequestHandler = request_processor.get_service_handler(request_headers, request_body, action) - # If the path does not exist, a warning will be raised and no pull or - # deploy will be made. - if not os.path.isdir(repo_config['path']): - logger.error("The repository '%s' does not exist locally. Make sure it was pulled " % repo_config['path'] + - "properly without errors by reviewing the log.") - result.append(repo_result) - continue + # Unable to identify the source of the request + if not ServiceRequestHandler: + self.send_error(400, 'Unrecognized service') + test_case['expected']['status'] = 400 + action.log_error("Unable to find appropriate handler for request. The source service is not supported") + return - # If the path is not writable, a warning will be raised and no pull or - # deploy will be made. - if not os.access(repo_config['path'], os.W_OK): - logger.error("The path '%s' is not writable. Make sure that GAD has write access to that path." 
% repo_config['path']) - result.append(repo_result) - continue + service_handler = ServiceRequestHandler(self._config) + + action.log_info("Handling the request with %s" % ServiceRequestHandler.__name__) + + # Could be GitHubParser, GitLabParser or other + repo_configs = service_handler.get_repo_configs(request_headers, request_body, action) - running_lock = Lock(os.path.join(repo_config['path'], 'status_running')) - waiting_lock = Lock(os.path.join(repo_config['path'], 'status_waiting')) - try: + request_filter = WebhookRequestFilter() - # Attempt to obtain the status_running lock - while not running_lock.obtain(): + if len(repo_configs) == 0: + self.send_error(400, 'Bad request') + test_case['expected']['status'] = 400 + action.log_error("No matching repository config") + return + + # Apply filters + repo_configs = request_filter.apply_filters(repo_configs, request_headers, request_body, action) + + if not service_handler.validate_request(request_headers, repo_configs, action): + self.send_error(400, 'Bad request') + test_case['expected']['status'] = 400 + action.log_warning("Request not valid") + return - # If we're unable, try once to obtain the status_waiting lock - if not waiting_lock.has_lock() and not waiting_lock.obtain(): - logger.error("Unable to obtain the status_running lock nor the status_waiting lock. Another process is " + - "already waiting, so we'll ignore the request.") + # Send HTTP response before the git pull and/or deploy commands? + #if not 'detailed-response' in self._config or not self._config['detailed-response']: + self.send_response(200, 'OK') + self.send_header('Content-type', 'text/plain') + self.end_headers() + test_case['expected']['status'] = 200 - # If we're unable to obtain the waiting lock, ignore the request - break + if len(repo_configs) == 0: + action.log_info("Filter does not match") + return - # Keep on attempting to obtain the status_running lock until we succeed - time.sleep(5) + action.log_info("Deploying") - n = 4 - res = None - while n > 0: + # Schedule the execution of the webhook (git pull and trigger deploy etc) + request_processor.execute_webhook(repo_configs, request_headers, request_body, action) - # Attempt to pull up a maximum of 4 times - res = GitWrapper.pull(repo_config) - repo_result['git pull'] = res + # Add additional test case data + test_case['config'] = { + 'url': 'url' in repo_configs[0] and repo_configs[0]['url'], + 'branch': 'branch' in repo_configs[0] and repo_configs[0]['branch'], + 'remote': 'remote' in repo_configs[0] and repo_configs[0]['remote'], + 'deploy': 'echo test!' + } - # Return code indicating success? 
- if res == 0: - break + action.log_info("Done") - n -= 1 + except ValueError, e: + self.send_error(400, 'Unprocessable request') + action.log_warning('Unable to process incoming request from %s:%s' % (self.client_address[0], self.client_address[1])) + test_case['expected']['status'] = 400 + return - if 0 < n: - res = GitWrapper.deploy(repo_config) - repo_result['deploy'] = res + except Exception, e: - #except Exception as e: - # logger.error('Error during \'pull\' or \'deploy\' operation on path: %s' % repo_config['path']) - # logger.error(e) - # raise e + if 'detailed-response' in self._config and self._config['detailed-response']: + self.send_error(500, 'Unable to process request') - finally: + test_case['expected']['status'] = 500 + action.log_warning("Unable to process request") - # Release the lock if it's ours - if running_lock.has_lock(): - running_lock.release() + raise e - # Release the lock if it's ours - if waiting_lock.has_lock(): - waiting_lock.release() + finally: - result.append(repo_result) + # Save the request as a test case + if 'log-test-case' in self._config and self._config['log-test-case']: + self.save_test_case(test_case) - return result + def log_message(self, format, *args): + """Overloads the default message logging method to allow messages to + go through our custom logger instead.""" + import logging + logger = logging.getLogger() + logger.info("%s - %s" % (self.client_address[0], + format%args)) def save_test_case(self, test_case): """Log request information in a way it can be used as a test case.""" @@ -378,4 +429,3 @@ class WebhookRequestHandler(BaseHTTPRequestHandler): """Extends the BaseHTTPRequestHandler class and handles the incoming HTTP requests.""" - diff --git a/gitautodeploy/parsers/__init__.py b/gitautodeploy/parsers/__init__.py index 168c781..c993454 100644 --- a/gitautodeploy/parsers/__init__.py +++ b/gitautodeploy/parsers/__init__.py @@ -1,5 +1,6 @@ from bitbucket import BitBucketRequestParser from github import GitHubRequestParser -from gitlab import GitLabRequestParser, GitLabCIRequestParser +from gitlab import GitLabRequestParser +from gitlabci import GitLabCIRequestParser from generic import GenericRequestParser from coding import CodingRequestParser
\ No newline at end of file diff --git a/gitautodeploy/parsers/bitbucket.py b/gitautodeploy/parsers/bitbucket.py index 353435c..80899fd 100644 --- a/gitautodeploy/parsers/bitbucket.py +++ b/gitautodeploy/parsers/bitbucket.py @@ -2,22 +2,18 @@ from common import WebhookRequestParser class BitBucketRequestParser(WebhookRequestParser): - def get_repo_params_from_request(self, request_headers, request_body): + def get_repo_configs(self, request_headers, request_body, action): import json - import logging - logger = logging.getLogger() data = json.loads(request_body) repo_urls = [] - ref = "" - action = "" - logger.debug("Received event from BitBucket") + action.log_debug("Received event from BitBucket") if 'repository' not in data: - logger.error("Unable to recognize data format") - return [], ref or "master", action + action.log_error("Unable to recognize data format") + return [] # One repository may posses multiple URLs for different protocols for k in ['url', 'git_url', 'clone_url', 'ssh_url']: @@ -32,6 +28,6 @@ class BitBucketRequestParser(WebhookRequestParser): repo_urls.append('https://bitbucket.org/%s.git' % (data['repository']['full_name'])) # Get a list of configured repositories that matches the incoming web hook reqeust - repo_configs = self.get_matching_repo_configs(repo_urls) + repo_configs = self.get_matching_repo_configs(repo_urls, action) - return repo_configs, ref or "master", action, repo_urls
\ No newline at end of file + return repo_configs
\ No newline at end of file diff --git a/gitautodeploy/parsers/coding.py b/gitautodeploy/parsers/coding.py index 80ab77e..889c3ee 100644 --- a/gitautodeploy/parsers/coding.py +++ b/gitautodeploy/parsers/coding.py @@ -2,52 +2,38 @@ from common import WebhookRequestParser class CodingRequestParser(WebhookRequestParser): - def get_repo_params_from_request(self, request_headers, request_body): + def get_repo_configs(self, request_headers, request_body, action): import json - import logging - logger = logging.getLogger() data = json.loads(request_body) repo_urls = [] - ref = "" - action = "" coding_event = 'x-coding-event' in request_headers and request_headers['x-coding-event'] - logger.debug("Received '%s' event from Coding" % coding_event) - if 'repository' not in data: - logger.error("Unable to recognize data format") - return [], ref or "master", action + action.log_error("Unable to recognize data format") + return [] # One repository may posses multiple URLs for different protocols for k in ['web_url', 'https_url', 'ssh_url']: if k in data['repository']: repo_urls.append(data['repository'][k]) - # extract the branch - if 'ref' in data: - ref = data['ref'] - - # set the action - if 'event' in data: - action = data['event'] - # Get a list of configured repositories that matches the incoming web hook reqeust - items = self.get_matching_repo_configs(repo_urls) + items = self.get_matching_repo_configs(repo_urls, action) repo_configs = [] for repo_config in items: # Validate secret token if present if 'secret-token' in repo_config: if 'token' not in data or not self.verify_token(repo_config['secret-token'], data['token']): - logger.warning("Request token does not match the 'secret-token' configured for repository %s." % repo_config['url']) + action.log_warning("Request token does not match the 'secret-token' configured for repository %s." % repo_config['url']) continue repo_configs.append(repo_config) - return repo_configs, ref or "master", action, repo_urls + return repo_configs def verify_token(self, secret_token, request_token): diff --git a/gitautodeploy/parsers/common.py b/gitautodeploy/parsers/common.py index 1b40b73..b194062 100644 --- a/gitautodeploy/parsers/common.py +++ b/gitautodeploy/parsers/common.py @@ -6,7 +6,7 @@ class WebhookRequestParser(object): def __init__(self, config): self._config = config - def get_matching_repo_configs(self, urls): + def get_matching_repo_configs(self, urls, action): """Iterates over the various repo URLs provided as argument (git://, ssh:// and https:// for the repo) and compare them to any repo URL specified in the config""" @@ -21,7 +21,10 @@ class WebhookRequestParser(object): elif 'url_without_usernme' in repo_config and repo_config['url_without_usernme'] == url: configs.append(repo_config) + if len(configs) == 0: + action.log_warning('The URLs references in the webhook did not match any repository entry in the config. For this webhook to work, make sure you have at least one repository configured with one of the following URLs; %s' % ', '.join(urls)) + return configs - def validate_request(self, request_headers, repo_configs): + def validate_request(self, request_headers, repo_configs, action): return True
\ No newline at end of file diff --git a/gitautodeploy/parsers/generic.py b/gitautodeploy/parsers/generic.py index 7b150d2..a93e90b 100644 --- a/gitautodeploy/parsers/generic.py +++ b/gitautodeploy/parsers/generic.py @@ -2,22 +2,18 @@ from common import WebhookRequestParser class GenericRequestParser(WebhookRequestParser): - def get_repo_params_from_request(self, request_headers, request_body): + def get_repo_configs(self, request_headers, request_body, action): import json - import logging - logger = logging.getLogger() data = json.loads(request_body) repo_urls = [] - ref = "" - action = "" - logger.debug("Received event from unknown origin. Assume generic data format.") + action.log_info("Received event from unknown origin. Assume generic data format.") if 'repository' not in data: - logger.error("Unable to recognize data format") - return [], ref or "master", action + action.log_error("Unable to recognize data format") + return [] # One repository may posses multiple URLs for different protocols for k in ['url', 'git_http_url', 'git_ssh_url', 'http_url', 'ssh_url']: @@ -25,7 +21,7 @@ class GenericRequestParser(WebhookRequestParser): repo_urls.append(data['repository'][k]) # Get a list of configured repositories that matches the incoming web hook reqeust - repo_configs = self.get_matching_repo_configs(repo_urls) + repo_configs = self.get_matching_repo_configs(repo_urls, action) - return repo_configs, ref or "master", action, repo_urls + return repo_configs diff --git a/gitautodeploy/parsers/github.py b/gitautodeploy/parsers/github.py index 4d24648..bede7bb 100644 --- a/gitautodeploy/parsers/github.py +++ b/gitautodeploy/parsers/github.py @@ -2,59 +2,39 @@ from common import WebhookRequestParser class GitHubRequestParser(WebhookRequestParser): - def get_repo_params_from_request(self, request_headers, request_body): + def get_repo_configs(self, request_headers, request_body, action): import json - import logging - logger = logging.getLogger() data = json.loads(request_body) repo_urls = [] - ref = "" - action = "" github_event = 'x-github-event' in request_headers and request_headers['x-github-event'] - logger.debug("Received '%s' event from GitHub" % github_event) + action.log_info("Received '%s' event from GitHub" % github_event) if 'repository' not in data: - logger.error("Unable to recognize data format") - return [], ref or "master", action + action.log_error("Unable to recognize data format") + return [] # One repository may posses multiple URLs for different protocols for k in ['url', 'git_url', 'clone_url', 'ssh_url']: if k in data['repository']: repo_urls.append(data['repository'][k]) - if 'pull_request' in data: - if 'base' in data['pull_request']: - if 'ref' in data['pull_request']['base']: - ref = data['pull_request']['base']['ref'] - logger.debug("Pull request to branch '%s' was fired" % ref) - elif 'ref' in data: - ref = data['ref'] - logger.debug("Push to branch '%s' was fired" % ref) - - if 'action' in data: - action = data['action'] - logger.debug("Action '%s' was fired" % action) - # Get a list of configured repositories that matches the incoming web hook reqeust - repo_configs = self.get_matching_repo_configs(repo_urls) - - return repo_configs, ref or "master", action, repo_urls + repo_configs = self.get_matching_repo_configs(repo_urls, action) - def validate_request(self, request_headers, repo_configs): - import logging + return repo_configs - logger = logging.getLogger() + def validate_request(self, request_headers, repo_configs, action): for repo_config in 
repo_configs: # Validate secret token if present if 'secret-token' in repo_config and 'x-hub-signature' in request_headers: if not self.verify_signature(repo_config['secret-token'], request_body, request_headers['x-hub-signature']): - logger.info("Request signature does not match the 'secret-token' configured for repository %s." % repo_config['url']) + action.log_info("Request signature does not match the 'secret-token' configured for repository %s." % repo_config['url']) return False return True diff --git a/gitautodeploy/parsers/gitlab.py b/gitautodeploy/parsers/gitlab.py index 68a1982..5d0d348 100644 --- a/gitautodeploy/parsers/gitlab.py +++ b/gitautodeploy/parsers/gitlab.py @@ -2,47 +2,32 @@ from common import WebhookRequestParser class GitLabRequestParser(WebhookRequestParser): - def get_repo_params_from_request(self, request_headers, request_body): + def get_repo_configs(self, request_headers, request_body, action): import json - import logging - logger = logging.getLogger() data = json.loads(request_body) repo_urls = [] - ref = "" - action = "" gitlab_event = 'x-gitlab-event' in request_headers and request_headers['x-gitlab-event'] - logger.debug("Received '%s' event from GitLab" % gitlab_event) + action.log_info("Received '%s' event from GitLab" % gitlab_event) if 'repository' not in data: - logger.error("Unable to recognize data format") - return [], ref or "master", action + action.log_error("Unable to recognize data format") + return [] # One repository may posses multiple URLs for different protocols for k in ['url', 'git_http_url', 'git_ssh_url']: if k in data['repository']: repo_urls.append(data['repository'][k]) - # extract the branch - if 'ref' in data: - ref = data['ref'] - - # set the action - if 'object_kind' in data: - action = data['object_kind'] - # Get a list of configured repositories that matches the incoming web hook reqeust - repo_configs = self.get_matching_repo_configs(repo_urls) - - return repo_configs, ref or "master", action, repo_urls + repo_configs = self.get_matching_repo_configs(repo_urls, action) - def validate_request(self, request_headers, repo_configs): - import logging + return repo_configs - logger = logging.getLogger() + def validate_request(self, request_headers, repo_configs, action): for repo_config in repo_configs: @@ -50,49 +35,7 @@ class GitLabRequestParser(WebhookRequestParser): if 'secret-token' in repo_config and 'x-gitlab-token' in request_headers: if repo_config['secret-token'] != request_headers['x-gitlab-token']: - logger.info("Request token does not match the 'secret-token' configured for repository %s." % repo_config['url']) + action.log_info("Request token does not match the 'secret-token' configured for repository %s." % repo_config['url']) return False return True - - -class GitLabCIRequestParser(WebhookRequestParser): - - def get_repo_params_from_request(self, request_headers, request_body): - import json - import logging - - logger = logging.getLogger() - data = json.loads(request_body) - - repo_urls = [] - ref = "" - action = "" - - logger.debug('Received event from Gitlab CI') - - if 'repository' not in data: - logger.error("Unable to recognize data format") - return [], ref or "master", action - - # Only add repositories if the build is successful. Ignore it in other case. - if data['build_status'] == "success": - for k in ['url', 'git_http_url', 'git_ssh_url']: - if k in data['repository']: - repo_urls.append(data['repository'][k]) - else: - logger.warning("Gitlab CI build '%d' has status '%s'. 
Not pull will be done" % ( - data['build_id'], data['build_status'])) - - # extract the branch - if 'ref' in data: - ref = data['ref'] - - # set the action - if 'object_kind' in data: - action = data['object_kind'] - - # Get a list of configured repositories that matches the incoming web hook reqeust - repo_configs = self.get_matching_repo_configs(repo_urls) - - return repo_configs, ref or "master", action, repo_urls diff --git a/gitautodeploy/parsers/gitlabci.py b/gitautodeploy/parsers/gitlabci.py index 6b5e3ca..30a8b5f 100644 --- a/gitautodeploy/parsers/gitlabci.py +++ b/gitautodeploy/parsers/gitlabci.py @@ -1 +1,29 @@ from common import WebhookRequestParser + +class GitLabCIRequestParser(WebhookRequestParser): + + def get_repo_configs(self, request_headers, request_body, action): + import json + + data = json.loads(request_body) + + repo_urls = [] + + action.log_info('Received event from Gitlab CI') + + if 'repository' not in data: + action.log_error("Unable to recognize data format") + return [] + + # Only add repositories if the build is successful. Ignore it in other case. + if data['build_status'] == "success": + for k in ['url', 'git_http_url', 'git_ssh_url']: + if k in data['repository']: + repo_urls.append(data['repository'][k]) + else: + action.log_warning("Gitlab CI build '%d' has status '%s'. Not pull will be done" % (data['build_id'], data['build_status'])) + + # Get a list of configured repositories that matches the incoming web hook reqeust + repo_configs = self.get_matching_repo_configs(repo_urls, action) + + return repo_configs |
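For reference, the refactored request path now separates service detection (WebbhookRequestProcessor.get_service_handler, class name spelled as in the diff) from repository matching (each parser's get_repo_configs). Below is a hedged sketch of that flow outside the HTTP handler: the config dict and repository URL are invented, the import paths are assumed from the layout above, and it presumes the same Python 2 environment the rest of the codebase targets.

# Hedged sketch: routing a GitLab push payload through the refactored classes.
from gitautodeploy.httpserver import WebbhookRequestProcessor
from gitautodeploy.events import EventStore, WebhookAction

config = {'repositories': [{'url': 'git@example.com:group/repo.git'}]}  # illustrative config
headers = {'content-type': 'application/json', 'x-gitlab-event': 'Push Hook'}
body = '{"object_kind": "push", "repository": {"url": "git@example.com:group/repo.git"}}'

store = EventStore()
action = WebhookAction(('127.0.0.1', 0), request_headers=headers, request_body=body)
store.register_action(action)

processor = WebbhookRequestProcessor()
ServiceRequestParser = processor.get_service_handler(headers, body, action)
# -> parsers.GitLabRequestParser; GitLabCIRequestParser would be picked instead
#    if the JSON payload contained a "build_status" key.

repo_configs = ServiceRequestParser(config).get_repo_configs(headers, body, action)
# -> the single matching entry from config['repositories']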