author      Torben <torben.letorbi@gmail.com>  2015-12-04 16:55:14 +0100
committer   Torben <torben.letorbi@gmail.com>  2015-12-04 16:55:14 +0100
commit      b84d8919380399dfaef1c095a99d3fd8783144a1 (patch)
tree        f9e2cf0d16f8939c887f0a1a5baec08545eb9a49 /GitAutoDeploy.py
parent      a397e9d858e047ed8b6949a526475662165eb7cb (diff)
parent      26c8d323a882003f74a77890420d9199854dc507 (diff)
download    Git-Auto-Deploy-b84d8919380399dfaef1c095a99d3fd8783144a1.zip
            Git-Auto-Deploy-b84d8919380399dfaef1c095a99d3fd8783144a1.tar.gz
            Git-Auto-Deploy-b84d8919380399dfaef1c095a99d3fd8783144a1.tar.bz2
Merge remote-tracking branch 'upstream/master'
Diffstat (limited to 'GitAutoDeploy.py')
-rwxr-xr-x  GitAutoDeploy.py  974
1 file changed, 655 insertions(+), 319 deletions(-)
diff --git a/GitAutoDeploy.py b/GitAutoDeploy.py
index 563dbb5..4f18c95 100755
--- a/GitAutoDeploy.py
+++ b/GitAutoDeploy.py
@@ -1,325 +1,661 @@
#!/usr/bin/env python
-import json, urlparse, sys, os, signal, socket, re
-from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
-from subprocess import call
-from threading import Timer
-
-class GitAutoDeploy(BaseHTTPRequestHandler):
-
- CONFIG_FILEPATH = './GitAutoDeploy.conf.json'
- config = None
- debug = True
- quiet = False
- daemon = False
-
- @classmethod
- def getConfig(myClass):
- if(myClass.config == None):
- try:
- configString = open(myClass.CONFIG_FILEPATH).read()
- except:
- print "Could not load %s file" % myClass.CONFIG_FILEPATH
- sys.exit(2)
-
- try:
- myClass.config = json.loads(configString)
- except:
- print "%s file is not valid JSON" % myClass.CONFIG_FILEPATH
- sys.exit(2)
-
- for repository in myClass.config['repositories']:
- if(not os.path.isdir(repository['path'])):
- print "Directory %s not found" % repository['path']
- call(['git clone --recursive '+repository['url']+' '+repository['path']], shell=True)
- if(not os.path.isdir(repository['path'])):
- print "Unable to clone repository %s" % repository['url']
- sys.exit(2)
- else:
- print "Repository %s successfully cloned" % repository['url']
- if(not os.path.isdir(repository['path'] + '/.git')):
- print "Directory %s is not a Git repository" % repository['path']
- sys.exit(2)
- myClass.clearLock(repository['path'])
-
- return myClass.config
-
- def do_POST(self):
- urls = self.parseRequest()
- self.respond()
- Timer(1.0, self.do_process, [urls]).start()
-
- def do_process(self, urls):
- for url in urls:
- repos = self.getMatchingPaths(url)
- for repo in repos:
- if self.lock(repo['path']):
- try:
- n = 4
- while 0 < n and 0 != self.pull(repo['path'], repo['branch']):
- --n
- if 0 < n:
- self.deploy(repo['path'])
- except:
- call(['echo "Error during \'pull\' or \'deploy\' operation on path: ' + repo['path'] + '"'], shell=True)
- finally:
- self.unlock(repo['path'])
-
- def parseRequest(self):
- contenttype = self.headers.getheader('content-type')
- length = int(self.headers.getheader('content-length'))
- body = self.rfile.read(length)
-
- items = []
-
- try:
- if contenttype == "application/json" or contenttype == "application/x-www-form-urlencoded":
- post = urlparse.parse_qs(body)
-
- # If payload is missing, we assume gitlab syntax.
- if contenttype == "application/json" and "payload" not in post:
- mode = "github"
- # If x-www-form-urlencoded, we assume bitbucket syntax.
- elif contenttype == "application/x-www-form-urlencoded":
- mode = "bitbucket"
- # Oh Gitlab, dear Gitlab...
- else:
- mode = "gitlab"
-
-
- if mode == "github":
- response = json.loads(body)
- items.append(response['repository']['url'])
-
- elif mode == "bitbucket":
- for itemString in post['payload']:
- item = json.loads(itemString)
- items.append("ssh://hg@bitbucket.org" + item['repository']['absolute_url'][0:-1])
-
- # Otherwise, we assume github/bitbucket syntax.
- elif mode == "gitlab":
- for itemString in post['payload']:
- item = json.loads(itemString)
- items.append(item['repository']['url'])
-
- # WTF?!
- else:
- pass
- except Exception:
- pass
-
- return items
-
- def getMatchingPaths(self, repoUrl):
- res = []
- config = self.getConfig()
- for repository in config['repositories']:
- if(repository['url'] == repoUrl):
- res.append({
- 'path': repository['path'],
- 'branch': ('branch' in repository) and repository['branch'] or 'master'
- })
- return res
-
- def respond(self):
- self.send_response(200)
- self.send_header('Content-type', 'text/plain')
- self.end_headers()
-
- def lock(self, path):
- return 0 == call(['sh lock.sh "' + path + '"'], shell=True)
-
- def unlock(self, path):
- call(['sh unlock.sh "' + path + '"'], shell=True)
-
- @classmethod
- def clearLock(myClass, path):
- call(['sh clear_lock.sh "' + path + '"'], shell=True)
-
- def pull(self, path, branch):
- if(not self.quiet):
- print "\nPost push request received"
- print 'Updating ' + path
- res = call(['sleep 5; cd "' + path + '" && unset GIT_DIR && git fetch origin && git update-index --refresh && git reset --hard origin/' + branch + ' && git submodule init && git submodule update'], shell=True)
- call(['echo "Pull result: ' + str(res) + '"'], shell=True)
- return res
-
- def deploy(self, path):
- config = self.getConfig()
- for repository in config['repositories']:
- if(repository['path'] == path):
- cmds = []
- if 'deploy' in repository:
- cmds.append(repository['deploy'])
-
- gd = config['global_deploy']
- if len(gd[0]) is not 0:
- cmds.insert(0, gd[0])
- if len(gd[1]) is not 0:
- cmds.append(gd[1])
-
- if(not self.quiet):
- print 'Executing deploy command(s)'
- for cmd in cmds:
- call(['cd "' + path + '" && ' + cmd], shell=True)
-
- break
-
-
-class GitAutoDeployMain:
-
- server = None
-
- def run(self):
- for arg in sys.argv:
- if(arg == '-d' or arg == '--daemon-mode'):
- GitAutoDeploy.daemon = True
- GitAutoDeploy.quiet = True
- if(arg == '-q' or arg == '--quiet'):
- GitAutoDeploy.quiet = True
- if(arg == '--ssh-keyscan'):
- print 'Scanning repository hosts for ssh keys...'
- self.ssh_key_scan()
- if(arg == '--force'):
- print '[KILLER MODE] Warning: The --force option will try to kill any process ' \
- 'using %s port. USE AT YOUR OWN RISK' %GitAutoDeploy.getConfig()['port']
- self.kill_them_all()
-
- if(GitAutoDeploy.daemon):
- pid = os.fork()
- if(pid > 0):
- sys.exit(0)
- os.setsid()
-
- self.create_pidfile()
-
- if(not GitAutoDeploy.quiet):
- print 'Github & Gitlab Autodeploy Service v 0.1 started'
- else:
- print 'Github & Gitlab Autodeploy Service v 0.1 started in daemon mode'
-
- try:
- self.server = HTTPServer((GitAutoDeploy.getConfig()['host'], GitAutoDeploy.getConfig()['port']), GitAutoDeploy)
- sa = self.server.socket.getsockname()
- print "Listeing on", sa[0], "port", sa[1]
- self.server.serve_forever()
- except socket.error, e:
- if(not GitAutoDeploy.quiet and not GitAutoDeploy.daemon):
- print "Error on socket: %s" % e
- self.debug_diagnosis()
- sys.exit(1)
-
- def ssh_key_scan(self):
- for repository in GitAutoDeploy.getConfig()['repositories']:
- url = repository['url']
- print "Scanning repository: %s" % url
- m = re.match('.*@(.*?):', url)
- if(m != None):
- port = repository['port']
- port = '' if port == None else ('-p' + port)
- call(['ssh-keyscan -t ecdsa,rsa ' + port + ' ' + m.group(1) + ' >> $HOME/.ssh/known_hosts'], shell=True)
- else:
- print 'Could not find regexp match in path: %s' % url
-
- def kill_them_all(self):
- pid = self.get_pid_on_port(GitAutoDeploy.getConfig()['port'])
- if pid == False:
- print '[KILLER MODE] I don\'t know the number of pid that is using my configured port\n ' \
- '[KILLER MODE] Maybe no one? Please, use --force option carefully'
- return False
-
- os.kill(pid, signal.SIGKILL)
- return True
-
- def create_pidfile(self):
- with open(GitAutoDeploy.getConfig()['pidfilepath'], 'w') as f:
- f.write(str(os.getpid()))
-
- def read_pidfile(self):
- with open(GitAutoDeploy.getConfig()['pidfilepath'],'r') as f:
- return f.readlines()
-
- def remove_pidfile(self):
- os.remove(GitAutoDeploy.getConfig()['pidfilepath'])
-
- def debug_diagnosis(self):
- if GitAutoDeploy.debug == False:
- return
-
- port = GitAutoDeploy.getConfig()['port']
- pid = self.get_pid_on_port(port)
- if pid == False:
- print 'I don\'t know the number of pid that is using my configured port'
- return
-
- print 'Process with pid number %s is using port %s' % (pid, port)
- with open("/proc/%s/cmdline" % pid) as f:
- cmdline = f.readlines()
- print 'cmdline ->', cmdline[0].replace('\x00', ' ')
-
- def get_pid_on_port(self,port):
- with open("/proc/net/tcp",'r') as f:
- filecontent = f.readlines()[1:]
-
- pids = [int(x) for x in os.listdir('/proc') if x.isdigit()]
- conf_port = str(GitAutoDeploy.getConfig()['port'])
- mpid = False
-
- for line in filecontent:
- if mpid != False:
- break
-
- _, laddr, _, _, _, _, _, _, _, inode = line.split()[:10]
- decport = str(int(laddr.split(':')[1], 16))
-
- if decport != conf_port:
- continue
-
- for pid in pids:
- try:
- path = "/proc/%s/fd" % pid
- if os.access(path, os.R_OK) is False:
- continue
-
- for fd in os.listdir(path):
- cinode = os.readlink("/proc/%s/fd/%s" % (pid, fd))
- minode = cinode.split(":")
-
- if len(minode) == 2 and minode[1][1:-1] == inode:
- mpid = pid
- except Exception as e:
- pass
- return mpid
-
-
- def stop(self):
- if(self.server is not None):
- self.server.socket.close()
-
- def exit(self):
- if(not GitAutoDeploy.quiet):
- print '\nGoodbye'
- self.remove_pidfile()
- sys.exit(0)
-
- def signal_handler(self, signum, frame):
- self.stop()
- if(signum == 1):
- self.run()
- return
- elif(signum == 2):
- print '\nKeyboard Interrupt!!!'
- elif(signum == 6):
- print 'Requested close by SIGABRT (process abort signal). Code 6.'
-
- self.exit()
+
+class Lock():
+ """Simple implementation of a mutex lock using the file systems. Works on *nix systems."""
+
+ path = None
+ _has_lock = False
+
+ def __init__(self, path):
+ self.path = path
+
+ def obtain(self):
+ import os
+
+ try:
+ os.open(self.path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+ self._has_lock = True
+ print "Successfully obtained lock: %s" % self.path
+ except OSError:
+ return False
+ else:
+ return True
+
+ def release(self):
+ import os
+
+ if not self._has_lock:
+ raise Exception("Unable to release lock that is owned by another process")
+ try:
+ os.remove(self.path)
+ print "Successfully released lock: %s" % self.path
+ finally:
+ self._has_lock = False
+
+ def has_lock(self):
+ return self._has_lock
+
+ def clear(self):
+ import os
+
+ try:
+ os.remove(self.path)
+ except OSError:
+ pass
+ finally:
+ print "Successfully cleared lock: %s" % self.path
+ self._has_lock = False
+
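A minimal usage sketch of the file-based lock above, in the same Python 2 style as the module; the lock file path is a hypothetical example (the real callers below use status_running/status_waiting files inside each repository path):

    lock = Lock('/tmp/example-repo/status_running')  # hypothetical lock file
    if lock.obtain():
        try:
            pass  # critical section: pull and deploy would run here
        finally:
            lock.release()
    else:
        print "Another process already holds the lock"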
+
+class GitWrapper():
+ """Wraps the git client. Currently uses git through shell command invocations."""
+
+ def __init__(self):
+ pass
+
+ @staticmethod
+ def pull(repo_config):
+ """Pulls the latest version of the repo from the git server"""
+ from subprocess import call
+
+ branch = ('branch' in repo_config) and repo_config['branch'] or 'master'
+ remote = ('remote' in repo_config) and repo_config['remote'] or 'origin'
+
+ print "Post push request received"
+ print 'Updating ' + repo_config['path']
+
+ cmd = 'cd "' + repo_config['path'] + '"' \
+ '&& unset GIT_DIR ' + \
+ '&& git fetch ' + remote + \
+ '&& git reset --hard ' + remote + '/' + branch + ' ' + \
+ '&& git submodule init ' + \
+ '&& git submodule update'
+
+ # '&& git update-index --refresh ' +\
+
+ res = call([cmd], shell=True)
+ print 'Pull result: ' + str(res)
+
+ return int(res)
+
+ @staticmethod
+ def clone(url, path):
+ from subprocess import call
+
+ call(['git clone --recursive %s %s' % (url, path)], shell=True)
+
+
+ @staticmethod
+ def deploy(repo_config):
+ """Executes any supplied post-pull deploy command"""
+ from subprocess import call
+
+ path = repo_config['path']
+
+ cmds = []
+ if 'deploy' in repo_config:
+ cmds.append(repo_config['deploy'])
+
+ gd = GitAutoDeploy().get_config()['global_deploy']
+        if len(gd[0]) != 0:
+            cmds.insert(0, gd[0])
+        if len(gd[1]) != 0:
+ cmds.append(gd[1])
+
+ print 'Executing deploy command(s)'
+
+ for cmd in cmds:
+ call(['cd "' + path + '" && ' + cmd], shell=True)
+
+
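For illustration, a sketch of the repo_config dictionary that GitWrapper.pull() and GitWrapper.deploy() consume; all values are hypothetical placeholders, 'branch' and 'remote' fall back to 'master' and 'origin' when omitted, and deploy() additionally reads global_deploy from the loaded config:

    repo_config = {
        'path': '/var/www/example-repo',           # local clone to update (placeholder)
        'url': 'git@github.com:example/repo.git',  # used for cloning when the path is missing (placeholder)
        'branch': 'master',                        # optional, defaults to 'master'
        'remote': 'origin',                        # optional, defaults to 'origin'
        'deploy': 'touch deployed',                # optional per-repository deploy command (placeholder)
    }
    if GitWrapper.pull(repo_config) == 0:
        GitWrapper.deploy(repo_config)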
+from BaseHTTPServer import BaseHTTPRequestHandler
+
+
+class WebhookRequestHandler(BaseHTTPRequestHandler):
+ """Extends the BaseHTTPRequestHandler class and handles the incoming HTTP requests."""
+
+ def do_POST(self):
+ """Invoked on incoming POST requests"""
+ from threading import Timer
+
+ # Extract repository URL(s) from incoming request body
+ repo_urls = self.get_repo_urls_from_request()
+
+ self.send_response(200)
+ self.send_header('Content-type', 'text/plain')
+ self.end_headers()
+
+ # Wait one second before we do git pull (why?)
+ Timer(1.0, GitAutoDeploy.process_repo_urls, [repo_urls]).start()
+
+ def get_repo_urls_from_request(self):
+ """Parses the incoming request and extracts all possible URLs to the repository in question. Since repos can
+    have ssh://, git:// and https:// URIs, and we don't know which of them is specified in the config, we need
+ to collect and compare them all."""
+ import json
+
+ content_type = self.headers.getheader('content-type')
+ length = int(self.headers.getheader('content-length'))
+ body = self.rfile.read(length)
+
+ data = json.loads(body)
+
+ repo_urls = []
+
+ gitlab_event = self.headers.getheader('X-Gitlab-Event')
+ github_event = self.headers.getheader('X-GitHub-Event')
+ user_agent = self.headers.getheader('User-Agent')
+
+ # Assume GitLab if the X-Gitlab-Event HTTP header is set
+ if gitlab_event:
+
+ print "Received '%s' event from GitLab" % gitlab_event
+
+ if not 'repository' in data:
+ print "ERROR - Unable to recognize data format"
+ return repo_urls
+
+            # One repository may possess multiple URLs for different protocols
+ for k in ['url', 'git_http_url', 'git_ssh_url']:
+ if k in data['repository']:
+ repo_urls.append(data['repository'][k])
+
+ # Assume GitHub if the X-GitHub-Event HTTP header is set
+ elif github_event:
+
+ print "Received '%s' event from GitHub" % github_event
+
+ if not 'repository' in data:
+ print "ERROR - Unable to recognize data format"
+ return repo_urls
+
+            # One repository may possess multiple URLs for different protocols
+ for k in ['url', 'git_url', 'clone_url', 'ssh_url']:
+ if k in data['repository']:
+ repo_urls.append(data['repository'][k])
+
+ # Assume BitBucket if the User-Agent HTTP header is set to 'Bitbucket-Webhooks/2.0' (or something similar)
+ elif user_agent and user_agent.lower().find('bitbucket') != -1:
+
+ print "Received event from BitBucket"
+
+ if not 'repository' in data:
+ print "ERROR - Unable to recognize data format"
+ return repo_urls
+
+            # One repository may possess multiple URLs for different protocols
+ for k in ['url', 'git_url', 'clone_url', 'ssh_url']:
+ if k in data['repository']:
+ repo_urls.append(data['repository'][k])
+
+ if 'full_name' in data['repository']:
+ repo_urls.append('git@bitbucket.org:%s.git' % data['repository']['full_name'])
+
+            # Add a simplified version of the bitbucket HTTPS URL - without the username@bitbucket.org part. This is
+ # needed since the configured repositories might be configured using a different username.
+ repo_urls.append('https://bitbucket.org/%s.git' % (data['repository']['full_name']))
+
+ # If payload is missing, and GitLab wasn't identified through HTTP header, we assume older GitLab syntax.
+ elif content_type == "application/json" and 'payload' not in data and "build_status" not in data:
+
+ print "Received event from GitLab (old syntax)"
+
+ if not 'repository' in data:
+ print "ERROR - Unable to recognize data format"
+ return repo_urls
+
+            # One repository may possess multiple URLs for different protocols
+ for k in ['url', 'git_http_url', 'git_ssh_url']:
+ if k in data['repository']:
+ repo_urls.append(data['repository'][k])
+
+ # Special Case for Gitlab CI
+ elif content_type == "application/json" and "build_status" in data:
+
+ print 'Received event from Gitlab CI'
+
+ if not 'push_data' in data:
+ print "ERROR - Unable to recognize data format"
+ return repo_urls
+
+            # Only add repositories if the build is successful. Ignore it otherwise.
+ if data['build_status'] == "success":
+ for k in ['url', 'git_http_url', 'git_ssh_url']:
+ if k in data['push_data']['repository']:
+ repo_urls.append(data['push_data']['repository'][k])
+ else:
+ print "Gitlab CI build '%d' has status '%s'. Not pull will be done" % (
+ data['build_id'], data['build_status'])
+
+ else:
+ print "ERROR - Unable to recognize request origin. Don't know how to handle the request. Outdated GitLab?"
+
+ return repo_urls
+
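As an illustration of the parsing above, an abbreviated, hypothetical GitHub-style push payload and the URLs the handler would collect from it (real payloads carry many more fields):

    # data, as returned by json.loads(body) for a request with the X-GitHub-Event header set:
    data = {
        'repository': {
            'url': 'https://github.com/example/repo',
            'clone_url': 'https://github.com/example/repo.git',
            'ssh_url': 'git@github.com:example/repo.git',
        }
    }
    # get_repo_urls_from_request() would then return, following the key order checked above:
    # ['https://github.com/example/repo',
    #  'https://github.com/example/repo.git',
    #  'git@github.com:example/repo.git']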
+
+class GitAutoDeploy(object):
+ config_path = None
+ debug = True
+ daemon = False
+
+ _instance = None
+ _server = None
+ _config = None
+
+ def __new__(cls, *args, **kwargs):
+ """Overload constructor to enable Singleton access"""
+ if not cls._instance:
+ cls._instance = super(GitAutoDeploy, cls).__new__(
+ cls, *args, **kwargs)
+ return cls._instance
+
+ @staticmethod
+ def debug_diagnosis(port):
+ if GitAutoDeploy.debug is False:
+ return
+
+ pid = GitAutoDeploy.get_pid_on_port(port)
+ if pid is False:
+            print 'Unable to determine which PID is using the configured port'
+ return
+
+ print 'Process with pid number %s is using port %s' % (pid, port)
+ with open("/proc/%s/cmdline" % pid) as f:
+ cmdline = f.readlines()
+ print 'cmdline ->', cmdline[0].replace('\x00', ' ')
+
+ @staticmethod
+ def get_pid_on_port(port):
+ import os
+
+ with open("/proc/net/tcp", 'r') as f:
+ file_content = f.readlines()[1:]
+
+ pids = [int(x) for x in os.listdir('/proc') if x.isdigit()]
+ conf_port = str(port)
+ mpid = False
+
+ for line in file_content:
+ if mpid is not False:
+ break
+
+ _, laddr, _, _, _, _, _, _, _, inode = line.split()[:10]
+ decport = str(int(laddr.split(':')[1], 16))
+
+ if decport != conf_port:
+ continue
+
+ for pid in pids:
+ try:
+ path = "/proc/%s/fd" % pid
+ if os.access(path, os.R_OK) is False:
+ continue
+
+ for fd in os.listdir(path):
+ cinode = os.readlink("/proc/%s/fd/%s" % (pid, fd))
+ minode = cinode.split(":")
+
+ if len(minode) == 2 and minode[1][1:-1] == inode:
+ mpid = pid
+
+ except Exception as e:
+ pass
+
+ return mpid
+
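A small worked example of the /proc/net/tcp parsing above, using a hypothetical entry; only the fields the code reads are shown:

    # A line whose local_address column is "00000000:1F41" and whose inode column is "12345"
    # describes a socket bound to port 8001, since the port is the hex part after ':':
    print int("00000000:1F41".split(':')[1], 16)  # -> 8001
    # The inode "12345" is then matched against /proc/<pid>/fd symlink targets of the
    # form "socket:[12345]" to identify the process that owns the socket.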
+ @staticmethod
+ def process_repo_urls(urls):
+ import os
+ import time
+
+        # Get a list of configured repositories that match the incoming webhook request
+ repo_configs = GitAutoDeploy().get_matching_repo_configs(urls)
+
+ if len(repo_configs) == 0:
+ print 'Unable to find any of the repository URLs in the config: %s' % ', '.join(urls)
+ return
+
+ # Process each matching repository
+ for repo_config in repo_configs:
+
+ running_lock = Lock(os.path.join(repo_config['path'], 'status_running'))
+ waiting_lock = Lock(os.path.join(repo_config['path'], 'status_waiting'))
+ try:
+
+ # Attempt to obtain the status_running lock
+ while not running_lock.obtain():
+
+ # If we're unable, try once to obtain the status_waiting lock
+ if not waiting_lock.has_lock() and not waiting_lock.obtain():
+ print "Unable to obtain the status_running lock nor the status_waiting lock. Another process is " \
+ + "already waiting, so we'll ignore the request."
+
+ # If we're unable to obtain the waiting lock, ignore the request
+ break
+
+ # Keep on attempting to obtain the status_running lock until we succeed
+ time.sleep(5)
+
+ n = 4
+ while 0 < n and 0 != GitWrapper.pull(repo_config):
+ n -= 1
+
+ if 0 < n:
+ GitWrapper.deploy(repo_config)
+
+ except Exception as e:
+ print 'Error during \'pull\' or \'deploy\' operation on path: %s' % repo_config['path']
+ print e
+
+ finally:
+
+ # Release the lock if it's ours
+ if running_lock.has_lock():
+ running_lock.release()
+
+ # Release the lock if it's ours
+ if waiting_lock.has_lock():
+ waiting_lock.release()
+
+ def get_default_config_path(self):
+ import os
+ import re
+
+ if self.config_path:
+ return self.config_path
+
+ # Look for a custom config file if no path is provided as argument
+ target_directories = [
+ os.path.dirname(os.path.realpath(__file__)), # Script path
+ ]
+
+ # Add current CWD if not identical to script path
+ if not os.getcwd() in target_directories:
+ target_directories.append(os.getcwd())
+
+ target_directories.reverse()
+
+ # Look for a *conf.json or *config.json
+ for dir in target_directories:
+ for item in os.listdir(dir):
+                if re.search(r"conf(ig)?\.json$", item):
+ path = os.path.realpath(os.path.join(dir, item))
+ print "Using '%s' as config" % path
+ return path
+
+ return './GitAutoDeploy.conf.json'
+
+ def get_config(self):
+ import json
+ import sys
+ import os
+ import re
+
+ if self._config:
+ return self._config
+
+ if not self.config_path:
+ self.config_path = self.get_default_config_path()
+
+ try:
+ config_string = open(self.config_path).read()
+
+ except Exception as e:
+ print "Could not load %s file\n" % self.config_path
+ raise e
+
+ try:
+ self._config = json.loads(config_string)
+
+ except Exception as e:
+ print "%s file is not valid JSON\n" % self.config_path
+ raise e
+
+ # Translate any ~ in the path into /home/<user>
+ if 'pidfilepath' in self._config:
+ self._config['pidfilepath'] = os.path.expanduser(self._config['pidfilepath'])
+
+ for repo_config in self._config['repositories']:
+
+ # If a Bitbucket repository is configured using the https:// URL, a username is usually
+ # specified in the beginning of the URL. To be able to compare configured Bitbucket
+ # repositories with incoming web hook events, this username needs to be stripped away in a
+ # copy of the URL.
+ if 'url' in repo_config and not 'bitbucket_username' in repo_config:
+ regexp = re.search(r"^(https?://)([^@]+)@(bitbucket\.org/)(.+)$", repo_config['url'])
+ if regexp:
+                    repo_config['url_without_username'] = regexp.group(1) + regexp.group(3) + regexp.group(4)
+
+ # Translate any ~ in the path into /home/<user>
+ if 'path' in repo_config:
+ repo_config['path'] = os.path.expanduser(repo_config['path'])
+
+ if not os.path.isdir(repo_config['path']):
+
+ print "Directory %s not found" % repo_config['path']
+ GitWrapper.clone(url=repo_config['url'], path=repo_config['path'])
+
+ if not os.path.isdir(repo_config['path']):
+ print "Unable to clone repository %s" % repo_config['url']
+ sys.exit(2)
+
+ else:
+ print "Repository %s successfully cloned" % repo_config['url']
+
+ if not os.path.isdir(repo_config['path'] + '/.git'):
+ print "Directory %s is not a Git repository" % repo_config['path']
+ sys.exit(2)
+
+ return self._config
+
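A minimal, illustrative config covering only the keys the code above reads (host, port, pidfilepath, global_deploy and the per-repository entries); every value below is a placeholder, not a project default:

    {
        "pidfilepath": "~/.gitautodeploy.pid",
        "host": "0.0.0.0",
        "port": 8001,
        "global_deploy": ["", ""],
        "repositories": [
            {
                "url": "git@github.com:example/repo.git",
                "path": "~/repositories/repo",
                "branch": "master",
                "remote": "origin",
                "deploy": "echo deployed"
            }
        ]
    }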
+ def get_matching_repo_configs(self, urls):
+ """Iterates over the various repo URLs provided as argument (git://, ssh:// and https:// for the repo) and
+        compares them to any repo URL specified in the config"""
+
+ config = self.get_config()
+ configs = []
+
+ for url in urls:
+ for repo_config in config['repositories']:
+ if repo_config in configs:
+ continue
+ if repo_config['url'] == url:
+ configs.append(repo_config)
+                elif 'url_without_username' in repo_config and repo_config['url_without_username'] == url:
+ configs.append(repo_config)
+ return configs
+
+ def ssh_key_scan(self):
+ import re
+ from subprocess import call
+
+ for repository in self.get_config()['repositories']:
+
+ url = repository['url']
+ print "Scanning repository: %s" % url
+ m = re.match('.*@(.*?):', url)
+
+ if m is not None:
+ port = repository['port']
+ port = '' if port is None else ('-p' + port)
+ call(['ssh-keyscan -t ecdsa,rsa ' + port + ' ' + m.group(1) + ' >> $HOME/.ssh/known_hosts'], shell=True)
+
+ else:
+ print 'Could not find regexp match in path: %s' % url
+
+ def kill_conflicting_processes(self):
+ import os
+
+ pid = GitAutoDeploy.get_pid_on_port(self.get_config()['port'])
+
+ if pid is False:
+            print '[KILLER MODE] Unable to determine which PID is using the configured port\n' \
+                  '[KILLER MODE] Maybe none? Please use the --force option carefully'
+ return False
+
+ os.kill(pid, signal.SIGKILL)
+ return True
+
+ def create_pid_file(self):
+ import os
+
+ with open(self.get_config()['pidfilepath'], 'w') as f:
+ f.write(str(os.getpid()))
+
+ def read_pid_file(self):
+ with open(self.get_config()['pidfilepath'], 'r') as f:
+ return f.readlines()
+
+ def remove_pid_file(self):
+ import os
+
+ os.remove(self.get_config()['pidfilepath'])
+
+ def exit(self):
+ import sys
+
+ print '\nGoodbye'
+ self.remove_pid_file()
+ sys.exit(0)
+
+ @staticmethod
+ def create_daemon():
+ import os
+
+ try:
+ # Spawn first child
+ pid = os.fork()
+ except OSError, e:
+ raise Exception("%s [%d]" % (e.strerror, e.errno))
+
+ # First child
+ if pid == 0:
+ os.setsid()
+
+ try:
+ # Spawn second child
+ pid = os.fork()
+ except OSError, e:
+ raise Exception, "%s [%d]" % (e.strerror, e.errno)
+
+ if pid == 0:
+ os.chdir('/')
+ os.umask(0)
+ else:
+ # Kill first child
+ os._exit(0)
+ else:
+ # Kill parent of first child
+ os._exit(0)
+
+ import resource
+
+ maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
+ if maxfd == resource.RLIM_INFINITY:
+ maxfd = 1024
+
+ # Close all file descriptors
+ for fd in range(0, maxfd):
+ try:
+ os.close(fd)
+ except OSError:
+ # Ignore errors if fd isn't opened
+ pass
+
+ # Redirect standard input, output and error to devnull since we won't have a terminal
+ os.open(os.devnull, os.O_RDWR)
+ os.dup2(0, 1)
+ os.dup2(0, 2)
+
+ return 0
+
+ def run(self):
+ from sys import argv
+ import sys
+ from BaseHTTPServer import HTTPServer
+ import socket
+ import os
+
+ if '-d' in argv or '--daemon-mode' in argv:
+ self.daemon = True
+
+ if '--ssh-keygen' in argv:
+ print 'Scanning repository hosts for ssh keys...'
+ self.ssh_key_scan()
+
+ if '--force' in argv:
+ print 'Attempting to kill any other process currently occupying port %s' % self.get_config()['port']
+ self.kill_conflicting_processes()
+
+ if '--config' in argv:
+ pos = argv.index('--config')
+ if len(argv) > pos + 1:
+ self.config_path = os.path.realpath(argv[argv.index('--config') + 1])
+ print 'Using custom configuration file \'%s\'' % self.config_path
+
+ # Initialize config
+ self.get_config()
+
+ if self.daemon:
+ print 'Starting Git Auto Deploy in daemon mode'
+ GitAutoDeploy.create_daemon()
+ else:
+ print 'Git Auto Deploy started'
+
+ self.create_pid_file()
+
+ # Suppress output
+ if '-q' in argv or '--quiet' in argv:
+ sys.stdout = open(os.devnull, 'w')
+
+ # Clear any existing lock files, with no regard to possible ongoing processes
+ for repo_config in self.get_config()['repositories']:
+ Lock(os.path.join(repo_config['path'], 'status_running')).clear()
+ Lock(os.path.join(repo_config['path'], 'status_waiting')).clear()
+
+ try:
+ self._server = HTTPServer((self.get_config()['host'], self.get_config()['port']), WebhookRequestHandler)
+ sa = self._server.socket.getsockname()
+ print "Listening on", sa[0], "port", sa[1]
+ self._server.serve_forever()
+
+ except socket.error, e:
+
+ if not GitAutoDeploy.daemon:
+ print "Error on socket: %s" % e
+ GitAutoDeploy.debug_diagnosis(self.get_config()['port'])
+
+ sys.exit(1)
+
+ def stop(self):
+ if self._server is not None:
+ self._server.socket.close()
+
+ def signal_handler(self, signum, frame):
+ self.stop()
+
+ if signum == 1:
+ self.run()
+ return
+
+ elif signum == 2:
+ print '\nRequested close by keyboard interrupt signal'
+
+ elif signum == 6:
+ print 'Requested close by SIGABRT (process abort signal). Code 6.'
+
+ self.exit()
+
if __name__ == '__main__':
-    gadm = GitAutoDeployMain()
+    import signal
+
+    app = GitAutoDeploy()
-    signal.signal(signal.SIGHUP, gadm.signal_handler)
-    signal.signal(signal.SIGINT, gadm.signal_handler)
-    signal.signal(signal.SIGABRT, gadm.signal_handler)
-    signal.signal(signal.SIGPIPE, signal.SIG_IGN)
+    signal.signal(signal.SIGHUP, app.signal_handler)
+    signal.signal(signal.SIGINT, app.signal_handler)
+    signal.signal(signal.SIGABRT, app.signal_handler)
+    signal.signal(signal.SIGPIPE, signal.SIG_IGN)
-    gadm.run()
+    app.run()
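Based on the argument handling in run() above, a hypothetical invocation might look like the following; the config path is illustrative, and --config, -d/--daemon-mode, -q/--quiet, --ssh-keygen and --force are the options the code checks for:

    python GitAutoDeploy.py --config ./GitAutoDeploy.conf.json --daemon-mode --quiet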