Changeset View
Changeset View
Standalone View
Standalone View
scap/main.py
# -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||
""" | """ | ||||
scap.main | scap.main | ||||
~~~~~~~~~~ | ~~~~~~~~~~ | ||||
Command wrappers for scap tasks | Command wrappers for scap tasks | ||||
""" | """ | ||||
import argparse | import argparse | ||||
import errno | import errno | ||||
import glob | |||||
import hashlib | import hashlib | ||||
import multiprocessing | import multiprocessing | ||||
import netifaces | import netifaces | ||||
import os | import os | ||||
import psutil | import psutil | ||||
import requests | import requests | ||||
import shutil | import shutil | ||||
import subprocess | import subprocess | ||||
import time | |||||
import yaml | import yaml | ||||
from . import checks | from . import checks | ||||
from . import cli | from . import cli | ||||
from . import log | from . import log | ||||
from . import nrpe | from . import nrpe | ||||
from . import ssh | from . import ssh | ||||
from . import tasks | from . import tasks | ||||
▲ Show 20 Lines • Show All 554 Lines • ▼ Show 20 Lines | def main(self, *extra_args): | ||||
self.announce('Finished HHVM restart: %s (duration: %s)', | self.announce('Finished HHVM restart: %s (duration: %s)', | ||||
self.arguments.message, utils.human_duration(self.get_duration())) | self.arguments.message, utils.human_duration(self.get_duration())) | ||||
self.get_stats().increment('deploy.restart') | self.get_stats().increment('deploy.restart') | ||||
return exit_code | return exit_code | ||||
class DeployApplication(cli.Application):
    """Base class for service-deploy applications.

    Resolves and caches the deploy tree paths (root, scap, and log
    directories) once configuration has been loaded, so subclasses can
    rely on ``self.root_dir``, ``self.scap_dir`` and ``self.log_dir``.
    """

    def _load_config(self):
        """Initializes commonly used attributes after the config is loaded."""
        super(DeployApplication, self)._load_config()

        deploy_root = os.path.join(self.config['git_deploy_dir'],
                                   self.config['git_repo'])
        self.root_dir = deploy_root
        self.scap_dir = os.path.join(deploy_root, 'scap')
        self.log_dir = os.path.join(deploy_root, 'scap', 'log')
class DeployLocal(DeployApplication): | |||||
"""Deploy service code via git""" | """Deploy service code via git""" | ||||
STAGES = ['config_deploy', 'fetch', 'promote'] | STAGES = ['config_deploy', 'fetch', 'promote'] | ||||
EX_STAGES = ['rollback'] | EX_STAGES = ['rollback'] | ||||
rev = None | rev = None | ||||
cache_dir = None | cache_dir = None | ||||
revs_dir = None | revs_dir = None | ||||
rev_dir = None | rev_dir = None | ||||
cur_link = None | cur_link = None | ||||
progress_flag = None | progress_flag = None | ||||
done_flag = None | done_flag = None | ||||
user = None | user = None | ||||
@cli.argument('stage', metavar='STAGE', choices=STAGES + EX_STAGES, | @cli.argument('stage', metavar='STAGE', choices=STAGES + EX_STAGES, | ||||
help='Stage of the deployment to execute') | help='Stage of the deployment to execute') | ||||
def main(self, *extra_args): | def main(self, *extra_args): | ||||
self.rev = self.config['git_rev'] | self.rev = self.config['git_rev'] | ||||
self.root_dir = os.path.normpath("{0}/{1}".format( | |||||
self.config['git_deploy_dir'], self.config['git_repo'])) | |||||
# cache, revs, and current directory go under [repo]-cache and are | # cache, revs, and current directory go under [repo]-cache and are | ||||
# linked to [repo] as a final step | # linked to [repo] as a final step | ||||
root_deploy_dir = '{}-cache'.format(self.root_dir) | root_deploy_dir = '{}-cache'.format(self.root_dir) | ||||
def deploy_dir(subdir): | def deploy_dir(subdir): | ||||
return os.path.join(root_deploy_dir, subdir) | return os.path.join(root_deploy_dir, subdir) | ||||
self.cache_dir = deploy_dir('cache') | self.cache_dir = deploy_dir('cache') | ||||
▲ Show 20 Lines • Show All 296 Lines • ▼ Show 20 Lines | class DeployLocal(DeployApplication): | ||||
    def _remove_progress_link(self):
        """Remove the in-progress flag symlink for this deploy.

        Delegates to ``tasks.remove_symlink`` so the removal is performed
        as ``self.user`` (presumably the deploy user -- confirm with the
        ``tasks`` module).
        """
        tasks.remove_symlink(self.progress_flag, user=self.user)
def _remove_config_digest(self): | def _remove_config_digest(self): | ||||
if os.path.exists(self.cfg_digest): | if os.path.exists(self.cfg_digest): | ||||
os.unlink(self.cfg_digest) | os.unlink(self.cfg_digest) | ||||
class Deploy(cli.Application): | class Deploy(DeployApplication): | ||||
"""Sync new service code across cluster | """Sync new service code across cluster | ||||
Uses local .scaprc as config for each host in cluster | Uses local .scaprc as config for each host in cluster | ||||
""" | """ | ||||
MAX_BATCH_SIZE = 80 | MAX_BATCH_SIZE = 80 | ||||
# Stop executing on new hosts after failure | # Stop executing on new hosts after failure | ||||
MAX_FAILURES = 0 | MAX_FAILURES = 0 | ||||
Show All 22 Lines | class Deploy(DeployApplication): | ||||
@cli.argument('-s', '--stages', choices=DeployLocal.STAGES, | @cli.argument('-s', '--stages', choices=DeployLocal.STAGES, | ||||
help='Deployment stages to execute. Used only for testing.') | help='Deployment stages to execute. Used only for testing.') | ||||
@cli.argument('-l', '--limit-hosts', default='all', | @cli.argument('-l', '--limit-hosts', default='all', | ||||
help='Limit deploy to hosts matching expression') | help='Limit deploy to hosts matching expression') | ||||
def main(self, *extra_args): | def main(self, *extra_args): | ||||
logger = self.get_logger() | logger = self.get_logger() | ||||
self.repo = self.config['git_repo'] | self.repo = self.config['git_repo'] | ||||
self.repo_dir = os.path.join(self.config['git_deploy_dir'], self.repo) | |||||
self.scap_dir = os.path.join(self.repo_dir, 'scap') | |||||
deploy_dir = self.config['git_deploy_dir'] | deploy_dir = self.config['git_deploy_dir'] | ||||
cwd = os.getcwd() | cwd = os.getcwd() | ||||
if self.arguments.stages: | if self.arguments.stages: | ||||
stages = self.arguments.stages.split(',') | stages = self.arguments.stages.split(',') | ||||
else: | else: | ||||
stages = DeployLocal.STAGES | stages = DeployLocal.STAGES | ||||
▲ Show 20 Lines • Show All 73 Lines • ▼ Show 20 Lines | def config_deploy_setup(self, commit): | ||||
os.path.join(self.scap_dir, 'config-files.yaml'), | os.path.join(self.scap_dir, 'config-files.yaml'), | ||||
self.arguments.environment | self.arguments.environment | ||||
) | ) | ||||
logger.debug('Config deploy file: {}'.format(cfg_file)) | logger.debug('Config deploy file: {}'.format(cfg_file)) | ||||
if not os.path.isfile(cfg_file): | if not os.path.isfile(cfg_file): | ||||
return | return | ||||
config_file_path = os.path.join(self.repo_dir, '.git', 'config-files') | config_file_path = os.path.join(self.root_dir, '.git', 'config-files') | ||||
utils.mkdir_p(config_file_path) | utils.mkdir_p(config_file_path) | ||||
tmp_cfg_file = os.path.join(config_file_path, '{}.yaml'.format(commit)) | tmp_cfg_file = os.path.join(config_file_path, '{}.yaml'.format(commit)) | ||||
tmp_cfg = {} | tmp_cfg = {} | ||||
with open(cfg_file, 'r') as cf: | with open(cfg_file, 'r') as cf: | ||||
config_files = yaml.load(cf.read()) | config_files = yaml.load(cf.read()) | ||||
tmp_cfg['files'] = [] | tmp_cfg['files'] = [] | ||||
▲ Show 20 Lines • Show All 82 Lines • ▼ Show 20 Lines | def execute_stage(self, stage): | ||||
return 1 | return 1 | ||||
return 0 | return 0 | ||||
def _get_batch_size(self, stage): | def _get_batch_size(self, stage): | ||||
default = self.config.get('batch_size', self.MAX_BATCH_SIZE) | default = self.config.get('batch_size', self.MAX_BATCH_SIZE) | ||||
size = int(self.config.get('{}_batch_size'.format(stage), default)) | size = int(self.config.get('{}_batch_size'.format(stage), default)) | ||||
return min(size, self.MAX_BATCH_SIZE) | return min(size, self.MAX_BATCH_SIZE) | ||||
def _setup_loggers(self): | |||||
"""Sets up additional logging to `scap/deploy.log`.""" | |||||
if not os.path.exists(self.log_dir): | |||||
os.mkdir(self.log_dir) | |||||
basename = utils.git_describe(self.root_dir).replace('/', '-') | |||||
log_file = os.path.join(self.log_dir, '{}.log'.format(basename)) | |||||
log.setup_loggers(self.config, | |||||
self.arguments.loglevel, | |||||
handlers=[log.DeployLogHandler(log_file)]) | |||||
class DeployLog(DeployApplication): | |||||
"""Tail/filter/output events from the deploy logs | |||||
examples:: | |||||
deploy-log -v | |||||
deploy-log 'host == scap-target-01' | |||||
deploy-log 'msg ~ "some important (message|msg)"' | |||||
deploy-log 'levelno >= WARNING host == scap-target-*' | |||||
thcipriani: "taret" | |||||
""" | |||||
DATE_FORMAT = '%H:%M:%S' | |||||
DIR_SCAN_DELAY = 1 | |||||
FORMAT = '%(asctime)s [%(host)s] %(message)s' | |||||
@cli.argument('expr', metavar='EXPR', nargs='?', default='', | |||||
help='Filter expression.') | |||||
@cli.argument('-f', '--file', metavar='FILE', default=None, | |||||
help='Parse and filter an existing log file') | |||||
@cli.argument('-l', '--latest', action='store_true', | |||||
help='Parse and filter the latest log file') | |||||
def main(self, *extra_args): | |||||
if self.arguments.latest: | |||||
given_log = self._latest_log_file() | |||||
else: | |||||
given_log = self.arguments.file | |||||
Not Done Inline Actionscurrently must be an absolute directory which was unexpected from a usage perspective. Might be better to do: os.path.join(os.getcwd(), self.arguments.file) thcipriani: currently must be an absolute directory which was unexpected from a usage perspective. Might be… | |||||
Not Done Inline Actionsignore me, had the paths all messed up. thcipriani: ignore me, had the paths all messed up. | |||||
filter = log.Filter.loads(self.arguments.expr, filter=False) | |||||
if not filter.isfiltering('levelno'): | |||||
filter.append({'levelno': lambda v: v >= self.arguments.loglevel}) | |||||
formatter = log.AnsiColorFormatter(self.FORMAT, self.DATE_FORMAT) | |||||
if not os.path.exists(self.log_dir): | |||||
os.mkdir(self.log_dir) | |||||
cur_log_path = given_log | |||||
cur_log_file = open(given_log, 'r') if given_log else None | |||||
last_scan = 0 | |||||
# How we do: | |||||
# 1. read the next line from the current file | |||||
# 2. if there's output, parse the line, match it, print it | |||||
# 3. if there's no output, scan the log directory for a new file | |||||
# 4. if a newer log file is found, open it instead | |||||
# 5. repeat | |||||
Not Done Inline ActionsI like this algorithm. Good thinking. mmodell: I like this algorithm. Good thinking. | |||||
while True: | |||||
line = None | |||||
if cur_log_file: | |||||
line = cur_log_file.readline() | |||||
if line: | |||||
try: | |||||
record = log.JSONFormatter.make_record(line) | |||||
if filter.filter(record): | |||||
print formatter.format(record) | |||||
except (ValueError, TypeError): | |||||
pass | |||||
else: | |||||
if given_log: | |||||
# we were given a file and there's nothing more to read | |||||
break | |||||
now = time.time() | |||||
if (now - last_scan) > self.DIR_SCAN_DELAY: | |||||
last_scan = now | |||||
log_path = self._latest_log_file() | |||||
if log_path and log_path != cur_log_path: | |||||
print "-- Opening log file: '{}'".format(log_path) | |||||
cur_log_path = log_path | |||||
if cur_log_file: | |||||
cur_log_file.close() | |||||
cur_log_file = open(cur_log_path, 'r') | |||||
else: | |||||
time.sleep(0.1) | |||||
if cur_log_file: | |||||
cur_log_file.close() | |||||
return 0 | |||||
def _latest_log_file(self): | |||||
log_glob = os.path.join(self.log_dir, '*.log') | |||||
try: | |||||
return max(glob.iglob(log_glob), key=os.path.getmtime) | |||||
except ValueError: | |||||
return None | |||||
def _setup_loggers(self): | |||||
pass |
Content licensed under Creative Commons Attribution-ShareAlike 3.0 (CC-BY-SA) unless otherwise noted; code licensed under GNU General Public License (GPL) or other open source licenses. By using this site, you agree to the Terms of Use, Privacy Policy, and Code of Conduct. · Wikimedia Foundation · Privacy Policy · Code of Conduct · Terms of Use · Disclaimer · CC-BY-SA · GPL
"taret"