diff --git a/.gitignore b/.gitignore
index e276427..6318178 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,11 @@
-**/.cpcache
-**/.nrepl-port
-**/target
+*.box
+*.deb
+*.pem
+*.tar.*
+*.tfstate
+*.tfstate.backup
+/share/**/.SRCINFO
+/share/**/pkg
+/share/**/src
+/terraform/.terraform
+/terraform/proposed.plan
diff --git a/Makefile b/Makefile
index aa1f156..24add6f 100644
--- a/Makefile
+++ b/Makefile
@@ -1,3 +1,22 @@
-all:
+.PHONY: all debug
+
+COMMIT_REF=$(shell git rev-parse --short HEAD)
+
+all: clean
 	packer build \
-		arch.json
+		-var "commit_ref=$(COMMIT_REF)" \
+		rock.json
+
+# We need to clean up any build artefacts in the package directories because
+# they won't have the right permissions, and may conflict with what we want to
+# build inside the VM.
+clean:
+	rm -rf share/*/{src,pkg}
+
+debug:
+	packer build -debug \
+		-var "commit_ref=$(COMMIT_REF)" \
+		rock.json
+
+test:
+	shellcheck **/*.sh
diff --git a/arch.json b/arch.json
deleted file mode 100644
index 4f6367a..0000000
--- a/arch.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
-  "variables": {},
-  "builders": [
-    {
-      "profile": "mask",
-      "region": "eu-west-1"
-    },
-    {
-      "type": "amazon-ebs",
-      "region": "eu-west-1",
-      "source_ami": "ami-0b8ec472",
-      "instance_type": "t2.small",
-      "ssh_username": "root",
-      "ami_name": "juxt-arch-{{timestamp}}"
-    },
-    {
-      "ami_groups": ["all"]
-    }
-  ],
-  "provisioners": [
-    {
-      "type": "shell",
-      "script": "scripts/install-base.sh"
-    },
-    {
-      "type": "file",
-      "source": "/etc/timesyncd.conf",
-      "destination": "/etc/timesyncd.conf"
-    },
-    {
-      "type": "shell",
-      "script": "scripts/remove-llmnr.sh"
-    }
-  ],
-  "post-processors": [
-    "vagrant"
-  ]
-}
diff --git a/etc/makepkg.conf b/etc/makepkg.conf
new file mode 100644
index 0000000..a48f6d4
--- /dev/null
+++ b/etc/makepkg.conf
@@ -0,0 +1,147 @@
+#
+# /etc/makepkg.conf
+#
+
+#########################################################################
+# SOURCE ACQUISITION
+#########################################################################
+#
+#-- The download utilities that makepkg should use to acquire sources
+#  Format: 'protocol::agent'
+DLAGENTS=('ftp::/usr/bin/curl -fC - --ftp-pasv --retry 3 --retry-delay 3 -o %o %u'
+          'http::/usr/bin/curl -fLC - --retry 3 --retry-delay 3 -o %o %u'
+          'https::/usr/bin/curl -fLC - --retry 3 --retry-delay 3 -o %o %u'
+          'rsync::/usr/bin/rsync --no-motd -z %u %o'
+          'scp::/usr/bin/scp -C %u %o')
+
+# Other common tools:
+# /usr/bin/snarf
+# /usr/bin/lftpget -c
+# /usr/bin/wget
+
+#-- The package required by makepkg to download VCS sources
+#  Format: 'protocol::package'
+VCSCLIENTS=('bzr::bzr'
+            'git::git'
+            'hg::mercurial'
+            'svn::subversion')
+
+#########################################################################
+# ARCHITECTURE, COMPILE FLAGS
+#########################################################################
+#
+CARCH="x86_64"
+CHOST="x86_64-pc-linux-gnu"
+
+#-- Compiler and Linker Flags
+# -march (or -mcpu) builds exclusively for an architecture
+# -mtune optimizes for an architecture, but builds for whole processor family
+CPPFLAGS="-D_FORTIFY_SOURCE=2"
+CFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fstack-protector-strong -fno-plt"
+CXXFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fstack-protector-strong -fno-plt"
+LDFLAGS="-Wl,-O1,--sort-common,--as-needed,-z,relro,-z,now"
+#-- Make Flags: change this for DistCC/SMP systems
+#MAKEFLAGS="-j2"
+#-- Debugging flags
+DEBUG_CFLAGS="-g -fvar-tracking-assignments"
+DEBUG_CXXFLAGS="-g -fvar-tracking-assignments"
+######################################################################### +# BUILD ENVIRONMENT +######################################################################### +# +# Defaults: BUILDENV=(!distcc color !ccache check !sign) +# A negated environment option will do the opposite of the comments below. +# +#-- distcc: Use the Distributed C/C++/ObjC compiler +#-- color: Colorize output messages +#-- ccache: Use ccache to cache compilation +#-- check: Run the check() function if present in the PKGBUILD +#-- sign: Generate PGP signature file +# +BUILDENV=(!distcc color !ccache check !sign) +# +#-- If using DistCC, your MAKEFLAGS will also need modification. In addition, +#-- specify a space-delimited list of hosts running in the DistCC cluster. +#DISTCC_HOSTS="" +# +#-- Specify a directory for package building. +#BUILDDIR=/tmp/makepkg + +######################################################################### +# GLOBAL PACKAGE OPTIONS +# These are default values for the options=() settings +######################################################################### +# +# Default: OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge !optipng !upx !debug) +# A negated option will do the opposite of the comments below. +# +#-- strip: Strip symbols from binaries/libraries +#-- docs: Save doc directories specified by DOC_DIRS +#-- libtool: Leave libtool (.la) files in packages +#-- staticlibs: Leave static library (.a) files in packages +#-- emptydirs: Leave empty directories in packages +#-- zipman: Compress manual (man and info) pages in MAN_DIRS with gzip +#-- purge: Remove files specified by PURGE_TARGETS +#-- upx: Compress binary executable files using UPX +#-- optipng: Optimize PNG images with optipng +#-- debug: Add debugging flags as specified in DEBUG_* variables +# +OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge !optipng !upx !debug) + +#-- File integrity checks to use. Valid: md5, sha1, sha256, sha384, sha512 +INTEGRITY_CHECK=(md5) +#-- Options to be used when stripping binaries. See `man strip' for details. +STRIP_BINARIES="--strip-all" +#-- Options to be used when stripping shared libraries. See `man strip' for details. +STRIP_SHARED="--strip-unneeded" +#-- Options to be used when stripping static libraries. See `man strip' for details. 
+STRIP_STATIC="--strip-debug" +#-- Manual (man and info) directories to compress (if zipman is specified) +MAN_DIRS=({usr{,/local}{,/share},opt/*}/{man,info}) +#-- Doc directories to remove (if !docs is specified) +DOC_DIRS=(usr/{,local/}{,share/}{doc,gtk-doc} opt/*/{doc,gtk-doc}) +#-- Files to be removed from all packages (if purge is specified) +PURGE_TARGETS=(usr/{,share}/info/dir .packlist *.pod) + +######################################################################### +# PACKAGE OUTPUT +######################################################################### +# +# Default: put built package and cached source in build directory +# +#-- Destination: specify a fixed directory where all packages will be placed +PKGDEST=/var/cache/pacman/juxt +#-- Source cache: specify a fixed directory where source files will be cached +#SRCDEST=/home/sources +#-- Source packages: specify a fixed directory where all src packages will be placed +#SRCPKGDEST=/home/srcpackages +#-- Log files: specify a fixed directory where all log files will be placed +#LOGDEST=/home/makepkglogs +#-- Packager: name/email of the person or organization building packages +#PACKAGER="John Doe " +#-- Specify a key to use for package signing +#GPGKEY="" + +######################################################################### +# COMPRESSION DEFAULTS +######################################################################### +# +COMPRESSGZ=(gzip -c -f -n) +COMPRESSBZ2=(bzip2 -c -f) +COMPRESSXZ=(xz -c -z -) +COMPRESSLRZ=(lrzip -q) +COMPRESSLZO=(lzop -q) +COMPRESSZ=(compress -c -f) + +######################################################################### +# EXTENSION DEFAULTS +######################################################################### +# +# WARNING: Do NOT modify these variables unless you know what you are +# doing. +# +PKGEXT='.pkg.tar.xz' +SRCEXT='.src.tar.gz' + +# vim: set ft=sh ts=2 sw=2 et: diff --git a/etc/sudoers b/etc/sudoers new file mode 100644 index 0000000..f4a262a --- /dev/null +++ b/etc/sudoers @@ -0,0 +1,97 @@ +## sudoers file. +## +## This file MUST be edited with the 'visudo' command as root. +## Failure to use 'visudo' may result in syntax or file permission errors +## that prevent sudo from running. +## +## See the sudoers man page for the details on how to write a sudoers file. +## + +## +## Host alias specification +## +## Groups of machines. These may include host names (optionally with wildcards), +## IP addresses, network numbers or netgroups. +# Host_Alias WEBSERVERS = www1, www2, www3 + +## +## User alias specification +## +## Groups of users. These may consist of user names, uids, Unix groups, +## or netgroups. +# User_Alias ADMINS = millert, dowdy, mikef + +## +## Cmnd alias specification +## +## Groups of commands. Often used to group related commands together. +# Cmnd_Alias PROCESSES = /usr/bin/nice, /bin/kill, /usr/bin/renice, \ +# /usr/bin/pkill, /usr/bin/top +# Cmnd_Alias REBOOT = /sbin/halt, /sbin/reboot, /sbin/poweroff + +## +## Defaults specification +## +## You may wish to keep some of the following environment variables +## when running commands via sudo. +## +## Locale settings +# Defaults env_keep += "LANG LANGUAGE LINGUAS LC_* _XKB_CHARSET" +## +## Run X applications through sudo; HOME is used to find the +## .Xauthority file. Note that other programs use HOME to find +## configuration files and this may lead to privilege escalation! 
+# Defaults env_keep += "HOME" +## +## X11 resource path settings +# Defaults env_keep += "XAPPLRESDIR XFILESEARCHPATH XUSERFILESEARCHPATH" +## +## Desktop path settings +# Defaults env_keep += "QTDIR KDEDIR" +## +## Allow sudo-run commands to inherit the callers' ConsoleKit session +# Defaults env_keep += "XDG_SESSION_COOKIE" +## +## Uncomment to enable special input methods. Care should be taken as +## this may allow users to subvert the command being run via sudo. +# Defaults env_keep += "XMODIFIERS GTK_IM_MODULE QT_IM_MODULE QT_IM_SWITCHER" +## +## Uncomment to use a hard-coded PATH instead of the user's to find commands +# Defaults secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" +## +## Uncomment to send mail if the user does not enter the correct password. +# Defaults mail_badpass +## +## Uncomment to enable logging of a command's output, except for +## sudoreplay and reboot. Use sudoreplay to play back logged sessions. +# Defaults log_output +# Defaults!/usr/bin/sudoreplay !log_output +# Defaults!/usr/local/bin/sudoreplay !log_output +# Defaults!REBOOT !log_output + +## +## Runas alias specification +## + +## +## User privilege specification +## +root ALL=(ALL) ALL + +## Uncomment to allow members of group wheel to execute any command +# %wheel ALL=(ALL) ALL + +## Same thing without a password +%wheel ALL=(ALL) NOPASSWD: ALL + +## Uncomment to allow members of group sudo to execute any command +# %sudo ALL=(ALL) ALL + +## Uncomment to allow any user to run sudo if they know the password +## of the user they are running the command as (root by default). +# Defaults targetpw # Ask for the password of the target user +# ALL ALL=(ALL) ALL # WARNING: only use this together with 'Defaults targetpw' + +## Read drop-in files from /etc/sudoers.d +## (the '#' here does not indicate a comment) +#includedir /etc/sudoers.d diff --git a/rock.json b/rock.json new file mode 100644 index 0000000..edf5090 --- /dev/null +++ b/rock.json @@ -0,0 +1,67 @@ +{ + "_comment": "Keys prefixed with an underscore are comments.", + "variables": { + "commit_ref": "none", + "juxt_pkg_dir": "/var/lib/juxt-pkgbuilds", + "juxt_repo_dir": "/var/cache/pacman/juxt" + }, + "builders": [ + { + "type": "amazon-ebs", + "region": "eu-west-1", + "source_ami": "ami-0b8ec472", + "instance_type": "t2.small", + "ssh_username": "root", + "ami_name": "juxt-rock-{{user `commit_ref`}}-{{timestamp}}" + } + ], + "provisioners": [ + { + "type": "shell", + "script": "scripts/remove-llmnr.sh" + }, + { + "type": "file", + "source": "etc/timesyncd.conf", + "destination": "/etc/timesyncd.conf" + }, + { + "type": "file", + "source": "etc/sudoers", + "destination": "/etc/sudoers" + }, + { + "type": "file", + "source": "etc/makepkg.conf", + "destination": "/etc/makepkg.conf" + }, + { + "type": "shell", + "script": "scripts/install-base.sh", + "environment_vars": [ + "PKG_DIR={{user `juxt_pkg_dir`}}", + "REPO_DIR={{user `juxt_repo_dir`}}" + ] + }, + { + "type": "file", + "source": "share/", + "destination": "{{user `juxt_pkg_dir`}}" + }, + { + "type": "shell", + "script": "scripts/install-custom.sh", + "environment_vars": [ + "PKG_DIR={{user `juxt_pkg_dir`}}", + "REPO_DIR={{user `juxt_repo_dir`}}" + ] + }, + { + "type": "shell", + "script": "scripts/install-clojure.sh" + } + ], + "post-processors": [ + "vagrant" + ] +} diff --git a/scripts/install-base.sh b/scripts/install-base.sh index 07d29e3..25de955 100755 --- a/scripts/install-base.sh +++ b/scripts/install-base.sh @@ -2,17 +2,67 @@ set -eux -# jdk8-openjdk: many of 
our applications run on a JVM
+# -----------------------------------------------------------------------------
+# Fix sudoers permissions
+
+chown root:root /etc/sudoers
+chmod 440 /etc/sudoers
+
+# -----------------------------------------------------------------------------
+# Fix makepkg.conf permissions
+
+chown root:root /etc/makepkg.conf
+chmod 644 /etc/makepkg.conf
+
+# -----------------------------------------------------------------------------
+# Rock user
+
+# We need a user to make packages later, as Arch doesn't like you building
+# packages as root. The wheel group gives us passwordless sudo via the `sudoers`
+# file we install.
+useradd \
+  --create-home \
+  --shell /usr/bin/bash \
+  --groups "wheel" \
+  rock
+
+# -----------------------------------------------------------------------------
+# Base packages
+
 # aws-cli: command-line access to AWS API
 # git: version control - access to source code
-# wget: access to terraform software
 # termite-terminfo: required to make termite work properly when ssh'ing
+# wget: access to terraform software
 pacman --noconfirm -Syu \
   aws-cli \
-  clojure \
   git \
-  jdk8-openjdk \
   termite-terminfo \
   unzip \
-  wget \
+  wget
+
+# -----------------------------------------------------------------------------
+# Private repo
+
+cat <<EOF > /etc/pacman.d/juxt
+[options]
+CacheDir = /var/cache/pacman/pkg
+CacheDir = $REPO_DIR
+CleanMethod = KeepCurrent
+
+[juxt]
+SigLevel = Optional TrustAll
+Server = file://$REPO_DIR
+EOF
+
+cat <<EOF >> /etc/pacman.conf
+Include = /etc/pacman.d/juxt
+EOF
+
+install --owner root --group wheel --mode 774 -d "$PKG_DIR"
+install --owner root --group wheel --mode 774 -d "$REPO_DIR"
+
+# Create a new repo that the rock user can update.
+repo-add "$REPO_DIR"/juxt.db.tar
+chown root:wheel "$REPO_DIR"/juxt.{db,files}.tar
+chmod 664 "$REPO_DIR"/juxt.{db,files}.tar
diff --git a/scripts/install-clojure.sh b/scripts/install-clojure.sh
new file mode 100755
index 0000000..ee88c03
--- /dev/null
+++ b/scripts/install-clojure.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+set -eux
+
+# -----------------------------------------------------------------------------
+# Packages from core/extra/community etc.
+
+pacman --noconfirm -Sy \
+  clojure \
+  jdk8-openjdk
+
+# -----------------------------------------------------------------------------
+# AUR packages
+
+# First we need to pull down the package definitions, and build the packages.
+#
+# NOTE: We don't preserve the environment because we don't want to use root's
+# home directory.
+su -c 'aursync boot leiningen' rock
+
+# With the packages now cached locally we can install them with Pacman.
+pacman --noconfirm -Sy \
+  boot \
+  leiningen
diff --git a/scripts/install-custom.sh b/scripts/install-custom.sh
new file mode 100755
index 0000000..a714f9d
--- /dev/null
+++ b/scripts/install-custom.sh
@@ -0,0 +1,88 @@
+#!/usr/bin/env bash
+
+set -eux
+
+# -----------------------------------------------------------------------------
+# Utils
+
+build_pkgs () {
+  su --preserve-environment \
+     -c 'yes | aurbuild -d juxt -a <(aurqueue *)' rock
+}
+
+install_pkgs () {
+  # We sync pacman to pick up changes to the juxt repo.
+  pacman --noconfirm -Syu "$@"
+}
+
+# -----------------------------------------------------------------------------
+# aurutils
+
+AURUTILS_DIR="/tmp/aurutils"
+
+git clone https://aur.archlinux.org/aurutils.git "$AURUTILS_DIR"
+chmod go+w "$AURUTILS_DIR" # So we can create .SRCINFO files as the rock user.
+cd "$AURUTILS_DIR"
+
+# Install dependencies ourselves because we won't be able to elevate ourselves
+# via sudo when we `makepkg`.
+install_pkgs \
+  jq \
+  pacutils \
+  repose
+
+# You can't run makepkg as root, so we need to demote ourselves to a regular
+# user.
+#
+# We skip the PGP check because we won't have AladW's GPG key at this point.
+#
+# TODO Pull in GPG key and use it to check package.
+su --preserve-environment -c 'makepkg --skippgpcheck' rock
+
+# We could install the built package immediately with `pacman -U` at this point,
+# but to treat aurutils the same way we will treat other AUR/private packages we
+# do the update-repo-then-install-from-repo dance instead.
+#
+# This approach also has the advantage of keeping the packaged version of
+# aurutils around if someone ever wants/needs to downgrade.
+repose -vf juxt -r /var/cache/pacman/juxt
+install_pkgs aurutils
+
+# Now that we've installed aurutils we can remove the AUR repo.
+rm -rf "$AURUTILS_DIR"
+
+# -----------------------------------------------------------------------------
+# Custom packages
+
+# Recursively set permissions so `Makefile` et al can be found by the rock user.
+chown -R root:wheel "$PKG_DIR"
+chmod -R 774 "$PKG_DIR"
+
+# This is the directory in the juxt/rock repo with all the directories
+# containing PKGBUILD files.
+cd "$PKG_DIR"
+
+# Packages in the AUR etc. have both a `PKGBUILD` file and a `.SRCINFO`. As we
+# don't want to have to remember to regenerate the .SRCINFO every time we edit
+# a package definition, we create the files only when we need them.
+#
+# We use make because it avoids the shell-escaping trouble we'd have with
+# something like `find` plus output redirection.
+su --preserve-environment -c 'make' rock
+
+# Restore permissions.
+chown -R root:wheel "$PKG_DIR"
+chmod -R 774 "$PKG_DIR"
+
+# Now we queue up all the directories containing package definitions and build
+# them into the juxt repo.
+build_pkgs
+
+# Install
+install_pkgs \
+  codedeploy-agent \
+  journald-cloud-watch-script \
+  systemd-cloud-watch
+
+# Installing additional packages from the AUR can now be done like so:
+# aursync ...
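As a rough sketch of the workflow these scripts set up (not part of this patch; "mypkg" is a hypothetical package directory and the paths assume the defaults wired through rock.json and makepkg.conf), adding or updating a private package on an image built this way might look like:

# Regenerate the .SRCINFO and build as the unprivileged rock user; PKGDEST in
# /etc/makepkg.conf drops the result into /var/cache/pacman/juxt.
su -c 'cd /var/lib/juxt-pkgbuilds/mypkg && makepkg --printsrcinfo > .SRCINFO' rock
su -c 'cd /var/lib/juxt-pkgbuilds/mypkg && makepkg --force' rock
# Add the freshly built package to the [juxt] repo database...
repo-add /var/cache/pacman/juxt/juxt.db.tar /var/cache/pacman/juxt/mypkg-*.pkg.tar.xz
# ...and install it through pacman like any other package.
pacman --noconfirm -Syu mypkg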
diff --git a/share/.gitignore b/share/.gitignore deleted file mode 100644 index d7b325d..0000000 --- a/share/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -*.deb -*.tar.* -pkg -src diff --git a/share/Makefile b/share/Makefile new file mode 100644 index 0000000..5ad2090 --- /dev/null +++ b/share/Makefile @@ -0,0 +1,8 @@ +.PHONY: force + +DIRS=$(sort $(dir $(wildcard */PKGBUILD))) + +all: $(DIRS) + +$(DIRS): force + cd $@ && makepkg --printsrcinfo > .SRCINFO diff --git a/share/codedeploy/PKGBUILD b/share/codedeploy-agent/PKGBUILD similarity index 89% rename from share/codedeploy/PKGBUILD rename to share/codedeploy-agent/PKGBUILD index 582f8c1..8dd2ca4 100644 --- a/share/codedeploy/PKGBUILD +++ b/share/codedeploy-agent/PKGBUILD @@ -1,4 +1,5 @@ -# Maintainer: James Conroy-Finn +# Maintainer: JUXT +# Contributor: James Conroy-Finn pkgname=codedeploy-agent pkgver=1.0_1.1458 pkgrel=1 @@ -17,14 +18,6 @@ prepare() { tar -xf data.tar.gz } -# build() { -# cd "$srcdir" -# } - -# check() { -# cd "$srcdir" -# } - package() { cd "$srcdir" diff --git a/share/codedeploy/Makefile b/share/codedeploy/Makefile deleted file mode 100644 index ad056a6..0000000 --- a/share/codedeploy/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -.PHONY: clean install uninstall - -clean: - rm -rf pkg src - -pkg: - makepkg --force - -install: - makepkg --force --syncdeps --install - -uninstall: - sudo pacman -Rs codedeploy-agent diff --git a/share/codedeploy/README.org b/share/codedeploy/README.org deleted file mode 100644 index aef877e..0000000 --- a/share/codedeploy/README.org +++ /dev/null @@ -1,136 +0,0 @@ -#+TITLE: CodeDeploy Arch Package -#+PROPERTY: header-args :var region="eu-west-1", bucket="aws-codedeploy-eu-west-1", version="latest/VERSION" - -* Getting the install script -Amazon say to install Ruby, wget and then download an installation script from a -region-specific bucket. - -In our case we want to start by pulling down the install script to see what it -does. - -#+BEGIN_SRC shell - curl https://aws-codedeploy-eu-central-1.s3.amazonaws.com/latest/install > install.rb -#+END_SRC - -#+RESULTS: - -The installation script is written in Ruby, and will be downloaded to -[[file:install.rb][install.rb]]. I've added it to the repo so we can keep track of any changes that -may occur in future more easily. - -* Poking around -[[https://docs.aws.amazon.com/codedeploy/latest/userguide/codedeploy-agent-operations-install-ubuntu.html][Amazon's docs]] recommend running the following to install CodeDeploy: - -#+BEGIN_SRC shell -sudo apt-get update -sudo apt-get install ruby # Or ruby2.0 on older versions of Ubuntu. -sudo apt-get install wget -cd /home/ubuntu -wget https://bucket-name.s3.amazonaws.com/latest/install -chmod +x ./install -sudo ./install auto -#+END_SRC - -The install script checks you have a supported Ruby version where it currently -supports the following versions: - -#+BEGIN_SRC shell :results output verbatim - grep -A 2 'def supported_ruby_versions' install.rb -#+END_SRC - -#+RESULTS: -: def supported_ruby_versions -: ['2.4', '2.3', '2.2', '2.1', '2.0'] -: end - -The script may have to be run inside AWS because it looks up availability zones -etc. via a self-assigned (so that hosts can communicate without DHCP) -~169.254.169.254~ address. - -There's one big script wrapped inside a ~begin/rescue~ block. Methods are -defined outside of any explicit class. - -The ~auto~ argument runs code that will try to detect the OS package manager, -which it sets via ~@type = "rpm"~ etc. 
- -Ultimately, ~install_from_s3~ is called with a region, bucket, version file key, -and some additional optional arguments. - -#+BEGIN_SRC shell :results output verbatim - grep -B 1 'install_from_s3' install.rb | tail -n +3 # To get rid of the def install_from_s3 -#+END_SRC - -#+RESULTS: -: -- -: install_cmd = ['/usr/bin/yum', '-y', 'localinstall'] -: install_from_s3(region, bucket, version_file_key, @type, install_cmd) -: -- -: install_cmd = ['/usr/bin/gdebi', '-n', '-o', 'Dpkg::Options::=--force-confdef', '-o', 'Dkpg::Options::=--force-confold'] -: install_from_s3(region, bucket, version_file_key, @type, install_cmd) -: -- -: install_cmd = ['/usr/bin/zypper', 'install', '-n'] -: install_from_s3(region, bucket, version_file_key, 'rpm', install_cmd) - -** Getting CodeDeploy version info -#+BEGIN_SRC ruby - region = get_region - bucket = "aws-codedeploy-#{region}" - version_file_key = 'latest/VERSION' -#+END_SRC - -#+BEGIN_SRC ruby - def get_region - @log.info('Checking AWS_REGION environment variable for region information...') - region = ENV['AWS_REGION'] - return region if region - - @log.info('Checking EC2 metadata service for region information...') - region = get_ec2_metadata_region - return region if region - - @log.info('Using fail-safe default region: us-east-1') - return 'us-east-1' - end -#+END_SRC - -There's a version file we want to get our hands on. - -#+BEGIN_SRC shell :results output verbatim - curl https://$bucket.s3-$region.amazonaws.com/$version | jq . -#+END_SRC - -#+RESULTS: -: { -: "rpm": "releases/codedeploy-agent-1.0-1.1458.noarch.rpm", -: "deb": "releases/codedeploy-agent_1.0-1.1458_all.deb", -: "msi": "releases/codedeploy-agent-1.0.1.1458.msi" -: } - -** Debian package -Let's download the Debian pkg and see what's inside! - -#+BEGIN_SRC shell :results output raw - echo https://$bucket.s3-$region.amazonaws.com/releases/codedeploy-agent_1.0-1.1458_all.deb -#+END_SRC - -#+NAME: debian-url -#+RESULTS: -https://aws-codedeploy-eu-west-1.s3-eu-west-1.amazonaws.com/releases/codedeploy-agent_1.0-1.1458_all.deb - -#+BEGIN_SRC shell - curl https://$bucket.s3-$region.amazonaws.com/releases/codedeploy-agent_1.0-1.1458_all.deb > codedeploy-agent.deb -#+END_SRC - -#+RESULTS: - -Inside the Debian package there's a ~data.tar.gz`~ archive with all the -CodeDeploy code inside. - -*** Listing contents -#+BEGIN_SRC shell :results output verbatim - ar -x codedeploy-agent.deb - # We only need the data.tar.gz - rm control.tar.gz debian-binary - tar -tvf data.tar.gz - rm data.tar.gz -#+END_SRC diff --git a/share/codedeploy/install.rb b/share/codedeploy/install.rb deleted file mode 100644 index dea8803..0000000 --- a/share/codedeploy/install.rb +++ /dev/null @@ -1,405 +0,0 @@ -#!/usr/bin/env ruby - -################################################################## -# This part of the code might be running on Ruby versions other -# than 2.0. Testing on multiple Ruby versions is required for -# changes to this part of the code. -################################################################## - -class Proxy - instance_methods.each do |m| - undef_method m unless m =~ /(^__|^send$|^object_id$)/ - end - - def initialize(*targets) - @targets = targets - end - - protected - - def method_missing(name, *args, &block) - @targets.map do |target| - target.__send__(name, *args, &block) - end - end -end - -log_file_path = "/tmp/codedeploy-agent.update.log" - -require 'logger' - -if($stdout.isatty) - # if we are being run in a terminal, log to stdout and the log file. 
- @log = Logger.new(Proxy.new(File.open(log_file_path, 'a+'), $stdout)) -else - # keep at most 2MB of old logs rotating out 1MB at a time - @log = Logger.new(log_file_path, 2, 1048576) - # make sure anything coming out of ruby ends up in the log file - $stdout.reopen(log_file_path, 'a+') - $stderr.reopen(log_file_path, 'a+') -end - -@log.level = Logger::INFO - -begin - require 'fileutils' - require 'openssl' - require 'open-uri' - require 'uri' - require 'getoptlong' - require 'tempfile' - - def usage - print < - --sanity-check [optional] - --proxy [optional] - package-type: 'rpm', 'deb', or 'auto' - -Installs fetches the latest package version of the specified type and -installs it. rpms are installed with yum; debs are installed using gdebi. - -This program is invoked automatically to update the agent once per day using -the same package manager the codedeploy-agent is initially installed with. - -To use this script for a hands free install on any system specify a package -type of 'auto'. This will detect if yum or gdebi is present on the system -and select the one present if possible. If both rpm and deb package managers -are detected the automatic detection will abort -When using the automatic setup, if the system has apt-get but not gdebi, -the gdebi will be installed using apt-get first. - -If --sanity-check is specified, the install script will wait for 3 minutes post installation -to check for a running agent. - -To use a HTTP proxy, specify --proxy followed by the proxy server -defined by http://hostname:port - -This install script needs Ruby version 2.x installed as a prerequisite. -Currently recommanded Ruby versions are 2.0.0, 2.1.8, 2.2.4 and 2.3.0, and 2.4.0. -If multiple Ruby versions are installed, the default ruby version will be used. -If the default ruby version does not satisfy reqirement, the newest version will be used. -If you do not have a supported Ruby version installed, please install one of them first. - -EOF - end - - def supported_ruby_versions - ['2.4', '2.3', '2.2', '2.1', '2.0'] - end - - # check ruby version, only version 2.x works - def check_ruby_version_and_symlink - @log.info("Starting Ruby version check.") - actual_ruby_version = RUBY_VERSION.split('.').map{|s|s.to_i}[0,2] - - supported_ruby_versions.each do |version| - if ((actual_ruby_version <=> version.split('.').map{|s|s.to_i}) == 0) - return File.join(RbConfig::CONFIG["bindir"], RbConfig::CONFIG["RUBY_INSTALL_NAME"] + RbConfig::CONFIG["EXEEXT"]) - end - end - - supported_ruby_versions.each do |version| - if(File.exist?("/usr/bin/ruby#{version}")) - return "/usr/bin/ruby#{version}" - elsif (File.symlink?("/usr/bin/ruby#{version}")) - @log.error("The symlink /usr/bin/ruby#{version} exists, but it's linked to a non-existent directory or non-executable file.") - exit(1) - end - end - - unsupported_ruby_version_error - exit(1) - end - - def unsupported_ruby_version_error - @log.error("Current running Ruby version for "+ENV['USER']+" is "+RUBY_VERSION+", but Ruby version 2.x needs to be installed.") - @log.error('If you already have the proper Ruby version installed, please either create a symlink to /usr/bin/ruby2.x,') - @log.error( "or run this install script with right interpreter. 
Otherwise please install Ruby 2.x for "+ENV['USER']+" user.") - @log.error('You can get more information by running the script with --help option.') - end - - def parse_args() - if (ARGV.length > 4) - usage - @log.error('Too many arguments.') - exit(1) - elsif (ARGV.length < 1) - usage - @log.error('Expected package type as argument.') - exit(1) - end - - @sanity_check = false - @reexeced = false - @http_proxy = nil - - @args = Array.new(ARGV) - opts = GetoptLong.new(['--sanity-check', GetoptLong::NO_ARGUMENT], ['--help', GetoptLong::NO_ARGUMENT], ['--re-execed', GetoptLong::NO_ARGUMENT], ['--proxy', GetoptLong::OPTIONAL_ARGUMENT]) - opts.each do |opt, args| - case opt - when '--sanity-check' - @sanity_check = true - when '--help' - usage - when '--re-execed' - @reexeced = true - when '--proxy' - if (args != '') - @http_proxy = args - end - end - end - if (ARGV.length < 1) - usage - @log.error('Expected package type as argument.') - exit(1) - end - @type = ARGV.shift.downcase; - end - - def force_ruby2x(ruby_interpreter_path) - # change interpreter when symlink /usr/bin/ruby2.x exists, but running with non-supported ruby version - actual_ruby_version = RUBY_VERSION.split('.').map{|s|s.to_i} - left_bound = '2.0.0'.split('.').map{|s|s.to_i} - right_bound = '2.4.1'.split('.').map{|s|s.to_i} - if (actual_ruby_version <=> left_bound) < 0 - if(!@reexeced) - @log.info("The current Ruby version is not 2.x! Restarting the installer with #{ruby_interpreter_path}") - exec("#{ruby_interpreter_path}", __FILE__, '--re-execed' , *@args) - else - unsupported_ruby_version_error - exit(1) - end - elsif ((actual_ruby_version <=> right_bound) > 0) - @log.warn("The Ruby version in #{ruby_interpreter_path} is "+RUBY_VERSION+", . Attempting to install anyway.") - end - end - - if (Process.uid != 0) - @log.error('Must run as root to install packages') - exit(1) - end - - parse_args() - - ########## Force running as Ruby 2.x or fail here ########## - ruby_interpreter_path = check_ruby_version_and_symlink - force_ruby2x(ruby_interpreter_path) - - def run_command(*args) - exit_ok = system(*args) - $stdout.flush - $stderr.flush - @log.debug("Exit code: #{$?.exitstatus}") - return exit_ok - end - - def get_ec2_metadata_region - begin - uri = URI.parse('http://169.254.169.254/latest/meta-data/placement/availability-zone') - az = uri.read(:read_timeout => 120) - az.strip - rescue - @log.warn("Could not get region from EC2 metadata service at '#{uri.to_s}'") - return nil - end - - if (az !~ /[a-z]{2}-[a-z]+-\d+[a-z]/) - @log.warn("Invalid availability zone name: '#{az}'.") - return nil - else - return az.chop - end - end - - def get_region - @log.info('Checking AWS_REGION environment variable for region information...') - region = ENV['AWS_REGION'] - return region if region - - @log.info('Checking EC2 metadata service for region information...') - region = get_ec2_metadata_region - return region if region - - @log.info('Using fail-safe default region: us-east-1') - return 'us-east-1' - end - - def get_s3_uri(region, bucket, key) - if (region == 'us-east-1') - URI.parse("https://#{bucket}.s3.amazonaws.com/#{key}") - elsif (region.split("-")[0] == 'cn') - URI.parse("https://#{bucket}.s3.#{region}.amazonaws.com.cn/#{key}") - else - URI.parse("https://#{bucket}.s3-#{region}.amazonaws.com/#{key}") - end - end - - def get_package_from_s3(region, bucket, key, package_file) - @log.info("Downloading package from bucket #{bucket} and key #{key}...") - - uri = get_s3_uri(region, bucket, key) - - # stream package file to disk - 
begin - uri.open(:ssl_verify_mode => OpenSSL::SSL::VERIFY_PEER, :redirect => true, :read_timeout => 120, :proxy => @http_proxy) do |s3| - package_file.write(s3.read) - end - rescue OpenURI::HTTPError => e - @log.error("Could not find package to download at '#{uri.to_s}'") - exit(1) - end - end - - def get_version_file_from_s3(region, bucket, key) - @log.info("Downloading version file from bucket #{bucket} and key #{key}...") - - uri = get_s3_uri(region, bucket, key) - - begin - require 'json' - - version_string = uri.read(:ssl_verify_mode => OpenSSL::SSL::VERIFY_PEER, :redirect => true, :read_timeout => 120, :proxy => @http_proxy) - JSON.parse(version_string) - rescue OpenURI::HTTPError => e - @log.error("Could not find version file to download at '#{uri.to_s}'") - exit(1) - end - end - - def install_from_s3(region, bucket, version_file_key, type, install_cmd) - version_data = get_version_file_from_s3(region, bucket, version_file_key) - - package_key = version_data[type] - package_base_name = File.basename(package_key) - package_extension = File.extname(package_base_name) - package_name = File.basename(package_base_name, package_extension) - package_file = Tempfile.new(["#{package_name}.tmp-", package_extension]) # unique file with 0600 permissions - - get_package_from_s3(region, bucket, package_key, package_file) - package_file.close - - install_cmd << package_file.path - @log.info("Executing `#{install_cmd.join(" ")}`...") - - if (!run_command(*install_cmd)) - @log.error("Error installing #{package_file.path}.") - package_file.unlink - exit(1) - end - - package_file.unlink - end - - def do_sanity_check(cmd) - if @sanity_check - @log.info("Waiting for 3 minutes before I check for a running agent") - sleep(3 * 60) - res = run_command(cmd, 'codedeploy-agent', 'status') - if (res.nil? || res == false) - @log.info("No codedeploy agent seems to be running. Starting the agent.") - run_command(cmd, 'codedeploy-agent', 'start-no-update') - end - end - end - - @log.info("Starting update check.") - - if (@type == 'auto') - @log.info('Attempting to automatically detect supported package manager type for system...') - - has_yum = run_command('which yum >/dev/null 2>/dev/null') - has_apt_get = run_command('which apt-get >/dev/null 2>/dev/null') - has_gdebi = run_command('which gdebi >/dev/null 2>/dev/null') - has_zypper = run_command('which zypper >/dev/null 2>/dev/null') - - if (has_yum && (has_apt_get || has_gdebi)) - @log.error('Detected both supported rpm and deb package managers. Please specify which package type to use manually.') - exit(1) - end - - if(has_yum) - @type = 'rpm' - elsif(has_zypper) - @type = 'zypper' - elsif(has_gdebi) - @type = 'deb' - elsif(has_apt_get) - @type = 'deb' - - @log.warn('apt-get found but no gdebi. Installing gdebi with `apt-get install gdebi-core -y`...') - #use -y to answer yes to confirmation prompts - if(!run_command('/usr/bin/apt-get', 'install', 'gdebi-core', '-y')) - @log.error('Could not install gdebi.') - exit(1) - end - else - @log.error('Could not detect any supported package managers.') - exit(1) - end - end - - region = get_region - bucket = "aws-codedeploy-#{region}" - version_file_key = 'latest/VERSION' - - case @type - when 'help' - usage - when 'rpm' - running_version = `rpm -q codedeploy-agent` - running_version.strip! - target_version = get_version_file_from_s3(region, bucket, version_file_key)['rpm'] - if target_version.include? 
running_version - @log.info('Running version matches target version, skipping install') - else - #use -y to answer yes to confirmation prompts - install_cmd = ['/usr/bin/yum', '-y', 'localinstall'] - install_from_s3(region, bucket, version_file_key, @type, install_cmd) - do_sanity_check('/sbin/service') - end - when 'deb' - running_agent = `dpkg -s codedeploy-agent` - running_agent_info = running_agent.split - version_index = running_agent_info.index('Version:') - if !version_index.nil? - running_version = running_agent_info[version_index + 1] - else - running_version = "No running version" - end - @log.info("Running version " + running_version) - target_version = get_version_file_from_s3(region, bucket, version_file_key)['deb'] - if target_version.include? running_version - @log.info('Running version matches target version, skipping install') - else - #use -n for non-interactive mode - #use -o to not overwrite config files unless they have not been changed - install_cmd = ['/usr/bin/gdebi', '-n', '-o', 'Dpkg::Options::=--force-confdef', '-o', 'Dkpg::Options::=--force-confold'] - install_from_s3(region, bucket, version_file_key, @type, install_cmd) - do_sanity_check('/usr/sbin/service') - end - when 'zypper' - #use -n for non-interactive mode - install_cmd = ['/usr/bin/zypper', 'install', '-n'] - install_from_s3(region, bucket, version_file_key, 'rpm', install_cmd) - else - @log.error("Unsupported package type '#{@type}'") - exit(1) - end - - @log.info("Update check complete.") - @log.info("Stopping updater.") - -rescue SystemExit => e - # don't log exit() as an error - raise e -rescue Exception => e - # make sure all unhandled exceptions are logged to the log - @log.error("Unhandled exception: #{e.inspect}") - e.backtrace.each do |line| - @log.error(" at " + line) - end - exit(1) -end diff --git a/share/journald-cloud-watch-script/PKGBUILD b/share/journald-cloud-watch-script/PKGBUILD new file mode 100644 index 0000000..4df1c04 --- /dev/null +++ b/share/journald-cloud-watch-script/PKGBUILD @@ -0,0 +1,28 @@ +# Maintainer: James Conroy-Finn +pkgname=journald-cloud-watch-script +pkgver=0.1 +pkgrel=1 +pkgdesc="Bash script used to forward logs." 
+arch=('i686' 'x86_64')
+url="https://github.com/juxt/rock"
+license=('GPL')
+groups=()
+depends=('aws-cli' 'bash' 'jq')
+source=(tail-journald
+        journald-cloud-watch-script.service)
+md5sums=('8907d8a5eadddd7c36730b4e7eff0f11'
+         '957ed4f7b25b8246aa518aee1474a20e')
+
+package() {
+  cd "$srcdir"
+
+  install -Dm755 \
+    "${srcdir}"/tail-journald \
+    "${pkgdir}"/usr/bin/tail-journald
+
+  install -Dm744 \
+    "${srcdir}"/journald-cloud-watch-script.service \
+    "${pkgdir}"/usr/lib/systemd/system/journald-cloud-watch-script.service
+}
+
+# vim:set ts=2 sw=2 et:
diff --git a/share/journald-cloud-watch-script/journald-cloud-watch-script.service b/share/journald-cloud-watch-script/journald-cloud-watch-script.service
new file mode 100644
index 0000000..a5c7433
--- /dev/null
+++ b/share/journald-cloud-watch-script/journald-cloud-watch-script.service
@@ -0,0 +1,12 @@
+[Unit]
+Description=Forward journald logs to CloudWatch via a Bash script
+Wants=basic.target
+After=basic.target network.target
+
+[Service]
+User=nobody
+Group=nobody
+ExecStart=/usr/bin/tail-journald
+KillMode=process
+Restart=on-failure
+RestartSec=42s
diff --git a/share/journald-cloud-watch-script/tail-journald b/share/journald-cloud-watch-script/tail-journald
new file mode 100755
index 0000000..e363d6c
--- /dev/null
+++ b/share/journald-cloud-watch-script/tail-journald
@@ -0,0 +1,84 @@
+#!/usr/bin/env bash
+
+set -Eeuo pipefail
+
+INSTANCE_ID="${INSTANCE_ID:-$(curl -s http://169.254.169.254/latest/meta-data/instance-id)}"
+# The metadata service reports the availability zone (e.g. eu-west-1a); strip
+# the trailing letter to get a region the AWS CLI will accept.
+AWS_REGION="${AWS_REGION:-$(curl -s http://169.254.169.254/latest/meta-data/placement/availability-zone | sed 's/[a-z]$//')}"
+LOG_GROUP_NAME="${LOG_GROUP_NAME:-rock}"
+LOG_STREAM_NAME="$(hostname)"
+
+AWS_CURSOR_FILE="/var/lib/journald-cloud-watch-script/aws-cursor"
+JOURNAL_CURSOR_FILE="/var/lib/journald-cloud-watch-script/journal-cursor"
+
+mkdir -p "/var/lib/journald-cloud-watch-script"
+
+BATCH_SIZE=50
+
+fetch_current_cursor () {
+  aws logs describe-log-streams \
+    --region="$AWS_REGION" \
+    --log-group-name="$LOG_GROUP_NAME" \
+    --log-stream-name-prefix="$LOG_STREAM_NAME" |\
+    jq -r ".logStreams[0].uploadSequenceToken // empty"
+}
+
+create_or_get_current_cursor () {
+  current=$(fetch_current_cursor)
+  if [[ $current ]]; then
+    echo $current > $AWS_CURSOR_FILE
+  else
+    aws logs create-log-stream \
+      --region=$AWS_REGION \
+      --log-group-name=$LOG_GROUP_NAME \
+      --log-stream-name=$LOG_STREAM_NAME
+  fi
+}
+
+journal_cursor_rows () {
+  if [[ $(cat $JOURNAL_CURSOR_FILE) ]]; then
+    rows=$(journalctl -a -n $BATCH_SIZE -o json \
+      --after-cursor "$(cat $JOURNAL_CURSOR_FILE)")
+  else
+    rows=$(journalctl -a -n $BATCH_SIZE -o json)
+  fi
+  cursor=$(echo $rows | jq -s -r ". | .[-1].__CURSOR // empty")
+  if [[ $cursor ]]; then
+    echo $cursor > $JOURNAL_CURSOR_FILE
+  fi
+  echo $rows
+}
+
+format_row () {
+  jq "{timestamp: $(date +%s%3N), message: @json}"
+}
+
+write_batch () {
+  rows=$(journal_cursor_rows)
+  if [[ -z $rows ]]; then
+    sleep 1
+    return 0
+  fi
+
+  BATCH=$(echo "$rows" |\
+    format_row |\
+    jq -s .)
+  if [[ $(cat $AWS_CURSOR_FILE) ]]; then
+    sequence_token="--sequence-token=$(cat $AWS_CURSOR_FILE)"
+  else
+    sequence_token=""
+  fi
+  # When there is no sequence token yet (e.g. a brand new stream) the parameter
+  # expansion below drops the argument entirely; passing an empty "" argument
+  # would be rejected by the AWS CLI.
+  aws logs put-log-events \
+    --region="$AWS_REGION" \
+    --log-group-name="$LOG_GROUP_NAME" \
+    --log-stream-name="$LOG_STREAM_NAME" \
+    ${sequence_token:+"$sequence_token"} \
+    --log-events="$BATCH" |\
+    jq -r ".nextSequenceToken" > "$AWS_CURSOR_FILE"
+}
+
+touch $AWS_CURSOR_FILE
+touch $JOURNAL_CURSOR_FILE
+create_or_get_current_cursor
+while true; do
+  write_batch
+done
diff --git a/share/systemd-cloud-watch/Makefile b/share/systemd-cloud-watch/Makefile
deleted file mode 100644
index ad056a6..0000000
--- a/share/systemd-cloud-watch/Makefile
+++ /dev/null
@@ -1,13 +0,0 @@
-.PHONY: clean install uninstall
-
-clean:
-	rm -rf pkg src
-
-pkg:
-	makepkg --force
-
-install:
-	makepkg --force --syncdeps --install
-
-uninstall:
-	sudo pacman -Rs codedeploy-agent
diff --git a/share/systemd-cloud-watch/PKGBUILD b/share/systemd-cloud-watch/PKGBUILD
index 415694b..c8049d4 100644
--- a/share/systemd-cloud-watch/PKGBUILD
+++ b/share/systemd-cloud-watch/PKGBUILD
@@ -1,4 +1,5 @@
-# Maintainer: James Conroy-Finn
+# Maintainer: JUXT
+# Contributor: James Conroy-Finn
 pkgname=systemd-cloud-watch
 pkgver=0.2.1
 pkgrel=1
diff --git a/terraform/Makefile b/terraform/Makefile
new file mode 100644
index 0000000..884682c
--- /dev/null
+++ b/terraform/Makefile
@@ -0,0 +1,13 @@
+all: plan
+
+fmt:
+	terraform fmt
+
+plan: fmt
+	terraform plan -out proposed.plan
+
+apply:
+	terraform apply proposed.plan
+
+destroy:
+	terraform destroy
diff --git a/terraform/assume-role-policy.json b/terraform/assume-role-policy.json
new file mode 100644
index 0000000..5e6ad2f
--- /dev/null
+++ b/terraform/assume-role-policy.json
@@ -0,0 +1,15 @@
+{
+  "Statement": [
+    {
+      "Action": "sts:AssumeRole",
+      "Effect": "Allow",
+      "Principal": {
+        "Service": [
+          "ec2.amazonaws.com"
+        ]
+      },
+      "Sid": ""
+    }
+  ],
+  "Version": "2012-10-17"
+}
diff --git a/terraform/main.tf b/terraform/main.tf
new file mode 100644
index 0000000..2a7efbc
--- /dev/null
+++ b/terraform/main.tf
@@ -0,0 +1,96 @@
+provider "aws" {
+  region  = "eu-west-1"
+  version = "~> 1.14"
+}
+
+provider "template" {
+  version = "~> 1.0"
+}
+
+data "aws_ami" "rock" {
+  most_recent = true
+
+  filter {
+    name = "name"
+
+    values = ["juxt-rock-*"]
+  }
+
+  owners = ["639331413963"]
+}
+
+data "template_file" "user_data" {
+  template = "${file("${path.cwd}/rock.sh")}"
+}
+
+resource "aws_cloudwatch_log_group" "rock" {
+  name              = "rock"
+  retention_in_days = 7
+}
+
+resource "aws_iam_role" "rock" {
+  name = "rock_instance_profile"
+  path = "/"
+
+  assume_role_policy = "${file("assume-role-policy.json")}"
+}
+
+resource "aws_iam_instance_profile" "rock" {
+  name = "rock_instance_profile"
+  role = "${aws_iam_role.rock.name}"
+}
+
+resource "aws_iam_role_policy" "rock" {
+  name   = "rock_logs"
+  role   = "${aws_iam_role.rock.id}"
+  policy = "${file("policy.json")}"
+}
+
+resource "aws_security_group" "allow_all" {
+  name        = "allow_all"
+  description = "Allow all inbound traffic."
+
+  ingress {
+    from_port   = 0
+    to_port     = 0
+    protocol    = "-1"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  egress {
+    from_port   = 0
+    to_port     = 0
+    protocol    = "-1"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  tags {
+    Name   = "allow_all"
+    Source = "juxt/rock"
+  }
+}
+
+resource "aws_instance" "rock" {
+  ami                  = "${data.aws_ami.rock.id}"
+  instance_type        = "t2.small"
+  availability_zone    = "eu-west-1a"
+  iam_instance_profile = "${aws_iam_instance_profile.rock.name}"
+  security_groups      = ["${aws_security_group.allow_all.name}"]
+  user_data            = "${data.template_file.user_data.rendered}"
+
+  # This needs to be created manually.
+  key_name = "rock"
+
+  root_block_device {
+    volume_size = 20
+  }
+
+  tags {
+    Name   = "rock"
+    Source = "juxt/rock"
+  }
+}
+
+output "address" {
+  value = "${aws_instance.rock.public_dns}"
+}
diff --git a/terraform/policy.json b/terraform/policy.json
new file mode 100644
index 0000000..1b0da5d
--- /dev/null
+++ b/terraform/policy.json
@@ -0,0 +1,28 @@
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Action": [
+        "logs:CreateLogGroup",
+        "logs:CreateLogStream",
+        "logs:PutLogEvents",
+        "logs:DescribeLogStreams"
+      ],
+      "Effect": "Allow",
+      "Resource": [
+        "arn:aws:logs:*:*:*"
+      ]
+    },
+    {
+      "Action": [
+        "ec2:DescribeInstances",
+        "ec2:DescribeInstanceStatus",
+        "ec2:DescribeTags",
+        "tag:GetTags",
+        "tag:GetResources"
+      ],
+      "Effect": "Allow",
+      "Resource": "*"
+    }
+  ]
+}
diff --git a/terraform/rock.sh b/terraform/rock.sh
new file mode 100755
index 0000000..ea0de38
--- /dev/null
+++ b/terraform/rock.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+
+# -----------------------------------------------------------------------------
+# CodeDeploy
+
+systemctl start codedeploy-agent
+
+# -----------------------------------------------------------------------------
+# JournalD -> CloudWatch
+
+cat <<EOF > /etc/systemd-cloud-watch.conf
+log_group = "rock"
+EOF
+
+# Looks like there's a bug that results in a lot of log output from this library.
+#
+# Fix is in this PR: https://github.com/advantageous/systemd-cloud-watch/pull/16
+# systemctl start systemd-cloud-watch
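A quick way to sanity-check an instance provisioned with this plan (a sketch, not part of this patch; the log stream is named after the instance's hostname, so substitute the real value for the <instance-hostname> placeholder):

# The public DNS name comes from the "address" output above.
ssh root@"$(terraform output address)" systemctl status codedeploy-agent
# Recent journald entries should show up in the rock log group once a log
# forwarder is running; run this from a workstation with CloudWatch read access.
aws logs get-log-events \
  --region eu-west-1 \
  --log-group-name rock \
  --log-stream-name <instance-hostname>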