#!/usr/bin/env python
#
# This is the Telegraf build script.
#
# Current caveats:
#   - Does not checkout the correct commit/branch (for now, you will need to do so manually)
#   - Has external dependencies for packaging (fpm) and uploading (boto)
#
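# Example invocation (illustrative; run ./build.py --help for the full list of options):
#   ./build.py --platform=linux --arch=amd64 --package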

import sys
import os
import subprocess
import time
import datetime
import shutil
import tempfile
import hashlib
import re

debug = False

# PACKAGING VARIABLES
INSTALL_ROOT_DIR = "/usr/bin"
LOG_DIR = "/var/log/telegraf"
SCRIPT_DIR = "/usr/lib/telegraf/scripts"
CONFIG_DIR = "/etc/telegraf"
LOGROTATE_DIR = "/etc/logrotate.d"

INIT_SCRIPT = "scripts/init.sh"
SYSTEMD_SCRIPT = "scripts/telegraf.service"
LOGROTATE_SCRIPT = "etc/logrotate.d/telegraf"
DEFAULT_CONFIG = "etc/telegraf.conf"
DEFAULT_WINDOWS_CONFIG = "etc/telegraf_windows.conf"
POSTINST_SCRIPT = "scripts/post-install.sh"
PREINST_SCRIPT = "scripts/pre-install.sh"

# META-PACKAGE VARIABLES
PACKAGE_LICENSE = "MIT"
PACKAGE_URL = "https://github.com/influxdata/telegraf"
MAINTAINER = "support@influxdb.com"
VENDOR = "InfluxData"
DESCRIPTION = "Plugin-driven server agent for reporting metrics into InfluxDB."

# SCRIPT START
prereqs = [ 'git', 'go' ]
optional_prereqs = [ 'fpm', 'rpmbuild' ]

fpm_common_args = "-f -s dir --log error \
 --vendor {} \
 --url {} \
 --license {} \
 --maintainer {} \
 --config-files {} \
 --config-files {} \
 --after-install {} \
 --before-install {} \
 --description \"{}\"".format(
    VENDOR,
    PACKAGE_URL,
    PACKAGE_LICENSE,
    MAINTAINER,
    CONFIG_DIR + '/telegraf.conf',
    LOGROTATE_DIR + '/telegraf',
    POSTINST_SCRIPT,
    PREINST_SCRIPT,
    DESCRIPTION)

targets = {
    'telegraf' : './cmd/telegraf/telegraf.go',
}

supported_builds = {
    'darwin': [ "amd64", "i386" ],
    'windows': [ "amd64", "i386" ],
    'linux': [ "amd64", "i386", "arm" ],
    'freebsd': [ "amd64" ]
}

supported_packages = {
    "darwin": [ "tar", "zip" ],
    "linux": [ "deb", "rpm", "tar", "zip" ],
    "windows": [ "zip" ],
    "freebsd": [ "tar" ]
}

supported_tags = {
    # "linux": {
    #     "amd64": ["sensors"]
    # }
}

prereq_cmds = {
    # "linux": "sudo apt-get install lm-sensors libsensors4-dev"
}

def run(command, allow_failure=False, shell=False):
    out = None
    if debug:
        print("[DEBUG] {}".format(command))
    try:
        if shell:
            out = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=shell)
        else:
            out = subprocess.check_output(command.split(), stderr=subprocess.STDOUT)
        out = out.decode("utf8")
    except subprocess.CalledProcessError as e:
        print("")
        print("")
        print("Executed command failed!")
        print("-- Command run was: {}".format(command))
        print("-- Failure was: {}".format(e.output))
        if allow_failure:
            print("Continuing...")
            return None
        else:
            print("")
            print("Stopping.")
            sys.exit(1)
    except OSError as e:
        print("")
        print("")
        print("Invalid command!")
        print("-- Command run was: {}".format(command))
        print("-- Failure was: {}".format(e))
        if allow_failure:
            print("Continuing...")
            return out
        else:
            print("")
            print("Stopping.")
            sys.exit(1)
    else:
        return out

def create_temp_dir(prefix=None):
    if prefix is None:
        return tempfile.mkdtemp(prefix="telegraf-build.")
    else:
        return tempfile.mkdtemp(prefix=prefix)

def get_current_version():
    command = "git describe --always --tags --abbrev=0"
    out = run(command)
    return out.strip()

def get_current_commit(short=False):
    command = None
    if short:
        command = "git log --pretty=format:'%h' -n 1"
    else:
        command = "git rev-parse HEAD"
    out = run(command)
    return out.strip('\'\n\r ')

def get_current_branch():
    command = "git rev-parse --abbrev-ref HEAD"
    out = run(command)
    return out.strip()

def get_system_arch():
    arch = os.uname()[4]
    if arch == "x86_64":
        arch = "amd64"
    return arch

def get_system_platform():
    if sys.platform.startswith("linux"):
        return "linux"
    else:
        return sys.platform

def get_go_version():
    out = run("go version")
    matches = re.search(r'go version go(\S+)', out)
    if matches is not None:
        return matches.groups()[0].strip()
    return None

def check_path_for(b):
    def is_exe(fpath):
        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

    for path in os.environ["PATH"].split(os.pathsep):
        path = path.strip('"')
        full_path = os.path.join(path, b)
        if os.path.isfile(full_path) and os.access(full_path, os.X_OK):
            return full_path

def check_environ(build_dir=None):
    print("\nChecking environment:")
    for v in [ "GOPATH", "GOBIN", "GOROOT" ]:
        print("\t- {} -> {}".format(v, os.environ.get(v)))

    cwd = os.getcwd()
    if build_dir is None and os.environ.get("GOPATH") and os.environ.get("GOPATH") not in cwd:
        print("\n!! WARNING: Your current directory is not under your GOPATH. This may lead to build failures.")

def check_prereqs():
    print("\nChecking for dependencies:")
    for req in prereqs:
        path = check_path_for(req)
        if path is None:
            path = '?'
        print("\t- {} -> {}".format(req, path))
    for req in optional_prereqs:
        path = check_path_for(req)
        if path is None:
            path = '?'
        print("\t- {} (optional) -> {}".format(req, path))
    print("")

def upload_packages(packages, bucket_name=None, nightly=False):
    if debug:
        print("[DEBUG] upload_packages: {}".format(packages))
    try:
        import boto
        from boto.s3.key import Key
    except ImportError:
        print("!! Cannot upload packages without the 'boto' python library.")
        return 1
    print("Uploading packages to S3...")
    print("")
    c = boto.connect_s3()
    if bucket_name is None:
        bucket_name = 'get.influxdb.org/telegraf'
    bucket = c.get_bucket(bucket_name.split('/')[0])
    print("\t - Using bucket: {}".format(bucket_name))
    for p in packages:
        if '/' in bucket_name:
            # Allow for nested paths within the bucket name (ex:
            # bucket/telegraf). Assuming forward-slashes as path
            # delimiter.
            name = os.path.join('/'.join(bucket_name.split('/')[1:]),
                                os.path.basename(p))
        else:
            name = os.path.basename(p)
        if bucket.get_key(name) is None or nightly:
            print("\t - Uploading {} to {}...".format(name, bucket_name))
            k = Key(bucket)
            k.key = name
            if nightly:
                n = k.set_contents_from_filename(p, replace=True)
            else:
                n = k.set_contents_from_filename(p, replace=False)
            k.make_public()
        else:
            print("\t - Not uploading {}, already exists.".format(p))
    print("")

def build(version=None,
          branch=None,
          commit=None,
          platform=None,
          arch=None,
          nightly=False,
          rc=None,
          race=False,
          clean=False,
          outdir=".",
          goarm_version="6"):
    print("-------------------------")
    print("")
    print("Build plan:")
    print("\t- version: {}".format(version))
    if rc:
        print("\t- release candidate: {}".format(rc))
    print("\t- commit: {}".format(commit))
    print("\t- branch: {}".format(branch))
    print("\t- platform: {}".format(platform))
    print("\t- arch: {}".format(arch))
    if arch == 'arm' and goarm_version:
        print("\t- ARM version: {}".format(goarm_version))
    print("\t- nightly? {}".format(str(nightly).lower()))
    print("\t- race enabled? {}".format(str(race).lower()))
    print("")

    if not os.path.exists(outdir):
        os.makedirs(outdir)
    elif clean and outdir != '/':
        print("Cleaning build directory...")
        shutil.rmtree(outdir)
        os.makedirs(outdir)

    if rc:
        # If a release candidate, update the version information accordingly
        version = "{}rc{}".format(version, rc)

    # Set architecture to something that Go expects
    if arch == 'i386':
        arch = '386'
    elif arch == 'x86_64':
        arch = 'amd64'

    print("Starting build...")
    for b, c in targets.items():
        if platform == 'windows':
            b = b + '.exe'
        print("\t- Building '{}'...".format(os.path.join(outdir, b)))
        build_command = ""
        build_command += "GOOS={} GOARCH={} ".format(platform, arch)
        if arch == "arm" and goarm_version:
            if goarm_version not in ["5", "6", "7", "arm64"]:
                print("!! Invalid ARM build version: {}".format(goarm_version))
            build_command += "GOARM={} ".format(goarm_version)
        build_command += "go build -o {} ".format(os.path.join(outdir, b))
        if race:
            build_command += "-race "
        if platform in supported_tags:
            if arch in supported_tags[platform]:
                build_tags = supported_tags[platform][arch]
                for build_tag in build_tags:
                    build_command += "-tags "+build_tag+" "
        go_version = get_go_version()
        if "1.4" in go_version:
            build_command += "-ldflags=\"-X main.buildTime '{}' ".format(datetime.datetime.utcnow().isoformat())
            build_command += "-X main.Version {} ".format(version)
            build_command += "-X main.Branch {} ".format(get_current_branch())
            build_command += "-X main.Commit {}\" ".format(get_current_commit())
        else:
            build_command += "-ldflags=\"-X main.buildTime='{}' ".format(datetime.datetime.utcnow().isoformat())
            build_command += "-X main.Version={} ".format(version)
            build_command += "-X main.Branch={} ".format(get_current_branch())
            build_command += "-X main.Commit={}\" ".format(get_current_commit())
        build_command += c
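        # The assembled command generally looks like the following (illustrative only;
        # exact flags depend on the target platform/arch and the detected Go version):
        #   GOOS=linux GOARCH=amd64 go build -o build/telegraf \
        #       -ldflags="-X main.Version=<version> -X main.Branch=<branch> -X main.Commit=<sha>" ./cmd/telegraf/telegraf.go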
        run(build_command, shell=True)
    print("")

def create_dir(path):
    try:
        os.makedirs(path)
    except OSError as e:
        print(e)

def rename_file(fr, to):
    try:
        os.rename(fr, to)
    except OSError as e:
        print(e)
        # Return the original filename
        return fr
    else:
        # Return the new filename
        return to

def copy_file(fr, to):
    try:
        shutil.copy(fr, to)
    except OSError as e:
        print(e)

def create_package_fs(build_root):
    print("\t- Creating a filesystem hierarchy from directory: {}".format(build_root))
    # Using [1:] for the path names due to them being absolute
    # (will overwrite previous paths, per 'os.path.join' documentation)
    dirs = [ INSTALL_ROOT_DIR[1:], LOG_DIR[1:], SCRIPT_DIR[1:], CONFIG_DIR[1:], LOGROTATE_DIR[1:] ]
    for d in dirs:
        create_dir(os.path.join(build_root, d))
        os.chmod(os.path.join(build_root, d), 0o755)

def package_scripts(build_root, windows=False):
    print("\t- Copying scripts and sample configuration to build directory")
    if windows:
        shutil.copyfile(DEFAULT_WINDOWS_CONFIG, os.path.join(build_root, "telegraf.conf"))
        os.chmod(os.path.join(build_root, "telegraf.conf"), 0o644)
    else:
        shutil.copyfile(INIT_SCRIPT, os.path.join(build_root, SCRIPT_DIR[1:], INIT_SCRIPT.split('/')[1]))
        os.chmod(os.path.join(build_root, SCRIPT_DIR[1:], INIT_SCRIPT.split('/')[1]), 0o644)
        shutil.copyfile(SYSTEMD_SCRIPT, os.path.join(build_root, SCRIPT_DIR[1:], SYSTEMD_SCRIPT.split('/')[1]))
        os.chmod(os.path.join(build_root, SCRIPT_DIR[1:], SYSTEMD_SCRIPT.split('/')[1]), 0o644)
        shutil.copyfile(LOGROTATE_SCRIPT, os.path.join(build_root, LOGROTATE_DIR[1:], "telegraf"))
        os.chmod(os.path.join(build_root, LOGROTATE_DIR[1:], "telegraf"), 0o644)
        shutil.copyfile(DEFAULT_CONFIG, os.path.join(build_root, CONFIG_DIR[1:], "telegraf.conf"))
        os.chmod(os.path.join(build_root, CONFIG_DIR[1:], "telegraf.conf"), 0o644)

def go_get():
    print("Retrieving Go dependencies...")
    run("go get github.com/sparrc/gdm")
    run("gdm restore -f Godeps_windows")
    run("gdm restore")

def generate_md5_from_file(path):
    m = hashlib.md5()
    with open(path, 'rb') as f:
        while True:
            data = f.read(4096)
            if not data:
                break
            m.update(data)
    return m.hexdigest()

def build_packages(build_output, version, pkg_arch, nightly=False, rc=None, iteration=1):
    outfiles = []
    tmp_build_dir = create_temp_dir()
    if debug:
        print("[DEBUG] build_output = {}".format(build_output))
    try:
        print("-------------------------")
        print("")
        print("Packaging...")
        for p in build_output:
            # Create top-level folder displaying which platform (linux, etc)
            create_dir(os.path.join(tmp_build_dir, p))
            for a in build_output[p]:
                current_location = build_output[p][a]
                # Create second-level directory displaying the architecture (amd64, etc)
                build_root = os.path.join(tmp_build_dir, p, a)
                # Create directory tree to mimic file system of package
                create_dir(build_root)
                if p == 'windows':
                    package_scripts(build_root, windows=True)
                else:
                    create_package_fs(build_root)
                    # Copy in packaging and miscellaneous scripts
                    package_scripts(build_root)
                # Copy newly-built binaries to packaging directory
                for b in targets:
                    if p == 'windows':
                        b = b + '.exe'
                        to = os.path.join(build_root, b)
                    else:
                        to = os.path.join(build_root, INSTALL_ROOT_DIR[1:], b)
                    fr = os.path.join(current_location, b)
                    print("\t- [{}][{}] - Moving from '{}' to '{}'".format(p, a, fr, to))
                    copy_file(fr, to)
                # Package the directory structure
                for package_type in supported_packages[p]:
                    print("\t- Packaging directory '{}' as '{}'...".format(build_root, package_type))
                    name = "telegraf"
                    # Reset version, iteration, and current location on each run
                    # since they may be modified below.
                    package_version = version
                    package_iteration = iteration
                    current_location = build_output[p][a]

                    if package_type in ['zip', 'tar']:
                        if nightly:
                            name = '{}-nightly_{}_{}'.format(name, p, a)
                        else:
                            name = '{}-{}-{}_{}_{}'.format(name, package_version, package_iteration, p, a)
                    if package_type == 'tar':
                        # Add `tar.gz` to path to reduce package size
                        current_location = os.path.join(current_location, name + '.tar.gz')
                    if rc is not None:
                        package_iteration = "0.rc{}".format(rc)
                    saved_a = a
                    if pkg_arch is not None:
                        a = pkg_arch
                    if a == '386':
                        a = 'i386'
                    if package_type == 'zip':
                        zip_command = "cd {} && zip {}.zip ./*".format(
                            build_root,
                            name)
                        run(zip_command, shell=True)
                        run("mv {}.zip {}".format(os.path.join(build_root, name), current_location), shell=True)
                        outfile = os.path.join(current_location, name+".zip")
                        outfiles.append(outfile)
                        print("\t\tMD5 = {}".format(generate_md5_from_file(outfile)))
                    else:
                        fpm_command = "fpm {} --name {} -a {} -t {} --version {} --iteration {} -C {} -p {} ".format(
                            fpm_common_args,
                            name,
                            a,
                            package_type,
                            package_version,
                            package_iteration,
                            build_root,
                            current_location)
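                        # Illustrative shape of the resulting fpm invocation (values depend on the build):
                        #   fpm -f -s dir --log error --vendor InfluxData ... --name telegraf -a amd64 \
                        #       -t deb --version <version> --iteration <iteration> -C <build_root> -p <output_dir>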
                        if pkg_arch is not None:
                            a = saved_a
                        if package_type == "rpm":
                            fpm_command += "--depends coreutils "
                            fpm_command += "--depends lsof"
                        out = run(fpm_command, shell=True)
                        matches = re.search(':path=>"(.*)"', out)
                        outfile = None
                        if matches is not None:
                            outfile = matches.groups()[0]
                        if outfile is None:
                            print("[ COULD NOT DETERMINE OUTPUT ]")
                        else:
                            # Strip nightly version (the unix epoch) from filename
                            if nightly and package_type in ['deb', 'rpm']:
                                outfile = rename_file(outfile, outfile.replace("{}-{}".format(version, iteration), "nightly"))
                            outfiles.append(os.path.join(os.getcwd(), outfile))
                            # Display MD5 hash for generated package
                            print("\t\tMD5 = {}".format(generate_md5_from_file(outfile)))
        print("")
        if debug:
            print("[DEBUG] package outfiles: {}".format(outfiles))
        return outfiles
    finally:
        # Cleanup
        shutil.rmtree(tmp_build_dir)

def print_usage():
    print("Usage: ./build.py [options]")
    print("")
    print("Options:")
    print("\t --outdir=<path> \n\t\t- Send build output to a specified path. Defaults to ./build.")
    print("\t --arch=<arch> \n\t\t- Build for specified architecture. Acceptable values: x86_64|amd64, 386, arm, or all")
    print("\t --goarm=<arm version> \n\t\t- Build for specified ARM version (when building for ARM). Default value is: 6")
    print("\t --platform=<platform> \n\t\t- Build for specified platform. Acceptable values: linux, windows, darwin, or all")
    print("\t --version=<version> \n\t\t- Version information to apply to build metadata. If not specified, will be pulled from repo tag.")
    print("\t --pkgarch=<package-arch> \n\t\t- Package architecture if different from <arch>")
    print("\t --commit=<commit> \n\t\t- Use specific commit for build (currently a NOOP).")
    print("\t --branch=<branch> \n\t\t- Build from a specific branch (currently a NOOP).")
    print("\t --rc=<rc number> \n\t\t- Whether or not the build is a release candidate (affects version information).")
    print("\t --iteration=<iteration number> \n\t\t- The iteration to display on the package output (defaults to 0 for RCs, and 1 otherwise).")
    print("\t --race \n\t\t- Whether the produced build should have race detection enabled.")
    print("\t --package \n\t\t- Whether the produced builds should be packaged for the target platform(s).")
    print("\t --nightly \n\t\t- Whether the produced build is a nightly (affects version information).")
    print("\t --parallel \n\t\t- Run Go tests in parallel up to the count specified.")
    print("\t --timeout \n\t\t- Timeout for Go tests. Defaults to 480s.")
    print("\t --clean \n\t\t- Clean the build output directory prior to creating build.")
    print("\t --bucket=<S3 bucket>\n\t\t- Full path of the bucket to upload packages to (must also specify --upload).")
    print("\t --debug \n\t\t- Displays debug output.")
    print("")

def print_package_summary(packages):
    print(packages)

def main():
    # Command-line arguments
    outdir = "build"
    commit = None
    target_platform = None
    target_arch = None
    package_arch = None
    nightly = False
    race = False
    branch = None
    version = get_current_version()
    rc = None
    package = False
    update = False
    clean = False
    upload = False
    test = False
    parallel = None
    timeout = None
    iteration = 1
    no_vet = False
    goarm_version = "6"
    run_get = True
    upload_bucket = None
    global debug

    for arg in sys.argv[1:]:
        if '--outdir' in arg:
            # Output directory. If none is specified, builds will be placed in ./build.
            outdir = arg.split("=")[1]
        elif '--commit' in arg:
            # Commit to build from. If none is specified, then it will build from the most recent commit.
            commit = arg.split("=")[1]
        elif '--branch' in arg:
            # Branch to build from. If none is specified, then it will build from the current branch.
            branch = arg.split("=")[1]
        elif '--arch' in arg:
            # Target architecture. If none is specified, then it will build for the current arch.
            target_arch = arg.split("=")[1]
        elif '--platform' in arg:
            # Target platform. If none is specified, then it will build for the current platform.
            target_platform = arg.split("=")[1]
        elif '--version' in arg:
            # Version to assign to this build (0.9.5, etc)
            version = arg.split("=")[1]
        elif '--pkgarch' in arg:
            # Package architecture if different from <arch> (armhf, etc)
            package_arch = arg.split("=")[1]
        elif '--rc' in arg:
            # Signifies that this is a release candidate build.
            rc = arg.split("=")[1]
        elif '--race' in arg:
            # Signifies that race detection should be enabled.
            race = True
        elif '--package' in arg:
            # Signifies that packages should be built.
            package = True
        elif '--nightly' in arg:
            # Signifies that this is a nightly build.
            nightly = True
        elif '--upload' in arg:
            # Signifies that the resulting packages should be uploaded to S3
            upload = True
        elif '--parallel' in arg:
            # Set parallel for tests.
            parallel = int(arg.split("=")[1])
        elif '--timeout' in arg:
            # Set timeout for tests.
            timeout = arg.split("=")[1]
        elif '--clean' in arg:
            # Signifies that the outdir should be deleted before building
            clean = True
        elif '--iteration' in arg:
            iteration = arg.split("=")[1]
        elif '--no-vet' in arg:
            no_vet = True
        elif '--goarm' in arg:
            # Signifies GOARM flag to pass to build command when compiling for ARM
            goarm_version = arg.split("=")[1]
        elif '--bucket' in arg:
            # The bucket to upload the packages to, relies on boto
            upload_bucket = arg.split("=")[1]
        elif '--debug' in arg:
            print("[DEBUG] Using debug output")
            debug = True
        elif '--help' in arg:
            print_usage()
            return 0
        else:
            print("!! Unknown argument: {}".format(arg))
            print_usage()
            return 1

    if nightly:
        if rc:
            print("!! Cannot be both nightly and a release candidate! Stopping.")
            return 1
        # In order to support nightly builds on the repository, we are adding the epoch timestamp
        # to the version so that version numbers are always greater than the previous nightly.
        version = "{}.n{}".format(version, int(time.time()))
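        # For example (illustrative), a nightly cut from tag 0.10.1 at epoch 1456000000
        # would carry the version "0.10.1.n1456000000".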

    # Pre-build checks
    check_environ()
    check_prereqs()

    if not commit:
        commit = get_current_commit(short=True)
    if not branch:
        branch = get_current_branch()
    if not target_arch:
        if 'arm' in get_system_arch():
            # Prevent uname from reporting ARM arch (eg 'armv7l')
            target_arch = "arm"
        else:
            target_arch = get_system_arch()
    if not target_platform:
        target_platform = get_system_platform()
    if rc or nightly:
        # If a release candidate or nightly, set iteration to 0 (instead of 1)
        iteration = 0

    if target_arch == '386':
        target_arch = 'i386'
    elif target_arch == 'x86_64':
        target_arch = 'amd64'

    build_output = {}

    go_get()

    platforms = []
    single_build = True
    if target_platform == 'all':
        platforms = list(supported_builds.keys())
        single_build = False
    else:
        platforms = [target_platform]

    for platform in platforms:
        if platform in prereq_cmds:
            run(prereq_cmds[platform])
        build_output.update( { platform : {} } )
        archs = []
        if target_arch == "all":
            single_build = False
            archs = supported_builds.get(platform)
        else:
            archs = [target_arch]
        for arch in archs:
            od = outdir
            if not single_build:
                od = os.path.join(outdir, platform, arch)
            build(version=version,
                  branch=branch,
                  commit=commit,
                  platform=platform,
                  arch=arch,
                  nightly=nightly,
                  rc=rc,
                  race=race,
                  clean=clean,
                  outdir=od,
                  goarm_version=goarm_version)
            build_output.get(platform).update( { arch : od } )

    # Build packages
    if package:
        if not check_path_for("fpm"):
            print("!! Cannot package without command 'fpm'. Stopping.")
            return 1
        packages = build_packages(build_output, version, package_arch, nightly=nightly, rc=rc, iteration=iteration)
        # Optionally upload to S3
        if upload:
            upload_packages(packages, bucket_name=upload_bucket, nightly=nightly)
    return 0

if __name__ == '__main__':
    sys.exit(main())