Make updating of the latest server deb in GCS more robust.
ogarod committed Nov 30, 2017
1 parent caba1a0 commit d4b6ddf
Showing 14 changed files with 80 additions and 47 deletions.
10 changes: 1 addition & 9 deletions .travis.yml
@@ -271,15 +271,7 @@ matrix:
sudo docker exec "${DOCKER_CONTAINER}"
travis/build_server_deb.sh && `# Needs to be run as root.`
(gsutil rm -r gs://${GCS_BUCKET}/_latest_server_deb/ || true) &&
gsutil -m cp gcs_upload_dir/* gs://${GCS_BUCKET}/_latest_server_deb/ &&
travis/deploy_to_gcs.sh &&
`# Trigger build of a new GRR Docker image (grrdocker/grr)`
`# See https://hub.docker.com/r/grrdocker/grr/~/settings/automated-builds/`
curl -H "Content-Type: application/json" --data '{"docker_tag": "latest"}' -X POST https://registry.hub.docker.com/u/grrdocker/grr/trigger/4499c4d4-4a8b-48da-bc95-5dbab39be545/
travis/deploy_to_gcs.sh
cache:
directories:
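Aside: in the old .travis.yml step above, gsutil rm -r gs://${GCS_BUCKET}/_latest_server_deb/ ran before the replacement files were copied in, so a failed or interrupted upload left that well-known location empty or half-populated. The new travis/deploy_to_gcs.sh (diffed further down) now owns this step and verifies the freshly uploaded deb, copies it to a backup prefix, verifies again, and only then replaces the latest-deb location. A minimal sketch of that ordering, not part of the diff and with illustrative bucket paths:

#!/bin/bash
# Sketch of the copy-verify-promote ordering; paths are placeholders.
set -e

src="gs://example-bucket/some_build_dir"          # freshly uploaded artifacts
backup="gs://example-bucket/.latest_server_deb"   # staging/backup prefix
latest="gs://example-bucket/_latest_server_deb"   # well-known "latest" prefix

gsutil --quiet stat "${src}/*.deb"     # abort early (via set -e) if no server deb was built
gsutil rm -r "${backup}" || true
gsutil -m cp "${src}/*" "${backup}"

gsutil --quiet stat "${backup}/*.deb"  # confirm the backup copy actually landed
gsutil rm -r "${latest}" || true
gsutil -m cp "${backup}/*" "${latest}"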
6 changes: 3 additions & 3 deletions grr/parsers/config_file.py
@@ -533,7 +533,7 @@ def Parse(self, stat, file_object, knowledge_base):
_, _ = stat, knowledge_base
# Clean out any residual state.
self.Flush()
lines = [l.strip() for l in file_object.read(100000).splitlines()]
lines = [l.strip() for l in file_object.read().splitlines()]
for line in lines:
# Remove comments (will break if it includes a quoted/escaped #)
line = line.split("#")[0].strip()
@@ -786,7 +786,7 @@ class CronAtAllowDenyParser(parsers.FileParser):
supported_artifacts = ["CronAtAllowDenyFiles"]

def Parse(self, stat, file_obj, unused_knowledge_base):
lines = set([l.strip() for l in file_obj.read(100000).splitlines()])
lines = set([l.strip() for l in file_obj.read().splitlines()])

users = []
bad_lines = []
@@ -933,7 +933,7 @@ def Parse(self, stat, file_object, knowledge_base):
# ntp.conf has no line continuation. Override the default 'cont' values
# then parse up the lines.
self.cont = ""
for line in self.ParseEntries(file_object.read(100000)):
for line in self.ParseEntries(file_object.read()):
self.ParseLine(line)
yield rdf_config_file.NtpConfig(
config=self.config,
2 changes: 1 addition & 1 deletion grr/parsers/cron_file_parser.py
@@ -20,7 +20,7 @@ def Parse(self, stat, file_object, knowledge_base):
_ = knowledge_base
entries = []

crondata = file_object.read(100000)
crondata = file_object.read()
jobs = crontab.CronTab(tab=crondata)

for job in jobs:
10 changes: 5 additions & 5 deletions grr/parsers/linux_file_parser.py
@@ -53,7 +53,7 @@ def ParseMultiple(self, stats, file_objects, unused_knowledge_base):
if bdf_regex.match(bdf):
# Remove newlines from all files except config. Config contains raw data
# so we don't want to touch it even if it has a newline character.
file_data = file_obj.read(100000)
file_data = file_obj.read()
if filename != "config":
file_data = file_data.rstrip("\n")
data[bdf][filename] = file_data
@@ -108,7 +108,7 @@ def ParseLine(cls, index, line):
def Parse(self, stat, file_object, knowledge_base):
"""Parse the passwd file."""
_, _ = stat, knowledge_base
lines = [l.strip() for l in file_object.read(100000).splitlines()]
lines = [l.strip() for l in file_object.read().splitlines()]
for index, line in enumerate(lines):
line = self.ParseLine(index, line)
if line:
@@ -163,7 +163,7 @@ def Parse(self, stat, file_object, knowledge_base):
"""Parse the wtmp file."""
_, _ = stat, knowledge_base
users = {}
wtmp = file_object.read(10000000)
wtmp = file_object.read()
while wtmp:
try:
record = UtmpStruct(wtmp)
@@ -261,7 +261,7 @@ def Parse(self, stat, file_object, knowledge_base):
rdf_client.User
"""
_, _ = stat, knowledge_base
lines = [l.strip() for l in file_object.read(100000).splitlines()]
lines = [l.strip() for l in file_object.read().splitlines()]
return self.ParseLines(lines)


@@ -349,7 +349,7 @@ def _ParseFile(self, file_obj, line_parser):
Raises:
parser.ParseError if the parser is unable to process the line.
"""
lines = [l.strip() for l in file_obj.read(100000).splitlines()]
lines = [l.strip() for l in file_obj.read().splitlines()]
try:
for index, line in enumerate(lines):
if line:
5 changes: 3 additions & 2 deletions grr/parsers/linux_release_parser.py
@@ -145,14 +145,15 @@ class LinuxReleaseParser(parsers.FileParser):
ReleaseFileParseHandler('RedHat')),
# Debian-based.
WeightedReleaseFile(20, '/etc/debian_version',
ReleaseFileParseHandler('Debian')),)
ReleaseFileParseHandler('Debian')),
)

def _Combine(self, stats, file_objects):
result = {}
for stat, file_object in itertools.izip(stats, file_objects):
path = stat.pathspec.path
file_object.seek(0)
contents = file_object.read(100000)
contents = file_object.read()
result[path] = contents
return result

6 changes: 3 additions & 3 deletions grr/parsers/linux_service_parser.py
@@ -120,7 +120,7 @@ def _Facilities(self, condition):
def _ParseInit(self, init_files):
init_lexer = LSBInitLexer()
for path, file_obj in init_files:
init = init_lexer.ParseEntries(file_obj.read(100000))
init = init_lexer.ParseEntries(file_obj.read())
if init:
service = rdf_client.LinuxServiceInformation()
service.name = init.get("provides")
@@ -184,7 +184,7 @@ def ParseMultiple(self, stats, file_objs, _):
init_files = []
for k, v in files.iteritems():
if k.startswith("/etc/insserv.conf"):
insserv_data += "%s\n" % v.read(100000)
insserv_data += "%s\n" % v.read()
else:
init_files.append((k, v))
self._ParseInsserv(insserv_data)
@@ -218,7 +218,7 @@ def _ParseSection(self, section, cfg):
def _ProcessEntries(self, fd):
"""Extract entries from the xinetd config files."""
parser = config_file.KeyValueParser(kv_sep="{", term="}", sep=None)
data = fd.read(100000)
data = fd.read()
entries = parser.ParseEntries(data)
for entry in entries:
for section, cfg in entry.items():
2 changes: 1 addition & 1 deletion grr/parsers/linux_sysctl_parser.py
@@ -17,7 +17,7 @@ def _Parse(self, stat, file_obj):
# Remove /proc/sys
key = stat.pathspec.path.replace("/proc/sys/", "", 1)
key = key.replace("/", "_")
value = file_obj.read(100000).split()
value = file_obj.read().split()
if len(value) == 1:
value = value[0]
return key, value
3 changes: 1 addition & 2 deletions grr/parsers/rekall_artifact_parser_test.py
@@ -21,8 +21,7 @@ def testBasicParsing(self):
"rekall_vad_result.dat.gz")

result = rdf_rekall_types.RekallResponse(
json_messages=gzip.open(ps_list_file, "rb").read(10000000),
plugin="pslist")
json_messages=gzip.open(ps_list_file, "rb").read(), plugin="pslist")

knowledge_base = rdf_client.KnowledgeBase()
knowledge_base.environ_systemdrive = "C:"
2 changes: 1 addition & 1 deletion grr/server/artifact_registry.py
@@ -278,7 +278,7 @@ def _LoadArtifactsFromFiles(self, file_paths, overwrite_if_exists=True):
try:
with open(file_path, mode="rb") as fh:
logging.debug("Loading artifacts from %s", file_path)
for artifact_val in self.ArtifactsFromYaml(fh.read(1000000)):
for artifact_val in self.ArtifactsFromYaml(fh.read()):
self.RegisterArtifact(
artifact_val,
source="file:%s" % file_path,
2 changes: 1 addition & 1 deletion grr/server/artifact_test.py
@@ -141,7 +141,7 @@ def RekallAction(self, _):
ps_list_file = os.path.join(config.CONFIG["Test.data_dir"],
self.result_filename)
result = rdf_rekall_types.RekallResponse(
json_messages=gzip.open(ps_list_file).read(10000000),
json_messages=gzip.open(ps_list_file).read(),
plugin="pslist",
client_urn=self.client_id)

2 changes: 1 addition & 1 deletion grr/server/flows/general/transfer_test.py
@@ -373,7 +373,7 @@ def testMultiGetFileMultiFiles(self):
for pathspec in pathspecs:
urn = pathspec.AFF4Path(self.client_id)
fd = aff4.FACTORY.Open(urn, token=self.token)
self.assertEqual("Hello", fd.Read(100000))
self.assertEqual("Hello", fd.read())

def testMultiGetFileDeduplication(self):
client_mock = action_mocks.MultiGetFileClientMock()
2 changes: 1 addition & 1 deletion grr/tools/config_updater.py
@@ -966,7 +966,7 @@ def main(argv):
yaml.load(open(flags.FLAGS.file, "rb")) # Check it will parse.
try:
artifact.UploadArtifactYamlFile(
open(flags.FLAGS.file, "rb").read(1000000),
open(flags.FLAGS.file, "rb").read(),
overwrite=flags.FLAGS.overwrite_artifact)
except artifact_registry.ArtifactDefinitionError as e:
print "Error %s. You may need to set --overwrite_artifact." % e
26 changes: 14 additions & 12 deletions grr/tools/frontend_test.py
@@ -110,9 +110,9 @@ def FromPrivateKey(*_):
"""
return self.client_id

with utils.MultiStubber((standard.UploadFile, "SendReply", MockSendReply),
(rdf_client.ClientURN, "FromPrivateKey",
FromPrivateKey)):
with utils.MultiStubber(
(standard.UploadFile, "SendReply", MockSendReply),
(rdf_client.ClientURN, "FromPrivateKey", FromPrivateKey)):
action = standard.UploadFile(client.client_worker)
action.Run(args)

@@ -155,8 +155,8 @@ def testUpload(self):
with self.assertRaises(IOError):
self._UploadFile(args)

self.assertRegexpMatches("Signature did not match digest",
str(logger.args))
self.assertRegexpMatches("Signature did not match digest", str(
logger.args))
logger.args[:] = []

# Ok lets hmac the policy now, but its still too old.
@@ -167,7 +167,8 @@
# Make sure the file is not written yet.
rootdir = config.CONFIG["FileUploadFileStore.root_dir"]
target_filename = os.path.join(
rootdir, self.client_id.Add(test_file).Path().lstrip(os.path.sep))
rootdir,
self.client_id.Add(test_file).Path().lstrip(os.path.sep))

self.assertNotEqual(target_filename, test_file)

@@ -282,7 +283,7 @@ def testClientFileFinderUploadBound(self):
for r in results:
aff4_obj = aff4.FACTORY.Open(
r.stat_entry.pathspec.AFF4Path(self.client_id), token=self.token)
data = aff4_obj.Read(1000000)
data = aff4_obj.read()
self.assertLessEqual(len(data), 300)
self.assertEqual(data,
open(r.stat_entry.pathspec.path, "rb").read(len(data)))
@@ -320,7 +321,8 @@ def testClientFileFinderUploadSkip(self):
aff4_obj = aff4.FACTORY.Open(
r.stat_entry.pathspec.AFF4Path(self.client_id), token=self.token)
self.assertEqual(
aff4_obj.Read(100), open(r.stat_entry.pathspec.path, "rb").read(100))
aff4_obj.Read(100),
open(r.stat_entry.pathspec.path, "rb").read(100))

def testClientFileFinderFilestoreIntegration(self):
paths = [os.path.join(self.base_path, "**/*.plist")]
@@ -388,12 +390,12 @@ def testRekallProfiles(self):
known_profile = "F8E2A8B5C9B74BF4A6E4A48F180099942"
unknown_profile = "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"

req = requests.get(self.base_url + "rekall_profiles/v1.0/nt/GUID/" +
unknown_profile)
req = requests.get(
self.base_url + "rekall_profiles/v1.0/nt/GUID/" + unknown_profile)
self.assertEqual(req.status_code, 404)

req = requests.get(self.base_url + "rekall_profiles/v1.0/nt/GUID/" +
known_profile)
req = requests.get(
self.base_url + "rekall_profiles/v1.0/nt/GUID/" + known_profile)
self.assertEqual(req.status_code, 200)

pb = rdf_rekall_types.RekallProfile.protobuf()
49 changes: 44 additions & 5 deletions travis/deploy_to_gcs.sh
@@ -1,7 +1,17 @@
#!/bin/bash
#
# Script that uploads artifacts built by Travis to GCS.

set -e

function delete_gcs_keys() {
if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
shred -u travis/travis_uploader_service_account.json
elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
srm -sz travis/travis_uploader_service_account.json
fi
}

commit_timestamp_secs="$(git show -s --format=%ct "${TRAVIS_COMMIT}")"

# Hacky, but platform independent way of formatting the timestamp.
@@ -12,14 +22,43 @@ print(datetime.utcfromtimestamp(
"
commit_timestamp=$(python -c "${pyscript}")

gcs_dest="gs://${GCS_BUCKET}/${commit_timestamp}_${TRAVIS_COMMIT}/travis_job_${TRAVIS_JOB_NUMBER}_${GCS_TAG}/"
gcs_dest="gs://${GCS_BUCKET}/${commit_timestamp}_${TRAVIS_COMMIT}/travis_job_${TRAVIS_JOB_NUMBER}_${GCS_TAG}"

echo Uploading templates to "${gcs_dest}"
gsutil -m cp gcs_upload_dir/* "${gcs_dest}"

if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
shred -u travis/travis_uploader_service_account.json
# No more work to do if the currently-running job is not the one that builds
# server debs.
if [[ "${GCS_TAG}" != 'server_deb' ]]; then
delete_gcs_keys
exit 0
fi
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
srm -sz travis/travis_uploader_service_account.json

latest_dir="gs://${GCS_BUCKET}/_latest_server_deb"
backup_dir="gs://${GCS_BUCKET}/.latest_server_deb"

# Copy the server deb to its backup location.
original_deb_exists="$( ( gsutil --quiet stat "${gcs_dest}/*.deb" && echo true ) || echo false )"
if [[ "${original_deb_exists}" != 'true' ]]; then
echo "Server deb not found in ${gcs_dest}"
delete_gcs_keys
exit 1
fi
gsutil rm -r "${backup_dir}" || true
gsutil -m cp "${gcs_dest}/*" "${backup_dir}"

# Copy the server deb from its backup location to its expected location.
backup_deb_exists="$( ( gsutil --quiet stat "${backup_dir}/*.deb" && echo true ) || echo false )"
if [[ "${backup_deb_exists}" != 'true' ]]; then
echo "Server deb not found in ${backup_dir}"
delete_gcs_keys
exit 2
fi
gsutil rm -r "${latest_dir}" || true
gsutil -m cp "${backup_dir}/*" "${latest_dir}"

delete_gcs_keys

# Trigger build of a new GRR Docker image (grrdocker/grr)
# See https://hub.docker.com/r/grrdocker/grr/~/settings/automated-builds/
curl -H "Content-Type: application/json" --data '{"docker_tag": "latest"}' -X POST https://registry.hub.docker.com/u/grrdocker/grr/trigger/4499c4d4-4a8b-48da-bc95-5dbab39be545/
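A note on the probe idiom used above, e.g. original_deb_exists="$( ( gsutil --quiet stat ... && echo true ) || echo false )": with set -e in effect, a bare failing command terminates the script immediately, but a command that forms part of an || list does not. Wrapping the probe this way makes the command substitution always succeed, so the outcome is captured as a string and the failure path can still delete the service-account key and exit with a distinct status. A small standalone illustration, assuming nothing beyond standard bash:

#!/bin/bash
# Standalone illustration of the probe idiom from deploy_to_gcs.sh.
set -e

probe() { test -e "$1"; }   # stand-in for `gsutil --quiet stat ...`

# A bare `probe /no/such/file` would abort the script here under `set -e`.
# Wrapped in `( ... && echo true ) || echo false`, the command substitution
# always exits 0, and the result is captured as a string instead.
exists="$( ( probe /no/such/file && echo true ) || echo false )"
echo "exists=${exists}"   # prints: exists=false

if [[ "${exists}" != 'true' ]]; then
  exit 1   # in deploy_to_gcs.sh, delete_gcs_keys runs before this exit
fi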
