test: refactor hdfs related functions in HDFS test helpers
skshetry committed Jan 13, 2020
1 parent 028d30e commit d55427d
Showing 3 changed files with 37 additions and 39 deletions.
11 changes: 5 additions & 6 deletions tests/func/test_data_cloud.py
@@ -32,9 +32,9 @@
 
 from tests.remotes import (
     _should_test_gcp,
-    _should_test_hdfs,
     Azure,
     GDrive,
+    HDFS,
     S3,
     SSHMocked,
     OSS,
@@ -45,7 +45,6 @@
     TEST_GDRIVE_CLIENT_SECRET,
     TEST_REMOTE,
     get_gcp_url,
-    get_hdfs_url,
     get_local_url,
 )
 
@@ -332,10 +331,10 @@ def _get_cloud_class(self):
 
 class TestRemoteHDFS(TestDataCloudBase):
     def _should_test(self):
-        return _should_test_hdfs()
+        return HDFS.should_test()
 
     def _get_url(self):
-        return get_hdfs_url()
+        return HDFS.get_url()
 
     def _get_cloud_class(self):
         return RemoteHDFS
@@ -431,10 +430,10 @@ def _test(self):
 
 class TestRemoteHDFSCLI(TestDataCloudCLIBase):
     def _should_test(self):
-        return _should_test_hdfs()
+        return HDFS.should_test()
 
     def _test(self):
-        url = get_hdfs_url()
+        url = HDFS.get_url()
 
         self.main(["remote", "add", TEST_REMOTE, url])
 
4 changes: 2 additions & 2 deletions tests/func/test_repro.py
@@ -36,7 +36,7 @@
 from tests.basic_env import TestDvc
 from tests.remotes import (
     _should_test_gcp,
-    _should_test_hdfs,
+    HDFS,
     S3,
     SSH,
     SSHMocked,
@@ -996,7 +996,7 @@ def write(self, bucket, key, body):
 
 class TestReproExternalHDFS(TestReproExternalBase):
     def should_test(self):
-        return _should_test_hdfs()
+        return HDFS.should_test()
 
     @property
     def scheme(self):
61 changes: 30 additions & 31 deletions tests/remotes.py
@@ -66,29 +66,6 @@ def _should_test_gcp():
     return True
 
 
-def _should_test_hdfs():
-    if platform.system() != "Linux":
-        return False
-
-    try:
-        check_output(
-            ["hadoop", "version"], shell=True, executable=os.getenv("SHELL")
-        )
-    except (CalledProcessError, IOError):
-        return False
-
-    p = Popen(
-        "hadoop fs -ls hdfs://127.0.0.1/",
-        shell=True,
-        executable=os.getenv("SHELL"),
-    )
-    p.communicate()
-    if p.returncode != 0:
-        return False
-
-    return True
-
-
 def get_local_storagepath():
     return TestDvc.mkdtemp()
 
@@ -97,12 +74,6 @@ def get_local_url():
     return get_local_storagepath()
 
 
-def get_hdfs_url():
-    return "hdfs://{}@127.0.0.1{}".format(
-        getpass.getuser(), get_local_storagepath()
-    )
-
-
 def get_gcp_storagepath():
     return TEST_GCP_REPO_BUCKET + "/" + str(uuid.uuid4())
 
@@ -277,5 +248,33 @@ def get_url(user, port):
 
 
 class HDFS:
-    should_test = _should_test_hdfs
-    get_url = get_hdfs_url
+    @staticmethod
+    def should_test():
+        if platform.system() != "Linux":
+            return False
+
+        try:
+            check_output(
+                ["hadoop", "version"],
+                shell=True,
+                executable=os.getenv("SHELL"),
+            )
+        except (CalledProcessError, IOError):
+            return False
+
+        p = Popen(
+            "hadoop fs -ls hdfs://127.0.0.1/",
+            shell=True,
+            executable=os.getenv("SHELL"),
+        )
+        p.communicate()
+        if p.returncode != 0:
+            return False
+
+        return True
+
+    @staticmethod
+    def get_url():
+        return "hdfs://{}@127.0.0.1{}".format(
+            getpass.getuser(), get_local_storagepath()
+        )
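For reference, a minimal sketch of how the refactored helper reads at a call site, mirroring the TestRemoteHDFS changes above. The standalone test function and the pytest skipif marker are illustrative assumptions, not part of this commit:

import pytest

from tests.remotes import HDFS

# Skip unless a local HDFS is reachable: HDFS.should_test() shells out to
# `hadoop version` and `hadoop fs -ls hdfs://127.0.0.1/` before returning True.
@pytest.mark.skipif(not HDFS.should_test(), reason="HDFS is not available")
def test_hdfs_url_scheme():
    # HDFS.get_url() builds a URL like hdfs://<user>@127.0.0.1/<tmp-path>
    url = HDFS.get_url()
    assert url.startswith("hdfs://")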
