author     Luis Lozano <llozano@chromium.org>    2015-12-15 13:49:30 -0800
committer  Luis Lozano <llozano@chromium.org>    2015-12-16 17:36:06 +0000
commit     f2a3ef46f75d2196a93d3ed27f4d1fcf22b54fbe (patch)
tree       185d243c7eed7c7a0db6f0e640746cadc1479ea9 /automation/server
parent     2a66f70fef907c1cb15229cb58e5129cb620ac98 (diff)
download   toolchain-utils-f2a3ef46f75d2196a93d3ed27f4d1fcf22b54fbe.tar.gz
Run pyformat on all the toolchain-utils files.
This gets rid of a lot of lint issues. Ran by doing this:

for f in *.py; do echo -n "$f " ; if [ -x $f ]; then pyformat -i --remove_trailing_comma --yapf --force_quote_type=double $f ; else pyformat -i --remove_shebang --remove_trailing_comma --yapf --force_quote_type=double $f ; fi ; done

BUG=chromium:567921
TEST=Ran simple crosperf run.

Change-Id: I59778835fdaa5f706d2e1765924389f9e97433d1
Reviewed-on: https://chrome-internal-review.googlesource.com/242031
Reviewed-by: Luis Lozano <llozano@chromium.org>
Commit-Queue: Luis Lozano <llozano@chromium.org>
Tested-by: Luis Lozano <llozano@chromium.org>
Reviewed-by: Yunlian Jiang <yunlian@google.com>
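For reference, a minimal sketch (not part of this change) of driving the same kind of bulk reformat from Python through yapf's library API, which pyformat wraps here. The FormatFile call is yapf's documented entry point; the 'chromium' style name and the target directory are assumptions for illustration, not something this commit depends on.

import os

from yapf.yapflib.yapf_api import FormatFile


def reformat_tree(root):
  # Walk the tree and rewrite every Python source file in place with yapf.
  for dirpath, _, filenames in os.walk(root):
    for name in filenames:
      if not name.endswith('.py'):
        continue
      path = os.path.join(dirpath, name)
      # in_place=True rewrites the file on disk; `changed` reports whether
      # yapf modified anything.
      _, _, changed = FormatFile(path, style_config='chromium', in_place=True)
      if changed:
        print('reformatted %s' % path)


if __name__ == '__main__':
  reformat_tree('automation/server')

Unlike the pyformat loop above, this sketch does not strip shebangs or trailing commas; those were pyformat-specific flags.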
Diffstat (limited to 'automation/server')
-rw-r--r--   automation/server/__init__.py                1
-rw-r--r--   automation/server/job_executer.py           48
-rw-r--r--   automation/server/job_group_manager.py      12
-rw-r--r--   automation/server/job_manager.py            13
-rw-r--r--   automation/server/machine_manager.py         5
-rwxr-xr-x   automation/server/machine_manager_test.py    8
-rw-r--r--   automation/server/monitor/__init__.py        1
-rw-r--r--   automation/server/monitor/dashboard.py      50
-rwxr-xr-x   automation/server/monitor/manage.py          4
-rw-r--r--   automation/server/monitor/settings.py        6
-rw-r--r--   automation/server/monitor/urls.py           18
-rwxr-xr-x   automation/server/server.py                 40
-rwxr-xr-x   automation/server/server_test.py             7
13 files changed, 99 insertions, 114 deletions
diff --git a/automation/server/__init__.py b/automation/server/__init__.py
index e69de29b..8b137891 100644
--- a/automation/server/__init__.py
+++ b/automation/server/__init__.py
@@ -0,0 +1 @@
+
diff --git a/automation/server/job_executer.py b/automation/server/job_executer.py
index 8f48ca08..30b59463 100644
--- a/automation/server/job_executer.py
+++ b/automation/server/job_executer.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python
-#
# Copyright 2010 Google Inc. All Rights Reserved.
#
@@ -15,9 +13,10 @@ from automation.common.command_executer import CommandTerminator
class JobExecuter(threading.Thread):
+
def __init__(self, job_to_execute, machines, listeners):
threading.Thread.__init__(self)
-
+
assert machines
self.job = job_to_execute
@@ -25,13 +24,13 @@ class JobExecuter(threading.Thread):
self.machines = machines
# Set Thread name.
- self.name = "%s-%s" % (self.__class__.__name__, self.job.id)
+ self.name = '%s-%s' % (self.__class__.__name__, self.job.id)
self._logger = logging.getLogger(self.__class__.__name__)
self._executer = LoggingCommandExecuter(self.job.dry_run)
self._terminator = CommandTerminator()
- def _RunRemotely(self, command, fail_msg, command_timeout=1*60*60):
+ def _RunRemotely(self, command, fail_msg, command_timeout=1 * 60 * 60):
exit_code = self._executer.RunCommand(command,
self.job.primary_machine.hostname,
self.job.primary_machine.username,
@@ -40,7 +39,7 @@ class JobExecuter(threading.Thread):
if exit_code:
raise job.JobFailure(fail_msg, exit_code)
- def _RunLocally(self, command, fail_msg, command_timeout=1*60*60):
+ def _RunLocally(self, command, fail_msg, command_timeout=1 * 60 * 60):
exit_code = self._executer.RunCommand(command,
command_terminator=self._terminator,
command_timeout=command_timeout)
@@ -52,22 +51,20 @@ class JobExecuter(threading.Thread):
def CleanUpWorkDir(self):
self._logger.debug('Cleaning up %r work directory.', self.job)
- self._RunRemotely(
- cmd.RmTree(self.job.work_dir), "Cleanup workdir failed.")
+ self._RunRemotely(cmd.RmTree(self.job.work_dir), 'Cleanup workdir failed.')
def CleanUpHomeDir(self):
self._logger.debug('Cleaning up %r home directory.', self.job)
- self._RunLocally(
- cmd.RmTree(self.job.home_dir), "Cleanup homedir failed.")
+ self._RunLocally(cmd.RmTree(self.job.home_dir), 'Cleanup homedir failed.')
def _PrepareRuntimeEnvironment(self):
self._RunRemotely(
cmd.MakeDir(self.job.work_dir, self.job.logs_dir, self.job.results_dir),
- "Creating new job directory failed.")
+ 'Creating new job directory failed.')
# The log directory is ready, so we can prepare to log command's output.
- self._executer.OpenLog(
- os.path.join(self.job.logs_dir, self.job.log_filename_prefix))
+ self._executer.OpenLog(os.path.join(self.job.logs_dir,
+ self.job.log_filename_prefix))
def _SatisfyFolderDependencies(self):
for dependency in self.job.folder_dependencies:
@@ -79,18 +76,21 @@ class JobExecuter(threading.Thread):
# No need to make a copy, just symlink it
self._RunRemotely(
cmd.MakeSymlink(from_folder, to_folder),
- "Failed to create symlink to required directory.")
+ 'Failed to create symlink to required directory.')
else:
self._RunRemotely(
- cmd.RemoteCopyFrom(from_machine.hostname, from_folder, to_folder,
+ cmd.RemoteCopyFrom(from_machine.hostname,
+ from_folder,
+ to_folder,
username=from_machine.username),
- "Failed to copy required files.")
+ 'Failed to copy required files.')
def _LaunchJobCommand(self):
command = self.job.GetCommand()
- self._RunRemotely("%s; %s" % ("PS1=. TERM=linux source ~/.bashrc",
- cmd.Wrapper(command, cwd=self.job.work_dir)),
+ self._RunRemotely('%s; %s' % ('PS1=. TERM=linux source ~/.bashrc',
+ cmd.Wrapper(command,
+ cwd=self.job.work_dir)),
"Command failed to execute: '%s'." % command,
self.job.timeout)
@@ -101,14 +101,13 @@ class JobExecuter(threading.Thread):
self.job.results_dir,
self.job.home_dir,
username=self.job.primary_machine.username),
- "Failed to copy results.")
+ 'Failed to copy results.')
def run(self):
self.job.status = job.STATUS_SETUP
self.job.machines = self.machines
- self._logger.debug(
- "Executing %r on %r in directory %s.",
- self.job, self.job.primary_machine.hostname, self.job.work_dir)
+ self._logger.debug('Executing %r on %r in directory %s.', self.job,
+ self.job.primary_machine.hostname, self.job.work_dir)
try:
self.CleanUpWorkDir()
@@ -127,10 +126,9 @@ class JobExecuter(threading.Thread):
# If we get here, the job succeeded.
self.job.status = job.STATUS_SUCCEEDED
except job.JobFailure as ex:
- self._logger.error(
- "Job failed. Exit code %s. %s", ex.exit_code, ex)
+ self._logger.error('Job failed. Exit code %s. %s', ex.exit_code, ex)
if self._terminator.IsTerminated():
- self._logger.info("%r was killed", self.job)
+ self._logger.info('%r was killed', self.job)
self.job.status = job.STATUS_FAILED
diff --git a/automation/server/job_group_manager.py b/automation/server/job_group_manager.py
index 1a0b1b08..d66f5e07 100644
--- a/automation/server/job_group_manager.py
+++ b/automation/server/job_group_manager.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python
-#
# Copyright 2010 Google Inc. All Rights Reserved.
#
@@ -16,6 +14,7 @@ from automation.server.job_manager import IdProducerPolicy
class JobGroupManager(object):
+
def __init__(self, job_manager):
self.all_job_groups = []
@@ -27,7 +26,7 @@ class JobGroupManager(object):
self._id_producer = IdProducerPolicy()
self._id_producer.Initialize(job_group.JobGroup.HOMEDIR_PREFIX,
- "job-group-(?P<id>\d+)")
+ 'job-group-(?P<id>\d+)')
self._logger = logging.getLogger(self.__class__.__name__)
@@ -49,9 +48,8 @@ class JobGroupManager(object):
self._logger.debug('Creating runtime environment for %r.', group)
- CommandExecuter().RunCommand(
- cmd.Chain(cmd.RmTree(group.home_dir),
- cmd.MakeDir(group.home_dir)))
+ CommandExecuter().RunCommand(cmd.Chain(
+ cmd.RmTree(group.home_dir), cmd.MakeDir(group.home_dir)))
with self._lock:
self.all_job_groups.append(group)
@@ -61,7 +59,7 @@ class JobGroupManager(object):
group.status = job_group.STATUS_EXECUTING
- self._logger.info("Added %r to queue.", group)
+ self._logger.info('Added %r to queue.', group)
return group.id
diff --git a/automation/server/job_manager.py b/automation/server/job_manager.py
index de5bc47c..7a65b918 100644
--- a/automation/server/job_manager.py
+++ b/automation/server/job_manager.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python
-#
# Copyright 2010 Google Inc. All Rights Reserved.
#
@@ -60,6 +58,7 @@ class IdProducerPolicy(object):
class JobManager(threading.Thread):
+
def __init__(self, machine_manager):
threading.Thread.__init__(self, name=self.__class__.__name__)
self.all_jobs = []
@@ -81,14 +80,14 @@ class JobManager(threading.Thread):
self._logger = logging.getLogger(self.__class__.__name__)
def StartJobManager(self):
- self._logger.info("Starting...")
+ self._logger.info('Starting...')
with self._lock:
self.start()
self._jobs_available.notifyAll()
def StopJobManager(self):
- self._logger.info("Shutdown request received.")
+ self._logger.info('Shutdown request received.')
with self._lock:
for job_ in self.all_jobs:
@@ -117,7 +116,7 @@ class JobManager(threading.Thread):
return None
def _KillJob(self, job_id):
- self._logger.info("Killing [Job: %d].", job_id)
+ self._logger.info('Killing [Job: %d].', job_id)
if job_id in self.job_executer_mapping:
self.job_executer_mapping[job_id].Kill()
@@ -165,7 +164,7 @@ class JobManager(threading.Thread):
@logger.HandleUncaughtExceptions
def run(self):
- self._logger.info("Started.")
+ self._logger.info('Started.')
while not self._exit_request:
with self._lock:
@@ -192,4 +191,4 @@ class JobManager(threading.Thread):
executer.start()
self.job_executer_mapping[ready_job.id] = executer
- self._logger.info("Stopped.")
+ self._logger.info('Stopped.')
diff --git a/automation/server/machine_manager.py b/automation/server/machine_manager.py
index b54f19d4..b7186077 100644
--- a/automation/server/machine_manager.py
+++ b/automation/server/machine_manager.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python
-#
# Copyright 2010 Google Inc. All Rights Reserved.
__author__ = 'asharif@google.com (Ahmad Sharif)'
@@ -41,7 +39,8 @@ class MachineManager(object):
mach = min(available_pool, key=uses)
if mach_spec.preferred_machines:
- preferred_pool = [m for m in available_pool
+ preferred_pool = [m
+ for m in available_pool
if m.hostname in mach_spec.preferred_machines]
if preferred_pool:
mach = min(preferred_pool, key=uses)
diff --git a/automation/server/machine_manager_test.py b/automation/server/machine_manager_test.py
index ebdaea5f..67fdcc2b 100755
--- a/automation/server/machine_manager_test.py
+++ b/automation/server/machine_manager_test.py
@@ -2,7 +2,7 @@
#
# Copyright 2010 Google Inc. All Rights Reserved.
-__author__ = "asharif@google.com (Ahmad Sharif)"
+__author__ = 'asharif@google.com (Ahmad Sharif)'
import unittest
from automation.common import machine
@@ -18,15 +18,15 @@ class MachineManagerTest(unittest.TestCase):
print self.machine_manager
def testGetLinuxBox(self):
- mach_spec_list = [machine.MachineSpecification(os="linux")]
+ mach_spec_list = [machine.MachineSpecification(os='linux')]
machines = self.machine_manager.GetMachines(mach_spec_list)
self.assertTrue(machines)
def testGetChromeOSBox(self):
- mach_spec_list = [machine.MachineSpecification(os="chromeos")]
+ mach_spec_list = [machine.MachineSpecification(os='chromeos')]
machines = self.machine_manager.GetMachines(mach_spec_list)
self.assertTrue(machines)
-if __name__ == "__main__":
+if __name__ == '__main__':
unittest.main()
diff --git a/automation/server/monitor/__init__.py b/automation/server/monitor/__init__.py
index e69de29b..8b137891 100644
--- a/automation/server/monitor/__init__.py
+++ b/automation/server/monitor/__init__.py
@@ -0,0 +1 @@
+
diff --git a/automation/server/monitor/dashboard.py b/automation/server/monitor/dashboard.py
index 33d7c3d7..f6befed8 100644
--- a/automation/server/monitor/dashboard.py
+++ b/automation/server/monitor/dashboard.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python
-#
# Copyright 2011 Google Inc. All Rights Reserved.
#
@@ -19,7 +17,6 @@ from django.shortcuts import render_to_response
from django.template import Context
from django.views import static
-
Link = namedtuple('Link', 'href name')
@@ -29,8 +26,8 @@ def GetServerConnection():
def MakeDefaultContext(*args):
context = Context({'links': [
- Link('/job-group', 'Job Groups'),
- Link('/machine', 'Machines')]})
+ Link('/job-group', 'Job Groups'), Link('/machine', 'Machines')
+ ]})
for arg in args:
context.update(arg)
@@ -39,6 +36,7 @@ def MakeDefaultContext(*args):
class JobInfo(object):
+
def __init__(self, job_id):
self._job = pickle.loads(GetServerConnection().GetJob(job_id))
@@ -60,12 +58,9 @@ class JobInfo(object):
commands = enumerate(job.PrettyFormatCommand().split('\n'), start=1)
- return {'text': [('Label', job.label),
- ('Directory', job.work_dir)],
- 'link': [('Group', group),
- ('Predecessors', predecessors),
- ('Successors', successors),
- ('Machines', machines),
+ return {'text': [('Label', job.label), ('Directory', job.work_dir)],
+ 'link': [('Group', group), ('Predecessors', predecessors),
+ ('Successors', successors), ('Machines', machines),
('Logs', logs)],
'code': [('Command', commands)]}
@@ -77,8 +72,8 @@ class JobInfo(object):
for evlog in self._job.timeline.GetTransitionEventHistory()]
def GetLog(self):
- log_path = os.path.join(
- self._job.logs_dir, '%s.gz' % self._job.log_filename_prefix)
+ log_path = os.path.join(self._job.logs_dir,
+ '%s.gz' % self._job.log_filename_prefix)
try:
log = gzip.open(log_path, 'r')
@@ -104,9 +99,10 @@ class JobInfo(object):
class JobGroupInfo(object):
+
def __init__(self, job_group_id):
- self._job_group = pickle.loads(
- GetServerConnection().GetJobGroup(job_group_id))
+ self._job_group = pickle.loads(GetServerConnection().GetJobGroup(
+ job_group_id))
def GetAttributes(self):
group = self._job_group
@@ -159,9 +155,9 @@ class JobGroupInfo(object):
class JobGroupListInfo(object):
+
def __init__(self):
- self._all_job_groups = pickle.loads(
- GetServerConnection().GetAllJobGroups())
+ self._all_job_groups = pickle.loads(GetServerConnection().GetAllJobGroups())
def _GetJobGroupState(self, group):
return str(group.status)
@@ -188,7 +184,8 @@ def JobPageHandler(request, job_id):
ctx = MakeDefaultContext({
'job_id': job_id,
'attributes': job.GetAttributes(),
- 'timeline': job.GetTimeline()})
+ 'timeline': job.GetTimeline()
+ })
return render_to_response('job.html', ctx)
@@ -196,9 +193,7 @@ def JobPageHandler(request, job_id):
def LogPageHandler(request, job_id):
job = JobInfo(int(job_id))
- ctx = MakeDefaultContext({
- 'job_id': job_id,
- 'log_lines': job.GetLog()})
+ ctx = MakeDefaultContext({'job_id': job_id, 'log_lines': job.GetLog()})
return render_to_response('job_log.html', ctx)
@@ -210,7 +205,8 @@ def JobGroupPageHandler(request, job_group_id):
'group_id': job_group_id,
'attributes': group.GetAttributes(),
'job_list': group.GetJobList(),
- 'reports': group.GetReportList()})
+ 'reports': group.GetReportList()
+ })
return render_to_response('job_group.html', ctx)
@@ -218,8 +214,10 @@ def JobGroupPageHandler(request, job_group_id):
def JobGroupFilesPageHandler(request, job_group_id, path):
group = JobGroupInfo(int(job_group_id))
- return static.serve(
- request, path, document_root=group.GetHomeDirectory(), show_indexes=True)
+ return static.serve(request,
+ path,
+ document_root=group.GetHomeDirectory(),
+ show_indexes=True)
class FilterJobGroupsForm(forms.Form):
@@ -245,9 +243,7 @@ def JobGroupListPageHandler(request):
else:
form = FilterJobGroupsForm({'initial': '*'})
- ctx = MakeDefaultContext({
- 'filter': form,
- 'groups': group_list})
+ ctx = MakeDefaultContext({'filter': form, 'groups': group_list})
return render_to_response('job_group_list.html', ctx)
diff --git a/automation/server/monitor/manage.py b/automation/server/monitor/manage.py
index 1733753c..57deb5c2 100755
--- a/automation/server/monitor/manage.py
+++ b/automation/server/monitor/manage.py
@@ -8,7 +8,7 @@ __author__ = 'kbaclawski@google.com (Krystian Baclawski)'
from django.core.management import execute_manager
try:
- import settings # Assumed to be in the same directory.
+ import settings # Assumed to be in the same directory.
except ImportError:
import sys
@@ -16,5 +16,5 @@ except ImportError:
'containing %r.' % __file__)
sys.exit(1)
-if __name__ == "__main__":
+if __name__ == '__main__':
execute_manager(settings)
diff --git a/automation/server/monitor/settings.py b/automation/server/monitor/settings.py
index 9048da50..8cd20e35 100644
--- a/automation/server/monitor/settings.py
+++ b/automation/server/monitor/settings.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python
-#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Django settings for monitor project.
@@ -48,6 +46,4 @@ SECRET_KEY = '13p5p_4q91*8@yo+tvvt#2k&6#d_&e_zvxdpdil53k419i5sop'
ROOT_URLCONF = 'monitor.urls'
# List of locations of the template source files, in search order.
-TEMPLATE_DIRS = (
- os.path.join(ROOT_PATH, 'templates'),
-)
+TEMPLATE_DIRS = (os.path.join(ROOT_PATH, 'templates'),)
diff --git a/automation/server/monitor/urls.py b/automation/server/monitor/urls.py
index 19a1ef9c..1a6b2485 100644
--- a/automation/server/monitor/urls.py
+++ b/automation/server/monitor/urls.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python
-#
# Copyright 2011 Google Inc. All Rights Reserved.
#
@@ -9,17 +7,15 @@ from django.conf import settings
from django.conf.urls.defaults import patterns
urlpatterns = patterns(
- 'dashboard',
- (r'^job-group$', 'JobGroupListPageHandler'),
+ 'dashboard', (r'^job-group$', 'JobGroupListPageHandler'),
(r'^machine$', 'MachineListPageHandler'),
(r'^job/(?P<job_id>\d+)/log$', 'LogPageHandler'),
- (r'^job/(?P<job_id>\d+)$', 'JobPageHandler'),
- (r'^job-group/(?P<job_group_id>\d+)/files/(?P<path>.*)$',
- 'JobGroupFilesPageHandler'),
+ (r'^job/(?P<job_id>\d+)$', 'JobPageHandler'), (
+ r'^job-group/(?P<job_group_id>\d+)/files/(?P<path>.*)$',
+ 'JobGroupFilesPageHandler'),
(r'^job-group/(?P<job_group_id>\d+)$', 'JobGroupPageHandler'),
(r'^$', 'DefaultPageHandler'))
-urlpatterns += patterns(
- '',
- (r'^static/(?P<path>.*)$', 'django.views.static.serve',
- {'document_root': settings.MEDIA_ROOT}))
+urlpatterns += patterns('',
+ (r'^static/(?P<path>.*)$', 'django.views.static.serve',
+ {'document_root': settings.MEDIA_ROOT}))
diff --git a/automation/server/server.py b/automation/server/server.py
index e9243cbb..f02a1d0f 100755
--- a/automation/server/server.py
+++ b/automation/server/server.py
@@ -38,7 +38,7 @@ class Server(object):
def ExecuteJobGroup(self, job_group, dry_run=False):
job_group = pickle.loads(job_group)
- self._logger.info("Received ExecuteJobGroup(%r, dry_run=%s) request.",
+ self._logger.info('Received ExecuteJobGroup(%r, dry_run=%s) request.',
job_group, dry_run)
for job in job_group.jobs:
@@ -46,25 +46,25 @@ class Server(object):
return self.job_group_manager.AddJobGroup(job_group)
def GetAllJobGroups(self):
- self._logger.info("Received GetAllJobGroups() request.")
+ self._logger.info('Received GetAllJobGroups() request.')
return pickle.dumps(self.job_group_manager.GetAllJobGroups())
def KillJobGroup(self, job_group_id):
- self._logger.info("Received KillJobGroup(%d) request.", job_group_id)
+ self._logger.info('Received KillJobGroup(%d) request.', job_group_id)
self.job_group_manager.KillJobGroup(pickle.loads(job_group_id))
def GetJobGroup(self, job_group_id):
- self._logger.info("Received GetJobGroup(%d) request.", job_group_id)
+ self._logger.info('Received GetJobGroup(%d) request.', job_group_id)
return pickle.dumps(self.job_group_manager.GetJobGroup(job_group_id))
def GetJob(self, job_id):
- self._logger.info("Received GetJob(%d) request.", job_id)
+ self._logger.info('Received GetJob(%d) request.', job_id)
return pickle.dumps(self.job_manager.GetJob(job_id))
def GetMachineList(self):
- self._logger.info("Received GetMachineList() request.")
+ self._logger.info('Received GetMachineList() request.')
return pickle.dumps(self.job_manager.machine_manager.GetMachineList())
@@ -79,18 +79,18 @@ class Server(object):
def GetServerOptions():
"""Get server's settings from command line options."""
parser = optparse.OptionParser()
- parser.add_option("-m",
- "--machines-file",
- dest="machines_file",
- help="The location of the file "
- "containing the machines database",
+ parser.add_option('-m',
+ '--machines-file',
+ dest='machines_file',
+ help='The location of the file '
+ 'containing the machines database',
default=machine_manager.DEFAULT_MACHINES_FILE)
- parser.add_option("-n",
- "--dry-run",
- dest="dry_run",
- help="Start the server in dry-run mode, where jobs will "
- "not actually be executed.",
- action="store_true",
+ parser.add_option('-n',
+ '--dry-run',
+ dest='dry_run',
+ help='Start the server in dry-run mode, where jobs will '
+ 'not actually be executed.',
+ action='store_true',
default=False)
return parser.parse_args()[0]
@@ -110,7 +110,9 @@ def Main():
try:
xmlserver = SimpleXMLRPCServer(
- ("localhost", 8000), allow_none=True, logRequests=False)
+ ('localhost', 8000),
+ allow_none=True,
+ logRequests=False)
xmlserver.register_instance(server)
xmlserver.serve_forever()
except Exception as ex:
@@ -119,5 +121,5 @@ def Main():
sys.exit(1)
-if __name__ == "__main__":
+if __name__ == '__main__':
Main()
diff --git a/automation/server/server_test.py b/automation/server/server_test.py
index c2e4b0ae..bcf1b9f5 100755
--- a/automation/server/server_test.py
+++ b/automation/server/server_test.py
@@ -1,27 +1,26 @@
#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
-
"""Machine manager unittest.
MachineManagerTest tests MachineManager.
"""
-__author__ = "asharif@google.com (Ahmad Sharif)"
+__author__ = 'asharif@google.com (Ahmad Sharif)'
import server
import unittest
class ServerTest(unittest.TestCase):
+
def setUp(self):
pass
-
def testGetAllJobs(self):
s = server.Server()
print s.GetAllJobs()
-if __name__ == "__main__":
+if __name__ == '__main__':
unittest.main()