Commit 81c3b0b

Fix lint errors
gnossen committed Mar 9, 2019
1 parent 307044c commit 81c3b0b
Showing 3 changed files with 19 additions and 19 deletions.
25 changes: 13 additions & 12 deletions examples/python/multiprocessing/client.py
@@ -19,45 +19,46 @@

import argparse
import atexit
-import grpc
import logging
import multiprocessing
import operator
import os
import time
import sys

+import grpc

import prime_pb2
import prime_pb2_grpc

_PROCESS_COUNT = 8
_MAXIMUM_CANDIDATE = 10000

# Each worker process initializes a single channel after forking.
# It's regrettable, but to ensure that each subprocess only has to instantiate
# a single channel to be reused across all RPCs, we use globals.
_worker_channel_singleton = None
_worker_stub_singleton = None

_LOGGER = logging.getLogger(__name__)


+def _shutdown_worker():
+    _LOGGER.info('Shutting worker process down.')
+    if _worker_channel_singleton is not None:
+        _worker_channel_singleton.stop()


def _initialize_worker(server_address):
-    global _worker_channel_singleton
-    global _worker_stub_singleton
+    global _worker_channel_singleton  # pylint: disable=global-statement
+    global _worker_stub_singleton  # pylint: disable=global-statement
    _LOGGER.info('Initializing worker process.')
    _worker_channel_singleton = grpc.insecure_channel(server_address)
    _worker_stub_singleton = prime_pb2_grpc.PrimeCheckerStub(
        _worker_channel_singleton)
    atexit.register(_shutdown_worker)


-def _shutdown_worker():
-    _LOGGER.info('Shutting worker process down.')
-    if _worker_channel_singleton is not None:
-        _worker_channel_singleton.stop()


def _run_worker_query(primality_candidate):
-    _LOGGER.info('Checking primality of {}.'.format(primality_candidate))
+    _LOGGER.info('Checking primality of %s.', primality_candidate)
    return _worker_stub_singleton.check(
        prime_pb2.PrimeCandidate(candidate=primality_candidate))
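The client changes above are routine pylint cleanup: `import grpc` moves into its own third-party block (which pylint reports as wrong-import-order), the deliberate module-level singletons get explicit `# pylint: disable=global-statement` markers, and the `_LOGGER.info` call switches from `str.format` to lazy `%s` arguments, the style pylint's logging-format-interpolation (W1202) warning asks for. A standalone sketch of that last rule, using an illustrative value rather than anything from the example:

# Sketch only: the logging style pylint's logging-format-interpolation (W1202)
# warning steers toward. The value below is illustrative.
import logging

logging.basicConfig(level=logging.INFO)
_LOGGER = logging.getLogger(__name__)

candidate = 7919

# Flagged: the message string is built eagerly, before logging decides
# whether the record will be emitted at all.
_LOGGER.info('Checking primality of {}.'.format(candidate))

# Preferred: pass the value as an argument; the %-interpolation is deferred
# until the record is actually emitted, and the message template stays
# constant, which helps log aggregation.
_LOGGER.info('Checking primality of %s.', candidate)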
8 changes: 4 additions & 4 deletions examples/python/multiprocessing/server.py
@@ -20,15 +20,15 @@

from concurrent import futures
import contextlib
import datetime
-import grpc
import logging
import math
import multiprocessing
import os
import time
import socket
import sys

+import grpc

import prime_pb2
import prime_pb2_grpc

@@ -50,7 +50,7 @@ def is_prime(n):

class PrimeChecker(prime_pb2_grpc.PrimeCheckerServicer):

    def check(self, request, context):
-        _LOGGER.info('Determining primality of {}'.format(request.candidate))
+        _LOGGER.info('Determining primality of %s', request.candidate)
        return prime_pb2.Primality(isPrime=is_prime(request.candidate))

@@ -99,7 +99,7 @@ def _reserve_port():

def main():
    with _reserve_port() as port:
        bind_address = '[::]:{}'.format(port)
-        _LOGGER.info("Binding to '{}'".format(bind_address))
+        _LOGGER.info("Binding to '%s'", bind_address)
        sys.stdout.flush()
        workers = []
        for _ in range(_PROCESS_COUNT):
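The `main` hunk builds its bind address from a port obtained via `with _reserve_port() as port:`, a helper whose body is not part of this diff. For orientation only, a minimal sketch of how such an SO_REUSEPORT-based reservation can look; this is an assumption about the helper's shape, not the example's actual code:

# Assumption / sketch only: a port-reserving context manager in the spirit of
# the _reserve_port helper referenced above. Not copied from the example.
import contextlib
import socket


@contextlib.contextmanager
def _reserve_port():
    """Bind an OS-assigned port with SO_REUSEPORT so forked servers can share it."""
    sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    # SO_REUSEPORT is available on Linux and most BSDs, not on Windows.
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    sock.bind(('', 0))
    try:
        yield sock.getsockname()[1]  # The port number the OS picked.
    finally:
        sock.close()


if __name__ == '__main__':
    with _reserve_port() as port:
        print('[::]:{}'.format(port))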
5 changes: 2 additions & 3 deletions (test for the multiprocessing example)
@@ -13,14 +13,13 @@

# limitations under the License.
"""Test for multiprocessing example."""

-import datetime
+import ast
import logging
import math
import os
import re
import subprocess
import tempfile
import time
import unittest

_BINARY_DIR = os.path.realpath(

@@ -63,7 +62,7 @@ def test_multiprocessing_example(self):

        client_process.wait()
        server_process.terminate()
        client_stdout.seek(0)
-        results = eval(client_stdout.read().strip().split('\n')[-1])
+        results = ast.literal_eval(client_stdout.read().strip().split('\n')[-1])
        values = tuple(result[0] for result in results)
        self.assertSequenceEqual(range(2, 10000), values)
        for result in results:
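The other notable change in the test replaces `eval` on the client's printed output with `ast.literal_eval` (pylint: eval-used, W0123). `ast.literal_eval` parses only Python literals — strings, numbers, tuples, lists, dicts, sets, booleans, None — and raises on anything else, so a stray line in the captured stdout cannot execute code. A standalone sketch with a made-up line shaped like the one the test parses:

# Sketch only: parsing a literal tuple of (candidate, is_prime) pairs the way
# the test does after this change. The input line here is made up.
import ast

line = "((2, True), (3, True), (4, False), (5, True))"
results = ast.literal_eval(line)
print(results[0])  # (2, True)

# Anything that is not a plain literal is rejected instead of executed.
try:
    ast.literal_eval("__import__('os').getcwd()")
except ValueError as exc:
    print('rejected:', exc)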
