Commit b2dde0a9
authored 2022-07-14 15:57:19 +0800 by Lyu Kui
add async
1 parent d0995b37
Showing 2 changed files with 75 additions and 14 deletions
async_test/locustfile.py
async_test/server2.py
async_test/locustfile.py @ b2dde0a
import os
import time
import random

from locust import HttpUser, task, between, constant, tag

base_dir = '/home/lk/MyProject/BMW_F3OCR/数据集/文件分类/营业执照'
file_path_list = [os.path.join(base_dir, file_name) for file_name in os.listdir(base_dir)]


class QuickstartUser(HttpUser):
    # wait_time = between(1, 5)

    @tag('sync')
...
@@ -18,6 +25,13 @@ class QuickstartUser(HttpUser):
    @tag('sync_classification')
    @task
    def sync_classification(self):
-       img_path = '/home/lk/MyProject/BMW_F3OCR/数据集/文件分类/营业执照/授信资料-43.jpg'
+       img_path = random.choice(file_path_list)
        files = [('image', ('', open(img_path, 'rb'), ''))]
        self.client.post("/sync_classification", files=files)

    @tag('async_classification')
    @task
    def async_classification(self):
        img_path = random.choice(file_path_list)
        files = [('image', ('', open(img_path, 'rb'), ''))]
        self.client.post("/async_classification", files=files)
...
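For context, each files entry in the Locust tasks above follows the requests-style multipart tuple (field_name, (filename, file_object, content_type)); the empty strings simply leave the filename and content type unset. A minimal sketch of the same upload sent directly with requests, assuming the server below is listening on localhost:6699 and that sample.jpg is a hypothetical local image:

import requests  # assumption: plain requests for illustration; Locust's HttpUser wraps a requests session

img_path = 'sample.jpg'  # hypothetical local image path
with open(img_path, 'rb') as fh:
    # Each entry is (field_name, (filename, file_object, content_type));
    # filename and content_type are left empty, matching the Locust tasks above.
    files = [('image', ('', fh, ''))]
    resp = requests.post('http://localhost:6699/async_classification', files=files)
    print(resp.status_code, resp.text)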
async_test/server2.py @ b2dde0a
...
@@ -29,15 +29,68 @@ tf_serving_settings = {
app.config.update(tf_serving_settings)


-@app.post("/sync_classification")
-async def sync_handler(request):
# Synchronous version 01
# @app.post("/sync_classification")
# async def sync_handler(request):
#     image = request.files.get("image")
#     img_array = np.frombuffer(image.body, np.uint8)
#     image = cv2.imdecode(img_array, cv2.IMREAD_COLOR)
#     input_images = classifier.preprocess_input(image)
#
#     options = [('grpc.max_send_message_length', 1000 * 1024 * 1024),
#                ('grpc.max_receive_message_length', 1000 * 1024 * 1024)]
#     with grpc.insecure_channel('localhost:8500', options=options) as channel:
#         stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
#         # See prediction_service.proto for gRPC request/response details.
#         request = predict_pb2.PredictRequest()
#         request.model_spec.name = classifier.model_name
#         request.model_spec.signature_name = classifier.signature_name
#
#         request.inputs['input_1'].CopyFrom(tf.make_tensor_proto(input_images))
#         result = stub.Predict(request, timeout=100.0)  # 100 secs timeout
#         outputs = tf.make_ndarray(result.outputs['output'])
#
#         res = classifier.reprocess_output(outputs)
#         return json(res)

# Synchronous version 02
# @app.post("/sync_classification")
# async def sync_handler(request):
#     image = request.files.get("image")
#     img_array = np.frombuffer(image.body, np.uint8)
#     image = cv2.imdecode(img_array, cv2.IMREAD_COLOR)
#     input_images = classifier.preprocess_input(image)
#
#     # See prediction_service.proto for gRPC request/response details.
#     request = predict_pb2.PredictRequest()
#     request.model_spec.name = classifier.model_name
#     request.model_spec.signature_name = classifier.signature_name
#     stub = getattr(app, classifier.server_name)
#
#     request.inputs['input_1'].CopyFrom(tf.make_tensor_proto(input_images))
#     result = stub.Predict(request, timeout=100.0)  # 100 secs timeout
#     outputs = tf.make_ndarray(result.outputs['output'])
#
#     res = classifier.reprocess_output(outputs)
#     return json(res)
#
# @app.listener("before_server_start")
# async def set_grpc_channel(app, loop):
#     for server_name, server_settings in app.config['servers'].items():
#         channel = grpc.insecure_channel(
#             '{0}:{1}'.format(server_settings['host'], server_settings['port']),
#             options=server_settings.get('options'))
#         stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
#         setattr(app, server_name, stub)

# Asynchronous version
@app.post("/async_classification")
async def async_handler(request):
    image = request.files.get("image")
    img_array = np.frombuffer(image.body, np.uint8)
    image = cv2.imdecode(img_array, cv2.IMREAD_COLOR)
    input_images = classifier.preprocess_input(image)
    # print(type(image))

    # See prediction_service.proto for gRPC request/response details.
    request = predict_pb2.PredictRequest()
    request.model_spec.name = classifier.model_name
...
@@ -45,23 +98,17 @@ async def sync_handler(request):
    stub = getattr(app, classifier.server_name)

    request.inputs['input_1'].CopyFrom(tf.make_tensor_proto(input_images))
-   result = stub.Predict(request, 100.0)  # 100 secs timeout
+   result = await stub.Predict(request, timeout=100.0)  # 100 secs timeout
    outputs = tf.make_ndarray(result.outputs['output'])

    res = classifier.reprocess_output(outputs)
    return json(res)


# @app.get("/async")
# async def async_handler(request):
#     await asyncio.sleep(2)
#     return json({'code': 1})


@app.listener("before_server_start")
async def set_grpc_channel(app, loop):
    for server_name, server_settings in app.config['servers'].items():
-       channel = grpc.insecure_channel(
+       channel = grpc.aio.insecure_channel(
            '{0}:{1}'.format(server_settings['host'], server_settings['port']),
            options=server_settings.get('options'))
        stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
...
@@ -69,4 +116,4 @@ async def set_grpc_channel(app, loop):
if __name__ == '__main__':
-   app.run(host='0.0.0.0', port=6699, workers=5)
+   app.run(host='0.0.0.0', port=6699, workers=10)
...
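The substance of the change in server2.py is swapping the blocking grpc.insecure_channel / stub.Predict(...) call for grpc.aio.insecure_channel with await stub.Predict(...), so a Sanic worker can keep serving other requests while TF Serving processes an image. A minimal standalone sketch of that async client pattern, assuming TF Serving is reachable at localhost:8500 and using placeholder model/signature names (the real values come from the classifier object in server2.py):

import asyncio
import grpc
import numpy as np
import tensorflow as tf
from tensorflow_serving.apis import predict_pb2, prediction_service_pb2_grpc

async def predict(input_images: np.ndarray) -> np.ndarray:
    # grpc.aio channels expose awaitable RPCs, unlike the blocking grpc.insecure_channel stubs
    async with grpc.aio.insecure_channel('localhost:8500') as channel:
        stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
        request = predict_pb2.PredictRequest()
        request.model_spec.name = 'classifier'                  # assumption: placeholder model name
        request.model_spec.signature_name = 'serving_default'   # assumption: placeholder signature name
        request.inputs['input_1'].CopyFrom(tf.make_tensor_proto(input_images))
        result = await stub.Predict(request, timeout=100.0)     # 100 secs timeout, as in the handler above
        return tf.make_ndarray(result.outputs['output'])

# Example invocation with a dummy batch:
# asyncio.run(predict(np.zeros((1, 224, 224, 3), dtype=np.float32)))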