Text
Make predictions on passages of text
Make text predictions via URL
You can make predictions on passages of text hosted on the web.
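The example below assumes the gRPC client has already been initialized as described on the client installation page linked in the comment. A minimal sketch of that setup, assuming the clarifai-grpc Python package and placeholder credentials ({YOUR_PAT}, {YOUR_USER_ID}, and {YOUR_APP_ID} are placeholders), might look like this:

from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc
from clarifai_grpc.grpc.api.status import status_code_pb2

# Open a gRPC channel to the Clarifai API and create the service stub.
channel = ClarifaiChannel.get_grpc_channel()
stub = service_pb2_grpc.V2Stub(channel)

# Authenticate every call with a Personal Access Token (PAT).
metadata = (("authorization", "Key " + "{YOUR_PAT}"),)

# Identify the user and app that own the model being called.
userDataObject = resources_pb2.UserAppIDSet(
    user_id="{YOUR_USER_ID}",
    app_id="{YOUR_APP_ID}"
)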
# Insert here the initialization code as outlined on this page:
# https://docs.clarifai.com/api-guide/api-overview/api-clients#client-installation-instructions

post_model_outputs_response = stub.PostModelOutputs(
    service_pb2.PostModelOutputsRequest(
        user_app_id=userDataObject,  # The userDataObject is created in the overview and is required when using a PAT
        model_id="{THE_MODEL_ID}",
        version_id="{THE_MODEL_VERSION_ID}",  # This is optional. Defaults to the latest model version.
        inputs=[
            resources_pb2.Input(
                data=resources_pb2.Data(
                    text=resources_pb2.Text(
                        url="{YOUR_TEXT_URL}"  # A publicly accessible URL pointing to the text to classify
                    )
                )
            )
        ]
    ),
    metadata=metadata
)

if post_model_outputs_response.status.code != status_code_pb2.SUCCESS:
    print("There was an error with your request!")
    print("\tCode: {}".format(post_model_outputs_response.outputs[0].status.code))
    print("\tDescription: {}".format(post_model_outputs_response.outputs[0].status.description))
    print("\tDetails: {}".format(post_model_outputs_response.outputs[0].status.details))
    raise Exception("Post model outputs failed, status: " + post_model_outputs_response.status.description)

# Since we have one input, one output will exist here.
output = post_model_outputs_response.outputs[0]

print("Predicted concepts:")
for concept in output.data.concepts:
    print("%s %.2f" % (concept.name, concept.value))
Make text predictions on local text files
Make predictions based on local text files.
# Insert here the initialization code as outlined on this page:
# https://docs.clarifai.com/api-guide/api-overview/api-clients#client-installation-instructions

# e.g., a file containing "Butchart Gardens contains over 900 varieties of plants."
with open("{YOUR_TEXT_FILE_LOCATION}", "rb") as f:
    file_bytes = f.read()

post_model_outputs_response = stub.PostModelOutputs(
    service_pb2.PostModelOutputsRequest(
        user_app_id=userDataObject,  # The userDataObject is created in the overview and is required when using a PAT
        model_id="{THE_MODEL_ID}",
        version_id="{THE_MODEL_VERSION_ID}",  # This is optional. Defaults to the latest model version.
        inputs=[
            resources_pb2.Input(
                data=resources_pb2.Data(
                    text=resources_pb2.Text(
                        raw=file_bytes  # The contents of the local text file read above
                    )
                )
            )
        ]
    ),
    metadata=metadata
)

if post_model_outputs_response.status.code != status_code_pb2.SUCCESS:
    print("There was an error with your request!")
    print("\tCode: {}".format(post_model_outputs_response.outputs[0].status.code))
    print("\tDescription: {}".format(post_model_outputs_response.outputs[0].status.description))
    print("\tDetails: {}".format(post_model_outputs_response.outputs[0].status.details))
    raise Exception("Post model outputs failed, status: " + post_model_outputs_response.status.description)

# Since we have one input, one output will exist here.
output = post_model_outputs_response.outputs[0]

print("Predicted concepts:")
for concept in output.data.concepts:
    print("%s %.2f" % (concept.name, concept.value))