Code: Select all
import pickle
# Create a pickle
small_object = list(range(0,1000))
name = 'small_file.pkl'
with open(name, 'wb') as f:
    pickle.dump(small_object, f)

### Verify integrity ###
# Loading from disk
with open(name, 'rb') as f:
    opened_object_one = pickle.load(f)
print(opened_object_one) # prints expected list
# Loading from byte stream
with open(name, 'rb') as f:
    opened_object_two = f.read()
print(pickle.loads(opened_object_two))  # prints expected list
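For reference, this is roughly how the pickle ends up in the storage account that the blob binding below reads from. This is only a sketch using the azure-storage-blob SDK; the container name 'data' and the environment variable STORAGE_CONNECTION_STRING are placeholders, not my actual setup.
Code: Select all
from azure.storage.blob import BlobServiceClient
import os

# Placeholder: connection string of the storage account (locally, the value
# behind AzureWebJobsStorage in local.settings.json)
conn_str = os.environ['STORAGE_CONNECTION_STRING']

service = BlobServiceClient.from_connection_string(conn_str)
# Placeholder container name; the blob name matches the pickle written above
blob_client = service.get_blob_client(container='data', blob='small_file.pkl')

with open('small_file.pkl', 'rb') as f:
    blob_client.upload_blob(f, overwrite=True)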
Azure Function code
Code: Select all
import logging
import pickle
import azure.functions as func

app = func.FunctionApp()
@app.route(route="http_trigger", auth_level=func.AuthLevel.FUNCTION)
@app.blob_input(
    arg_name='client',
    path='/small_file.pkl',
    connection='AzureWebJobsStorage'
)
def http_trigger(req: func.HttpRequest, client) -> func.HttpResponse:
    # `client` is of type azure.functions.blob.InputStream
    package_bytes = client.read()  # returns bytes
    client.close()
    small_object = pickle.loads(package_bytes)
    # Minimal response so the declared HttpResponse return type is satisfied
    return func.HttpResponse(f"Unpickled {len(small_object)} items.")
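As an extra sanity check outside the Functions runtime, the blob can also be pulled back down and unpickled directly with the SDK. Again only a sketch, with the same placeholder container name and connection-string variable as above:
Code: Select all
from azure.storage.blob import BlobServiceClient
import os
import pickle

conn_str = os.environ['STORAGE_CONNECTION_STRING']  # placeholder, see above
service = BlobServiceClient.from_connection_string(conn_str)
blob_client = service.get_blob_client(container='data', blob='small_file.pkl')

# download_blob() returns a StorageStreamDownloader; readall() gives raw bytes
package_bytes = blob_client.download_blob().readall()
print(pickle.loads(package_bytes))  # should print the same list as above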
I am on Windows 10 with Python 3.10.5.