import json
import os
from typing import Optional

import boto3

# Credentials and region come from the environment; the lowercase names are
# checked first, falling back to the standard AWS_* variables.
ACCESS_ID = os.environ.get('accessKeyId') or os.environ.get('AWS_ACCESS_KEY_ID')
ACCESS_KEY = os.environ.get('secretAccessKey') or os.environ.get('AWS_SECRET_ACCESS_KEY')
REGION = os.environ.get('region') or os.environ.get('AWS_REGION')

lambda_client = boto3.client('lambda',
                             region_name=REGION,
                             aws_access_key_id=ACCESS_ID,
                             aws_secret_access_key=ACCESS_KEY)


def run_lambda(body, function_name, invocation_type='RequestResponse'):
    """Invoke a Lambda function and return its (statusCode, body).

    The target functions are expected to return an API Gateway-style payload:
    {'statusCode': int, 'body': dict or JSON string}.
    """
    response = json.load(lambda_client.invoke(FunctionName=function_name,
                                              InvocationType=invocation_type,
                                              Payload=json.dumps(body))['Payload'])
    response_body = response['body']
    if not isinstance(response_body, dict):
        response_body = json.loads(response_body)
    return response['statusCode'], response_body


def upload_models(
        restaurant_id: int,
        mode: str = 'waste',
        shift_id: Optional[int] = None,
        what_to_load: Optional[dict] = None,
        *args,
        **kwargs,
):
    """Invoke the postModels Lambda for the given restaurant and shift.

    :param restaurant_id: id of the restaurant to load models for
    :param mode: processing mode (defaults to 'waste')
    :param shift_id: shift id, or None
    :param what_to_load: dict of the form {'od': bool, 'encoder': bool, 'decoder': bool};
        defaults to loading all three
    :return: (status_code, body), where body is a dict of the form
        {"codes": codes,
         "mode": mode,
         "ip_ports": ip_ports,
         "restaurant_id": restaurant_id,
         "availability": availability,
         "models_identifier": identifier,
         "shift": shift_id,
         "references": references,
         "models": models}
    """

    if what_to_load is None:
        what_to_load = {'od': True, 'encoder': True, 'decoder': True}

    body = {
        'mode': mode,
        'restaurant_id': restaurant_id,
        'shift_id': shift_id,
        'what_to_load': what_to_load,
    }

    status_code, r = run_lambda(body=body, function_name='postModels-fastpay-public-stack')

    return status_code, r


def predict(b64image: str,
            ip_ports: dict,
            upload: bool = True,
            patient_identifier=None,
            codes: Optional[dict] = None,
            models_identifier: Optional[str] = None,
            shift: Optional[int] = None,
            *args,
            **kwargs,
            ):
    """Invoke the getPredict Lambda on a base64-encoded image.

    `ip_ports`, `codes`, `models_identifier` and `shift` typically come from
    the payload returned by `upload_models`.
    """
    body = {
        "b64image": b64image,
        "ip_ports": ip_ports,
        "upload": upload,
        "patient_identifier": patient_identifier,
        "codes": codes,
        "models_identifier": models_identifier,
        "shift": shift,
    }

    status_code, r = run_lambda(body=body, function_name='getPredict-fastpay-public-stack')

    return status_code, r
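

# Minimal usage sketch (not part of the original module): the restaurant id and
# image path below are placeholders, and the field names assume the postModels
# Lambda returns the payload documented in upload_models().
if __name__ == '__main__':
    import base64

    status, payload = upload_models(restaurant_id=1, mode='waste', shift_id=None)
    print('upload_models status:', status)

    with open('example.jpg', 'rb') as f:  # placeholder image path
        b64image = base64.b64encode(f.read()).decode('utf-8')

    status, prediction = predict(
        b64image=b64image,
        ip_ports=payload['ip_ports'],
        codes=payload['codes'],
        models_identifier=payload['models_identifier'],
        shift=payload['shift'],
    )
    print('predict status:', status, prediction)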