Implementation of Ruff across the project (#489)
* resolving conflict

* refactor orders service to python

* add persistence to orders service with dynamodb

* orders table cloudformation output

* node-modules package-lock

* add validation and more testing

* ruff github workflow commit 1

* README build status

* Ruff - autofix errors

* ruff config

* ruff: various small fixes

* Ruff fix E722 no bare except

* search for specific table during local runs and add more tests

* avoid E712 on generators

* explicitly add routes and error handlers to carts service using blueprints rather than relying on import side effects

* explicit routes and handlers

* update valid keys

* remove unused imports and scripts

* update valid keys for order service

* remove old go scripts and make order keys consistent

* remove ttl order inherits from cart

* Update app.py and remove unused imports

* fix linter errors

* fix linter errors-too long lines

---------

Co-authored-by: Benedict Nartey-Tokoli <[email protected]>
3 people authored Sep 20, 2023
1 parent e8080b3 commit 4d601a3
Showing 34 changed files with 147 additions and 85 deletions.
8 changes: 8 additions & 0 deletions .github/workflows/ruff.yml
@@ -0,0 +1,8 @@
name: Ruff
on: [ push, pull_request ]
jobs:
  ruff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: chartboost/ruff-action@v1
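This workflow lints the whole repository on every push and pull request. For contributors who want the same check locally before pushing, a minimal sketch (assuming `ruff` is installed via `pip install ruff`; the action itself just runs Ruff's check against the repo):

```python
# Local stand-in for the CI lint step above (assumes `pip install ruff`).
import subprocess
import sys

# `ruff check .` lints the repository using the settings in pyproject.toml.
result = subprocess.run(["ruff", "check", "."])
sys.exit(result.returncode)  # a non-zero exit mirrors a failing CI run
```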
6 changes: 6 additions & 0 deletions README.md
@@ -1,7 +1,13 @@

# Retail Demo Store

A sample retail web application and workshop platform intended as an educational tool for demonstrating how AWS infrastructure and services can be used to build compelling customer experiences for eCommerce, retail, and digital marketing use-cases.

# Build Status
[![Ruff](./actions/workflows/ruff.yml/badge.svg)](./actions/workflows/ruff.yml)



**This project is intended for educational purposes only and not for production use.**

![Retail Demo Store Home Page](./workshop/images/retaildemostore-home-devices.png)
2 changes: 0 additions & 2 deletions generators/datagenerator/amplitude.py
@@ -1,10 +1,8 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0

import datagenerator
import json
import requests
import yaml

# Amplitude event support
# This follows the Amplitude V2 HTTP Bulk API spec, here:
2 changes: 1 addition & 1 deletion generators/datagenerator/file.py
@@ -21,5 +21,5 @@ def __repr__(self):
         output = f'{self.event},{self.timestamp},{self.user_id},{self.anonymous_id},{self.platform}'
         if len(self.traits) > 0:
             output += self.traits
-        output += f'\n'
+        output += '\n'
         return output
3 changes: 1 addition & 2 deletions generators/datagenerator/funnel.py
@@ -4,9 +4,8 @@
 import random
 import numpy as np
 import datetime
-import inspect
 from datagenerator.output import OutputFormatter
-from collections.abc import Mapping, Iterable
+from collections.abc import Iterable
 
 class Funnel:
     def __init__(self, timestamp, funnel, user):
4 changes: 2 additions & 2 deletions generators/datagenerator/output.py
@@ -59,7 +59,7 @@ def to_amplitude(self, config, debug=False):
                 batch.append(event)
         if len(batch) > 0:
             response = sender.send_batch(funnel.platform, batch, debug)
-            if response != None and response.status_code > 200:
+            if response is not None and response.status_code > 200:
                 print(f'Error sending to Amplitude: {response.text}')
     print(f'Processed {count} funnels...')
 
@@ -80,6 +80,6 @@ def to_segment(self, config_file, debug=False):
                 batch.append(event)
         if len(batch) > 0:
             response = sender.send_batch(funnel.platform, batch, debug)
-            if response != None and response.status_code > 200:
+            if response is not None and response.status_code > 200:
                 print(f'Error sending to Segment: {response.text}')
     print(f'Processed {count} funnels...')
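The recurring `!= None` → `is not None` edits are Ruff's E711 fix: `None` is a singleton, and `==`/`!=` dispatch to `__eq__`, which a class can override. A small illustration with a hypothetical class (not from this repo):

```python
class AlwaysEqual:
    # Overriding __eq__ makes equality comparisons against None unreliable.
    def __eq__(self, other):
        return True

obj = AlwaysEqual()
print(obj == None)       # True  - misleading; Ruff flags this as E711
print(obj is not None)   # True  - identity test, immune to __eq__ tricks
```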
2 changes: 1 addition & 1 deletion generators/datagenerator/segment.py
@@ -87,7 +87,7 @@ def send_batch(self, platform, events, debug=False):
         }
 
         key = self.config_keys[platform]
-        if key != None:
+        if key is not None:
             events_str = json.dumps(batch_events, default=lambda x: x.__dict__)
             #print(f'Batch length bytes: {len(events_str)}')
             if debug:
8 changes: 2 additions & 6 deletions generators/datagenerator/users.py
@@ -2,12 +2,8 @@
# SPDX-License-Identifier: MIT-0

import random
import datetime
import uuid
import json
import numpy as np
import gzip
import codecs
import bisect
from faker import Faker
from faker.providers import internet

@@ -130,7 +126,7 @@ def new_file(cls, filename, num_users,
 
 class User:
     def __init__(self, category_preference_personas, selectable_user, id_string=None):
-        if(id_string != None):
+        if(id_string is not None):
             self.id = id_string
         else:
             self.id = str(random.randint(1000000000, 99999999999))

@@ -196,7 +192,7 @@ def __init__(self, category_preference_personas, selectable_user, id_string=None
         ]
 
     def set_traits(self, traits):
-        if traits != None:
+        if traits is not None:
             for (k,v) in traits.items():
                 self.traits[k] = random.choice(v)
11 changes: 6 additions & 5 deletions generators/generate_interactions_personalize.py
@@ -114,8 +114,8 @@ def generate_user_items(out_users_filename, out_items_filename, in_users_filenam
                                              'promoted': 'PROMOTED'})
     # Since GENDER column requires a value for all rows, default all nulls to "Any"
     products_dataset_df['GENDER'].fillna(GENDER_ANY, inplace = True)
-    products_dataset_df.loc[products_dataset_df['PROMOTED'] == True, 'PROMOTED'] = 'Y'
-    products_dataset_df['PROMOTED'].fillna(NOT_PROMOTED, inplace = True)
+    products_dataset_df['PROMOTED'].fillna(False, inplace = True)
+    products_dataset_df['PROMOTED'] = products_dataset_df['PROMOTED'].replace({True: 'Y', False: 'N'})
     products_dataset_df.to_csv(out_items_filename, index=False)
 
     users_dataset_df = users_df[['id', 'age', 'gender']]

@@ -157,7 +157,8 @@ def generate_interactions(out_interactions_filename, users_df, products_df):
     average_product_price = int(products_df.price.mean())
     print('Average product price: ${:.2f}'.format(average_product_price))
 
-    if seconds_increment <= 0: raise AssertionError(f"Should never happen: {seconds_increment} <= 0")
+    if seconds_increment <= 0:
+        raise AssertionError(f"Should never happen: {seconds_increment} <= 0")
 
     print('Minimum interactions to generate: {}'.format(min_interactions))
     print('Starting timestamp: {} ({})'.format(next_timestamp,

@@ -275,7 +276,7 @@ def generate_interactions(out_interactions_filename, users_df, products_df):
             first_prod = user_category_to_first_prod[usercat_key]
             prods_subset_df = product_affinities_bycatgender[(category, gender)][first_prod]
 
-        if not usercat_key in user_category_to_first_prod:
+        if usercat_key not in user_category_to_first_prod:
             # If the user has not yet selected a first product for this category
             # we do it by choosing between all products for gender.
 

@@ -296,7 +297,7 @@ def generate_interactions(out_interactions_filename, users_df, products_df):
 
             user_to_product[user['id']].add(product['id'])
 
-            if not usercat_key in user_category_to_first_prod:
+            if usercat_key not in user_category_to_first_prod:
                 user_category_to_first_prod[usercat_key] = product['id']
 
             # Decide if the product the user is interacting with is discounted
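The `PROMOTED` rewrite above is the "avoid E712" change from the commit log: Ruff flags `== True`, but the usual fix (`is True`) would break pandas' element-wise semantics, so the comparison is dropped entirely in favor of `fillna` plus `replace`. A sketch of the same pattern on hypothetical data:

```python
import pandas as pd

df = pd.DataFrame({"PROMOTED": [True, None, True, None]})

# Before: df.loc[df["PROMOTED"] == True, "PROMOTED"] = "Y"   (flagged as E712)
# After: normalize nulls, then map booleans to the CSV-friendly values.
df["PROMOTED"] = df["PROMOTED"].fillna(False)
df["PROMOTED"] = df["PROMOTED"].replace({True: "Y", False: "N"})
print(df["PROMOTED"].tolist())  # ['Y', 'N', 'Y', 'N']
```

The `not usercat_key in ...` → `usercat_key not in ...` edits in the same file are the companion E713 fix: `not in` is a single membership operator and reads unambiguously.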
48 changes: 48 additions & 0 deletions pyproject.toml
@@ -0,0 +1,48 @@
[tool.ruff]
# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default.
select = ["E", "F"]

# bastil@: ignore E501 (line too long) for now - it gives too many warnings
ignore = ["E501"]

# Allow autofix for all enabled rules (when `--fix` is provided).
fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"]
unfixable = []

# Exclude a variety of commonly ignored directories.
exclude = [
    ".bzr",
    ".direnv",
    ".eggs",
    ".git",
    ".git-rewrite",
    ".hg",
    ".mypy_cache",
    ".nox",
    ".pants.d",
    ".pytype",
    ".ruff_cache",
    ".svn",
    ".tox",
    ".venv",
    "__pypackages__",
    "_build",
    "buck-out",
    "build",
    "dist",
    "node_modules",
    "venv",
]

# Same as Black.
line-length = 88

# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

# Assume Python 3.10
target-version = "py310"

[tool.ruff.mccabe]
# Unlike Flake8, default to a complexity level of 10.
max-complexity = 10
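With only pycodestyle (`E`) and Pyflakes (`F`) selected and `E501` suppressed, a hypothetical module like this shows what the config does and does not catch:

```python
import os   # F401: imported but unused - reported, and autofixable with `--fix`
import sys

value = None
if value == None:  # E711: comparison to None should use `is None` - reported
    sys.exit(1)

# A very long comment line that would normally trip the 88-character limit sails through, because E501 is ignored.
```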
13 changes: 5 additions & 8 deletions src/aws-lambda/alexa-skill-lambda/alexa-skill-lambda.py
@@ -16,15 +16,12 @@
from ask_sdk_core.skill_builder import SkillBuilder
from ask_sdk_core.dispatch_components import AbstractRequestHandler
from ask_sdk_core.dispatch_components import AbstractExceptionHandler
from ask_sdk_core.handler_input import HandlerInput

from ask_sdk_model.dialog import ElicitSlotDirective, DynamicEntitiesDirective, DelegateDirective
from ask_sdk_model.dialog_state import DialogState
from ask_sdk_model.er.dynamic import Entity, EntityValueAndSynonyms, EntityListItem, UpdateBehavior
from ask_sdk_model.slu.entityresolution import StatusCode

from ask_sdk_model.interfaces.connections import SendRequestDirective
from ask_sdk_model.ui import AskForPermissionsConsentCard

import boto3
import json
@@ -117,7 +114,7 @@ def get_cognito_user_details(handler_input):
     logger.info(f"Got user info from Cognito: {user_details}")
 
     if 'custom:profile_user_id' not in user_details:
-        logger.warning(f"Profile user has not been selected for Cognito user")
+        logger.warning("Profile user has not been selected for Cognito user")
         raise Exception("Must use default user because simulation user not selected.")
     else:
         user_details['cognito_loaded'] = True
@@ -210,7 +207,7 @@ def is_pinpoint_email_channel_enabled() -> bool:
         email_channel_response = pinpoint.get_email_channel(ApplicationId=PINPOINT_APP_ID)
     except ClientError as error:
         logger.info('Unable to find Email Channel configured for Pinpoint application: {}'.format(error))
-        return False;
+        return False
 
     email_channel_from_address = None
     email_channel_enabled = False
@@ -245,7 +242,7 @@ def send_order_confirm_email(handler_input, orders, add_images=True):
     # Specify content:
     subject = "Your order has been received!"
     heading = "Welcome,"
-    subheading = f"Your order has been placed."
+    subheading = "Your order has been placed."
     intro_text = f"""We will meet you at your pump with the following order ({order_ids}):"""
     html_intro_text = intro_text.replace('\n', '</p><p>')
 
@@ -897,12 +894,12 @@ def handle(self, handler_input):
 
         order_response = submit_order(handler_input)
         send_order_confirm_email(handler_input, [order_response], False)
-        speak_output += f"It will be ready when you arrive"
+        speak_output += "It will be ready when you arrive"
         if user_details['cognito_loaded']:
             name = user_details.get('custom:profile_first_name', '')
             speak_output += f" {name}"
 
-        speak_output += f". Hope to see you again soon."
+        speak_output += ". Hope to see you again soon."
         return (
             handler_input.response_builder
             .speak(speak_output)
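Most edits in this file drop a stray `f` prefix from strings with no placeholders - Pyflakes' F541. The prefix is harmless at runtime but promises interpolation that never happens:

```python
name = "there"
subheading = f"Your order has been placed."   # F541: no placeholders in the f-string
subheading = "Your order has been placed."    # fixed: plain string literal
greeting = f"Hello {name}"                    # fine: a real placeholder
```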
4 changes: 2 additions & 2 deletions src/aws-lambda/ivs-create-channels/ivs-create-channels.py
@@ -167,7 +167,7 @@ def create_ivs_channels(event, _):
                 Type='String',
                 Overwrite=True
             )
-        except botocore.exceptions.EndpointConnectionError as ex:
+        except botocore.exceptions.EndpointConnectionError:
             logger.error("Could not create any IVS channels - probably because IVS is not supported in region. "
                          f"Channel name: {channel_name}. Region: {ivs_client.meta.region_name}")
 
@@ -209,7 +209,7 @@ def delete_all_channels(event, _):
     """
     Deletes all IVS channels referenced in the SSM_VIDEO_CHANNEL_MAP_PARAM.
     """
-    logger.info(f"Deleting all IVS channels in stack")
+    logger.info("Deleting all IVS channels in stack")
     if is_ssm_parameter_set(SSM_VIDEO_CHANNEL_MAP_PARAM):
         video_channel_param_value = ssm_client.get_parameter(Name=SSM_VIDEO_CHANNEL_MAP_PARAM)['Parameter']['Value']
         video_channel_map = json.loads(video_channel_param_value)
@@ -494,7 +494,7 @@ def send_pickup_sms(all_orders, add_order_details=False):
     Returns:
         Nothing but sends an SMS.
     """
-    logger.info(f"Collecting phone numbers to send SMSs")
+    logger.info("Collecting phone numbers to send SMSs")
 
     phone_to_orders = defaultdict(list)
     for order in all_orders:
@@ -543,7 +543,7 @@ def remove_browser_notification_connections(user_id, connection_ids):
         UpdateExpression='DELETE connectionIds :c',
         ExpressionAttributeValues=dynamo_update_expression
     )
-    logger.info(f"Gone connections deleted")
+    logger.info("Gone connections deleted")
 
 
 def send_browser_notification(user_id, data):
@@ -338,7 +338,7 @@ def _delete_dataset_group(dataset_group_arn: str, wait_for_resources: bool = True):
             logger.info('Waiting for dataset group to be deleted')
             time.sleep(20)
         else:
-            raise ResourcePending(f'Dataset group still being deleted')
+            raise ResourcePending('Dataset group still being deleted')
 
 def delete_dataset_groups(dataset_group_names: List[str], region: str = None, wait_for_resources: bool = True):
     min_botocore_version = '1.23.15' # As of re:Invent 2021 when domain recommenders were added to the API
16 changes: 9 additions & 7 deletions src/aws-lambda/pinpoint-auto-workshop/pinpoint-auto-workshop.py
@@ -88,10 +88,11 @@ def create_email_template(template_name, template_fname_root, subject, descripti
                 TemplateName=template_name
             )
             break
-        except pinpoint.exceptions.BadRequestException as e:
+        except pinpoint.exceptions.BadRequestException:
             try:
-                delete_response = pinpoint.delete_email_template(TemplateName=template_name)
-            except:
+                pinpoint.delete_email_template(TemplateName=template_name)
+            except BaseException as error:
+                logger.info('An exception occurred: {}'.format(error))
                 pass
             backoff_seconds = 30
             logger.info(f"Waiting for old template to delete: {template_name} - waiting {backoff_seconds} seconds")
@@ -133,10 +134,11 @@ def create_sms_template(template_name, body, description, recommender_id=None):
                 TemplateName=template_name
             )
             break
-        except pinpoint.exceptions.BadRequestException as e:
+        except pinpoint.exceptions.BadRequestException:
             try:
-                delete_response = pinpoint.delete_sms_template(TemplateName=template_name)
-            except:
+                pinpoint.delete_sms_template(TemplateName=template_name)
+            except BaseException as error:
+                logger.info('An exception occurred: {}'.format(error))
                 pass
             backoff_seconds = 30
             logger.info(f"Waiting for old template to delete: {template_name} - waiting {backoff_seconds} seconds")
@@ -275,7 +277,7 @@ def create_all_email_users_segment(application_id):
     Returns:
         Segment config. Returns even if already exists.
     """
-    segment_name = f'AllEmailUsers'
+    segment_name = 'AllEmailUsers'
     segment_config = get_segment(application_id, segment_name)
 
     if not segment_config:
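The bare `except:` fixes in this file are Ruff's E722: an unqualified `except` silently swallows everything, including `SystemExit` and `KeyboardInterrupt`. The commit keeps the catch-everything behavior but names and logs the exception; a minimal sketch of the pattern (hypothetical stand-in for the Pinpoint delete call):

```python
import logging

logger = logging.getLogger(__name__)

def delete_template(template_name):
    # Stand-in for pinpoint.delete_email_template(TemplateName=template_name)
    raise RuntimeError(f"{template_name} does not exist")

try:
    delete_template("RetailDemoStore-Recommendations")
except BaseException as error:   # E722-compliant; `except Exception` would be narrower still
    logger.info('An exception occurred: {}'.format(error))
```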
3 changes: 0 additions & 3 deletions src/aws-lambda/pinpoint-sms-alerts/pinpoint-sms-alerts.py
@@ -13,11 +13,8 @@

import json
import boto3
import botocore
import logging
import os
from datetime import datetime, timedelta
from botocore.exceptions import ClientError

logger = logging.getLogger()
logger.setLevel(logging.INFO)
@@ -20,7 +20,7 @@
 # Initialize the Amazon Personalize events boto object
 personalize_events = boto3.client('personalize-events')
 
-if not 'personalize_tracking_id' in os.environ or os.environ['personalize_tracking_id'] == '':
+if 'personalize_tracking_id' not in os.environ or os.environ['personalize_tracking_id'] == '':
     logger.error("Missing personalize_tracking_id environment variable in lambda configuration.")
     raise Exception('personalize_tracking_id not configured as environment variable')
 else:
@@ -37,7 +37,7 @@ def lambda_handler(event, context):
     # Make sure this event contains an itemId since this is required for the Retail Demo Store
     # dataset - you can also check for specific event names here if needed, and only pass the ones
     # that you want to use in the training dataset
-    if (not 'productId' in event['properties']):
+    if ('productId' not in event['properties']):
         logger.debug("Got event with no productId, discarding.")
         return
 
@@ -78,12 +78,12 @@ def lambda_handler(event, context):
 
             logger.debug('put_events parameters: {}'.format(json.dumps(params, indent = 2)))
             # Call put_events
-            response = personalize_events.put_events(**params)
+            personalize_events.put_events(**params)
         else:
             logger.debug("Segment event does not contain required fields (anonymousId and sku)")
-    except ValueError as ve:
+    except ValueError:
         logger.error("Invalid JSON format received, check your event sources.")
-    except KeyError as ke:
+    except KeyError:
         logger.error("Invalid configuration for Personalize, most likely.")
     except ClientError as ce:
         logger.error("ClientError: ")
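The dropped `response =`, `as ve`, and `as ke` bindings address Pyflakes' F841 (local variable assigned but never used): if neither the return value nor the exception object is read, the name is noise. A short sketch:

```python
import json

def is_valid_json(raw):
    try:
        json.loads(raw)      # was: result = json.loads(raw) - F841 when `result` is unused
        return True
    except ValueError:       # was: except ValueError as ve - `ve` was never read
        return False

print(is_valid_json('{"productId": 42}'), is_valid_json("not json"))  # True False
```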