diff --git a/02_activities/homework/homework_1.md b/02_activities/homework/homework_1.md
index 58534937b..7a76adc2d 100644
--- a/02_activities/homework/homework_1.md
+++ b/02_activities/homework/homework_1.md
@@ -76,3 +76,5 @@ Please do not pick the exact same tables that I have already diagramed. For exam
- 
- The column names can be found in a few spots (DB Schema window in the bottom right, the Database Structure tab in the main window by expanding each table entry, at the top of the Browse Data tab in the main window)
+- 
+
diff --git a/02_activities/homework/homework_2.sql b/02_activities/homework/homework_2.sql
index f0788ee2a..cfe83f9f2 100644
--- a/02_activities/homework/homework_2.sql
+++ b/02_activities/homework/homework_2.sql
@@ -1,19 +1,31 @@
--SELECT
/* 1. Write a query that returns everything in the customer table. */
-
+-- Q1: every column and row from customer
+SELECT * FROM customer;
/* 2. Write a query that displays all of the columns and 10 rows from the cus- tomer table,
sorted by customer_last_name, then customer_first_ name. */
-
+SELECT *
+FROM customer
+ ORDER BY customer_last_name, customer_first_name
+ LIMIT 10;
--WHERE
/* 1. Write a query that returns all customer purchases of product IDs 4 and 9. */
-- option 1
+SELECT *
+FROM customer_purchases
+ WHERE product_id IN (4, 9);
-- option 2
+SELECT *
+FROM customer_purchases
+ WHERE product_id = 4
+ OR product_id = 9;
+
/*2. Write a query that returns all customer purchases and a new calculated column 'price' (quantity * cost_to_customer_per_qty),
filtered by vendor IDs between 8 and 10 (inclusive) using either:
@@ -21,22 +33,59 @@ filtered by vendor IDs between 8 and 10 (inclusive) using either:
2. one condition using BETWEEN
*/
-- option 1
+SELECT *
+,quantity * cost_to_customer_per_qty AS price -- per-row calculated column; SUM() would collapse all rows without a GROUP BY
+FROM customer_purchases AS cp
+ WHERE vendor_id BETWEEN 8 AND 10;
-- option 2
+SELECT *
+,quantity * cost_to_customer_per_qty AS price -- per-row price, not an aggregate
+
+FROM customer_purchases AS cp
+ WHERE vendor_id > 7
+ AND vendor_id < 11;
--CASE
/* 1. Products can be sold by the individual unit or by bulk measures like lbs. or oz.
Using the product table, write a query that outputs the product_id and product_name
columns and add a column called prod_qty_type_condensed that displays the word “unit”
if the product_qty_type is “unit,” and otherwise displays the word “bulk.” */
+SELECT product_id, product_name
+,CASE WHEN product_qty_type = 'unit'
+ THEN 'unit'
+ ELSE 'bulk'
+ END AS prod_qty_type_condensed -- column name as required by the prompt
+FROM product;
/* 2. We want to flag all of the different types of pepper products that are sold at the market.
add a column to the previous query called pepper_flag that outputs a 1 if the product_name
contains the word “pepper” (regardless of capitalization), and otherwise outputs 0. */
+SELECT product_id, product_name
+,CASE WHEN product_qty_type = 'unit'
+ THEN 'unit'
+ ELSE 'bulk'
+ END AS prod_qty_type_condensed
+
+-- "or '%Pepper%'" was a bare string, not a second LIKE; LOWER() makes case-insensitivity explicit
+,CASE WHEN LOWER(product_name) LIKE '%pepper%'
+ THEN 1
+ ELSE 0
+ END AS pepper_flag
+FROM product;
+
--JOIN
/* 1. Write a query that INNER JOINs the vendor table to the vendor_booth_assignments table on the
vendor_id field they both have in common, and sorts the result by vendor_name, then market_date. */
+
+SELECT *
+
+FROM vendor AS v
+INNER JOIN vendor_booth_assignments AS vba
+ ON v.vendor_id = vba.vendor_id
+ ORDER BY vendor_name, market_date; -- prompt asks to SORT, not group
+
\ No newline at end of file
diff --git a/02_activities/homework/homework_3.sql b/02_activities/homework/homework_3.sql
index 3fe2589a2..830b28a06 100644
--- a/02_activities/homework/homework_3.sql
+++ b/02_activities/homework/homework_3.sql
@@ -2,7 +2,14 @@
/* 1. Write a query that determines how many times each vendor has rented a booth
at the farmer’s market by counting the vendor booth assignments per vendor_id. */
+-- booth rentals per vendor
+SELECT
+ v.vendor_id
+,v.vendor_name
+,COUNT(vba.booth_number) AS times_rented
+FROM vendor_booth_assignments AS vba
+INNER JOIN vendor AS v ON v.vendor_id = vba.vendor_id
+GROUP BY v.vendor_id, v.vendor_name;
/* 2. The Farmer’s Market Customer Appreciation Committee wants to give a bumper
sticker to everyone who has ever spent more than $2000 at the market. Write a query that generates a list
@@ -10,6 +17,18 @@ of customers for them to give stickers to, sorted by last name, then first name.
HINT: This query requires you to join two tables, use an aggregate function, and use the HAVING keyword. */
+SELECT
+ cp.customer_id
+,c.customer_last_name
+,c.customer_first_name
+,SUM(cp.quantity * cp.cost_to_customer_per_qty) AS total_spent
+
+FROM customer_purchases AS cp
+INNER JOIN customer AS c ON c.customer_id = cp.customer_id
+
+GROUP BY cp.customer_id -- group by id, not name: two customers can share a name
+HAVING total_spent > 2000
+ORDER BY c.customer_last_name, c.customer_first_name; -- prompt requires this sort
--Temp Table
@@ -23,8 +42,18 @@ When inserting the new vendor, you need to appropriately align the columns to be
-> To insert the new row use VALUES, specifying the value you want for each column:
VALUES(col1,col2,col3,col4,col5)
*/
+DROP TABLE IF EXISTS new_vendor;
+
+CREATE TEMP TABLE new_vendor AS
+SELECT *
+FROM vendor;
+
+INSERT INTO new_vendor
+VALUES (10, 'Thomass Superfood Store', 'Fresh Focused', 'Thomas', 'Rosenthal');
+SELECT * FROM new_vendor;
+
-- Date
/*1. Get the customer_id, month, and year (in separate columns) of every purchase in the customer_purchases table.
@@ -32,9 +61,32 @@ VALUES(col1,col2,col3,col4,col5)
HINT: you might need to search for strfrtime modifers sqlite on the web to know what the modifers for month
and year are! */
+SELECT
+ customer_id
+,strftime('%m', market_date) AS month
+,strftime('%Y', market_date) AS year
+
+FROM customer_purchases;
+
+
+
+
+
/* 2. Using the previous query as a base, determine how much money each customer spent in April 2022.
Remember that money spent is quantity*cost_to_customer_per_qty.
HINTS: you will need to AGGREGATE, GROUP BY, and filter...
but remember, STRFTIME returns a STRING for your WHERE statement!! */
+SELECT
+ customer_id
+,SUM(quantity * cost_to_customer_per_qty) AS sales
+
+FROM customer_purchases
+
+-- strftime returns TEXT, so compare against string literals; row filter belongs in WHERE
+WHERE strftime('%Y', market_date) = '2022'
+ AND strftime('%m', market_date) = '04'
+GROUP BY customer_id; -- prompt: spend per CUSTOMER in April 2022
+
+
diff --git a/02_activities/homework/homework_4.sql b/02_activities/homework/homework_4.sql
index adb56c478..cb5906de2 100644
--- a/02_activities/homework/homework_4.sql
+++ b/02_activities/homework/homework_4.sql
@@ -17,7 +17,9 @@ The `||` values concatenate the columns into strings.
Edit the appropriate columns -- you're making two edits -- and the NULL rows will be fixed.
All the other rows will remain the same.) */
-
+SELECT
+product_name || ', ' || coalesce(product_size,' ')|| ' (' || coalesce(product_qty_type, 'unit') || ')' AS product_label
+FROM product;
--Windowed Functions
@@ -30,16 +32,39 @@ each new market date for each customer, or select only the unique market dates p
(without purchase details) and number those visits.
HINT: One of these approaches uses ROW_NUMBER() and one uses DENSE_RANK(). */
+SELECT *
+--,ROW_NUMBER() OVER (PARTITION BY customer_id ORDER BY market_date ASC) AS customer_visit
+,DENSE_RANK() OVER (PARTITION BY customer_id ORDER BY market_date ASC) AS customer_visit_denserank
+FROM customer_purchases;
/* 2. Reverse the numbering of the query from a part so each customer’s most recent visit is labeled 1,
then write another query that uses this one as a subquery (or temp table) and filters the results to
only the customer’s most recent visit. */
+SELECT *
+
+FROM (
+SELECT
+ customer_id
+,market_date
+,vendor_id
+,transaction_time
+-- DENSE_RANK numbers visits (dates): every purchase on the most recent date gets rank 1,
+-- whereas ROW_NUMBER would keep only one purchase from that visit
+,DENSE_RANK() OVER (PARTITION BY customer_id ORDER BY market_date DESC) AS visit_rank
+FROM customer_purchases
+) x
+WHERE x.visit_rank = 1;
/* 3. Using a COUNT() window function, include a value along with each row of the
customer_purchases table that indicates how many different times that customer has purchased that product_id. */
+SELECT *
+-- total per (customer, product); an ORDER BY inside the window would turn this into a running count
+,COUNT(*) OVER (PARTITION BY customer_id, product_id) AS times_purchased
+FROM customer_purchases;
+
-- String manipulations
@@ -53,6 +78,13 @@ Remove any trailing or leading whitespaces. Don't just use a case statement for
| Habanero Peppers - Organic | Organic |
Hint: you might need to use INSTR(product_name,'-') to find the hyphens. INSTR will help split the column. */
+SELECT *
+,CASE WHEN INSTR(product_name, '-') > 0
+ THEN TRIM(SUBSTR(product_name, INSTR(product_name, '-') + 1)) -- TRIM strips both sides in one call
+ ELSE NULL END AS description
+
+FROM product;
+
diff --git a/02_activities/homework/homework_6.md b/02_activities/homework/homework_6.md
index 1d87066f8..4db2c318e 100644
--- a/02_activities/homework/homework_6.md
+++ b/02_activities/homework/homework_6.md
@@ -6,3 +6,11 @@
**Write**: Reflect on your previous work and how you would adjust to include ethics and inequity components. Total length should be a few paragraphs, no more than one page.
+
+Most databases are built around social and cultural norms as much as they are built around effective data systems. Data holds more power than we perceive, as data leaks and infringements have been some of the more newsworthy incidents in the computational world in recent years. So information privacy needs to be built into large datasets at multiple levels. For example, sensitive information like hospital records needs to be accessible to anyone controlling and inputting data as well as those making decisions based on that data. However, it needs to have enough security checks built in that the people who have free access to that data cannot abuse their privileges. A fellow colleague's medical information is just as accessible as any other patient's data, and if that information comes to light, there is no guarantee that bias towards a colleague based on medical history can be avoided. Where we can, access to data must be limited by levels. For example, colleagues' records might be restricted, and for higher-level decision making anonymised data may be sufficient.
+
+Databases must also be built in a way that accommodates some room for flexibility in structure in the future. For example, Pakistan's national database (NADRA) was not future-proofed to include more than two genders, non-nuclear families, single parents, or orphans. While we are unable to anticipate all the changes the future holds, change itself is guaranteed, so smart database designs must always account for it. As social norms evolve, people may identify differently, choose to have families outside of legal marriage, or not share last names with their partners or children, and data entry needs to accommodate such evolving climates. While changing the structure of a ginormous database like NADRA might be incredibly laborious, it does the people of the country a great disservice to exclude groups who do not conform to what was previously considered the norm.
+
+Data and AI should work for everyone, not just a privileged group of people with money, status, citizenship, or decision-making power. Most decision making in the Western world has traditionally been led by white men, with little room for women, gender-diverse people, or other races and ethnicities, yet their decisions affect these minorities and more, usually negatively. The most streamlined method to avoid exclusion is to include a diverse group of people in the research, design, and deployment phases. It is normal and somewhat expected to have our thinking and design influenced by our lives and backgrounds, so including as many diverse groups as possible allows for flexibility and ensures that our data systems are inclusive for everyone.
+
+Ethics, AI, and data are a dice-game of money, power, and privilege, much like most other things in life. Therefore it is up to those with the money, power, and privilege to make decisions that will not marginalise disenfranchised groups down the line with a tool that likely benefited from their labour to become successful.
\ No newline at end of file
diff --git a/02_activities/homework/images/homework_1.png b/02_activities/homework/images/homework_1.png
new file mode 100644
index 000000000..343468420
Binary files /dev/null and b/02_activities/homework/images/homework_1.png differ
diff --git a/05_src/data/db_csvs/new_customer_purchases.csv b/05_src/data/db_csvs/new_customer_purchases.csv
new file mode 100644
index 000000000..001c201e3
--- /dev/null
+++ b/05_src/data/db_csvs/new_customer_purchases.csv
@@ -0,0 +1,147 @@
+product_id,vendor_id,market_date,customer_id,quantity,cost_to_customer_per_qty,transaction_time
+21,3,2024-09-19,11,1,4.99,17:32:00
+21,3,2024-09-19,25,2,4.99,18:23:00
+21,3,2024-09-19,24,2,4.99,18:41:00
+21,3,2024-09-19,6,2,4.99,18:18:00
+21,3,2024-09-19,8,1,4.99,17:34:00
+21,3,2024-09-19,2,1,4.99,12:20:00
+21,3,2024-09-19,18,1,4.99,9:33:00
+21,3,2024-09-19,24,3,4.99,13:05:00
+21,3,2024-09-19,18,3,4.99,12:26:00
+21,3,2024-09-19,4,2,4.99,12:46:00
+21,3,2024-09-19,17,3,4.99,18:40:00
+21,3,2024-09-19,13,2,4.99,18:06:00
+21,3,2024-09-19,23,3,4.99,18:35:00
+21,3,2024-09-19,3,3,4.99,18:56:00
+21,3,2024-09-19,8,3,4.99,12:03:00
+21,3,2024-09-19,1,2,4.99,11:43:00
+21,3,2024-09-19,23,3,4.99,17:43:00
+21,3,2024-09-19,13,3,4.99,18:22:00
+21,3,2024-09-19,23,2,4.99,18:52:00
+21,3,2024-09-19,11,2,4.99,17:07:00
+21,3,2024-09-19,1,1,4.99,12:56:00
+21,3,2024-09-19,3,1,4.99,11:02:00
+21,3,2024-09-19,2,2,4.99,11:38:00
+21,3,2024-09-19,23,1,4.99,18:56:00
+21,3,2024-09-19,24,1,4.99,18:59:00
+21,3,2024-09-19,2,1,4.99,10:42:00
+21,3,2024-09-19,15,3,4.99,17:30:00
+21,3,2024-09-19,5,2,4.99,9:08:00
+21,3,2024-09-19,24,2,4.99,13:58:00
+21,3,2024-09-19,24,1,4.99,9:37:00
+21,3,2024-09-19,26,1,4.99,17:27:00
+21,3,2024-09-19,19,1,4.99,11:40:00
+21,3,2024-09-19,2,2,4.99,13:59:00
+21,3,2024-09-19,25,1,4.99,12:21:00
+21,3,2024-09-19,12,2,4.99,10:10:00
+21,3,2024-09-19,12,1,4.99,10:39:00
+21,3,2024-09-19,3,3,4.99,11:59:00
+21,3,2024-09-19,25,1,4.99,18:18:00
+21,3,2024-09-19,21,3,4.99,17:05:00
+21,3,2024-09-19,11,1,4.99,17:39:00
+21,3,2024-09-19,10,3,4.99,18:01:00
+21,3,2024-09-19,18,1,4.99,17:19:00
+21,3,2024-09-19,11,2,4.99,17:10:00
+22,3,2024-09-19,22,2.5,6.49,18:36:00
+22,3,2024-09-19,18,0.81,6.49,18:53:00
+22,3,2024-09-19,10,2.35,6.49,17:44:00
+22,3,2024-09-19,13,3.37,6.49,13:28:00
+22,3,2024-09-19,10,0.82,6.49,18:18:00
+22,3,2024-09-19,3,2.87,6.49,8:35:00
+22,3,2024-09-19,1,0.27,6.49,8:47:00
+22,3,2024-09-19,1,2.87,6.49,11:00:00
+22,3,2024-09-19,17,2.38,6.49,18:56:00
+22,3,2024-09-19,5,0.31,6.49,18:28:00
+22,3,2024-09-19,7,3.45,6.49,10:00:00
+22,3,2024-09-19,17,0.07,6.49,9:57:00
+22,3,2024-09-19,20,0.57,6.49,9:12:00
+22,3,2024-09-19,2,4.05,6.49,18:33:00
+22,3,2024-09-19,4,0.76,6.49,16:30:00
+22,3,2024-09-19,10,3.77,6.49,17:57:00
+22,3,2024-09-19,19,4.11,6.49,13:33:00
+22,3,2024-09-19,17,2.84,6.49,8:35:00
+22,3,2024-09-19,18,0,6.49,8:49:00
+22,3,2024-09-19,2,1.42,6.49,9:09:00
+10,1,2024-09-19,9,3,4.5,18:37:00
+10,1,2024-09-19,5,5,4.5,17:37:00
+10,1,2024-09-19,3,6,4.5,17:52:00
+10,1,2024-09-19,16,3,4.5,17:21:00
+10,1,2024-09-19,26,2,4.5,13:49:00
+10,1,2024-09-19,22,1,4.5,13:42:00
+10,1,2024-09-19,13,5,4.5,18:41:00
+10,1,2024-09-19,20,4,4.5,17:14:00
+10,1,2024-09-19,10,4,4.5,12:17:00
+10,1,2024-09-19,25,2,4.5,11:11:00
+10,1,2024-09-19,18,2,4.5,11:27:00
+10,1,2024-09-19,2,4,4.5,16:47:00
+10,1,2024-09-19,12,5,4.5,18:52:00
+10,1,2024-09-19,15,1,4.5,17:28:00
+10,1,2024-09-19,16,4,4.5,9:23:00
+10,1,2024-09-19,7,3,4.5,8:54:00
+10,1,2024-09-19,13,5,4.5,18:54:00
+10,1,2024-09-19,24,6,4.5,17:21:00
+10,1,2024-09-19,8,1,4.5,18:16:00
+10,1,2024-09-19,7,2,4.5,18:27:00
+10,1,2024-09-19,13,6,4.5,17:54:00
+10,1,2024-09-19,7,3,4.5,17:56:00
+10,1,2024-09-19,26,2,4.5,11:01:00
+10,1,2024-09-19,22,6,4.5,12:51:00
+10,1,2024-09-19,16,2,4.5,10:21:00
+10,1,2024-09-19,17,3,4.5,12:37:00
+10,1,2024-09-19,5,4,4.5,18:53:00
+10,1,2024-09-19,16,2,4.5,18:39:00
+10,1,2024-09-19,20,1,4.5,17:34:00
+10,1,2024-09-19,1,5,4.5,17:36:00
+10,1,2024-09-19,10,6,4.5,17:53:00
+10,1,2024-09-19,22,2,4.5,13:03:00
+10,1,2024-09-19,7,1,4.5,10:09:00
+10,1,2024-09-19,11,1,4.5,13:44:00
+10,1,2024-09-19,1,6,4.5,11:40:00
+10,1,2024-09-19,23,3,4.5,18:51:00
+10,1,2024-09-19,24,1,4.5,18:15:00
+10,1,2024-09-19,26,4,4.5,18:43:00
+10,1,2024-09-19,9,1,4.5,13:30:00
+10,1,2024-09-19,11,3,4.5,13:54:00
+10,1,2024-09-19,23,2,4.5,18:45:00
+10,1,2024-09-19,2,1,4.5,18:49:00
+11,1,2024-09-19,22,0.65,7.29,10:33:00
+11,1,2024-09-19,1,0.88,7.29,10:20:00
+11,1,2024-09-19,16,3.08,7.29,10:24:00
+11,1,2024-09-19,21,2.61,7.29,12:03:00
+11,1,2024-09-19,16,1.52,7.29,11:43:00
+11,1,2024-09-19,8,0.06,7.29,17:43:00
+11,1,2024-09-19,19,0.23,7.29,18:22:00
+11,1,2024-09-19,17,3.61,7.29,18:52:00
+20,5,2024-09-19,2,2,10,12:56:00
+20,5,2024-09-19,26,2,10,11:02:00
+20,5,2024-09-19,20,2,10,11:38:00
+20,5,2024-09-19,4,4,10,18:56:00
+20,5,2024-09-19,14,4,10,18:59:00
+20,5,2024-09-19,8,4,10,10:42:00
+20,5,2024-09-19,8,6,10,9:33:00
+20,5,2024-09-19,6,6,10,13:28:00
+20,5,2024-09-19,11,6,10,13:33:00
+20,5,2024-09-19,18,6,10,13:33:00
+20,5,2024-09-19,19,6,10,11:02:00
+20,5,2024-09-19,6,6,10,18:56:00
+20,5,2024-09-19,2,4,10,18:36:00
+20,5,2024-09-19,12,4,10,18:15:00
+20,5,2024-09-19,9,4,10,17:35:00
+20,5,2024-09-19,11,4,10,16:33:00
+20,5,2024-09-19,24,4,10,18:16:00
+20,5,2024-09-19,17,6,10,8:54:00
+20,5,2024-09-19,15,2,10,9:52:00
+20,5,2024-09-19,13,6,10,10:26:00
+20,5,2024-09-19,23,2,10,12:37:00
+20,5,2024-09-19,17,6,10,13:13:00
+20,5,2024-09-19,14,6,10,11:29:00
+20,5,2024-09-19,20,6,10,18:21:00
+20,5,2024-09-19,8,2,10,16:58:00
+20,5,2024-09-19,14,4,10,17:08:00
+20,5,2024-09-19,17,2,10,16:41:00
+19,5,2024-09-19,7,2,0.99,11:41:00
+19,5,2024-09-19,12,1,0.99,10:07:00
+19,5,2024-09-19,16,3,0.99,10:39:00
+19,5,2024-09-19,9,2,0.99,18:35:00
+19,5,2024-09-19,10,1,0.99,18:44:00
+19,5,2024-09-19,19,4,0.99,17:50:00
\ No newline at end of file
diff --git a/05_src/sql/Sep 17.sqbpro b/05_src/sql/Sep 17.sqbpro
new file mode 100644
index 000000000..16c52445e
--- /dev/null
+++ b/05_src/sql/Sep 17.sqbpro
@@ -0,0 +1,178 @@
+
+-- TEMP
+
+-- if a table named "?" exists, delete it, otherwise do nothing
+DROP TABLE IF EXISTS new_vendor_inventory;
+
+--make
+CREATE TEMP TABLE new_vendor_inventory AS
+
+--definition of it
+SELECT *,
+original_price * 5 as inflation
+FROM vendor_inventory;
+
+--put a temp table into another one
+DROP TABLE IF EXISTS new_new_vendor_inventory;
+
+CREATE TEMP TABLE new_new_vendor_inventory AS
+SELECT *
+,inflation*2 as super_inflation
+from new_vendor_inventory;
+
+SELECT * FROM new_new_vendor_inventory; -- terminator required: a WITH statement follows
+
+--CTEs
+
+--calculate sales per vendor per day
+
+WITH vendor_daily_sales AS (
+ SELECT md.market_date
+ ,market_day
+ ,market_week
+ ,market_year
+ ,vendor_name
+ ,sum(quantity*cost_to_customer_per_qty) AS sales
+
+ FROM market_date_info md
+ JOIN customer_purchases cp
+ on md.market_date = cp.market_date
+ JOIN vendor v
+ on cp.vendor_id = v.vendor_id
+
+ GROUP BY md.market_date, v.vendor_id
+)
+
+--another cte
+,a_new_cte AS (SELECT * FROM product)
+
+--re-aggregate daily sales within each week for each vendor
+SELECT market_year
+,market_week
+,vendor_name
+,sum(sales) as weekly_sales
+
+FROM vendor_daily_sales
+GROUP BY market_year, market_week, vendor_name;
+
+
+
+
+-- dates
+
+--now
+SELECT DISTINCT
+DATE('now')
+,DATETIME()
+
+--strftime
+,strftime('%Y-%m','now')
+,strftime('%Y-%m-%d','2024-09-16','+50 days') as the_future
+,market_date
+,strftime('%Y-%m-%d',market_date,'+50 days','-1 year') as the_past
+
+--dateadd
+--last day of the month
+,strftime('%Y-%m-%d', market_date, 'start of month','-1 day')
+
+--datediff equiv
+,market_date
+,(julianday('now') - julianday(market_date)) / 365.25 -- number of years between now and marketdate
+,julianday('now') - julianday(market_date) -- number of days between now and each marketdate
+,(julianday('now') - julianday(market_date)) * 24 -- number of HOURS between now and each marketdate
+
+FROM market_date_info; -- terminator required: another SELECT follows
+
+--IFNULL and coalesce + NULLIF
+
+SELECT *
+,IFNULL(product_size, 'Unknown')
+
+--less meaningful, but conceptual
+--,IFNULL(product_size,product_category_id)
+,IFNULL(product_size,product_qty_type)
+,coalesce(product_size,product_qty_type, 'missing') -- this one is easier: if the first value is null, then the second value; if the second value is null, then the third value (and onwards if you have more arguments)
+,IFNULL(IFNULL(product_size,product_qty_type),'missing')
+
+FROM product;
+
+SELECT *
+,IFNULL(product_size, 'Unknown')
+
+--nullif
+,NULLIF(product_size, '') -- finding values in the product_size column that are "blanks" and setting them to NULL
+,coalesce(NULLIF(product_size, ''),'unknown')
+
+from product
+
+WHERE NULLIF(product_size, '') IS NULL; -- terminator required: another SELECT follows
+SELECT *
+
+
+ FROM (
+
+ SELECT
+ vendor_id
+ , market_date
+ ,product_id
+ ,original_price
+ ,row_number() OVER( PARTITION by vendor_id ORDER by original_price DESC) as price_rank
+
+ FROM vendor_inventory
+) x
+
+WHERE x.price_rank =1;
+drop table if EXISTS temp.row_rank_dense;
+
+create TEMP TABLE if not EXISTS temp.row_rank_dense
+
+(
+emp_id INT,
+salary INT
+
+);
+
+INSERT INTO temp.row_rank_dense
+VALUES (1, 200000),
+(2, 1000000),
+(3, 340000),
+(4, 34450600),
+(5, 10385493),
+(6,340000);
+
+
+SELECT *
+,row_number() OVER (ORDER by salary DESC, emp_id ASC) as [row_number]
+, rank() over (order by salary DESC) as [rank]
+, dense_rank () over (ORDER by salary DESC) as [dense_rank]
+
+FROM row_rank_dense
+
+--ntile (4,5,100)
+
+SELECT *
+,NTILE(4) OVER( PARTITION BY vendor_name ORDER BY sales ASC) as quartile
+,NTILE(5) OVER( PARTITION BY vendor_name ORDER BY sales ASC) as quintile
+,NTILE(100) OVER( PARTITION BY vendor_name ORDER BY sales ASC) as percentile
+
+FROM (
+
+
+ SELECT md.market_date
+ ,market_day
+ ,market_week
+ ,market_year
+ ,vendor_name
+ ,sum(quantity*cost_to_customer_per_qty) AS sales
+
+ FROM market_date_info md
+ JOIN customer_purchases cp
+ on md.market_date = cp.market_date
+ JOIN vendor v
+ on cp.vendor_id = v.vendor_id
+
+ GROUP BY md.market_date, v.vendor_id
+
+) x
+
+
diff --git a/05_src/sql/farmersmarket.db b/05_src/sql/farmersmarket.db
index 0552dfec1..f7ccbfe60 100644
Binary files a/05_src/sql/farmersmarket.db and b/05_src/sql/farmersmarket.db differ
diff --git a/05_src/sql/sep 18.sqbpro b/05_src/sql/sep 18.sqbpro
new file mode 100644
index 000000000..08dd2c275
--- /dev/null
+++ b/05_src/sql/sep 18.sqbpro
@@ -0,0 +1,156 @@
+-- string manipulate
+
+SELECT DISTINCT
+
+LTRIM(' THOMAS ROSNETHAL ') as [ltrim]
+,RTRIM(' THOMAS ROSNETHAL ') as [rtrim]
+,LTRIM(RTRIM(' THOMAS ROSNETHAL ')) as [both]
+
+
+--upper/lower
+,lower(customer_first_name)
+,upper(customer_last_name)
+
+
+--concat
+,customer_first_name || ' ' ||customer_last_name as customer_name
+,UPPER(customer_first_name) || ' ' || UPPER(customer_last_name) as upper_full_name
+
+,SUBSTR(customer_last_name,4) -- any length from the 4th character
+,SUBSTR(customer_last_name,4,2)
+,SUBSTR(customer_last_name, -5,4) -- counting from the right
+,SUBSTR(customer_last_name,1,2)
+
+,length(customer_last_name)
+,'THOMAS
+
+ROSENTHAL'
+
+,replace('THOMAS
+
+ROSENTHAL',char(10),' ') -- removing all instances of line breaks from this string
+
+
+from customer
+WHERE customer_first_name REGEXP '(a)$' -- only names ending in a; REGEXP needs an extension loaded in SQLite
+
+--UNION/UNIONALL
+
+--most and least expensive product by vendor with a UNION
+
+SELECT vendor_id, product_id, original_price, rn_max AS [row_number]
+FROM
+(
+ SELECT DISTINCT
+ vendor_id, product_id, original_price
+ ,ROW_NUMBER() OVER(PARTITION BY vendor_id ORDER BY original_price DESC) as rn_max
+
+ FROM vendor_inventory
+) x
+where rn_max = 1
+
+UNION -- union returned 5 rows, union all returned 6 rows (vendor #4 was duplicate values)
+SELECT vendor_id, product_id, original_price, rn_min
+FROM
+(
+ SELECT DISTINCT
+ vendor_id, product_id, original_price
+ ,ROW_NUMBER() OVER(PARTITION BY vendor_id ORDER BY original_price ASC) as rn_min
+ FROM vendor_inventory
+) x -- the subquery was never closed
+WHERE rn_min = 1;
+--
+--FULL OUTER JOIN WITH A UNION
+--two stores, determining which customes they have in stock
+
+DROP TABLE IF EXISTS temp.store1;
+CREATE TEMP TABLE IF NOT EXISTS temp.store1
+(
+costume TEXT,
+quantity INT
+);
+
+INSERT INTO temp.store1
+VALUES("tiger",6),
+ ("elephant",2),
+ ("princess", 4);
+
+
+DROP TABLE IF EXISTS temp.store2;
+CREATE TEMP TABLE IF NOT EXISTS temp.store2
+(
+costume TEXT,
+quantity INT
+);
+
+INSERT INTO temp.store2
+VALUES("tiger",2),
+ ("dancer",7),
+ ("superhero", 5);
+
+
+SELECT s1.costume, s1.quantity as store1_quantity, s2.quantity as store2_quantity
+FROM store1 s1
+LEFT JOIN store2 s2 on s1.costume = s2.costume
+
+UNION ALL
+
+SELECT s2.costume, s1.quantity, s2.quantity
+FROM store2 s2
+LEFT JOIN store1 s1 on s1.costume = s2.costume
+WHERE s1.quantity IS NULL
+
+
+--INTERSECT / EXCEPT
+
+--products that have been sold (e.g. are in customer_purchases and product)
+--direction does not matter
+SELECT product_id
+FROM customer_purchases
+INTERSECT
+SELECT product_id
+FROM product;
+
+--products that have NOT been sold (e.g. are NOT in customer_purchases even though in product
+--plus names
+
+SELECT x.product_id, product_name
+FROM (
+ SELECT product_id
+ FROM product
+ EXCEPT
+ SELECT product_id
+ FROM customer_purchases
+) x
+JOIN product p on x.product_id = p.product_id;
+
+--NOTHING
+--direction matters!
+SELECT product_id
+FROM product
+EXCEPT
+SELECT product_id
+FROM customer_purchases
+
+
+--INSERT UPDATE DELETE
+
+DROP TABLE IF EXISTS temp.product_expanded;
+CREATE TEMP TABLE product_expanded AS
+ SELECT * FROM product;
+
+INSERT INTO product_expanded
+VALUES(26, 'Almonds', '1 lb', 1, 'lbs');
+
+
+--update the product_size for almonds to 1/2 lb
+UPDATE product_expanded
+SET product_size = '1/2 kg',product_qty_type = 'kg'
+WHERE product_id = 26;
+
+-- delete our almond
+--DELETE from product_expanded
+--SELECT * from product_expanded
+--WHERE product_id = 26;
+
+SELECT * from product_expanded
diff --git a/05_src/sql/sep11.sqbpro b/05_src/sql/sep11.sqbpro
new file mode 100644
index 000000000..168c3efa3
--- /dev/null
+++ b/05_src/sql/sep11.sqbpro
@@ -0,0 +1,86 @@
+/*SELECT *
+FROM customer; */
+
+/*SELECT customer_first_name, customer_last_name
+
+FROM customer*/
+
+-- SELECT 10*5 AS [math_product], customer_id, customer_id +10
+-- FROM customer
+--
+-- SELECT * FROM customer_purchases
+
+-- SELECT 'Thomas' FROM customer
+
+
+-- SELECT * FROM customer
+-- WHERE customer_id < 10
+--
+-- SELECT * FROM customer
+-- WHERE customer_id = 5
+
+--
+-- SELECT * FROM customer
+-- WHERE customer_id = 5
+-- OR customer_id = 2
+
+
+-- SELECT * FROM customer_purchases
+-- WHERE customer_id IN (3,4,5)
+-- AND vendor_id = 20
+--
+-- SELECT * FROM product
+-- WHERE product_name LIKE '%pepper%organic'
+
+--SELECT * FROM product
+--WHERE product_id IN (1,2)
+SELECT * -- was fused onto the comment line above, leaving the query with no SELECT
+,CASE WHEN vendor_type = 'Fresh Focused'
+ THEN 'Wednesday'
+ WHEN vendor_type = 'Prepared Foods'
+ THEN 'Thursday'
+ ELSE 'Saturday'
+ END AS day_of_specialty
+,CASE WHEN vendor_name LIKE '%pie%'
+ THEN 'Thursday'
+ ELSE 'Saturday'
+ END AS day_of_specialty2
+FROM vendor; --DISTINCT
+
+-- SELECT DISTINCT booth_type FROM booth
+--
+-- SELECT DISTINCT booth_price_level, booth_type
+-- FROM booth
+--
+-- SELECT DISTINCT booth_price_level, booth_type, booth_description
+-- FROM booth
+
+--which vendor has sold products to a customer and which product was it?
+-- SELECT DISTINCT vendor_id, product_id
+-- FROM customer_purchases
+
+
+SELECT DISTINCT vendor_id, product_id, customer_id
+FROM customer_purchases
+
+-- INNER JOIN
+--
+-- SELECT customer_purchases.product_id, product.product_id, vendor_id, market_date, customer_id, product_name
+-- FROM customer_purchases
+-- INNER JOIN product
+-- ON customer_purchases.product_id = product.product_id
+--
+
+
+SELECT DISTINCT p.product_id, product_name
+FROM product AS p
+INNER JOIN customer_purchases AS cp
+ ON p.product_id = cp.product_id; -- "-- LEFT JOIN" was fused onto this line, gluing two statements together
+
+-- LEFT JOIN: products that were never purchased
+SELECT DISTINCT p.product_id, product_name, cp.*
+FROM product AS p
+LEFT JOIN customer_purchases AS cp
+ ON p.product_id = cp.product_id
+
+ WHERE cp.product_id IS NULL;
diff --git a/05_src/sql/sep12.sqbpro b/05_src/sql/sep12.sqbpro
new file mode 100644
index 000000000..1195f4a96
--- /dev/null
+++ b/05_src/sql/sep12.sqbpro
@@ -0,0 +1,78 @@
+-- count
+--count the number of products in the products TABLE
+
+-- SELECT count(product_id)
+-- FROM product
+
+-- SELECT product_qty_type, count(product_id)
+-- FROM product
+-- Where product_qty_type is not null
+-- group By product_qty_type
+
+-- SELECT product_qty_type, count(product_id), product_size
+-- FROM product
+-- Where product_qty_type is not null
+-- group By product_size
+--
+-- SELECT count(product_id), product_size
+-- FROM product
+-- group By product_size
+
+
+--sum and avg
+
+--how much did a customer spend each day at the farmers market?
+SELECT market_date, customer_id
+,SUM(quantity*cost_to_customer_per_qty) as total_cost
+
+FROM customer_purchases
+GROUP by market_date, customer_id;
+
+
+-- how much does each custoemr spend on avg
+
+select customer_first_name, customer_last_name
+,AVG(quantity*cost_to_customer_per_qty) as avg_cost
+
+FROM customer_purchases as cp
+inner join customer as c
+ on c.customer_id = cp.customer_id
+GROUP by cp.customer_id
+ORDER by avg_cost DESC
+--min and max
+
+--what is the most expensive product?
+--
+-- SELECT product_name, max(original_price)
+--
+-- FROM product as p
+-- INNER JOIN vendor_inventory as vi
+-- on p.product_id = vi.product_id;
+--
+-- --minimum price per each type of unit/lb
+
+SELECT product_name, product_qty_type, min(original_price)
+FROM product as p
+INNER JOIN vendor_inventory as vi
+ on p.product_id = vi.product_id
+
+group by product_name, product_qty_type;
+
+-- order by product_qty_type, original_price ASC
+
+
+-- SELECT 10.0/3.0
+
+-- integer division: CAST truncates the operands to INT (original line was garbled)
+SELECT DISTINCT
+cost_to_customer_per_qty
+,CAST(cost_to_customer_per_qty AS INT) / 2 AS int_division
+FROM customer_purchases;
+
+--HAVING
+SELECT
+customer_id
+, market_date
+,SUM(quantity * cost_to_customer_per_qty) AS cost
+FROM customer_purchases AS cp
+GROUP BY customer_id, market_date; -- "market date" was missing its underscore
diff --git a/7FA33B84-E47B-4667-BE00-FEE11C2FF0BB.png b/7FA33B84-E47B-4667-BE00-FEE11C2FF0BB.png
new file mode 100644
index 000000000..9fe4e2efe
Binary files /dev/null and b/7FA33B84-E47B-4667-BE00-FEE11C2FF0BB.png differ
diff --git a/D6945AFF-733F-441B-BE8F-1EE0681F9827.jpg b/D6945AFF-733F-441B-BE8F-1EE0681F9827.jpg
new file mode 100644
index 000000000..4ee04c3b9
Binary files /dev/null and b/D6945AFF-733F-441B-BE8F-1EE0681F9827.jpg differ