From 4fce04894794b922bc86d51de0369de674e33695 Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Sat, 28 Jan 2017 19:34:16 -0700 Subject: [PATCH 001/367] Experimenting with a stricter elasticsearch mapping. --- config/default.yml | 2 +- config/elasticsearch_templates.json | 151 +++++++++--------- .../proxy/jobs/elasticsearch_setup.lua | 32 ++-- templates/etc/rsyslog.conf.mustache | 2 +- 4 files changed, 99 insertions(+), 88 deletions(-) diff --git a/config/default.yml b/config/default.yml index a18bfa23c..fe60e2dfd 100644 --- a/config/default.yml +++ b/config/default.yml @@ -201,7 +201,7 @@ flume: kafka: brokers: [] topic: api_umbrella_logs -log_template_version: v1 +log_template_version: v2 strip_cookies: - ^__utm.*$ - ^_ga$ diff --git a/config/elasticsearch_templates.json b/config/elasticsearch_templates.json index f70705799..43ba4745b 100644 --- a/config/elasticsearch_templates.json +++ b/config/elasticsearch_templates.json @@ -34,9 +34,9 @@ } }, { - "id": "api-umbrella-log-v1-template", + "id": "api-umbrella-log-v2-template", "template": { - "template": "api-umbrella-logs-v1-*", + "template": "api-umbrella-logs-v2-*", "settings": { "index": { "number_of_shards": 3 @@ -61,182 +61,185 @@ "_all": { "enabled": false }, - "date_detection": false, - "numeric_detection": false, - "dynamic_templates": [ - { - "string_template": { - "match": "*", - "match_mapping_type": "string", - "mapping": { - "type": "string", - "index": "analyzed", - "analyzer": "keyword_lowercase" - } - } - } - ], + "dynamic": "strict", "properties": { "api_key": { "type": "string", - "index": "not_analyzed" - }, - "backend_response_time": { - "type": "integer" + "index": "not_analyzed", + "doc_values": true }, "gatekeeper_denied_code": { "type": "string", - "analyzer": "keyword_lowercase" - }, - "internal_gatekeeper_time": { - "type": "float" - }, - "internal_response_time": { - "type": "float" - }, - "proxy_overhead": { - "type": "integer" + "index": "not_analyzed", + "doc_values": true }, "request_accept": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_accept_encoding": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_at": { - "type": "date" + "type": "date", + "doc_values": true }, "request_basic_auth_username": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_connection": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_content_type": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_hierarchy": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_host": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_ip": { "type": "string", - "index": "not_analyzed" + "index": "not_analyzed", + "doc_values": true }, "request_ip_city": { "type": "string", - "index": "not_analyzed" + "index": "not_analyzed", + "doc_values": true }, "request_ip_country": { "type": "string", - "index": "not_analyzed" + "index": "not_analyzed", + "doc_values": true }, "request_ip_location": { "type": "geo_point", - "lat_lon": true + "lat_lon": true, + "doc_values": true }, "request_ip_region": { "type": "string", - "index": "not_analyzed" + "index": "not_analyzed", + "doc_values": true }, "request_method": { 
"type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_origin": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_path": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_path_hierarchy": { "type": "string", - "analyzer": "path_hierarchy_lowercase" + "analyzer": "path_hierarchy_lowercase", + "doc_values": false }, "request_referer": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_scheme": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_size": { - "type": "integer" + "type": "integer", + "doc_values": true }, "request_url": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_user_agent": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_user_agent_family": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "request_user_agent_type": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "response_age": { - "type": "integer" + "type": "integer", + "doc_values": true }, "response_cache": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "response_content_encoding": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "response_content_length": { - "type": "integer" + "type": "integer", + "doc_values": true }, "response_content_type": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "response_server": { "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "response_size": { - "type": "integer" + "type": "integer", + "doc_values": true }, "response_status": { - "type": "short" + "type": "short", + "doc_values": true }, "response_time": { - "type": "integer" + "type": "integer", + "doc_values": true }, "response_transfer_encoding": { "type": "string", - "analyzer": "keyword_lowercase" - }, - "user_email": { - "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true }, "user_id": { "type": "string", - "index": "not_analyzed" - }, - "user_registration_source": { - "type": "string", - "analyzer": "keyword_lowercase" + "index": "not_analyzed", + "doc_values": true } } } diff --git a/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua b/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua index 6015ba578..5897f4831 100644 --- a/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua +++ b/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua @@ -89,20 +89,28 @@ local function create_aliases() local httpc = http.new() for _, alias in ipairs(aliases) do - -- Make sure the index exists. - local _, create_err = httpc:request_uri(elasticsearch_host .. "/" .. alias["index"], { - method = "PUT", + -- Only create aliases if they don't already exist. + local exists_res, exists_err = httpc:request_uri(elasticsearch_host .. "/_alias/" .. 
alias["alias"], { + method = "HEAD", }) - if create_err then - ngx.log(ngx.ERR, "failed to create elasticsearch index: ", create_err) - end + if exists_err then + ngx.log(ngx.ERR, "failed to check elasticsearch index alias: ", exists_err) + elseif exists_res.status == 404 then + -- Make sure the index exists. + local _, create_err = httpc:request_uri(elasticsearch_host .. "/" .. alias["index"], { + method = "PUT", + }) + if create_err then + ngx.log(ngx.ERR, "failed to create elasticsearch index: ", create_err) + end - -- Create the alias for the index. - local _, alias_err = httpc:request_uri(elasticsearch_host .. "/" .. alias["index"] .. "/_alias/" .. alias["alias"], { - method = "PUT", - }) - if alias_err then - ngx.log(ngx.ERR, "failed to create elasticsearch index alias: ", alias_err) + -- Create the alias for the index. + local _, alias_err = httpc:request_uri(elasticsearch_host .. "/" .. alias["index"] .. "/_alias/" .. alias["alias"], { + method = "PUT", + }) + if alias_err then + ngx.log(ngx.ERR, "failed to create elasticsearch index alias: ", alias_err) + end end end end diff --git a/templates/etc/rsyslog.conf.mustache b/templates/etc/rsyslog.conf.mustache index 3b4de97b7..1e02dfe11 100644 --- a/templates/etc/rsyslog.conf.mustache +++ b/templates/etc/rsyslog.conf.mustache @@ -163,7 +163,7 @@ if($!raw!legacy_user_registration_source != "") then { } # Define templates for ElasticSearch output. -template(name="elasticsearch-index" type="string" string="api-umbrella-logs-v1-%timereported:1:7:date-rfc3339%") +template(name="elasticsearch-index" type="string" string="api-umbrella-logs-write-%timereported:1:7:date-rfc3339%") template(name="elasticsearch-id" type="subtree" subtree="$!raw!id") template(name="elasticsearch-json-record" type="subtree" subtree="$!usr!es") From fdff1521af5fc49ae74b2da91c31dc327ed57c1f Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Mon, 30 Jan 2017 23:14:51 -0700 Subject: [PATCH 002/367] An initial pass at a simplified approach to analytics. This gets rid of several fields that we were collecting but not using. It also removes various complexities with the logging due to the various timers we were trying to gather. Those were really for debugging purposes, but it doesn't seem worth all the extra hoops to jump through. 
--- config/elasticsearch_templates.json | 56 +-- .../proxy/hooks/log_api_backend_proxy.lua | 28 -- .../proxy/hooks/log_initial_proxy.lua | 333 ++-------------- src/api-umbrella/proxy/log_utils.lua | 368 ++++++++++++++---- .../web-app/app/models/log_search/base.rb | 5 +- .../app/models/log_search/elastic_search.rb | 8 +- .../web-app/app/models/log_search/sql.rb | 9 +- templates/etc/nginx/router.conf.mustache | 1 - templates/etc/rsyslog.conf.mustache | 38 +- 9 files changed, 355 insertions(+), 491 deletions(-) delete mode 100644 src/api-umbrella/proxy/hooks/log_api_backend_proxy.lua diff --git a/config/elasticsearch_templates.json b/config/elasticsearch_templates.json index 43ba4745b..4c8bb14dd 100644 --- a/config/elasticsearch_templates.json +++ b/config/elasticsearch_templates.json @@ -1,53 +1,14 @@ [ - { - "id": "api-umbrella-template", - "template": { - "template": "api-umbrella", - "mappings": { - "city": { - "_all": { - "enabled": false - }, - "properties": { - "updated_at": { - "type": "date" - }, - "country": { - "type": "string", - "index": "not_analyzed" - }, - "region": { - "type": "string", - "index": "not_analyzed" - }, - "city": { - "type": "string", - "index": "not_analyzed" - }, - "location": { - "type": "geo_point", - "lat_lon": true - } - } - } - } - } - }, { "id": "api-umbrella-log-v2-template", "template": { "template": "api-umbrella-logs-v2-*", "settings": { "index": { - "number_of_shards": 3 + "number_of_shards": 1 }, "analysis": { "analyzer": { - "keyword_lowercase": { - "type": "custom", - "tokenizer": "keyword", - "filter": ["lowercase"] - }, "path_hierarchy_lowercase": { "type": "custom", "tokenizer": "path_hierarchy", @@ -152,11 +113,6 @@ "index": "not_analyzed", "doc_values": true }, - "request_path_hierarchy": { - "type": "string", - "analyzer": "path_hierarchy_lowercase", - "doc_values": false - }, "request_referer": { "type": "string", "index": "not_analyzed", @@ -240,6 +196,16 @@ "type": "string", "index": "not_analyzed", "doc_values": true + }, + "user_email": { + "type": "string", + "index": "not_analyzed", + "doc_values": true + }, + "user_registration_source": { + "type": "string", + "index": "not_analyzed", + "doc_values": true } } } diff --git a/src/api-umbrella/proxy/hooks/log_api_backend_proxy.lua b/src/api-umbrella/proxy/hooks/log_api_backend_proxy.lua deleted file mode 100644 index 471457b64..000000000 --- a/src/api-umbrella/proxy/hooks/log_api_backend_proxy.lua +++ /dev/null @@ -1,28 +0,0 @@ -local log_utils = require "api-umbrella.proxy.log_utils" - -if log_utils.ignore_request() then - return -end - -local ngx_var = ngx.var -local log_timing_id = ngx_var.x_api_umbrella_request_id .. "_upstream_response_time" -local upstream_response_time = tonumber(ngx_var.upstream_response_time) -if upstream_response_time then - if config["app_env"] == "test" and ngx.var.http_x_api_umbrella_test_simulate_out_of_order_logging == "true" then - -- For the test environment, simulate the rare case where the initial - -- proxy's logging occurs before this backend proxy's logging. - -- - -- This is important to test, since log_initial_proxy.lua's behavior - -- changes when this edge case is hit, and we continue logging inside a - -- timer callback. Since not all nginx variables are available in the timer - -- context, we want to make sure we can reliably test this scenario and - -- ensure that code-path works (rather than it being rare and hard to - -- reproduce in the test suite). 
- local function set_fake_delayed_response_time() - ngx.shared.logs:set(log_timing_id, 99, 60) - end - ngx.timer.at(0.2, set_fake_delayed_response_time) - else - ngx.shared.logs:set(log_timing_id, upstream_response_time, 60) - end -end diff --git a/src/api-umbrella/proxy/hooks/log_initial_proxy.lua b/src/api-umbrella/proxy/hooks/log_initial_proxy.lua index b063c13eb..543d16e78 100644 --- a/src/api-umbrella/proxy/hooks/log_initial_proxy.lua +++ b/src/api-umbrella/proxy/hooks/log_initial_proxy.lua @@ -1,251 +1,49 @@ -local iconv = require "iconv" -local elasticsearch_encode_json = require "api-umbrella.utils.elasticsearch_encode_json" local flatten_headers = require "api-umbrella.utils.flatten_headers" local log_utils = require "api-umbrella.proxy.log_utils" -local logger = require "resty.logger.socket" -local luatz = require "luatz" -local mongo = require "api-umbrella.utils.mongo" -local sha256 = require "resty.sha256" -local str = require "resty.string" -local user_agent_parser = require "api-umbrella.proxy.user_agent_parser" -local utils = require "api-umbrella.proxy.utils" if log_utils.ignore_request() then return end -local truncate_header = log_utils.truncate_header - local ngx_ctx = ngx.ctx local ngx_var = ngx.var - -local syslog_facility = 16 -- local0 -local syslog_severity = 6 -- info -local syslog_priority = (syslog_facility * 8) + syslog_severity -local syslog_version = 1 - -local timezone = luatz.get_tz(config["analytics"]["timezone"]) - --- Cache the last geocoded location for each city in a separate index. When --- faceting by city names on the log index (for displaying on a map), there --- doesn't appear to be an easy way to fetch the associated locations for each --- city facet. This allows us to perform a separate lookup to fetch the --- pre-geocoded locations for each city. --- --- The geoip stuff actually returns different geocodes for different parts of --- cities. This approach rolls up each city to the last geocoded location --- within that city, so it's not perfect, but for now it'll do. -local function cache_city_geocode(premature, id, data) - if premature then - return - end - - local id_hash = sha256:new() - id_hash:update(id) - id_hash = id_hash:final() - id_hash = str.to_hex(id_hash) - local record = { - _id = id_hash, - country = data["request_ip_country"], - region = data["request_ip_region"], - city = data["request_ip_city"], - location = { - type = "Point", - coordinates = { - data["request_ip_lon"], - data["request_ip_lat"], - }, - }, - updated_at = { ["$date"] = { ["$numberLong"] = tostring(ngx.now() * 1000) } }, - } - - local _, err = mongo.update("log_city_locations", record["_id"], record) - if err then - ngx.log(ngx.ERR, "failed to cache city location: ", err) - end -end - -local function cache_new_city_geocode(data) - local id = (data["request_ip_country"] or "") .. "-" .. (data["request_ip_region"] or "") .. "-" .. (data["request_ip_city"] or "") - - -- Only cache the first city location per startup to prevent lots of indexing - -- churn re-indexing the same city. - if not ngx.shared.geocode_city_cache:get(id) then - ngx.shared.geocode_city_cache:set(id, true) - - -- Perform the actual cache call in a timer because the http library isn't - -- supported directly in the log_by_lua context. - ngx.timer.at(0, cache_city_geocode, id, data) - end -end - -local function log_request(data) - local syslog_message = "<" .. syslog_priority .. ">" - .. syslog_version - .. " " .. os.date("!%Y-%m-%dT%TZ", data["timestamp_utc"] / 1000) -- timestamp - .. 
" -" -- hostname - .. " api-umbrella" -- app-name - .. " -" -- procid - .. " -" -- msgid - .. " -" -- structured-data - .. " @cee:" -- CEE-enhanced logging for rsyslog to parse JSON - .. elasticsearch_encode_json({ raw = data }) -- JSON data - .. "\n" - - -- Check the syslog message length to ensure it doesn't exceed the configured - -- rsyslog maxMessageSize value. - -- - -- In general, this shouldn't be possible, since URLs can't exceed 8KB, and - -- we truncate the various headers that users can control for logging - -- purposes. However, this provides an extra sanity check to ensure this - -- doesn't unexpectedly pop up (eg, if we add additional headers we forget to - -- truncate). - local syslog_message_length = string.len(syslog_message) - if syslog_message_length > 32000 then - ngx.log(ngx.ERR, "request syslog message longer than expected - analytics logging may fail: ", syslog_message_length) - end - - -- Init the resty logger socket. - if not logger.initted() then - local ok, err = logger.init{ - host = config["rsyslog"]["host"], - port = config["rsyslog"]["port"], - flush_limit = 4096, -- 4KB - drop_limit = 10485760, -- 10MB - periodic_flush = 0.1, - } - - if not ok then - ngx.log(ngx.ERR, "failed to initialize the logger: ", err) - return - end - end - - local _, err = logger.log(syslog_message) - if err then - ngx.log(ngx.ERR, "failed to log message: ", err) - return - end - - if data["timer_backend_response"] then - local log_timing_id = data["id"] .. "_upstream_response_time" - ngx.shared.logs:delete(log_timing_id) - end - - if data["request_ip_lat"] then - cache_new_city_geocode(data) - end -end - -local function combine_log_data(premature, data) - if premature then - return - end - - -- If we are combining data after waiting for the backend data become - -- populated (this should be rare), then check for the timer information - -- again from the backend. - if data["_timer_backend_response"] == "pending" then - local log_timing_id = data["id"] .. "_upstream_response_time" - data["_timer_backend_response"] = ngx.shared.logs:get(log_timing_id) - end - - -- Pop the temporary variables off the data table. - local timer_backend_response = data["_timer_backend_response"] - data["_timer_backend_response"] = nil - local upstream_response_time = data["_upstream_response_time"] - data["_upstream_response_time"] = nil - - -- If we have more accurate timing information from the API backend layer, - -- then calculate additional timings. - if type(timer_backend_response) == "number" then - data["timer_backend_response"] = timer_backend_response - - -- Try to determine the overhead API Umbrella incurred on the request. - -- First we compare the upstream times from this initial proxy to the - -- backend api router proxy. Note that we don't use the "request_time" - -- variables, since that could be affected by slow clients. - data["timer_proxy_overhead"] = (tonumber(upstream_response_time) or 0) - timer_backend_response - - -- Since we're using the upstream response times for determining overhead, - -- next add in the amount of time we've calculated that we've used - -- internally in the Lua code. - -- - -- Note: Due to how openresty caches the ngx.now() calls (unless we call - -- ngx.update_time, which we don't want to do on every request), this timer - -- will be very approximate, but we mainly want this for detecting if - -- things really start to increase dramatically. 
- if data["timer_internal"] then - data["timer_proxy_overhead"] = data["timer_proxy_overhead"] + data["timer_internal"] - end - end - - -- Turn any internal fields from seconds (with millisecond precision - -- decimals) into milliseconds. - for _, msec_field in ipairs(log_utils.MSEC_FIELDS) do - if data[msec_field] then - -- Round the results after turning into milliseconds. Since all the nginx - -- timers only have millisecond precision, any decimals left after - -- converting are just an artifact of the original float storage or math - -- (eg, 1.00001... or 1.999988..). - data[msec_field] = utils.round(data[msec_field] * 1000) - end - end - - log_request(data) -end +local sec_to_ms = log_utils.sec_to_ms local function build_log_data() -- Fetch all the request and response headers. local request_headers = flatten_headers(ngx.req.get_headers()); local response_headers = flatten_headers(ngx.resp.get_headers()); - -- The GeoIP module returns ISO-8859-1 encoded city names, but we need UTF-8 - -- for inserting into ElasticSearch. - local geoip_city = ngx_var.geoip_city - if geoip_city then - local encoding_converter = iconv.new("utf-8//IGNORE", "iso-8859-1") - local geoip_city_encoding_err - geoip_city, geoip_city_encoding_err = encoding_converter:iconv(geoip_city) - if geoip_city_encoding_err then - ngx.log(ngx.ERR, "encoding error for geoip city: ", geoip_city_encoding_err, geoip_city) - end - end - -- Put together the basic log data. local id = ngx_var.x_api_umbrella_request_id local data = { denied_reason = ngx_ctx.gatekeeper_denied_code, id = id, - request_accept = truncate_header(request_headers["accept"], 200), - request_accept_encoding = truncate_header(request_headers["accept-encoding"], 200), + request_accept = request_headers["accept"], + request_accept_encoding = request_headers["accept-encoding"], request_basic_auth_username = ngx_var.remote_user, - request_connection = truncate_header(request_headers["connection"], 200), - request_content_type = truncate_header(request_headers["content-type"], 200), + request_connection = request_headers["connection"], + request_content_type = request_headers["content-type"], request_ip = ngx_var.remote_addr, - request_ip_city = geoip_city, - request_ip_country = ngx_var.geoip_city_country_code, - request_ip_region = ngx_var.geoip_region, request_method = ngx_var.request_method, - request_origin = truncate_header(request_headers["origin"], 200), - request_referer = truncate_header(request_headers["referer"], 200), - request_size = tonumber(ngx_var.request_length), - request_url_host = truncate_header(request_headers["host"], 200), + request_origin = request_headers["origin"], + request_referer = request_headers["referer"], + request_size = ngx_var.request_length, + request_url_host = request_headers["host"], request_url_port = ngx_var.real_port, request_url_scheme = ngx_var.real_scheme, - request_user_agent = truncate_header(request_headers["user-agent"], 400), - response_age = tonumber(response_headers["age"]), - response_cache = truncate_header(response_headers["x-cache"], 200), - response_content_encoding = truncate_header(response_headers["content-encoding"], 200), - response_content_length = tonumber(response_headers["content-length"]), - response_content_type = truncate_header(response_headers["content-type"], 200), + request_user_agent = request_headers["user-agent"], + response_age = response_headers["age"], + response_cache = response_headers["x-cache"], + response_content_encoding = response_headers["content-encoding"], + 
response_content_length = response_headers["content-length"], + response_content_type = response_headers["content-type"], response_server = ngx_var.upstream_http_server, - response_size = tonumber(ngx_var.bytes_sent), - response_status = tonumber(ngx_var.status), - response_transfer_encoding = truncate_header(response_headers["transfer-encoding"], 200), - timer_internal = ngx_ctx.internal_overhead, - timer_response = tonumber(ngx_var.request_time), - timestamp_utc = tonumber(ngx_var.msec), + response_size = ngx_var.bytes_sent, + response_status = ngx_var.status, + response_transfer_encoding = response_headers["transfer-encoding"], + timer_response = sec_to_ms(ngx_var.request_time), + timestamp_utc = sec_to_ms(ngx_var.msec), user_id = ngx_ctx.user_id, -- Deprecated @@ -254,87 +52,30 @@ local function build_log_data() legacy_user_registration_source = ngx_ctx.user_registration_source, } - local utc_sec = data["timestamp_utc"] - local tz_offset = timezone:find_current(utc_sec).gmtoff - local tz_sec = utc_sec + tz_offset - local tz_time = os.date("!%Y-%m-%d %H:%M:00", tz_sec) + log_utils.set_request_ip_geo_fields(data) + log_utils.set_computed_timestamp_fields(data) + log_utils.set_computed_url_fields(data) + log_utils.set_computed_user_agent_fields(data) - -- Determine the first day in the ISO week (the most recent Monday). - local tz_week = luatz.gmtime(tz_sec) - if tz_week.wday == 1 then - tz_week.day = tz_week.day - 6 - tz_week:normalize() - elseif tz_week.wday > 2 then - tz_week.day = tz_week.day - tz_week.wday + 2 - tz_week:normalize() - end - - data["timestamp_tz_offset"] = tz_offset * 1000 - data["timestamp_tz_year"] = string.sub(tz_time, 1, 4) .. "-01-01" -- YYYY-01-01 - data["timestamp_tz_month"] = string.sub(tz_time, 1, 7) .. "-01" -- YYYY-MM-01 - data["timestamp_tz_week"] = tz_week:strftime("%Y-%m-%d") -- YYYY-MM-DD of first day in ISO week. - data["timestamp_tz_date"] = string.sub(tz_time, 1, 10) -- YYYY-MM-DD - data["timestamp_tz_hour"] = string.sub(tz_time, 1, 13) .. ":00:00" -- YYYY-MM-DD HH:00:00 - data["timestamp_tz_minute"] = tz_time -- YYYY-MM-DD HH:MM:00 - - local geoip_latitude = ngx_var.geoip_latitude - if geoip_latitude then - data["request_ip_lat"] = tonumber(geoip_latitude) - data["request_ip_lon"] = tonumber(ngx_var.geoip_longitude) - - data["legacy_request_ip_location"] = { - lat = data["request_ip_lat"], - lon = data["request_ip_lon"], - } - end - - -- The geoip database returns "00" for unknown regions sometimes: - -- http://maxmind.com/download/geoip/kml/index.html Remove these and treat - -- these as nil. - if data["request_ip_region"] == "00" then - data["request_ip_region"] = nil - end + return log_utils.normalized_data(data) +end - if request_headers["user-agent"] then - local user_agent_data = user_agent_parser(request_headers["user-agent"]) - if user_agent_data then - data["request_user_agent_family"] = user_agent_data["family"] - data["request_user_agent_type"] = user_agent_data["type"] - end +local function log_request() + local data = build_log_data() + local syslog_message = log_utils.build_syslog_message(data) + local _, err = log_utils.send_syslog_message(syslog_message) + if err then + ngx.log(ngx.ERR, "failed to log message: ", err) + return end - -- Set the various URL fields. - log_utils.set_url_fields(data) - - -- Set the default timer_proxy_overhead. This may be overwritten by a more - -- accurate number in combine_log_data(). - data["timer_proxy_overhead"] = ngx_ctx.internal_overhead - - -- Grab the upstream_respone_time for temporary use. 
- data["_upstream_response_time"] = ngx_var.upstream_response_time - - -- Check for log data set by the separate api backend proxy - -- (log_api_backend_proxy.lua). This is used for timing information. - -- - -- If this value is marked as "pending" then we've hit an edge case where the - -- initial proxy is being logged before the api backend proxy. This shouldn't - -- happen frequently, but does sometimes crop up in testing. When this - -- happens, we'll defer logging for a second to give the backend proxy a - -- chance to finish it's logging (which will set the shared variable). - -- - -- But we want to generally avoid setting timers to log each individual - -- request, since there's a limit to how many timers we can have. So that's - -- why we only do it the handle this edge case that should be rare. - local log_timing_id = id .. "_upstream_response_time" - data["_timer_backend_response"] = ngx.shared.logs:get(log_timing_id) - if data["_timer_backend_response"] == "pending" then - ngx.timer.at(1, combine_log_data, data) - else - combine_log_data(false, data) + if data["request_ip_lat"] then + log_utils.cache_new_city_geocode(data) end end -local ok, err = pcall(build_log_data) + +local ok, err = pcall(log_request) if not ok then ngx.log(ngx.ERR, "failed to log request: ", err) end diff --git a/src/api-umbrella/proxy/log_utils.lua b/src/api-umbrella/proxy/log_utils.lua index b7b6b5963..c46f10240 100644 --- a/src/api-umbrella/proxy/log_utils.lua +++ b/src/api-umbrella/proxy/log_utils.lua @@ -1,28 +1,50 @@ +local elasticsearch_encode_json = require "api-umbrella.utils.elasticsearch_encode_json" local escape_uri_non_ascii = require "api-umbrella.utils.escape_uri_non_ascii" +local iconv = require "iconv" +local logger = require "resty.logger.socket" +local luatz = require "luatz" +local mongo = require "api-umbrella.utils.mongo" local plutils = require "pl.utils" +local sha256 = require "resty.sha256" +local str = require "resty.string" +local user_agent_parser = require "api-umbrella.proxy.user_agent_parser" +local utils = require "api-umbrella.proxy.utils" +local ngx_ctx = ngx.ctx +local ngx_var = ngx.var +local round = utils.round local split = plutils.split -local _M = {} +local syslog_facility = 16 -- local0 +local syslog_severity = 6 -- info +local syslog_priority = (syslog_facility * 8) + syslog_severity +local syslog_version = 1 +local timezone = luatz.get_tz(config["analytics"]["timezone"]) -_M.MSEC_FIELDS = { - "timer_backend_response", - "timer_internal", - "timer_proxy_overhead", - "timer_response", - "timestamp_utc", -} +local _M = {} -function _M.ignore_request() - -- Don't log some of our internal API calls used to determine if API Umbrella - -- is fully started and ready (since logging of these requests will likely - -- fail anyway if things aren't ready). 
- local uri = ngx.ctx.original_uri or ngx.var.uri - if uri == "/api-umbrella/v1/health" or uri == "/api-umbrella/v1/state" then - return true +local function truncate_string(value, max_length) + if string.len(value) > max_length then + return string.sub(value, 1, max_length) else - return false + return value + end +end + +local function lowercase_truncate(value, max_length) + if not value or type(value) ~= "string" then + return nil end + + return string.lower(truncate_string(value, max_length)) +end + +local function uppercase_truncate(value, max_length) + if not value or type(value) ~= "string" then + return nil + end + + return string.upper(truncate_string(value, max_length)) end -- To make drill-downs queries easier, split up how the path is stored. @@ -107,66 +129,149 @@ local function set_url_hierarchy(data) end end -local function elasticsearch_sanitize_args(data) - if not data then return end - - for key, value in pairs(data) do - local value_type = type(value) - if value_type == "table" then - -- Flatten any arguments with array values. - -- - -- These stem from duplicate argument names, like ?foo=bar&foo=baz - -- (resulting in { foo = { bar, baz } }). These need to be flattened into - -- a string value so ElasticSearch doesn't try to store these as - -- differing types depending on whether a single values comes in - -- (?foo=bar) or an array (?foo=bar&foo=baz). - -- - -- ngx.decode_args doesn't support other more deeply nested tables, so we - -- don't need to worry about recursing. - value = escape_uri_non_ascii(table.concat(value, ",")) - elseif value_type ~= "string" then - -- Convert any other types to strings to ensure ElasticSearch always - -- indexes things as a consistent type. - -- - -- This helps ensure boolean arguments from ngx.decode_args (?foo, - -- resulting in { foo = true }), can be mixed with string types. - value = tostring(value) - end +-- Cache the last geocoded location for each city in a separate index. When +-- faceting by city names on the log index (for displaying on a map), there +-- doesn't appear to be an easy way to fetch the associated locations for each +-- city facet. This allows us to perform a separate lookup to fetch the +-- pre-geocoded locations for each city. +-- +-- The geoip stuff actually returns different geocodes for different parts of +-- cities. This approach rolls up each city to the last geocoded location +-- within that city, so it's not perfect, but for now it'll do. +local function cache_city_geocode(premature, id, data) + if premature then + return + end - -- Escaping any non-ASCII chars to prevent invalid or wonky UTF-8 - -- sequences from generating invalid JSON that will prevent ElasticSearch - -- from indexing the request. - data[key] = escape_uri_non_ascii(value) + local id_hash = sha256:new() + id_hash:update(id) + id_hash = id_hash:final() + id_hash = str.to_hex(id_hash) + local record = { + _id = id_hash, + country = data["request_ip_country"], + region = data["request_ip_region"], + city = data["request_ip_city"], + location = { + type = "Point", + coordinates = { + data["request_ip_lon"], + data["request_ip_lat"], + }, + }, + updated_at = { ["$date"] = { ["$numberLong"] = tostring(ngx.now() * 1000) } }, + } - -- As of ElasticSearch 2, field names cannot contain dots. This affects our - -- nested hash of query parameters, since incoming query parameters may - -- contain dots. For storage purposes, replace these dots with underscores - -- (the same approach LogStash's de_dot plugin takes). 
- -- - -- See: - -- https://www.elastic.co/guide/en/elasticsearch/reference/2.0/breaking_20_mapping_changes.html#_field_names_may_not_contain_dots - -- - -- However, dots look like they'll be allowed again (although, treated as - -- nested objects) in ElasticSearch 5: - -- https://github.com/elastic/elasticsearch/issues/15951 - -- https://github.com/elastic/elasticsearch/pull/18106 - -- https://www.elastic.co/blog/elasticsearch-5-0-0-alpha3-released#_dots_in_field_names - local sanitized_key = ngx.re.gsub(key, "\\.", "_", "jo") - if key ~= sanitized_key then - data[sanitized_key] = data[key] - data[key] = nil + local _, err = mongo.update("log_city_locations", record["_id"], record) + if err then + ngx.log(ngx.ERR, "failed to cache city location: ", err) + end +end + +function _M.ignore_request() + -- Don't log some of our internal API calls used to determine if API Umbrella + -- is fully started and ready (since logging of these requests will likely + -- fail anyway if things aren't ready). + local uri = ngx_ctx.original_uri or ngx_var.uri + if uri == "/api-umbrella/v1/health" or uri == "/api-umbrella/v1/state" then + return true + else + return false + end +end + +function _M.sec_to_ms(value) + value = tonumber(value) + if not value then + return nil + end + + -- Round the results after turning into milliseconds. Since all the nginx + -- timers only have millisecond precision, any decimals left after + -- converting are just an artifact of the original float storage or math + -- (eg, 1.00001... or 1.999988..). + return round(value * 1000) +end + +function _M.cache_new_city_geocode(data) + local id = (data["request_ip_country"] or "") .. "-" .. (data["request_ip_region"] or "") .. "-" .. (data["request_ip_city"] or "") + + -- Only cache the first city location per startup to prevent lots of indexing + -- churn re-indexing the same city. + if not ngx.shared.geocode_city_cache:get(id) then + ngx.shared.geocode_city_cache:set(id, true) + + -- Perform the actual cache call in a timer because the http library isn't + -- supported directly in the log_by_lua context. + ngx.timer.at(0, cache_city_geocode, id, data) + end +end + +function _M.set_request_ip_geo_fields(data) + -- The GeoIP module returns ISO-8859-1 encoded city names, but we need UTF-8 + -- for inserting into ElasticSearch. + local geoip_city = ngx_var.geoip_city + if geoip_city then + local encoding_converter = iconv.new("utf-8//IGNORE", "iso-8859-1") + local geoip_city_encoding_err + geoip_city, geoip_city_encoding_err = encoding_converter:iconv(geoip_city) + if geoip_city_encoding_err then + ngx.log(ngx.ERR, "encoding error for geoip city: ", geoip_city_encoding_err, geoip_city) end end + + -- The geoip database returns "00" for unknown regions sometimes: + -- http://maxmind.com/download/geoip/kml/index.html Remove these and treat + -- these as nil. 
+ local geoip_region = ngx_var.geoip_region + if geoip_region == "00" then + geoip_region = nil + end + + data["request_ip_city"] = geoip_city + data["request_ip_country"] = ngx_var.geoip_city_country_code + data["request_ip_region"] = geoip_region + + local geoip_latitude = ngx_var.geoip_latitude + if geoip_latitude then + data["request_ip_lat"] = tonumber(geoip_latitude) + data["request_ip_lon"] = tonumber(ngx_var.geoip_longitude) + end +end + +function _M.set_computed_timestamp_fields(data) + local utc_sec = data["timestamp_utc"] / 1000 + local tz_offset = timezone:find_current(utc_sec).gmtoff + local tz_sec = utc_sec + tz_offset + local tz_time = os.date("!%Y-%m-%d %H:%M:00", tz_sec) + + -- Determine the first day in the ISO week (the most recent Monday). + local tz_week = luatz.gmtime(tz_sec) + if tz_week.wday == 1 then + tz_week.day = tz_week.day - 6 + tz_week:normalize() + elseif tz_week.wday > 2 then + tz_week.day = tz_week.day - tz_week.wday + 2 + tz_week:normalize() + end + + data["timestamp_tz_offset"] = tz_offset * 1000 + data["timestamp_tz_year"] = string.sub(tz_time, 1, 4) .. "-01-01" -- YYYY-01-01 + data["timestamp_tz_month"] = string.sub(tz_time, 1, 7) .. "-01" -- YYYY-MM-01 + data["timestamp_tz_week"] = tz_week:strftime("%Y-%m-%d") -- YYYY-MM-DD of first day in ISO week. + data["timestamp_tz_date"] = string.sub(tz_time, 1, 10) -- YYYY-MM-DD + data["timestamp_tz_hour"] = string.sub(tz_time, 1, 13) .. ":00:00" -- YYYY-MM-DD HH:00:00 + data["timestamp_tz_minute"] = tz_time -- YYYY-MM-DD HH:MM:00 end -function _M.set_url_fields(data) +function _M.set_computed_url_fields(data) -- Extract just the path portion of the URL. -- -- Note: we're extracting this from the original "request_uri" variable here, -- rather than just using the original "uri" variable by itself, since -- "request_uri" has the raw encoding of the URL as it was passed in (eg, for -- url escaped encodings), which we'll prefer for consistency. - local parts = split(ngx.ctx.original_request_uri, "?", true, 2) + local parts = split(ngx_ctx.original_request_uri, "?", true, 2) data["request_url_path"] = escape_uri_non_ascii(parts[1]) -- Extract the query string arguments. @@ -176,34 +281,137 @@ function _M.set_url_fields(data) -- reflect the original URL (and not after any internal rewriting). if parts[2] then data["request_url_query"] = escape_uri_non_ascii(parts[2]) + end - if config["analytics"]["log_request_url_query_params_separately"] then - data["legacy_request_url_query_hash"] = ngx.decode_args(data["request_url_query"]) + set_url_hierarchy(data) +end - -- Sanitize the decoded the argument string table to prepare it for - -- ElasticSearch storage. 
- elasticsearch_sanitize_args(data["legacy_request_url_query_hash"]) +function _M.set_computed_user_agent_fields(data) + if data["request_user_agent"] then + local user_agent_data = user_agent_parser(data["request_user_agent"]) + if user_agent_data then + data["request_user_agent_family"] = user_agent_data["family"] + data["request_user_agent_type"] = user_agent_data["type"] end end +end + +function _M.normalized_data(data) + local normalized = { + denied_reason = lowercase_truncate(data["denied_reason"], 50), + id = lowercase_truncate(data["id"], 20), + request_accept = lowercase_truncate(data["request_accept"], 200), + request_accept_encoding = lowercase_truncate(data["request_accept_encoding"], 200), + request_basic_auth_username = lowercase_truncate(data["request_basic_auth_username"], 200), + request_connection = lowercase_truncate(data["request_connection"], 200), + request_content_type = lowercase_truncate(data["request_content_type"], 200), + request_ip = lowercase_truncate(data["request_ip"], 45), + request_ip_city = lowercase_truncate(data["request_ip_city"], 200), + request_ip_country = uppercase_truncate(data["request_ip_country"], 2), + request_ip_lat = tonumber(data["request_ip_lat"]), + request_ip_lon = tonumber(data["request_ip_lon"]), + request_ip_region = uppercase_truncate(data["request_ip_region"], 2), + request_method = uppercase_truncate(data["request_method"], 10), + request_origin = lowercase_truncate(data["request_origin"], 200), + request_referer = lowercase_truncate(data["request_referer"], 200), + request_size = tonumber(data["request_size"]), + request_url_hierarchy = data["request_url_hierarchy"], + request_url_host = lowercase_truncate(data["request_url_host"], 200), + request_url_path = lowercase_truncate(data["request_url_path"], 400), + request_url_path_level1 = lowercase_truncate(data["request_url_path_level1"], 40), + request_url_path_level2 = lowercase_truncate(data["request_url_path_level2"], 40), + request_url_path_level3 = lowercase_truncate(data["request_url_path_level3"], 40), + request_url_path_level4 = lowercase_truncate(data["request_url_path_level4"], 40), + request_url_path_level5 = lowercase_truncate(data["request_url_path_level5"], 40), + request_url_path_level6 = lowercase_truncate(data["request_url_path_level6"], 40), + request_url_port = tonumber(data["request_url_port"]), + request_url_query = lowercase_truncate(data["request_url_query"], 400), + request_url_scheme = lowercase_truncate(data["request_url_scheme"], 10), + request_user_agent = lowercase_truncate(data["request_user_agent"], 400), + request_user_agent_family = lowercase_truncate(data["request_user_agent_family"], 100), + request_user_agent_type = lowercase_truncate(data["request_user_agent_type"], 100), + response_age = tonumber(data["response_age"]), + response_cache = lowercase_truncate(data["response_cache"], 200), + response_content_encoding = lowercase_truncate(data["response_content_encoding"], 200), + response_content_length = tonumber(data["response_content_length"]), + response_content_type = lowercase_truncate(data["response_content_type"], 200), + response_server = lowercase_truncate(data["response_server"], 100), + response_size = tonumber(data["response_size"]), + response_status = tonumber(data["response_status"]), + response_transfer_encoding = lowercase_truncate(data["response_transfer_encoding"], 200), + timer_response = tonumber(data["timer_response"]), + timestamp_tz_date = uppercase_truncate(data["timestamp_tz_date"], 20), + timestamp_tz_hour = 
uppercase_truncate(data["timestamp_tz_hour"], 20), + timestamp_tz_minute = uppercase_truncate(data["timestamp_tz_minute"], 20), + timestamp_tz_month = uppercase_truncate(data["timestamp_tz_month"], 20), + timestamp_tz_offset = tonumber(data["timestamp_tz_offset"]), + timestamp_tz_week = uppercase_truncate(data["timestamp_tz_week"], 20), + timestamp_tz_year = uppercase_truncate(data["timestamp_tz_year"], 20), + timestamp_utc = tonumber(data["timestamp_utc"]), + user_id = lowercase_truncate(data["user_id"], 36), + + -- Deprecated + legacy_api_key = lowercase_truncate(data["legacy_api_key"], 40), + legacy_user_email = lowercase_truncate(data["legacy_user_email"], 200), + legacy_user_registration_source = lowercase_truncate(data["legacy_user_registration_source"], 200), + } - data["legacy_request_url"] = data["request_url_scheme"] .. "://" .. data["request_url_host"] .. data["request_url_path"] - if data["request_url_query"] then - data["legacy_request_url"] = data["legacy_request_url"] .. "?" .. data["request_url_query"] + if normalized["request_url_hierarchy"] then + for index, path in ipairs(normalized["request_url_hierarchy"]) do + normalized["request_url_hierarchy"][index] = lowercase_truncate(path, 400) + end end - set_url_hierarchy(data) + return normalized end -function _M.truncate_header(value, max_length) - if not value or type(value) ~= "string" then - return value +function _M.build_syslog_message(data) + local syslog_message = "<" .. syslog_priority .. ">" + .. syslog_version + .. " " .. os.date("!%Y-%m-%dT%TZ", data["timestamp_utc"] / 1000) -- timestamp + .. " -" -- hostname + .. " api-umbrella" -- app-name + .. " -" -- procid + .. " -" -- msgid + .. " -" -- structured-data + .. " @cee:" -- CEE-enhanced logging for rsyslog to parse JSON + .. elasticsearch_encode_json({ raw = data }) -- JSON data + .. "\n" + + return syslog_message +end + +function _M.send_syslog_message(syslog_message) + -- Check the syslog message length to ensure it doesn't exceed the configured + -- rsyslog maxMessageSize value. + -- + -- In general, this shouldn't be possible, since URLs can't exceed 8KB, and + -- we truncate the various headers that users can control for logging + -- purposes. However, this provides an extra sanity check to ensure this + -- doesn't unexpectedly pop up (eg, if we add additional headers we forget to + -- truncate). + local syslog_message_length = string.len(syslog_message) + if syslog_message_length > 32000 then + ngx.log(ngx.ERR, "request syslog message longer than expected - analytics logging may fail: ", syslog_message_length) end - if string.len(value) > max_length then - return string.sub(value, 1, max_length) - else - return value + -- Init the resty logger socket. 
+ if not logger.initted() then + local ok, err = logger.init{ + host = config["rsyslog"]["host"], + port = config["rsyslog"]["port"], + flush_limit = 4096, -- 4KB + drop_limit = 10485760, -- 10MB + periodic_flush = 0.1, + } + + if not ok then + ngx.log(ngx.ERR, "failed to initialize the logger: ", err) + return + end end + + return logger.log(syslog_message) end return _M diff --git a/src/api-umbrella/web-app/app/models/log_search/base.rb b/src/api-umbrella/web-app/app/models/log_search/base.rb index 4962b1a9e..ea9a3726e 100644 --- a/src/api-umbrella/web-app/app/models/log_search/base.rb +++ b/src/api-umbrella/web-app/app/models/log_search/base.rb @@ -2,11 +2,10 @@ class LogSearch::Base attr_accessor :query, :query_options attr_reader :client, :start_time, :end_time, :interval, :region, :country, :state - CASE_SENSITIVE_FIELDS = [ - "api_key", + UPPERCASE_FIELDS = [ + "request_method", "request_ip_country", "request_ip_region", - "request_ip_city", ].freeze def self.policy_class diff --git a/src/api-umbrella/web-app/app/models/log_search/elastic_search.rb b/src/api-umbrella/web-app/app/models/log_search/elastic_search.rb index cfee207dd..d375231a0 100644 --- a/src/api-umbrella/web-app/app/models/log_search/elastic_search.rb +++ b/src/api-umbrella/web-app/app/models/log_search/elastic_search.rb @@ -110,8 +110,12 @@ def parse_query_builder(query) query["rules"].each do |rule| filter = {} - if(!CASE_SENSITIVE_FIELDS.include?(rule["field"]) && rule["value"].kind_of?(String)) - rule["value"].downcase! + if(rule["value"].kind_of?(String)) + if(UPPERCASE_FIELDS.include?(rule["field"])) + rule["value"].upcase! + else + rule["value"].downcase! + end end case(rule["operator"]) diff --git a/src/api-umbrella/web-app/app/models/log_search/sql.rb b/src/api-umbrella/web-app/app/models/log_search/sql.rb index 519801b7b..201824162 100644 --- a/src/api-umbrella/web-app/app/models/log_search/sql.rb +++ b/src/api-umbrella/web-app/app/models/log_search/sql.rb @@ -224,9 +224,12 @@ def parse_query_builder(query) operator = nil value = rule["value"] - if(!CASE_SENSITIVE_FIELDS.include?(rule["field"]) && value.kind_of?(String)) - # FIXME: Is this needed now that everything is case-sensitive in SQL? - # value.downcase! + if(value.kind_of?(String)) + if(UPPERCASE_FIELDS.include?(rule["field"])) + value.upcase! + else + value.downcase! + end end if(value.present?) diff --git a/templates/etc/nginx/router.conf.mustache b/templates/etc/nginx/router.conf.mustache index 195c9a47d..bbc4368b9 100644 --- a/templates/etc/nginx/router.conf.mustache +++ b/templates/etc/nginx/router.conf.mustache @@ -226,7 +226,6 @@ http { server_name _; set $x_api_umbrella_request_id $http_x_api_umbrella_request_id; - log_by_lua_file '{{_src_root_dir}}/src/api-umbrella/proxy/hooks/log_api_backend_proxy.lua'; # Enable keep alive connections to the backend servers. 
proxy_http_version 1.1; diff --git a/templates/etc/rsyslog.conf.mustache b/templates/etc/rsyslog.conf.mustache index 1e02dfe11..3bc949548 100644 --- a/templates/etc/rsyslog.conf.mustache +++ b/templates/etc/rsyslog.conf.mustache @@ -38,18 +38,9 @@ module(load="omelasticsearch") if($!raw!legacy_api_key != "") then { set $!usr!es!api_key = $!raw!legacy_api_key; } -if($!raw!timer_backend_response != "") then { - set $!usr!es!backend_response_time = $!raw!timer_backend_response; -} if($!raw!denied_reason != "") then { set $!usr!es!gatekeeper_denied_code = $!raw!denied_reason; } -if($!raw!timer_internal != "") then { - set $!usr!es!internal_gatekeeper_time = $!raw!timer_internal; -} -if($!raw!timer_proxy_overhead != "") then { - set $!usr!es!proxy_overhead = $!raw!timer_proxy_overhead; -} if($!raw!request_accept != "") then { set $!usr!es!request_accept = $!raw!request_accept; } @@ -83,9 +74,6 @@ if($!raw!request_ip_city != "") then { if($!raw!request_ip_country != "") then { set $!usr!es!request_ip_country = $!raw!request_ip_country; } -if($!raw!legacy_request_ip_location != "") then { - set $!usr!es!request_ip_location = $!raw!legacy_request_ip_location; -} if($!raw!request_ip_region != "") then { set $!usr!es!request_ip_region = $!raw!request_ip_region; } @@ -98,9 +86,6 @@ if($!raw!request_origin != "") then { if($!raw!request_url_path != "") then { set $!usr!es!request_path = $!raw!request_url_path; } -if($!raw!legacy_request_url_query_hash != "") then { - set $!usr!es!request_query = $!raw!legacy_request_url_query_hash; -} if($!raw!request_referer != "") then { set $!usr!es!request_referer = $!raw!request_referer; } @@ -110,9 +95,6 @@ if($!raw!request_url_scheme != "") then { if($!raw!request_size != "") then { set $!usr!es!request_size = $!raw!request_size; } -if($!raw!legacy_request_url != "") then { - set $!usr!es!request_url = $!raw!legacy_request_url; -} if($!raw!request_user_agent != "") then { set $!usr!es!request_user_agent = $!raw!request_user_agent; } @@ -152,12 +134,12 @@ if($!raw!timer_response != "") then { if($!raw!response_transfer_encoding != "") then { set $!usr!es!response_transfer_encoding = $!raw!response_transfer_encoding; } -if($!raw!legacy_user_email != "") then { - set $!usr!es!user_email = $!raw!legacy_user_email; -} if($!raw!user_id != "") then { set $!usr!es!user_id = $!raw!user_id; } +if($!raw!user_email != "") then { + set $!usr!es!user_email = $!raw!legacy_user_email; +} if($!raw!legacy_user_registration_source != "") then { set $!usr!es!user_registration_source = $!raw!legacy_user_registration_source; } @@ -185,6 +167,8 @@ local0.info action( bulkId="elasticsearch-id" dynBulkId="on" + errorfile="{{log_dir}}/rsyslog/elasticsearch_error.log" + # Allow bulk indexing of batches *up to* this size. # # Note that as long as Elasticsearch is keeping up and data isn't being @@ -193,9 +177,6 @@ local0.info action( # http://www.gossamer-threads.com/lists/rsyslog/users/17550 queue.dequeuebatchsize="5000" - # Require indexing by all replica shards. - asyncrepl="off" - # For the in-memory queue, use a linked-list (so the memory doesn't have to # be pre-allocated based on a fixed size). 
queue.type="LinkedList" @@ -348,15 +329,6 @@ if($!raw!response_status != "") then { if($!raw!response_transfer_encoding != "") then { set $!usr!sql!response_transfer_encoding = $!raw!response_transfer_encoding; } -if($!raw!timer_backend_response != "") then { - set $!usr!sql!timer_backend_response = $!raw!timer_backend_response; -} -if($!raw!timer_internal != "") then { - set $!usr!sql!timer_internal = $!raw!timer_internal; -} -if($!raw!timer_proxy_overhead != "") then { - set $!usr!sql!timer_proxy_overhead = $!raw!timer_proxy_overhead; -} if($!raw!timer_response != "") then { set $!usr!sql!timer_response = $!raw!timer_response; } From 1f33baf3cc0d3b2ee33be9b912dd965b84f74f23 Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Wed, 1 Feb 2017 20:01:50 -0700 Subject: [PATCH 003/367] gzip the nginx access logs as they're written. --- config/default.yml | 3 ++- config/test.yml | 2 +- templates/etc/nginx/router.conf.mustache | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/config/default.yml b/config/default.yml index fe60e2dfd..192fa72a1 100644 --- a/config/default.yml +++ b/config/default.yml @@ -14,7 +14,8 @@ https_port: 443 nginx: workers: auto worker_connections: 8192 - access_log_options: buffer=32k flush=10s + access_log_filename: access.log.gz + access_log_options: buffer=256k gzip=4 flush=10s proxy_connect_timeout: 60 proxy_read_timeout: 60 proxy_send_timeout: 60 diff --git a/config/test.yml b/config/test.yml index 1307c1c88..b2b04f1dd 100644 --- a/config/test.yml +++ b/config/test.yml @@ -12,7 +12,7 @@ nginx: # hard-coded for test purposes so we have a more stable baseline and ensure # our tests always run with multiple workers. workers: 2 - access_log_options: + access_log_options: gzip=4 proxy_connect_timeout: 10 proxy_read_timeout: 10 proxy_send_timeout: 10 diff --git a/templates/etc/nginx/router.conf.mustache b/templates/etc/nginx/router.conf.mustache index bbc4368b9..4edf7adf3 100644 --- a/templates/etc/nginx/router.conf.mustache +++ b/templates/etc/nginx/router.conf.mustache @@ -25,7 +25,7 @@ http { '$x_api_umbrella_request_id ' '$scheme://$host:$server_port ' '$request_time $sent_http_x_cache'; - access_log {{log_dir}}/nginx/access.log combined_extended {{nginx.access_log_options}}; + access_log {{log_dir}}/nginx/{{nginx.access_log_filename}} combined_extended {{nginx.access_log_options}}; client_body_temp_path {{tmp_dir}}/nginx-client_body_temp; proxy_temp_path {{tmp_dir}}/nginx-proxy_temp; From 199f155158761a6af145b54ee60389a057cdcc01 Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Wed, 1 Feb 2017 21:54:50 -0700 Subject: [PATCH 004/367] Update logging tests for new schema. 
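The main schema-facing change the tests pick up is that log_utils.normalized_data now lowercases most string fields (and uppercases a few, such as request_method and the country/region codes) and truncates them before logging, so the assertions switch from the raw header casing to the normalized form ("cURL" -> "curl", "MISS" -> "miss", and so on). A rough sketch of the effect, assuming the API Umbrella OpenResty environment (log_utils needs the global config at require time):

    local log_utils = require "api-umbrella.proxy.log_utils"

    local normalized = log_utils.normalized_data({
      request_user_agent_family = "cURL",  -- logged as "curl"
      response_cache = "MISS",             -- logged as "miss"
      request_method = "get",              -- uppercased instead: "GET"
    })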
--- config/elasticsearch_templates.json | 2 +- .../proxy/hooks/log_initial_proxy.lua | 14 +- src/api-umbrella/proxy/log_utils.lua | 12 +- templates/etc/rsyslog.conf.mustache | 5 +- test/proxy/logging/test_basics.rb | 144 +++++++--- test/proxy/logging/test_ip_geocoding.rb | 11 +- ...est_request_url_query_params_separately.rb | 255 ------------------ test/proxy/logging/test_special_chars.rb | 72 +++-- .../api_umbrella_test_helpers/logging.rb | 14 +- 9 files changed, 164 insertions(+), 365 deletions(-) delete mode 100644 test/proxy/logging/test_request_url_query_params_separately.rb diff --git a/config/elasticsearch_templates.json b/config/elasticsearch_templates.json index 4c8bb14dd..b3bd37c8b 100644 --- a/config/elasticsearch_templates.json +++ b/config/elasticsearch_templates.json @@ -127,7 +127,7 @@ "type": "integer", "doc_values": true }, - "request_url": { + "request_query": { "type": "string", "index": "not_analyzed", "doc_values": true diff --git a/src/api-umbrella/proxy/hooks/log_initial_proxy.lua b/src/api-umbrella/proxy/hooks/log_initial_proxy.lua index 543d16e78..67d399eed 100644 --- a/src/api-umbrella/proxy/hooks/log_initial_proxy.lua +++ b/src/api-umbrella/proxy/hooks/log_initial_proxy.lua @@ -1,12 +1,13 @@ local flatten_headers = require "api-umbrella.utils.flatten_headers" local log_utils = require "api-umbrella.proxy.log_utils" -if log_utils.ignore_request() then +local ngx_ctx = ngx.ctx +local ngx_var = ngx.var + +if log_utils.ignore_request(ngx_ctx, ngx_var) then return end -local ngx_ctx = ngx.ctx -local ngx_var = ngx.var local sec_to_ms = log_utils.sec_to_ms local function build_log_data() @@ -52,15 +53,16 @@ local function build_log_data() legacy_user_registration_source = ngx_ctx.user_registration_source, } - log_utils.set_request_ip_geo_fields(data) + log_utils.set_request_ip_geo_fields(data, ngx_var) log_utils.set_computed_timestamp_fields(data) - log_utils.set_computed_url_fields(data) + log_utils.set_computed_url_fields(data, ngx_ctx) log_utils.set_computed_user_agent_fields(data) return log_utils.normalized_data(data) end local function log_request() + -- Build the log message and send to rsyslog for processing. local data = build_log_data() local syslog_message = log_utils.build_syslog_message(data) local _, err = log_utils.send_syslog_message(syslog_message) @@ -69,12 +71,12 @@ local function log_request() return end + -- After logging, cache any new cities we see from GeoIP in our database. if data["request_ip_lat"] then log_utils.cache_new_city_geocode(data) end end - local ok, err = pcall(log_request) if not ok then ngx.log(ngx.ERR, "failed to log request: ", err) diff --git a/src/api-umbrella/proxy/log_utils.lua b/src/api-umbrella/proxy/log_utils.lua index c46f10240..97ad05e78 100644 --- a/src/api-umbrella/proxy/log_utils.lua +++ b/src/api-umbrella/proxy/log_utils.lua @@ -10,8 +10,6 @@ local str = require "resty.string" local user_agent_parser = require "api-umbrella.proxy.user_agent_parser" local utils = require "api-umbrella.proxy.utils" -local ngx_ctx = ngx.ctx -local ngx_var = ngx.var local round = utils.round local split = plutils.split @@ -168,7 +166,7 @@ local function cache_city_geocode(premature, id, data) end end -function _M.ignore_request() +function _M.ignore_request(ngx_ctx, ngx_var) -- Don't log some of our internal API calls used to determine if API Umbrella -- is fully started and ready (since logging of these requests will likely -- fail anyway if things aren't ready). 
@@ -207,7 +205,7 @@ function _M.cache_new_city_geocode(data) end end -function _M.set_request_ip_geo_fields(data) +function _M.set_request_ip_geo_fields(data, ngx_var) -- The GeoIP module returns ISO-8859-1 encoded city names, but we need UTF-8 -- for inserting into ElasticSearch. local geoip_city = ngx_var.geoip_city @@ -264,7 +262,7 @@ function _M.set_computed_timestamp_fields(data) data["timestamp_tz_minute"] = tz_time -- YYYY-MM-DD HH:MM:00 end -function _M.set_computed_url_fields(data) +function _M.set_computed_url_fields(data, ngx_ctx) -- Extract just the path portion of the URL. -- -- Note: we're extracting this from the original "request_uri" variable here, @@ -317,7 +315,7 @@ function _M.normalized_data(data) request_size = tonumber(data["request_size"]), request_url_hierarchy = data["request_url_hierarchy"], request_url_host = lowercase_truncate(data["request_url_host"], 200), - request_url_path = lowercase_truncate(data["request_url_path"], 400), + request_url_path = lowercase_truncate(data["request_url_path"], 4000), request_url_path_level1 = lowercase_truncate(data["request_url_path_level1"], 40), request_url_path_level2 = lowercase_truncate(data["request_url_path_level2"], 40), request_url_path_level3 = lowercase_truncate(data["request_url_path_level3"], 40), @@ -325,7 +323,7 @@ function _M.normalized_data(data) request_url_path_level5 = lowercase_truncate(data["request_url_path_level5"], 40), request_url_path_level6 = lowercase_truncate(data["request_url_path_level6"], 40), request_url_port = tonumber(data["request_url_port"]), - request_url_query = lowercase_truncate(data["request_url_query"], 400), + request_url_query = lowercase_truncate(data["request_url_query"], 4000), request_url_scheme = lowercase_truncate(data["request_url_scheme"], 10), request_user_agent = lowercase_truncate(data["request_user_agent"], 400), request_user_agent_family = lowercase_truncate(data["request_user_agent_family"], 100), diff --git a/templates/etc/rsyslog.conf.mustache b/templates/etc/rsyslog.conf.mustache index 3bc949548..c0c4a2125 100644 --- a/templates/etc/rsyslog.conf.mustache +++ b/templates/etc/rsyslog.conf.mustache @@ -86,6 +86,9 @@ if($!raw!request_origin != "") then { if($!raw!request_url_path != "") then { set $!usr!es!request_path = $!raw!request_url_path; } +if($!raw!request_url_query != "") then { + set $!usr!es!request_query = $!raw!request_url_query; +} if($!raw!request_referer != "") then { set $!usr!es!request_referer = $!raw!request_referer; } @@ -137,7 +140,7 @@ if($!raw!response_transfer_encoding != "") then { if($!raw!user_id != "") then { set $!usr!es!user_id = $!raw!user_id; } -if($!raw!user_email != "") then { +if($!raw!legacy_user_email != "") then { set $!usr!es!user_email = $!raw!legacy_user_email; } if($!raw!legacy_user_registration_source != "") then { diff --git a/test/proxy/logging/test_basics.rb b/test/proxy/logging/test_basics.rb index b1c4a53b6..ac829a7dd 100644 --- a/test/proxy/logging/test_basics.rb +++ b/test/proxy/logging/test_basics.rb @@ -37,9 +37,6 @@ def test_logs_expected_fields_for_non_chunked_non_gzip assert_equal([ "api_key", - "backend_response_time", - "internal_gatekeeper_time", - "proxy_overhead", "request_accept", "request_accept_encoding", "request_at", @@ -52,10 +49,10 @@ def test_logs_expected_fields_for_non_chunked_non_gzip "request_method", "request_origin", "request_path", + "request_query", "request_referer", "request_scheme", "request_size", - "request_url", "request_user_agent", "request_user_agent_family", 
"request_user_agent_type", @@ -73,10 +70,6 @@ def test_logs_expected_fields_for_non_chunked_non_gzip ].sort, record.keys.sort) assert_equal(self.api_key, record["api_key"]) - assert_kind_of(Numeric, record["backend_response_time"]) - assert_kind_of(Numeric, record["internal_gatekeeper_time"]) - assert_kind_of(Numeric, record["proxy_overhead"]) - assert_kind_of(Numeric, record["proxy_overhead"]) assert_equal("text/plain; q=0.5, text/html", record["request_accept"]) assert_equal("compress, gzip", record["request_accept_encoding"]) assert_kind_of(Numeric, record["request_at"]) @@ -96,19 +89,19 @@ def test_logs_expected_fields_for_non_chunked_non_gzip assert_equal("GET", record["request_method"]) assert_equal("http://foo.example", record["request_origin"]) assert_equal("/api/logging-example/foo/bar/", record["request_path"]) + assert_equal("url1=#{param_url1}&url2=#{param_url2}&url3=#{param_url3}".downcase, record["request_query"]) assert_equal("http://example.com", record["request_referer"]) assert_equal("http", record["request_scheme"]) assert_kind_of(Numeric, record["request_size"]) - assert_equal(url, record["request_url"]) assert_equal("curl/7.37.1", record["request_user_agent"]) - assert_equal("cURL", record["request_user_agent_family"]) - assert_equal("Library", record["request_user_agent_type"]) + assert_equal("curl", record["request_user_agent_family"]) + assert_equal("library", record["request_user_agent_type"]) # The backend responds with an age of 20. The actual age might higher than # the original response if the response happens right on the boundary of a # second or the proxy is congested and the response is delayed. assert_operator(record["response_age"], :>=, 20) assert_operator(record["response_age"], :<=, 40) - assert_equal("MISS", record["response_cache"]) + assert_equal("miss", record["response_cache"]) assert_equal("text/plain; charset=utf-8", record["response_content_type"]) assert_equal("openresty", record["response_server"]) assert_kind_of(Numeric, record["response_size"]) @@ -205,7 +198,7 @@ def test_requests_with_dots_in_query_params assert_response_code(200, response) record = wait_for_log(response)[:hit_source] - assert_equal(url, record["request_url"]) + assert_logged_url(url, record) end def test_requests_with_duplicate_query_params @@ -214,7 +207,7 @@ def test_requests_with_duplicate_query_params assert_response_code(200, response) record = wait_for_log(response)[:hit_source] - assert_equal(url, record["request_url"]) + assert_logged_url(url, record) end def test_logs_request_at_as_date @@ -289,8 +282,8 @@ def test_logs_cached_responses cache_results[record["response_cache"]] += 1 end assert_equal({ - "MISS" => 1, - "HIT" => 2, + "miss" => 1, + "hit" => 2, }, cache_results) end @@ -305,8 +298,7 @@ def test_logs_denied_requests record = wait_for_log(response)[:hit_source] assert_equal(403, record["response_status"]) assert_logs_base_fields(record) - refute_logs_backend_fields(record) - assert_equal("INVALID_KEY", record["api_key"]) + assert_equal("invalid_key", record["api_key"]) assert_equal("api_key_invalid", record["gatekeeper_denied_code"]) refute(record["user_email"]) refute(record["user_id"]) @@ -328,25 +320,9 @@ def test_logs_requests_when_backend_is_down record = wait_for_log(response)[:hit_source] assert_equal(502, record["response_status"]) assert_logs_base_fields(record, api_user) - assert_logs_backend_fields(record) end end - def test_logs_requests_when_logging_is_out_of_order - response = Typhoeus.get("http://127.0.0.1:9080/api/hello", 
log_http_options.deep_merge({ - :headers => { - "X-Api-Umbrella-Test-Simulate-Out-Of-Order-Logging" => "true", - }, - })) - assert_response_code(200, response) - - record = wait_for_log(response)[:hit_source] - assert_equal(200, record["response_status"]) - assert_logs_base_fields(record, api_user) - assert_logs_backend_fields(record) - assert_equal(99000, record["backend_response_time"]) - end - def test_logs_requests_with_maximum_8kb_url_limit url_path = "/api/hello?long=" long_length = 8192 - "GET #{url_path} HTTP/1.1\r\n".length @@ -357,7 +333,8 @@ def test_logs_requests_with_maximum_8kb_url_limit assert_response_code(200, response) record = wait_for_log(response)[:hit_source] - assert_equal(url, record["request_url"]) + assert_equal("/api/hello", record["request_path"]) + assert_equal("long=#{long_value}"[0, 4000], record["request_query"]) end # We may actually want to revisit this behavior and log these requests, but @@ -382,7 +359,32 @@ def test_does_not_log_requests_exceeding_8kb_url_limit assert_match("Log not found: ", error.message) end - def test_logs_long_url_and_headers_truncating_headers + def test_truncates_url_path_length_in_logs + long_path = "/api/hello/#{Faker::Lorem.characters(6000)}" + response = Typhoeus.get("http://127.0.0.1:9080#{long_path}", log_http_options) + assert_response_code(200, response) + + record = wait_for_log(response)[:hit_source] + assert_operator(long_path.length, :>, 4000) + assert_equal(4000, record["request_path"].length) + assert_equal(long_path[0, 4000], record["request_path"]) + end + + def test_truncates_url_query_length_in_logs + long_query = "long=#{Faker::Lorem.characters(6000)}" + response = Typhoeus.get("http://127.0.0.1:9080/api/hello?#{long_query}", log_http_options) + assert_response_code(200, response) + + record = wait_for_log(response)[:hit_source] + assert_operator(long_query.length, :>, 4000) + assert_equal(4000, record["request_query"].length) + assert_equal(long_query[0, 4000], record["request_query"]) + end + + # Try to log a long version of all inputs to ensure the overall log message + # doesn't exceed rsyslog's buffer size. + def test_long_url_and_request_headers_and_response_headers + # Setup a backend to accept wildcard hosts so we can test a long hostname. prepend_api_backends([ { :frontend_host => "*", @@ -414,8 +416,10 @@ def test_logs_long_url_and_headers_truncating_headers record = wait_for_log(response)[:hit_source] - # Ensure the full URL got logged. - assert_equal("http://#{long_host[0, 200]}#{url_path}#{long_value}", record["request_url"]) + # Check the logged URL. + assert_equal(long_host[0, 200], record["request_host"]) + assert_equal("/#{unique_test_id}/logging-long-response-headers/".downcase, record["request_path"]) + assert_equal("long=#{long_value}"[0, 4000], record["request_query"]) # Ensure the long header values got truncated so we're not susceptible to # exceeding rsyslog's message buffers and we're also not storing an @@ -432,4 +436,68 @@ def test_logs_long_url_and_headers_truncating_headers assert_equal(200, record["response_content_type"].length, record["response_content_type"]) end end + + def test_normalizes_log_case_sensitivity + # Setup a backend to accept wildcard hosts so we can test an uppercase hostname. 
+ prepend_api_backends([ + { + :frontend_host => "*", + :backend_host => "127.0.0.1", + :servers => [{ :host => "127.0.0.1", :port => 9444 }], + :url_matches => [{ :frontend_prefix => "/#{unique_test_id}/", :backend_prefix => "/" }], + }, + ]) do + url = "HTTP://127.0.0.1:9080/#{unique_test_id}/logging-example/FOO/BAR/?URL1=FOO" + response = Typhoeus.get(url, log_http_options.deep_merge({ + :headers => { + "Accept" => "TEXT/PLAIN", + "Accept-Encoding" => "GZIP", + "Connection" => "CLOSE", + "Content-Type" => "APPLICATION/X-WWW-FORM-URLENCODED", + "Host" => "FOOBAR.EXAMPLE", + "Origin" => "HTTP://FOO.EXAMPLE", + "User-Agent" => "CURL/7.37.1", + "Referer" => "HTTP://EXAMPLE.COM", + "X-Forwarded-For" => "0:0:0:0:0:FFFF:808:808", + }, + :userpwd => "BASIC-AUTH-USERNAME-EXAMPLE:MY-SECRET-PASSWORD", + })) + assert_response_code(200, response) + + record = wait_for_log(response)[:hit_source] + + # Lowercases nearly everything. + assert_equal(self.api_key, record["api_key"]) + assert_equal("text/plain", record["request_accept"]) + assert_equal("gzip", record["request_accept_encoding"]) + assert_equal("basic-auth-username-example", record["request_basic_auth_username"]) + assert_equal("close", record["request_connection"]) + assert_equal("application/x-www-form-urlencoded", record["request_content_type"]) + assert_equal([ + "0/foobar.example/", + "1/foobar.example/#{unique_test_id.downcase}/", + "2/foobar.example/#{unique_test_id.downcase}/logging-example/", + "3/foobar.example/#{unique_test_id.downcase}/logging-example/foo/", + "4/foobar.example/#{unique_test_id.downcase}/logging-example/foo/bar", + ], record["request_hierarchy"]) + assert_equal("foobar.example", record["request_host"]) + assert_equal("::ffff:8.8.8.8", record["request_ip"]) + assert_equal("mountain view", record["request_ip_city"]) + assert_equal("http://foo.example", record["request_origin"]) + assert_equal("/#{unique_test_id.downcase}/logging-example/foo/bar/", record["request_path"]) + assert_equal("url1=foo".downcase, record["request_query"]) + assert_equal("http://example.com", record["request_referer"]) + assert_equal("http", record["request_scheme"]) + assert_equal("curl/7.37.1", record["request_user_agent"]) + assert_equal("curl", record["request_user_agent_family"]) + assert_equal("library", record["request_user_agent_type"]) + assert_equal("miss", record["response_cache"]) + assert_equal("text/plain; charset=utf-8", record["response_content_type"]) + + # The few fields uppercased. 
+ assert_equal("GET", record["request_method"]) + assert_equal("US", record["request_ip_country"]) + assert_equal("CA", record["request_ip_region"]) + end + end end diff --git a/test/proxy/logging/test_ip_geocoding.rb b/test/proxy/logging/test_ip_geocoding.rb index a8d872168..6df3d915a 100644 --- a/test/proxy/logging/test_ip_geocoding.rb +++ b/test/proxy/logging/test_ip_geocoding.rb @@ -23,7 +23,7 @@ def test_ipv4_address :ip => "8.8.8.8", :country => "US", :region => "CA", - :city => "Mountain View", + :city => "mountain view", :lat => 37.386, :lon => -122.0838, }) @@ -61,7 +61,7 @@ def test_ipv4_mapped_ipv6_address :ip => "::ffff:8.8.8.8", :country => "US", :region => "CA", - :city => "Mountain View", + :city => "mountain view", :lat => 37.386, :lon => -122.0838, }) @@ -80,7 +80,7 @@ def test_country_city_no_region :ip => "104.250.168.24", :country => "MC", :region => nil, - :city => "Monte-carlo", + :city => "monte-carlo", :lat => 43.7333, :lon => 7.4167, }) @@ -118,7 +118,7 @@ def test_city_accent_chars :ip => "191.102.110.22", :country => "CO", :region => "34", - :city => "Bogotá", + :city => "bogotá", :lat => 4.6492, :lon => -74.0628, }) @@ -146,9 +146,6 @@ def assert_geocode_log(record, options) else assert_equal(options.fetch(:city), record.fetch("request_ip_city")) end - assert_equal(["lat", "lon"].sort, record.fetch("request_ip_location").keys.sort) - assert_in_delta(options[:lat], record.fetch("request_ip_location").fetch("lat"), 0.02) - assert_in_delta(options[:lon], record.fetch("request_ip_location").fetch("lon"), 0.02) end def assert_geocode_cache(record, options) diff --git a/test/proxy/logging/test_request_url_query_params_separately.rb b/test/proxy/logging/test_request_url_query_params_separately.rb deleted file mode 100644 index f66cc9446..000000000 --- a/test/proxy/logging/test_request_url_query_params_separately.rb +++ /dev/null @@ -1,255 +0,0 @@ -require_relative "../../test_helper" - -class Test::Proxy::Logging::TestRequestUrlQueryParamsSeparately < Minitest::Test - include ApiUmbrellaTestHelpers::Setup - include ApiUmbrellaTestHelpers::Logging - include Minitest::Hooks - - def setup - super - setup_server - once_per_class_setup do - override_config_set({ - :analytics => { - :log_request_url_query_params_separately => true, - }, - }, "--router") - end - end - - def after_all - super - override_config_reset("--router") - end - - def test_encoding - param_url1 = "http%3A%2F%2Fexample.com%2F%3Ffoo%3Dbar%26foo%3Dbar%20more+stuff" - param_url2 = "%ED%A1%BC" - param_url3_prefix = "https%3A//example.com/foo/" - param_url3_invalid_suffix = "%D6%D0%B9%FA%BD%AD%CB%D5%CA%A1%B8%D3%D3%DC%CF%D8%D2%BB%C2%A5%C5%CC%CA%C0%BD%F5%BB%AA%B3%C7200%D3%E0%D2%B5%D6%F7%B9%BA%C2%F2%B5%C4%C9%CC%C6%B7%B7%BF%A3%AC%D2%F2%BF%AA%B7%A2%C9%CC%C5%DC%C2%B7%D2%D1%CD%A3%B9%A420%B8%F6%D4%C2%A3%AC%D2%B5%D6%F7%C4%C3%B7%BF%CE%DE%CD%FB%C8%B4%D0%E8%BC%CC%D0%F8%B3%A5%BB%B9%D2%F8%D0%D0%B4%FB%BF%EE%A1%A3%CF%F2%CA%A1%CA%D0%CF%D8%B9%FA%BC%D2%D0%C5%B7%C3%BE%D6%B7%B4%D3%B3%BD%FC2%C4%EA%CE%DE%C8%CB%B4%A6%C0%ED%A1%A3%D4%DA%B4%CB%B0%B8%D6%D0%A3%AC%CE%D2%C3%C7%BB%B3%D2%C9%D3%D0%C8%CB%CA%A7%D6%B0%E4%C2%D6%B0/sites/default/files/googleanalytics/ga.js" - param_url3 = param_url3_prefix + param_url3_invalid_suffix - - url = "http://127.0.0.1:9080/api/logging-example/foo/bar/?url1=#{param_url1}&url2=#{param_url2}&url3=#{param_url3}&api_key=#{api_key}" - response = Typhoeus.get(url, log_http_options) - assert_response_code(200, response) - - record = wait_for_log(response)[:hit_source] - assert_equal(url, record["request_url"]) - 
assert_kind_of(Hash, record["request_query"]) - assert_equal([ - "api_key", - "url1", - "url2", - "url3", - ].sort, record["request_query"].keys.sort) - assert_equal(api_key, record["request_query"]["api_key"]) - assert_equal(CGI.unescape(param_url1), record["request_query"]["url1"]) - assert_equal(param_url2, record["request_query"]["url2"]) - assert_equal(CGI.unescape(param_url3_prefix) + param_url3_invalid_suffix, record["request_query"]["url3"]) - end - - # For Elasticsearch 2 compatibility - def test_requests_with_dots_in_query_params - response = Typhoeus.get("http://127.0.0.1:9080/api/hello", log_http_options.deep_merge({ - :params => { - "foo.bar.baz" => "example.1", - "foo.bar" => "example.2", - "foo[bar]" => "example.3", - }, - })) - assert_response_code(200, response) - - record = wait_for_log(response)[:hit_source] - assert_equal("example.1", record["request_query"]["foo_bar_baz"]) - assert_equal("example.2", record["request_query"]["foo_bar"]) - assert_equal("example.3", record["request_query"]["foo[bar]"]) - end - - def test_requests_with_duplicate_query_params - response = Typhoeus.get("http://127.0.0.1:9080/api/hello?test_dup_arg=foo&test_dup_arg=bar", log_http_options) - assert_response_code(200, response) - - record = wait_for_log(response)[:hit_source] - assert_equal("foo,bar", record["request_query"]["test_dup_arg"]) - end - - # Does not attempt to automatically map the first seen value into a date. - def test_dates_in_query_params_treated_as_strings - response = Typhoeus.get("http://127.0.0.1:9080/api/hello", log_http_options.deep_merge({ - :params => { - :date_field => "2010-05-01", - }, - })) - assert_response_code(200, response) - record = wait_for_log(response)[:hit_source] - assert_equal("2010-05-01", record["request_query"]["date_field"]) - - response = Typhoeus.get("http://127.0.0.1:9080/api/hello", log_http_options.deep_merge({ - :params => { - :date_field => "2010-05-0", - }, - })) - assert_response_code(200, response) - record = wait_for_log(response)[:hit_source] - assert_equal("2010-05-0", record["request_query"]["date_field"]) - - response = Typhoeus.get("http://127.0.0.1:9080/api/hello", log_http_options.deep_merge({ - :params => { - :date_field => "foo", - }, - })) - assert_response_code(200, response) - record = wait_for_log(response)[:hit_source] - assert_equal("foo", record["request_query"]["date_field"]) - end - - # Does not attempt to automatically map the values into an array, which would - # conflict with the first-seen string type. - def test_duplicate_query_params_treated_as_strings - response = Typhoeus.get("http://127.0.0.1:9080/api/hello?test_dup_arg_first_string=foo", log_http_options) - assert_response_code(200, response) - record = wait_for_log(response)[:hit_source] - assert_equal("foo", record["request_query"]["test_dup_arg_first_string"]) - - response = Typhoeus.get("http://127.0.0.1:9080/api/hello?test_dup_arg_first_string=foo&test_dup_arg_first_string=bar", log_http_options) - assert_response_code(200, response) - record = wait_for_log(response)[:hit_source] - assert_equal("foo,bar", record["request_query"]["test_dup_arg_first_string"]) - end - - # Does not attempt to automatically map the first seen value into a boolean. 
- def test_boolean_query_params_treated_as_strings - response = Typhoeus.get("http://127.0.0.1:9080/api/hello?test_arg_first_bool", log_http_options) - assert_response_code(200, response) - record = wait_for_log(response)[:hit_source] - assert_equal("true", record["request_query"]["test_arg_first_bool"]) - - response = Typhoeus.get("http://127.0.0.1:9080/api/hello?test_arg_first_bool=foo", log_http_options) - assert_response_code(200, response) - record = wait_for_log(response)[:hit_source] - assert_equal("foo", record["request_query"]["test_arg_first_bool"]) - end - - # Does not attempt to automatically map the first seen value into a number. - def test_numbers_in_query_params_treated_as_strings - response = Typhoeus.get("http://127.0.0.1:9080/api/hello", log_http_options.deep_merge({ - :params => { - :number_field => "123", - }, - })) - assert_response_code(200, response) - record = wait_for_log(response)[:hit_source] - assert_equal("123", record["request_query"]["number_field"]) - - response = Typhoeus.get("http://127.0.0.1:9080/api/hello", log_http_options.deep_merge({ - :params => { - :number_field => "foo", - }, - })) - assert_response_code(200, response) - record = wait_for_log(response)[:hit_source] - assert_equal("foo", record["request_query"]["number_field"]) - end - - def test_valid_utf8_encoding_in_url_path_url_params_headers - # Test various encodings of the UTF-8 pound symbol: £ - url_encoded = "%C2%A3" - base64ed = "wqM=" - raw = "£" - response = Typhoeus.get("http://127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/#{raw}/?url_encoded=#{url_encoded}&base64ed=#{base64ed}&raw=#{raw}", log_http_options.deep_merge({ - :headers => { - "Content-Type" => url_encoded, - "Referer" => base64ed, - "Origin" => raw, - }, - })) - assert_response_code(200, response) - - record = wait_for_log(response)[:hit_source] - - # When in the URL path or query string, we expect the raw £ symbol to be - # logged as the url encoded version. - expected_raw_in_url = url_encoded - - # URL query string - assert_equal(url_encoded, record["request_query"]["url_encoded"]) - assert_equal(base64ed, record["request_query"]["base64ed"]) - assert_equal(expected_raw_in_url, record["request_query"]["raw"]) - end - - def test_invalid_utf8_encoding - # Test various encodings of the ISO-8859-1 pound symbol: £ (but since this - # is the ISO-8859-1 version, it's not valid UTF-8). - url_encoded = "%A3" - base64ed = "ow==" - raw = Base64.decode64(base64ed).force_encoding("utf-8") - raw_utf8 = Base64.decode64(base64ed).encode("utf-8", :invalid => :replace, :undef => :replace) - response = Typhoeus.get("http://127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/#{raw}/#{raw_utf8}/?url_encoded=#{url_encoded}&base64ed=#{base64ed}&raw=#{raw}&raw_utf8=#{raw_utf8}", log_http_options.deep_merge({ - :headers => { - "Content-Type" => url_encoded, - "Referer" => base64ed, - "Origin" => raw, - "Accept" => raw_utf8, - }, - })) - assert_response_code(200, response) - - record = wait_for_log(response)[:hit_source] - - # Since the encoding of this string wasn't actually a valid UTF-8 string, - # we test situations where it's sent as the raw ISO-8859-1 value, as well - # as the UTF-8 replacement character. 
- expected_raw_in_url = url_encoded - expected_raw_utf8_in_url = "%EF%BF%BD" - - # URL query string - assert_equal(url_encoded, record["request_query"]["url_encoded"]) - assert_equal(base64ed, record["request_query"]["base64ed"]) - assert_equal(expected_raw_in_url, record["request_query"]["raw"]) - assert_equal(expected_raw_utf8_in_url, record["request_query"]["raw_utf8"]) - end - - def test_decodes_url_encoding - url_encoded = "http%3A%2F%2Fexample.com%2Fsub%2Fsub%2F%3Ffoo%3Dbar%26foo%3Dbar%20more+stuff" - response = Typhoeus.get("http://127.0.0.1:9080/api/hello/#{url_encoded}/?url_encoded=#{url_encoded}", log_http_options.deep_merge({ - :headers => { - "Content-Type" => url_encoded, - }, - })) - assert_response_code(200, response) - - record = wait_for_log(response)[:hit_source] - - # URL query string - assert_equal(CGI.unescape(url_encoded), record["request_query"]["url_encoded"]) - end - - def test_optionally_encodable_ascii_strings - as_is = "-%2D ;%3B +%2B /%2F :%3A 0%30 >%3E {%7B" - response = Typhoeus.get("http://127.0.0.1:9080/api/hello/#{as_is}/?as_is=#{as_is}", log_http_options.deep_merge({ - :headers => { - "Content-Type" => as_is, - }, - })) - assert_response_code(200, response) - - record = wait_for_log(response)[:hit_source] - - # URL query string - assert_equal(CGI.unescape(as_is), record["request_query"]["as_is"]) - end - - def test_slashes_and_backslashes - url = "http://127.0.0.1:9080/api/hello/extra//slash/some\\backslash/encoded%5Cbackslash/encoded%2Fslash?&forward_slash=/slash&encoded_forward_slash=%2F&back_slash=\\&encoded_back_slash=%5C" - response = Typhoeus.get(url, log_http_options) - assert_response_code(200, response) - - record = wait_for_log(response)[:hit_source] - assert_equal("/slash", record["request_query"]["forward_slash"]) - assert_equal("/", record["request_query"]["encoded_forward_slash"]) - assert_equal("\\", record["request_query"]["back_slash"]) - assert_equal("\\", record["request_query"]["encoded_back_slash"]) - assert_equal("/api/hello/extra//slash/some\\backslash/encoded%5Cbackslash/encoded%2Fslash", record["request_path"]) - assert_equal(url, record["request_url"]) - end -end diff --git a/test/proxy/logging/test_special_chars.rb b/test/proxy/logging/test_special_chars.rb index 56c4eef6a..46324b359 100644 --- a/test/proxy/logging/test_special_chars.rb +++ b/test/proxy/logging/test_special_chars.rb @@ -45,8 +45,8 @@ def test_logs_utf8_urls assert_response_code(200, response) record = wait_for_log(response)[:hit_source] - assert_equal("/api/hello/utf8/%E2%9C%93/encoded_utf8/%E2%9C%93/", record["request_path"]) - assert_equal("http://127.0.0.1:9080/api/hello/utf8/%E2%9C%93/encoded_utf8/%E2%9C%93/?utf8=%E2%9C%93&utf8_url_encoded=%E2%9C%93&more_utf8=%C2%AC%C2%B6%C2%AA%C3%BE%C2%A4l&more_utf8_hex=%C2%AC%C2%B6%C2%AA%C3%BE%C2%A4l&more_utf8_hex_lowercase=%C2%AC%C2%B6%C2%AA%C3%BE%C2%A4l&actual_backslash_x=\\xC2\\xAC\\xC2\\xB6\\xC2\\xAA\\xC3\\xBE\\xC2\\xA4l", record["request_url"]) + assert_equal("/api/hello/utf8/%e2%9c%93/encoded_utf8/%e2%9c%93/", record["request_path"]) + assert_equal("utf8=%e2%9c%93&utf8_url_encoded=%e2%9c%93&more_utf8=%c2%ac%c2%b6%c2%aa%c3%be%c2%a4l&more_utf8_hex=%c2%ac%c2%b6%c2%aa%c3%be%c2%a4l&more_utf8_hex_lowercase=%c2%ac%c2%b6%c2%aa%c3%be%c2%a4l&actual_backslash_x=\\xc2\\xac\\xc2\\xb6\\xc2\\xaa\\xc3\\xbe\\xc2\\xa4l", record["request_query"]) end def test_valid_utf8_encoding_in_url_path_url_params_headers @@ -69,23 +69,21 @@ def test_valid_utf8_encoding_in_url_path_url_params_headers # logged as the url encoded version. 
expected_raw_in_url = url_encoded - # URL path - assert_equal("/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}/", record["request_path"]) + # URL + assert_equal("/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}/".downcase, record["request_path"]) assert_equal([ "0/127.0.0.1:9080/", "1/127.0.0.1:9080/api/", "2/127.0.0.1:9080/api/hello/", - "3/127.0.0.1:9080/api/hello/#{url_encoded}/", - "4/127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/", - "5/127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}", + "3/127.0.0.1:9080/api/hello/#{url_encoded}/".downcase, + "4/127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/".downcase, + "5/127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}".downcase, ], record["request_hierarchy"]) - - # Full URL - assert_equal("http://127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}/?url_encoded=#{url_encoded}&base64ed=#{base64ed}&raw=#{expected_raw_in_url}", record["request_url"]) + assert_equal("url_encoded=#{url_encoded}&base64ed=#{base64ed}&raw=#{expected_raw_in_url}".downcase, record["request_query"]) # HTTP headers - assert_equal(url_encoded, record["request_content_type"]) - assert_equal(base64ed, record["request_referer"]) + assert_equal(url_encoded.downcase, record["request_content_type"]) + assert_equal(base64ed.downcase, record["request_referer"]) assert_equal(raw, record["request_origin"]) end @@ -116,24 +114,22 @@ def test_invalid_utf8_encoding_in_url_path_url_params_headers expected_raw_utf8_in_url = "%EF%BF%BD" expected_raw_utf8_in_header = Base64.decode64("77+9").force_encoding("utf-8") - # URL path - assert_equal("/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}/#{expected_raw_utf8_in_url}/", record["request_path"]) + # URL + assert_equal("/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}/#{expected_raw_utf8_in_url}/".downcase, record["request_path"]) assert_equal([ "0/127.0.0.1:9080/", "1/127.0.0.1:9080/api/", "2/127.0.0.1:9080/api/hello/", - "3/127.0.0.1:9080/api/hello/#{url_encoded}/", - "4/127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/", - "5/127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}/", - "6/127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}/#{expected_raw_utf8_in_url}", + "3/127.0.0.1:9080/api/hello/#{url_encoded}/".downcase, + "4/127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/".downcase, + "5/127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}/".downcase, + "6/127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}/#{expected_raw_utf8_in_url}".downcase, ], record["request_hierarchy"]) - - # Full URL - assert_equal("http://127.0.0.1:9080/api/hello/#{url_encoded}/#{base64ed}/#{expected_raw_in_url}/#{expected_raw_utf8_in_url}/?url_encoded=#{url_encoded}&base64ed=#{base64ed}&raw=#{expected_raw_in_url}&raw_utf8=#{expected_raw_utf8_in_url}", record["request_url"]) + assert_equal("url_encoded=#{url_encoded}&base64ed=#{base64ed}&raw=#{expected_raw_in_url}&raw_utf8=#{expected_raw_utf8_in_url}".downcase, record["request_query"]) # HTTP headers - assert_equal(url_encoded, record["request_content_type"]) - assert_equal(base64ed, record["request_referer"]) + assert_equal(url_encoded.downcase, record["request_content_type"]) + assert_equal(base64ed.downcase, record["request_referer"]) assert_nil(expected_raw_in_header) assert_nil(record["request_origin"]) assert_equal(expected_raw_utf8_in_header, record["request_accept"]) @@ 
-150,20 +146,18 @@ def test_encoded_strings_as_given record = wait_for_log(response)[:hit_source] - # URL path - assert_equal("/api/hello/#{url_encoded}/", record["request_path"]) + # URL + assert_equal("/api/hello/#{url_encoded}/".downcase, record["request_path"]) assert_equal([ "0/127.0.0.1:9080/", "1/127.0.0.1:9080/api/", "2/127.0.0.1:9080/api/hello/", - "3/127.0.0.1:9080/api/hello/#{url_encoded}", + "3/127.0.0.1:9080/api/hello/#{url_encoded}".downcase, ], record["request_hierarchy"]) - - # Full URL - assert_equal("http://127.0.0.1:9080/api/hello/#{url_encoded}/?url_encoded=#{url_encoded}", record["request_url"]) + assert_equal("url_encoded=#{url_encoded}".downcase, record["request_query"]) # HTTP headers - assert_equal(url_encoded, record["request_content_type"]) + assert_equal(url_encoded.downcase, record["request_content_type"]) end def test_optionally_encodable_ascii_strings_as_given @@ -177,21 +171,19 @@ def test_optionally_encodable_ascii_strings_as_given record = wait_for_log(response)[:hit_source] - # URL path - assert_equal("/api/hello/#{as_is}/", record["request_path"]) + # URL + assert_equal("/api/hello/#{as_is.downcase}/", record["request_path"]) assert_equal([ "0/127.0.0.1:9080/", "1/127.0.0.1:9080/api/", "2/127.0.0.1:9080/api/hello/", - "3/127.0.0.1:9080/api/hello/-%2D ;%3B +%2B /", - "4/127.0.0.1:9080/api/hello/-%2D ;%3B +%2B /%2F :%3A 0%30 >%3E {%7B", + "3/127.0.0.1:9080/api/hello/-%2d ;%3b +%2b /", + "4/127.0.0.1:9080/api/hello/-%2d ;%3b +%2b /%2f :%3a 0%30 >%3e {%7b", ], record["request_hierarchy"]) - - # Full URL - assert_equal("http://127.0.0.1:9080/api/hello/#{as_is}/?as_is=#{as_is}", record["request_url"]) + assert_equal("as_is=#{as_is.downcase}", record["request_query"]) # HTTP headers - assert_equal(as_is, record["request_content_type"]) + assert_equal(as_is.downcase, record["request_content_type"]) end def test_slashes_and_backslashes @@ -200,7 +192,7 @@ def test_slashes_and_backslashes assert_response_code(200, response) record = wait_for_log(response)[:hit_source] - assert_equal("/api/hello/extra//slash/some\\backslash/encoded%5Cbackslash/encoded%2Fslash", record["request_path"]) - assert_equal(url, record["request_url"]) + assert_equal("/api/hello/extra//slash/some\\backslash/encoded%5cbackslash/encoded%2fslash", record["request_path"]) + assert_equal("&forward_slash=/slash&encoded_forward_slash=%2f&back_slash=\\&encoded_back_slash=%5c", record["request_query"]) end end diff --git a/test/support/api_umbrella_test_helpers/logging.rb b/test/support/api_umbrella_test_helpers/logging.rb index 7c4053706..a4405cac5 100644 --- a/test/support/api_umbrella_test_helpers/logging.rb +++ b/test/support/api_umbrella_test_helpers/logging.rb @@ -71,13 +71,9 @@ def assert_logs_base_fields(record, user = nil) assert_operator(record["request_path"].length, :>=, 1) assert_equal("http", record["request_scheme"]) assert_kind_of(Numeric, record["request_size"]) - assert_kind_of(String, record["request_url"]) - assert_equal(true, record["request_url"].start_with?("http://127.0.0.1:9080/"), record["request_url"]) assert_kind_of(Numeric, record["response_size"]) assert_kind_of(Numeric, record["response_status"]) assert_kind_of(Numeric, record["response_time"]) - assert_kind_of(Numeric, record["internal_gatekeeper_time"]) - assert_kind_of(Numeric, record["proxy_overhead"]) if(user) assert_equal(user.api_key, record["api_key"]) @@ -87,12 +83,10 @@ def assert_logs_base_fields(record, user = nil) end end - def assert_logs_backend_fields(record) - assert_kind_of(Numeric, 
record["backend_response_time"]) - end - - def refute_logs_backend_fields(record) - refute(record["backend_response_time"]) + def assert_logged_url(expected_url, record) + logged_url = "#{record["request_scheme"]}://#{record["request_host"]}#{record["request_path"]}" + logged_url += "?#{record["request_query"]}" if(record["request_query"]) + assert_equal(expected_url, logged_url) end end end From f8792af9dcba08bb5617ef2b5c965dd115f13524 Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Thu, 2 Feb 2017 23:46:56 -0700 Subject: [PATCH 005/367] Update to Elasticsearch v5. --- Gemfile | 2 +- Gemfile.lock | 12 +- build/cmake/elasticsearch.cmake | 2 +- build/cmake/versions.cmake | 4 +- config/default.yml | 9 +- config/elasticsearch_templates.json | 168 +++++++----------- config/test.yml | 13 +- .../init_elasticsearch_templates_data.lua | 16 ++ src/api-umbrella/web-app/Gemfile | 2 +- src/api-umbrella/web-app/Gemfile.lock | 12 +- .../app/controllers/admin/stats_controller.rb | 7 +- .../app/models/log_search/elastic_search.rb | 42 ++--- .../web-app/app/views/admin/stats/logs.rabl | 2 +- .../elasticsearch/log4j2.properties.mustache | 74 ++++++++ .../etc/elasticsearch/logging.yml.mustache | 47 ----- .../etc/perp/elasticsearch/rc.env.mustache | 3 +- test/proxy/logging/test_basics.rb | 6 +- 17 files changed, 215 insertions(+), 206 deletions(-) create mode 100644 templates/etc/elasticsearch/log4j2.properties.mustache delete mode 100644 templates/etc/elasticsearch/logging.yml.mustache diff --git a/Gemfile b/Gemfile index aee57ac1d..3386acb44 100644 --- a/Gemfile +++ b/Gemfile @@ -32,7 +32,7 @@ gem "oj", "~> 2.18.1" # Database libraries gem "mongoid", "~> 6.0.3" -gem "elasticsearch", "~> 2.0.1" +gem "elasticsearch", "~> 5.0.1" gem "elasticsearch-persistence", "~> 0.1.9" # Factories for test database data diff --git a/Gemfile.lock b/Gemfile.lock index 4e940230c..656a5fd1a 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -48,10 +48,10 @@ GEM database_cleaner (1.5.3) descendants_tracker (0.0.4) thread_safe (~> 0.3, >= 0.3.1) - elasticsearch (2.0.1) - elasticsearch-api (= 2.0.1) - elasticsearch-transport (= 2.0.1) - elasticsearch-api (2.0.1) + elasticsearch (5.0.1) + elasticsearch-api (= 5.0.1) + elasticsearch-transport (= 5.0.1) + elasticsearch-api (5.0.1) multi_json elasticsearch-model (0.1.9) activesupport (> 3) @@ -64,7 +64,7 @@ GEM elasticsearch-model (>= 0.1) hashie virtus - elasticsearch-transport (2.0.1) + elasticsearch-transport (5.0.1) faraday multi_json equalizer (0.0.11) @@ -161,7 +161,7 @@ DEPENDENCIES childprocess (~> 0.6.1) concurrent-ruby (~> 1.0.4) database_cleaner (~> 1.5.3) - elasticsearch (~> 2.0.1) + elasticsearch (~> 5.0.1) elasticsearch-persistence (~> 0.1.9) factory_girl (~> 4.8.0) faker (~> 1.7.2) diff --git a/build/cmake/elasticsearch.cmake b/build/cmake/elasticsearch.cmake index 5bb6bd6be..f72e56c47 100644 --- a/build/cmake/elasticsearch.cmake +++ b/build/cmake/elasticsearch.cmake @@ -3,7 +3,7 @@ find_package(Java 1.7 REQUIRED COMPONENTS Runtime) # Elasticsearch: Analytics database ExternalProject_Add( elasticsearch - URL https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz + URL https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz URL_HASH SHA1=${ELASTICSEARCH_HASH} CONFIGURE_COMMAND "" BUILD_COMMAND "" diff --git a/build/cmake/versions.cmake b/build/cmake/versions.cmake index c99cd1ae6..8c4aac636 100644 --- a/build/cmake/versions.cmake +++ b/build/cmake/versions.cmake @@ -3,8 +3,8 @@ 
set(API_UMBRELLA_STATIC_SITE_VERSION e54283244890e3d5ffeb7ec00dde2f5eaa778d28) set(API_UMBRELLA_STATIC_SITE_HASH 037b9317f6b08eb2073060c1beed16e2) set(BUNDLER_VERSION 1.14.3) set(BUNDLER_HASH 9d61c7d983b99eb0b16d64658b182e045bcdd74ef3b139e849777f780782dbfe) -set(ELASTICSEARCH_VERSION 2.4.4) -set(ELASTICSEARCH_HASH cdb5068d1baa07388e522c3bc04cca38aa8f3048) +set(ELASTICSEARCH_VERSION 5.2.0) +set(ELASTICSEARCH_HASH aa0e7411b27bc897727517b524e69c7da0694a0a) set(FLUME_VERSION 1.7.0) set(FLUME_HASH 12496e632a96d7ca823ab3c239a2a7d2) set(JSON_C_VERSION 0.12.1) diff --git a/config/default.yml b/config/default.yml index 192fa72a1..3d7704091 100644 --- a/config/default.yml +++ b/config/default.yml @@ -149,7 +149,7 @@ elasticsearch: - "http://127.0.0.1:14002" embedded_server_env: heap_size: 512m - api_version: 2 + api_version: 5 embedded_server_config: network: host: 127.0.0.1 @@ -167,13 +167,6 @@ elasticsearch: breaker: fielddata: limit: 60% - index: - translog: - # Sync the data to disk asynchronously on a fixed interval, rather than - # for every request. This significantly helps indexing throughput (at - # the risk of losing a few seconds of data if things crash). - durability: async - sync_interval: 10s analytics: adapter: elasticsearch timezone: UTC diff --git a/config/elasticsearch_templates.json b/config/elasticsearch_templates.json index b3bd37c8b..34360c000 100644 --- a/config/elasticsearch_templates.json +++ b/config/elasticsearch_templates.json @@ -8,13 +8,16 @@ "number_of_shards": 1 }, "analysis": { - "analyzer": { - "path_hierarchy_lowercase": { + "normalizer": { + "lowercase_normalizer": { "type": "custom", - "tokenizer": "path_hierarchy", - "filter": ["lowercase"] + "filter": ["lowercase", "asciifolding"] } } + }, + "translog": { + "durability": "async", + "sync_interval": "10s" } }, "mappings": { @@ -22,158 +25,127 @@ "_all": { "enabled": false }, - "dynamic": "strict", + "dynamic": false, "properties": { "api_key": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "gatekeeper_denied_code": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_accept": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_accept_encoding": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_at": { "type": "date", "doc_values": true }, "request_basic_auth_username": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_connection": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_content_type": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_hierarchy": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_host": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_ip": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, 
"request_ip_city": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_ip_country": { - "type": "string", - "index": "not_analyzed", - "doc_values": true - }, - "request_ip_location": { - "type": "geo_point", - "lat_lon": true, - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_ip_region": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_method": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_origin": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_path": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_referer": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_scheme": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_size": { "type": "integer", "doc_values": true }, "request_query": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_user_agent": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_user_agent_family": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_user_agent_type": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "response_age": { "type": "integer", "doc_values": true }, "response_cache": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "response_content_encoding": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "response_content_length": { "type": "integer", "doc_values": true }, "response_content_type": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "response_server": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "response_size": { "type": "integer", @@ -188,24 +160,20 @@ "doc_values": true }, "response_transfer_encoding": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "user_id": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "user_email": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "user_registration_source": { - "type": "string", - "index": "not_analyzed", - "doc_values": true + "type": "keyword", + "normalizer": "lowercase_normalizer" } } } diff --git a/config/test.yml b/config/test.yml index b2b04f1dd..0e7392033 100644 --- a/config/test.yml +++ 
b/config/test.yml @@ -86,14 +86,11 @@ elasticsearch: transport: tcp: port: 13003 - discovery: - zen: - ping: - multicast: - enabled: false - index: - number_of_shards: 1 - number_of_replicas: 0 + # discovery: + # zen: + # ping: + # multicast: + # enabled: false unbound: port: 13100 control_port: 13101 diff --git a/src/api-umbrella/proxy/startup/init_elasticsearch_templates_data.lua b/src/api-umbrella/proxy/startup/init_elasticsearch_templates_data.lua index ee8d4704c..539f3e353 100644 --- a/src/api-umbrella/proxy/startup/init_elasticsearch_templates_data.lua +++ b/src/api-umbrella/proxy/startup/init_elasticsearch_templates_data.lua @@ -10,6 +10,22 @@ else local ok, data = pcall(cjson.decode, content) if ok then elasticsearch_templates = data + + -- In the test environment, disable replicas and reduce shards to speed + -- things up. + if config["app_env"] == "test" then + for _, template in ipairs(elasticsearch_templates) do + if not template["template"]["settings"] then + template["template"]["settings"] = {} + end + if not template["template"]["settings"]["index"] then + template["template"]["settings"]["index"] = {} + end + + template["template"]["settings"]["index"]["number_of_shards"] = 1 + template["template"]["settings"]["index"]["number_of_replicas"] = 0 + end + end else ngx.log(ngx.ERR, "failed to parse json for ", path) end diff --git a/src/api-umbrella/web-app/Gemfile b/src/api-umbrella/web-app/Gemfile index 910a6d22b..61b1ee1f6 100644 --- a/src/api-umbrella/web-app/Gemfile +++ b/src/api-umbrella/web-app/Gemfile @@ -51,7 +51,7 @@ gem "mongoid-store", :git => "https://github.com/ahoward/mongoid-store.git" gem "seed-fu", :git => "https://github.com/GUI/seed-fu.git", :branch => "mongoid" # Elasticsearch -gem "elasticsearch", "~> 2.0.1" +gem "elasticsearch", "~> 5.0.1" # OmniAuth-based authentication gem "devise", "~> 4.2.0" diff --git a/src/api-umbrella/web-app/Gemfile.lock b/src/api-umbrella/web-app/Gemfile.lock index 818be9892..a22ed98d9 100644 --- a/src/api-umbrella/web-app/Gemfile.lock +++ b/src/api-umbrella/web-app/Gemfile.lock @@ -133,12 +133,12 @@ GEM responders warden (~> 1.2.3) devise-i18n (1.1.1) - elasticsearch (2.0.1) - elasticsearch-api (= 2.0.1) - elasticsearch-transport (= 2.0.1) - elasticsearch-api (2.0.1) + elasticsearch (5.0.1) + elasticsearch-api (= 5.0.1) + elasticsearch-transport (= 5.0.1) + elasticsearch-api (5.0.1) multi_json - elasticsearch-transport (2.0.1) + elasticsearch-transport (5.0.1) faraday multi_json erubis (2.7.0) @@ -336,7 +336,7 @@ DEPENDENCIES delayed_job_mongoid (~> 2.2.0) devise (~> 4.2.0) devise-i18n (~> 1.1.1) - elasticsearch (~> 2.0.1) + elasticsearch (~> 5.0.1) font-awesome-rails (~> 4.7.0) http_accept_language (~> 2.1.0) i18n-js (>= 3.0.0.rc15) diff --git a/src/api-umbrella/web-app/app/controllers/admin/stats_controller.rb b/src/api-umbrella/web-app/app/controllers/admin/stats_controller.rb index 0a66257e3..fd20c28c8 100644 --- a/src/api-umbrella/web-app/app/controllers/admin/stats_controller.rb +++ b/src/api-umbrella/web-app/app/controllers/admin/stats_controller.rb @@ -89,7 +89,7 @@ def logs csv_time(row["request_at"]), row["request_method"], row["request_host"], - strip_api_key_from_url(row["request_url"]), + strip_api_key_from_url(row), row["user_email"], row["request_ip"], row["request_ip_country"], @@ -227,7 +227,10 @@ def map private - def strip_api_key_from_url(url) + def strip_api_key_from_url(record) + url = "#{record["request_scheme"]}://#{record["request_host"]}#{record["request_path"]}" + url += "?#{record["request_query"]}" 
if(record["request_query"]) + stripped = url.gsub(/\bapi_key=?[^&]*(&|$)/, "") stripped.gsub!(/&$/, "") stripped diff --git a/src/api-umbrella/web-app/app/models/log_search/elastic_search.rb b/src/api-umbrella/web-app/app/models/log_search/elastic_search.rb index d375231a0..01d0827e4 100644 --- a/src/api-umbrella/web-app/app/models/log_search/elastic_search.rb +++ b/src/api-umbrella/web-app/app/models/log_search/elastic_search.rb @@ -14,8 +14,8 @@ def initialize(options = {}) @query = { :query => { - :filtered => { - :query => { + :bool => { + :must => { :match_all => {}, }, :filter => { @@ -74,16 +74,18 @@ def permission_scope!(scopes) filter[:bool][:should] << parse_query_builder(rule) end - @query[:query][:filtered][:filter][:bool][:must] << filter + @query[:query][:bool][:filter][:bool][:must] << filter end def search_type!(search_type) - @query_options[:search_type] = search_type + if(search_type == "count") + @query_options[:size] = 0 + end end def search!(query_string) if(query_string.present?) - @query[:query][:filtered][:query] = { + @query[:query][:bool][:query] = { :query_string => { :query => query_string, }, @@ -98,7 +100,7 @@ def query!(query) filter = parse_query_builder(query) if(filter.present?) - @query[:query][:filtered][:filter][:bool][:must] << filter + @query[:query][:bool][:filter][:bool][:must] << filter end end @@ -190,7 +192,7 @@ def parse_query_builder(query) end if(rule["operator"] =~ /(^not|^is_null)/ && filter.present?) - filter = { :not => filter } + filter = { :bool => { :must_not => [filter] } } end filters << filter @@ -222,7 +224,7 @@ def sort!(sort) end def exclude_imported! - @query[:query][:filtered][:filter][:bool][:must_not] << { + @query[:query][:bool][:filter][:bool][:must_not] << { :exists => { :field => "imported", }, @@ -230,7 +232,7 @@ def exclude_imported! end def filter_by_date_range! - @query[:query][:filtered][:filter][:bool][:must] << { + @query[:query][:bool][:filter][:bool][:must] << { :range => { :request_at => { :from => @start_time.iso8601, @@ -241,7 +243,7 @@ def filter_by_date_range! 
end def filter_by_request_path!(request_path) - @query[:query][:filtered][:filter][:bool][:must] << { + @query[:query][:bool][:filter][:bool][:must] << { :term => { :request_path => request_path, }, @@ -249,7 +251,7 @@ def filter_by_request_path!(request_path) end def filter_by_api_key!(api_key) - @query[:query][:filtered][:filter][:bool][:must] << { + @query[:query][:bool][:filter][:bool][:must] << { :term => { :api_key => api_key, }, @@ -257,7 +259,7 @@ def filter_by_api_key!(api_key) end def filter_by_user!(user_email) - @query[:query][:filtered][:filter][:bool][:must] << { + @query[:query][:bool][:filter][:bool][:must] << { :term => { :user => { :user_email => user_email, @@ -267,14 +269,14 @@ def filter_by_user!(user_email) end def filter_by_user_ids!(user_ids) - @query[:query][:filtered][:filter][:bool][:must] << { + @query[:query][:bool][:filter][:bool][:must] << { :terms => { :user_id => user_ids, }, } end - def aggregate_by_drilldown!(prefix, size = 0) + def aggregate_by_drilldown!(prefix, size = 1_000_000) @query[:aggregations][:drilldown] = { :terms => { :field => "request_hierarchy", @@ -285,7 +287,7 @@ def aggregate_by_drilldown!(prefix, size = 0) end def aggregate_by_drilldown_over_time!(prefix) - @query[:query][:filtered][:filter][:bool][:must] << { + @query[:query][:bool][:filter][:bool][:must] << { :prefix => { :request_hierarchy => prefix, }, @@ -387,7 +389,7 @@ def aggregate_by_region_field!(field) end def aggregate_by_country_regions!(country) - @query[:query][:filtered][:filter][:bool][:must] << { + @query[:query][:bool][:filter][:bool][:must] << { :term => { :request_ip_country => country }, } @@ -395,10 +397,10 @@ def aggregate_by_country_regions!(country) end def aggregate_by_us_state_cities!(country, state) - @query[:query][:filtered][:filter][:bool][:must] << { + @query[:query][:bool][:filter][:bool][:must] << { :term => { :request_ip_country => country }, } - @query[:query][:filtered][:filter][:bool][:must] << { + @query[:query][:bool][:filter][:bool][:must] << { :term => { :request_ip_region => state }, } @@ -406,7 +408,7 @@ def aggregate_by_us_state_cities!(country, state) end def aggregate_by_country_cities!(country) - @query[:query][:filtered][:filter][:bool][:must] << { + @query[:query][:bool][:filter][:bool][:must] << { :term => { :request_ip_country => country }, } @@ -458,7 +460,7 @@ def aggregate_by_user_stats!(options = {}) @query[:aggregations][:user_stats] = { :terms => { :field => :user_id, - :size => 0, + :size => 1_000_000, }.merge(options), :aggregations => { :last_request_at => { diff --git a/src/api-umbrella/web-app/app/views/admin/stats/logs.rabl b/src/api-umbrella/web-app/app/views/admin/stats/logs.rabl index d69321859..ec31ba212 100644 --- a/src/api-umbrella/web-app/app/views/admin/stats/logs.rabl +++ b/src/api-umbrella/web-app/app/views/admin/stats/logs.rabl @@ -6,7 +6,7 @@ node(:recordsFiltered) { @result.total } node :data do @result.documents.map do |log| filtered = log["_source"].except("api_key", "_type", "_score", "_index").merge({ - "request_url" => strip_api_key_from_url(log["_source"]["request_url"]).gsub(%r{^.*://[^/]*}, "") + "request_url" => strip_api_key_from_url(log["_source"]).gsub(%r{^.*://[^/]*}, "") }) if(filtered["request_query"] && filtered["request_query"]["api_key"]) diff --git a/templates/etc/elasticsearch/log4j2.properties.mustache b/templates/etc/elasticsearch/log4j2.properties.mustache new file mode 100644 index 000000000..3702afff9 --- /dev/null +++ b/templates/etc/elasticsearch/log4j2.properties.mustache @@ -0,0 
+1,74 @@ +status = error + +# log action execution errors for easier debugging +logger.action.name = org.elasticsearch.action +logger.action.level = debug + +appender.console.type = Console +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n + +appender.rolling.type = RollingFile +appender.rolling.name = rolling +appender.rolling.fileName = ${sys:es.logs}.log +appender.rolling.layout.type = PatternLayout +appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n +appender.rolling.filePattern = ${sys:es.logs}-%d{yyyy-MM-dd}.log +appender.rolling.policies.type = Policies +appender.rolling.policies.time.type = TimeBasedTriggeringPolicy +appender.rolling.policies.time.interval = 1 +appender.rolling.policies.time.modulate = true + +rootLogger.level = info +rootLogger.appenderRef.console.ref = console +rootLogger.appenderRef.rolling.ref = rolling + +appender.deprecation_rolling.type = RollingFile +appender.deprecation_rolling.name = deprecation_rolling +appender.deprecation_rolling.fileName = ${sys:es.logs}_deprecation.log +appender.deprecation_rolling.layout.type = PatternLayout +appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n +appender.deprecation_rolling.filePattern = ${sys:es.logs}_deprecation-%i.log.gz +appender.deprecation_rolling.policies.type = Policies +appender.deprecation_rolling.policies.size.type = SizeBasedTriggeringPolicy +appender.deprecation_rolling.policies.size.size = 1GB +appender.deprecation_rolling.strategy.type = DefaultRolloverStrategy +appender.deprecation_rolling.strategy.max = 4 + +logger.deprecation.name = org.elasticsearch.deprecation +logger.deprecation.level = warn +logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling +logger.deprecation.additivity = false + +appender.index_search_slowlog_rolling.type = RollingFile +appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling +appender.index_search_slowlog_rolling.fileName = ${sys:es.logs}_index_search_slowlog.log +appender.index_search_slowlog_rolling.layout.type = PatternLayout +appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n +appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs}_index_search_slowlog-%d{yyyy-MM-dd}.log +appender.index_search_slowlog_rolling.policies.type = Policies +appender.index_search_slowlog_rolling.policies.time.type = TimeBasedTriggeringPolicy +appender.index_search_slowlog_rolling.policies.time.interval = 1 +appender.index_search_slowlog_rolling.policies.time.modulate = true + +logger.index_search_slowlog_rolling.name = index.search.slowlog +logger.index_search_slowlog_rolling.level = trace +logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling.ref = index_search_slowlog_rolling +logger.index_search_slowlog_rolling.additivity = false + +appender.index_indexing_slowlog_rolling.type = RollingFile +appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling +appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs}_index_indexing_slowlog.log +appender.index_indexing_slowlog_rolling.layout.type = PatternLayout +appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n +appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs}_index_indexing_slowlog-%d{yyyy-MM-dd}.log 
+appender.index_indexing_slowlog_rolling.policies.type = Policies +appender.index_indexing_slowlog_rolling.policies.time.type = TimeBasedTriggeringPolicy +appender.index_indexing_slowlog_rolling.policies.time.interval = 1 +appender.index_indexing_slowlog_rolling.policies.time.modulate = true + +logger.index_indexing_slowlog.name = index.indexing.slowlog.index +logger.index_indexing_slowlog.level = trace +logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling +logger.index_indexing_slowlog.additivity = false diff --git a/templates/etc/elasticsearch/logging.yml.mustache b/templates/etc/elasticsearch/logging.yml.mustache deleted file mode 100644 index 5b84af937..000000000 --- a/templates/etc/elasticsearch/logging.yml.mustache +++ /dev/null @@ -1,47 +0,0 @@ -# you can override this using by setting a system property, for example -Des.logger.level=DEBUG -es.logger.level: INFO -rootLogger: ${es.logger.level}, console -logger: - # log action execution errors for easier debugging - action: DEBUG - # reduce the logging for aws, too much is logged under the default INFO - com.amazonaws: WARN - org.apache.http: INFO - - # gateway - #gateway: DEBUG - #index.gateway: DEBUG - - # peer shard recovery - #indices.recovery: DEBUG - - # discovery - #discovery: TRACE - - index.search.slowlog: TRACE, index_search_slow_log_file - index.indexing.slowlog: TRACE, index_indexing_slow_log_file - -additivity: - index.search.slowlog: false - index.indexing.slowlog: false - -appender: - console: - type: console - layout: - type: consolePattern - conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" - - index_search_slow_log_file: - type: file - file: ${path.logs}/elasticsearch/index-search-slowlog.log - layout: - type: pattern - conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" - - index_indexing_slow_log_file: - type: file - file: ${path.logs}/elasticsearch/index-indexing-slowlog.log - layout: - type: pattern - conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" diff --git a/templates/etc/perp/elasticsearch/rc.env.mustache b/templates/etc/perp/elasticsearch/rc.env.mustache index 2f356c015..523b536e6 100644 --- a/templates/etc/perp/elasticsearch/rc.env.mustache +++ b/templates/etc/perp/elasticsearch/rc.env.mustache @@ -1,2 +1 @@ -ES_JAVA_OPTS=-Des.default.path.conf={{etc_dir}}/elasticsearch -Dmapper.allow_dots_in_name=true -XX:-HeapDumpOnOutOfMemoryError {{elasticsearch.embedded_server_env.java_opts}} -ES_HEAP_SIZE={{elasticsearch.embedded_server_env.heap_size}} +ES_JAVA_OPTS=-Xms{{elasticsearch.embedded_server_env.heap_size}} -Xmx{{elasticsearch.embedded_server_env.heap_size}} -Des.path.conf={{etc_dir}}/elasticsearch -Des.path.logs={{log_dir}}/elasticsearch -XX:-HeapDumpOnOutOfMemoryError {{elasticsearch.embedded_server_env.java_opts}} diff --git a/test/proxy/logging/test_basics.rb b/test/proxy/logging/test_basics.rb index ac829a7dd..0804a622c 100644 --- a/test/proxy/logging/test_basics.rb +++ b/test/proxy/logging/test_basics.rb @@ -226,11 +226,15 @@ def test_logs_request_at_as_date "type" => "date", "format" => "dateOptionalTime", }, property) - elsif($config["elasticsearch"]["api_version"] >= 2) + elsif($config["elasticsearch"]["api_version"] >= 2 && $config["elasticsearch"]["api_version"] < 5) assert_equal({ "type" => "date", "format" => "strict_date_optional_time||epoch_millis", }, property) + elsif($config["elasticsearch"]["api_version"] >= 5) + assert_equal({ + "type" => "date", + }, property) else flunk("Unknown elasticsearch version: 
#{$config["elasticsearch"]["api_version"].inspect}") end From f6bd3a30bfa64a006758271d5ecc7bff4a19a09c Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Sat, 25 Feb 2017 16:09:36 -0700 Subject: [PATCH 006/367] Elasticsearch v5 mapping fixes. --- build/cmake/versions.cmake | 4 ++-- config/elasticsearch_templates.json | 35 ++++++++++++----------------- test/proxy/logging/test_basics.rb | 23 ++++++++++++++++--- 3 files changed, 36 insertions(+), 26 deletions(-) diff --git a/build/cmake/versions.cmake b/build/cmake/versions.cmake index ea04a7441..d723c3940 100644 --- a/build/cmake/versions.cmake +++ b/build/cmake/versions.cmake @@ -3,8 +3,8 @@ set(API_UMBRELLA_STATIC_SITE_VERSION e54283244890e3d5ffeb7ec00dde2f5eaa778d28) set(API_UMBRELLA_STATIC_SITE_HASH 037b9317f6b08eb2073060c1beed16e2) set(BUNDLER_VERSION 1.14.4) set(BUNDLER_HASH dbcd7c05de13ff4a9ded7353fe761767e5777fe9c49d2f1420f50672cfaa4ec1) -set(ELASTICSEARCH_VERSION 5.2.0) -set(ELASTICSEARCH_HASH aa0e7411b27bc897727517b524e69c7da0694a0a) +set(ELASTICSEARCH_VERSION 5.2.1) +set(ELASTICSEARCH_HASH 93bae92937075fed18ac4a4dd57798aca792af03) set(FLUME_VERSION 1.7.0) set(FLUME_HASH 12496e632a96d7ca823ab3c239a2a7d2) set(JSON_C_VERSION 0.12.1) diff --git a/config/elasticsearch_templates.json b/config/elasticsearch_templates.json index 0256c1fd6..9bc892e8b 100644 --- a/config/elasticsearch_templates.json +++ b/config/elasticsearch_templates.json @@ -28,12 +28,12 @@ "dynamic": false, "properties": { "api_backend_id": { - "type": "string", - "index": "not_analyzed" + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "api_backend_url_match_id": { - "type": "string", - "index": "not_analyzed" + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "api_key": { "type": "keyword", @@ -52,8 +52,7 @@ "normalizer": "lowercase_normalizer" }, "request_at": { - "type": "date", - "doc_values": true + "type": "date" }, "request_basic_auth_username": { "type": "keyword", @@ -112,16 +111,15 @@ "normalizer": "lowercase_normalizer" }, "request_size": { - "type": "integer", - "doc_values": true + "type": "integer" }, - "request_query": { + "request_url": { "type": "keyword", "normalizer": "lowercase_normalizer" }, "request_url_query": { - "type": "string", - "analyzer": "lowercase_normalizer" + "type": "keyword", + "normalizer": "lowercase_normalizer" }, "request_user_agent": { "type": "keyword", @@ -136,8 +134,7 @@ "normalizer": "lowercase_normalizer" }, "response_age": { - "type": "integer", - "doc_values": true + "type": "integer" }, "response_cache": { "type": "keyword", @@ -148,8 +145,7 @@ "normalizer": "lowercase_normalizer" }, "response_content_length": { - "type": "integer", - "doc_values": true + "type": "integer" }, "response_content_type": { "type": "keyword", @@ -160,16 +156,13 @@ "normalizer": "lowercase_normalizer" }, "response_size": { - "type": "integer", - "doc_values": true + "type": "integer" }, "response_status": { - "type": "short", - "doc_values": true + "type": "short" }, "response_time": { - "type": "integer", - "doc_values": true + "type": "integer" }, "response_transfer_encoding": { "type": "keyword", diff --git a/test/proxy/logging/test_basics.rb b/test/proxy/logging/test_basics.rb index 71403b28c..616a4edff 100644 --- a/test/proxy/logging/test_basics.rb +++ b/test/proxy/logging/test_basics.rb @@ -33,9 +33,11 @@ def test_logs_expected_fields_for_non_chunked_non_gzip })) assert_response_code(200, response) - record = wait_for_log(response)[:hit_source] + result = wait_for_log(response) + record = result[:hit_source] + 
hit = result[:hit] - assert_equal([ + expected_fields = [ "api_backend_id", "api_backend_url_match_id", "api_key", @@ -70,7 +72,22 @@ def test_logs_expected_fields_for_non_chunked_non_gzip "user_email", "user_id", "user_registration_source", - ].sort, record.keys.sort) + ] + assert_equal(expected_fields.sort, record.keys.sort) + + mapping = LogItem.gateway.client.indices.get_mapping({ + :index => hit["_index"], + :type => hit["_type"], + }) + expected_mapping_fields = expected_fields + [ + "gatekeeper_denied_code", + "request_ip_city", + "request_ip_country", + "request_ip_region", + "response_content_encoding", + "response_transfer_encoding", + ] + assert_equal(expected_mapping_fields.sort, mapping[hit["_index"]]["mappings"][hit["_type"]]["properties"].keys.sort) assert_kind_of(String, record["api_backend_id"]) assert_kind_of(String, record["api_backend_url_match_id"]) From 9cd836744c8c2cdeab5135c0875dd165d03b6e6d Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Sun, 26 Feb 2017 19:54:16 -0700 Subject: [PATCH 007/367] An initial, messy elasticsearch data migration script. --- bin/api-umbrella-env | 2 + config/elasticsearch_templates.json | 29 +- scripts/elasticsearch-v2-migrate/migrate | 9 + scripts/elasticsearch-v2-migrate/migrate.conf | 2 + scripts/elasticsearch-v2-migrate/migrate.lua | 406 ++++++++++++++++++ .../proxy/jobs/elasticsearch_setup.lua | 12 +- src/api-umbrella/proxy/log_utils.lua | 39 +- src/api-umbrella/version.txt | 2 +- templates/etc/rsyslog.conf.mustache | 51 ++- test/proxy/logging/test_basics.rb | 14 +- .../api_umbrella_test_helpers/logging.rb | 3 - 11 files changed, 522 insertions(+), 47 deletions(-) create mode 100755 scripts/elasticsearch-v2-migrate/migrate create mode 100644 scripts/elasticsearch-v2-migrate/migrate.conf create mode 100644 scripts/elasticsearch-v2-migrate/migrate.lua diff --git a/bin/api-umbrella-env b/bin/api-umbrella-env index facfe4802..97922fbc6 100644 --- a/bin/api-umbrella-env +++ b/bin/api-umbrella-env @@ -47,6 +47,8 @@ if [ -z "${API_UMBRELLA_EMBEDDED_ROOT:-}" ]; then fi fi +#export API_UMBRELLA_RUNTIME_CONFIG="$API_UMBRELLA_ROOT/var/run/runtime_config.yml" + # Set the path for binaries. 
export PATH="$API_UMBRELLA_EMBEDDED_ROOT/sbin:$API_UMBRELLA_EMBEDDED_ROOT/bin:$PATH" diff --git a/config/elasticsearch_templates.json b/config/elasticsearch_templates.json index 9bc892e8b..c2ace4dc6 100644 --- a/config/elasticsearch_templates.json +++ b/config/elasticsearch_templates.json @@ -5,7 +5,8 @@ "template": "api-umbrella-logs-v2-*", "settings": { "index": { - "number_of_shards": 1 + "number_of_shards": 1, + "codec": "best_compression" }, "analysis": { "normalizer": { @@ -113,7 +114,31 @@ "request_size": { "type": "integer" }, - "request_url": { + "request_url_hierarchy_level0": { + "type": "keyword", + "normalizer": "lowercase_normalizer" + }, + "request_url_hierarchy_level1": { + "type": "keyword", + "normalizer": "lowercase_normalizer" + }, + "request_url_hierarchy_level2": { + "type": "keyword", + "normalizer": "lowercase_normalizer" + }, + "request_url_hierarchy_level3": { + "type": "keyword", + "normalizer": "lowercase_normalizer" + }, + "request_url_hierarchy_level4": { + "type": "keyword", + "normalizer": "lowercase_normalizer" + }, + "request_url_hierarchy_level5": { + "type": "keyword", + "normalizer": "lowercase_normalizer" + }, + "request_url_hierarchy_level6": { "type": "keyword", "normalizer": "lowercase_normalizer" }, diff --git a/scripts/elasticsearch-v2-migrate/migrate b/scripts/elasticsearch-v2-migrate/migrate new file mode 100755 index 000000000..d05f853a5 --- /dev/null +++ b/scripts/elasticsearch-v2-migrate/migrate @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e -u + +dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +export API_UMBRELLA_RUNTIME_CONFIG="/opt/api-umbrella/var/run/runtime_config.yml" + +api-umbrella-exec resty --http-include "$dir/migrate.conf" "$dir/migrate.lua" "$@" diff --git a/scripts/elasticsearch-v2-migrate/migrate.conf b/scripts/elasticsearch-v2-migrate/migrate.conf new file mode 100644 index 000000000..b49f5c2fe --- /dev/null +++ b/scripts/elasticsearch-v2-migrate/migrate.conf @@ -0,0 +1,2 @@ +error_log stderr error; +lua_shared_dict active_config 600k; diff --git a/scripts/elasticsearch-v2-migrate/migrate.lua b/scripts/elasticsearch-v2-migrate/migrate.lua new file mode 100644 index 000000000..54f0c6760 --- /dev/null +++ b/scripts/elasticsearch-v2-migrate/migrate.lua @@ -0,0 +1,406 @@ +config = require "api-umbrella.proxy.models.file_config" +require "api-umbrella.proxy.startup.init_user_agent_parser_data" +local elasticsearch_setup = require "api-umbrella.proxy.jobs.elasticsearch_setup" + +elasticsearch_setup.wait_for_elasticsearch() +elasticsearch_setup.create_templates() + +local log_utils = require "api-umbrella.proxy.log_utils" +local escape_uri_non_ascii = require "api-umbrella.utils.escape_uri_non_ascii" +local nillify_json_nulls = require "api-umbrella.utils.nillify_json_nulls" +local inspect = require "inspect" +local cjson = require "cjson" +local argparse = require "argparse" +local luatz = require "luatz" +local http = require "resty.http" +local tablex = require "pl.tablex" +local pretty = require "pl.pretty" +local Date = require "pl.Date" +local plutils = require "pl.utils" + +local cjson_encode = cjson.encode +local keys = tablex.keys +local split = plutils.split + +local bulk_size = 1000 +local args = {} + +local function table_difference(t1, t2) + local res = {} + for k,v in pairs(t1) do + if not tablex.deepcompare(t1[k], t2[k]) then res[k] = v end + end + return res +end + +local function parse_args() + local parser = argparse("api-umbrella", "Open source API management") + + parser:option("--input", "Input 
connection."):count(1) + parser:option("--output", "Output connection."):count(1) + + local args = parser:parse() + + local input_uri, input_err = http:parse_uri(args["input"], false) + if not input_uri then + print(input_err) + os.exit(1) + end + + local output_uri, output_err = http:parse_uri(args["output"], false) + if not output_uri then + print(output_err) + os.exit(1) + end + + local _, input_host, input_port = unpack(input_uri) + args["input_host"] = input_host + args["input_port"] = input_port + + local _, output_host, output_port = unpack(output_uri) + args["output_host"] = output_host + args["output_port"] = output_port + + --print(inspect(args)) + return args +end + +local function elasticsearch_query(host, port, options) + local httpc = http.new() + httpc:set_timeout(120000) + httpc:connect(host, port) + local res, err = httpc:request(options) + if err then + ngx.log(ngx.ERR, "elasticsearch query failed: " .. err) + return nil, body_err + end + + local body, body_err = res:read_body() + if not body then + ngx.log(ngx.ERR, body_err) + return nil, body_err + end + + local keepalive_ok, keepalive_err = httpc:set_keepalive() + if not keepalive_ok then + ngx.log(ngx.ERR, keepalive_err) + end + + local response = cjson.decode(body) + return response +end + +local function v1_first_index_time(args) + local res, err = elasticsearch_query(args["input_host"], args["input_port"], { + method = "GET", + path = "/api-umbrella-logs-v1-*/_aliases", + }) + + --print(pretty.write(res)) + local months = {} + for index, _ in pairs(res) do + local m = ngx.re.match(index, "-(\\d{4})-(\\d{2})") + if m then + local date = luatz.timetable.new(tonumber(m[1]), tonumber(m[2]), 1, 0, 0, 0) + table.insert(months, date) + end + end + table.sort(months) + --print(pretty.write(months)) + return months[1] +end + +local bulk_commands = {} +local last_bulk_commands_timestamp = nil +local function flush_bulk_commands() + if #bulk_commands == 0 then + return + end + + print("\n" .. os.date("!%Y-%m-%dT%TZ") .. " - Log data from " .. os.date("!%Y-%m-%dT%TZ", last_bulk_commands_timestamp / 1000)) + + local httpc = http.new() + httpc:set_timeout(120000) + httpc:connect(config["elasticsearch"]["_first_server"]["host"], config["elasticsearch"]["_first_server"]["port"]) + + local res, err = elasticsearch_query(args["output_host"], args["output_port"], { + method = "POST", + path = "/_bulk", + headers = { + ["Content-Type"] = "application/json", + }, + body = table.concat(bulk_commands, "\n") .. "\n", + }) + + if type(res["items"]) ~= "table" then + ngx.log(ngx.ERR, "unexpected error: " .. (body or nil)) + return false + end + + local skipped_count = 0 + local created_count = 0 + local error_count = 0 + local created_ids = {} + --print(inspect(res)) + for _, item in ipairs(res["items"]) do + if item["create"]["status"] == 409 then + io.write(string.char(27) .. "[30m" .. string.char(27) .. "[2m-" .. string.char(27) .. "[0m") + skipped_count = skipped_count + 1 + elseif item["create"]["status"] == 201 then + io.write(string.char(27) .. "[32m" .. string.char(27) .. "[1m✔" .. string.char(27) .. "[0m") + created_count = created_count + 1 + table.insert(created_ids, item["create"]["_id"]) + else + io.write(string.char(27) .. "[31m" .. string.char(27) .. "[1m✖" .. string.char(27) .. "[0m") + error_count = error_count + 1 + end + end + print("") + if created_count > 0 then + print("Created: " .. created_count) + -- print("Created IDs: " .. 
table.concat(created_ids, ", ")) + end + if skipped_count > 0 then + print("Skipped (already exists): " .. skipped_count) + end + if error_count > 0 then + print("Errors: " .. error_count) + end + + bulk_commands = {} + last_bulk_commands_timestamp = nil +end + +local function process_hit(hit, output_index) + nillify_json_nulls(hit) + + --print(pretty.write(hit)) + source = hit["_source"] + local data = { + api_backend_id = source["api_backend_id"], + api_backend_url_match_id = source["api_backend_url_match_id"], + legacy_api_key = source["api_key"], + denied_reason = source["gatekeeper_denied_code"], + request_accept = source["request_accept"], + request_accept_encoding = source["request_accept_encoding"], + timestamp_utc = source["request_at"], + request_basic_auth_username = source["request_basic_auth_username"], + request_connection = source["request_connection"], + request_content_type = source["request_content_type"], + request_url_hierarchy = source["request_hierarchy"], + request_url_hierarchy_level0 = source["request_url_hierarchy_level0"], + request_url_hierarchy_level1 = source["request_url_hierarchy_level1"], + request_url_hierarchy_level2 = source["request_url_hierarchy_level2"], + request_url_hierarchy_level3 = source["request_url_hierarchy_level3"], + request_url_hierarchy_level4 = source["request_url_hierarchy_level4"], + request_url_hierarchy_level5 = source["request_url_hierarchy_level5"], + request_url_hierarchy_level6 = source["request_url_hierarchy_level6"], + request_url_host = source["request_host"], + request_ip = source["request_ip"], + request_ip_city = source["request_ip_city"], + request_ip_country = source["request_ip_country"], + request_ip_region = source["request_ip_region"], + request_method = source["request_method"], + request_origin = source["request_origin"], + request_url_path = source["request_path"], + request_referer = source["request_referer"], + request_url_scheme = source["request_scheme"], + request_size = source["request_size"], + request_url_query = source["request_url_query"], + request_user_agent = source["request_user_agent"], + request_user_agent_family = source["request_user_agent_family"], + request_user_agent_type = source["request_user_agent_type"], + response_age = source["response_age"], + response_cache = source["response_cache"], + response_content_encoding = source["response_content_encoding"], + response_content_length = source["response_content_length"], + response_content_type = source["response_content_type"], + response_server = source["response_server"], + response_size = source["response_size"], + response_status = source["response_status"], + timer_response = source["response_time"], + response_transfer_encoding = source["response_transfer_encoding"], + legacy_user_email = source["user_email"], + user_id = source["user_id"], + legacy_user_registration_source = source["user_registration_source"], + } + + if type(data["timestamp_utc"]) == "string" then + data["timestamp_utc"] = luatz.parse.rfc_3339(data["timestamp_utc"]):timestamp() * 1000 + end + + log_utils.set_url_hierarchy(data) + + if not data["request_url_query"] and source["request_url"] then + local parts = split(source["request_url"], "?", true, 2) + if parts[2] then + data["request_url_query"] = escape_uri_non_ascii(parts[2]) + end + end + + local new_hit = log_utils.normalized_data(data) + local new_source = { + api_backend_id = new_hit["api_backend_id"], + api_backend_url_match_id = new_hit["api_backend_url_match_id"], + api_key = new_hit["legacy_api_key"], + 
gatekeeper_denied_code = new_hit["denied_reason"], + request_accept = new_hit["request_accept"], + request_accept_encoding = new_hit["request_accept_encoding"], + request_at = new_hit["timestamp_utc"], + request_basic_auth_username = new_hit["request_basic_auth_username"], + request_connection = new_hit["request_connection"], + request_content_type = new_hit["request_content_type"], + request_hierarchy = new_hit["request_url_hierarchy"], + request_url_hierarchy_level0 = new_hit["request_url_hierarchy_level0"], + request_url_hierarchy_level1 = new_hit["request_url_hierarchy_level1"], + request_url_hierarchy_level2 = new_hit["request_url_hierarchy_level2"], + request_url_hierarchy_level3 = new_hit["request_url_hierarchy_level3"], + request_url_hierarchy_level4 = new_hit["request_url_hierarchy_level4"], + request_url_hierarchy_level5 = new_hit["request_url_hierarchy_level5"], + request_url_hierarchy_level6 = new_hit["request_url_hierarchy_level6"], + request_host = new_hit["request_url_host"], + request_ip = new_hit["request_ip"], + request_ip_city = new_hit["request_ip_city"], + request_ip_country = new_hit["request_ip_country"], + request_ip_region = new_hit["request_ip_region"], + request_method = new_hit["request_method"], + request_origin = new_hit["request_origin"], + request_path = new_hit["request_url_path"], + request_referer = new_hit["request_referer"], + request_scheme = new_hit["request_url_scheme"], + request_size = new_hit["request_size"], + request_url_query = new_hit["request_url_query"], + request_user_agent = new_hit["request_user_agent"], + request_user_agent_family = new_hit["request_user_agent_family"], + request_user_agent_type = new_hit["request_user_agent_type"], + response_age = new_hit["response_age"], + response_cache = new_hit["response_cache"], + response_content_encoding = new_hit["response_content_encoding"], + response_content_length = new_hit["response_content_length"], + response_content_type = new_hit["response_content_type"], + response_server = new_hit["response_server"], + response_size = new_hit["response_size"], + response_status = new_hit["response_status"], + response_time = new_hit["timer_response"], + response_transfer_encoding = new_hit["response_transfer_encoding"], + user_email = new_hit["legacy_user_email"], + user_id = new_hit["user_id"], + user_registration_source = new_hit["legacy_user_registration_source"], + imported = source["imported"], + } + + --print(inspect(table_difference(source, new_source))) + --print(inspect(table_difference(new_source, source))) + + table.insert(bulk_commands, cjson_encode({ + create = { + _index = output_index, + _type = "log", + _id = hit["_id"], + } + })) + table.insert(bulk_commands, cjson_encode(new_source)) + + if not last_bulk_commands_timestamp then + last_bulk_commands_timestamp = data["timestamp_utc"] + end + + if #bulk_commands >= bulk_size * 2 then + flush_bulk_commands() + end +end + +local function search_day(date_start, date_end) + local input_index = string.format("api-umbrella-logs-v1-%04d-%02d", date_start["year"], date_start["month"]) + local output_index = string.format("api-umbrella-logs-v2-%04d-%02d-%02d", date_start["year"], date_start["month"], date_start["day"]) + local scroll_id + while true do + local res, err + if scroll_id then + res, err = elasticsearch_query(args["input_host"], args["input_port"], { + method = "GET", + path = "/_search/scroll", + query = { + scroll = "5m", + scroll_id = scroll_id, + }, + }) + else + res, err = elasticsearch_query(args["input_host"], 
args["input_port"], { + method = "GET", + path = "/" .. input_index .. "/_search", + query = { + scroll = "5m", + scroll_id = scroll_id, + }, + headers = { + ["Content-Type"] = "application/json", + }, + body = cjson_encode({ + sort = "request_at", + size = bulk_size, + query = { + range = { + request_at = { + gte = date_start:timestamp() * 1000, + lt = date_end:timestamp() * 1000, + }, + }, + }, + }) + }) + end + if err then + ngx.log(ngx.ERR, "elasticsearch query failed: " .. err) + return false + end + + scroll_id = res["_scroll_id"] + --print "." + -- print(inspect(response)) + if not res["hits"] or not res["hits"]["hits"] or #res["hits"]["hits"] == 0 then + break + end + + for _, hit in ipairs(res["hits"]["hits"]) do + process_hit(hit, output_index) + end + end + + flush_bulk_commands() + + elasticsearch_query(args["output_host"], args["output_port"], { + method = "POST", + path = "/" .. output_index .. "/_forcemerge", + query = { + max_num_segments = "1", + }, + }) +end + +local function search() + --local date = v1_first_index_time(args) + --local end_date = luatz.now() + local date = luatz.timetable.new(2010, 8, 1, 0, 0, 0) + local end_date = luatz.timetable.new(2010, 9, 1, 0, 0, 0) + local date = luatz.timetable.new(2016, 12, 7, 0, 0, 0) + local end_date = luatz.timetable.new(2016, 12, 31, 0, 0, 0) + while date:timestamp() <= end_date:timestamp() do + next_day = date:clone() + next_day["day"] = next_day["day"] + 1 + next_day:normalise() + + search_day(date, next_day) + + --print(date:rfc_3339()) + --print(date:timestamp()) + date = next_day + end +end + +local function run() + args = parse_args() + search() +end + +run() diff --git a/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua b/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua index 5897f4831..565aeb5f9 100644 --- a/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua +++ b/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua @@ -8,7 +8,7 @@ local delay = 3600 -- in seconds local elasticsearch_host = config["elasticsearch"]["hosts"][1] -local function wait_for_elasticsearch() +function _M.wait_for_elasticsearch() local httpc = http.new() local elasticsearch_alive = false local wait_time = 0 @@ -38,7 +38,7 @@ local function wait_for_elasticsearch() end end -local function create_templates() +function _M.create_templates() -- Template creation only needs to be run once on startup or reload. 
local created = ngx.shared.active_config:get("elasticsearch_templates_created") if created then return end @@ -59,7 +59,7 @@ local function create_templates() ngx.shared.active_config:set("elasticsearch_templates_created", true) end -local function create_aliases() +function _M.create_aliases() local today = os.date("!%Y-%m", ngx.time()) local tomorrow = os.date("!%Y-%m", ngx.time() + 86400) @@ -116,10 +116,10 @@ local function create_aliases() end local function setup() - local _, err = wait_for_elasticsearch() + local _, err = _M.wait_for_elasticsearch() if not err then - create_templates() - create_aliases() + _M.create_templates() + _M.create_aliases() else ngx.log(ngx.ERR, "timed out waiting for eleasticsearch before setup, rerunning...") ngx.sleep(5) diff --git a/src/api-umbrella/proxy/log_utils.lua b/src/api-umbrella/proxy/log_utils.lua index b2dbe0c5d..f3190124a 100644 --- a/src/api-umbrella/proxy/log_utils.lua +++ b/src/api-umbrella/proxy/log_utils.lua @@ -65,9 +65,9 @@ end -- -- Will get stored like this for SQL storage: -- --- request_url_path_level1 = /api/ --- request_url_path_level2 = /api/foo/ --- request_url_path_level3 = /api/foo/bar.json +-- request_url_hierarchy_level1 = /api/ +-- request_url_hierarchy_level2 = /api/foo/ +-- request_url_hierarchy_level3 = /api/foo/bar.json -- -- And gets indexed as this array for ElasticSearch storage: -- @@ -84,7 +84,7 @@ end -- See: -- http://wiki.apache.org/solr/HierarchicalFaceting -- http://www.springyweb.com/2012/01/hierarchical-faceting-with-elastic.html -local function set_url_hierarchy(data) +function _M.set_url_hierarchy(data) -- Remote duplicate slashes (eg foo//bar becomes foo/bar). local cleaned_path = ngx.re.gsub(data["request_url_path"], "//+", "/", "jo") @@ -103,15 +103,16 @@ local function set_url_hierarchy(data) -- Setup top-level host hierarchy for ElasticSearch storage. data["request_url_hierarchy"] = {} - local host_token = "0/" .. data["request_url_host"] + local host_level = data["request_url_host"] if #path_parts > 0 then - host_token = host_token .. "/" + host_level = host_level .. "/" end - table.insert(data["request_url_hierarchy"], host_token) + data["request_url_hierarchy_level0"] = host_level + table.insert(data["request_url_hierarchy"], "0/" .. host_level) - local path_level = "/" + local path_tree = "/" for index, _ in ipairs(path_parts) do - path_level = path_level .. path_parts[index] + local path_level = path_parts[index] -- Add a trailing slash to all parent paths, but not the last path. This -- is done for two reasons: @@ -128,10 +129,11 @@ local function set_url_hierarchy(data) end -- Store in the request_url_path_level(1-6) fields for SQL storage. - data["request_url_path_level" .. index] = path_level + data["request_url_hierarchy_level" .. index] = path_level -- Store as an array for ElasticSearch storage. - local path_token = index .. "/" .. data["request_url_host"] .. path_level + path_tree = path_tree .. path_level + local path_token = index .. "/" .. data["request_url_host"] .. path_tree table.insert(data["request_url_hierarchy"], path_token) end end @@ -302,7 +304,7 @@ function _M.set_computed_url_fields(data, ngx_ctx) data["legacy_request_url"] = data["legacy_request_url"] .. "?" .. 
data["request_url_query"] end - set_url_hierarchy(data) + _M.set_url_hierarchy(data) end function _M.set_computed_user_agent_fields(data) @@ -339,12 +341,13 @@ function _M.normalized_data(data) request_url_hierarchy = data["request_url_hierarchy"], request_url_host = lowercase_truncate(data["request_url_host"], 200), request_url_path = truncate(data["request_url_path"], 4000), - request_url_path_level1 = truncate(data["request_url_path_level1"], 40), - request_url_path_level2 = truncate(data["request_url_path_level2"], 40), - request_url_path_level3 = truncate(data["request_url_path_level3"], 40), - request_url_path_level4 = truncate(data["request_url_path_level4"], 40), - request_url_path_level5 = truncate(data["request_url_path_level5"], 40), - request_url_path_level6 = truncate(data["request_url_path_level6"], 40), + request_url_hierarchy_level0 = truncate(data["request_url_hierarchy_level0"], 200), + request_url_hierarchy_level1 = truncate(data["request_url_hierarchy_level1"], 200), + request_url_hierarchy_level2 = truncate(data["request_url_hierarchy_level2"], 200), + request_url_hierarchy_level3 = truncate(data["request_url_hierarchy_level3"], 200), + request_url_hierarchy_level4 = truncate(data["request_url_hierarchy_level4"], 200), + request_url_hierarchy_level5 = truncate(data["request_url_hierarchy_level5"], 200), + request_url_hierarchy_level6 = truncate(data["request_url_hierarchy_level6"], 200), request_url_port = tonumber(data["request_url_port"]), request_url_query = truncate(data["request_url_query"], 4000), request_url_scheme = lowercase_truncate(data["request_url_scheme"], 10), diff --git a/src/api-umbrella/version.txt b/src/api-umbrella/version.txt index a803cc227..525888825 100644 --- a/src/api-umbrella/version.txt +++ b/src/api-umbrella/version.txt @@ -1 +1 @@ -0.14.0 +0.15.0-pre1 diff --git a/templates/etc/rsyslog.conf.mustache b/templates/etc/rsyslog.conf.mustache index 54b91c24a..b76d68654 100644 --- a/templates/etc/rsyslog.conf.mustache +++ b/templates/etc/rsyslog.conf.mustache @@ -68,6 +68,27 @@ if($!raw!request_content_type != "") then { if($!raw!request_url_hierarchy != "") then { set $!usr!es!request_hierarchy = $!raw!request_url_hierarchy; } +if($!raw!request_url_hierarchy_level0 != "") then { + set $!usr!es!request_url_hierarchy_level0 = $!raw!request_url_hierarchy_level0; +} +if($!raw!request_url_hierarchy_level1 != "") then { + set $!usr!es!request_url_hierarchy_level1 = $!raw!request_url_hierarchy_level1; +} +if($!raw!request_url_hierarchy_level2 != "") then { + set $!usr!es!request_url_hierarchy_level2 = $!raw!request_url_hierarchy_level2; +} +if($!raw!request_url_hierarchy_level3 != "") then { + set $!usr!es!request_url_hierarchy_level3 = $!raw!request_url_hierarchy_level3; +} +if($!raw!request_url_hierarchy_level4 != "") then { + set $!usr!es!request_url_hierarchy_level4 = $!raw!request_url_hierarchy_level4; +} +if($!raw!request_url_hierarchy_level5 != "") then { + set $!usr!es!request_url_hierarchy_level5 = $!raw!request_url_hierarchy_level5; +} +if($!raw!request_url_hierarchy_level6 != "") then { + set $!usr!es!request_url_hierarchy_level6 = $!raw!request_url_hierarchy_level6; +} if($!raw!request_url_host != "") then { set $!usr!es!request_host = $!raw!request_url_host; } @@ -101,9 +122,6 @@ if($!raw!request_url_scheme != "") then { if($!raw!request_size != "") then { set $!usr!es!request_size = $!raw!request_size; } -if($!raw!legacy_request_url != "") then { - set $!usr!es!request_url = $!raw!legacy_request_url; -} if($!raw!request_url_query != "") 
then { set $!usr!es!request_url_query = $!raw!request_url_query; } @@ -281,23 +299,26 @@ if($!raw!request_url_host != "") then { if($!raw!request_url_path != "") then { set $!usr!sql!request_url_path = $!raw!request_url_path; } -if($!raw!request_url_path_level1 != "") then { - set $!usr!sql!request_url_path_level1 = $!raw!request_url_path_level1; +if($!raw!request_url_hierarchy_level0 != "") then { + set $!usr!sql!request_url_hierarchy_level0 = $!raw!request_url_hierarchy_level0; +} +if($!raw!request_url_hierarchy_level1 != "") then { + set $!usr!sql!request_url_hierarchy_level1 = $!raw!request_url_hierarchy_level1; } -if($!raw!request_url_path_level2 != "") then { - set $!usr!sql!request_url_path_level2 = $!raw!request_url_path_level2; +if($!raw!request_url_hierarchy_level2 != "") then { + set $!usr!sql!request_url_hierarchy_level2 = $!raw!request_url_hierarchy_level2; } -if($!raw!request_url_path_level3 != "") then { - set $!usr!sql!request_url_path_level3 = $!raw!request_url_path_level3; +if($!raw!request_url_hierarchy_level3 != "") then { + set $!usr!sql!request_url_hierarchy_level3 = $!raw!request_url_hierarchy_level3; } -if($!raw!request_url_path_level4 != "") then { - set $!usr!sql!request_url_path_level4 = $!raw!request_url_path_level4; +if($!raw!request_url_hierarchy_level4 != "") then { + set $!usr!sql!request_url_hierarchy_level4 = $!raw!request_url_hierarchy_level4; } -if($!raw!request_url_path_level5 != "") then { - set $!usr!sql!request_url_path_level5 = $!raw!request_url_path_level5; +if($!raw!request_url_hierarchy_level5 != "") then { + set $!usr!sql!request_url_hierarchy_level5 = $!raw!request_url_hierarchy_level5; } -if($!raw!request_url_path_level6 != "") then { - set $!usr!sql!request_url_path_level6 = $!raw!request_url_path_level6; +if($!raw!request_url_hierarchy_level6 != "") then { + set $!usr!sql!request_url_hierarchy_level6 = $!raw!request_url_hierarchy_level6; } if($!raw!request_url_query != "") then { set $!usr!sql!request_url_query = $!raw!request_url_query; diff --git a/test/proxy/logging/test_basics.rb b/test/proxy/logging/test_basics.rb index 616a4edff..c98cdf890 100644 --- a/test/proxy/logging/test_basics.rb +++ b/test/proxy/logging/test_basics.rb @@ -56,7 +56,11 @@ def test_logs_expected_fields_for_non_chunked_non_gzip "request_referer", "request_scheme", "request_size", - "request_url", + "request_url_hierarchy_level0", + "request_url_hierarchy_level1", + "request_url_hierarchy_level2", + "request_url_hierarchy_level3", + "request_url_hierarchy_level4", "request_url_query", "request_user_agent", "request_user_agent_family", @@ -84,6 +88,8 @@ def test_logs_expected_fields_for_non_chunked_non_gzip "request_ip_city", "request_ip_country", "request_ip_region", + "request_url_hierarchy_level5", + "request_url_hierarchy_level6", "response_content_encoding", "response_transfer_encoding", ] @@ -114,8 +120,12 @@ def test_logs_expected_fields_for_non_chunked_non_gzip assert_equal("http://example.com", record["request_referer"]) assert_equal("http", record["request_scheme"]) assert_kind_of(Numeric, record["request_size"]) - assert_equal(url, record["request_url"]) assert_equal("url1=#{param_url1}&url2=#{param_url2}&url3=#{param_url3}", record["request_url_query"]) + assert_equal("127.0.0.1:9080/", record["request_url_hierarchy_level0"]) + assert_equal("api/", record["request_url_hierarchy_level1"]) + assert_equal("logging-example/", record["request_url_hierarchy_level2"]) + assert_equal("foo/", record["request_url_hierarchy_level3"]) + assert_equal("bar", 
record["request_url_hierarchy_level4"]) assert_equal("curl/7.37.1", record["request_user_agent"]) assert_equal("cURL", record["request_user_agent_family"]) assert_equal("Library", record["request_user_agent_type"]) diff --git a/test/support/api_umbrella_test_helpers/logging.rb b/test/support/api_umbrella_test_helpers/logging.rb index 05aaef8b7..ddf3e01c2 100644 --- a/test/support/api_umbrella_test_helpers/logging.rb +++ b/test/support/api_umbrella_test_helpers/logging.rb @@ -73,8 +73,6 @@ def assert_logs_base_fields(record, user = nil) assert_operator(record["request_path"].length, :>=, 1) assert_equal("http", record["request_scheme"]) assert_kind_of(Numeric, record["request_size"]) - assert_kind_of(String, record["request_url"]) - assert_equal(true, record["request_url"].start_with?("http://127.0.0.1:9080/"), record["request_url"]) assert_kind_of(Numeric, record["response_size"]) assert_kind_of(Numeric, record["response_status"]) assert_kind_of(Numeric, record["response_time"]) @@ -91,7 +89,6 @@ def assert_logged_url(expected_url, record) logged_url = "#{record["request_scheme"]}://#{record["request_host"]}#{record["request_path"]}" logged_url += "?#{record["request_url_query"]}" if(record["request_url_query"]) assert_equal(expected_url, logged_url) - assert_equal(expected_url, record["request_url"]) end end end From 390c997afda38e8a50782d773c313d4209bed93d Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Tue, 4 Apr 2017 21:38:29 -0600 Subject: [PATCH 008/367] Make the migration script more flexible. Various cleanup to the data migration script, and allow setting start/end dates as CLI arguments. --- scripts/elasticsearch-v2-migrate/migrate.lua | 92 +++++++++++++------ .../etc/perp/elasticsearch/rc.env.mustache | 3 +- 2 files changed, 68 insertions(+), 27 deletions(-) diff --git a/scripts/elasticsearch-v2-migrate/migrate.lua b/scripts/elasticsearch-v2-migrate/migrate.lua index 54f0c6760..4a66bd8c0 100644 --- a/scripts/elasticsearch-v2-migrate/migrate.lua +++ b/scripts/elasticsearch-v2-migrate/migrate.lua @@ -1,22 +1,19 @@ config = require "api-umbrella.proxy.models.file_config" require "api-umbrella.proxy.startup.init_user_agent_parser_data" -local elasticsearch_setup = require "api-umbrella.proxy.jobs.elasticsearch_setup" - -elasticsearch_setup.wait_for_elasticsearch() -elasticsearch_setup.create_templates() -local log_utils = require "api-umbrella.proxy.log_utils" +local Date = require "pl.Date" +local argparse = require "argparse" +local cjson = require "cjson" +local elasticsearch_setup = require "api-umbrella.proxy.jobs.elasticsearch_setup" local escape_uri_non_ascii = require "api-umbrella.utils.escape_uri_non_ascii" -local nillify_json_nulls = require "api-umbrella.utils.nillify_json_nulls" +local http = require "resty.http" local inspect = require "inspect" -local cjson = require "cjson" -local argparse = require "argparse" +local log_utils = require "api-umbrella.proxy.log_utils" local luatz = require "luatz" -local http = require "resty.http" -local tablex = require "pl.tablex" -local pretty = require "pl.pretty" -local Date = require "pl.Date" +local nillify_json_nulls = require "api-umbrella.utils.nillify_json_nulls" local plutils = require "pl.utils" +local pretty = require "pl.pretty" +local tablex = require "pl.tablex" local cjson_encode = cjson.encode local keys = tablex.keys @@ -33,23 +30,40 @@ local function table_difference(t1, t2) return res end +local function parse_date(string) + local date + if string then + local m = ngx.re.match(string, 
"^(\\d{4})-(\\d{2})-(\\d{2})$") + if m then + date = luatz.timetable.new(tonumber(m[1]), tonumber(m[2]), tonumber(m[3]), 0, 0, 0) + end + end + + return date +end + local function parse_args() local parser = argparse("api-umbrella", "Open source API management") - parser:option("--input", "Input connection."):count(1) - parser:option("--output", "Output connection."):count(1) + parser:option("--input", "Input Elasticsearch database URL."):count(1) + parser:option("--output", "Output Elasticsearch database URL."):count(1) + parser:option("--start-date", "Migrate data starting at this date (YYYY-MM-DD format). Defaults to earliest data available from the input database."):count("0-1") + parser:option("--end-date", "Migrate data ending on this date (YYYY-MM-DD format). Defaults to current date."):count("0-1") + parser:flag("--debug", "Debug") local args = parser:parse() local input_uri, input_err = http:parse_uri(args["input"], false) if not input_uri then + print("--input could not be parsed. Elasticsearch URL expected.") print(input_err) os.exit(1) end local output_uri, output_err = http:parse_uri(args["output"], false) if not output_uri then - print(output_err) + print("--output could not be parsed. Elasticsearch URL expected.") + print(output_uri) os.exit(1) end @@ -61,7 +75,22 @@ local function parse_args() args["output_host"] = output_host args["output_port"] = output_port - --print(inspect(args)) + if args["start_date"] then + args["_start_date"] = parse_date(args["start_date"]) + if not args["_start_date"] then + print("--start-date could not be parsed. YYYY-MM-DD format expected.") + os.exit(1) + end + end + + if args["end_date"] then + args["_end_date"] = parse_date(args["end_date"]) + if not args["_end_date"] then + print("--start-date could not be parsed. YYYY-MM-DD format expected.") + os.exit(1) + end + end + return args end @@ -289,8 +318,12 @@ local function process_hit(hit, output_index) imported = source["imported"], } - --print(inspect(table_difference(source, new_source))) - --print(inspect(table_difference(new_source, source))) + if args["debug"] then + if #bulk_commands % 1000 == 0 then + print("DIFF - " .. inspect(table_difference(source, new_source))) + print("DIFF + " .. 
inspect(table_difference(new_source, source))) + end + end table.insert(bulk_commands, cjson_encode({ create = { @@ -379,12 +412,17 @@ local function search_day(date_start, date_end) end local function search() - --local date = v1_first_index_time(args) - --local end_date = luatz.now() - local date = luatz.timetable.new(2010, 8, 1, 0, 0, 0) - local end_date = luatz.timetable.new(2010, 9, 1, 0, 0, 0) - local date = luatz.timetable.new(2016, 12, 7, 0, 0, 0) - local end_date = luatz.timetable.new(2016, 12, 31, 0, 0, 0) + local start_date = args["_start_date"] + if not start_date then + start_date = v1_first_index_time(args) + end + + local end_date = args["_end_date"] + if not end_date then + end_date = luatz.now() + end + + local date = start_date while date:timestamp() <= end_date:timestamp() do next_day = date:clone() next_day["day"] = next_day["day"] + 1 @@ -392,14 +430,16 @@ local function search() search_day(date, next_day) - --print(date:rfc_3339()) - --print(date:timestamp()) date = next_day end end local function run() args = parse_args() + + elasticsearch_setup.wait_for_elasticsearch() + elasticsearch_setup.create_templates() + search() end diff --git a/templates/etc/perp/elasticsearch/rc.env.mustache b/templates/etc/perp/elasticsearch/rc.env.mustache index 523b536e6..526dcf463 100644 --- a/templates/etc/perp/elasticsearch/rc.env.mustache +++ b/templates/etc/perp/elasticsearch/rc.env.mustache @@ -1 +1,2 @@ -ES_JAVA_OPTS=-Xms{{elasticsearch.embedded_server_env.heap_size}} -Xmx{{elasticsearch.embedded_server_env.heap_size}} -Des.path.conf={{etc_dir}}/elasticsearch -Des.path.logs={{log_dir}}/elasticsearch -XX:-HeapDumpOnOutOfMemoryError {{elasticsearch.embedded_server_env.java_opts}} +ES_HEAP_SIZE={{elasticsearch.embedded_server_env.heap_size}} +ES_JAVA_OPTS=-Des.path.conf={{etc_dir}}/elasticsearch -Des.path.logs={{log_dir}}/elasticsearch -XX:-HeapDumpOnOutOfMemoryError {{elasticsearch.embedded_server_env.java_opts}} From cf542f537e06fed6acea30b0c054338238d758ea Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Thu, 12 Apr 2018 14:42:30 -0600 Subject: [PATCH 009/367] Fix broken analytics filtering on the request_method field. --- .../web-app/app/models/log_search/base.rb | 1 - test/apis/admin/stats/test_logs.rb | 22 +++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/src/api-umbrella/web-app/app/models/log_search/base.rb b/src/api-umbrella/web-app/app/models/log_search/base.rb index d10623cbf..6a0535818 100644 --- a/src/api-umbrella/web-app/app/models/log_search/base.rb +++ b/src/api-umbrella/web-app/app/models/log_search/base.rb @@ -8,7 +8,6 @@ class LogSearch::Base ].freeze UPPERCASE_FIELDS = [ - "request_method", "request_ip_country", "request_ip_region", ].freeze diff --git a/test/apis/admin/stats/test_logs.rb b/test/apis/admin/stats/test_logs.rb index f83969c9c..01c9cbf59 100644 --- a/test/apis/admin/stats/test_logs.rb +++ b/test/apis/admin/stats/test_logs.rb @@ -139,4 +139,26 @@ def test_query_builder_nulls assert_equal(1, data["recordsTotal"]) assert_equal("#{unique_test_id}-not-null", data["data"][0]["request_user_agent"]) end + + def test_query_builder_request_method + FactoryBot.create(:log_item, :request_at => Time.parse("2015-01-16T06:06:28.816Z").utc, :request_method => "POST", :request_user_agent => unique_test_id) + LogItem.gateway.refresh_index! 
+
+    response = Typhoeus.get("https://127.0.0.1:9081/admin/stats/logs.json", http_options.deep_merge(admin_session).deep_merge({
+      :params => {
+        "start_at" => "2015-01-13",
+        "end_at" => "2015-01-18",
+        "interval" => "day",
+        "start" => "0",
+        "length" => "10",
+        "query" => '{"condition":"AND","rules":[{"id":"request_method","field":"request_method","type":"string","input":"select","operator":"equal","value":"post"}]}',
+      },
+    }))
+
+    assert_response_code(200, response)
+    data = MultiJson.load(response.body)
+    assert_equal(1, data["recordsTotal"])
+    assert_equal("POST", data["data"][0]["request_method"])
+    assert_equal(unique_test_id, data["data"][0]["request_user_agent"])
+  end
 end

From d8e96be32c43fd9326bc30877d84b640ab7a9640 Mon Sep 17 00:00:00 2001
From: Nick Muerdter
Date: Thu, 12 Apr 2018 15:52:44 -0600
Subject: [PATCH 010/367] Update PCRE website URL.

---
 scripts/rake/outdated_packages.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/rake/outdated_packages.rb b/scripts/rake/outdated_packages.rb
index dc398c7cf..130c2027d 100644
--- a/scripts/rake/outdated_packages.rb
+++ b/scripts/rake/outdated_packages.rb
@@ -146,7 +146,7 @@ class OutdatedPackages
       :string_version => true,
     },
     "pcre" => {
-      :http => "http://ftp.csx.cam.ac.uk/pub/software/programming/pcre/",
+      :http => "https://ftp.pcre.org/pub/pcre/",
     },
     "perp" => {
       :http => "http://b0llix.net/perp/site.cgi?page=download",

From f88a2c020a0a341fc7dc5c2f6d3460df570a3f5b Mon Sep 17 00:00:00 2001
From: Nick Muerdter
Date: Thu, 12 Apr 2018 16:04:14 -0600
Subject: [PATCH 011/367] Fix possibility of Rails secret key not being set from config file.

Basically, the changes in c65ea2f746cbe5a015b03dadcd3d63034659be73 accidentally introduced an ordering issue when reading the config, so that if `cached_random_config_values.yml` contained a Rails secret token and any other cached values were being generated, then the cached values would always take precedence.

This fixes it by ensuring that any existing config always takes precedence over the cached config that's being generated.

https://github.com/18F/api.data.gov/issues/437
---
 src/api-umbrella/cli/read_config.lua     | 22 +++++++++++--------
 src/api-umbrella/utils/deep_defaults.lua | 28 ++++++++++++++++++++++++
 2 files changed, 41 insertions(+), 9 deletions(-)
 create mode 100644 src/api-umbrella/utils/deep_defaults.lua

diff --git a/src/api-umbrella/cli/read_config.lua b/src/api-umbrella/cli/read_config.lua
index bb944eb7e..252a2d1d1 100644
--- a/src/api-umbrella/cli/read_config.lua
+++ b/src/api-umbrella/cli/read_config.lua
@@ -1,5 +1,6 @@
 local array_includes = require "api-umbrella.utils.array_includes"
 local array_last = require "api-umbrella.utils.array_last"
+local deep_defaults = require "api-umbrella.utils.deep_defaults"
 local deep_merge_overwrite_arrays = require "api-umbrella.utils.deep_merge_overwrite_arrays"
 local dir = require "pl.dir"
 local file = require "pl.file"
@@ -376,28 +377,31 @@ local function set_cached_random_tokens()
   local cached = {}
   if content then
     cached = lyaml.load(content)
-    deep_merge_overwrite_arrays(config, cached)
+    deep_defaults(config, cached)
   end
 
   -- If the tokens haven't already been written to the cache, generate them.
if not config["web"]["rails_secret_token"] or not config["static_site"]["api_key"] then if not config["web"]["rails_secret_token"] then - cached["web"] = { - rails_secret_token = random_token(128), - } + deep_defaults(cached, { + web = { + rails_secret_token = random_token(128), + }, + }) end if not config["static_site"]["api_key"] then - cached["static_site"] = { - api_key = random_token(40), - } + deep_defaults(cached, { + static_site = { + api_key = random_token(40), + }, + }) end -- Persist the cached tokens. dir.makepath(config["run_dir"]) file.write(cached_path, lyaml.dump({ cached })) - - deep_merge_overwrite_arrays(config, cached) + deep_defaults(config, cached) end end end diff --git a/src/api-umbrella/utils/deep_defaults.lua b/src/api-umbrella/utils/deep_defaults.lua new file mode 100644 index 000000000..f76183839 --- /dev/null +++ b/src/api-umbrella/utils/deep_defaults.lua @@ -0,0 +1,28 @@ +local is_array = require "api-umbrella.utils.is_array" + +-- Like deep_merge_overwrite_arrays, but only assigns values from the source to +-- the destination if the destination is nil. So any existing values on the +-- destination object will be retained. +local function deep_defaults(dest, src) + if not src then return dest end + + for key, value in pairs(src) do + if type(value) == "table" and type(dest[key]) == "table" then + if is_array(value) or is_array(dest[key]) then + if dest[key] == nil then + dest[key] = value + end + else + deep_defaults(dest[key], src[key]) + end + else + if dest[key] == nil then + dest[key] = value + end + end + end + + return dest +end + +return deep_defaults From a7253421d8eebb160b23c1bd75b278d00f5d9dc3 Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Wed, 9 May 2018 00:42:59 -0600 Subject: [PATCH 012/367] Fix the possibility of the seed process generating duplicate API keys. Lua's math.randomseed doesn't really work effectively when just using the current time as the seed value, since duplicate processes starting up at the same time can lead to duplicate random values. I think this was primarily affecting the test and CI environment, where the seeding might happen multiple times in rapid succession. This fixes it by using cryptographically secure random data to generate the random tokens, which is better anyway (this was taken from some work on the postgres branch, where we had already made this switch in bdaafa522fbfd218dbd4c58142cede29a9d55508 as part of more widespread usage of this random_token method, besides just for the seeding process). 
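As a rough illustration of the seeding problem described above (a hypothetical standalone snippet, not code from this commit): any two Lua processes that seed with the current time during the same second walk through an identical math.random() sequence, so tokens derived from that sequence collide.

    -- Hypothetical repro: run this in two separate Lua processes within the
    -- same second and both print the same "random" sequence.
    math.randomseed(os.time())
    local sequence = {}
    for _ = 1, 5 do
      table.insert(sequence, math.random(1000000))
    end
    print(table.concat(sequence, " "))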
--- src/api-umbrella/utils/random_token.lua | 54 ++++++++++++++++--------- 1 file changed, 36 insertions(+), 18 deletions(-) diff --git a/src/api-umbrella/utils/random_token.lua b/src/api-umbrella/utils/random_token.lua index 94622a9d5..4d92ff283 100644 --- a/src/api-umbrella/utils/random_token.lua +++ b/src/api-umbrella/utils/random_token.lua @@ -1,24 +1,42 @@ -local alpha_numeric = { - "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", - "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", - "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", - "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", - "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" -} +local resty_random = require "resty.random" -local alpha_numeric_size = #alpha_numeric - -if ngx then - math.randomseed(ngx.time()) -else - math.randomseed(os.time()) -end +local encode_base64 = ngx.encode_base64 +local gsub = ngx.re.gsub +local random_bytes = resty_random.bytes return function(length) - local token = {} - for i = 1, length do - token[i] = alpha_numeric[math.random(alpha_numeric_size)] + local token = "" + -- Loop until we've generated a valid token. The basic process: + -- + -- 1. Generate secure random bytes. + -- 2. Convert random bytes to base64. + -- 3. Strip out special characters from base64 result, so we're left with + -- just alphanumerics. + -- + -- It should be extraordinarily rare that this needs to loop, but since we + -- strip out some of the special characters from the resulting base64 string, + -- this loops in case we strip more than expected. + while string.len(token) < length do + -- Attempt to generate cryptographically secure random bytes. We + -- purposefully generate more bytes than we need, since we'll be stripping + -- some of the base64 characters out. + local num_bytes = length + 10 + local strong_random = random_bytes(num_bytes, true) + if not strong_random then + ngx.log(ngx.WARN, "Could not generate cryptographically secure random data. Falling back to non-secure random data.") + strong_random = random_bytes(num_bytes, false) + end + + -- Encode with base64. + token = token .. encode_base64(strong_random) + + -- Strip +, /, and = out of the base64 result, since we just want a-z, A-Z, + -- and 0-9 in our tokens. + token = gsub(token, "[+/=]", "", "jo") + + -- Take just the number of characters requested. + token = string.sub(token, 1, length) end - return table.concat(token) + return token end From b57fcf9b37b4c538b7318cf89d9a4f4099abc9ce Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Tue, 8 May 2018 20:01:42 -0600 Subject: [PATCH 013/367] Fix running LDAP related tests when a different user is being used. The ldap database files weren't readable by the user the ldap process was running as. 
--- templates/etc/perp/test-env-openldap/rc.main.mustache | 3 +++ 1 file changed, 3 insertions(+) diff --git a/templates/etc/perp/test-env-openldap/rc.main.mustache b/templates/etc/perp/test-env-openldap/rc.main.mustache index 72390f7b2..88f185e88 100755 --- a/templates/etc/perp/test-env-openldap/rc.main.mustache +++ b/templates/etc/perp/test-env-openldap/rc.main.mustache @@ -26,6 +26,9 @@ if [ "${1}" = "start" ]; then mkdir -p "$db_dir" slapadd -F "$db_dir" -n 0 -l "$slapd_ldif_path" slapadd -F "$db_dir" -l "$seed_ldif_path" + if [ -n "$api_umbrella_user" ]; then + chown -R "$api_umbrella_user" "$db_dir" + fi exec runtool "${run_args[@]}" slapd -d 1 -h "$bind" -F "$db_dir" fi From 00fa8ef854bfcf77a672ecf8224dce5a4affe9b6 Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Tue, 8 May 2018 20:05:40 -0600 Subject: [PATCH 014/367] Shift admin login test helpers into a separate module for reuse. --- .../admin_ui/login/test_external_providers.rb | 70 +---------------- .../admin_ui_login.rb | 76 +++++++++++++++++++ 2 files changed, 77 insertions(+), 69 deletions(-) create mode 100644 test/support/api_umbrella_test_helpers/admin_ui_login.rb diff --git a/test/admin_ui/login/test_external_providers.rb b/test/admin_ui/login/test_external_providers.rb index c6f44a26a..84c897932 100644 --- a/test/admin_ui/login/test_external_providers.rb +++ b/test/admin_ui/login/test_external_providers.rb @@ -4,6 +4,7 @@ class Test::AdminUi::Login::TestExternalProviders < Minitest::Capybara::Test include Capybara::Screenshot::MiniTestPlugin include ApiUmbrellaTestHelpers::Setup include ApiUmbrellaTestHelpers::AdminAuth + include ApiUmbrellaTestHelpers::AdminUiLogin include Minitest::Hooks def setup @@ -190,73 +191,4 @@ def assert_login_unverified_email_login(options) assert_login_forbidden(options.fetch(:login_button_text), "not verified") end end - - def assert_login_permitted(login_button_text, admin) - visit "/admin/" - trigger_click_link(login_button_text) - assert_link("my_account_nav_link", :href => /#{admin.id}/, :visible => :all) - end - - def assert_login_forbidden(login_button_text, error_text) - visit "/admin/" - trigger_click_link(login_button_text) - assert_text(error_text) - refute_link("my_account_nav_link") - end - - def omniauth_base_data(options) - omniauth_base_data = LazyHash.build_hash - omniauth_base_data["provider"] = options.fetch(:provider).to_s - if(options[:verified_path]) - LazyHash.add(omniauth_base_data, options.fetch(:verified_path), true) - end - - if(options[:extra]) - omniauth_base_data.deep_merge!(options[:extra]) - end - - omniauth_base_data - end - - def mock_omniauth(omniauth_data) - # Reset the session and clear caches before setting our cookie. For some - # reason this seems necessary to ensure click_link always works correctly - # (otherwise, we sporadically get failures caused by the click_link on the - # login buttons not actually going anywhere). - # - # Possibly related: - # https://github.com/teampoltergeist/poltergeist/issues/814#issuecomment-248830334 - Capybara.reset_session! - page.driver.clear_memory_cache - - # Set a cookie to mock the OmniAuth responses. This relies on the - # TestMockOmniauth middleware we install into the Rails app during the test - # environment. This gives us a way to mock this data from outside the Rails - # test suite. 
- page.driver.set_cookie("test_mock_omniauth", Base64.urlsafe_encode64(MultiJson.dump(omniauth_data))) - yield - ensure - page.driver.remove_cookie("test_mock_omniauth") - end - - # When using "click_link" on the login buttons we rarely/sporadically see it - # fail to do anything. Capybara doesn't raise an error, so it thinks it's - # clicked the button, but nothing appears to happen. - # - # As a workaround, find the element and programmatically trigger a click - # event on it, which seems to be more reliable. - # - # See: https://github.com/teampoltergeist/poltergeist/issues/530 - # - # I think we've only seen this issue in these tests (and not in other parts - # of the admin app). My theory is that this might be due to the click event - # firing right as the stylesheets load, so the original location it - # calculated and then clicks ends up being incorrect once the stylesheets - # load. I'm not sure about this, but it might explain why it's only happening - # here, and not within the app (since within the app, all the javascript and - # stylesheets must be loaded first for there to be anything rendering on the - # page). - def trigger_click_link(selector) - find_link(selector).trigger("click") - end end diff --git a/test/support/api_umbrella_test_helpers/admin_ui_login.rb b/test/support/api_umbrella_test_helpers/admin_ui_login.rb new file mode 100644 index 000000000..e1ac6fde6 --- /dev/null +++ b/test/support/api_umbrella_test_helpers/admin_ui_login.rb @@ -0,0 +1,76 @@ +require "securerandom" + +module ApiUmbrellaTestHelpers + module AdminUiLogin + private + + def assert_login_permitted(login_button_text, admin) + visit "/admin/" + trigger_click_link(login_button_text) + assert_link("my_account_nav_link", :href => /#{admin.id}/, :visible => :all) + end + + def assert_login_forbidden(login_button_text, error_text) + visit "/admin/" + trigger_click_link(login_button_text) + assert_text(error_text) + refute_link("my_account_nav_link") + end + + def omniauth_base_data(options) + omniauth_base_data = LazyHash.build_hash + omniauth_base_data["provider"] = options.fetch(:provider).to_s + if(options[:verified_path]) + LazyHash.add(omniauth_base_data, options.fetch(:verified_path), true) + end + + if(options[:extra]) + omniauth_base_data.deep_merge!(options[:extra]) + end + + omniauth_base_data + end + + def mock_omniauth(omniauth_data) + # Reset the session and clear caches before setting our cookie. For some + # reason this seems necessary to ensure click_link always works correctly + # (otherwise, we sporadically get failures caused by the click_link on the + # login buttons not actually going anywhere). + # + # Possibly related: + # https://github.com/teampoltergeist/poltergeist/issues/814#issuecomment-248830334 + Capybara.reset_session! + page.driver.clear_memory_cache + + # Set a cookie to mock the OmniAuth responses. This relies on the + # TestMockOmniauth middleware we install into the Rails app during the test + # environment. This gives us a way to mock this data from outside the Rails + # test suite. + page.driver.set_cookie("test_mock_omniauth", Base64.urlsafe_encode64(MultiJson.dump(omniauth_data))) + yield + ensure + page.driver.remove_cookie("test_mock_omniauth") + end + + # When using "click_link" on the login buttons we rarely/sporadically see it + # fail to do anything. Capybara doesn't raise an error, so it thinks it's + # clicked the button, but nothing appears to happen. 
+ # + # As a workaround, find the element and programmatically trigger a click + # event on it, which seems to be more reliable. + # + # See: https://github.com/teampoltergeist/poltergeist/issues/530 + # + # I think we've only seen this issue in these tests (and not in other parts + # of the admin app). My theory is that this might be due to the click event + # firing right as the stylesheets load, so the original location it + # calculated and then clicks ends up being incorrect once the stylesheets + # load. I'm not sure about this, but it might explain why it's only happening + # here, and not within the app (since within the app, all the javascript and + # stylesheets must be loaded first for there to be anything rendering on the + # page). + def trigger_click_link(selector) + find_link(selector).trigger("click") + end + end +end From 469572c73991975b7d90b86e98eeb5a0e1b8528e Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Tue, 8 May 2018 20:12:09 -0600 Subject: [PATCH 015/367] Fix XSS on error messages in flash messages from external providers. --- .../admins/omniauth_callbacks_controller.rb | 3 + .../app/views/layouts/application.html.erb | 2 +- .../test_flash_messages_html_safety.rb | 135 ++++++++++++++++++ 3 files changed, 139 insertions(+), 1 deletion(-) create mode 100644 test/admin_ui/test_flash_messages_html_safety.rb diff --git a/src/api-umbrella/web-app/app/controllers/admin/admins/omniauth_callbacks_controller.rb b/src/api-umbrella/web-app/app/controllers/admin/admins/omniauth_callbacks_controller.rb index b35e63f13..632378214 100644 --- a/src/api-umbrella/web-app/app/controllers/admin/admins/omniauth_callbacks_controller.rb +++ b/src/api-umbrella/web-app/app/controllers/admin/admins/omniauth_callbacks_controller.rb @@ -98,6 +98,7 @@ def login ActionController::Base.helpers.content_tag(:a, "contact us", :href => ApiUmbrellaConfig[:contact_url]), " for further assistance.", ]) + flash[:html_safe] = true redirect_to new_admin_session_path end @@ -111,6 +112,7 @@ def email_unverified_error ActionController::Base.helpers.content_tag(:a, "contact us", :href => ApiUmbrellaConfig[:contact_url]), " for further assistance.", ]) + flash[:html_safe] = true redirect_to new_admin_session_path end @@ -121,6 +123,7 @@ def mfa_required_error ActionController::Base.helpers.content_tag(:a, "contact us", :href => ApiUmbrellaConfig[:contact_url]), " for further assistance.", ]) + flash[:html_safe] = true redirect_to new_admin_session_path end diff --git a/src/api-umbrella/web-app/app/views/layouts/application.html.erb b/src/api-umbrella/web-app/app/views/layouts/application.html.erb index 69a8379f2..3881e3969 100644 --- a/src/api-umbrella/web-app/app/views/layouts/application.html.erb +++ b/src/api-umbrella/web-app/app/views/layouts/application.html.erb @@ -12,7 +12,7 @@ <% flash.each do |flash_type, message| %> <% next unless(message.kind_of?(String)) %>
- <%= message.html_safe %> + <%= if(flash[:html_safe]) then message.html_safe else message end %>
<% end %> diff --git a/test/admin_ui/test_flash_messages_html_safety.rb b/test/admin_ui/test_flash_messages_html_safety.rb new file mode 100644 index 000000000..f9b2021ea --- /dev/null +++ b/test/admin_ui/test_flash_messages_html_safety.rb @@ -0,0 +1,135 @@ +require_relative "../test_helper" + +class Test::AdminUi::TestFlashMessagesHtmlSafety < Minitest::Capybara::Test + include Capybara::Screenshot::MiniTestPlugin + include ApiUmbrellaTestHelpers::Setup + include ApiUmbrellaTestHelpers::AdminAuth + include ApiUmbrellaTestHelpers::AdminUiLogin + include Minitest::Hooks + + def setup + super + setup_server + once_per_class_setup do + override_config_set({ + # While the contact URL should be trusted, since it's configured by an + # admin, still test with special character to ensure it's escaped + # properly. + "contact_url" => "https://example.com/contact/?q='\">", + "web" => { + "admin" => { + "auth_strategies" => { + "enabled" => [ + "google", + "max.gov", + ], + "max.gov" => { + "require_mfa" => true, + }, + }, + }, + }, + }, ["--router", "--web"]) + end + end + + def after_all + super + override_config_reset(["--router", "--web"]) + end + + def test_unverified_html_message + omniauth_data = { + "provider" => "google_oauth2", + "info" => { + "email" => "unverified@example.com", + }, + "extra" => { + "raw_info" => { + "email_verified" => false, + }, + }, + } + + mock_omniauth(omniauth_data) do + assert_login_forbidden("Sign in with Google", "not verified") + assert_match("The email address 'unverified@example.com' is not verified. Please contact us for further assistance.", page.body) + end + end + + def test_unverified_html_message_with_xss_email + omniauth_data = { + "provider" => "google_oauth2", + "info" => { + "email" => "'\">", + }, + "extra" => { + "raw_info" => { + "email_verified" => false, + }, + }, + } + + mock_omniauth(omniauth_data) do + assert_login_forbidden("Sign in with Google", "not verified") + assert_match("The email address ''\"><script>alert('hello')</script>' is not verified. Please contact us for further assistance.", page.body) + end + end + + def test_nonexistent_html_message + omniauth_data = { + "provider" => "google_oauth2", + "info" => { + "email" => "noadmin@example.com", + }, + "extra" => { + "raw_info" => { + "email_verified" => true, + }, + }, + } + + mock_omniauth(omniauth_data) do + assert_login_forbidden("Sign in with Google", "not authorized") + assert_match("The account for 'noadmin@example.com' is not authorized to access the admin. Please contact us for further assistance.", page.body) + end + end + + def test_nonexistent_html_message_with_xss_email + omniauth_data = { + "provider" => "google_oauth2", + "info" => { + "email" => "'\">", + }, + "extra" => { + "raw_info" => { + "email_verified" => true, + }, + }, + } + + mock_omniauth(omniauth_data) do + assert_login_forbidden("Sign in with Google", "not authorized") + assert_match("The account for ''\"><script>alert('hello')</script>' is not authorized to access the admin. Please contact us for further assistance.", page.body) + end + end + + def test_mfa_required_html_message + omniauth_data = { + "provider" => "cas", + "info" => { + "email" => "noadmin@example.com", + }, + } + + mock_omniauth(omniauth_data) do + assert_login_forbidden("Sign in with MAX.gov", "must use multi-factor") + assert_match("You must use multi-factor authentication to sign in. 
Please try again, or contact us for further assistance.", page.body) + end + end + + def test_error_message_from_external_provider + visit "/admins/auth/google_oauth2/callback?error='\">" + assert_match("Could not authenticate you from GoogleOauth2 because \"'\"><script>confirm(document.domain)</script>\".", page.body) + end +end From 9091de98edf40920733cfc91e7101456a9d604fd Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Wed, 9 May 2018 21:51:13 -0600 Subject: [PATCH 016/367] Fix limited admin accounts not being able to publish website backends. Pending changes for website backends weren't being returned to admin accounts with limited scopes (only authorized on specific domains). The reason was that we were inadvertently applying the API backend policy scope to this permission check instead of the website backend policy (so the API backend policy would never consider the website backend authorized). This fixes that issue and adds more test coverage for the admin permissions surrounding website backends in the config publishing process. --- .../web-app/app/models/config_version.rb | 6 +- .../app/policies/website_backend_policy.rb | 9 +- ...pending_changes_admin_permissions_apis.rb} | 2 +- ...ding_changes_admin_permissions_websites.rb | 74 +++++++++++ ...=> test_publish_admin_permissions_apis.rb} | 2 +- ...test_publish_admin_permissions_websites.rb | 121 ++++++++++++++++++ test/factories/admin_groups.rb | 4 + test/factories/api_scopes.rb | 5 + test/factories/website_backends.rb | 4 + test/support/models/config_version.rb | 1 + 10 files changed, 223 insertions(+), 5 deletions(-) rename test/apis/v1/config/{test_pending_changes_admin_permissions.rb => test_pending_changes_admin_permissions_apis.rb} (99%) create mode 100644 test/apis/v1/config/test_pending_changes_admin_permissions_websites.rb rename test/apis/v1/config/{test_publish_admin_permissions.rb => test_publish_admin_permissions_apis.rb} (98%) create mode 100644 test/apis/v1/config/test_publish_admin_permissions_websites.rb diff --git a/src/api-umbrella/web-app/app/models/config_version.rb b/src/api-umbrella/web-app/app/models/config_version.rb index 3aa9a7a45..4de8d443d 100644 --- a/src/api-umbrella/web-app/app/models/config_version.rb +++ b/src/api-umbrella/web-app/app/models/config_version.rb @@ -70,7 +70,11 @@ def self.pending_changes(current_admin) end if(current_admin) - pending_records = ApiPolicy::Scope.new(current_admin, pending_records).resolve("backend_publish") + if(category == "website_backends") + pending_records = WebsiteBackendPolicy::Scope.new(current_admin, pending_records).resolve("backend_publish") + else + pending_records = ApiPolicy::Scope.new(current_admin, pending_records).resolve("backend_publish") + end end pending_records = pending_records.sorted.all diff --git a/src/api-umbrella/web-app/app/policies/website_backend_policy.rb b/src/api-umbrella/web-app/app/policies/website_backend_policy.rb index 8ad5eb1d2..fcce64cf4 100644 --- a/src/api-umbrella/web-app/app/policies/website_backend_policy.rb +++ b/src/api-umbrella/web-app/app/policies/website_backend_policy.rb @@ -1,10 +1,15 @@ class WebsiteBackendPolicy < ApplicationPolicy class Scope < Scope - def resolve + def resolve(permission = "backend_manage") if(user.superuser?) 
scope.all else - api_scopes = user.api_scopes_with_permission("backend_manage") + api_scopes = [] + if(permission == :any) + api_scopes = user.api_scopes + else + api_scopes = user.api_scopes_with_permission(permission) + end query_scopes = [] api_scopes.each do |api_scope| diff --git a/test/apis/v1/config/test_pending_changes_admin_permissions.rb b/test/apis/v1/config/test_pending_changes_admin_permissions_apis.rb similarity index 99% rename from test/apis/v1/config/test_pending_changes_admin_permissions.rb rename to test/apis/v1/config/test_pending_changes_admin_permissions_apis.rb index 6aa16c51f..b82e36057 100644 --- a/test/apis/v1/config/test_pending_changes_admin_permissions.rb +++ b/test/apis/v1/config/test_pending_changes_admin_permissions_apis.rb @@ -1,6 +1,6 @@ require_relative "../../../test_helper" -class Test::Apis::V1::Config::TestPendingChangesAdminPermissions < Minitest::Test +class Test::Apis::V1::Config::TestPendingChangesAdminPermissionsApis < Minitest::Test include ApiUmbrellaTestHelpers::AdminAuth include ApiUmbrellaTestHelpers::Setup include Minitest::Hooks diff --git a/test/apis/v1/config/test_pending_changes_admin_permissions_websites.rb b/test/apis/v1/config/test_pending_changes_admin_permissions_websites.rb new file mode 100644 index 000000000..ec2d22ff0 --- /dev/null +++ b/test/apis/v1/config/test_pending_changes_admin_permissions_websites.rb @@ -0,0 +1,74 @@ +require_relative "../../../test_helper" + +class Test::Apis::V1::Config::TestPendingChangesAdminPermissionsWebsites < Minitest::Test + include ApiUmbrellaTestHelpers::AdminAuth + include ApiUmbrellaTestHelpers::Setup + include Minitest::Hooks + + def setup + super + setup_server + Api.delete_all + WebsiteBackend.delete_all + ConfigVersion.delete_all + + @localhost_website = FactoryBot.create(:website_backend) + @example_com_website = FactoryBot.create(:example_com_website_backend) + ConfigVersion.publish!(ConfigVersion.pending_config) + end + + def after_all + super + default_config_version_needed + end + + def test_all_websites_for_superuser + response = Typhoeus.get("https://127.0.0.1:9081/api-umbrella/v1/config/pending_changes.json", http_options.deep_merge(admin_token)) + + assert_response_code(200, response) + data = MultiJson.load(response.body) + website_ids = data["config"]["website_backends"]["identical"].map { |website| website["pending"]["_id"] } + assert_includes(website_ids, @localhost_website.id) + assert_includes(website_ids, @example_com_website.id) + end + + def test_permitted_websites_for_limited_admin + localhost_admin = FactoryBot.create(:limited_admin, :groups => [FactoryBot.create(:localhost_root_admin_group, :backend_publish_permission)]) + response = Typhoeus.get("https://127.0.0.1:9081/api-umbrella/v1/config/pending_changes.json", http_options.deep_merge(admin_token(localhost_admin))) + + assert_response_code(200, response) + data = MultiJson.load(response.body) + website_ids = data["config"]["website_backends"]["identical"].map { |website| website["pending"]["_id"] } + assert_includes(website_ids, @localhost_website.id) + end + + def test_excludes_forbidden_websites + localhost_admin = FactoryBot.create(:limited_admin, :groups => [FactoryBot.create(:localhost_root_admin_group, :backend_publish_permission)]) + response = Typhoeus.get("https://127.0.0.1:9081/api-umbrella/v1/config/pending_changes.json", http_options.deep_merge(admin_token(localhost_admin))) + + assert_response_code(200, response) + data = MultiJson.load(response.body) + website_ids = 
data["config"]["website_backends"]["identical"].map { |website| website["pending"]["_id"] } + refute_includes(website_ids, @example_com_website.id) + end + + def test_exclude_websites_without_publish_permission + unauthorized_localhost_admin = FactoryBot.create(:limited_admin, :groups => [FactoryBot.create(:localhost_root_admin_group, :backend_manage_permission)]) + response = Typhoeus.get("https://127.0.0.1:9081/api-umbrella/v1/config/pending_changes.json", http_options.deep_merge(admin_token(unauthorized_localhost_admin))) + + assert_response_code(200, response) + data = MultiJson.load(response.body) + website_ids = data["config"]["website_backends"]["identical"].map { |website| website["pending"]["_id"] } + assert_equal(0, website_ids.length) + end + + def test_excludes_admins_without_root_url_permissions + localhost_admin = FactoryBot.create(:limited_admin, :groups => [FactoryBot.create(:google_admin_group, :backend_publish_permission)]) + response = Typhoeus.get("https://127.0.0.1:9081/api-umbrella/v1/config/pending_changes.json", http_options.deep_merge(admin_token(localhost_admin))) + + assert_response_code(200, response) + data = MultiJson.load(response.body) + website_ids = data["config"]["website_backends"]["identical"].map { |website| website["pending"]["_id"] } + assert_equal(0, website_ids.length) + end +end diff --git a/test/apis/v1/config/test_publish_admin_permissions.rb b/test/apis/v1/config/test_publish_admin_permissions_apis.rb similarity index 98% rename from test/apis/v1/config/test_publish_admin_permissions.rb rename to test/apis/v1/config/test_publish_admin_permissions_apis.rb index b0ebb86dd..b5f621db6 100644 --- a/test/apis/v1/config/test_publish_admin_permissions.rb +++ b/test/apis/v1/config/test_publish_admin_permissions_apis.rb @@ -1,6 +1,6 @@ require_relative "../../../test_helper" -class Test::Apis::V1::Config::TestPublishAdminPermissions < Minitest::Test +class Test::Apis::V1::Config::TestPublishAdminPermissionsApis < Minitest::Test include ApiUmbrellaTestHelpers::AdminAuth include ApiUmbrellaTestHelpers::Setup include Minitest::Hooks diff --git a/test/apis/v1/config/test_publish_admin_permissions_websites.rb b/test/apis/v1/config/test_publish_admin_permissions_websites.rb new file mode 100644 index 000000000..7e5050b8e --- /dev/null +++ b/test/apis/v1/config/test_publish_admin_permissions_websites.rb @@ -0,0 +1,121 @@ +require_relative "../../../test_helper" + +class Test::Apis::V1::Config::TestPublishAdminPermissionsWebsites < Minitest::Test + include ApiUmbrellaTestHelpers::AdminAuth + include ApiUmbrellaTestHelpers::Setup + include Minitest::Hooks + + def setup + super + setup_server + Api.delete_all + WebsiteBackend.delete_all + ConfigVersion.delete_all + + @localhost_website = FactoryBot.create(:website_backend) + @example_com_website = FactoryBot.create(:example_com_website_backend) + end + + def after_all + super + default_config_version_needed + end + + def test_superusers_publish_anything + config = { + :website_backends => { + @localhost_website.id => { :publish => "1" }, + @example_com_website.id => { :publish => "1" }, + }, + } + + response = Typhoeus.post("https://127.0.0.1:9081/api-umbrella/v1/config/publish.json", http_options.deep_merge(admin_token).deep_merge({ + :headers => { "Content-Type" => "application/x-www-form-urlencoded" }, + :body => { :config => config }, + })) + + assert_response_code(201, response) + active_config = ConfigVersion.active_config + assert_equal(2, active_config["website_backends"].length) + assert_equal([ + 
@localhost_website.id, + @example_com_website.id, + ].sort, active_config["website_backends"].map { |api| api["_id"] }.sort) + end + + def test_allow_limited_admins_publish_permitted_websites + config = { + :website_backends => { + @localhost_website.id => { :publish => "1" }, + }, + } + + localhost_admin = FactoryBot.create(:limited_admin, :groups => [FactoryBot.create(:localhost_root_admin_group, :backend_publish_permission)]) + response = Typhoeus.post("https://127.0.0.1:9081/api-umbrella/v1/config/publish.json", http_options.deep_merge(admin_token(localhost_admin)).deep_merge({ + :headers => { "Content-Type" => "application/x-www-form-urlencoded" }, + :body => { :config => config }, + })) + + assert_response_code(201, response) + active_config = ConfigVersion.active_config + assert_equal(1, active_config["website_backends"].length) + assert_equal(@localhost_website.id, active_config["website_backends"].first["_id"]) + end + + def test_reject_limited_admins_publish_forbidden_website_backends + config = { + :website_backends => { + @example_com_website.id => { :publish => "1" }, + }, + } + + localhost_admin = FactoryBot.create(:limited_admin, :groups => [FactoryBot.create(:localhost_root_admin_group, :backend_publish_permission)]) + response = Typhoeus.post("https://127.0.0.1:9081/api-umbrella/v1/config/publish.json", http_options.deep_merge(admin_token(localhost_admin)).deep_merge({ + :headers => { "Content-Type" => "application/x-www-form-urlencoded" }, + :body => { :config => config }, + })) + + assert_response_code(403, response) + data = MultiJson.load(response.body) + assert_equal(["errors"], data.keys) + assert_nil(ConfigVersion.active_config) + end + + def test_reject_limited_admins_without_publish_permission + config = { + :website_backends => { + @localhost_website.id => { :publish => "1" }, + }, + } + + unauthorized_localhost_admin = FactoryBot.create(:limited_admin, :groups => [FactoryBot.create(:localhost_root_admin_group, :backend_manage_permission)]) + response = Typhoeus.post("https://127.0.0.1:9081/api-umbrella/v1/config/publish.json", http_options.deep_merge(admin_token(unauthorized_localhost_admin)).deep_merge({ + :headers => { "Content-Type" => "application/x-www-form-urlencoded" }, + :body => { :config => config }, + })) + + assert_response_code(403, response) + data = MultiJson.load(response.body) + assert_equal(["errors"], data.keys) + assert_nil(ConfigVersion.active_config) + end + + def test_reject_limited_admins_without_root_url_permission + config = { + :website_backends => { + @localhost_website.id => { :publish => "1" }, + }, + } + + localhost_admin = FactoryBot.create(:limited_admin, :groups => [FactoryBot.create(:google_admin_group, :backend_publish_permission)]) + response = Typhoeus.post("https://127.0.0.1:9081/api-umbrella/v1/config/publish.json", http_options.deep_merge(admin_token(localhost_admin)).deep_merge({ + :headers => { "Content-Type" => "application/x-www-form-urlencoded" }, + :body => { :config => config }, + })) + + assert_response_code(403, response) + data = MultiJson.load(response.body) + assert_equal(["errors"], data.keys) + assert_nil(ConfigVersion.active_config) + end +end diff --git a/test/factories/admin_groups.rb b/test/factories/admin_groups.rb index d703265e5..bd5083974 100644 --- a/test/factories/admin_groups.rb +++ b/test/factories/admin_groups.rb @@ -77,5 +77,9 @@ ] end end + + factory :example_com_admin_group do + api_scopes { [ApiScope.find_or_create_by_instance!(FactoryBot.build(:example_com_root_api_scope))] } + end end end 
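For illustration, here is a minimal sketch of how the new example_com_* factories in this patch are intended to tie into the permission-scoped WebsiteBackendPolicy change above. This snippet is not part of the diff; it assumes the existing :limited_admin factory and the :backend_publish_permission trait apply to the new :example_com_admin_group factory the same way they do to :localhost_root_admin_group in the tests earlier in this patch.

  # Hypothetical wiring of the new factories: a limited admin whose only
  # API scope is the example.com root should see the example.com website
  # backend when the policy scope is resolved with the publish permission.
  admin = FactoryBot.create(:limited_admin, :groups => [
    FactoryBot.create(:example_com_admin_group, :backend_publish_permission),
  ])
  website = FactoryBot.create(:example_com_website_backend)

  permitted = WebsiteBackendPolicy::Scope.new(admin, WebsiteBackend.all).resolve("backend_publish")
  # `permitted` should include `website`, because the admin's API scope
  # covers host "example.com" with the root path prefix, and pending website
  # backend changes are now resolved against the website backend policy
  # rather than the API backend policy.
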
diff --git a/test/factories/api_scopes.rb b/test/factories/api_scopes.rb index dce948fa6..06e2ff1c5 100644 --- a/test/factories/api_scopes.rb +++ b/test/factories/api_scopes.rb @@ -35,5 +35,10 @@ factory :bing_maps_api_scope do path_prefix "/bing/maps" end + + factory :example_com_root_api_scope do + host "example.com" + path_prefix "/" + end end end diff --git a/test/factories/website_backends.rb b/test/factories/website_backends.rb index a7ee19387..b9410ec5d 100644 --- a/test/factories/website_backends.rb +++ b/test/factories/website_backends.rb @@ -4,5 +4,9 @@ backend_protocol "http" server_host "example.com" server_port 80 + + factory :example_com_website_backend do + frontend_host "example.com" + end end end diff --git a/test/support/models/config_version.rb b/test/support/models/config_version.rb index ddbada456..73d3683f9 100644 --- a/test/support/models/config_version.rb +++ b/test/support/models/config_version.rb @@ -23,6 +23,7 @@ def self.active_config def self.pending_config { "apis" => Api.order_by(:sort_order.asc).all.map { |api| Hash[api.attributes] }, + "website_backends" => WebsiteBackend.order_by(:frontend_host.asc).all.map { |api| Hash[api.attributes] }, } end From b43f0ca1dc567c63d8d240f46f86cbc3dfcf1f7d Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Fri, 11 May 2018 23:12:12 -0600 Subject: [PATCH 017/367] Update dependencies. --- Gemfile | 13 +- Gemfile.lock | 56 +- build/cmake/core-lua-deps.cmake | 2 - build/cmake/versions.cmake | 32 +- scripts/rake/outdated_packages.rb | 3 - src/api-umbrella/admin-ui/package.json | 2 +- src/api-umbrella/admin-ui/yarn.lock | 975 +++++++++--------- src/api-umbrella/web-app/Gemfile | 8 +- src/api-umbrella/web-app/Gemfile.lock | 42 +- .../20131127185950_fix_custom_rate_limits.rb | 2 +- .../perp/test-env-openldap/rc.main.mustache | 7 +- .../replica-set.json.mustache | 5 +- test/support/capybara.rb | 27 +- 13 files changed, 581 insertions(+), 593 deletions(-) diff --git a/Gemfile b/Gemfile index 3e6e841d8..4c1690ec9 100644 --- a/Gemfile +++ b/Gemfile @@ -18,7 +18,7 @@ gem "minitest-hooks", "~> 1.4.2" gem "minitest-ci", "~> 3.4.0" # Ruby lint/style checker -gem "rubocop", "~> 0.54.0", :require => false +gem "rubocop", "~> 0.55.0", :require => false # Running background processes gem "childprocess", "~> 0.9.0" @@ -28,7 +28,7 @@ gem "typhoeus", "~> 1.3.0" # JSON parsing gem "multi_json", "~> 1.13.1" -gem "oj", "~> 3.5.0" +gem "oj", "~> 3.6.0" # Database libraries gem "mongoid", "~> 6.3.0" @@ -39,7 +39,7 @@ gem "elasticsearch-persistence", "~> 0.1.9" gem "factory_bot", "~> 4.8.2" # Deleting database data between tests. -gem "database_cleaner", "~> 1.6.2" +gem "database_cleaner", "~> 1.7.0" # Programmatically generate Rails session cookies. 
gem "rails_compatible_cookies_utils", "~> 0.1.0" @@ -48,13 +48,10 @@ gem "rails_compatible_cookies_utils", "~> 0.1.0" gem "addressable", "~> 2.5.2" # Browser/JavaScript integration tests -gem "capybara", "~> 2.18.0" -# Use fork to fix failure messages: -# https://github.com/wojtekmach/minitest-capybara/pull/17 -gem "minitest-capybara", "~> 0.8.2", :git => "https://github.com/GUI/minitest-capybara.git" +gem "capybara", "~> 3.1.0" # Webkit-based driver for capybara -gem "poltergeist", "~> 1.17.0" +gem "poltergeist", "~> 1.18.0" # Take screenshots on capybara test failures gem "capybara-screenshot", "~> 1.0.14" diff --git a/Gemfile.lock b/Gemfile.lock index aec750199..e47a039f2 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,12 +1,3 @@ -GIT - remote: https://github.com/GUI/minitest-capybara.git - revision: 74743d1472891da8636d773557297495cf8d6a77 - specs: - minitest-capybara (0.8.2) - capybara (~> 2.2) - minitest (~> 5.0) - rake - GEM remote: https://rubygems.org/ specs: @@ -29,15 +20,15 @@ GEM bcrypt (3.1.11) bson (4.3.0) builder (3.2.3) - capybara (2.18.0) + capybara (3.1.0) addressable mini_mime (>= 0.1.3) - nokogiri (>= 1.3.3) - rack (>= 1.0.0) - rack-test (>= 0.5.4) - xpath (>= 2.0, < 4.0) - capybara-screenshot (1.0.18) - capybara (>= 1.0, < 3) + nokogiri (~> 1.8) + rack (>= 1.6.0) + rack-test (>= 0.6.3) + xpath (~> 3.0) + capybara-screenshot (1.0.21) + capybara (>= 1.0, < 4) launchy childprocess (0.9.0) ffi (~> 1.0, >= 1.0.11) @@ -45,7 +36,7 @@ GEM coercible (1.0.0) descendants_tracker (~> 0.0.1) concurrent-ruby (1.0.5) - database_cleaner (1.6.2) + database_cleaner (1.7.0) descendants_tracker (0.0.4) thread_safe (~> 0.3, >= 0.3.1) elasticsearch (2.0.2) @@ -74,11 +65,11 @@ GEM activesupport (>= 3.0.0) faker (1.8.7) i18n (>= 0.7) - faraday (0.14.0) + faraday (0.15.1) multipart-post (>= 1.2, < 3) ffi (1.9.23) hashie (3.5.7) - i18n (1.0.0) + i18n (1.0.1) concurrent-ruby (~> 1.0) ice_nine (0.11.2) launchy (2.4.3) @@ -97,7 +88,7 @@ GEM ruby-progressbar minitest-sprint (1.2.0) path_expander (~> 1.0) - mongo (2.5.1) + mongo (2.5.3) bson (>= 4.3.0, < 5.0.0) mongoid (6.3.0) activemodel (~> 5.1) @@ -106,24 +97,24 @@ GEM multipart-post (2.0.0) nokogiri (1.8.2) mini_portile2 (~> 2.3.0) - oj (3.5.0) + oj (3.6.0) parallel (1.12.1) - parser (2.5.0.5) + parser (2.5.1.0) ast (~> 2.4.0) path_expander (1.0.3) - poltergeist (1.17.0) - capybara (~> 2.1) + poltergeist (1.18.0) + capybara (>= 2.1, < 4) cliver (~> 0.3.1) websocket-driver (>= 0.2.0) powerpack (0.1.1) public_suffix (3.0.2) - rack (2.0.4) + rack (2.0.5) rack-test (1.0.0) rack (>= 1.0, < 3) rails_compatible_cookies_utils (0.1.2) rainbow (3.0.0) rake (12.3.1) - rubocop (0.54.0) + rubocop (0.55.0) parallel (~> 1.10) parser (>= 2.5) powerpack (~> 0.1) @@ -136,7 +127,7 @@ GEM ethon (>= 0.9.0) tzinfo (1.2.5) thread_safe (~> 0.1) - unicode-display_width (1.3.0) + unicode-display_width (1.3.2) virtus (1.0.5) axiom-types (~> 0.1) coercible (~> 1.0) @@ -157,18 +148,17 @@ DEPENDENCIES addressable (~> 2.5.2) awesome_print (~> 1.8.0) bcrypt (~> 3.1.11) - capybara (~> 2.18.0) + capybara (~> 3.1.0) capybara-screenshot (~> 1.0.14) childprocess (~> 0.9.0) concurrent-ruby (~> 1.0.5) - database_cleaner (~> 1.6.2) + database_cleaner (~> 1.7.0) elasticsearch (~> 2.0.2) elasticsearch-persistence (~> 0.1.9) factory_bot (~> 4.8.2) faker (~> 1.8.7) lazyhash (~> 0.1.1) minitest (~> 5.11.3) - minitest-capybara (~> 0.8.2)! 
minitest-ci (~> 3.4.0) minitest-hooks (~> 1.4.2) minitest-reporters (~> 1.2.0) @@ -176,12 +166,12 @@ DEPENDENCIES mongoid (~> 6.3.0) multi_json (~> 1.13.1) nokogiri (~> 1.8.1) - oj (~> 3.5.0) - poltergeist (~> 1.17.0) + oj (~> 3.6.0) + poltergeist (~> 1.18.0) rails_compatible_cookies_utils (~> 0.1.0) rainbow (~> 3.0.0) rake (~> 12.3.1) - rubocop (~> 0.54.0) + rubocop (~> 0.55.0) typhoeus (~> 1.3.0) zonebie (~> 0.6.1) diff --git a/build/cmake/core-lua-deps.cmake b/build/cmake/core-lua-deps.cmake index b59ea8ee6..0808afc65 100644 --- a/build/cmake/core-lua-deps.cmake +++ b/build/cmake/core-lua-deps.cmake @@ -6,7 +6,6 @@ luarocks_install(inspect ${LUAROCK_INSPECT_VERSION} ${LUAROCK_INSPECT_HASH}) luarocks_install(libcidr-ffi ${LUAROCK_LIBCIDR_VERSION} ${LUAROCK_LIBCIDR_HASH} CIDR_DIR=${LUA_PREFIX} libcidr) luarocks_install(lua-cmsgpack ${LUAROCK_CMSGPACK_VERSION} ${LUAROCK_CMSGPACK_HASH}) luarocks_install(lua-iconv ${LUAROCK_ICONV_VERSION} ${LUAROCK_ICONV_HASH}) -luarocks_install(lua-resty-auto-ssl ${LUAROCK_RESTY_AUTO_SSL_VERSION} ${LUAROCK_RESTY_AUTO_SSL_HASH}) luarocks_install(lua-resty-http ${LUAROCK_RESTY_HTTP_VERSION} ${LUAROCK_RESTY_HTTP_HASH}) luarocks_install(lua-resty-uuid ${LUAROCK_RESTY_UUID_VERSION} ${LUAROCK_RESTY_UUID_HASH}) luarocks_install(luaposix ${LUAROCK_LUAPOSIX_VERSION} ${LUAROCK_LUAPOSIX_HASH}) @@ -73,7 +72,6 @@ set( luarock_libcidr-ffi luarock_lua-cmsgpack luarock_lua-iconv - luarock_lua-resty-auto-ssl luarock_lua-resty-http luarock_lua-resty-uuid luarock_luaposix diff --git a/build/cmake/versions.cmake b/build/cmake/versions.cmake index b7e5904d5..8632802ff 100644 --- a/build/cmake/versions.cmake +++ b/build/cmake/versions.cmake @@ -7,8 +7,8 @@ set(ELASTICSEARCH_VERSION 2.4.6) set(ELASTICSEARCH_HASH c3441bef89cd91206edf3cf3bd5c4b62550e60a9) set(FLUME_VERSION 1.7.0) set(FLUME_HASH 12496e632a96d7ca823ab3c239a2a7d2) -set(GOLANG_VERSION 1.8.3) -set(GOLANG_HASH 1862f4c3d3907e59b04a757cfda0ea7aa9ef39274af99a784f5be843c80c6772) +set(GOLANG_VERSION 1.10.2) +set(GOLANG_HASH 4b677d698c65370afa33757b6954ade60347aaca310ea92a63ed717d7cb0c2ff) set(KYLIN_VERSION 1.6.0) set(KYLIN_HASH 3f15f35c5ad7168ab401858dd8759dee) set(LIBCIDR_VERSION 1.2.3) @@ -25,8 +25,8 @@ set(LIBRDKAFKA_VERSION 0.9.5) set(LIBRDKAFKA_HASH 45bc9713bd4ed948e1efbd62688fc502) set(LUAROCKS_VERSION 2.4.4) set(LUAROCKS_HASH 04e8b19d565e86b1d08f745adc4b1a56) -set(LUAROCK_ARGPARSE_VERSION 0.5.0-1) -set(LUAROCK_ARGPARSE_HASH 02db647a5521809390b101a741cca4ff) +set(LUAROCK_ARGPARSE_VERSION 0.6.0-1) +set(LUAROCK_ARGPARSE_HASH 6656139dd66430075aa2093556857a84) set(LUAROCK_CMSGPACK_VERSION 0.4.0-0) set(LUAROCK_CMSGPACK_HASH f459d16fffdbbc85e582803321b3cec9) set(LUAROCK_ICONV_VERSION 7-3) @@ -35,8 +35,8 @@ set(LUAROCK_INSPECT_VERSION 3.1.1-0) set(LUAROCK_INSPECT_HASH 8a8a05f10b07a603e44e4f8b39bddd35) set(LUAROCK_LIBCIDR_VERSION 0.1.2-1) set(LUAROCK_LIBCIDR_HASH b6bdc9431cb488de8b58a83117107f7a) -set(LUAROCK_LUACHECK_VERSION 0.21.2-1) -set(LUAROCK_LUACHECK_HASH 2db2625f0c0008cfa1910b2b75926231) +set(LUAROCK_LUACHECK_VERSION 0.22.0-1) +set(LUAROCK_LUACHECK_HASH 17608776f5d37ca898f96f4973b3be0e) set(LUAROCK_LUAPOSIX_VERSION 34.0.4-1) set(LUAROCK_LUAPOSIX_HASH e584252902055ee40f250a1a304ec18e) set(LUAROCK_LUATZ_VERSION 0.4-1) @@ -47,8 +47,6 @@ set(LUAROCK_LYAML_VERSION 6.2.2-1) set(LUAROCK_LYAML_HASH d8c8c11db09bfc3f82838d0195d7cf04) set(LUAROCK_PENLIGHT_VERSION 1.5.4-1) set(LUAROCK_PENLIGHT_HASH 8f4e6b4c7e851c28cb3e95be728d6507) -set(LUAROCK_RESTY_AUTO_SSL_VERSION 0.11.0-1) -set(LUAROCK_RESTY_AUTO_SSL_HASH 
f092ba7ff82c53e21e473222aadcf33e) set(LUAROCK_RESTY_HTTP_VERSION 0.12-0) set(LUAROCK_RESTY_HTTP_HASH deaf54d8ce752db7cae5a58566e11517) set(LUAROCK_RESTY_UUID_VERSION 1.1-1) @@ -65,9 +63,9 @@ set(MAVEN_VERSION 3.5.0) set(MAVEN_HASH 35c39251d2af99b6624d40d801f6ff02) set(MAILHOG_VERSION 1.0.0) set(MAILHOG_HASH 3b758c81bfe2c9110911511daca1a7bc) -set(MONGO_ORCHESTRATION_VERSION 0.6.10) -set(MONGODB_VERSION 3.2.19) -set(MONGODB_HASH 91ed5ed3b9531f664f3e8549026f0c1e) +set(MONGO_ORCHESTRATION_VERSION 0.6.11) +set(MONGODB_VERSION 3.2.20) +set(MONGODB_HASH 01f7660d86b3de679ce388eaa681286a) set(MORA_VERSION 8127901857cf88d3f0902708b25ad930354973a3) set(MORA_HASH b86cea913596370cd58fce89b23acd97) set(NGX_DYUPS_VERSION a5e75737e04ff3e5040a80f5f739171e96c3359c) @@ -94,8 +92,8 @@ set(RUBY_VERSION 2.4.4) set(RUBY_HASH 45a8de577471b90dc4838c5ef26aeb253a56002896189055a44dc680644243f1) set(RUBYGEMS_VERSION 2.7.6) set(RUBYGEMS_HASH ee5ef219ac97f5499c31e6071eae424c3265620ece33b5cc66e09fa30f22086a) -set(RSYSLOG_VERSION 8.33.0) -set(RSYSLOG_HASH 2df39d91baddb75c575aa03525cd9d4d20aad75011c6d6d25ef773ac26ff5c12) +set(RSYSLOG_VERSION 8.34.0) +set(RSYSLOG_HASH 18330a9764c55d2501b847aad267292bd96c2b12fa5c3b92909bd8d4563c80a9) set(RUNIT_VERSION 2.1.2) set(RUNIT_HASH 6c985fbfe3a34608eb3c53dc719172c4) set(SHELLCHECK_VERSION 0.4.7) @@ -104,7 +102,7 @@ set(SHELLCHECK_HASH 64bf19a1292f0357c007b615150b6e58dba138bc7bf168c5a5e27016f8b4 # https://issues.apache.org/jira/browse/TS-3959 set(TRAFFICSERVER_VERSION 5.3.2) set(TRAFFICSERVER_HASH c8e5f3e81da643ea79cba0494ed37d45) -set(UNBOUND_VERSION 1.7.0) -set(UNBOUND_HASH 94dd9071fb13d8ccd122a3ac67c4524a3324d0e771fc7a8a7c49af8abfb926a2) -set(YARN_VERSION 1.5.1) -set(YARN_HASH 561ac9089c33402abece941bc424cdd4) +set(UNBOUND_VERSION 1.7.1) +set(UNBOUND_HASH 56e085ef582c5372a20207de179d0edb4e541e59f87be7d4ee1d00d12008628d) +set(YARN_VERSION 1.6.0) +set(YARN_HASH a11a3d8a5d62712fc497a6d1cbea25f6) diff --git a/scripts/rake/outdated_packages.rb b/scripts/rake/outdated_packages.rb index 130c2027d..0d03320c1 100644 --- a/scripts/rake/outdated_packages.rb +++ b/scripts/rake/outdated_packages.rb @@ -81,9 +81,6 @@ class OutdatedPackages "luarock_penlight" => { :luarock => "penlight", }, - "luarock_resty_auto_ssl" => { - :luarock => "lua-resty-auto-ssl", - }, "luarock_resty_http" => { :luarock => "lua-resty-http", }, diff --git a/src/api-umbrella/admin-ui/package.json b/src/api-umbrella/admin-ui/package.json index c1f374732..ab87cb368 100644 --- a/src/api-umbrella/admin-ui/package.json +++ b/src/api-umbrella/admin-ui/package.json @@ -31,7 +31,7 @@ "ember-ajax": "^3.0.0", "ember-bootstrap": "~1.2.1", "ember-browserify": "~1.2.2", - "ember-buffered-proxy": "~0.8.1", + "ember-buffered-proxy": "~1.0.0", "ember-busy-blocker": "~0.1.0", "ember-cli": "~2.18.2", "ember-cli-app-version": "^3.0.0", diff --git a/src/api-umbrella/admin-ui/yarn.lock b/src/api-umbrella/admin-ui/yarn.lock index 71b15f3ad..fb5252de4 100644 --- a/src/api-umbrella/admin-ui/yarn.lock +++ b/src/api-umbrella/admin-ui/yarn.lock @@ -2,85 +2,85 @@ # yarn lockfile v1 -"@babel/code-frame@7.0.0-beta.42", "@babel/code-frame@^7.0.0-beta.40": - version "7.0.0-beta.42" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0-beta.42.tgz#a9c83233fa7cd06b39dc77adbb908616ff4f1962" +"@babel/code-frame@7.0.0-beta.44": + version "7.0.0-beta.44" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0-beta.44.tgz#2a02643368de80916162be70865c97774f3adbd9" dependencies: - "@babel/highlight" "7.0.0-beta.42" + 
"@babel/highlight" "7.0.0-beta.44" -"@babel/generator@7.0.0-beta.42": - version "7.0.0-beta.42" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.0.0-beta.42.tgz#777bb50f39c94a7e57f73202d833141f8159af33" +"@babel/generator@7.0.0-beta.44": + version "7.0.0-beta.44" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.0.0-beta.44.tgz#c7e67b9b5284afcf69b309b50d7d37f3e5033d42" dependencies: - "@babel/types" "7.0.0-beta.42" + "@babel/types" "7.0.0-beta.44" jsesc "^2.5.1" lodash "^4.2.0" source-map "^0.5.0" trim-right "^1.0.1" -"@babel/helper-function-name@7.0.0-beta.42": - version "7.0.0-beta.42" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.0.0-beta.42.tgz#b38b8f4f85168d1812c543dd700b5d549b0c4658" +"@babel/helper-function-name@7.0.0-beta.44": + version "7.0.0-beta.44" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.0.0-beta.44.tgz#e18552aaae2231100a6e485e03854bc3532d44dd" dependencies: - "@babel/helper-get-function-arity" "7.0.0-beta.42" - "@babel/template" "7.0.0-beta.42" - "@babel/types" "7.0.0-beta.42" + "@babel/helper-get-function-arity" "7.0.0-beta.44" + "@babel/template" "7.0.0-beta.44" + "@babel/types" "7.0.0-beta.44" -"@babel/helper-get-function-arity@7.0.0-beta.42": - version "7.0.0-beta.42" - resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0-beta.42.tgz#ad072e32f912c033053fc80478169aeadc22191e" +"@babel/helper-get-function-arity@7.0.0-beta.44": + version "7.0.0-beta.44" + resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0-beta.44.tgz#d03ca6dd2b9f7b0b1e6b32c56c72836140db3a15" dependencies: - "@babel/types" "7.0.0-beta.42" + "@babel/types" "7.0.0-beta.44" -"@babel/helper-split-export-declaration@7.0.0-beta.42": - version "7.0.0-beta.42" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.0.0-beta.42.tgz#0d0d5254220a9cc4e7e226240306b939dc210ee7" +"@babel/helper-split-export-declaration@7.0.0-beta.44": + version "7.0.0-beta.44" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.0.0-beta.44.tgz#c0b351735e0fbcb3822c8ad8db4e583b05ebd9dc" dependencies: - "@babel/types" "7.0.0-beta.42" + "@babel/types" "7.0.0-beta.44" -"@babel/highlight@7.0.0-beta.42": - version "7.0.0-beta.42" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.0.0-beta.42.tgz#a502a1c0d6f99b2b0e81d468a1b0c0e81e3f3623" +"@babel/highlight@7.0.0-beta.44": + version "7.0.0-beta.44" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.0.0-beta.44.tgz#18c94ce543916a80553edcdcf681890b200747d5" dependencies: chalk "^2.0.0" esutils "^2.0.2" js-tokens "^3.0.0" -"@babel/template@7.0.0-beta.42": - version "7.0.0-beta.42" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.0.0-beta.42.tgz#7186d4e70d44cdec975049ba0a73bdaf5cdee052" +"@babel/template@7.0.0-beta.44": + version "7.0.0-beta.44" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.0.0-beta.44.tgz#f8832f4fdcee5d59bf515e595fc5106c529b394f" dependencies: - "@babel/code-frame" "7.0.0-beta.42" - "@babel/types" "7.0.0-beta.42" - babylon "7.0.0-beta.42" + "@babel/code-frame" "7.0.0-beta.44" + "@babel/types" "7.0.0-beta.44" + babylon "7.0.0-beta.44" lodash "^4.2.0" -"@babel/traverse@^7.0.0-beta.40": - version "7.0.0-beta.42" - resolved 
"https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.0.0-beta.42.tgz#f4bf4d1e33d41baf45205e2d0463591d57326285" +"@babel/traverse@7.0.0-beta.44": + version "7.0.0-beta.44" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.0.0-beta.44.tgz#a970a2c45477ad18017e2e465a0606feee0d2966" dependencies: - "@babel/code-frame" "7.0.0-beta.42" - "@babel/generator" "7.0.0-beta.42" - "@babel/helper-function-name" "7.0.0-beta.42" - "@babel/helper-split-export-declaration" "7.0.0-beta.42" - "@babel/types" "7.0.0-beta.42" - babylon "7.0.0-beta.42" + "@babel/code-frame" "7.0.0-beta.44" + "@babel/generator" "7.0.0-beta.44" + "@babel/helper-function-name" "7.0.0-beta.44" + "@babel/helper-split-export-declaration" "7.0.0-beta.44" + "@babel/types" "7.0.0-beta.44" + babylon "7.0.0-beta.44" debug "^3.1.0" globals "^11.1.0" invariant "^2.2.0" lodash "^4.2.0" -"@babel/types@7.0.0-beta.42", "@babel/types@^7.0.0-beta.40": - version "7.0.0-beta.42" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.0.0-beta.42.tgz#1e2118767684880f6963801b272fd2b3348efacc" +"@babel/types@7.0.0-beta.44": + version "7.0.0-beta.44" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.0.0-beta.44.tgz#6b1b164591f77dec0a0342aca995f2d046b3a757" dependencies: esutils "^2.0.2" lodash "^4.2.0" to-fast-properties "^2.0.0" -"@ember-decorators/argument@^0.8.10": - version "0.8.13" - resolved "https://registry.yarnpkg.com/@ember-decorators/argument/-/argument-0.8.13.tgz#217804178c4278a8c264a2878b66c0cb55b77e3d" +"@ember-decorators/argument@^0.8.13": + version "0.8.15" + resolved "https://registry.yarnpkg.com/@ember-decorators/argument/-/argument-0.8.15.tgz#9b0be4b8ca362a9fb79af0bad5249575a2cbd6fa" dependencies: babel-plugin-filter-imports "^1.1.1" broccoli-funnel "^2.0.1" @@ -91,28 +91,64 @@ ember-source-channel-url "^1.0.1" ember-try "^0.2.23" -"@ember-decorators/babel-transforms@^0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@ember-decorators/babel-transforms/-/babel-transforms-0.1.1.tgz#c2be1677192e55ccfeb806002d57e314a0e728bc" +"@ember-decorators/babel-transforms@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@ember-decorators/babel-transforms/-/babel-transforms-2.0.1.tgz#75b668cfe996fa920c940ad723be015cddf904db" dependencies: babel-plugin-transform-class-properties "^6.24.1" babel-plugin-transform-decorators-legacy "^1.3.4" ember-cli-babel "^6.6.0" ember-cli-version-checker "^2.1.0" -"@ember-decorators/utils@^0.2.0": - version "0.2.0" - resolved "https://registry.yarnpkg.com/@ember-decorators/utils/-/utils-0.2.0.tgz#395362c75c4f85aa63aa7cbed77a6486fd6e5f22" +"@ember-decorators/component@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@ember-decorators/component/-/component-2.0.0.tgz#2ef20e0d2d7e74320256a16425aaa65a5b19ffd4" + dependencies: + "@ember-decorators/utils" "^2.0.0" + ember-cli-babel "^6.6.0" + +"@ember-decorators/controller@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@ember-decorators/controller/-/controller-2.0.0.tgz#4bd48ab4ff59ec113f1efab9c725eaaf0a6162c0" + dependencies: + "@ember-decorators/utils" "^2.0.0" + ember-cli-babel "^6.6.0" + +"@ember-decorators/data@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@ember-decorators/data/-/data-2.0.0.tgz#1a9b2160bf6c57ddcb649299e673680ea0ad3df8" + dependencies: + "@ember-decorators/utils" "^2.0.0" + ember-cli-babel "^6.6.0" + +"@ember-decorators/object@^2.0.0": + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/@ember-decorators/object/-/object-2.0.0.tgz#e24cfe469f509d37eefef2c55e3e539039c34117" + dependencies: + "@ember-decorators/utils" "^2.0.0" + ember-cli-babel "^6.6.0" + ember-compatibility-helpers "^0.1.3" + +"@ember-decorators/service@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@ember-decorators/service/-/service-2.0.0.tgz#2a96eb099544d1552af13e1416ab1e2dc0c95110" + dependencies: + "@ember-decorators/utils" "^2.0.0" + ember-cli-babel "^6.6.0" + +"@ember-decorators/utils@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@ember-decorators/utils/-/utils-2.0.0.tgz#14f1fbd6de4f1d722b15c155287aafc6453943f2" dependencies: ember-cli-babel "^6.6.0" - ember-compatibility-helpers "^0.1.2" + ember-compatibility-helpers "1.0.0-beta.1" "@ember/test-helpers@^0.7.18": - version "0.7.20" - resolved "https://registry.yarnpkg.com/@ember/test-helpers/-/test-helpers-0.7.20.tgz#488b1c8ef23626f8831e5cb750ffca86e017e6dc" + version "0.7.22" + resolved "https://registry.yarnpkg.com/@ember/test-helpers/-/test-helpers-0.7.22.tgz#f8a0fb23d85dec2e7e07ec6138f93e1868ae3619" dependencies: broccoli-funnel "^2.0.1" - ember-cli-babel "^6.10.0" + ember-cli-babel "^6.12.0" ember-cli-htmlbars-inline-precompile "^1.0.0" "@glimmer/di@^0.2.0": @@ -184,13 +220,6 @@ ajv-keywords@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-2.1.1.tgz#617997fc5f60576894c435f940d819e135b80762" -ajv@^4.9.1: - version "4.11.8" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536" - dependencies: - co "^4.6.0" - json-stable-stringify "^1.0.1" - ajv@^5.1.0, ajv@^5.2.3, ajv@^5.3.0: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" @@ -238,10 +267,6 @@ ansi-escapes@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.1.0.tgz#f73207bb81207d75fd6c83f125af26eea378ca30" -ansi-regex@^0.2.0, ansi-regex@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-0.2.1.tgz#0d8e946967a3d8143f93e24e298525fc1b2235f9" - ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" @@ -250,10 +275,6 @@ ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" -ansi-styles@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-1.1.0.tgz#eaecbf66cd706882760b2f4691582b8f55d7a7de" - ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" @@ -488,9 +509,9 @@ asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" -atob@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.0.tgz#ab2b150e51d7b122b9efc8d7340c06b6c41076bc" +atob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.1.tgz#ae2d5a729477f289d60dd7f96a6314a22dd6c22a" aws-sign2@~0.6.0: version "0.6.0" @@ -501,8 +522,8 @@ aws-sign2@~0.7.0: resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" aws4@^1.2.1, aws4@^1.6.0: - version "1.6.0" - resolved 
"https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e" + version "1.7.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.7.0.tgz#d4d0e9b9dbfca77bf08eeb0a8a471550fe39e289" babel-code-frame@^6.22.0, babel-code-frame@^6.26.0: version "6.26.0" @@ -564,8 +585,8 @@ babel-core@^5.0.0: try-resolve "^1.0.0" babel-core@^6.14.0, babel-core@^6.24.1, babel-core@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.26.0.tgz#af32f78b31a6fcef119c87b0fd8d9753f03a0bb8" + version "6.26.3" + resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.26.3.tgz#b2e2f09e342d0f0c88e2f02e067794125e75c207" dependencies: babel-code-frame "^6.26.0" babel-generator "^6.26.0" @@ -577,24 +598,24 @@ babel-core@^6.14.0, babel-core@^6.24.1, babel-core@^6.26.0: babel-traverse "^6.26.0" babel-types "^6.26.0" babylon "^6.18.0" - convert-source-map "^1.5.0" - debug "^2.6.8" + convert-source-map "^1.5.1" + debug "^2.6.9" json5 "^0.5.1" lodash "^4.17.4" minimatch "^3.0.4" path-is-absolute "^1.0.1" - private "^0.1.7" + private "^0.1.8" slash "^1.0.0" - source-map "^0.5.6" + source-map "^0.5.7" babel-eslint@^8.0.3: - version "8.2.2" - resolved "http://registry.npmjs.org/babel-eslint/-/babel-eslint-8.2.2.tgz#1102273354c6f0b29b4ea28a65f97d122296b68b" + version "8.2.3" + resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-8.2.3.tgz#1a2e6681cc9bc4473c32899e59915e19cd6733cf" dependencies: - "@babel/code-frame" "^7.0.0-beta.40" - "@babel/traverse" "^7.0.0-beta.40" - "@babel/types" "^7.0.0-beta.40" - babylon "^7.0.0-beta.40" + "@babel/code-frame" "7.0.0-beta.44" + "@babel/traverse" "7.0.0-beta.44" + "@babel/types" "7.0.0-beta.44" + babylon "7.0.0-beta.44" eslint-scope "~3.7.1" eslint-visitor-keys "^1.0.0" @@ -763,11 +784,11 @@ babel-plugin-filter-imports@^0.3.1: resolved "https://registry.yarnpkg.com/babel-plugin-filter-imports/-/babel-plugin-filter-imports-0.3.1.tgz#e7859b56886b175dd2616425d277b219e209ea8b" babel-plugin-filter-imports@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/babel-plugin-filter-imports/-/babel-plugin-filter-imports-1.1.1.tgz#0f418fb517a7c64b54461b947940081e51b3fd9d" + version "1.1.2" + resolved "https://registry.yarnpkg.com/babel-plugin-filter-imports/-/babel-plugin-filter-imports-1.1.2.tgz#c6e1f2685253bbda91b1dc5a6652ce825f771264" dependencies: babel-types "^6.26.0" - lodash "^4.17.4" + lodash "^4.17.10" babel-plugin-htmlbars-inline-precompile@^0.2.3: version "0.2.4" @@ -945,8 +966,8 @@ babel-plugin-transform-es2015-modules-amd@^6.22.0, babel-plugin-transform-es2015 babel-template "^6.24.1" babel-plugin-transform-es2015-modules-commonjs@^6.23.0, babel-plugin-transform-es2015-modules-commonjs@^6.24.1: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.0.tgz#0d8394029b7dc6abe1a97ef181e00758dd2e5d8a" + version "6.26.2" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.2.tgz#58a793863a9e7ca870bdc5a881117ffac27db6f3" dependencies: babel-plugin-transform-strict-mode "^6.24.1" babel-runtime "^6.26.0" @@ -1068,8 +1089,8 @@ babel-polyfill@^6.16.0: regenerator-runtime "^0.10.5" babel-preset-env@^1.5.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/babel-preset-env/-/babel-preset-env-1.6.1.tgz#a18b564cc9b9afdf4aae57ae3c1b0d99188e6f48" + version "1.7.0" + resolved 
"https://registry.yarnpkg.com/babel-preset-env/-/babel-preset-env-1.7.0.tgz#dea79fa4ebeb883cd35dab07e260c1c9c04df77a" dependencies: babel-plugin-check-es2015-constants "^6.22.0" babel-plugin-syntax-trailing-function-commas "^6.22.0" @@ -1098,7 +1119,7 @@ babel-preset-env@^1.5.1: babel-plugin-transform-es2015-unicode-regex "^6.22.0" babel-plugin-transform-exponentiation-operator "^6.22.0" babel-plugin-transform-regenerator "^6.22.0" - browserslist "^2.1.2" + browserslist "^3.2.6" invariant "^2.2.2" semver "^5.3.0" @@ -1162,9 +1183,9 @@ babel6-plugin-strip-heimdall@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/babel6-plugin-strip-heimdall/-/babel6-plugin-strip-heimdall-6.0.1.tgz#35f80eddec1f7fffdc009811dfbd46d9965072b6" -babylon@7.0.0-beta.42, babylon@^7.0.0-beta.40: - version "7.0.0-beta.42" - resolved "https://registry.yarnpkg.com/babylon/-/babylon-7.0.0-beta.42.tgz#67cfabcd4f3ec82999d29031ccdea89d0ba99657" +babylon@7.0.0-beta.44: + version "7.0.0-beta.44" + resolved "https://registry.yarnpkg.com/babylon/-/babylon-7.0.0-beta.44.tgz#89159e15e6e30c5096e22d738d8c0af8a0e8ca1d" babylon@^5.8.38: version "5.8.38" @@ -1197,8 +1218,8 @@ base64-arraybuffer@0.1.5: resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" base64-js@^1.0.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.3.tgz#fb13668233d9614cf5fb4bce95a9ba4096cdf801" + version "1.3.0" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.0.tgz#cab1e6118f051095e58b5281aea8c1cd22bfc0e3" base64id@1.0.0: version "1.0.0" @@ -1363,16 +1384,14 @@ braces@^1.8.2: repeat-element "^1.1.2" braces@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.1.tgz#7086c913b4e5a08dbe37ac0ee6a2500c4ba691bb" + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" dependencies: arr-flatten "^1.1.0" array-unique "^0.3.2" - define-property "^1.0.0" extend-shallow "^2.0.1" fill-range "^4.0.0" isobject "^3.0.1" - kind-of "^6.0.2" repeat-element "^1.1.2" snapdragon "^0.8.1" snapdragon-node "^2.0.1" @@ -1522,15 +1541,11 @@ broccoli-debug@^0.6.1, broccoli-debug@^0.6.2, broccoli-debug@^0.6.3: tree-sync "^1.2.2" broccoli-file-creator@^1.0.0, broccoli-file-creator@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/broccoli-file-creator/-/broccoli-file-creator-1.1.1.tgz#1b35b67d215abdfadd8d49eeb69493c39e6c3450" + version "1.2.0" + resolved "https://registry.yarnpkg.com/broccoli-file-creator/-/broccoli-file-creator-1.2.0.tgz#27f1b25b1b00e7bb7bf3d5d7abed5f4d5388df4d" dependencies: - broccoli-kitchen-sink-helpers "~0.2.0" broccoli-plugin "^1.1.0" - broccoli-writer "~0.1.1" mkdirp "^0.5.1" - rsvp "~3.0.6" - symlink-or-copy "^1.0.1" broccoli-filter@^0.1.11: version "0.1.14" @@ -1600,7 +1615,7 @@ broccoli-funnel@^1.0.0, broccoli-funnel@^1.0.1, broccoli-funnel@^1.1.0, broccoli symlink-or-copy "^1.0.0" walk-sync "^0.3.1" -broccoli-kitchen-sink-helpers@^0.2.5, broccoli-kitchen-sink-helpers@^0.2.6, broccoli-kitchen-sink-helpers@~0.2.0: +broccoli-kitchen-sink-helpers@^0.2.5, broccoli-kitchen-sink-helpers@^0.2.6: version "0.2.9" resolved "https://registry.yarnpkg.com/broccoli-kitchen-sink-helpers/-/broccoli-kitchen-sink-helpers-0.2.9.tgz#a5e0986ed8d76fb5984b68c3f0450d3a96e36ecc" dependencies: @@ -1792,7 +1807,7 @@ broccoli-unwatched-tree@^0.1.1: dependencies: broccoli-source "^1.1.0" -broccoli-writer@^0.1.1, 
broccoli-writer@~0.1.1: +broccoli-writer@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/broccoli-writer/-/broccoli-writer-0.1.1.tgz#d4d71aa8f2afbc67a3866b91a2da79084b96ab2d" dependencies: @@ -1821,8 +1836,8 @@ browser-resolve@^1.11.0, browser-resolve@^1.7.0: resolve "1.1.7" browserify-aes@^1.0.0, browserify-aes@^1.0.4: - version "1.1.1" - resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.1.1.tgz#38b7ab55edb806ff2dcda1a7f1620773a477c49f" + version "1.2.0" + resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" dependencies: buffer-xor "^1.0.3" cipher-base "^1.0.0" @@ -1832,16 +1847,16 @@ browserify-aes@^1.0.0, browserify-aes@^1.0.4: safe-buffer "^5.0.1" browserify-cipher@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.0.tgz#9988244874bf5ed4e28da95666dcd66ac8fc363a" + version "1.0.1" + resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" dependencies: browserify-aes "^1.0.4" browserify-des "^1.0.0" evp_bytestokey "^1.0.0" browserify-des@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.0.tgz#daa277717470922ed2fe18594118a175439721dd" + version "1.0.1" + resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.1.tgz#3343124db6d7ad53e26a8826318712bdc8450f9c" dependencies: cipher-base "^1.0.1" des.js "^1.0.0" @@ -1924,19 +1939,12 @@ browserify@^13.0.0: vm-browserify "~0.0.1" xtend "^4.0.0" -browserslist@^2.1.2, browserslist@^2.2.2: - version "2.11.3" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-2.11.3.tgz#fe36167aed1bbcde4827ebfe71347a2cc70b99b2" +browserslist@^3.1.0, browserslist@^3.2.6: + version "3.2.7" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-3.2.7.tgz#aa488634d320b55e88bab0256184dbbcca1e6de9" dependencies: - caniuse-lite "^1.0.30000792" - electron-to-chromium "^1.3.30" - -browserslist@^3.1.0: - version "3.2.4" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-3.2.4.tgz#fb9ad70fd09875137ae943a31ab815ed76896031" - dependencies: - caniuse-lite "^1.0.30000821" - electron-to-chromium "^1.3.41" + caniuse-lite "^1.0.30000835" + electron-to-chromium "^1.3.45" bser@^2.0.0: version "2.0.0" @@ -2055,11 +2063,11 @@ can-symlink@^1.0.0: dependencies: tmp "0.0.28" -caniuse-lite@^1.0.30000792, caniuse-lite@^1.0.30000821: - version "1.0.30000821" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000821.tgz#0f3223f1e048ed96451c56ca6cf197058c42cb93" +caniuse-lite@^1.0.30000835: + version "1.0.30000839" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000839.tgz#41fcc036cf1cb77a0e0be041210f77f1ced44a7b" -capture-exit@^1.1.0: +capture-exit@^1.1.0, capture-exit@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/capture-exit/-/capture-exit-1.2.0.tgz#1c5fcc489fd0ab00d4f1ac7ae1072e3173fbab6f" dependencies: @@ -2087,16 +2095,6 @@ center-align@^0.1.1: align-text "^0.1.3" lazy-cache "^1.0.3" -chalk@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.5.1.tgz#663b3a648b68b55d04690d49167aa837858f2174" - dependencies: - ansi-styles "^1.1.0" - escape-string-regexp "^1.0.0" - has-ansi "^0.1.0" - strip-ansi "^0.3.0" - supports-color "^0.2.0" - chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3: version "1.1.3" resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" @@ -2108,8 +2106,8 @@ chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3: supports-color "^2.0.0" chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.0, chalk@^2.3.1: - version "2.3.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.3.2.tgz#250dc96b07491bfd601e648d66ddf5f60c7a5c65" + version "2.4.1" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e" dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" @@ -2140,6 +2138,10 @@ chokidar@1.7.0: optionalDependencies: fsevents "^1.0.0" +chownr@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.0.1.tgz#e2a75042a9551908bebd25b8523d5f9769d79181" + cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" @@ -2196,8 +2198,8 @@ cli-cursor@^2.1.0: restore-cursor "^2.0.0" cli-spinners@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-1.1.0.tgz#f1847b168844d917a671eb9d147e3df497c90d06" + version "1.3.1" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-1.3.1.tgz#002c1990912d0d59580c93bd36c056de99e4259a" cli-table2@^0.2.0: version "0.2.0" @@ -2249,8 +2251,8 @@ clone@^1.0.2: resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" clone@^2.0.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" + version "2.1.1" + resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.1.tgz#d217d1e961118e3ac9a4b8bba3285553bf647cdb" co@^4.6.0: version "4.6.0" @@ -2282,8 +2284,8 @@ colors@1.0.3: resolved "https://registry.yarnpkg.com/colors/-/colors-1.0.3.tgz#0433f44d809680fdeb60ed260f1b0c262e82a40b" colors@^1.1.2: - version "1.2.1" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.2.1.tgz#f4a3d302976aaf042356ba1ade3b1a2c62d9d794" + version "1.2.5" + resolved "https://registry.yarnpkg.com/colors/-/colors-1.2.5.tgz#89c7ad9a374bc030df8013241f68136ed8835afc" combine-source-map@^0.8.0, combine-source-map@~0.8.0: version "0.8.0" @@ -2443,7 +2445,7 @@ continuable-cache@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/continuable-cache/-/continuable-cache-0.3.1.tgz#bd727a7faed77e71ff3985ac93351a912733ad0f" -convert-source-map@^1.1.0, convert-source-map@^1.5.0, convert-source-map@^1.5.1: +convert-source-map@^1.1.0, convert-source-map@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.1.tgz#b8278097b9bc229365de5c62cf5fcaed8b5599e5" @@ -2472,8 +2474,8 @@ core-js@^1.0.0: resolved "https://registry.yarnpkg.com/core-js/-/core-js-1.2.7.tgz#652294c14651db28fa93bd2d5ff2983a4f08c636" core-js@^2.4.0, core-js@^2.4.1, core-js@^2.5.0: - version "2.5.4" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.4.tgz#f2c8bf181f2a80b92f360121429ce63a2f0aeae0" + version "2.5.6" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.6.tgz#0fe6d45bf3cac3ac364a9d72de7576f4eb221b9d" core-object@^1.1.0: version "1.1.0" @@ -2490,24 +2492,25 @@ core-util-is@1.0.2, core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" create-ecdh@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.0.tgz#888c723596cdf7612f6498233eebd7a35301737d" + version "4.0.3" + resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" dependencies: bn.js "^4.1.0" elliptic "^6.0.0" create-hash@^1.1.0, create-hash@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.1.3.tgz#606042ac8b9262750f483caddab0f5819172d8fd" + version "1.2.0" + resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" dependencies: cipher-base "^1.0.1" inherits "^2.0.1" - ripemd160 "^2.0.0" + md5.js "^1.3.4" + ripemd160 "^2.0.1" sha.js "^2.4.0" create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: - version "1.1.6" - resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.6.tgz#acb9e221a4e17bdb076e90657c42b93e3726cf06" + version "1.1.7" + resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" dependencies: cipher-base "^1.0.3" create-hash "^1.1.0" @@ -2574,8 +2577,8 @@ crypto-random-string@^1.0.0: resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e" csv-parse@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-2.1.0.tgz#75293aa334e7f8dd1ef956c5551adf0db6a79c81" + version "2.4.0" + resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-2.4.0.tgz#d4c72a1533734b2c2a65f486d13091264cacc115" currently-unhandled@^0.4.1: version "0.4.1" @@ -2616,7 +2619,7 @@ date-now@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/date-now/-/date-now-0.1.4.tgz#eaf439fd4d4848ad74e5cc7dbef200672b9e345b" -debug@2.6.9, debug@^2.1.0, debug@^2.1.1, debug@^2.1.3, debug@^2.2.0, debug@^2.3.3, debug@^2.4.0, debug@^2.6.8: +debug@2.6.9, debug@^2.1.0, debug@^2.1.1, debug@^2.1.2, debug@^2.1.3, debug@^2.2.0, debug@^2.3.3, debug@^2.4.0, debug@^2.6.8, debug@^2.6.9: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" dependencies: @@ -2642,9 +2645,9 @@ decompress-response@^3.3.0: dependencies: mimic-response "^1.0.0" -deep-extend@~0.4.0: - version "0.4.2" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" +deep-extend@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.5.1.tgz#b894a9dd90d3023fbf1c55a394fb858eb2066f1f" deep-is@~0.1.3: version "0.1.3" @@ -2776,6 +2779,10 @@ detect-indent@^4.0.0: dependencies: repeating "^2.0.0" +detect-indent@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-5.0.0.tgz#3871cc0a6a002e8c3e5b3cf7f336264675f06b9d" + detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" @@ -2792,8 +2799,8 @@ diff@^3.2.0, diff@~3.5.0: resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" diffie-hellman@^5.0.0: - version "5.0.2" - resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.2.tgz#b5835739270cfe26acf632099fded2a07f209e5e" + version "5.0.3" + resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" dependencies: bn.js "^4.1.0" miller-rabin "^4.0.0" @@ -2849,9 +2856,9 
@@ ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" -electron-to-chromium@^1.3.30, electron-to-chromium@^1.3.41: - version "1.3.41" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.41.tgz#7e33643e00cd85edfd17e04194f6d00e73737235" +electron-to-chromium@^1.3.45: + version "1.3.45" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.45.tgz#458ac1b1c5c760ce8811a16d2bfbd97ec30bafb8" elliptic@^6.0.0: version "6.4.0" @@ -2879,8 +2886,8 @@ ember-assign-polyfill@^2.0.1: ember-cli-version-checker "^2.0.0" ember-bootstrap@~1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/ember-bootstrap/-/ember-bootstrap-1.2.1.tgz#9b88d7568f162b211fa78b4030abade283ad4b52" + version "1.2.2" + resolved "https://registry.yarnpkg.com/ember-bootstrap/-/ember-bootstrap-1.2.2.tgz#768380b746c79811dd2fb508c81ac0eeb024a114" dependencies: broccoli-debug "^0.6.3" broccoli-funnel "^2.0.0" @@ -2895,7 +2902,7 @@ ember-bootstrap@~1.2.1: ember-concurrency "^0.8.7" ember-in-element-polyfill "^0.1.2" ember-maybe-in-element "^0.1.3" - ember-popper "^0.8.3" + ember-popper "^0.9.0" ember-runtime-enumerable-includes-polyfill "^2.0.0" findup-sync "^1.0.0" fs-extra "^4.0.1" @@ -2930,11 +2937,12 @@ ember-browserify@~1.2.2: through2 "^2.0.0" walk-sync "^0.2.7" -ember-buffered-proxy@~0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/ember-buffered-proxy/-/ember-buffered-proxy-0.8.1.tgz#d89c22c9db76f411bf92c7b3704ae8c70887cbfa" +ember-buffered-proxy@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/ember-buffered-proxy/-/ember-buffered-proxy-1.0.0.tgz#e8ce2f32231036f2391648dd273d5b7242e68a5a" dependencies: ember-cli-babel "^6.6.0" + ember-notify-property-change-polyfill "^0.0.1" ember-busy-blocker@~0.1.0: version "0.1.0" @@ -2944,13 +2952,13 @@ ember-busy-blocker@~0.1.0: ember-cli-htmlbars "^1.0.1" ember-cli-app-version@^3.0.0: - version "3.1.3" - resolved "https://registry.yarnpkg.com/ember-cli-app-version/-/ember-cli-app-version-3.1.3.tgz#26d25f5e653ff0106f0b39da6d75518ba8ed282d" + version "3.2.0" + resolved "https://registry.yarnpkg.com/ember-cli-app-version/-/ember-cli-app-version-3.2.0.tgz#7b9ad0e1b63ae0518648356ee24c703e922bc26e" dependencies: - ember-cli-babel "^6.8.0" - git-repo-version "^1.0.0" + ember-cli-babel "^6.12.0" + git-repo-version "^1.0.2" -ember-cli-babel@^5.1.5, ember-cli-babel@^5.1.7: +ember-cli-babel@^5.1.5: version "5.2.8" resolved "https://registry.yarnpkg.com/ember-cli-babel/-/ember-cli-babel-5.2.8.tgz#0356b03cc3fdff5d0f2ecaa46a0e1cfaebffd876" dependencies: @@ -2960,7 +2968,7 @@ ember-cli-babel@^5.1.5, ember-cli-babel@^5.1.7: ember-cli-version-checker "^1.0.2" resolve "^1.1.2" -ember-cli-babel@^6.0.0, ember-cli-babel@^6.0.0-beta.4, ember-cli-babel@^6.0.0-beta.7, ember-cli-babel@^6.0.0-beta.9, ember-cli-babel@^6.1.0, ember-cli-babel@^6.10.0, ember-cli-babel@^6.11.0, ember-cli-babel@^6.3.0, ember-cli-babel@^6.6.0, ember-cli-babel@^6.8.0, ember-cli-babel@^6.8.1, ember-cli-babel@^6.8.2, ember-cli-babel@^6.9.0, ember-cli-babel@^6.9.2: +ember-cli-babel@^6.0.0, ember-cli-babel@^6.0.0-beta.4, ember-cli-babel@^6.0.0-beta.7, ember-cli-babel@^6.1.0, ember-cli-babel@^6.10.0, ember-cli-babel@^6.11.0, ember-cli-babel@^6.12.0, ember-cli-babel@^6.3.0, ember-cli-babel@^6.6.0, ember-cli-babel@^6.8.1, ember-cli-babel@^6.8.2, ember-cli-babel@^6.9.0, ember-cli-babel@^6.9.2: version "6.12.0" resolved 
"https://registry.yarnpkg.com/ember-cli-babel/-/ember-cli-babel-6.12.0.tgz#3adcdbe1278da1fcd0b9038f1360cb4ac5d4414c" dependencies: @@ -3038,7 +3046,7 @@ ember-cli-htmlbars-inline-precompile@^1.0.0: heimdalljs-logger "^0.1.7" silent-error "^1.1.0" -ember-cli-htmlbars@^1.0.1, ember-cli-htmlbars@^1.0.3: +ember-cli-htmlbars@^1.0.1: version "1.3.4" resolved "https://registry.yarnpkg.com/ember-cli-htmlbars/-/ember-cli-htmlbars-1.3.4.tgz#461289724b34af372a6a0c4b6635819156963353" dependencies: @@ -3207,9 +3215,9 @@ ember-cli-version-checker@^1.0.2, ember-cli-version-checker@^1.2.0: dependencies: semver "^5.3.0" -ember-cli-version-checker@^2.0.0, ember-cli-version-checker@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/ember-cli-version-checker/-/ember-cli-version-checker-2.1.0.tgz#fc79a56032f3717cf844ada7cbdec1a06fedb604" +ember-cli-version-checker@^2.0.0, ember-cli-version-checker@^2.1.0, ember-cli-version-checker@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ember-cli-version-checker/-/ember-cli-version-checker-2.1.2.tgz#305ce102390c66e4e0f1432dea9dc5c7c19fed98" dependencies: resolve "^1.3.3" semver "^5.3.0" @@ -3303,7 +3311,15 @@ ember-cli@~2.18.2: walk-sync "^0.3.0" yam "0.0.22" -ember-compatibility-helpers@^0.1.0, ember-compatibility-helpers@^0.1.2, ember-compatibility-helpers@^0.1.3: +ember-compatibility-helpers@1.0.0-beta.1: + version "1.0.0-beta.1" + resolved "https://registry.yarnpkg.com/ember-compatibility-helpers/-/ember-compatibility-helpers-1.0.0-beta.1.tgz#e54d68125f9bc15735ca4a68960d186d5c3ca58b" + dependencies: + babel-plugin-debug-macros "^0.1.11" + ember-cli-version-checker "^2.0.0" + semver "^5.4.1" + +ember-compatibility-helpers@^0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/ember-compatibility-helpers/-/ember-compatibility-helpers-0.1.3.tgz#039a57e9f1a401efda0023c1e3650bd01cfd7087" dependencies: @@ -3312,16 +3328,16 @@ ember-compatibility-helpers@^0.1.0, ember-compatibility-helpers@^0.1.2, ember-co semver "^5.4.1" ember-compatibility-helpers@^1.0.0-beta.2: - version "1.0.0-beta.2" - resolved "https://registry.yarnpkg.com/ember-compatibility-helpers/-/ember-compatibility-helpers-1.0.0-beta.2.tgz#00cb134af45f9562fa47a23f4da81a63aad41943" + version "1.0.0" + resolved "https://registry.yarnpkg.com/ember-compatibility-helpers/-/ember-compatibility-helpers-1.0.0.tgz#616b22d2e14b4c6a1f4441e5390a49abf52b3c68" dependencies: babel-plugin-debug-macros "^0.1.11" - ember-cli-version-checker "^2.0.0" + ember-cli-version-checker "^2.1.1" semver "^5.4.1" ember-concurrency@^0.8.7: - version "0.8.17" - resolved "https://registry.yarnpkg.com/ember-concurrency/-/ember-concurrency-0.8.17.tgz#be47a90342f1960f4f57284c2fe5f7ce2396142a" + version "0.8.19" + resolved "https://registry.yarnpkg.com/ember-concurrency/-/ember-concurrency-0.8.19.tgz#71b9c175ba077865310029cb4bdb880e17d5155e" dependencies: babel-core "^6.24.1" ember-cli-babel "^6.8.2" @@ -3335,13 +3351,13 @@ ember-cookies@^0.3.0: ember-getowner-polyfill "^1.1.0 || ^2.0.0" ember-cp-validations@~3.5.2: - version "3.5.2" - resolved "https://registry.yarnpkg.com/ember-cp-validations/-/ember-cp-validations-3.5.2.tgz#1b205891ae7d7026f7b6c55cf6e440122db9b2ac" + version "3.5.3" + resolved "https://registry.yarnpkg.com/ember-cp-validations/-/ember-cp-validations-3.5.3.tgz#059d173e2f0904232516b23c4b774675e672ae11" dependencies: ember-cli-babel "^6.6.0" ember-cli-version-checker "^2.0.0" ember-getowner-polyfill "^2.0.1" - ember-require-module "0.1.2" + ember-require-module "0.1.3" 
ember-string-ishtmlsafe-polyfill "^2.0.0" ember-validators "1.0.4" exists-sync "0.0.4" @@ -3381,15 +3397,17 @@ ember-data@~2.18.0: semver "^5.1.0" silent-error "^1.0.0" -ember-decorators@^1.3.2: - version "1.3.4" - resolved "https://registry.yarnpkg.com/ember-decorators/-/ember-decorators-1.3.4.tgz#801115ae1be9157bbb75280991ee3d07d12c6cce" +ember-decorators@^2.0.0-beta.2: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ember-decorators/-/ember-decorators-2.0.0.tgz#c29d58dded3e6d40e91406bdf82c6278b125401b" dependencies: - "@ember-decorators/babel-transforms" "^0.1.1" - "@ember-decorators/utils" "^0.2.0" + "@ember-decorators/component" "^2.0.0" + "@ember-decorators/controller" "^2.0.0" + "@ember-decorators/data" "^2.0.0" + "@ember-decorators/object" "^2.0.0" + "@ember-decorators/service" "^2.0.0" ember-cli-babel "^6.0.0" - ember-compatibility-helpers "^0.1.0" - ember-macro-helpers "^0.17.0" + semver "^5.5.0" ember-export-application-global@^2.0.0: version "2.0.0" @@ -3404,8 +3422,8 @@ ember-factory-for-polyfill@^1.3.1: ember-cli-version-checker "^2.1.0" "ember-fetch@^2.1.0 || ^3.0.0": - version "3.4.4" - resolved "https://registry.yarnpkg.com/ember-fetch/-/ember-fetch-3.4.4.tgz#926ffa1c4120324b298c44e9558b458e586eb504" + version "3.4.5" + resolved "https://registry.yarnpkg.com/ember-fetch/-/ember-fetch-3.4.5.tgz#2967df9cbdbe0993402588216332580be3950b92" dependencies: broccoli-funnel "^1.2.0" broccoli-stew "^1.4.2" @@ -3438,8 +3456,8 @@ ember-in-element-polyfill@^0.1.2: ember-wormhole "^0.5.4" ember-inflector@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/ember-inflector/-/ember-inflector-2.2.0.tgz#edd273dfd1a29be27f14b195e2f0ed70e812d9e0" + version "2.3.0" + resolved "https://registry.yarnpkg.com/ember-inflector/-/ember-inflector-2.3.0.tgz#94797eba0eea98d902aa1e5da0f0aeef6053317f" dependencies: ember-cli-babel "^6.0.0" @@ -3449,7 +3467,7 @@ ember-invoke-action@^1.5.0: dependencies: ember-cli-babel "^6.6.0" -ember-legacy-class-shim@^1.0.0: +ember-legacy-class-shim@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/ember-legacy-class-shim/-/ember-legacy-class-shim-1.0.3.tgz#ce5a0e59a095e73f734cd341e2a5dbf08af85ed3" dependencies: @@ -3457,10 +3475,10 @@ ember-legacy-class-shim@^1.0.0: ember-cli-version-checker "^2.0.0" ember-load-initializers@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/ember-load-initializers/-/ember-load-initializers-1.0.0.tgz#4919eaf06f6dfeca7e134633d8c05a6c9921e6e7" + version "1.1.0" + resolved "https://registry.yarnpkg.com/ember-load-initializers/-/ember-load-initializers-1.1.0.tgz#4edacc0f3a14d9f53d241ac3e5561804c8377978" dependencies: - ember-cli-babel "^6.0.0-beta.7" + ember-cli-babel "^6.6.0" ember-load@~0.0.12: version "0.0.12" @@ -3470,15 +3488,6 @@ ember-load@~0.0.12: ember-cli-htmlbars "^2.0.1" ember-cli-version-checker "^2.0.0" -ember-macro-helpers@^0.17.0: - version "0.17.0" - resolved "https://registry.yarnpkg.com/ember-macro-helpers/-/ember-macro-helpers-0.17.0.tgz#5e64a49f476e38c1916aff75f949455533cd1abe" - dependencies: - ember-cli-babel "^6.6.0" - ember-cli-string-utils "^1.1.0" - ember-cli-test-info "^1.0.0" - ember-weakmap "^3.0.0" - ember-maybe-import-regenerator@^0.1.5: version "0.1.6" resolved "https://registry.yarnpkg.com/ember-maybe-import-regenerator/-/ember-maybe-import-regenerator-0.1.6.tgz#35d41828afa6d6a59bc0da3ce47f34c573d776ca" @@ -3502,6 +3511,13 @@ ember-multiselect-checkboxes@~0.11.1: ember-cli-htmlbars "^2.0.3" ember-runtime-enumerable-includes-polyfill "^2.1.0" 
+ember-notify-property-change-polyfill@^0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/ember-notify-property-change-polyfill/-/ember-notify-property-change-polyfill-0.0.1.tgz#636f75e2aaf67e2c56cb13e7139499226fd60d55" + dependencies: + ember-cli-babel "^6.6.0" + ember-cli-version-checker "^2.1.0" + ember-onbeforeunload@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/ember-onbeforeunload/-/ember-onbeforeunload-1.1.2.tgz#0874a75a4dfc77cb6c8dba7333e5cf4a87d91bae" @@ -3517,12 +3533,12 @@ ember-one-way-controls@~3.1.0: ember-invoke-action "^1.5.0" ember-runtime-enumerable-includes-polyfill "^2.0.0" -ember-popper@^0.8.3: - version "0.8.3" - resolved "https://registry.yarnpkg.com/ember-popper/-/ember-popper-0.8.3.tgz#547ec0e8d810805bd65bce7556f0a3377cb9730c" +ember-popper@^0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/ember-popper/-/ember-popper-0.9.0.tgz#ec211e116bb3c4fb70a770e75134b94ddda079b5" dependencies: - "@ember-decorators/argument" "^0.8.10" - "@ember-decorators/babel-transforms" "^0.1.1" + "@ember-decorators/argument" "^0.8.13" + "@ember-decorators/babel-transforms" "^2.0.0" babel-eslint "^8.0.3" babel6-plugin-strip-class-callcheck "^6.0.0" broccoli-funnel "^2.0.0" @@ -3531,11 +3547,11 @@ ember-popper@^0.8.3: ember-cli-node-assets "^0.2.2" ember-cli-version-checker "^2.1.0" ember-compatibility-helpers "^0.1.3" - ember-decorators "^1.3.2" - ember-legacy-class-shim "^1.0.0" + ember-decorators "^2.0.0-beta.2" + ember-legacy-class-shim "^1.0.3" ember-raf-scheduler "^0.1.0" fastboot-transform "^0.1.0" - popper.js "^1.12.9" + popper.js "^1.14.1" ember-qunit@^3.3.2: version "3.4.0" @@ -3555,13 +3571,7 @@ ember-raf-scheduler@^0.1.0: dependencies: ember-cli-babel "^6.6.0" -ember-require-module@0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/ember-require-module/-/ember-require-module-0.1.2.tgz#fdffdcf86745d601009211c1b41907395a4d081e" - dependencies: - ember-cli-babel "^5.1.7" - -ember-require-module@^0.1.2: +ember-require-module@0.1.3, ember-require-module@^0.1.2: version "0.1.3" resolved "https://registry.yarnpkg.com/ember-require-module/-/ember-require-module-0.1.3.tgz#f82f60552142179152d28ec97ebd75d967cae1dc" dependencies: @@ -3584,8 +3594,8 @@ ember-rfc176-data@^0.2.0, ember-rfc176-data@^0.2.7: resolved "https://registry.yarnpkg.com/ember-rfc176-data/-/ember-rfc176-data-0.2.7.tgz#bd355bc9b473e08096b518784170a23388bc973b" ember-rfc176-data@^0.3.0, ember-rfc176-data@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/ember-rfc176-data/-/ember-rfc176-data-0.3.1.tgz#6a5a4b8b82ec3af34f3010965fa96b936ca94519" + version "0.3.2" + resolved "https://registry.yarnpkg.com/ember-rfc176-data/-/ember-rfc176-data-0.3.2.tgz#bde5538939529b263c142b53a47402f8127f8dce" ember-router-generator@^1.0.0, ember-router-generator@^1.2.3: version "1.2.3" @@ -3616,8 +3626,8 @@ ember-simple-auth@~1.6.0: silent-error "^1.0.0" ember-source-channel-url@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/ember-source-channel-url/-/ember-source-channel-url-1.0.1.tgz#93517ccbd97a26220184b7986a5325317065308b" + version "1.1.0" + resolved "https://registry.yarnpkg.com/ember-source-channel-url/-/ember-source-channel-url-1.1.0.tgz#73de5cc6ebc25b2120e932ec1d8f82677bfaf6ef" dependencies: got "^8.0.1" @@ -3641,8 +3651,8 @@ ember-source@~2.18.0: resolve "^1.3.3" ember-string-ishtmlsafe-polyfill@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/ember-string-ishtmlsafe-polyfill/-/ember-string-ishtmlsafe-polyfill-2.0.0.tgz#38f17038c3451999e9f1db748e424d8831e85863" + version "2.0.1" + resolved "https://registry.yarnpkg.com/ember-string-ishtmlsafe-polyfill/-/ember-string-ishtmlsafe-polyfill-2.0.1.tgz#5946a2810fd53832c0621351b38b0b32ffcd3e1d" dependencies: ember-cli-version-checker "^1.2.0" @@ -3685,21 +3695,13 @@ ember-validators@1.0.4: ember-cli-babel "^6.9.2" ember-require-module "^0.1.2" -ember-weakmap@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/ember-weakmap/-/ember-weakmap-3.1.1.tgz#2ae6e0080b5b80cf0d108f7752dc69ea9603dbd7" - dependencies: - browserslist "^2.2.2" - debug "^3.1.0" - ember-cli-babel "^6.3.0" - ember-welcome-page@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/ember-welcome-page/-/ember-welcome-page-3.1.1.tgz#dead338443f24257c552bfa83ca84b0704b6c184" + version "3.2.0" + resolved "https://registry.yarnpkg.com/ember-welcome-page/-/ember-welcome-page-3.2.0.tgz#fe9903dbbaccfb4a2a05f42c716b7d07470f2d78" dependencies: broccoli-funnel "^1.0.1" - ember-cli-babel "^6.0.0-beta.9" - ember-cli-htmlbars "^1.0.3" + ember-cli-babel "^6.6.0" + ember-cli-htmlbars "^2.0.3" ember-wormhole@^0.5.4: version "0.5.4" @@ -3840,7 +3842,7 @@ escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" -escape-string-regexp@^1.0.0, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" @@ -3943,8 +3945,8 @@ esprima@~3.1.0: resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633" esquery@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.0.0.tgz#cfba8b57d7fba93f17298a8a006a04cda13d80fa" + version "1.0.1" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.0.1.tgz#406c51658b1f5991a5f9b62b1dc25b00e3e5c708" dependencies: estraverse "^4.0.0" @@ -3977,9 +3979,9 @@ event-emitter@~0.3.5: d "1" es5-ext "~0.10.14" -eventemitter3@1.x.x: - version "1.2.0" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-1.2.0.tgz#1c86991d816ad1e504750e73874224ecf3bec508" +eventemitter3@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.0.tgz#090b4d6cdbd645ed10bf750d4b5407942d7ba163" events-to-array@^1.0.1: version "1.1.2" @@ -4151,8 +4153,8 @@ external-editor@^1.1.0: tmp "^0.0.29" external-editor@^2.0.4: - version "2.1.0" - resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.1.0.tgz#3d026a21b7f95b5726387d4200ac160d372c3b48" + version "2.2.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.2.0.tgz#045511cfd8d133f3846673d1047c154e214ad3d5" dependencies: chardet "^0.4.0" iconv-lite "^0.4.17" @@ -4204,10 +4206,10 @@ fast-ordered-set@^1.0.0, fast-ordered-set@^1.0.2: blank-object "^1.0.1" fast-sourcemap-concat@^1.0.1: - version "1.2.5" - resolved "https://registry.yarnpkg.com/fast-sourcemap-concat/-/fast-sourcemap-concat-1.2.5.tgz#196db60ffefa9c616291512cd89113210e3cb747" + version "1.3.0" + resolved "https://registry.yarnpkg.com/fast-sourcemap-concat/-/fast-sourcemap-concat-1.3.0.tgz#cc618e4d6f68106b598a532e174076075bb82400" dependencies: - chalk "^0.5.1" + chalk "^2.0.0" fs-extra 
"^0.30.0" heimdalljs-logger "^0.1.7" memory-streams "^0.1.0" @@ -4257,12 +4259,12 @@ filesize@^3.1.3: resolved "https://registry.yarnpkg.com/filesize/-/filesize-3.6.1.tgz#090bb3ee01b6f801a8a8be99d31710b3422bb317" fill-range@^2.1.0: - version "2.2.3" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.3.tgz#50b77dfd7e469bc7492470963699fe7a8485a723" + version "2.2.4" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.4.tgz#eb1e773abb056dcd8df2bfdf6af59b8b3a936565" dependencies: is-number "^2.1.0" isobject "^2.0.0" - randomatic "^1.1.3" + randomatic "^3.0.0" repeat-element "^1.1.2" repeat-string "^1.5.2" @@ -4350,6 +4352,12 @@ flat-cache@^1.2.1: graceful-fs "^4.1.2" write "^0.2.1" +follow-redirects@^1.0.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.4.1.tgz#d8120f4518190f55aac65bb6fc7b85fcd666d6aa" + dependencies: + debug "^3.1.0" + font-awesome@~4.7.0: version "4.7.0" resolved "https://registry.yarnpkg.com/font-awesome/-/font-awesome-4.7.0.tgz#8fa8cf0411a1a31afd07b06d2902bb9fc815a133" @@ -4461,6 +4469,12 @@ fs-extra@^4.0.0, fs-extra@^4.0.1: jsonfile "^4.0.0" universalify "^0.1.0" +fs-minipass@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.5.tgz#06c277218454ec288df77ada54a03b8702aacb9d" + dependencies: + minipass "^2.2.1" + fs-readdir-recursive@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/fs-readdir-recursive/-/fs-readdir-recursive-0.1.2.tgz#315b4fb8c1ca5b8c47defef319d073dad3568059" @@ -4495,22 +4509,14 @@ fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" -fsevents@^1.0.0, fsevents@^1.1.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.3.tgz#11f82318f5fe7bb2cd22965a108e9306208216d8" - dependencies: - nan "^2.3.0" - node-pre-gyp "^0.6.39" - -fstream-ignore@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/fstream-ignore/-/fstream-ignore-1.0.5.tgz#9c31dae34767018fe1d249b24dada67d092da105" +fsevents@^1.0.0, fsevents@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.3.tgz#08292982e7059f6674c93d8b829c1e8604979ac0" dependencies: - fstream "^1.0.0" - inherits "2" - minimatch "^3.0.0" + nan "^2.9.2" + node-pre-gyp "^0.9.0" -fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2: +fstream@^1.0.0, fstream@^1.0.2: version "1.0.11" resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.11.tgz#5c1fb1f117477114f0632a0eb4b71b3cb0fd3171" dependencies: @@ -4582,7 +4588,7 @@ git-repo-info@^1.1.2, git-repo-info@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/git-repo-info/-/git-repo-info-1.4.1.tgz#2a072823254aaf62fcf0766007d7b6651bd41943" -git-repo-version@^1.0.0: +git-repo-version@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/git-repo-version/-/git-repo-version-1.0.2.tgz#2c8e9bee5d970cafc0dd58480f9dc56d9afe8e4f" dependencies: @@ -4678,8 +4684,8 @@ global-prefix@^1.0.1: which "^1.2.14" globals@^11.0.1, globals@^11.1.0: - version "11.4.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.4.0.tgz#b85c793349561c16076a3c13549238a27945f1bc" + version "11.5.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.5.0.tgz#6bc840de6771173b191f13d3a9c94d441ee92642" globals@^6.4.0: version "6.4.1" @@ -4709,8 +4715,8 @@ globule@^1.0.0: minimatch "~3.0.2" got@^8.0.1: - version "8.3.0" - resolved 
"https://registry.yarnpkg.com/got/-/got-8.3.0.tgz#6ba26e75f8a6cc4c6b3eb1fe7ce4fec7abac8533" + version "8.3.1" + resolved "https://registry.yarnpkg.com/got/-/got-8.3.1.tgz#093324403d4d955f5a16a7a8d39955d055ae10ed" dependencies: "@sindresorhus/is" "^0.7.0" cacheable-request "^2.1.1" @@ -4752,10 +4758,6 @@ handlebars@^4.0.4: optionalDependencies: uglify-js "^2.6" -har-schema@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-1.0.5.tgz#d263135f43307c02c602afc8fe95970c0151369e" - har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" @@ -4769,13 +4771,6 @@ har-validator@~2.0.6: is-my-json-valid "^2.12.4" pinkie-promise "^2.0.0" -har-validator@~4.2.1: - version "4.2.1" - resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-4.2.1.tgz#33481d0f1bbff600dd203d75812a6a5fba002e2a" - dependencies: - ajv "^4.9.1" - har-schema "^1.0.5" - har-validator@~5.0.3: version "5.0.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.0.3.tgz#ba402c266194f15956ef15e0fcf242993f6a7dfd" @@ -4783,12 +4778,6 @@ har-validator@~5.0.3: ajv "^5.1.0" har-schema "^2.0.0" -has-ansi@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-0.1.0.tgz#84f265aae8c0e6a88a12d7022894b7568894c62e" - dependencies: - ansi-regex "^0.2.0" - has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" @@ -4856,12 +4845,6 @@ has@^1.0.0: dependencies: function-bind "^1.0.2" -hash-base@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-2.0.2.tgz#66ea1d856db4e8a5470cadf6fce23ae5244ef2e1" - dependencies: - inherits "^2.0.1" - hash-base@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" @@ -4885,7 +4868,7 @@ hash.js@^1.0.0, hash.js@^1.0.3: inherits "^2.0.3" minimalistic-assert "^1.0.0" -hawk@3.1.3, hawk@~3.1.3: +hawk@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4" dependencies: @@ -4904,8 +4887,8 @@ hawk@~6.0.2: sntp "2.x.x" heimdalljs-fs-monitor@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/heimdalljs-fs-monitor/-/heimdalljs-fs-monitor-0.1.0.tgz#d404a65688c6714c485469ed3495da4853440272" + version "0.1.1" + resolved "https://registry.yarnpkg.com/heimdalljs-fs-monitor/-/heimdalljs-fs-monitor-0.1.1.tgz#acaf5ebf7137bc2fc98e811e31ae4b121c3a75a3" dependencies: heimdalljs "^0.2.0" heimdalljs-logger "^0.1.7" @@ -4969,7 +4952,7 @@ homedir-polyfill@^1.0.0, homedir-polyfill@^1.0.1: dependencies: parse-passwd "^1.0.0" -hosted-git-info@^2.1.4, hosted-git-info@^2.5.0: +hosted-git-info@^2.1.4, hosted-git-info@^2.6.0: version "2.6.0" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.6.0.tgz#23235b29ab230c576aab0d4f13fc046b0b038222" @@ -5000,15 +4983,16 @@ http-errors@~1.6.2: statuses ">= 1.4.0 < 2" http-parser-js@>=0.4.0: - version "0.4.11" - resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.11.tgz#5b720849c650903c27e521633d94696ee95f3529" + version "0.4.12" + resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.12.tgz#b9cfbf4a2cf26f0fc34b10ca1489a27771e3474f" http-proxy@^1.13.1, http-proxy@^1.9.0: - version "1.16.2" - resolved 
"https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.16.2.tgz#06dff292952bf64dbe8471fa9df73066d4f37742" + version "1.17.0" + resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.17.0.tgz#7ad38494658f84605e2f6db4436df410f4e5be9a" dependencies: - eventemitter3 "1.x.x" - requires-port "1.x.x" + eventemitter3 "^3.0.0" + follow-redirects "^1.0.0" + requires-port "^1.0.0" http-signature@~1.1.0: version "1.1.1" @@ -5038,17 +5022,29 @@ humanize@^0.0.9: version "0.0.0" resolved "https://github.com/fnando/i18n-js/archive/v3.0.0.rc15.tar.gz#006992545a89fcb12808afec0740f9ec4b393631" -iconv-lite@0.4.19, iconv-lite@^0.4.17, iconv-lite@^0.4.5, iconv-lite@~0.4.13: +iconv-lite@0.4.19: version "0.4.19" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.19.tgz#f7468f60135f5e5dad3399c0a81be9a1603a082b" +iconv-lite@^0.4.17, iconv-lite@^0.4.4, iconv-lite@^0.4.5, iconv-lite@~0.4.13: + version "0.4.23" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.23.tgz#297871f63be507adcfbfca715d0cd0eed84e9a63" + dependencies: + safer-buffer ">= 2.1.2 < 3" + ieee754@^1.1.4: version "1.1.11" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.11.tgz#c16384ffe00f5b7835824e67b6f2bd44a5229455" +ignore-walk@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.1.tgz#a83e62e7d272ac0e3b551aaa82831a19b69f82f8" + dependencies: + minimatch "^3.0.4" + ignore@^3.3.3: - version "3.3.7" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.7.tgz#612289bfb3c220e186a58118618d5be8c1bab021" + version "3.3.8" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.8.tgz#3f8e9c35d38708a3a7e0e9abb6c73e7ee7707b2b" imagesloaded@>=3.0.0: version "4.1.4" @@ -5156,14 +5152,15 @@ inquirer@^3.0.6: through "^2.3.6" insert-module-globals@^7.0.0: - version "7.0.5" - resolved "https://registry.yarnpkg.com/insert-module-globals/-/insert-module-globals-7.0.5.tgz#6d0a6f28d4a7e0eae171ad305e0f47bdfe0c258e" + version "7.0.6" + resolved "https://registry.yarnpkg.com/insert-module-globals/-/insert-module-globals-7.0.6.tgz#15a31d9d394e76d08838b9173016911d7fd4ea1b" dependencies: JSONStream "^1.0.3" combine-source-map "^0.8.0" concat-stream "^1.6.1" is-buffer "^1.1.0" lexical-scope "^1.2.0" + path-is-absolute "^1.0.1" process "~0.11.0" through2 "^2.0.0" xtend "^4.0.0" @@ -5489,8 +5486,8 @@ isurl@^1.0.0-alpha5: is-object "^1.0.1" jQuery-QueryBuilder@~2.5.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/jQuery-QueryBuilder/-/jQuery-QueryBuilder-2.5.1.tgz#3c432772c20596064fba9fe6c2451a6b8b940359" + version "2.5.2" + resolved "https://registry.yarnpkg.com/jQuery-QueryBuilder/-/jQuery-QueryBuilder-2.5.2.tgz#be337dc3ec01a35380fa0f7ee9366a5fc75a0dd1" dependencies: bootstrap ">=3.1.0 <4" dot ">=1.0.3" @@ -5730,8 +5727,8 @@ load-json-file@^1.0.0: strip-bom "^2.0.0" loader.js@^4.2.3: - version "4.6.0" - resolved "https://registry.yarnpkg.com/loader.js/-/loader.js-4.6.0.tgz#b965663ddbe2d80da482454cb865efe496e93e22" + version "4.7.0" + resolved "https://registry.yarnpkg.com/loader.js/-/loader.js-4.7.0.tgz#a1a52902001c83631efde9688b8ab3799325ef1f" locate-path@^2.0.0: version "2.0.0" @@ -5741,8 +5738,8 @@ locate-path@^2.0.0: path-exists "^3.0.0" lodash-es@~4.17.5: - version "4.17.8" - resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.8.tgz#6fa8c8c5d337481df0bdf1c0d899d42473121e45" + version "4.17.10" + resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.10.tgz#62cd7104cdf5dd87f235a837f0ede0e8e5117e05" 
lodash._baseassign@^3.0.0: version "3.2.0" @@ -6122,9 +6119,9 @@ lodash@^3.10.0, lodash@^3.10.1, lodash@^3.9.3: version "3.10.1" resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6" -lodash@^4.0.0, lodash@^4.14.0, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0, lodash@^4.5.1, lodash@^4.6.1, lodash@~4.17.4, lodash@~4.17.5: - version "4.17.5" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.5.tgz#99a92d65c0272debe8c96b6057bc8fbfa3bed511" +lodash@^4.0.0, lodash@^4.14.0, lodash@^4.17.10, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0, lodash@^4.5.1, lodash@^4.6.1, lodash@~4.17.4, lodash@~4.17.5: + version "4.17.10" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.10.tgz#1b7793cf7259ea38fb3661d4d38b3260af8ae4e7" log-symbols@^2.2.0: version "2.2.0" @@ -6162,15 +6159,15 @@ lowercase-keys@^1.0.0: resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" lru-cache@^4.0.1: - version "4.1.2" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.2.tgz#45234b2e6e2f2b33da125624c4664929a0224c3f" + version "4.1.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.3.tgz#a1175cf3496dfc8436c156c334b4955992bce69c" dependencies: pseudomap "^1.0.2" yallist "^2.1.2" make-dir@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.2.0.tgz#6d6a49eead4aae296c53bbf3a1a008bd6c89469b" + version "1.3.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" dependencies: pify "^3.0.0" @@ -6224,6 +6221,10 @@ matcher-collection@^1.0.0, matcher-collection@^1.0.5: dependencies: minimatch "^3.0.2" +math-random@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.1.tgz#8b3aac588b8a66e4975e3cdea67f7bb329601fac" + md5-hex@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/md5-hex/-/md5-hex-1.3.0.tgz#d2c4afe983c4370662179b8cad145219135046c4" @@ -6376,8 +6377,8 @@ mimic-response@^1.0.0: resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.0.tgz#df3d3652a73fded6b9b0b24146e6fd052353458e" minimalistic-assert@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.0.tgz#702be2dda6b37f4836bcb3f5db56641b64a1d3d3" + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1: version "1.0.1" @@ -6407,6 +6408,19 @@ minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" +minipass@^2.2.1, minipass@^2.2.4: + version "2.3.0" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.3.0.tgz#2e11b1c46df7fe7f1afbe9a490280add21ffe384" + dependencies: + safe-buffer "^5.1.1" + yallist "^3.0.0" + +minizlib@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.1.0.tgz#11e13658ce46bc3a70a267aac58359d1e0c29ceb" + dependencies: + minipass "^2.2.1" + mixin-deep@^1.2.0: version "1.3.1" resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.1.tgz#a49e7268dce1a0d9698e45326c5626df3543d0fe" @@ -6449,14 +6463,14 @@ module-deps@^4.0.8: xtend "^4.0.0" moment-timezone@^0.5.14: - version "0.5.14" - resolved 
"https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.14.tgz#4eb38ff9538b80108ba467a458f3ed4268ccfcb1" + version "0.5.16" + resolved "https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.16.tgz#661717d5f55b4d2c2e002262d726c83785192a5a" dependencies: moment ">= 2.9.0" "moment@>= 2.9.0", moment@^2.21.0, moment@^2.9.0: - version "2.22.0" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.22.0.tgz#7921ade01017dd45186e7fee5f424f0b8663a730" + version "2.22.1" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.22.1.tgz#529a2e9bf973f259c9643d237fda84de3a26e8ad" morgan@^1.8.1: version "1.9.0" @@ -6488,7 +6502,7 @@ mute-stream@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" -nan@^2.10.0, nan@^2.3.0: +nan@^2.10.0, nan@^2.9.2: version "2.10.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.10.0.tgz#96d0cd610ebd58d4b4de9cc0c6828cda99c7548f" @@ -6513,6 +6527,14 @@ natural-compare@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" +needle@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/needle/-/needle-2.2.1.tgz#b5e325bd3aae8c2678902fa296f729455d1d3a7d" + dependencies: + debug "^2.1.2" + iconv-lite "^0.4.4" + sax "^1.2.4" + negotiator@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9" @@ -6577,25 +6599,24 @@ node-notifier@^5.0.1: shellwords "^0.1.1" which "^1.3.0" -node-pre-gyp@^0.6.39: - version "0.6.39" - resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.39.tgz#c00e96860b23c0e1420ac7befc5044e1d78d8649" +node-pre-gyp@^0.9.0: + version "0.9.1" + resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.9.1.tgz#f11c07516dd92f87199dbc7e1838eab7cd56c9e0" dependencies: detect-libc "^1.0.2" - hawk "3.1.3" mkdirp "^0.5.1" + needle "^2.2.0" nopt "^4.0.1" + npm-packlist "^1.1.6" npmlog "^4.0.2" rc "^1.1.7" - request "2.81.0" rimraf "^2.6.1" semver "^5.3.0" - tar "^2.2.1" - tar-pack "^3.4.0" + tar "^4" node-sass@^4.7.2: - version "4.8.3" - resolved "https://registry.yarnpkg.com/node-sass/-/node-sass-4.8.3.tgz#d077cc20a08ac06f661ca44fb6f19cd2ed41debb" + version "4.9.0" + resolved "https://registry.yarnpkg.com/node-sass/-/node-sass-4.9.0.tgz#d1b8aa855d98ed684d6848db929a20771cc2ae52" dependencies: async-foreach "^0.1.3" chalk "^1.1.1" @@ -6653,19 +6674,30 @@ normalize-url@2.0.1: query-string "^5.0.1" sort-keys "^2.0.0" +npm-bundled@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.3.tgz#7e71703d973af3370a9591bafe3a63aca0be2308" + npm-git-info@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/npm-git-info/-/npm-git-info-1.0.3.tgz#a933c42ec321e80d3646e0d6e844afe94630e1d5" npm-package-arg@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/npm-package-arg/-/npm-package-arg-6.0.0.tgz#8cce04b49d3f9faec3f56b0fe5f4391aeb9d2fac" + version "6.1.0" + resolved "https://registry.yarnpkg.com/npm-package-arg/-/npm-package-arg-6.1.0.tgz#15ae1e2758a5027efb4c250554b85a737db7fcc1" dependencies: - hosted-git-info "^2.5.0" - osenv "^0.1.4" - semver "^5.4.1" + hosted-git-info "^2.6.0" + osenv "^0.1.5" + semver "^5.5.0" validate-npm-package-name "^3.0.0" +npm-packlist@^1.1.6: + version "1.1.10" + resolved 
"https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.1.10.tgz#1039db9e985727e464df066f4cf0ab6ef85c398a" + dependencies: + ignore-walk "^3.0.1" + npm-bundled "^1.0.1" + npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" @@ -6742,7 +6774,7 @@ on-headers@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.1.tgz#928f5d0f470d49342651ea6794b0857c100693f7" -once@^1.3.0, once@^1.3.3: +once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" dependencies: @@ -6777,8 +6809,8 @@ optionator@^0.8.2: wordwrap "~1.0.0" ora@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ora/-/ora-2.0.0.tgz#8ec3a37fa7bffb54a3a0c188a1f6798e7e1827cd" + version "2.1.0" + resolved "https://registry.yarnpkg.com/ora/-/ora-2.1.0.tgz#6caf2830eb924941861ec53a173799e008b51e5b" dependencies: chalk "^2.3.1" cli-cursor "^2.1.0" @@ -6809,7 +6841,7 @@ os-tmpdir@^1.0.0, os-tmpdir@^1.0.1, os-tmpdir@~1.0.1, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" -osenv@0, osenv@^0.1.3, osenv@^0.1.4: +osenv@0, osenv@^0.1.3, osenv@^0.1.4, osenv@^0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" dependencies: @@ -6825,8 +6857,8 @@ output-file-sync@^1.1.0: object-assign "^4.1.0" p-cancelable@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.4.0.tgz#bcb41d35bf6097fc4367a065b6eb84b9b124eff0" + version "0.4.1" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.4.1.tgz#35f363d67d52081c8d9585e37bcceb7e0bbcb2a0" p-finally@^1.0.0: version "1.0.0" @@ -6869,8 +6901,8 @@ parents@^1.0.0, parents@^1.0.1: path-platform "~0.11.15" parse-asn1@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.0.tgz#37c4f9b7ed3ab65c74817b5f2480937fbf97c712" + version "5.1.1" + resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.1.tgz#f6bf293818332bd0dab54efb16087724745e6ca8" dependencies: asn1.js "^4.0.0" browserify-aes "^1.0.0" @@ -6978,8 +7010,8 @@ path-type@^1.0.0: pinkie-promise "^2.0.0" pbkdf2@^3.0.3: - version "3.0.14" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.14.tgz#a35e13c64799b06ce15320f459c230e68e73bade" + version "3.0.16" + resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.16.tgz#7404208ec6b01b62d85bf83853a8064f8d9c2a5c" dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" @@ -6987,10 +7019,6 @@ pbkdf2@^3.0.3: safe-buffer "^5.0.1" sha.js "^2.4.8" -performance-now@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5" - performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" @@ -7023,9 +7051,9 @@ pnotify@~3.2.1: dependencies: jquery ">=1.6.0" -popper.js@^1.12.9: - version "1.14.1" - resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.14.1.tgz#b8815e5cda6f62fc2042e47618649f75866e6753" +popper.js@^1.14.1: + version "1.14.3" + resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.14.3.tgz#1438f98d046acf7b4d78cd502bf418ac64d4f095" portfinder@^1.0.7: version "1.0.13" @@ -7055,7 +7083,7 @@ 
printf@^0.2.3: version "0.2.5" resolved "https://registry.yarnpkg.com/printf/-/printf-0.2.5.tgz#c438ca2ca33e3927671db4ab69c0e52f936a4f0f" -private@^0.1.6, private@^0.1.7, private@~0.1.5: +private@^0.1.6, private@^0.1.8, private@~0.1.5: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" @@ -7099,8 +7127,8 @@ pseudomap@^1.0.2: resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" public-encrypt@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.0.tgz#39f699f3a46560dd5ebacbca693caf7c65c18cc6" + version "4.0.2" + resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.2.tgz#46eb9107206bf73489f8b85b69d91334c6610994" dependencies: bn.js "^4.1.0" browserify-rsa "^4.0.0" @@ -7120,18 +7148,18 @@ q@^1.1.2: version "1.5.1" resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" -qs@6.5.1, qs@^6.4.0, qs@~6.5.1: +qs@6.5.1: version "6.5.1" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8" +qs@^6.4.0, qs@~6.5.1: + version "6.5.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" + qs@~6.3.0: version "6.3.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.3.2.tgz#e75bd5f6e268122a2a0e0bda630b2550c166502c" -qs@~6.4.0: - version "6.4.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233" - qtip2@~3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/qtip2/-/qtip2-3.0.3.tgz#7df088ae4412c24a4064de69e824cb3cf76210dc" @@ -7175,12 +7203,13 @@ qunit@^2.5.0: resolve "1.5.0" walk-sync "0.3.2" -randomatic@^1.1.3: - version "1.1.7" - resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.7.tgz#c7abe9cc8b87c0baa876b19fde83fd464797e38c" +randomatic@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.0.0.tgz#d35490030eb4f7578de292ce6dfb04a91a128923" dependencies: - is-number "^3.0.0" - kind-of "^4.0.0" + is-number "^4.0.0" + kind-of "^6.0.0" + math-random "^1.0.1" randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: version "2.0.6" @@ -7216,10 +7245,10 @@ raw-body@~1.1.0: string_decoder "0.10" rc@^1.1.7: - version "1.2.6" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.6.tgz#eb18989c6d4f4f162c399f79ddd29f3835568092" + version "1.2.7" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.7.tgz#8a10ca30d588d00464360372b890d06dacd02297" dependencies: - deep-extend "~0.4.0" + deep-extend "^0.5.1" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" @@ -7245,16 +7274,16 @@ read-pkg@^1.0.0: normalize-package-data "^2.3.2" path-type "^1.0.0" -readable-stream@^2, readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.4, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3: - version "2.3.5" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.5.tgz#b4f85003a938cbb6ecbce2a124fb1012bd1a838d" +readable-stream@^2, readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.6: + version "2.3.6" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" dependencies: core-util-is "~1.0.0" 
inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~2.0.0" safe-buffer "~5.1.1" - string_decoder "~1.0.3" + string_decoder "~1.1.1" util-deprecate "~1.0.1" readable-stream@~1.0.2: @@ -7467,33 +7496,6 @@ request@2: tunnel-agent "^0.6.0" uuid "^3.1.0" -request@2.81.0: - version "2.81.0" - resolved "https://registry.yarnpkg.com/request/-/request-2.81.0.tgz#c6928946a0e06c5f8d6f8a9333469ffda46298a0" - dependencies: - aws-sign2 "~0.6.0" - aws4 "^1.2.1" - caseless "~0.12.0" - combined-stream "~1.0.5" - extend "~3.0.0" - forever-agent "~0.6.1" - form-data "~2.1.1" - har-validator "~4.2.1" - hawk "~3.1.3" - http-signature "~1.1.0" - is-typedarray "~1.0.0" - isstream "~0.1.2" - json-stringify-safe "~5.0.1" - mime-types "~2.1.7" - oauth-sign "~0.8.1" - performance-now "^0.2.0" - qs "~6.4.0" - safe-buffer "^5.0.1" - stringstream "~0.0.4" - tough-cookie "~2.3.0" - tunnel-agent "^0.6.0" - uuid "^3.0.0" - request@~2.79.0: version "2.79.0" resolved "https://registry.yarnpkg.com/request/-/request-2.79.0.tgz#4dfe5bf6be8b8cdc37fcf93e04b65577722710de" @@ -7540,7 +7542,7 @@ require-uncached@^1.0.3: caller-path "^0.1.0" resolve-from "^1.0.0" -requires-port@1.x.x: +requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" @@ -7577,8 +7579,8 @@ resolve@1.5.0: path-parse "^1.0.5" resolve@^1.1.2, resolve@^1.1.3, resolve@^1.1.4, resolve@^1.1.6, resolve@^1.1.7, resolve@^1.3.0, resolve@^1.3.3, resolve@^1.4.0, resolve@^1.5.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.6.0.tgz#0fbd21278b27b4004481c395349e7aba60a9ff5c" + version "1.7.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.7.1.tgz#aadd656374fd298aee895bc026b8297418677fd3" dependencies: path-parse "^1.0.5" @@ -7612,7 +7614,7 @@ right-align@^0.1.1: dependencies: align-text "^0.1.1" -rimraf@2, rimraf@^2.2.8, rimraf@^2.3.2, rimraf@^2.3.4, rimraf@^2.4.3, rimraf@^2.4.4, rimraf@^2.5.1, rimraf@^2.5.3, rimraf@^2.5.4, rimraf@^2.6.1, rimraf@^2.6.2: +rimraf@2, rimraf@^2.2.8, rimraf@^2.3.2, rimraf@^2.3.4, rimraf@^2.4.3, rimraf@^2.4.4, rimraf@^2.5.3, rimraf@^2.5.4, rimraf@^2.6.1, rimraf@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36" dependencies: @@ -7623,10 +7625,10 @@ rimraf@~2.2.6, rimraf@~2.2.8: resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.2.8.tgz#e439be2aaee327321952730f99a8929e4fc50582" ripemd160@^2.0.0, ripemd160@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.1.tgz#0f4584295c53a3628af7e6d79aca21ce57d1c6e7" + version "2.0.2" + resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" dependencies: - hash-base "^2.0.0" + hash-base "^3.0.0" inherits "^2.0.1" rollup@^0.41.4: @@ -7643,10 +7645,6 @@ rsvp@^4.0.1, rsvp@^4.6.1, rsvp@^4.7.0: version "4.8.2" resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.2.tgz#9d5647108735784eb13418cdddb56f75b919d722" -rsvp@~3.0.6: - version "3.0.21" - resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-3.0.21.tgz#49c588fe18ef293bcd0ab9f4e6756e6ac433359f" - rsvp@~3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-3.2.1.tgz#07cb4a5df25add9e826ebc67dcc9fd89db27d84a" @@ -7671,10 +7669,14 @@ rx@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/rx/-/rx-4.1.0.tgz#a5f13ff79ef3b740fe30aa803fb09f98805d4782" -safe-buffer@5.1.1, safe-buffer@^5.0.1, 
safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" +safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + safe-json-parse@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/safe-json-parse/-/safe-json-parse-1.0.1.tgz#3e76723e38dfdda13c9b1d29a1e07ffee4b30b57" @@ -7685,11 +7687,16 @@ safe-regex@^1.1.0: dependencies: ret "~0.1.10" +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + sane@^2.2.0, sane@^2.4.1: - version "2.5.0" - resolved "https://registry.yarnpkg.com/sane/-/sane-2.5.0.tgz#6359cd676f5efd9988b264d8ce3b827dd6b27bec" + version "2.5.2" + resolved "https://registry.yarnpkg.com/sane/-/sane-2.5.2.tgz#b4dc1861c21b427e929507a3e751e2a2cb8ab3fa" dependencies: anymatch "^2.0.0" + capture-exit "^1.2.0" exec-sh "^0.2.0" fb-watchman "^2.0.0" micromatch "^3.1.4" @@ -7697,7 +7704,7 @@ sane@^2.2.0, sane@^2.4.1: walker "~1.0.5" watch "~0.18.0" optionalDependencies: - fsevents "^1.1.1" + fsevents "^1.2.3" sass-graph@^2.2.4: version "2.2.4" @@ -7708,6 +7715,10 @@ sass-graph@^2.2.4: scss-tokenizer "^0.2.3" yargs "^7.0.0" +sax@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + scss-tokenizer@^0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/scss-tokenizer/-/scss-tokenizer-0.2.3.tgz#8eb06db9a9723333824d3f5530641149847ce5d1" @@ -7964,16 +7975,17 @@ sort-object-keys@^1.1.1: resolved "https://registry.yarnpkg.com/sort-object-keys/-/sort-object-keys-1.1.2.tgz#d3a6c48dc2ac97e6bc94367696e03f6d09d37952" sort-package-json@^1.4.0: - version "1.11.0" - resolved "https://registry.yarnpkg.com/sort-package-json/-/sort-package-json-1.11.0.tgz#b7b59ebdfaf3f8719ec0bc2056264e937868cbfb" + version "1.13.0" + resolved "https://registry.yarnpkg.com/sort-package-json/-/sort-package-json-1.13.0.tgz#c029eeb99c72918b468b9c2a807b7de48cc0cfcf" dependencies: + detect-indent "^5.0.0" sort-object-keys "^1.1.1" source-map-resolve@^0.5.0: - version "0.5.1" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.1.tgz#7ad0f593f2281598e854df80f19aae4b92d7a11a" + version "0.5.2" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" dependencies: - atob "^2.0.0" + atob "^2.1.1" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" @@ -8100,8 +8112,8 @@ sshpk@^1.7.0: tweetnacl "~0.14.0" stable@~0.1.3: - version "0.1.6" - resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.6.tgz#910f5d2aed7b520c6e777499c1f32e139fdecb10" + version "0.1.8" + resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" static-extend@^0.1.1: version "0.1.2" @@ -8139,12 +8151,12 @@ stream-combiner2@^1.1.1: readable-stream "^2.0.2" stream-http@^2.0.0: - version "2.8.1" - resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.1.tgz#d0441be1a457a73a733a8a7b53570bebd9ef66a4" + version "2.8.2" + resolved 
"https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.2.tgz#4126e8c6b107004465918aa2fc35549e77402c87" dependencies: builtin-status-codes "^3.0.0" inherits "^2.0.1" - readable-stream "^2.3.3" + readable-stream "^2.3.6" to-arraybuffer "^1.0.0" xtend "^4.0.0" @@ -8182,9 +8194,9 @@ string_decoder@0.10, string_decoder@~0.10.0, string_decoder@~0.10.x: version "0.10.31" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" -string_decoder@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab" +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" dependencies: safe-buffer "~5.1.0" @@ -8200,12 +8212,6 @@ stringstream@~0.0.4, stringstream@~0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878" -strip-ansi@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.3.0.tgz#25f48ea22ca79187f3174a4db8759347bb126220" - dependencies: - ansi-regex "^0.2.1" - strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" @@ -8258,17 +8264,13 @@ sum-up@^1.0.1: dependencies: chalk "^1.0.0" -supports-color@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-0.2.0.tgz#d92de2694eb3f67323973d7ae3d8b55b4c22190a" - supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" supports-color@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.3.0.tgz#5b24ac15db80fa927cf5227a4a33fd3c4c7676c0" + version "5.4.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.4.0.tgz#1c6b337402c2137605efe19f10fec390f6faab54" dependencies: has-flag "^3.0.0" @@ -8302,20 +8304,7 @@ tap-parser@^5.1.0: optionalDependencies: readable-stream "^2" -tar-pack@^3.4.0: - version "3.4.1" - resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.1.tgz#e1dbc03a9b9d3ba07e896ad027317eb679a10a1f" - dependencies: - debug "^2.2.0" - fstream "^1.0.10" - fstream-ignore "^1.0.5" - once "^1.3.3" - readable-stream "^2.1.4" - rimraf "^2.5.1" - tar "^2.2.1" - uid-number "^0.0.6" - -tar@^2.0.0, tar@^2.2.1: +tar@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1" dependencies: @@ -8323,6 +8312,18 @@ tar@^2.0.0, tar@^2.2.1: fstream "^1.0.2" inherits "2" +tar@^4: + version "4.4.2" + resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.2.tgz#60685211ba46b38847b1ae7ee1a24d744a2cd462" + dependencies: + chownr "^1.0.1" + fs-minipass "^1.2.5" + minipass "^2.2.4" + minizlib "^1.1.0" + mkdirp "^0.5.0" + safe-buffer "^5.1.2" + yallist "^3.0.2" + temp@0.8.3: version "0.8.3" resolved "https://registry.yarnpkg.com/temp/-/temp-0.8.3.tgz#e0c6bc4d26b903124410e4fed81103014dfc1f59" @@ -8331,8 +8332,8 @@ temp@0.8.3: rimraf "~2.2.6" testem@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/testem/-/testem-2.2.0.tgz#4c9d0f9daaa2b05b9a3c8dde2e486bbf80b0f266" + version "2.4.0" + resolved 
"https://registry.yarnpkg.com/testem/-/testem-2.4.0.tgz#c74e9bbc5269dc102dad6b95979888c397c6e712" dependencies: backbone "^1.1.2" bluebird "^3.4.6" @@ -8559,10 +8560,6 @@ uglify-to-browserify@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" -uid-number@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81" - ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" @@ -8579,8 +8576,8 @@ underscore.string@~3.3.4: util-deprecate "^1.0.2" underscore@>=1.8.3: - version "1.8.3" - resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.8.3.tgz#4f3fb53b106e6097fcf9cb4109f2a5e9bdfa5022" + version "1.9.0" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.9.0.tgz#31dbb314cfcc88f169cd3692d9149d81a00a73e4" union-value@^1.0.0: version "1.0.0" @@ -8867,6 +8864,10 @@ yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" +yallist@^3.0.0, yallist@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.0.2.tgz#8452b4bb7e83c7c188d8041c1a837c773d6d8bb9" + yam@0.0.22: version "0.0.22" resolved "https://registry.yarnpkg.com/yam/-/yam-0.0.22.tgz#38a76cb79a19284d9206ed49031e359a1340bd06" diff --git a/src/api-umbrella/web-app/Gemfile b/src/api-umbrella/web-app/Gemfile index a588b2371..7d7cae2ba 100644 --- a/src/api-umbrella/web-app/Gemfile +++ b/src/api-umbrella/web-app/Gemfile @@ -3,7 +3,7 @@ source "https://rubygems.org" gem "rails", "~> 4.2.10" # Rails app server -gem "puma", "~> 3.11.3" +gem "puma", "~> 3.11.4" # Error notification service (optional) gem "rollbar", "~> 2.15.5" @@ -16,10 +16,10 @@ gem "rack-proxy", "~> 0.6.4" # JSON handling gem "multi_json", "~> 1.13.1" -gem "oj", "~> 3.5.0", :platforms => [:ruby] +gem "oj", "~> 3.6.0", :platforms => [:ruby] # SQL escape libraries for Kylin analytics. 
-gem "sequel", "~> 5.6.0" +gem "sequel", "~> 5.8.0" # MongoDB gem "mongoid", "~> 5.2.1" @@ -49,7 +49,7 @@ gem "devise", "~> 4.4.3" gem "devise-i18n", "~> 1.6.0" gem "omniauth", "~> 1.8.1" gem "omniauth-cas", "~> 1.1.0", :git => "https://github.com/GUI/omniauth-cas.git", :branch => "rexml", :require => false -gem "omniauth-facebook", "~> 4.0.0", :require => false +gem "omniauth-facebook", "~> 5.0.0", :require => false gem "omniauth-github", "~> 1.3.0", :require => false gem "omniauth-gitlab", "~> 1.0.3", :require => false gem "omniauth-google-oauth2", "~> 0.5.3", :require => false diff --git a/src/api-umbrella/web-app/Gemfile.lock b/src/api-umbrella/web-app/Gemfile.lock index 73e82044c..9e704a372 100644 --- a/src/api-umbrella/web-app/Gemfile.lock +++ b/src/api-umbrella/web-app/Gemfile.lock @@ -72,14 +72,14 @@ GEM addressable (2.5.2) public_suffix (>= 2.0.2, < 4.0) arel (6.0.4) - autoprefixer-rails (8.2.0) + autoprefixer-rails (8.4.1) execjs awesome_print (1.8.0) bcrypt (3.1.11) bootstrap-sass (3.3.7) autoprefixer-rails (>= 5.2.1) sass (>= 3.3.4) - brakeman (4.2.1) + brakeman (4.3.0) bson (4.3.0) builder (3.2.3) bundler-audit (0.6.0) @@ -91,14 +91,14 @@ GEM money (~> 6.9) sixarm_ruby_unaccent (~> 1.1) unicode_utils (~> 1.4) - crass (1.0.3) + crass (1.0.4) css_parser (1.6.0) addressable csv_builder (2.1.1) actionpack (>= 3.0.0) daemons (1.2.6) - delayed_job (4.1.4) - activesupport (>= 3.0, < 5.2) + delayed_job (4.1.5) + activesupport (>= 3.0, < 5.3) delayed_job_mongoid (2.3.0) delayed_job (>= 3.0, < 5) mongoid (>= 3.0, < 7) @@ -109,7 +109,7 @@ GEM railties (>= 4.1.0, < 6.0) responders warden (~> 1.2.3) - devise-i18n (1.6.1) + devise-i18n (1.6.2) devise (>= 4.4) elasticsearch (2.0.2) elasticsearch-api (= 2.0.2) @@ -124,8 +124,8 @@ GEM faraday (0.12.2) multipart-post (>= 1.2, < 3) ffi (1.9.23) - font-awesome-rails (4.7.0.3) - railties (>= 3.2, < 5.2) + font-awesome-rails (4.7.0.4) + railties (>= 3.2, < 6.0) globalid (0.4.1) activesupport (>= 4.2.0) hashie (3.5.7) @@ -149,9 +149,9 @@ GEM mini_mime (1.0.0) mini_portile2 (2.3.0) minitest (5.11.3) - money (6.10.1) - i18n (>= 0.6.4, < 1.0) - mongo (2.5.1) + money (6.11.3) + i18n (>= 0.6.4, < 1.1) + mongo (2.5.3) bson (>= 4.3.0, < 5.0.0) mongoid (5.2.1) activemodel (~> 4.0) @@ -186,11 +186,11 @@ GEM multi_json (~> 1.3) multi_xml (~> 0.5) rack (>= 1.2, < 3) - oj (3.5.0) + oj (3.6.0) omniauth (1.8.1) hashie (>= 3.4.6, < 3.6.0) rack (>= 1.6.2, < 3) - omniauth-facebook (4.0.0) + omniauth-facebook (5.0.0) omniauth-oauth2 (~> 1.2) omniauth-github (1.3.0) omniauth (~> 1.5) @@ -221,13 +221,13 @@ GEM actionmailer (>= 3, < 6) premailer (~> 1.7, >= 1.7.9) public_suffix (3.0.2) - puma (3.11.3) + puma (3.11.4) pundit (1.1.0) activesupport (>= 3.0.0) pyu-ruby-sasl (0.0.3.3) rabl (0.13.1) activesupport (>= 2.3.14) - rack (1.6.9) + rack (1.6.10) rack-proxy (0.6.4) rack rack-test (0.6.3) @@ -267,7 +267,7 @@ GEM responders (2.4.0) actionpack (>= 4.2.0, < 5.3) railties (>= 4.2.0, < 5.3) - rollbar (2.15.5) + rollbar (2.15.6) multi_json rubyntlm (0.6.2) safe_yaml (1.0.4) @@ -282,7 +282,7 @@ GEM sprockets (>= 2.8, < 4.0) sprockets-rails (>= 2.0, < 4.0) tilt (>= 1.1, < 3) - sequel (5.6.0) + sequel (5.8.0) simple_form (3.5.1) actionpack (> 4, < 5.2) activemodel (> 4, < 5.2) @@ -333,16 +333,16 @@ DEPENDENCIES mongoid_store! multi_json (~> 1.13.1) nokogiri (~> 1.8.1) - oj (~> 3.5.0) + oj (~> 3.6.0) omniauth (~> 1.8.1) omniauth-cas (~> 1.1.0)! 
- omniauth-facebook (~> 4.0.0) + omniauth-facebook (~> 5.0.0) omniauth-github (~> 1.3.0) omniauth-gitlab (~> 1.0.3) omniauth-google-oauth2 (~> 0.5.3) omniauth-ldap (~> 2.0.0) premailer-rails (~> 1.10.2) - puma (~> 3.11.3) + puma (~> 3.11.4) pundit (~> 1.1.0) rabl (~> 0.13.1) rack-proxy (~> 0.6.4) @@ -353,7 +353,7 @@ DEPENDENCIES rollbar (~> 2.15.5) safe_yaml (~> 1.0.4) sass-rails (~> 5.0) - sequel (~> 5.6.0) + sequel (~> 5.8.0) simple_form (~> 3.5.1) BUNDLED WITH diff --git a/src/api-umbrella/web-app/db/migrate/20131127185950_fix_custom_rate_limits.rb b/src/api-umbrella/web-app/db/migrate/20131127185950_fix_custom_rate_limits.rb index ce8f2d035..23ab0b2a7 100644 --- a/src/api-umbrella/web-app/db/migrate/20131127185950_fix_custom_rate_limits.rb +++ b/src/api-umbrella/web-app/db/migrate/20131127185950_fix_custom_rate_limits.rb @@ -14,7 +14,7 @@ def self.up # Assign one of the limits to be primary if that hadn't gotten set. if(rate_limits.none? { |limit| limit.response_headers }) - primary = rate_limits.sort_by { |limit| limit.duration }.first + primary = rate_limits.min_by { |limit| limit.duration } primary.response_headers = true end diff --git a/templates/etc/perp/test-env-openldap/rc.main.mustache b/templates/etc/perp/test-env-openldap/rc.main.mustache index 88f185e88..eb0a511f4 100755 --- a/templates/etc/perp/test-env-openldap/rc.main.mustache +++ b/templates/etc/perp/test-env-openldap/rc.main.mustache @@ -24,11 +24,8 @@ if [ "${1}" = "start" ]; then set -x rm -rf "$db_dir" mkdir -p "$db_dir" - slapadd -F "$db_dir" -n 0 -l "$slapd_ldif_path" - slapadd -F "$db_dir" -l "$seed_ldif_path" - if [ -n "$api_umbrella_user" ]; then - chown -R "$api_umbrella_user" "$db_dir" - fi + runtool "${run_args[@]}" slapadd -F "$db_dir" -n 0 -l "$slapd_ldif_path" + runtool "${run_args[@]}" slapadd -F "$db_dir" -l "$seed_ldif_path" exec runtool "${run_args[@]}" slapd -d 1 -h "$bind" -F "$db_dir" fi diff --git a/templates/etc/test-env/mongo-orchestration/replica-set.json.mustache b/templates/etc/test-env/mongo-orchestration/replica-set.json.mustache index a8b5242ff..0e45d2b60 100644 --- a/templates/etc/test-env/mongo-orchestration/replica-set.json.mustache +++ b/templates/etc/test-env/mongo-orchestration/replica-set.json.mustache @@ -7,6 +7,7 @@ { "procParams": { "dbpath": "{{db_dir}}/mongodb-13090", + "ipv6": false, "logpath": "{{log_dir}}/mongod-13090.log", "nohttpinterface": true, "nojournal": true, @@ -23,7 +24,7 @@ { "procParams": { "dbpath": "{{db_dir}}/mongodb-13091", - "ipv6": true, + "ipv6": false, "logpath": "{{log_dir}}/mongod-13091.log", "nohttpinterface": true, "nojournal": true, @@ -40,7 +41,7 @@ { "procParams": { "dbpath": "{{db_dir}}/mongodb-13092", - "ipv6": true, + "ipv6": false, "logpath": "{{log_dir}}/mongod-13092.log", "nohttpinterface": true, "nojournal": true, diff --git a/test/support/capybara.rb b/test/support/capybara.rb index abc8ba0dd..caac949dd 100644 --- a/test/support/capybara.rb +++ b/test/support/capybara.rb @@ -1,3 +1,4 @@ +require "capybara/minitest" require "capybara/poltergeist" require "capybara-screenshot/minitest" require "support/api_umbrella_test_helpers/process" @@ -55,14 +56,22 @@ def write(msg) Capybara.app_host = "https://127.0.0.1:9081" Capybara.save_path = File.join(API_UMBRELLA_SRC_ROOT, "test/tmp/capybara") -class Minitest::Capybara::Test - # After each capybara test, also clear the memory cache in Poltergeist. 
This - # seems to be necessary to prevent Poltergeist from incorrectly caching - # redirect results across different tests, and other oddities: - # https://github.com/teampoltergeist/poltergeist/issues/754 - def teardown - super - ::Capybara.reset_session! - page.driver.clear_memory_cache +module Minitest + module Capybara + class Test < Minitest::Test + include ::Capybara::DSL + include ::Capybara::Minitest::Assertions + + # After each capybara test, also clear the memory cache in Poltergeist. This + # seems to be necessary to prevent Poltergeist from incorrectly caching + # redirect results across different tests, and other oddities: + # https://github.com/teampoltergeist/poltergeist/issues/754 + def teardown + super + ::Capybara.reset_session! + page.driver.clear_memory_cache + ::Capybara.use_default_driver + end + end end end From f8e4cb1d84f1e47d070b868817e5c989aba8a629 Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Sat, 12 May 2018 08:50:14 -0600 Subject: [PATCH 018/367] Adjust tests for Capybara 3 changes in whitespace normalization. --- test/admin_ui/test_apis.rb | 12 ++++++------ test/admin_ui/test_permissions_navigation.rb | 8 ++++---- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/test/admin_ui/test_apis.rb b/test/admin_ui/test_apis.rb index 66703604f..f0b202dfa 100644 --- a/test/admin_ui/test_apis.rb +++ b/test/admin_ui/test_apis.rb @@ -337,22 +337,22 @@ def test_form assert_selector("#" + field["data-ace-content-id"], :text => "bar") assert_equal("bar", find_by_id(field["data-raw-input-id"], :visible => :all).value) field = find_field("CSV Template", :visible => :all) - assert_selector("#" + field["data-ace-content-id"], :text => "foo,bar bar,foo") + assert_selector("#" + field["data-ace-content-id"], :text => "foo,bar\nbar,foo") assert_equal("foo,bar\nbar,foo", find_by_id(field["data-raw-input-id"], :visible => :all).value) field = find_field("API Key Missing", :visible => :all) - assert_selector("#" + field["data-ace-content-id"], :text => "foo1: bar1 bar1: foo1") + assert_selector("#" + field["data-ace-content-id"], :text => "foo1: bar1\nbar1: foo1") assert_equal("foo1: bar1\nbar1: foo1", find_by_id(field["data-raw-input-id"], :visible => :all).value) field = find_field("API Key Invalid", :visible => :all) - assert_selector("#" + field["data-ace-content-id"], :text => "foo2: bar2 bar2: foo2") + assert_selector("#" + field["data-ace-content-id"], :text => "foo2: bar2\nbar2: foo2") assert_equal("foo2: bar2\nbar2: foo2", find_by_id(field["data-raw-input-id"], :visible => :all).value) field = find_field("API Key Disabled", :visible => :all) - assert_selector("#" + field["data-ace-content-id"], :text => "foo3: bar3 bar3: foo3") + assert_selector("#" + field["data-ace-content-id"], :text => "foo3: bar3\nbar3: foo3") assert_equal("foo3: bar3\nbar3: foo3", find_by_id(field["data-raw-input-id"], :visible => :all).value) field = find_field("API Key Unauthorized", :visible => :all) - assert_selector("#" + field["data-ace-content-id"], :text => "foo4: bar4 bar4: foo4") + assert_selector("#" + field["data-ace-content-id"], :text => "foo4: bar4\nbar4: foo4") assert_equal("foo4: bar4\nbar4: foo4", find_by_id(field["data-raw-input-id"], :visible => :all).value) field = find_field("Over Rate Limit", :visible => :all) - assert_selector("#" + field["data-ace-content-id"], :text => "foo5: bar5 bar5: foo5") + assert_selector("#" + field["data-ace-content-id"], :text => "foo5: bar5\nbar5: foo5") assert_equal("foo5: bar5\nbar5: foo5", find_by_id(field["data-raw-input-id"], 
:visible => :all).value) end diff --git a/test/admin_ui/test_permissions_navigation.rb b/test/admin_ui/test_permissions_navigation.rb index d7f3d873a..377937f60 100644 --- a/test/admin_ui/test_permissions_navigation.rb +++ b/test/admin_ui/test_permissions_navigation.rb @@ -210,24 +210,24 @@ def test_user_view_forbidden def assert_nav(menus) nav = find("nav.navbar") - assert_equal(menus.join(" "), nav.text) + assert_equal(menus.join("\n"), nav.text) end def assert_analytics_menu(items) nav = find("nav.navbar") menu = nav.find(".nav-analytics .dropdown-menu", :visible => :hidden) - assert_equal(items.join(" "), menu.text(:all)) + assert_equal(items.join("\n"), menu.text(:all)) end def assert_users_menu(items) nav = find("nav.navbar") menu = nav.find(".nav-users .dropdown-menu", :visible => :hidden) - assert_equal(items.join(" "), menu.text(:all)) + assert_equal(items.join("\n"), menu.text(:all)) end def assert_config_menu(items) nav = find("nav.navbar") menu = nav.find(".nav-config .dropdown-menu", :visible => :hidden) - assert_equal(items.join(" "), menu.text(:all)) + assert_equal(items.join("\n"), menu.text(:all)) end end From f3ad8c6a45d64abe6a393d333e80ee7d1264f757 Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Sat, 12 May 2018 09:23:25 -0600 Subject: [PATCH 019/367] A couple more tweaks to fix capybara v3 upgrade changes. --- test/admin_ui/test_permissions_navigation.rb | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/admin_ui/test_permissions_navigation.rb b/test/admin_ui/test_permissions_navigation.rb index 377937f60..6ce0c53ae 100644 --- a/test/admin_ui/test_permissions_navigation.rb +++ b/test/admin_ui/test_permissions_navigation.rb @@ -15,8 +15,8 @@ def test_superuser assert_nav([ "API Umbrella", - "Analytics", - "Users", + "Analytics ", + "Users ", "Configuration", ]) @@ -216,18 +216,18 @@ def assert_nav(menus) def assert_analytics_menu(items) nav = find("nav.navbar") menu = nav.find(".nav-analytics .dropdown-menu", :visible => :hidden) - assert_equal(items.join("\n"), menu.text(:all)) + assert_equal(items.join(" "), menu.text(:all)) end def assert_users_menu(items) nav = find("nav.navbar") menu = nav.find(".nav-users .dropdown-menu", :visible => :hidden) - assert_equal(items.join("\n"), menu.text(:all)) + assert_equal(items.join(" "), menu.text(:all)) end def assert_config_menu(items) nav = find("nav.navbar") menu = nav.find(".nav-config .dropdown-menu", :visible => :hidden) - assert_equal(items.join("\n"), menu.text(:all)) + assert_equal(items.join(" "), menu.text(:all)) end end From 1e5ca7d7f2695923ed70dd3609d03459ca25de57 Mon Sep 17 00:00:00 2001 From: Nick Muerdter Date: Sat, 12 May 2018 12:29:17 -0600 Subject: [PATCH 020/367] Fix openssl key generation under OpenSSL 1.1+ openssl 1.1.0+ can't have some empty fields specified, so instead don't specify them at all. See https://github.com/NREL/api-umbrella/issues/408 --- src/api-umbrella/cli/setup.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/api-umbrella/cli/setup.lua b/src/api-umbrella/cli/setup.lua index 36a2aa8f8..db5baed61 100644 --- a/src/api-umbrella/cli/setup.lua +++ b/src/api-umbrella/cli/setup.lua @@ -131,7 +131,7 @@ local function generate_self_signed_cert() if not path.exists(ssl_key_path) or not path.exists(ssl_crt_path) then dir.makepath(ssl_dir) - local _, _, err = run_command("openssl req -new -newkey rsa:2048 -days 3650 -nodes -x509 -subj '/C=/ST=/L=/O=API Umbrella/CN=apiumbrella.example.com' -keyout " .. ssl_key_path .. " -out " .. 
ssl_crt_path) + local _, _, err = run_command("openssl req -new -newkey rsa:2048 -days 3650 -nodes -x509 -subj '/O=API Umbrella/CN=apiumbrella.example.com' -keyout " .. ssl_key_path .. " -out " .. ssl_crt_path) if err then print(err) os.exit(1)

From 4ac3d4888f6104b4e930f76e0c83673d02caa274 Mon Sep 17 00:00:00 2001
From: Nick Muerdter
Date: Sat, 12 May 2018 17:47:03 -0600
Subject: [PATCH 021/367] Update to Traffic Server v7. Ubuntu 18.04, Debian 9 builds. Rework builds.

- Add Ubuntu 18.04 (Bionic) and Debian 9 (Stretch) to our package building process.
- Remove Ubuntu 12.04 (Precise) and Debian 7 (Wheezy) from our package builds. Ubuntu 12.04 has reached its end of support and Debian 7 will reach its end of support at the end of this month.
- Upgrade from Traffic Server v5 to v7. This stemmed from needing to build under Ubuntu 18.04 where OpenSSL v1.1 is the default, and we need at least Traffic Server v6 to build against OpenSSL 1.1.0. There might still be some keepalive issues to sort out (which is why we were holding back), but since Traffic Server v5 isn't supported any longer, this update is due.
- Rework the build process. While this is a bit messy to get wrapped in with these other updates, this pulls over various improvements to the cmake build process that were in the lapis-postgres branch. I keep hitting issues that would benefit from these build improvements, and maintaining things on both branches is becoming more of a hassle, so we'll manually pull over the bulk of these build process improvements. The improvements mainly revolve around splitting the build process up into discrete targets. This better organizes the build files, but also provides easier ways to perform the build in specific steps, which helps with the Docker build process and better caching.
- Make various improvements to the Docker development environment so performing builds and re-builds is easier and more reliable. Now the symlinking happens in an entrypoint script, so that it still takes place on "run" commands. Also improve the build "configure" script so it always performs builds in the /build directory. All of these changes better ensure builds happen as they did on the initial build, even if you're manually running these commands on an existing docker dev container.
- Some other misc improvements pulled over from the lapis-postgres branch. Again, not quite ideal to be manually pulling these over, but this should help get some of these more general improvements live before lapis-postgres is merged in.
- Switch from luatz to icu-date for proper timezone handling.
- Bring over some improvements to startup logic and default yaml config logic.
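As a rough illustration of what the discrete targets enable, the staged invocation below mirrors the sequence the updated Dockerfile-dev-build runs in this patch (the target names come from the new top-level cmake includes; the comments are a sketch of each stage's contents, not authoritative build documentation, and the exact paths depend on where configure is run from):

    ./configure        # generate the cmake-based Makefile in the build directory
    make deps          # runtime dependencies (openresty, elasticsearch, mongodb, trafficserver, ...)
    make build-deps    # build-time-only tooling (nodejs, yarn)
    make app-deps      # Lua dependencies for the core app (luarocks and opm installs)
    make test-deps     # optional: test-only dependencies
    make               # full build of the remaining default targets (app, static-site, ...)

Running the stages one at a time like this is what lets each step become its own cached Docker layer during the dev image build.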
--- .dockerignore | 2 + CMakeLists.txt | 127 ++++----------- Dockerfile-dev-build | 44 +++-- bin/api-umbrella-env | 6 +- build/cmake/app-deps.cmake | 3 + .../cmake/{ => app-deps}/core-lua-deps.cmake | 62 ++++++- build/cmake/app.cmake | 8 + build/cmake/{ => app}/core-admin-ui.cmake | 19 --- build/cmake/app/core-locale.cmake | 33 ++++ build/cmake/{ => app}/core-web-app.cmake | 25 +-- build/cmake/{ => app}/core.cmake | 15 +- build/cmake/{ => app}/static-site.cmake | 7 +- build/cmake/build-deps.cmake | 3 + build/cmake/{dev => build-deps}/nodejs.cmake | 8 + build/cmake/clean-download-archives.cmake | 3 +- build/cmake/deps.cmake | 25 +++ build/cmake/{ => deps}/elasticsearch.cmake | 10 +- build/cmake/{ => deps}/libcidr.cmake | 5 + build/cmake/{ => deps}/libgeoip.cmake | 12 +- build/cmake/{ => deps}/mongodb.cmake | 5 + build/cmake/{ => deps}/mora.cmake | 7 + build/cmake/{ => deps}/openresty.cmake | 46 +++++- build/cmake/{ => deps}/perp.cmake | 5 + build/cmake/{ => deps}/rsyslog.cmake | 38 ++--- build/cmake/{ => deps}/ruby.cmake | 11 ++ build/cmake/{ => deps}/runit_svlogd.cmake | 7 +- build/cmake/{ => deps}/trafficserver.cmake | 11 +- build/cmake/distclean.cmake | 3 +- .../GetGitRevisionDescription.cmake | 0 .../GetGitRevisionDescription.cmake.in | 0 .../{ => functions}/GetGitTimestamp.cmake | 0 .../{ => functions}/luarocks_install.cmake | 1 + build/cmake/functions/opm_install.cmake | 22 +++ build/cmake/functions/require_program.cmake | 6 + build/cmake/hadoop-analytics/flume.cmake | 12 -- build/cmake/hadoop-analytics/kylin.cmake | 12 -- build/cmake/hadoop-analytics/presto.cmake | 12 -- build/cmake/hadoop-analytics/processor.cmake | 25 --- build/cmake/install.cmake | 22 +-- build/cmake/luarocks.cmake | 14 -- build/cmake/package.cmake | 29 +--- build/cmake/test-deps.cmake | 17 ++ build/cmake/test-deps/luacheck.cmake | 6 + build/cmake/{test => test-deps}/mailhog.cmake | 5 + .../mongo-orchestration.cmake | 8 +- .../cmake/{test => test-deps}/openldap.cmake | 5 + .../cmake/{test => test-deps}/phantomjs.cmake | 5 + .../{test => test-deps}/shellcheck.cmake | 4 + build/cmake/{test => test-deps}/unbound.cmake | 5 + build/cmake/test.cmake | 27 +++ build/cmake/test/bundle.cmake | 33 ---- build/cmake/test/lua-deps.cmake | 7 - build/cmake/test/test.cmake | 21 --- build/cmake/versions.cmake | 108 ------------ build/package/Makefile | 58 ++++--- build/package/docker_script | 2 +- build/package/publish | 4 +- build/package/verify/docker_script | 5 +- .../package/verify/download_previous_packages | 27 ++- build/package_dependencies.sh | 47 +++--- build/patches/opm.patch | 15 ++ build/scripts/download_cmake | 6 +- build/scripts/install_build_dependencies | 2 +- circle.yml | 2 +- config/default.yml | 31 +++- config/test.yml | 7 +- configure | 42 ++++- docker/Dockerfile | 2 +- docker/dev/docker-entrypoint | 37 +++++ docker/dev/docker-start | 17 -- scripts/rake/lint.rake | 2 +- scripts/rake/outdated_packages.rb | 47 ++---- src/api-umbrella/cli/read_config.lua | 59 ++++++- src/api-umbrella/cli/setup.lua | 154 ++++++++---------- src/api-umbrella/http-api/health.lua | 2 +- .../proxy/jobs/elasticsearch_setup.lua | 8 +- src/api-umbrella/proxy/log_utils.lua | 37 +++-- .../controllers/admin/sessions_controller.rb | 4 +- .../web-app/app/helpers/application_helper.rb | 2 +- src/api-umbrella/web-app/app/models/admin.rb | 4 +- .../app/views/devise/sessions/new.html.erb | 4 +- .../web-app/config/application.rb | 18 -- .../etc/perp/elasticsearch/rc.main.mustache | 1 + .../etc/trafficserver/logging.config.mustache | 8 + 
templates/etc/trafficserver/logs_xml.config | 10 -- .../etc/trafficserver/plugin.config.mustache | 2 +- .../etc/trafficserver/records.config.mustache | 6 +- test/proxy/test_config.rb | 31 ++-- 88 files changed, 863 insertions(+), 796 deletions(-) create mode 100644 build/cmake/app-deps.cmake rename build/cmake/{ => app-deps}/core-lua-deps.cmake (59%) create mode 100644 build/cmake/app.cmake rename build/cmake/{ => app}/core-admin-ui.cmake (71%) create mode 100644 build/cmake/app/core-locale.cmake rename build/cmake/{ => app}/core-web-app.cmake (63%) rename build/cmake/{ => app}/core.cmake (94%) rename build/cmake/{ => app}/static-site.cmake (81%) create mode 100644 build/cmake/build-deps.cmake rename build/cmake/{dev => build-deps}/nodejs.cmake (77%) create mode 100644 build/cmake/deps.cmake rename build/cmake/{ => deps}/elasticsearch.cmake (71%) rename build/cmake/{ => deps}/libcidr.cmake (80%) rename build/cmake/{ => deps}/libgeoip.cmake (78%) rename build/cmake/{ => deps}/mongodb.cmake (86%) rename build/cmake/{ => deps}/mora.cmake (78%) rename build/cmake/{ => deps}/openresty.cmake (65%) rename build/cmake/{ => deps}/perp.cmake (85%) rename build/cmake/{ => deps}/rsyslog.cmake (79%) rename build/cmake/{ => deps}/ruby.cmake (77%) rename build/cmake/{ => deps}/runit_svlogd.cmake (77%) rename build/cmake/{ => deps}/trafficserver.cmake (79%) rename build/cmake/{ => functions}/GetGitRevisionDescription.cmake (100%) rename build/cmake/{ => functions}/GetGitRevisionDescription.cmake.in (100%) rename build/cmake/{ => functions}/GetGitTimestamp.cmake (100%) rename build/cmake/{ => functions}/luarocks_install.cmake (97%) create mode 100644 build/cmake/functions/opm_install.cmake create mode 100644 build/cmake/functions/require_program.cmake delete mode 100644 build/cmake/hadoop-analytics/flume.cmake delete mode 100644 build/cmake/hadoop-analytics/kylin.cmake delete mode 100644 build/cmake/hadoop-analytics/presto.cmake delete mode 100644 build/cmake/hadoop-analytics/processor.cmake delete mode 100644 build/cmake/luarocks.cmake create mode 100644 build/cmake/test-deps.cmake create mode 100644 build/cmake/test-deps/luacheck.cmake rename build/cmake/{test => test-deps}/mailhog.cmake (77%) rename build/cmake/{test => test-deps}/mongo-orchestration.cmake (57%) rename build/cmake/{test => test-deps}/openldap.cmake (75%) rename build/cmake/{test => test-deps}/phantomjs.cmake (77%) rename build/cmake/{test => test-deps}/shellcheck.cmake (60%) rename build/cmake/{test => test-deps}/unbound.cmake (68%) create mode 100644 build/cmake/test.cmake delete mode 100644 build/cmake/test/bundle.cmake delete mode 100644 build/cmake/test/lua-deps.cmake delete mode 100644 build/cmake/test/test.cmake delete mode 100644 build/cmake/versions.cmake create mode 100644 build/patches/opm.patch create mode 100755 docker/dev/docker-entrypoint create mode 100644 templates/etc/trafficserver/logging.config.mustache delete mode 100644 templates/etc/trafficserver/logs_xml.config diff --git a/.dockerignore b/.dockerignore index d61674ac1..fd2393bd6 100644 --- a/.dockerignore +++ b/.dockerignore @@ -18,8 +18,10 @@ chef/tmp .ruby-version CMakeCache.txt CMakeFiles +Dockerfile* Makefile cmake_install.cmake +docker-compose.yml install_manifest.txt install_manifest_core.txt install_manifest_hadoop-analytics.txt diff --git a/CMakeLists.txt b/CMakeLists.txt index e6802b33e..ce0a48799 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,10 +1,6 @@ cmake_minimum_required(VERSION 3.6.0 FATAL_ERROR) project(api-umbrella) 
-option(ENABLE_HADOOP_ANALYTICS "Build dependencies for Hadoop analytics" off) -option(ENABLE_TEST_DEPENDENCIES "Build dependencies for running tests" off) -option(ENABLE_DEPLOY_ONLY "Only build dependencies for a deployment overlaying an existing package install" off) - # Installation prefix if (CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) set(CMAKE_INSTALL_PREFIX /opt/api-umbrella CACHE PATH "default install path" FORCE) @@ -26,9 +22,6 @@ execute_process(COMMAND mkdir -p ${STAMP_DIR}) set(STAGE_DIR ${WORK_DIR}/stage) set(STAGE_PREFIX_DIR ${STAGE_DIR}${CMAKE_INSTALL_PREFIX}) set(STAGE_EMBEDDED_DIR ${STAGE_DIR}${INSTALL_PREFIX_EMBEDDED}) -set(HADOOP_ANALYTICS_STAGE_DIR ${WORK_DIR}/stage-hadoop-analytics) -set(HADOOP_ANALYTICS_STAGE_PREFIX_DIR ${HADOOP_ANALYTICS_STAGE_DIR}${CMAKE_INSTALL_PREFIX}) -set(HADOOP_ANALYTICS_STAGE_EMBEDDED_DIR ${HADOOP_ANALYTICS_STAGE_DIR}${INSTALL_PREFIX_EMBEDDED}) # Where to install app-level vendor dependencies. set(VENDOR_DIR ${WORK_DIR}/vendor) @@ -39,6 +32,7 @@ else() set(LUA_PREFIX ${STAGE_EMBEDDED_DIR}) endif() set(LUAROCKS_CMD env LUA_PATH=${LUA_PREFIX}/openresty/luajit/share/lua/5.1/?.lua$${LUA_PREFIX}/openresty/luajit/share/lua/5.1/?/init.lua$$ ${LUA_PREFIX}/bin/luarocks) +set(OPM_CMD env LUA_PATH=${LUA_PREFIX}/openresty/lualib/?.lua$${LUA_PREFIX}/openresty/lualib/?/init.lua$$ PATH=${STAGE_EMBEDDED_DIR}/bin:$ENV{PATH} LD_LIBRARY_PATH=${STAGE_EMBEDDED_DIR}/openresty/luajit/lib:${STAGE_EMBEDDED_DIR}/lib opm) # Where to install development-only dependencies. set(DEV_INSTALL_PREFIX ${WORK_DIR}/dev-env) @@ -50,90 +44,41 @@ set(TEST_VENDOR_DIR ${TEST_INSTALL_PREFIX}/vendor) set(TEST_VENDOR_LUA_SHARE_DIR ${TEST_VENDOR_DIR}/share/lua/5.1) set(TEST_VENDOR_LUA_LIB_DIR ${TEST_VENDOR_DIR}/lib/lua/5.1) -# Define a timestamped release name for our app installations. Base this on the -# last git commit timestamp so installs are consistent for each git commit. -include(${CMAKE_SOURCE_DIR}/build/cmake/GetGitRevisionDescription.cmake) -include(${CMAKE_SOURCE_DIR}/build/cmake/GetGitTimestamp.cmake) -get_git_timestamp(RELEASE_TIMESTAMP) -string(SUBSTRING ${RELEASE_TIMESTAMP} 0 8 RELEASE_DATE) - +# Misc dependencies/functions. include(ExternalProject) +find_package(PkgConfig REQUIRED) +include(${CMAKE_SOURCE_DIR}/build/cmake/functions/GetGitRevisionDescription.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/functions/GetGitTimestamp.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/functions/luarocks_install.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/functions/opm_install.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/functions/require_program.cmake) -function(require_program name) - find_program(${name} ${name}) - if(NOT ${name}) - MESSAGE(FATAL_ERROR "Could not find ${name}") - endif() -endfunction(require_program) +# make deps +include(${CMAKE_SOURCE_DIR}/build/cmake/deps.cmake) -if(ENABLE_DEPLOY_ONLY) - # Create stub/empty targets for things the core build process depends on. But - # for deploy-based builds, we'll assume these dependencies have already been - # installed (since we're assuming the deploys are overlaying a package - # installation). 
- add_custom_target(bundler) - add_custom_target(libcidr) - add_custom_target(luarocks) - - include(${CMAKE_SOURCE_DIR}/build/cmake/versions.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/core.cmake) -else() - find_package(LibXml2 REQUIRED) - find_package(PkgConfig REQUIRED) - pkg_search_module(LIBUUID REQUIRED uuid) - pkg_search_module(LIBFFI REQUIRED libffi) - require_program(rsync) - - include(${CMAKE_SOURCE_DIR}/build/cmake/versions.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/elasticsearch.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/libcidr.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/libgeoip.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/mongodb.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/mora.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/openresty.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/luarocks.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/perp.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/runit_svlogd.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/ruby.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/rsyslog.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/trafficserver.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/static-site.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/core.cmake) - if(ENABLE_HADOOP_ANALYTICS) - include(${CMAKE_SOURCE_DIR}/build/cmake/hadoop-analytics/flume.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/hadoop-analytics/kylin.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/hadoop-analytics/presto.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/hadoop-analytics/processor.cmake) - endif() - - # - # Testing - # - if(ENABLE_TEST_DEPENDENCIES) - include(${CMAKE_SOURCE_DIR}/build/cmake/test/lua-deps.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/test/mailhog.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/test/mongo-orchestration.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/test/openldap.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/test/phantomjs.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/test/shellcheck.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/test/unbound.cmake) - endif() - include(${CMAKE_SOURCE_DIR}/build/cmake/test/bundle.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/test/test.cmake) - - # - # Installation - # - include(${CMAKE_SOURCE_DIR}/build/cmake/install.cmake) - - # - # Packaging - # - include(${CMAKE_SOURCE_DIR}/build/cmake/package.cmake) - - # - # Clean Task - # - include(${CMAKE_SOURCE_DIR}/build/cmake/clean-download-archives.cmake) - include(${CMAKE_SOURCE_DIR}/build/cmake/distclean.cmake) -endif() +# make build-deps +include(${CMAKE_SOURCE_DIR}/build/cmake/build-deps.cmake OPTIONAL) + +# make app-deps +include(${CMAKE_SOURCE_DIR}/build/cmake/app-deps.cmake OPTIONAL) + +# make app +include(${CMAKE_SOURCE_DIR}/build/cmake/app.cmake OPTIONAL) + +# make test-deps +include(${CMAKE_SOURCE_DIR}/build/cmake/test-deps.cmake OPTIONAL) + +# make test +include(${CMAKE_SOURCE_DIR}/build/cmake/test.cmake OPTIONAL) + +# make distclean +include(${CMAKE_SOURCE_DIR}/build/cmake/distclean.cmake OPTIONAL) + +# make clean-download-archives +include(${CMAKE_SOURCE_DIR}/build/cmake/clean-download-archives.cmake OPTIONAL) + +# make install +include(${CMAKE_SOURCE_DIR}/build/cmake/install.cmake OPTIONAL) + +# make package +include(${CMAKE_SOURCE_DIR}/build/cmake/package.cmake OPTIONAL) diff --git a/Dockerfile-dev-build b/Dockerfile-dev-build index cb8deb657..947ebadc6 100644 --- a/Dockerfile-dev-build +++ b/Dockerfile-dev-build @@ -1,34 +1,48 @@ -FROM ubuntu:xenial +FROM ubuntu:18.04 RUN mkdir 
/app +RUN mkdir /build +WORKDIR /build +ENV DOCKER_DEV="true" + COPY build/scripts/install_build_dependencies /app/build/scripts/install_build_dependencies COPY build/package_dependencies.sh /app/build/package_dependencies.sh RUN env INSTALL_TEST_DEPENDENCIES=true /app/build/scripts/install_build_dependencies -RUN mkdir /build -WORKDIR /build - COPY build/scripts/download_cmake /app/build/scripts/download_cmake RUN /app/build/scripts/download_cmake COPY CMakeLists.txt /app/CMakeLists.txt -COPY Gemfile /app/Gemfile -COPY Gemfile.lock /app/Gemfile.lock -COPY build/cmake /app/build/cmake -COPY config/default.yml /app/config/default.yml +COPY build/cmake/functions /app/build/cmake/functions +COPY build/cmake/deps /app/build/cmake/deps +COPY build/cmake/deps.cmake /app/build/cmake/deps.cmake +COPY build/patches /app/build/patches COPY configure /app/configure -COPY src/api-umbrella/admin-ui /app/src/api-umbrella/admin-ui -COPY src/api-umbrella/version.txt /app/src/api-umbrella/version.txt -COPY src/api-umbrella/web-app /app/src/api-umbrella/web-app -RUN /app/configure --enable-test-dependencies && make +RUN /app/configure && make deps -RUN groupadd -r api-umbrella && \ - useradd -r -g api-umbrella -s /sbin/nologin -d /opt/api-umbrella -c "API Umbrella user" api-umbrella +COPY build/cmake/build-deps /app/build/cmake/build-deps +COPY build/cmake/build-deps.cmake /app/build/cmake/build-deps.cmake +RUN /app/configure && make build-deps + +COPY build/cmake/app-deps /app/build/cmake/app-deps +COPY build/cmake/app-deps.cmake /app/build/cmake/app-deps.cmake +RUN /app/configure && make app-deps + +COPY build/cmake/test-deps /app/build/cmake/test-deps +COPY build/cmake/test-deps.cmake /app/build/cmake/test-deps.cmake +RUN /app/configure && make test-deps COPY . /app +RUN /app/configure && make test-bundle && make && \ + rm -rf /build/build/work/src/api-umbrella-core/tmp/admin-ui-build/node_modules/*/.node_modules.ember-try + +RUN groupadd -r api-umbrella && \ + useradd -r -g api-umbrella -s /sbin/nologin -d /opt/api-umbrella -c "API Umbrella user" api-umbrella ENV PATH="/app/bin:/build/build/work/dev-env/sbin:/build/build/work/dev-env/bin:/build/build/work/test-env/sbin:/build/build/work/test-env/bin:/build/build/work/stage/opt/api-umbrella/sbin:/build/build/work/stage/opt/api-umbrella/bin:/build/build/work/stage/opt/api-umbrella/embedded/sbin:/build/build/work/stage/opt/api-umbrella/embedded/bin:${PATH}" ENV API_UMBRELLA_ROOT="/build/build/work/stage/opt/api-umbrella" WORKDIR /app -CMD ./docker/dev/docker-start + +ENTRYPOINT ["/app/docker/dev/docker-entrypoint"] +CMD ["/app/docker/dev/docker-start"] diff --git a/bin/api-umbrella-env b/bin/api-umbrella-env index facfe4802..2d88f4c65 100644 --- a/bin/api-umbrella-env +++ b/bin/api-umbrella-env @@ -59,7 +59,8 @@ export LD_LIBRARY_PATH="$API_UMBRELLA_EMBEDDED_ROOT/openresty/luajit/lib:$API_UM # Note that we purposefully don't use any of the default Lua load paths (like # /usr/local) so that other Lua packages on the system aren't picked up (since # they might conflict). 
-export LUA_PATH="$API_UMBRELLA_EMBEDDED_ROOT/apps/core/shared/vendor/share/lua/5.1/?.lua;\ +export LUA_PATH="$API_UMBRELLA_EMBEDDED_ROOT/apps/core/shared/vendor/resty_modules/lualib/?.lua;\ +$API_UMBRELLA_EMBEDDED_ROOT/apps/core/shared/vendor/share/lua/5.1/?.lua;\ $API_UMBRELLA_EMBEDDED_ROOT/apps/core/shared/vendor/share/lua/5.1/?/init.lua;\ $API_UMBRELLA_EMBEDDED_ROOT/openresty/lualib/?.lua;\ $API_UMBRELLA_EMBEDDED_ROOT/openresty/lualib/?/init.lua;\ @@ -70,7 +71,8 @@ $API_UMBRELLA_EMBEDDED_ROOT/openresty/luajit/share/lua/5.1/?/init.lua" if [ -n "$API_UMBRELLA_SRC_ROOT" ]; then export LUA_PATH="$API_UMBRELLA_SRC_ROOT/src/?.lua;$LUA_PATH" fi -export LUA_CPATH="$API_UMBRELLA_EMBEDDED_ROOT/apps/core/shared/vendor/lib/lua/5.1/?.so;\ +export LUA_CPATH="$API_UMBRELLA_EMBEDDED_ROOT/apps/core/shared/vendor/resty_modules/?.so;\ +$API_UMBRELLA_EMBEDDED_ROOT/apps/core/shared/vendor/lib/lua/5.1/?.so;\ $API_UMBRELLA_EMBEDDED_ROOT/openresty/lualib/?.so;\ $API_UMBRELLA_EMBEDDED_ROOT/openresty/luajit/lib/lua/5.1/?.so" diff --git a/build/cmake/app-deps.cmake b/build/cmake/app-deps.cmake new file mode 100644 index 000000000..8c811049b --- /dev/null +++ b/build/cmake/app-deps.cmake @@ -0,0 +1,3 @@ +include(${CMAKE_SOURCE_DIR}/build/cmake/app-deps/core-lua-deps.cmake) + +add_custom_target(app-deps ALL DEPENDS ${STAMP_DIR}/core-lua-deps) diff --git a/build/cmake/core-lua-deps.cmake b/build/cmake/app-deps/core-lua-deps.cmake similarity index 59% rename from build/cmake/core-lua-deps.cmake rename to build/cmake/app-deps/core-lua-deps.cmake index 0808afc65..f0acd3c6b 100644 --- a/build/cmake/core-lua-deps.cmake +++ b/build/cmake/app-deps/core-lua-deps.cmake @@ -1,22 +1,67 @@ -include(${CMAKE_SOURCE_DIR}/build/cmake/luarocks_install.cmake) +set(LUAROCK_ARGPARSE_VERSION 0.6.0-1) +set(LUAROCK_ARGPARSE_HASH 6656139dd66430075aa2093556857a84) +set(LUAROCK_CMSGPACK_VERSION 0.4.0-0) +set(LUAROCK_CMSGPACK_HASH f459d16fffdbbc85e582803321b3cec9) +set(LUAROCK_ICONV_VERSION 7-3) +set(LUAROCK_ICONV_HASH 138d21a895d267f09ff40fcb75324f74) +set(LUAROCK_INSPECT_VERSION 3.1.1-0) +set(LUAROCK_INSPECT_HASH 8a8a05f10b07a603e44e4f8b39bddd35) +set(LUAROCK_LUAPOSIX_VERSION 34.0.4-1) +set(LUAROCK_LUAPOSIX_HASH e584252902055ee40f250a1a304ec18e) +set(LUAROCK_LUSTACHE_VERSION 1.3.1-0) +set(LUAROCK_LUSTACHE_HASH 840ecd41bf19ed1751916de2cd46229e) +set(LUAROCK_LYAML_VERSION 6.2.2-1) +set(LUAROCK_LYAML_HASH d8c8c11db09bfc3f82838d0195d7cf04) +set(LUAROCK_PENLIGHT_VERSION 1.5.4-1) +set(LUAROCK_PENLIGHT_HASH 8f4e6b4c7e851c28cb3e95be728d6507) +set(LUAROCK_RESTY_UUID_VERSION 1.1-1) +set(LUAROCK_RESTY_UUID_HASH d14ae99d6f18edd5c934e6050e974c5e) +set(LUA_LUASOCKET_VERSION 652959890943c34d7180cae372339b91e62f0d7b) +set(LUA_LUASOCKET_HASH 6b3e3bdf60267f5957c2ea44e563ed70) +set(LUA_RESTY_DNS_CACHE_VERSION 32d9d461465edbec1cc798c18447c0ac7ee6e528) +set(LUA_RESTY_DNS_CACHE_HASH 3a5414110c6ad4331fe82873e19bd1e8) +set(LUA_RESTY_LOGGER_SOCKET_VERSION 15cc1c256e55b8e68ec9b220b6883c227a763d4e) +set(LUA_RESTY_LOGGER_SOCKET_HASH efe14697a8c4be612c011f54fce06191) +set(LUA_RESTY_SHCACHE_VERSION fb2e275c2cdca08eaa34a7b73375e41ac3eff200) +set(LUA_RESTY_SHCACHE_HASH 5d3cbcf8fbad1954cdcb3826afa41afe) +set(OPM_ICU_DATE_VERSION 857990ba72cf48f7ae20dfb861a783231b5a2e79) +set(OPM_ICU_DATE_HASH 580f4a650782556266cc341630d39f63) +set(OPM_LIBCIDR_VERSION 0.1.3) +set(OPM_LIBCIDR_HASH 9d995b83a7d857fcdec949725711b784) +set(OPM_RESTY_HTTP_VERSION 0.12) +set(OPM_RESTY_HTTP_HASH edc5d6deb82c1f5f628e382290c79209) # LuaRock app dependencies luarocks_install(argparse 
${LUAROCK_ARGPARSE_VERSION} ${LUAROCK_ARGPARSE_HASH}) luarocks_install(inspect ${LUAROCK_INSPECT_VERSION} ${LUAROCK_INSPECT_HASH}) -luarocks_install(libcidr-ffi ${LUAROCK_LIBCIDR_VERSION} ${LUAROCK_LIBCIDR_HASH} CIDR_DIR=${LUA_PREFIX} libcidr) luarocks_install(lua-cmsgpack ${LUAROCK_CMSGPACK_VERSION} ${LUAROCK_CMSGPACK_HASH}) luarocks_install(lua-iconv ${LUAROCK_ICONV_VERSION} ${LUAROCK_ICONV_HASH}) -luarocks_install(lua-resty-http ${LUAROCK_RESTY_HTTP_VERSION} ${LUAROCK_RESTY_HTTP_HASH}) luarocks_install(lua-resty-uuid ${LUAROCK_RESTY_UUID_VERSION} ${LUAROCK_RESTY_UUID_HASH}) luarocks_install(luaposix ${LUAROCK_LUAPOSIX_VERSION} ${LUAROCK_LUAPOSIX_HASH}) -luarocks_install(luatz ${LUAROCK_LUATZ_VERSION} ${LUAROCK_LUATZ_HASH}) luarocks_install(lustache ${LUAROCK_LUSTACHE_VERSION} ${LUAROCK_LUSTACHE_HASH}) luarocks_install(lyaml ${LUAROCK_LYAML_VERSION} ${LUAROCK_LYAML_HASH}) luarocks_install(penlight ${LUAROCK_PENLIGHT_VERSION} ${LUAROCK_PENLIGHT_HASH}) +# OPM app dependencies +opm_install(lua-libcidr-ffi GUI ${OPM_LIBCIDR_VERSION} ${OPM_LIBCIDR_HASH} libcidr) +opm_install(lua-resty-http pintsized ${OPM_RESTY_HTTP_VERSION} ${OPM_RESTY_HTTP_HASH}) + +ExternalProject_Add( + opm_lua-icu-date + EXCLUDE_FROM_ALL 1 + DEPENDS luarocks + URL https://github.com/GUI/lua-icu-date/archive/${OPM_ICU_DATE_VERSION}.tar.gz + URL_HASH MD5=${OPM_ICU_DATE_HASH} + BUILD_IN_SOURCE 1 + CONFIGURE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND ${LUAROCKS_CMD} --tree=${VENDOR_DIR} make --local icu-date-git-1.rockspec +) + # Other Lua app dependencies (non-luarocks) ExternalProject_Add( lua_luasocket_url + EXCLUDE_FROM_ALL 1 DEPENDS luarocks URL https://github.com/diegonehab/luasocket/archive/${LUA_LUASOCKET_VERSION}.tar.gz URL_HASH MD5=${LUA_LUASOCKET_HASH} @@ -33,6 +78,7 @@ ExternalProject_Add( ExternalProject_Add( lua_resty_dns_cache + EXCLUDE_FROM_ALL 1 DEPENDS luarocks URL https://github.com/hamishforbes/lua-resty-dns-cache/archive/${LUA_RESTY_DNS_CACHE_VERSION}.tar.gz URL_HASH MD5=${LUA_RESTY_DNS_CACHE_HASH} @@ -43,6 +89,7 @@ ExternalProject_Add( ExternalProject_Add( lua_resty_logger_socket + EXCLUDE_FROM_ALL 1 DEPENDS luarocks URL https://github.com/cloudflare/lua-resty-logger-socket/archive/${LUA_RESTY_LOGGER_SOCKET_VERSION}.tar.gz URL_HASH MD5=${LUA_RESTY_LOGGER_SOCKET_HASH} @@ -53,6 +100,7 @@ ExternalProject_Add( ExternalProject_Add( lua_resty_shcache + EXCLUDE_FROM_ALL 1 DEPENDS luarocks URL https://github.com/cloudflare/lua-resty-shcache/archive/${LUA_RESTY_SHCACHE_VERSION}.tar.gz URL_HASH MD5=${LUA_RESTY_SHCACHE_HASH} @@ -69,16 +117,16 @@ set( lua_resty_shcache luarock_argparse luarock_inspect - luarock_libcidr-ffi luarock_lua-cmsgpack luarock_lua-iconv - luarock_lua-resty-http luarock_lua-resty-uuid luarock_luaposix - luarock_luatz luarock_lustache luarock_lyaml luarock_penlight + opm_lua-icu-date + opm_lua-libcidr-ffi + opm_lua-resty-http ) # Also depend on the internal stamp files used by ExternalProject_Add, since diff --git a/build/cmake/app.cmake b/build/cmake/app.cmake new file mode 100644 index 000000000..35e6dda1c --- /dev/null +++ b/build/cmake/app.cmake @@ -0,0 +1,8 @@ +# Define a timestamped release name for our app installations. Base this on the +# last git commit timestamp so installs are consistent for each git commit. 
+get_git_timestamp(RELEASE_TIMESTAMP) + +include(${CMAKE_SOURCE_DIR}/build/cmake/app/core.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/app/static-site.cmake) + +add_custom_target(app ALL DEPENDS ${STAMP_DIR}/core api_umbrella_static_site) diff --git a/build/cmake/core-admin-ui.cmake b/build/cmake/app/core-admin-ui.cmake similarity index 71% rename from build/cmake/core-admin-ui.cmake rename to build/cmake/app/core-admin-ui.cmake index 3ebdb6ecd..6cc5ce607 100644 --- a/build/cmake/core-admin-ui.cmake +++ b/build/cmake/app/core-admin-ui.cmake @@ -1,5 +1,3 @@ -include(${CMAKE_SOURCE_DIR}/build/cmake/dev/nodejs.cmake) - file(GLOB_RECURSE admin_ui_files ${CMAKE_SOURCE_DIR}/src/api-umbrella/admin-ui/app/*.hbs ${CMAKE_SOURCE_DIR}/src/api-umbrella/admin-ui/app/*.html @@ -52,20 +50,3 @@ add_custom_command( COMMAND cd ${CORE_BUILD_DIR}/tmp/admin-ui-build && rm -rf ./dist && env PATH=${DEV_INSTALL_PREFIX}/bin:$ENV{PATH} ./node_modules/.bin/ember build --environment=production --output-path=./dist COMMAND touch ${STAMP_DIR}/core-admin-ui-build ) - -# Normally we perform the yarn installs out-of-source (so the build takes place -# entirely out of source), but if testing/development is enabled for this -# build, then also create a local symlink within the source. This then allows -# for easier interactions with the application. -if(ENABLE_TEST_DEPENDENCIES) - add_custom_command( - OUTPUT ${CMAKE_SOURCE_DIR}/src/api-umbrella/admin-ui/node_modules - DEPENDS - ${STAMP_DIR}/core-admin-ui-yarn-install - ${CORE_BUILD_DIR}/tmp/admin-ui-build/node_modules - COMMAND rm -rf ${CMAKE_SOURCE_DIR}/src/api-umbrella/admin-ui/node_modules - COMMAND ln -snf ${CORE_BUILD_DIR}/tmp/admin-ui-build/node_modules ${CMAKE_SOURCE_DIR}/src/api-umbrella/admin-ui/node_modules - COMMAND touch -h ${CMAKE_SOURCE_DIR}/src/api-umbrella/admin-ui/node_modules - ) - add_custom_target(core-admin-ui-local-yarn ALL DEPENDS ${CMAKE_SOURCE_DIR}/src/api-umbrella/admin-ui/node_modules) -endif() diff --git a/build/cmake/app/core-locale.cmake b/build/cmake/app/core-locale.cmake new file mode 100644 index 000000000..a19d915e9 --- /dev/null +++ b/build/cmake/app/core-locale.cmake @@ -0,0 +1,33 @@ +find_package(Gettext REQUIRED) + +file(GLOB locale_files ${CMAKE_SOURCE_DIR}/locale/*.po) + +add_custom_command( + OUTPUT ${CORE_BUILD_DIR}/tmp/locale-build + DEPENDS ${locale_files} + COMMAND rm -rf ${CORE_BUILD_DIR}/tmp/locale-build + COMMAND mkdir -p ${CORE_BUILD_DIR}/tmp/locale-build + COMMAND touch -h ${CORE_BUILD_DIR}/tmp/locale-build +) + +foreach(locale_file ${locale_files}) + get_filename_component(locale ${locale_file} NAME_WE) + + add_custom_command( + OUTPUT ${CORE_BUILD_DIR}/tmp/locale-build/${locale}.json + DEPENDS + ${STAMP_DIR}/core-admin-ui-yarn-install + ${CORE_BUILD_DIR}/tmp/locale-build + ${locale_file} + COMMAND mkdir -p ${CORE_BUILD_DIR}/tmp/locale-build/ + COMMAND cd ${CORE_BUILD_DIR}/tmp/admin-ui-build && env PATH=${DEV_INSTALL_PREFIX}/bin:$ENV{PATH} ./node_modules/.bin/po2json --format=jed1.x --domain=api-umbrella ${locale_file} ${CORE_BUILD_DIR}/tmp/locale-build/${locale}.json + ) + + list(APPEND locale_depends ${CORE_BUILD_DIR}/tmp/locale-build/${locale}.json) +endforeach(locale_file) + +add_custom_command( + OUTPUT ${STAMP_DIR}/core-locale-build + DEPENDS ${locale_depends} + COMMAND touch ${STAMP_DIR}/core-locale-build +) diff --git a/build/cmake/core-web-app.cmake b/build/cmake/app/core-web-app.cmake similarity index 63% rename from build/cmake/core-web-app.cmake rename to build/cmake/app/core-web-app.cmake index 
7f6bcf439..2b9714f77 100644 --- a/build/cmake/core-web-app.cmake +++ b/build/cmake/app/core-web-app.cmake @@ -29,21 +29,10 @@ add_custom_command( COMMAND touch ${STAMP_DIR}/core-web-app-precompile ) -# Normally we perform the bundle out-of-source (so the build takes place -# entirely out of source), but if testing/development is enabled for this -# build, then also create a local ".bundle/config" item within the source. This -# then allows for gems to be found when interacting with the local source -# version of the app. -if(ENABLE_TEST_DEPENDENCIES) - add_custom_command( - OUTPUT ${CMAKE_SOURCE_DIR}/src/api-umbrella/web-app/.bundle/config - DEPENDS - ${STAMP_DIR}/core-web-app-bundle - ${WORK_DIR}/src/web-app/.bundle - ${VENDOR_DIR}/bundle - COMMAND rm -rf ${CMAKE_SOURCE_DIR}/src/api-umbrella/web-app/.bundle - COMMAND ln -snf ${WORK_DIR}/src/web-app/.bundle ${CMAKE_SOURCE_DIR}/src/api-umbrella/web-app/.bundle - COMMAND touch -c ${CMAKE_SOURCE_DIR}/src/api-umbrella/web-app/.bundle/config - ) - add_custom_target(core-web-app-local-bundle ALL DEPENDS ${CMAKE_SOURCE_DIR}/src/api-umbrella/web-app/.bundle/config) -endif() +add_custom_command( + OUTPUT + ${STAMP_DIR}/core-web-app-build + DEPENDS + ${STAMP_DIR}/core-web-app-precompile + COMMAND touch ${STAMP_DIR}/core-web-app-build +) diff --git a/build/cmake/core.cmake b/build/cmake/app/core.cmake similarity index 94% rename from build/cmake/core.cmake rename to build/cmake/app/core.cmake index 959f10e14..4dabb586c 100644 --- a/build/cmake/core.cmake +++ b/build/cmake/app/core.cmake @@ -1,8 +1,7 @@ set(CORE_BUILD_DIR ${WORK_DIR}/src/api-umbrella-core) -include(${CMAKE_SOURCE_DIR}/build/cmake/core-lua-deps.cmake) -include(${CMAKE_SOURCE_DIR}/build/cmake/core-web-app.cmake) -include(${CMAKE_SOURCE_DIR}/build/cmake/core-admin-ui.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/app/core-admin-ui.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/app/core-web-app.cmake) # Copy the vendored libraries into the shared build directory. add_custom_command( @@ -33,15 +32,15 @@ add_custom_command( add_custom_command( OUTPUT ${STAMP_DIR}/core-build-install-dist - ${CORE_BUILD_DIR}/releases/0/build/dist/web-app-assets ${CORE_BUILD_DIR}/releases/0/build/dist/admin-ui + ${CORE_BUILD_DIR}/releases/0/build/dist/web-app-assets DEPENDS ${STAMP_DIR}/core-admin-ui-build - ${STAMP_DIR}/core-web-app-precompile + ${STAMP_DIR}/core-web-app-build ${STAMP_DIR}/core-build-release-dir COMMAND mkdir -p ${CORE_BUILD_DIR}/releases/0/build/dist/web-app-assets - COMMAND rsync -a --delete-after ${CORE_BUILD_DIR}/tmp/web-app-build/web-assets/ ${CORE_BUILD_DIR}/releases/0/build/dist/web-app-assets/web-assets/ COMMAND rsync -a --delete-after ${CORE_BUILD_DIR}/tmp/admin-ui-build/dist/ ${CORE_BUILD_DIR}/releases/0/build/dist/admin-ui/ + COMMAND rsync -a --delete-after ${CORE_BUILD_DIR}/tmp/web-app-build/web-assets/ ${CORE_BUILD_DIR}/releases/0/build/dist/web-app-assets/web-assets/ COMMAND touch ${STAMP_DIR}/core-build-install-dist ) @@ -75,7 +74,7 @@ add_custom_command( # Disable all non-production gems and remove any old, unused gems. COMMAND env PATH=${STAGE_EMBEDDED_DIR}/bin:$ENV{PATH} bundle install --path=../../../vendor/bundle --without=development test assets --clean --deployment # Purge gem files we don't need to make for a lighter package distribution. 
- COMMAND cd ${CORE_BUILD_DIR}/shared/vendor/bundle && rm -rf ruby/*/cache ruby/*/gems/*/test* ruby/*/gems/*/spec ruby/*/bundler/gems/*/test* ruby/*/bundler/gems/*/spec ruby/*/bundler/gems/*/.git + COMMAND cd ${CORE_BUILD_DIR}/shared/vendor/bundle && rm -rf ruby/*/cache ruby/*/gems/*/test* ruby/*/gems/*/spec ruby/*/bundler/gems/*/test* ruby/*/bundler/gems/*/spec ruby/*/bundl COMMAND touch -c ${CORE_BUILD_DIR}/releases/0/src/api-umbrella/web-app/.bundle/config ) @@ -138,5 +137,3 @@ add_custom_command( ${STAMP_DIR}/core-api-umbrella-exec-bin-symlink COMMAND touch ${STAMP_DIR}/core ) - -add_custom_target(core ALL DEPENDS ${STAMP_DIR}/core) diff --git a/build/cmake/static-site.cmake b/build/cmake/app/static-site.cmake similarity index 81% rename from build/cmake/static-site.cmake rename to build/cmake/app/static-site.cmake index 69cb918a5..ceac381ce 100644 --- a/build/cmake/static-site.cmake +++ b/build/cmake/app/static-site.cmake @@ -1,7 +1,12 @@ # api-umbrella-static-site: Example website content + +set(API_UMBRELLA_STATIC_SITE_VERSION c02b8869cafb063deb7f9436d0137b0ea6e652aa) +set(API_UMBRELLA_STATIC_SITE_HASH 07dbd5e6d96e62a9ad6b725b14f727a1) + ExternalProject_Add( api_umbrella_static_site - DEPENDS bundler + EXCLUDE_FROM_ALL 1 + DEPENDS bundler nodejs URL https://github.com/NREL/api-umbrella-static-site/archive/${API_UMBRELLA_STATIC_SITE_VERSION}.tar.gz URL_HASH MD5=${API_UMBRELLA_STATIC_SITE_HASH} BUILD_IN_SOURCE 1 diff --git a/build/cmake/build-deps.cmake b/build/cmake/build-deps.cmake new file mode 100644 index 000000000..238a906cc --- /dev/null +++ b/build/cmake/build-deps.cmake @@ -0,0 +1,3 @@ +include(${CMAKE_SOURCE_DIR}/build/cmake/build-deps/nodejs.cmake) + +add_custom_target(build-deps ALL DEPENDS nodejs yarn) diff --git a/build/cmake/dev/nodejs.cmake b/build/cmake/build-deps/nodejs.cmake similarity index 77% rename from build/cmake/dev/nodejs.cmake rename to build/cmake/build-deps/nodejs.cmake index 057e2aac2..b04c8adf0 100644 --- a/build/cmake/dev/nodejs.cmake +++ b/build/cmake/build-deps/nodejs.cmake @@ -1,6 +1,13 @@ # NodeJS: For building admin-ui Ember app. 
+ +set(NODEJS_VERSION 8.11.1) +set(NODEJS_HASH 6617e245fa0f7fbe0e373e71d543fea878315324ab31dc64b4eba10e42d04c11) +set(YARN_VERSION 1.6.0) +set(YARN_HASH a11a3d8a5d62712fc497a6d1cbea25f6) + ExternalProject_Add( nodejs + EXCLUDE_FROM_ALL 1 URL https://nodejs.org/dist/v${NODEJS_VERSION}/node-v${NODEJS_VERSION}-linux-x64.tar.xz URL_HASH SHA256=${NODEJS_HASH} CONFIGURE_COMMAND "" @@ -10,6 +17,7 @@ ExternalProject_Add( ExternalProject_Add( yarn + EXCLUDE_FROM_ALL 1 DEPENDS nodejs URL https://github.com/yarnpkg/yarn/releases/download/v${YARN_VERSION}/yarn-v${YARN_VERSION}.tar.gz URL_HASH MD5=${YARN_HASH} diff --git a/build/cmake/clean-download-archives.cmake b/build/cmake/clean-download-archives.cmake index bdbe6b12a..8a1616ba1 100644 --- a/build/cmake/clean-download-archives.cmake +++ b/build/cmake/clean-download-archives.cmake @@ -1,4 +1,3 @@ -add_custom_target( - clean-download-archives +add_custom_target(clean-download-archives COMMAND rm -f ${WORK_DIR}/src/*.gz ${WORK_DIR}/src/*.bz2 ${WORK_DIR}/src/*.tgz ) diff --git a/build/cmake/deps.cmake b/build/cmake/deps.cmake new file mode 100644 index 000000000..46eeb166a --- /dev/null +++ b/build/cmake/deps.cmake @@ -0,0 +1,25 @@ +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/elasticsearch.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/libcidr.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/mongodb.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/mora.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/openresty.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/perp.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/rsyslog.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/ruby.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/runit_svlogd.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/trafficserver.cmake) + +add_custom_target(deps ALL DEPENDS + bundler + elasticsearch + geolitecity + libcidr + luarocks + mongodb + mora + openresty + perp + rsyslog + runit_svlogd + trafficserver +) diff --git a/build/cmake/elasticsearch.cmake b/build/cmake/deps/elasticsearch.cmake similarity index 71% rename from build/cmake/elasticsearch.cmake rename to build/cmake/deps/elasticsearch.cmake index 2c66fbb5a..d69aafda7 100644 --- a/build/cmake/elasticsearch.cmake +++ b/build/cmake/deps/elasticsearch.cmake @@ -1,14 +1,18 @@ +# Elasticsearch: Analytics database + find_package(Java 1.7 REQUIRED COMPONENTS Runtime) +require_program(rsync) + +set(ELASTICSEARCH_VERSION 2.4.6) +set(ELASTICSEARCH_HASH c3441bef89cd91206edf3cf3bd5c4b62550e60a9) -# Elasticsearch: Analytics database ExternalProject_Add( elasticsearch + EXCLUDE_FROM_ALL 1 URL https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz URL_HASH SHA1=${ELASTICSEARCH_HASH} CONFIGURE_COMMAND "" BUILD_COMMAND "" INSTALL_COMMAND rsync -a -v --checksum --delete-after / ${STAGE_EMBEDDED_DIR}/elasticsearch/ COMMAND mkdir -p ${STAGE_EMBEDDED_DIR}/elasticsearch/plugins - COMMAND cd ${STAGE_EMBEDDED_DIR}/bin && ln -snf ../elasticsearch/bin/plugin ./plugin - COMMAND cd ${STAGE_EMBEDDED_DIR}/bin && ln -snf ../elasticsearch/bin/elasticsearch ./elasticsearch ) diff --git a/build/cmake/libcidr.cmake b/build/cmake/deps/libcidr.cmake similarity index 80% rename from build/cmake/libcidr.cmake rename to build/cmake/deps/libcidr.cmake index 45cc6cecd..5c5758849 100644 --- a/build/cmake/libcidr.cmake +++ b/build/cmake/deps/libcidr.cmake @@ -1,6 +1,11 @@ # libcidr: CIDR IP calculations for libcidr-ffi LuaRock + +set(LIBCIDR_VERSION 1.2.3) +set(LIBCIDR_HASH 
c5efcc7ae114fdaa5583f58dacecd9de) + ExternalProject_Add( libcidr + EXCLUDE_FROM_ALL 1 URL https://www.over-yonder.net/~fullermd/projects/libcidr/libcidr-${LIBCIDR_VERSION}.tar.xz URL_HASH MD5=${LIBCIDR_HASH} BUILD_IN_SOURCE 1 diff --git a/build/cmake/libgeoip.cmake b/build/cmake/deps/libgeoip.cmake similarity index 78% rename from build/cmake/libgeoip.cmake rename to build/cmake/deps/libgeoip.cmake index 490d6941b..fc31c4929 100644 --- a/build/cmake/libgeoip.cmake +++ b/build/cmake/deps/libgeoip.cmake @@ -1,10 +1,15 @@ # libgeoip & GeoLiteCityv6.dat: GeoIP locations + +set(LIBGEOIP_VERSION 1.6.12) +set(LIBGEOIP_HASH 77d496cc40daa1dbc2b97365807d64d7) + list(APPEND LIBGEOIP_CONFIGURE_CMD env) list(APPEND LIBGEOIP_CONFIGURE_CMD /configure) list(APPEND LIBGEOIP_CONFIGURE_CMD --prefix=${INSTALL_PREFIX_EMBEDDED}) ExternalProject_Add( libgeoip + EXCLUDE_FROM_ALL 1 URL https://github.com/maxmind/geoip-api-c/releases/download/v${LIBGEOIP_VERSION}/GeoIP-${LIBGEOIP_VERSION}.tar.gz URL_HASH MD5=${LIBGEOIP_HASH} CONFIGURE_COMMAND ${LIBGEOIP_CONFIGURE_CMD} @@ -12,16 +17,19 @@ ExternalProject_Add( COMMAND find ${STAGE_EMBEDDED_DIR}/bin/ -name geoiplookup* -exec chrpath -d {} $ ) +execute_process(COMMAND date -u +%Y%m%d OUTPUT_VARIABLE CURRENT_DATE OUTPUT_STRIP_TRAILING_WHITESPACE) ExternalProject_Add( # Make the project name dynamic based on the current date. This forces a # re-download once per day. This helps ensure development and CI environments # are using fresh GeoIP data files without downloading on each run. - geolitecity-${RELEASE_DATE} + geolitecity-${CURRENT_DATE} + EXCLUDE_FROM_ALL 1 URL https://geolite.maxmind.com/download/geoip/database/GeoLiteCityv6-beta/GeoLiteCityv6.dat.gz DOWNLOAD_NO_EXTRACT 1 # Since we re-download every day as a separate project name, this cleans up # any old downloads in the work directory. 
- CONFIGURE_COMMAND find ${CMAKE_BINARY_DIR}/${EP_BASE} -maxdepth 2 -name geolitecity* -not -name geolitecity-${RELEASE_DATE}* -print -exec rm -rf {} $ + CONFIGURE_COMMAND find ${CMAKE_BINARY_DIR}/${EP_BASE} -maxdepth 2 -name geolitecity* -not -name geolitecity-${CURRENT_DATE}* -print -exec rm -rf {} $ BUILD_COMMAND gunzip -c > /GeoLiteCityv6.dat INSTALL_COMMAND install -D -m 644 /GeoLiteCityv6.dat ${STAGE_EMBEDDED_DIR}/var/db/geoip/city-v6.dat ) +add_custom_target(geolitecity DEPENDS geolitecity-${CURRENT_DATE}) diff --git a/build/cmake/mongodb.cmake b/build/cmake/deps/mongodb.cmake similarity index 86% rename from build/cmake/mongodb.cmake rename to build/cmake/deps/mongodb.cmake index e424dcd11..cf0188d03 100644 --- a/build/cmake/mongodb.cmake +++ b/build/cmake/deps/mongodb.cmake @@ -1,6 +1,11 @@ # MongoDB: General database + +set(MONGODB_VERSION 3.2.20) +set(MONGODB_HASH 01f7660d86b3de679ce388eaa681286a) + ExternalProject_Add( mongodb + EXCLUDE_FROM_ALL 1 URL https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB_VERSION}.tgz URL_HASH MD5=${MONGODB_HASH} CONFIGURE_COMMAND "" diff --git a/build/cmake/mora.cmake b/build/cmake/deps/mora.cmake similarity index 78% rename from build/cmake/mora.cmake rename to build/cmake/deps/mora.cmake index 25812b366..9a39fab37 100644 --- a/build/cmake/mora.cmake +++ b/build/cmake/deps/mora.cmake @@ -1,7 +1,14 @@ # Mora: HTTP API for MongoDB (allowing OpenResty connectivity) + +set(GOLANG_VERSION 1.10.2) +set(GOLANG_HASH 4b677d698c65370afa33757b6954ade60347aaca310ea92a63ed717d7cb0c2ff) +set(MORA_VERSION 8127901857cf88d3f0902708b25ad930354973a3) +set(MORA_HASH b86cea913596370cd58fce89b23acd97) + # Built with Go ExternalProject_Add( golang + EXCLUDE_FROM_ALL 1 URL https://storage.googleapis.com/golang/go${GOLANG_VERSION}.linux-amd64.tar.gz URL_HASH SHA256=${GOLANG_HASH} CONFIGURE_COMMAND "" diff --git a/build/cmake/openresty.cmake b/build/cmake/deps/openresty.cmake similarity index 65% rename from build/cmake/openresty.cmake rename to build/cmake/deps/openresty.cmake index 08fa6b45a..edc1efe79 100644 --- a/build/cmake/openresty.cmake +++ b/build/cmake/deps/openresty.cmake @@ -1,8 +1,24 @@ # OpenResty and nginx plugins +include(${CMAKE_SOURCE_DIR}/build/cmake/deps/libgeoip.cmake) + +set(LUAROCKS_VERSION 2.4.4) +set(LUAROCKS_HASH 04e8b19d565e86b1d08f745adc4b1a56) +set(NGX_DYUPS_VERSION a5e75737e04ff3e5040a80f5f739171e96c3359c) +set(NGX_DYUPS_HASH e16860efcd0629f38f514469052d998a) +set(NGX_TXID_VERSION f1c197cb9c42e364a87fbb28d5508e486592ca42) +set(NGX_TXID_HASH 408ee86eb6e42e27a51514f711c41d6b) +set(OPENRESTY_VERSION 1.13.6.1) +set(OPENRESTY_HASH 637f82d0b36c74aec1c01bd3b8e0289c) +set(OPENSSL_VERSION 1.0.2o) +set(OPENSSL_HASH ec3f5c9714ba0fd45cb4e087301eb1336c317e0d20b575a125050470e8089e4d) +set(PCRE_VERSION 8.42) +set(PCRE_HASH 085b6aa253e0f91cae70b3cdbe8c1ac2) + # ngx_dyups: Dynamic upstream handling for handling DNS changes ExternalProject_Add( ngx_dyups + EXCLUDE_FROM_ALL 1 URL https://github.com/yzprofile/ngx_http_dyups_module/archive/${NGX_DYUPS_VERSION}.tar.gz URL_HASH MD5=${NGX_DYUPS_HASH} CONFIGURE_COMMAND "" @@ -15,6 +31,7 @@ set(NGX_DYUPS_SOURCE_DIR ${SOURCE_DIR}) # ngx_txid: Generate unique request IDs ExternalProject_Add( ngx_txid + EXCLUDE_FROM_ALL 1 URL https://github.com/streadway/ngx_txid/archive/${NGX_TXID_VERSION}.tar.gz URL_HASH MD5=${NGX_TXID_HASH} CONFIGURE_COMMAND "" @@ -27,6 +44,7 @@ set(NGX_TXID_SOURCE_DIR ${SOURCE_DIR}) # Pull in newer version of PCRE (8.20+) for OpenResty to enable PCRE JIT. 
ExternalProject_Add( pcre + EXCLUDE_FROM_ALL 1 URL https://ftp.pcre.org/pub/pcre/pcre-${PCRE_VERSION}.tar.bz2 URL_HASH MD5=${PCRE_HASH} CONFIGURE_COMMAND "" @@ -39,6 +57,7 @@ set(PCRE_SOURCE_DIR ${SOURCE_DIR}) # OpenResty's ssl_certificate_by_lua functionality requires OpenSSL 1.0.2e+ ExternalProject_Add( openssl + EXCLUDE_FROM_ALL 1 URL https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz URL_HASH SHA256=${OPENSSL_HASH} CONFIGURE_COMMAND "" @@ -51,11 +70,7 @@ set(OPENSSL_SOURCE_DIR ${SOURCE_DIR}) list(APPEND OPENRESTY_CONFIGURE_CMD /configure) list(APPEND OPENRESTY_CONFIGURE_CMD --prefix=${INSTALL_PREFIX_EMBEDDED}/openresty) list(APPEND OPENRESTY_CONFIGURE_CMD --with-cc-opt=-I${STAGE_EMBEDDED_DIR}/include) -if(ENABLE_TEST_DEPENDENCIES) - list(APPEND OPENRESTY_CONFIGURE_CMD "--with-ld-opt=-L${STAGE_EMBEDDED_DIR}/lib") -else() - list(APPEND OPENRESTY_CONFIGURE_CMD "--with-ld-opt=-L${STAGE_EMBEDDED_DIR}/lib") -endif() +list(APPEND OPENRESTY_CONFIGURE_CMD --with-ld-opt=-L${STAGE_EMBEDDED_DIR}/lib) list(APPEND OPENRESTY_CONFIGURE_CMD --error-log-path=stderr) list(APPEND OPENRESTY_CONFIGURE_CMD --with-ipv6) list(APPEND OPENRESTY_CONFIGURE_CMD --with-openssl=${OPENSSL_SOURCE_DIR}) @@ -74,10 +89,14 @@ list(APPEND OPENRESTY_CONFIGURE_CMD --add-module=${NGX_TXID_SOURCE_DIR}) ExternalProject_Add( openresty + EXCLUDE_FROM_ALL 1 DEPENDS libgeoip ngx_dyups ngx_txid openssl pcre URL https://openresty.org/download/openresty-${OPENRESTY_VERSION}.tar.gz URL_HASH MD5=${OPENRESTY_HASH} BUILD_IN_SOURCE 1 + # Patch opm to allow it to pick up dynamic LUA_PATH and LUA_CPATH, since we + # need different paths while performing staged installations. + PATCH_COMMAND patch -p1 < ${CMAKE_SOURCE_DIR}/build/patches/opm.patch CONFIGURE_COMMAND ${OPENRESTY_CONFIGURE_CMD} # Wipe the .openssl directory inside the openssl dir, or else openresty # will fail to build on rebuilds: https://trac.nginx.org/nginx/ticket/583 @@ -85,7 +104,24 @@ ExternalProject_Add( COMMAND make INSTALL_COMMAND make install DESTDIR=${STAGE_DIR} COMMAND chrpath -d ${STAGE_EMBEDDED_DIR}/openresty/nginx/sbin/nginx + COMMAND cd ${STAGE_EMBEDDED_DIR}/bin && ln -snf ../openresty/bin/opm ./opm COMMAND cd ${STAGE_EMBEDDED_DIR}/bin && ln -snf ../openresty/bin/resty ./resty COMMAND cd ${STAGE_EMBEDDED_DIR}/bin && ln -snf ../openresty/luajit/bin/luajit ./luajit COMMAND mkdir -p ${STAGE_EMBEDDED_DIR}/sbin && cd ${STAGE_EMBEDDED_DIR}/sbin && ln -snf ../openresty/nginx/sbin/nginx ./nginx ) + +# LuaRocks: Lua dependency management +ExternalProject_Add( + luarocks + EXCLUDE_FROM_ALL 1 + DEPENDS openresty + URL http://luarocks.org/releases/luarocks-${LUAROCKS_VERSION}.tar.gz + URL_HASH MD5=${LUAROCKS_HASH} + BUILD_IN_SOURCE 1 + CONFIGURE_COMMAND /configure --prefix=${INSTALL_PREFIX_EMBEDDED}/openresty/luajit --with-lua=${STAGE_EMBEDDED_DIR}/openresty/luajit --with-lua-include=${STAGE_EMBEDDED_DIR}/openresty/luajit/include/luajit-2.1 --lua-suffix=jit + BUILD_COMMAND make build + INSTALL_COMMAND make install DESTDIR=${STAGE_DIR} + COMMAND cd ${STAGE_EMBEDDED_DIR}/bin && ln -snf ../openresty/luajit/bin/luarocks ./luarocks + COMMAND rm -rf ${VENDOR_DIR}/share/lua ${VENDOR_DIR}/lib/luarocks + COMMAND rm -rf ${TEST_VENDOR_DIR}/share/lua ${TEST_VENDOR_DIR}/lib/luarocks +) diff --git a/build/cmake/perp.cmake b/build/cmake/deps/perp.cmake similarity index 85% rename from build/cmake/perp.cmake rename to build/cmake/deps/perp.cmake index 974ede026..570667196 100644 --- a/build/cmake/perp.cmake +++ b/build/cmake/deps/perp.cmake @@ -1,6 +1,11 @@ # Perp: Process 
supervision and control + +set(PERP_VERSION 2.07) +set(PERP_HASH a2acc7425d556d9635a25addcee9edb5) + ExternalProject_Add( perp + EXCLUDE_FROM_ALL 1 URL http://b0llix.net/perp/distfiles/perp-${PERP_VERSION}.tar.gz URL_HASH MD5=${PERP_HASH} PATCH_COMMAND sed -i -e "s%BINDIR.*%BINDIR = ${INSTALL_PREFIX_EMBEDDED}/bin%" conf.mk diff --git a/build/cmake/rsyslog.cmake b/build/cmake/deps/rsyslog.cmake similarity index 79% rename from build/cmake/rsyslog.cmake rename to build/cmake/deps/rsyslog.cmake index b99845cbf..8d6a06199 100644 --- a/build/cmake/rsyslog.cmake +++ b/build/cmake/deps/rsyslog.cmake @@ -1,5 +1,14 @@ # rsyslog: Log buffering and processing +set(LIBESTR_VERSION 0.1.10) +set(LIBESTR_HASH bd655e126e750edd18544b88eb1568d200a424a0c23f665eb14bbece07ac703c) +set(LIBFASTJSON_VERSION 0.99.8) +set(LIBFASTJSON_HASH 730713ad1d851def7ac8898f751bbfdd) +set(LIBLOGGING_VERSION 1.0.6) +set(LIBLOGGING_HASH 338c6174e5c8652eaa34f956be3451f7491a4416ab489aef63151f802b00bf93) +set(RSYSLOG_VERSION 8.34.0) +set(RSYSLOG_HASH 18330a9764c55d2501b847aad267292bd96c2b12fa5c3b92909bd8d4563c80a9) + find_package(CURL REQUIRED) require_program(autoconf) require_program(automake) @@ -11,6 +20,7 @@ pkg_check_modules(LIBUUID REQUIRED uuid) # (https://bugzilla.redhat.com/show_bug.cgi?id=1152899). ExternalProject_Add( libestr + EXCLUDE_FROM_ALL 1 URL http://libestr.adiscon.com/files/download/libestr-${LIBESTR_VERSION}.tar.gz URL_HASH SHA256=${LIBESTR_HASH} BUILD_IN_SOURCE 1 @@ -20,6 +30,7 @@ ExternalProject_Add( ExternalProject_Add( libfastjson + EXCLUDE_FROM_ALL 1 URL https://github.com/rsyslog/libfastjson/archive/v${LIBFASTJSON_VERSION}.tar.gz URL_HASH MD5=${LIBFASTJSON_HASH} BUILD_IN_SOURCE 1 @@ -34,6 +45,7 @@ list(APPEND LIBLOGGING_CONFIGURE_CMD --prefix=${INSTALL_PREFIX_EMBEDDED}) list(APPEND LIBLOGGING_CONFIGURE_CMD --disable-man-pages) ExternalProject_Add( liblogging + EXCLUDE_FROM_ALL 1 URL http://download.rsyslog.com/liblogging/liblogging-${LIBLOGGING_VERSION}.tar.gz URL_HASH SHA256=${LIBLOGGING_HASH} BUILD_IN_SOURCE 1 @@ -46,27 +58,9 @@ ExternalProject_Add( COMMAND chrpath -d ${STAGE_EMBEDDED_DIR}/bin/stdlogctl ) -if(ENABLE_HADOOP_ANALYTICS) - # There's a small dependency on Python for librdkafka's Makefile: - # https://github.com/edenhill/librdkafka/blob/v0.9.2/Makefile#L8 - find_package(PythonInterp REQUIRED) - - ExternalProject_Add( - librdkafka - URL https://github.com/edenhill/librdkafka/archive/v${LIBRDKAFKA_VERSION}.tar.gz - URL_HASH MD5=${LIBRDKAFKA_HASH} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND /configure --prefix=${INSTALL_PREFIX_EMBEDDED} - INSTALL_COMMAND make install DESTDIR=${STAGE_DIR} - ) -endif() - list(APPEND RSYSLOG_DEPENDS libestr) list(APPEND RSYSLOG_DEPENDS libfastjson) list(APPEND RSYSLOG_DEPENDS liblogging) -if(ENABLE_HADOOP_ANALYTICS) - list(APPEND RSYSLOG_DEPENDS librdkafka) -endif() list(APPEND RSYSLOG_CONFIGURE_CMD env) list(APPEND RSYSLOG_CONFIGURE_CMD LIBESTR_CFLAGS=-I${STAGE_EMBEDDED_DIR}/include) @@ -75,10 +69,6 @@ list(APPEND RSYSLOG_CONFIGURE_CMD LIBFASTJSON_CFLAGS=-I${STAGE_EMBEDDED_DIR}/inc list(APPEND RSYSLOG_CONFIGURE_CMD "LIBFASTJSON_LIBS=-L${STAGE_EMBEDDED_DIR}/lib -lfastjson") list(APPEND RSYSLOG_CONFIGURE_CMD LIBLOGGING_STDLOG_CFLAGS=-I${STAGE_EMBEDDED_DIR}/include) list(APPEND RSYSLOG_CONFIGURE_CMD "LIBLOGGING_STDLOG_LIBS=-L${STAGE_EMBEDDED_DIR}/lib -llogging-stdlog") -if(ENABLE_HADOOP_ANALYTICS) - list(APPEND RSYSLOG_CONFIGURE_CMD LIBRDKAFKA_CFLAGS=-I${STAGE_EMBEDDED_DIR}/include) - list(APPEND RSYSLOG_CONFIGURE_CMD "LIBRDKAFKA_LIBS=-L${STAGE_EMBEDDED_DIR}/lib 
-lrdkafka") -endif() list(APPEND RSYSLOG_CONFIGURE_CMD /configure) list(APPEND RSYSLOG_CONFIGURE_CMD --prefix=${INSTALL_PREFIX_EMBEDDED}) list(APPEND RSYSLOG_CONFIGURE_CMD --enable-liblogging-stdlog) @@ -88,12 +78,10 @@ list(APPEND RSYSLOG_CONFIGURE_CMD --enable-impstats) list(APPEND RSYSLOG_CONFIGURE_CMD --enable-mmjsonparse) list(APPEND RSYSLOG_CONFIGURE_CMD --enable-mmutf8fix) list(APPEND RSYSLOG_CONFIGURE_CMD --enable-elasticsearch) -if(ENABLE_HADOOP_ANALYTICS) - list(APPEND RSYSLOG_CONFIGURE_CMD --enable-omkafka) -endif() ExternalProject_Add( rsyslog + EXCLUDE_FROM_ALL 1 DEPENDS ${RSYSLOG_DEPENDS} URL http://www.rsyslog.com/download/files/download/rsyslog/rsyslog-${RSYSLOG_VERSION}.tar.gz URL_HASH SHA256=${RSYSLOG_HASH} diff --git a/build/cmake/ruby.cmake b/build/cmake/deps/ruby.cmake similarity index 77% rename from build/cmake/ruby.cmake rename to build/cmake/deps/ruby.cmake index 1e9721034..4211e13f9 100644 --- a/build/cmake/ruby.cmake +++ b/build/cmake/deps/ruby.cmake @@ -1,4 +1,12 @@ # Ruby & Bundler: For Rails web-app component + +set(BUNDLER_VERSION 1.16.1) +set(BUNDLER_HASH 42b8e0f57093e1d10c15542f956a871446b759e7969d99f91caf3b6731c156e8) +set(RUBY_VERSION 2.4.4) +set(RUBY_HASH 45a8de577471b90dc4838c5ef26aeb253a56002896189055a44dc680644243f1) +set(RUBYGEMS_VERSION 2.7.6) +set(RUBYGEMS_HASH ee5ef219ac97f5499c31e6071eae424c3265620ece33b5cc66e09fa30f22086a) + list(APPEND RUBY_CONFIGURE_CMD env) list(APPEND RUBY_CONFIGURE_CMD /configure) list(APPEND RUBY_CONFIGURE_CMD --prefix=${INSTALL_PREFIX_EMBEDDED}) @@ -8,6 +16,7 @@ list(APPEND RUBY_CONFIGURE_CMD --disable-install-doc) ExternalProject_Add( ruby + EXCLUDE_FROM_ALL 1 URL https://cache.ruby-lang.org/pub/ruby/ruby-${RUBY_VERSION}.tar.bz2 URL_HASH SHA256=${RUBY_HASH} CONFIGURE_COMMAND rm -rf && mkdir -p # Clean across version upgrades @@ -17,6 +26,7 @@ ExternalProject_Add( ExternalProject_Add( rubygems + EXCLUDE_FROM_ALL 1 DEPENDS ruby URL https://rubygems.org/downloads/rubygems-update-${RUBYGEMS_VERSION}.gem URL_HASH SHA256=${RUBYGEMS_HASH} @@ -28,6 +38,7 @@ ExternalProject_Add( ExternalProject_Add( bundler + EXCLUDE_FROM_ALL 1 DEPENDS rubygems URL https://rubygems.org/downloads/bundler-${BUNDLER_VERSION}.gem URL_HASH SHA256=${BUNDLER_HASH} diff --git a/build/cmake/runit_svlogd.cmake b/build/cmake/deps/runit_svlogd.cmake similarity index 77% rename from build/cmake/runit_svlogd.cmake rename to build/cmake/deps/runit_svlogd.cmake index 7ea4cd1b9..21ee3a1a4 100644 --- a/build/cmake/runit_svlogd.cmake +++ b/build/cmake/deps/runit_svlogd.cmake @@ -1,6 +1,11 @@ # runit's svlogd as alternative to perp's tinylog with more features. 
+ +set(RUNIT_VERSION 2.1.2) +set(RUNIT_HASH 6c985fbfe3a34608eb3c53dc719172c4) + ExternalProject_Add( - runit + runit_svlogd + EXCLUDE_FROM_ALL 1 URL http://smarden.org/runit/runit-${RUNIT_VERSION}.tar.gz URL_HASH MD5=${RUNIT_HASH} BUILD_IN_SOURCE 1 diff --git a/build/cmake/trafficserver.cmake b/build/cmake/deps/trafficserver.cmake similarity index 79% rename from build/cmake/trafficserver.cmake rename to build/cmake/deps/trafficserver.cmake index 8e116c576..06eae3cfa 100644 --- a/build/cmake/trafficserver.cmake +++ b/build/cmake/deps/trafficserver.cmake @@ -1,4 +1,8 @@ # TrafficServer: HTTP caching server + +set(TRAFFICSERVER_VERSION 7.1.3) +set(TRAFFICSERVER_HASH 1ddb23a1c0564929d2246ff3cd97595a9d0b1891736a9d0ef8ca56f52a7b86159b657bbc22f2e64aaccee13009ceff2a47c92b8b25121d65c7ccfdedf8b084ea) + list(APPEND TRAFFICSERVER_CONFIGURE_CMD env) list(APPEND TRAFFICSERVER_CONFIGURE_CMD SPHINXBUILD=false) list(APPEND TRAFFICSERVER_CONFIGURE_CMD /configure) @@ -7,16 +11,13 @@ list(APPEND TRAFFICSERVER_CONFIGURE_CMD --enable-experimental-plugins) ExternalProject_Add( trafficserver + EXCLUDE_FROM_ALL 1 URL http://mirror.olnevhost.net/pub/apache/trafficserver/trafficserver-${TRAFFICSERVER_VERSION}.tar.bz2 - URL_HASH MD5=${TRAFFICSERVER_HASH} + URL_HASH SHA512=${TRAFFICSERVER_HASH} CONFIGURE_COMMAND rm -rf && mkdir -p # Clean across version upgrades COMMAND ${TRAFFICSERVER_CONFIGURE_CMD} INSTALL_COMMAND make install DESTDIR=${STAGE_DIR} COMMAND chrpath -d ${STAGE_EMBEDDED_DIR}/lib/libtsmgmt.so COMMAND find ${STAGE_EMBEDDED_DIR}/libexec/trafficserver/ -name *.so -exec chrpath -d {} $ COMMAND find ${STAGE_EMBEDDED_DIR}/bin/ -name traffic_* -exec chrpath -d {} $ - COMMAND chrpath -d ${STAGE_EMBEDDED_DIR}/bin/tstop - # Trim our own distribution by removing some larger files we don't need for - # API Umbrella. 
- COMMAND rm -f ${STAGE_EMBEDDED_DIR}/bin/traffic_sac ) diff --git a/build/cmake/distclean.cmake b/build/cmake/distclean.cmake index 8e9030542..e5918c007 100644 --- a/build/cmake/distclean.cmake +++ b/build/cmake/distclean.cmake @@ -1,4 +1,3 @@ -add_custom_target( - distclean +add_custom_target(distclean COMMAND ${CMAKE_SOURCE_DIR}/build/scripts/distclean ) diff --git a/build/cmake/GetGitRevisionDescription.cmake b/build/cmake/functions/GetGitRevisionDescription.cmake similarity index 100% rename from build/cmake/GetGitRevisionDescription.cmake rename to build/cmake/functions/GetGitRevisionDescription.cmake diff --git a/build/cmake/GetGitRevisionDescription.cmake.in b/build/cmake/functions/GetGitRevisionDescription.cmake.in similarity index 100% rename from build/cmake/GetGitRevisionDescription.cmake.in rename to build/cmake/functions/GetGitRevisionDescription.cmake.in diff --git a/build/cmake/GetGitTimestamp.cmake b/build/cmake/functions/GetGitTimestamp.cmake similarity index 100% rename from build/cmake/GetGitTimestamp.cmake rename to build/cmake/functions/GetGitTimestamp.cmake diff --git a/build/cmake/luarocks_install.cmake b/build/cmake/functions/luarocks_install.cmake similarity index 97% rename from build/cmake/luarocks_install.cmake rename to build/cmake/functions/luarocks_install.cmake index 66a5a34de..7a3955e26 100644 --- a/build/cmake/luarocks_install.cmake +++ b/build/cmake/functions/luarocks_install.cmake @@ -1,6 +1,7 @@ function(_luarocks_install tree_dir package version hash) ExternalProject_Add( luarock_${package} + EXCLUDE_FROM_ALL 1 DEPENDS luarocks ${ARGV5} URL https://luarocks.org/${package}-${version}.rockspec URL_HASH MD5=${hash} diff --git a/build/cmake/functions/opm_install.cmake b/build/cmake/functions/opm_install.cmake new file mode 100644 index 000000000..39e6d9e88 --- /dev/null +++ b/build/cmake/functions/opm_install.cmake @@ -0,0 +1,22 @@ +function(_opm_install tree_dir package account version hash) + ExternalProject_Add( + opm_${package} + EXCLUDE_FROM_ALL 1 + DEPENDS openresty ${ARGV5} + URL https://opm.openresty.org/api/pkg/tarball/${account}/${package}-${version}.opm.tar.gz + URL_HASH MD5=${hash} + DOWNLOAD_NO_EXTRACT 1 + CONFIGURE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND mkdir -p ${tree_dir} && cd ${tree_dir} && ${OPM_CMD} --cwd get ${account}/${package}=${version} + COMMAND find ${tree_dir}/resty_modules -name *.so -exec chrpath -d {} $ + ) +endfunction() + +function(opm_install package account version hash) + _opm_install(${VENDOR_DIR} ${package} ${account} ${version} ${hash} ${ARGV4}) +endfunction() + +function(test_opm_install package account version hash) + _opm_install(${TEST_VENDOR_DIR} ${package} ${account} ${version} ${hash} ${ARGV4}) +endfunction() diff --git a/build/cmake/functions/require_program.cmake b/build/cmake/functions/require_program.cmake new file mode 100644 index 000000000..29a2a86ca --- /dev/null +++ b/build/cmake/functions/require_program.cmake @@ -0,0 +1,6 @@ +function(require_program name) + find_program(${name} ${name}) + if(NOT ${name}) + MESSAGE(FATAL_ERROR "Could not find ${name}") + endif() +endfunction(require_program) diff --git a/build/cmake/hadoop-analytics/flume.cmake b/build/cmake/hadoop-analytics/flume.cmake deleted file mode 100644 index 1605204c3..000000000 --- a/build/cmake/hadoop-analytics/flume.cmake +++ /dev/null @@ -1,12 +0,0 @@ -find_package(Java 1.7 REQUIRED COMPONENTS Runtime) - -# Flume: Hadoop log buffering and writing -ExternalProject_Add( - flume - URL 
http://apache.cs.utah.edu/flume/${FLUME_VERSION}/apache-flume-${FLUME_VERSION}-bin.tar.gz - URL_HASH MD5=${FLUME_HASH} - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND mkdir -p ${HADOOP_ANALYTICS_STAGE_EMBEDDED_DIR}/flume - COMMAND rsync -a -v --exclude=/docs --exclude=/tools --delete-after --delete-excluded / ${HADOOP_ANALYTICS_STAGE_EMBEDDED_DIR}/flume/ -) diff --git a/build/cmake/hadoop-analytics/kylin.cmake b/build/cmake/hadoop-analytics/kylin.cmake deleted file mode 100644 index 047e73510..000000000 --- a/build/cmake/hadoop-analytics/kylin.cmake +++ /dev/null @@ -1,12 +0,0 @@ -find_package(Java 1.7 REQUIRED COMPONENTS Runtime) - -# Kylin: Hadoop-based analytics database -ExternalProject_Add( - kylin - URL http://mirrors.sonic.net/apache/kylin/apache-kylin-${KYLIN_VERSION}/apache-kylin-${KYLIN_VERSION}-bin.tar.gz - URL_HASH MD5=${KYLIN_HASH} - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND mkdir -p ${HADOOP_ANALYTICS_STAGE_EMBEDDED_DIR}/kylin - COMMAND rsync -a -v --exclude=/sample_cube --delete-after --delete-excluded / ${HADOOP_ANALYTICS_STAGE_EMBEDDED_DIR}/kylin/ -) diff --git a/build/cmake/hadoop-analytics/presto.cmake b/build/cmake/hadoop-analytics/presto.cmake deleted file mode 100644 index 8f5db3cbf..000000000 --- a/build/cmake/hadoop-analytics/presto.cmake +++ /dev/null @@ -1,12 +0,0 @@ -find_package(Java 1.7 REQUIRED COMPONENTS Runtime) - -# Presto: ANSI-SQL queries against Hadoop Hive tables. -ExternalProject_Add( - presto - URL https://repo1.maven.org/maven2/com/facebook/presto/presto-server/${PRESTO_VERSION}/presto-server-${PRESTO_VERSION}.tar.gz - URL_HASH MD5=${PRESTO_HASH} - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND mkdir -p ${HADOOP_ANALYTICS_STAGE_EMBEDDED_DIR}/presto - COMMAND rsync -a -v --include=/plugin/hive-hadoop2 --include=/plugin/jmx --exclude=/plugin/* --delete-after --delete-excluded / ${HADOOP_ANALYTICS_STAGE_EMBEDDED_DIR}/presto/ -) diff --git a/build/cmake/hadoop-analytics/processor.cmake b/build/cmake/hadoop-analytics/processor.cmake deleted file mode 100644 index 9f0ec9471..000000000 --- a/build/cmake/hadoop-analytics/processor.cmake +++ /dev/null @@ -1,25 +0,0 @@ -find_package(Java 1.7 REQUIRED COMPONENTS Runtime) -find_package(Java 1.7 REQUIRED COMPONENTS Development) - -ExternalProject_Add( - maven - URL http://apache.mirrors.ionfish.org/maven/maven-3/${MAVEN_VERSION}/binaries/apache-maven-${MAVEN_VERSION}-bin.tar.gz - URL_HASH MD5=${MAVEN_HASH} - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND "" -) -ExternalProject_Get_Property(maven SOURCE_DIR) -set(MAVEN_SOURCE_DIR ${SOURCE_DIR}) - -add_custom_command( - OUTPUT ${STAMP_DIR}/hadoop-analytics-processor - DEPENDS - maven - COMMAND mkdir -p ${WORK_DIR}/src/hadoop-analytics - COMMAND env PATH=${MAVEN_SOURCE_DIR}/bin:$ENV{PATH} mvn -f ${CMAKE_SOURCE_DIR}/src/api-umbrella/hadoop-analytics/pom.xml clean package -DbuildDir=${WORK_DIR}/src/hadoop-analytics - COMMAND mkdir -p ${HADOOP_ANALYTICS_STAGE_EMBEDDED_DIR}/hadoop-analytics - COMMAND cp ${WORK_DIR}/src/hadoop-analytics/processor/processor-0.0.1-SNAPSHOT.jar ${HADOOP_ANALYTICS_STAGE_EMBEDDED_DIR}/hadoop-analytics/processor.jar - COMMAND touch ${STAMP_DIR}/hadoop-analytics-processor -) -add_custom_target(hadoop-analytics-processor ALL DEPENDS ${STAMP_DIR}/hadoop-analytics-processor) diff --git a/build/cmake/install.cmake b/build/cmake/install.cmake index 9c0489ee4..3879e8ba0 100644 --- a/build/cmake/install.cmake +++ b/build/cmake/install.cmake @@ -86,28 +86,10 @@ install( COMPONENT core ) 
-if(ENABLE_HADOOP_ANALYTICS) - install( - DIRECTORY ${HADOOP_ANALYTICS_STAGE_PREFIX_DIR}/ - DESTINATION ${CMAKE_INSTALL_PREFIX} - USE_SOURCE_PERMISSIONS - COMPONENT hadoop-analytics - ) -endif() - -add_custom_target( - install-core +add_custom_target(install-core COMMAND ${CMAKE_COMMAND} -D CMAKE_INSTALL_COMPONENT=core -P ${CMAKE_BINARY_DIR}/cmake_install.cmake ) -if(ENABLE_HADOOP_ANALYTICS) - add_custom_target( - install-hadoop-analytics - COMMAND ${CMAKE_COMMAND} -D CMAKE_INSTALL_COMPONENT=hadoop-analytics -P ${CMAKE_BINARY_DIR}/cmake_install.cmake - ) -endif() - -add_custom_target( - after-install +add_custom_target(after-install COMMAND ${CMAKE_SOURCE_DIR}/build/package/scripts/after-install 1 ) diff --git a/build/cmake/luarocks.cmake b/build/cmake/luarocks.cmake deleted file mode 100644 index ae8e0adee..000000000 --- a/build/cmake/luarocks.cmake +++ /dev/null @@ -1,14 +0,0 @@ -# LuaRocks: Lua dependency management -ExternalProject_Add( - luarocks - DEPENDS openresty - URL http://luarocks.org/releases/luarocks-${LUAROCKS_VERSION}.tar.gz - URL_HASH MD5=${LUAROCKS_HASH} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND /configure --prefix=${INSTALL_PREFIX_EMBEDDED}/openresty/luajit --with-lua=${STAGE_EMBEDDED_DIR}/openresty/luajit --with-lua-include=${STAGE_EMBEDDED_DIR}/openresty/luajit/include/luajit-2.1 --lua-suffix=jit - BUILD_COMMAND make build - INSTALL_COMMAND make install DESTDIR=${STAGE_DIR} - COMMAND cd ${STAGE_EMBEDDED_DIR}/bin && ln -snf ../openresty/luajit/bin/luarocks ./luarocks - COMMAND rm -rf ${VENDOR_DIR}/share/lua ${VENDOR_DIR}/lib/luarocks - COMMAND rm -rf ${TEST_VENDOR_DIR}/share/lua ${TEST_VENDOR_DIR}/lib/luarocks -) diff --git a/build/cmake/package.cmake b/build/cmake/package.cmake index a70129f0a..6514a127a 100644 --- a/build/cmake/package.cmake +++ b/build/cmake/package.cmake @@ -8,8 +8,7 @@ add_custom_command( COMMAND touch ${STAMP_DIR}/package-bundle ) -add_custom_target( - package-core +add_custom_target(package-core DEPENDS ${STAMP_DIR}/package-bundle COMMAND rm -rf ${WORK_DIR}/package-dest-core COMMAND make @@ -18,28 +17,4 @@ add_custom_target( COMMAND rm -rf ${WORK_DIR}/package-dest-core ) -if(ENABLE_HADOOP_ANALYTICS) - add_custom_target( - package-hadoop-analytics - DEPENDS ${STAMP_DIR}/package-bundle - COMMAND rm -rf ${WORK_DIR}/package-dest-hadoop-analytics - COMMAND make - COMMAND make install-hadoop-analytics DESTDIR=${WORK_DIR}/package-dest-hadoop-analytics - COMMAND env PATH=${STAGE_EMBEDDED_DIR}/bin:$ENV{PATH} BUNDLE_GEMFILE=${CMAKE_SOURCE_DIR}/build/package/Gemfile BUNDLE_APP_CONFIG=${WORK_DIR}/src/package/.bundle WORK_DIR=${WORK_DIR} PACKAGE_WORK_DIR=${PACKAGE_WORK_DIR} PACKAGE=hadoop-analytics ${CMAKE_SOURCE_DIR}/build/package/build_package - COMMAND rm -rf ${WORK_DIR}/package-dest-hadoop-analytics - ) -endif() - -# CMake policy CMP0037 to allow target named "package". 
-cmake_policy(PUSH) -if(POLICY CMP0037) - cmake_policy(SET CMP0037 OLD) -endif() -add_custom_target( - package - DEPENDS package-core -) -if(ENABLE_HADOOP_ANALYTICS) - add_dependencies(package package-hadoop-analytics) -endif() -cmake_policy(POP) +add_custom_target(package DEPENDS package-core) diff --git a/build/cmake/test-deps.cmake b/build/cmake/test-deps.cmake new file mode 100644 index 000000000..83229a233 --- /dev/null +++ b/build/cmake/test-deps.cmake @@ -0,0 +1,17 @@ +include(${CMAKE_SOURCE_DIR}/build/cmake/test-deps/luacheck.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/test-deps/mailhog.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/test-deps/mongo-orchestration.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/test-deps/openldap.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/test-deps/phantomjs.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/test-deps/shellcheck.cmake) +include(${CMAKE_SOURCE_DIR}/build/cmake/test-deps/unbound.cmake) + +add_custom_target(test-deps DEPENDS + luacheck + mailhog + mongo-orchestration + openldap + phantomjs + shellcheck + unbound +) diff --git a/build/cmake/test-deps/luacheck.cmake b/build/cmake/test-deps/luacheck.cmake new file mode 100644 index 000000000..30bdbf339 --- /dev/null +++ b/build/cmake/test-deps/luacheck.cmake @@ -0,0 +1,6 @@ +set(LUAROCK_LUACHECK_VERSION 0.22.0-1) +set(LUAROCK_LUACHECK_HASH 17608776f5d37ca898f96f4973b3be0e) + +test_luarocks_install(luacheck ${LUAROCK_LUACHECK_VERSION} ${LUAROCK_LUACHECK_HASH}) + +add_custom_target(luacheck DEPENDS luarock_luacheck) diff --git a/build/cmake/test/mailhog.cmake b/build/cmake/test-deps/mailhog.cmake similarity index 77% rename from build/cmake/test/mailhog.cmake rename to build/cmake/test-deps/mailhog.cmake index 56f0c7bf7..99407342a 100644 --- a/build/cmake/test/mailhog.cmake +++ b/build/cmake/test-deps/mailhog.cmake @@ -1,6 +1,11 @@ # MailHog: SMTP testing server + +set(MAILHOG_VERSION 1.0.0) +set(MAILHOG_HASH 3b758c81bfe2c9110911511daca1a7bc) + ExternalProject_Add( mailhog + EXCLUDE_FROM_ALL 1 URL https://github.com/mailhog/MailHog/releases/download/v${MAILHOG_VERSION}/MailHog_linux_amd64 URL_HASH MD5=${MAILHOG_HASH} DOWNLOAD_NO_EXTRACT 1 diff --git a/build/cmake/test/mongo-orchestration.cmake b/build/cmake/test-deps/mongo-orchestration.cmake similarity index 57% rename from build/cmake/test/mongo-orchestration.cmake rename to build/cmake/test-deps/mongo-orchestration.cmake index 4e296bb79..f9e952fb2 100644 --- a/build/cmake/test/mongo-orchestration.cmake +++ b/build/cmake/test-deps/mongo-orchestration.cmake @@ -1,11 +1,13 @@ -# Python test dependencies (mongo-orchestration) +set(MONGO_ORCHESTRATION_VERSION 0.6.11) + add_custom_command( OUTPUT ${TEST_INSTALL_PREFIX}/bin/pip COMMAND virtualenv ${TEST_INSTALL_PREFIX} ) -add_custom_target(test_virtualenv ALL DEPENDS ${TEST_INSTALL_PREFIX}/bin/pip) add_custom_command( OUTPUT ${TEST_INSTALL_PREFIX}/bin/mongo-orchestration + DEPENDS ${TEST_INSTALL_PREFIX}/bin/pip COMMAND ${TEST_INSTALL_PREFIX}/bin/pip install --ignore-installed 'mongo-orchestration==${MONGO_ORCHESTRATION_VERSION}' ) -add_custom_target(test_pip_install ALL DEPENDS ${TEST_INSTALL_PREFIX}/bin/mongo-orchestration) + +add_custom_target(mongo-orchestration DEPENDS ${TEST_INSTALL_PREFIX}/bin/mongo-orchestration) diff --git a/build/cmake/test/openldap.cmake b/build/cmake/test-deps/openldap.cmake similarity index 75% rename from build/cmake/test/openldap.cmake rename to build/cmake/test-deps/openldap.cmake index 347d906fc..a6f8414a9 100644 --- a/build/cmake/test/openldap.cmake +++ 
b/build/cmake/test-deps/openldap.cmake @@ -1,6 +1,11 @@ # OpenLDAP: For testing LDAP admin auth. + +set(OPENLDAP_VERSION 2.4.46) +set(OPENLDAP_HASH a9ae2273eb9bdd70090dafe0d018a3132606bef6) + ExternalProject_Add( openldap + EXCLUDE_FROM_ALL 1 URL ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-${OPENLDAP_VERSION}.tgz URL_HASH SHA1=${OPENLDAP_HASH} CONFIGURE_COMMAND /configure --prefix=${TEST_INSTALL_PREFIX} --disable-backends --enable-mdb diff --git a/build/cmake/test/phantomjs.cmake b/build/cmake/test-deps/phantomjs.cmake similarity index 77% rename from build/cmake/test/phantomjs.cmake rename to build/cmake/test-deps/phantomjs.cmake index 6d3a976e3..44a150817 100644 --- a/build/cmake/test/phantomjs.cmake +++ b/build/cmake/test-deps/phantomjs.cmake @@ -1,6 +1,11 @@ # PhantomJS: Headless WebKit for testing. + +set(PHANTOMJS_VERSION 2.1.1) +set(PHANTOMJS_HASH 1c947d57fce2f21ce0b43fe2ed7cd361) + ExternalProject_Add( phantomjs + EXCLUDE_FROM_ALL 1 URL https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-${PHANTOMJS_VERSION}-linux-x86_64.tar.bz2 URL_HASH MD5=${PHANTOMJS_HASH} CONFIGURE_COMMAND "" diff --git a/build/cmake/test/shellcheck.cmake b/build/cmake/test-deps/shellcheck.cmake similarity index 60% rename from build/cmake/test/shellcheck.cmake rename to build/cmake/test-deps/shellcheck.cmake index c1817af14..48e913475 100644 --- a/build/cmake/test/shellcheck.cmake +++ b/build/cmake/test-deps/shellcheck.cmake @@ -1,5 +1,9 @@ +set(SHELLCHECK_VERSION 0.4.7) +set(SHELLCHECK_HASH 64bf19a1292f0357c007b615150b6e58dba138bc7bf168c5a5e27016f8b4f802afd9950be8be46bf9e4833f98ae81c6e7b1761a3a76ddbba2a04929265433134) + ExternalProject_Add( shellcheck + EXCLUDE_FROM_ALL 1 URL https://storage.googleapis.com/shellcheck/shellcheck-v${SHELLCHECK_VERSION}.linux.x86_64.tar.xz URL_HASH SHA512=${SHELLCHECK_HASH} CONFIGURE_COMMAND "" diff --git a/build/cmake/test/unbound.cmake b/build/cmake/test-deps/unbound.cmake similarity index 68% rename from build/cmake/test/unbound.cmake rename to build/cmake/test-deps/unbound.cmake index 63fa9852c..ecaa8fd7d 100644 --- a/build/cmake/test/unbound.cmake +++ b/build/cmake/test-deps/unbound.cmake @@ -1,6 +1,11 @@ # Unbound: Local DNS server for testing DNS changes + +set(UNBOUND_VERSION 1.7.1) +set(UNBOUND_HASH 56e085ef582c5372a20207de179d0edb4e541e59f87be7d4ee1d00d12008628d) + ExternalProject_Add( unbound + EXCLUDE_FROM_ALL 1 URL http://www.unbound.net/downloads/unbound-${UNBOUND_VERSION}.tar.gz URL_HASH SHA256=${UNBOUND_HASH} BUILD_IN_SOURCE 1 diff --git a/build/cmake/test.cmake b/build/cmake/test.cmake new file mode 100644 index 000000000..c347dcab7 --- /dev/null +++ b/build/cmake/test.cmake @@ -0,0 +1,27 @@ +add_custom_command( + OUTPUT + ${STAMP_DIR}/test-bundle + ${WORK_DIR}/.bundle + ${WORK_DIR}/bundle + DEPENDS + bundler + ${CMAKE_SOURCE_DIR}/Gemfile + ${CMAKE_SOURCE_DIR}/Gemfile.lock + COMMAND env PATH=${STAGE_EMBEDDED_DIR}/bin:$ENV{PATH} BUNDLE_GEMFILE=${CMAKE_SOURCE_DIR}/Gemfile BUNDLE_APP_CONFIG=${WORK_DIR}/.bundle bundle install --clean --path=${WORK_DIR}/bundle + COMMAND touch -c ${WORK_DIR}/.bundle + COMMAND touch -c ${WORK_DIR}/bundle + COMMAND touch ${STAMP_DIR}/test-bundle +) + +add_custom_target(test-bundle DEPENDS + ${STAMP_DIR}/test-bundle +) + +add_custom_target(test DEPENDS + deps + build-deps + app-deps + test-deps + test-bundle + COMMAND env PATH=${STAGE_EMBEDDED_DIR}/bin:$ENV{PATH} BUNDLE_GEMFILE=${CMAKE_SOURCE_DIR}/Gemfile BUNDLE_APP_CONFIG=${WORK_DIR}/.bundle bundle exec rake +) diff --git a/build/cmake/test/bundle.cmake 
b/build/cmake/test/bundle.cmake deleted file mode 100644 index 1869be32b..000000000 --- a/build/cmake/test/bundle.cmake +++ /dev/null @@ -1,33 +0,0 @@ -add_custom_command( - OUTPUT - ${STAMP_DIR}/test-bundle - ${WORK_DIR}/.bundle - ${WORK_DIR}/bundle - DEPENDS - bundler - ${CMAKE_SOURCE_DIR}/Gemfile - ${CMAKE_SOURCE_DIR}/Gemfile.lock - COMMAND env PATH=${STAGE_EMBEDDED_DIR}/bin:$ENV{PATH} BUNDLE_GEMFILE=${CMAKE_SOURCE_DIR}/Gemfile BUNDLE_APP_CONFIG=${WORK_DIR}/.bundle bundle install --clean --path=${WORK_DIR}/bundle - COMMAND touch -c ${WORK_DIR}/.bundle - COMMAND touch -c ${WORK_DIR}/bundle - COMMAND touch ${STAMP_DIR}/test-bundle -) - -# Normally we perform the bundle out-of-source (so the build takes place -# entirely out of source), but if testing/development is enabled for this -# build, then also create a local ".bundle/config" item within the source. This -# then allows for gems to be found when interacting with the local source -# version of the app. -if(ENABLE_TEST_DEPENDENCIES) - add_custom_command( - OUTPUT ${CMAKE_SOURCE_DIR}/.bundle/config - DEPENDS - ${STAMP_DIR}/test-bundle - ${WORK_DIR}/.bundle - ${WORK_DIR}/bundle - COMMAND rm -rf ${CMAKE_SOURCE_DIR}/.bundle - COMMAND ln -snf ${WORK_DIR}/.bundle ${CMAKE_SOURCE_DIR}/.bundle - COMMAND touch -c ${CMAKE_SOURCE_DIR}/.bundle/config - ) - add_custom_target(test-local-bundle ALL DEPENDS ${CMAKE_SOURCE_DIR}/.bundle/config) -endif() diff --git a/build/cmake/test/lua-deps.cmake b/build/cmake/test/lua-deps.cmake deleted file mode 100644 index 0f49e1087..000000000 --- a/build/cmake/test/lua-deps.cmake +++ /dev/null @@ -1,7 +0,0 @@ -# Luarocks test dependencies -test_luarocks_install(luacheck ${LUAROCK_LUACHECK_VERSION} ${LUAROCK_LUACHECK_HASH}) - -add_custom_target( - test-lua-deps - DEPENDS luarock_luacheck -) diff --git a/build/cmake/test/test.cmake b/build/cmake/test/test.cmake deleted file mode 100644 index 143349ea0..000000000 --- a/build/cmake/test/test.cmake +++ /dev/null @@ -1,21 +0,0 @@ -add_custom_target( - test-deps - DEPENDS ${STAMP_DIR}/test-bundle test-lua-deps -) - -add_custom_target( - test-target - DEPENDS test-deps - COMMAND env PATH=${STAGE_EMBEDDED_DIR}/bin:$ENV{PATH} BUNDLE_GEMFILE=${CMAKE_SOURCE_DIR}/Gemfile BUNDLE_APP_CONFIG=${WORK_DIR}/.bundle bundle exec rake -) - -# CMake policy CMP0037 business to allow target named "test". -cmake_policy(PUSH) -if(POLICY CMP0037) - cmake_policy(SET CMP0037 OLD) -endif() -add_custom_target( - test - DEPENDS all test-target -) -cmake_policy(POP) diff --git a/build/cmake/versions.cmake b/build/cmake/versions.cmake deleted file mode 100644 index 8632802ff..000000000 --- a/build/cmake/versions.cmake +++ /dev/null @@ -1,108 +0,0 @@ -# Define the versions of the various dependencies to build. 
-set(API_UMBRELLA_STATIC_SITE_VERSION c02b8869cafb063deb7f9436d0137b0ea6e652aa) -set(API_UMBRELLA_STATIC_SITE_HASH 07dbd5e6d96e62a9ad6b725b14f727a1) -set(BUNDLER_VERSION 1.16.1) -set(BUNDLER_HASH 42b8e0f57093e1d10c15542f956a871446b759e7969d99f91caf3b6731c156e8) -set(ELASTICSEARCH_VERSION 2.4.6) -set(ELASTICSEARCH_HASH c3441bef89cd91206edf3cf3bd5c4b62550e60a9) -set(FLUME_VERSION 1.7.0) -set(FLUME_HASH 12496e632a96d7ca823ab3c239a2a7d2) -set(GOLANG_VERSION 1.10.2) -set(GOLANG_HASH 4b677d698c65370afa33757b6954ade60347aaca310ea92a63ed717d7cb0c2ff) -set(KYLIN_VERSION 1.6.0) -set(KYLIN_HASH 3f15f35c5ad7168ab401858dd8759dee) -set(LIBCIDR_VERSION 1.2.3) -set(LIBCIDR_HASH c5efcc7ae114fdaa5583f58dacecd9de) -set(LIBESTR_VERSION 0.1.10) -set(LIBESTR_HASH bd655e126e750edd18544b88eb1568d200a424a0c23f665eb14bbece07ac703c) -set(LIBFASTJSON_VERSION 0.99.8) -set(LIBFASTJSON_HASH 730713ad1d851def7ac8898f751bbfdd) -set(LIBGEOIP_VERSION 1.6.12) -set(LIBGEOIP_HASH 77d496cc40daa1dbc2b97365807d64d7) -set(LIBLOGGING_VERSION 1.0.6) -set(LIBLOGGING_HASH 338c6174e5c8652eaa34f956be3451f7491a4416ab489aef63151f802b00bf93) -set(LIBRDKAFKA_VERSION 0.9.5) -set(LIBRDKAFKA_HASH 45bc9713bd4ed948e1efbd62688fc502) -set(LUAROCKS_VERSION 2.4.4) -set(LUAROCKS_HASH 04e8b19d565e86b1d08f745adc4b1a56) -set(LUAROCK_ARGPARSE_VERSION 0.6.0-1) -set(LUAROCK_ARGPARSE_HASH 6656139dd66430075aa2093556857a84) -set(LUAROCK_CMSGPACK_VERSION 0.4.0-0) -set(LUAROCK_CMSGPACK_HASH f459d16fffdbbc85e582803321b3cec9) -set(LUAROCK_ICONV_VERSION 7-3) -set(LUAROCK_ICONV_HASH 138d21a895d267f09ff40fcb75324f74) -set(LUAROCK_INSPECT_VERSION 3.1.1-0) -set(LUAROCK_INSPECT_HASH 8a8a05f10b07a603e44e4f8b39bddd35) -set(LUAROCK_LIBCIDR_VERSION 0.1.2-1) -set(LUAROCK_LIBCIDR_HASH b6bdc9431cb488de8b58a83117107f7a) -set(LUAROCK_LUACHECK_VERSION 0.22.0-1) -set(LUAROCK_LUACHECK_HASH 17608776f5d37ca898f96f4973b3be0e) -set(LUAROCK_LUAPOSIX_VERSION 34.0.4-1) -set(LUAROCK_LUAPOSIX_HASH e584252902055ee40f250a1a304ec18e) -set(LUAROCK_LUATZ_VERSION 0.4-1) -set(LUAROCK_LUATZ_HASH 80772a925cea2ab53bc33184a6e3b24e) -set(LUAROCK_LUSTACHE_VERSION 1.3.1-0) -set(LUAROCK_LUSTACHE_HASH 840ecd41bf19ed1751916de2cd46229e) -set(LUAROCK_LYAML_VERSION 6.2.2-1) -set(LUAROCK_LYAML_HASH d8c8c11db09bfc3f82838d0195d7cf04) -set(LUAROCK_PENLIGHT_VERSION 1.5.4-1) -set(LUAROCK_PENLIGHT_HASH 8f4e6b4c7e851c28cb3e95be728d6507) -set(LUAROCK_RESTY_HTTP_VERSION 0.12-0) -set(LUAROCK_RESTY_HTTP_HASH deaf54d8ce752db7cae5a58566e11517) -set(LUAROCK_RESTY_UUID_VERSION 1.1-1) -set(LUAROCK_RESTY_UUID_HASH d14ae99d6f18edd5c934e6050e974c5e) -set(LUA_LUASOCKET_VERSION 652959890943c34d7180cae372339b91e62f0d7b) -set(LUA_LUASOCKET_HASH 6b3e3bdf60267f5957c2ea44e563ed70) -set(LUA_RESTY_DNS_CACHE_VERSION 32d9d461465edbec1cc798c18447c0ac7ee6e528) -set(LUA_RESTY_DNS_CACHE_HASH 3a5414110c6ad4331fe82873e19bd1e8) -set(LUA_RESTY_LOGGER_SOCKET_VERSION 15cc1c256e55b8e68ec9b220b6883c227a763d4e) -set(LUA_RESTY_LOGGER_SOCKET_HASH efe14697a8c4be612c011f54fce06191) -set(LUA_RESTY_SHCACHE_VERSION fb2e275c2cdca08eaa34a7b73375e41ac3eff200) -set(LUA_RESTY_SHCACHE_HASH 5d3cbcf8fbad1954cdcb3826afa41afe) -set(MAVEN_VERSION 3.5.0) -set(MAVEN_HASH 35c39251d2af99b6624d40d801f6ff02) -set(MAILHOG_VERSION 1.0.0) -set(MAILHOG_HASH 3b758c81bfe2c9110911511daca1a7bc) -set(MONGO_ORCHESTRATION_VERSION 0.6.11) -set(MONGODB_VERSION 3.2.20) -set(MONGODB_HASH 01f7660d86b3de679ce388eaa681286a) -set(MORA_VERSION 8127901857cf88d3f0902708b25ad930354973a3) -set(MORA_HASH b86cea913596370cd58fce89b23acd97) -set(NGX_DYUPS_VERSION a5e75737e04ff3e5040a80f5f739171e96c3359c) 
-set(NGX_DYUPS_HASH e16860efcd0629f38f514469052d998a) -set(NGX_TXID_VERSION f1c197cb9c42e364a87fbb28d5508e486592ca42) -set(NGX_TXID_HASH 408ee86eb6e42e27a51514f711c41d6b) -set(NODEJS_VERSION 8.11.1) -set(NODEJS_HASH 6617e245fa0f7fbe0e373e71d543fea878315324ab31dc64b4eba10e42d04c11) -set(OPENLDAP_VERSION 2.4.46) -set(OPENLDAP_HASH a9ae2273eb9bdd70090dafe0d018a3132606bef6) -set(OPENRESTY_VERSION 1.13.6.1) -set(OPENRESTY_HASH 637f82d0b36c74aec1c01bd3b8e0289c) -set(OPENSSL_VERSION 1.0.2o) -set(OPENSSL_HASH ec3f5c9714ba0fd45cb4e087301eb1336c317e0d20b575a125050470e8089e4d) -set(PCRE_VERSION 8.42) -set(PCRE_HASH 085b6aa253e0f91cae70b3cdbe8c1ac2) -set(PERP_VERSION 2.07) -set(PERP_HASH a2acc7425d556d9635a25addcee9edb5) -set(PHANTOMJS_VERSION 2.1.1) -set(PHANTOMJS_HASH 1c947d57fce2f21ce0b43fe2ed7cd361) -set(PRESTO_VERSION 0.173) -set(PRESTO_HASH f6586ac7a5001a771342307d44b1255d) -set(RUBY_VERSION 2.4.4) -set(RUBY_HASH 45a8de577471b90dc4838c5ef26aeb253a56002896189055a44dc680644243f1) -set(RUBYGEMS_VERSION 2.7.6) -set(RUBYGEMS_HASH ee5ef219ac97f5499c31e6071eae424c3265620ece33b5cc66e09fa30f22086a) -set(RSYSLOG_VERSION 8.34.0) -set(RSYSLOG_HASH 18330a9764c55d2501b847aad267292bd96c2b12fa5c3b92909bd8d4563c80a9) -set(RUNIT_VERSION 2.1.2) -set(RUNIT_HASH 6c985fbfe3a34608eb3c53dc719172c4) -set(SHELLCHECK_VERSION 0.4.7) -set(SHELLCHECK_HASH 64bf19a1292f0357c007b615150b6e58dba138bc7bf168c5a5e27016f8b4f802afd9950be8be46bf9e4833f98ae81c6e7b1761a3a76ddbba2a04929265433134) -# Don't move to TrafficServer 6 until we can verify keepalive behavior: -# https://issues.apache.org/jira/browse/TS-3959 -set(TRAFFICSERVER_VERSION 5.3.2) -set(TRAFFICSERVER_HASH c8e5f3e81da643ea79cba0494ed37d45) -set(UNBOUND_VERSION 1.7.1) -set(UNBOUND_HASH 56e085ef582c5372a20207de179d0edb4e541e59f87be7d4ee1d00d12008628d) -set(YARN_VERSION 1.6.0) -set(YARN_HASH a11a3d8a5d62712fc497a6d1cbea25f6) diff --git a/build/package/Makefile b/build/package/Makefile index 8390be5ee..2f80648c1 100644 --- a/build/package/Makefile +++ b/build/package/Makefile @@ -12,21 +12,21 @@ LOG_DIR:=$(WORK_DIR)/log docker_centos7 \ docker_centos7_build \ docker_centos7_verify \ - docker_ubuntu1204 \ - docker_ubuntu1204_build \ - docker_ubuntu1204_verify \ docker_ubuntu1404 \ docker_ubuntu1404_build \ docker_ubuntu1404_verify \ docker_ubuntu1604 \ docker_ubuntu1604_build \ docker_ubuntu1604_verify \ - docker_debian7 \ - docker_debian7_build \ - docker_debian7_verify \ + docker_ubuntu1804 \ + docker_ubuntu1804_build \ + docker_ubuntu1804_verify \ docker_debian8 \ docker_debian8_build \ docker_debian8_verify \ + docker_debian9 \ + docker_debian9_build \ + docker_debian9_verify \ docker_all $(LOG_DIR): @@ -61,15 +61,6 @@ docker_centos7_build: docker_centos7_verify: DIST=centos-7 $(MAKE) docker_verify -docker_ubuntu1204: - DIST=ubuntu-12.04 $(MAKE) docker - -docker_ubuntu1204_build: - DIST=ubuntu-12.04 $(MAKE) docker_build - -docker_ubuntu1204_verify: - DIST=ubuntu-12.04 $(MAKE) docker_verify - docker_ubuntu1404: DIST=ubuntu-14.04 $(MAKE) docker @@ -88,14 +79,14 @@ docker_ubuntu1604_build: docker_ubuntu1604_verify: DIST=ubuntu-16.04 $(MAKE) docker_verify -docker_debian7: - DIST=debian-7 $(MAKE) docker +docker_ubuntu1804: + DIST=ubuntu-18.04 $(MAKE) docker -docker_debian7_build: - DIST=debian-7 $(MAKE) docker_build +docker_ubuntu1804_build: + DIST=ubuntu-18.04 $(MAKE) docker_build -docker_debian7_verify: - DIST=debian-7 $(MAKE) docker_verify +docker_ubuntu1804_verify: + DIST=ubuntu-18.04 $(MAKE) docker_verify docker_debian8: DIST=debian-8 $(MAKE) docker @@ -106,26 +97,33 @@ 
docker_debian8_build: docker_debian8_verify: DIST=debian-8 $(MAKE) docker_verify +docker_debian9: + DIST=debian-9 $(MAKE) docker + +docker_debian9_build: + DIST=debian-9 $(MAKE) docker_build + +docker_debian9_verify: + DIST=debian-9 $(MAKE) docker_verify + docker_all: docker_centos6 \ docker_centos7 \ - docker_ubuntu1204 \ docker_ubuntu1404 \ docker_ubuntu1604 \ - docker_debian7 \ - docker_debian8 + docker_ubuntu1804 \ + docker_debian8 \ + docker_debian9 docker_all_build: docker_centos6_build \ docker_centos7_build \ - docker_ubuntu1204_build \ docker_ubuntu1404_build \ docker_ubuntu1604_build \ - docker_debian7_build \ - docker_debian8_build + docker_debian8_build \ + docker_debian9_build docker_all_verify: docker_centos6_verify \ docker_centos7_verify \ - docker_ubuntu1204_verify \ docker_ubuntu1404_verify \ docker_ubuntu1604_verify \ - docker_debian7_verify \ - docker_debian8_verify + docker_debian8_verify \ + docker_debian9_verify diff --git a/build/package/docker_script b/build/package/docker_script index e58fe0c35..c019e03ca 100755 --- a/build/package/docker_script +++ b/build/package/docker_script @@ -8,7 +8,7 @@ if [ -f /etc/redhat-release ]; then yum -y install git rsync elif [ -f /etc/debian_version ]; then apt-get update - apt-get -y install git rsync + DEBIAN_FRONTEND=noninteractive apt-get -y --no-install-recommends install git rsync fi # Create a clean copy of the source directory. diff --git a/build/package/publish b/build/package/publish index 2624ef124..f035e7e5c 100755 --- a/build/package/publish +++ b/build/package/publish @@ -10,11 +10,11 @@ source "$source_dir/build/package/parse_version" packages=( "build/package/work/current/centos-6/core/api-umbrella-$version-$package_iteration.el6.x86_64.rpm" "build/package/work/current/centos-7/core/api-umbrella-$version-$package_iteration.el7.x86_64.rpm" - "build/package/work/current/debian-7/core/api-umbrella_$version-$package_iteration~wheezy_amd64.deb" "build/package/work/current/debian-8/core/api-umbrella_$version-$package_iteration~jessie_amd64.deb" - "build/package/work/current/ubuntu-12.04/core/api-umbrella_$version-$package_iteration~precise_amd64.deb" + "build/package/work/current/debian-9/core/api-umbrella_$version-$package_iteration~stretch_amd64.deb" "build/package/work/current/ubuntu-14.04/core/api-umbrella_$version-$package_iteration~trusty_amd64.deb" "build/package/work/current/ubuntu-16.04/core/api-umbrella_$version-$package_iteration~xenial_amd64.deb" + "build/package/work/current/ubuntu-18.04/core/api-umbrella_$version-$package_iteration~bionic_amd64.deb" ) for package in "${packages[@]}"; do diff --git a/build/package/verify/docker_script b/build/package/verify/docker_script index 159a6e5e9..632892d35 100755 --- a/build/package/verify/docker_script +++ b/build/package/verify/docker_script @@ -35,14 +35,11 @@ if [ -f /etc/redhat-release ]; then socat \ sudo elif [ -f /etc/debian_version ]; then - apt-get -y install \ + DEBIAN_FRONTEND=noninteractive apt-get -y --no-install-recommends install \ net-tools \ ruby \ socat \ sudo - if [ "$DIST" == "ubuntu-12.04" ] || [ "$DIST" == "debian-7" ]; then - apt-get -y install rubygems - fi fi gem install bundler --no-rdoc --no-ri diff --git a/build/package/verify/download_previous_packages b/build/package/verify/download_previous_packages index dbf3e9bae..a83c98d2d 100755 --- a/build/package/verify/download_previous_packages +++ b/build/package/verify/download_previous_packages @@ -25,10 +25,11 @@ previous_versions=( distros=( centos-6 centos-7 - ubuntu-12.04 ubuntu-14.04 - 
debian-7 + ubuntu-16.04 + ubuntu-18.04 debian-8 + debian-9 ) if [ -n "${DIST:-}" ]; then @@ -53,10 +54,6 @@ for dist in "${distros[@]}"; do filename="api-umbrella-${version}.el7.x86_64.rpm" url_prefix="$bintray_url_root/api-umbrella-el7" ;; - ubuntu-12.04) - filename="api-umbrella_${version}~precise_amd64.deb" - url_prefix="$bintray_url_root/api-umbrella-ubuntu/pool/main/a/api-umbrella" - ;; ubuntu-14.04) filename="api-umbrella_${version}~trusty_amd64.deb" url_prefix="$bintray_url_root/api-umbrella-ubuntu/pool/main/a/api-umbrella" @@ -69,14 +66,26 @@ for dist in "${distros[@]}"; do filename="" fi ;; - debian-7) - filename="api-umbrella_${version}~wheezy_amd64.deb" - url_prefix="$bintray_url_root/api-umbrella-debian/pool/main/a/api-umbrella" + ubuntu-18.04) + filename="api-umbrella_${version}~bionic_amd64.deb" + url_prefix="$bintray_url_root/api-umbrella-ubuntu/pool/main/a/api-umbrella" + if [ "$(compare_version "$version")" -lt "$(compare_version "0.15.0")" ]; then + # No Ubuntu 18.04 packages until v0.15 + filename="" + fi ;; debian-8) filename="api-umbrella_${version}~jessie_amd64.deb" url_prefix="$bintray_url_root/api-umbrella-debian/pool/main/a/api-umbrella" ;; + debian-9) + filename="api-umbrella_${version}~stretch_amd64.deb" + url_prefix="$bintray_url_root/api-umbrella-debian/pool/main/a/api-umbrella" + if [ "$(compare_version "$version")" -lt "$(compare_version "0.15.0")" ]; then + # No Debian 9 packages until v0.15 + filename="" + fi + ;; *) echo "Unknown distribution: $dist" exit 1 diff --git a/build/package_dependencies.sh b/build/package_dependencies.sh index 31afa3fdf..1d51e4ccf 100644 --- a/build/package_dependencies.sh +++ b/build/package_dependencies.sh @@ -8,12 +8,12 @@ if [ -f /etc/os-release ]; then fi if [ -f /etc/redhat-release ]; then - util_linux_package="util-linux-ng" - procps_package="procps" + util_linux_package="util-linux" + procps_package="procps-ng" - if [[ "${VERSION_ID:-}" == "7" ]]; then - util_linux_package="util-linux" - procps_package="procps-ng" + if [[ "${VERSION_ID:-}" == "6" ]]; then + util_linux_package="util-linux-ng" + procps_package="procps" fi core_package_dependencies=( @@ -57,6 +57,9 @@ if [ -f /etc/redhat-release ]; then # For OpenResty's "resty" CLI. 
perl perl-Time-HiRes + + # lua-icu-date + libicu-devel ) hadoop_analytics_package_dependencies=( java-1.8.0-openjdk-headless @@ -77,7 +80,6 @@ if [ -f /etc/redhat-release ]; then libyaml-devel make ncurses-devel - openssl openssl-devel patch pcre-devel @@ -119,20 +121,27 @@ if [ -f /etc/redhat-release ]; then groff ) elif [ -f /etc/debian_version ]; then - libffi_version=6 - openjdk_version=7 + libcurl_version=3 + libtool_bin_package="libtool-bin" + openjdk_version=8 + + if [[ "$ID" == "ubuntu" && "$VERSION_ID" == "18.04" ]]; then + libcurl_version=4 + fi + + if [[ "$ID" == "ubuntu" && "$VERSION_ID" == "14.04" ]]; then + libtool_bin_package="libtool" + fi - if [[ "$ID" == "debian" && "$VERSION_ID" == "7" ]]; then - libffi_version=5 - elif [[ "$ID" == "ubuntu" && "$VERSION_ID" == "16.04" ]]; then - openjdk_version=8 + if [[ "$ID" == "debian" && "$VERSION_ID" == "8" ]] || [[ "$ID" == "ubuntu" && "$VERSION_ID" == "14.04" ]]; then + openjdk_version=7 fi core_package_dependencies=( # General bash libc6 - "libffi$libffi_version" + libffi6 libncurses5 libpcre3 libuuid1 @@ -154,7 +163,7 @@ elif [ -f /etc/debian_version ]; then "openjdk-$openjdk_version-jre-headless" # rsyslog omelasticsearch - libcurl3 + "libcurl$libcurl_version" # init.d script helpers sysvinit-utils @@ -168,6 +177,9 @@ elif [ -f /etc/debian_version ]; then # For OpenResty's "resty" CLI. perl + + # lua-icu-date + libicu-dev ) hadoop_analytics_package_dependencies=( "openjdk-$openjdk_version-jre-headless" @@ -186,11 +198,11 @@ elif [ -f /etc/debian_version ]; then libpcre3-dev libssl-dev libtool + "$libtool_bin_package" libxml2-dev libyaml-dev lsb-release make - openssl patch pkg-config python @@ -211,6 +223,7 @@ elif [ -f /etc/debian_version ]; then # For installing the mongo-orchestration test dependency. python-virtualenv + virtualenv # For checking for file descriptor leaks during the tests. lsof @@ -229,10 +242,6 @@ elif [ -f /etc/debian_version ]; then # OpenLDAP groff-base ) - - if [[ "$ID" == "debian" && "$VERSION_ID" == "8" ]] || [[ "$ID" == "ubuntu" && "$VERSION_ID" == "16.04" ]]; then - core_build_dependencies+=("libtool-bin") - fi else echo "Unknown build system" exit 1 diff --git a/build/patches/opm.patch b/build/patches/opm.patch new file mode 100644 index 000000000..50629b1db --- /dev/null +++ b/build/patches/opm.patch @@ -0,0 +1,15 @@ +diff --git a/bundle/opm-0.0.4/bin/opm b/bundle/opm-0.0.4/bin/opm +index 9029225..24a8200 100755 +--- a/bundle/opm-0.0.4/bin/opm ++++ b/bundle/opm-0.0.4/bin/opm +@@ -97,10 +97,6 @@ if ($cmd eq '-v') { + exit; + } + +-# explicitly clear the environments to avoid breaking luajit and resty. +-delete $ENV{LUA_PATH}; +-delete $ENV{LUA_CPATH}; +- + for ($cmd) { + if ($_ eq 'get' || $_ eq 'install') { + check_lock_file(); diff --git a/build/scripts/download_cmake b/build/scripts/download_cmake index 984b8ff46..5aeef1628 100755 --- a/build/scripts/download_cmake +++ b/build/scripts/download_cmake @@ -2,8 +2,8 @@ set -e -u -version=3.8.2 -checksum=33e4851d3219b720f4b64fcf617151168f1bffdf5afad25eb4b7f5f58cee3a08 +version=3.11.1 +checksum=3aa8d3b53903563bdc13dfec80716c286b2228db36ef65c23bc616f9bb930367 build_dir=$(pwd) work_dir="$build_dir/build/work" @@ -20,7 +20,7 @@ if [[ ! -e "$prefix/bin/cmake" || ! -e "$version_stamp" ]]; then download_path="$work_dir/$download_filename" if [ ! -e "$download_path" ]; then echo "Downloading CMake $version..." 
- curl -fL -o "$download_path" "$download_url" + curl -fL --retry 3 -o "$download_path" "$download_url" fi actual_checksum=$(openssl sha256 "$download_path" | awk '{print $2}') diff --git a/build/scripts/install_build_dependencies b/build/scripts/install_build_dependencies index 18333471a..e2a3b7f43 100755 --- a/build/scripts/install_build_dependencies +++ b/build/scripts/install_build_dependencies @@ -39,7 +39,7 @@ if [ -f /etc/redhat-release ]; then rpm --query --queryformat "" "${install_dependencies[@]}" elif [ -f /etc/debian_version ]; then apt-get update - apt-get -y install "${install_dependencies[@]}" + DEBIAN_FRONTEND=noninteractive apt-get -y --no-install-recommends install "${install_dependencies[@]}" else echo "Unknown build system" exit 1 diff --git a/circle.yml b/circle.yml index d5eff625d..d0af06594 100644 --- a/circle.yml +++ b/circle.yml @@ -30,7 +30,7 @@ dependencies: # - ./build/work/git-restore-mtime -f . override: # Build all the API Umbrella software dependencies. - - ./configure --enable-test-dependencies + - ./configure - make all test-deps # Remove the download archives, since we don't need to cache these in # CircleCI, and doing so also leads to multiple versions being kept around diff --git a/config/default.yml b/config/default.yml index 319695a5b..9ff69a878 100644 --- a/config/default.yml +++ b/config/default.yml @@ -11,9 +11,14 @@ rlimits: nproc: 20000 http_port: 80 https_port: 443 +listen: + addresses: + - "*" + - "[::]" nginx: workers: auto worker_connections: 8192 + error_log_level: notice access_log_filename: access.log access_log_options: buffer=256k flush=10s proxy_connect_timeout: 60 @@ -62,7 +67,6 @@ gatekeeper: trafficserver: host: 127.0.0.1 port: 14009 - autoconf_port: 14007 management_port: 14008 storage: size: 256M @@ -94,7 +98,11 @@ web: enabled: - local cas: - options: {} + options: + service_validate_url: /serviceValidate + login_url: /login + logout_url: /logout + ssl: true facebook: client_id: client_secret: @@ -108,9 +116,18 @@ web: client_id: client_secret: ldap: - options: {} + options: + port: 389 + method: plain + uid: sAMAccountName max.gov: require_mfa: true + options: + host: login.max.gov + login_url: /cas/login + service_validate_url: /cas/serviceValidate + logout_url: /cas/logout + ssl: true mailer: smtp_settings: static_site: @@ -138,6 +155,8 @@ router: rsyslog: host: 127.0.0.1 port: 14014 +log: + destination: file dns_resolver: negative_ttl: 60 max_stale: 86400 @@ -162,6 +181,7 @@ elasticsearch: embedded_server_env: heap_size: 512m api_version: 2 + template_version: 1 embedded_server_config: network: host: 127.0.0.1 @@ -214,7 +234,6 @@ flume: kafka: brokers: [] topic: api_umbrella_logs -log_template_version: v1 strip_cookies: - ^__utm.*$ - ^_ga$ @@ -372,3 +391,7 @@ ban: ember_server: port: 14050 live_reload_port: 14051 +_test_config: + default_null_override_hash: + default_null_override_string: + default_empty_hash_override_hash: {} diff --git a/config/test.yml b/config/test.yml index d3a811824..4f16352ce 100644 --- a/config/test.yml +++ b/config/test.yml @@ -20,7 +20,6 @@ nginx: port: 13005 trafficserver: port: 13009 - autoconf_port: 13007 management_port: 13008 embedded_server_config: records: @@ -109,3 +108,9 @@ openldap: port: 13104 apiSettings: require_https: optional +_test_config: + default_null_override_hash: + foo: bar + default_null_override_string: foobar + default_empty_hash_override_hash: + baz: qux diff --git a/configure b/configure index d13ddffc1..0a5416362 100755 --- a/configure +++ b/configure @@ -1,4 +1,4 @@ 
-#!/bin/sh +#!/usr/bin/env bash # Autotools-style (./configure) wrapper for CMake # @@ -21,7 +21,18 @@ # copyright and related or neighboring rights to this work. For # details, see -TOP_SRCDIR="$(dirname $0)" +set -e + +# Resolve the current path, taking into account symlinks. +# http://stackoverflow.com/a/246128/222487 +source="${BASH_SOURCE[0]}" +while [ -L "$source" ]; do + dir="$(cd -P "$(dirname "$source")" && pwd)" + source="$(readlink "$source")" + [[ $source != /* ]] && source="$dir/$source" +done +TOP_SRCDIR="$(cd -P "$(dirname "$source")" && pwd)" + CMAKE_CMD="cmake ${TOP_SRCDIR}" BUILD_TYPE="Debug" @@ -34,7 +45,6 @@ if [ -e "${TOP_SRCDIR}/.configure-custom.sh" ]; then fi PREFIX=/opt/api-umbrella -ENABLE_VARS="hadoop-analytics|on|ENABLE_HADOOP_ANALYTICS test-dependencies|on|ENABLE_TEST_DEPENDENCIES deploy-only|on|ENABLE_DEPLOY_ONLY" quote() { echo "$1" | sed -e "s|'|'\\\\''|g; 1s/^/'/; \$s/\$/'/" @@ -299,9 +309,35 @@ if [ "x${LDFLAGS}" != "x" ]; then done fi +# If running in the docker development environment, always force the build to +# happen in the out-of source /build location. This helps ensure that even if +# you manually run "./configure" from inside the default /app directory, the +# generated files still assume /build as the build root. +if [ "${DOCKER_DEV:-}" == "true" ]; then + cd /build + + # Remove existing /app symlinks before running cmake again, or else the + # cached files might cause cmake to not consider this an out-of-source build. + # We'll re-create the symlinks after cmake has been executed. + rm -f /app/CMakeCache.txt \ + /app/CMakeFiles \ + /app/Makefile \ + /app/cmake_install.cmake +fi + "$TOP_SRCDIR/build/scripts/download_cmake" build_dir="$(pwd)" PATH="$build_dir/build/work/cmake/bin:$PATH" export PATH eval "cmake ${TOP_SRCDIR} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DCMAKE_INSTALL_PREFIX=${PREFIX} ${CMAKE_ARGS}" + +# If running in the docker development environment, ensure symlinks are in +# place from the /app working directory to the /build location, so "make" +# commands can work from the default /app directory. 
+if [ "${DOCKER_DEV:-}" == "true" ]; then + ln -snf /build/CMakeCache.txt /app/CMakeCache.txt + ln -snf /build/CMakeFiles /app/CMakeFiles + ln -snf /build/Makefile /app/Makefile + ln -snf /build/cmake_install.cmake /app/cmake_install.cmake +fi diff --git a/docker/Dockerfile b/docker/Dockerfile index 745e14925..f8dc55d3d 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -4,7 +4,7 @@ ENV API_UMBRELLA_VERSION 0.14.4-1~jessie # Install API Umbrella RUN echo "deb http://dl.bintray.com/nrel/api-umbrella-debian jessie main" >> /etc/apt/sources.list.d/api-umbrella.list -RUN apt-get update && apt-get -y --allow-unauthenticated install api-umbrella +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get -y --no-install-recommends --allow-unauthenticated install api-umbrella # Define mountable directories VOLUME ["/etc/api-umbrella", "/opt/api-umbrella/var/db", "/opt/api-umbrella/var/log"] diff --git a/docker/dev/docker-entrypoint b/docker/dev/docker-entrypoint new file mode 100755 index 000000000..0d73a9c06 --- /dev/null +++ b/docker/dev/docker-entrypoint @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +set -e -u -x + +mkdir -p /etc/api-umbrella +{ + echo "app_env: development" + echo "http_port: $HTTP_PORT" + echo "https_port: $HTTPS_PORT" +} > /etc/api-umbrella/api-umbrella.yml + +mkdir -p /build/build/work/stage/opt/api-umbrella/var/log +mkdir -p /build/CMakeFiles +ln -snf /build/build/work/stage/opt/api-umbrella /opt/api-umbrella +ln -snf /build/build/work/stage/opt/api-umbrella/var/log /var/log/api-umbrella +ln -snf /build/build/work /app/build/work +ln -snf /build/CMakeCache.txt /app/CMakeCache.txt +ln -snf /build/CMakeFiles /app/CMakeFiles +ln -snf /build/Makefile /app/Makefile +ln -snf /build/cmake_install.cmake /app/cmake_install.cmake + +mkdir -p /build/test/tmp/run +mkdir -p /app/test/tmp +ln -snf /build/test/tmp/run /app/test/tmp/run + +mkdir -p /app/build/work/src/api-umbrella-core/tmp/admin-ui-build/node_modules +ln -snf /app/build/work/src/api-umbrella-core/tmp/admin-ui-build/node_modules /app/src/api-umbrella/admin-ui/node_modules + +mkdir -p /app/build/work/.bundle +ln -snf /app/build/work/.bundle /app/.bundle + +mkdir -p /app/build/work/src/web-app/.bundle +ln -snf /app/build/work/src/web-app/.bundle /app/src/api-umbrella/web-app/.bundle + +chmod 1777 /app/src/api-umbrella/admin-ui/tmp + +exec "$@" diff --git a/docker/dev/docker-start b/docker/dev/docker-start index 5057bc567..8e41d1d5f 100755 --- a/docker/dev/docker-start +++ b/docker/dev/docker-start @@ -2,23 +2,6 @@ set -e -u -x -mkdir -p /etc/api-umbrella -{ - echo "app_env: development" - echo "http_port: $HTTP_PORT" - echo "https_port: $HTTPS_PORT" -} > /etc/api-umbrella/api-umbrella.yml - -ln -snf /build/build/work/stage/opt/api-umbrella /opt/api-umbrella -ln -snf /build/build/work/stage/opt/api-umbrella/var/log /var/log/api-umbrella -ln -snf /build/build/work ./build/work -ln -snf /build/CMakeFiles ./CMakeFiles -ln -snf /build/CMakeCache.txt ./CMakeCache.txt -ln -snf /build/Makefile ./Makefile -mkdir -p /build/test/tmp/run -mkdir -p ./test/tmp -ln -snf /build/test/tmp/run ./test/tmp/run - (cd /build && env PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" make) chmod 1777 /app/src/api-umbrella/admin-ui/tmp diff --git a/scripts/rake/lint.rake b/scripts/rake/lint.rake index d9e85efe7..d89eeb815 100644 --- a/scripts/rake/lint.rake +++ b/scripts/rake/lint.rake @@ -18,7 +18,7 @@ namespace :lint do task :lua do require "childprocess" - lua_files = `git ls-files #{API_UMBRELLA_SRC_ROOT} | grep 
"\.lua$"`.split("\n") + lua_files = `git ls-files #{API_UMBRELLA_SRC_ROOT} | grep "\\.lua$"`.split("\n") process = ChildProcess.build("build/work/test-env/vendor/bin/luacheck", *lua_files) process.cwd = API_UMBRELLA_SRC_ROOT process.environment["LUA_PATH"] = "build/work/test-env/vendor/share/lua/5.1/?.lua;build/work/test-env/vendor/share/lua/5.1/?/init.lua;;" diff --git a/scripts/rake/outdated_packages.rb b/scripts/rake/outdated_packages.rb index 0d03320c1..d3af0239a 100644 --- a/scripts/rake/outdated_packages.rb +++ b/scripts/rake/outdated_packages.rb @@ -15,18 +15,9 @@ class OutdatedPackages :git => "https://github.com/elasticsearch/elasticsearch.git", :constraint => "~> 2.4.3", }, - "flume" => { - :git => "https://github.com/apache/flume.git", - }, "golang" => { :git => "https://go.googlesource.com/go", }, - "json_c" => { - :git => "https://github.com/json-c/json-c.git", - }, - "kylin" => { - :git => "https://github.com/apache/kylin.git", - }, "libcidr" => { :http => "https://www.over-yonder.net/~fullermd/projects/libcidr", }, @@ -42,9 +33,6 @@ class OutdatedPackages "liblogging" => { :git => "https://github.com/rsyslog/liblogging.git", }, - "librdkafka" => { - :git => "https://github.com/edenhill/librdkafka.git", - }, "luarocks" => { :git => "https://github.com/keplerproject/luarocks.git", }, @@ -60,18 +48,12 @@ class OutdatedPackages "luarock_inspect" => { :luarock => "inspect", }, - "luarock_libcidr" => { - :luarock => "libcidr-ffi", - }, "luarock_luacheck" => { :luarock => "luacheck", }, "luarock_luaposix" => { :luarock => "luaposix", }, - "luarock_luatz" => { - :luarock => "luatz", - }, "luarock_lustache" => { :luarock => "lustache", }, @@ -81,9 +63,6 @@ class OutdatedPackages "luarock_penlight" => { :luarock => "penlight", }, - "luarock_resty_http" => { - :luarock => "lua-resty-http", - }, "luarock_resty_uuid" => { :luarock => "lua-resty-uuid", }, @@ -103,9 +82,6 @@ class OutdatedPackages :git => "https://github.com/cloudflare/lua-resty-shcache.git", :git_ref => "master", }, - "maven" => { - :git => "https://github.com/apache/maven.git", - }, "mailhog" => { :git => "https://github.com/mailhog/MailHog.git", }, @@ -142,6 +118,16 @@ class OutdatedPackages :git => "https://github.com/openssl/openssl.git", :string_version => true, }, + "opm_icu_date" => { + :git => "https://github.com/GUI/lua-icu-date.git", + :git_ref => "master", + }, + "opm_libcidr" => { + :git => "https://github.com/GUI/lua-libcidr-ffi.git", + }, + "opm_resty_http" => { + :git => "https://github.com/pintsized/lua-resty-http.git", + }, "pcre" => { :http => "https://ftp.pcre.org/pub/pcre/", }, @@ -151,9 +137,6 @@ class OutdatedPackages "phantomjs" => { :git => "https://github.com/ariya/phantomjs.git", }, - "presto" => { - :git => "https://github.com/facebook/presto.git", - }, "ruby" => { :git => "https://github.com/ruby/ruby.git", :constraint => "~> 2.4.3", @@ -172,7 +155,6 @@ class OutdatedPackages }, "trafficserver" => { :git => "https://github.com/apache/trafficserver.git", - :constraint => "~> 5.3.2", }, "unbound" => { :http => "https://www.unbound.net/download.html", @@ -227,8 +209,9 @@ def tag_to_semver(name, tag) end def initialize + seen_names = [] versions = {} - versions_content = File.read(File.join(API_UMBRELLA_SRC_ROOT, "build/cmake/versions.cmake")) + versions_content = `git grep -h "^set.*_VERSION" build/cmake`.strip versions_content.each_line do |line| current_version_matches = line.match(/set\((.+?)_VERSION (.+?)\)/) if(!current_version_matches) @@ -236,6 +219,7 @@ def initialize end name = 
current_version_matches[1].downcase + seen_names.push(name) options = REPOS[name] || {} current_version_string = current_version_matches[2] @@ -330,6 +314,11 @@ def initialize end end + unused_repos = REPOS.keys - seen_names + if(unused_repos.any?) + puts "\n\nNOTICE: Unused repos defined in scripts/rake/outdated_packages.rb: #{unused_repos.sort.join(", ")}" + end + puts "\n\n" print Rainbow("Package".ljust(32)).underline diff --git a/src/api-umbrella/cli/read_config.lua b/src/api-umbrella/cli/read_config.lua index 252a2d1d1..2f14856d8 100644 --- a/src/api-umbrella/cli/read_config.lua +++ b/src/api-umbrella/cli/read_config.lua @@ -5,6 +5,7 @@ local deep_merge_overwrite_arrays = require "api-umbrella.utils.deep_merge_overw local dir = require "pl.dir" local file = require "pl.file" local host_normalize = require "api-umbrella.utils.host_normalize" +local invert_table = require "api-umbrella.utils.invert_table" local lyaml = require "lyaml" local nillify_yaml_nulls = require "api-umbrella.utils.nillify_yaml_nulls" local path = require "pl.path" @@ -215,6 +216,27 @@ local function set_computed_config() config["_default_hostname_normalized"] = host_normalize(default_hostname) end + if not config["web"] then + config["web"] = {} + end + + -- Set the default host used for web application links (for mailers, contact + -- URLs, etc). + -- + -- By default, pick this up from the `hosts` array where `default` has been + -- set to true (this gets put on `_default_hostname` for easier access). But + -- still allow the web host to be explicitly set via `web.default_host`. + if not config["web"]["default_host"] then + config["web"]["default_host"] = config["_default_hostname"] + + -- Fallback to something that will at least generate valid URLs if there's + -- no default, or the default is "*" (since in this context, a wildcard + -- doesn't make sense for generating URLs). + if not config["web"]["default_host"] or config["web"]["default_host"] == "*" then + config["web"]["default_host"] = "localhost" + end + end + -- Determine the nameservers for DNS resolution. Prefer explicitly configured -- nameservers, but fallback to nameservers defined in resolv.conf, and then -- Google's DNS servers if nothing else is defined. 
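(Aside on the `web.default_host` computation added in the hunk above: the resolution order is explicit `web.default_host`, then the host flagged `default: true` in the `hosts` array, then `"localhost"` when that value is missing or is the `"*"` wildcard, since a wildcard cannot be used to build absolute URLs. Below is a minimal, self-contained Lua sketch of that fallback order only — the `resolve_web_default_host` name and the sample `conf` tables are hypothetical and exist purely for illustration; the actual logic lives inside `set_computed_config` in read_config.lua as patched above.)

-- Hypothetical standalone sketch of the web.default_host fallback order.
local function resolve_web_default_host(conf)
  conf.web = conf.web or {}
  if not conf.web.default_host then
    -- "_default_hostname" stands in for the host marked `default: true` in `hosts`.
    conf.web.default_host = conf._default_hostname
    -- A missing default, or a "*" wildcard, can't be used to generate URLs.
    if not conf.web.default_host or conf.web.default_host == "*" then
      conf.web.default_host = "localhost"
    end
  end
  return conf.web.default_host
end

print(resolve_web_default_host({ web = { default_host = "api.example.com" } })) -- api.example.com
print(resolve_web_default_host({ _default_hostname = "example.org" }))          -- example.org
print(resolve_web_default_host({ _default_hostname = "*" }))                    -- localhost
print(resolve_web_default_host({}))                                             -- localhost

(End of aside; the next hunk of read_config.lua follows.)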
@@ -308,17 +330,34 @@ local function set_computed_config() deep_merge_overwrite_arrays(config, { _embedded_root_dir = embedded_root_dir, _src_root_dir = src_root_dir, + _api_umbrella_config_runtime_file = path.join(config["run_dir"], "runtime_config.yml"), _package_path = package.path, _package_cpath = package.cpath, + ["_test_env?"] = (config["app_env"] == "test"), + ["_development_env?"] = (config["app_env"] == "development"), analytics = { ["_output_elasticsearch?"] = array_includes(config["analytics"]["outputs"], "elasticsearch"), ["_output_kylin?"] = array_includes(config["analytics"]["outputs"], "kylin"), }, mongodb = { _database = plutils.split(array_last(plutils.split(config["mongodb"]["url"], "/", true)), "?", true)[1], + embedded_server_config = { + storage = { + dbPath = path.join(config["db_dir"], "mongodb"), + }, + }, }, elasticsearch = { _first_server = config["elasticsearch"]["_servers"][1], + embedded_server_config = { + path = { + data = path.join(config["db_dir"], "elasticsearch"), + logs = path.join(config["log_dir"], "elasticsearch"), + }, + }, + ["_template_version_v1?"] = (config["elasticsearch"]["template_version"] == 1), + ["_template_version_v2?"] = (config["elasticsearch"]["template_version"] == 2), + ["_api_version_lte_2?"] = (config["elasticsearch"]["api_version"] <= 2), }, ["_service_general_db_enabled?"] = array_includes(config["services"], "general_db"), ["_service_log_db_enabled?"] = array_includes(config["services"], "log_db"), @@ -335,7 +374,7 @@ local function set_computed_config() web = { admin = { auth_strategies = { - ["_local_enabled?"] = array_includes(config["web"]["admin"]["auth_strategies"]["enabled"], "local"), + ["_enabled"] = invert_table(config["web"]["admin"]["auth_strategies"]["enabled"]), ["_only_ldap_enabled?"] = (#config["web"]["admin"]["auth_strategies"]["enabled"] == 1 and config["web"]["admin"]["auth_strategies"]["enabled"][1] == "ldap"), }, }, @@ -350,6 +389,24 @@ local function set_computed_config() }, }) + if config["elasticsearch"]["api_version"] <= 2 then + deep_merge_overwrite_arrays(config, { + elasticsearch = { + embedded_server_config = { + path = { + conf = path.join(config["etc_dir"], "elasticsearch"), + scripts = path.join(config["etc_dir"], "elasticsearch_scripts"), + }, + }, + }, + }) + end + + deep_merge_overwrite_arrays(config, { + _mongodb_yaml = lyaml.dump({ config["mongodb"]["embedded_server_config"] }), + _elasticsearch_yaml = lyaml.dump({ config["elasticsearch"]["embedded_server_config"] }), + }) + if config["app_env"] == "development" then config["_dev_env_install_dir"] = path.join(src_root_dir, "build/work/dev-env") end diff --git a/src/api-umbrella/cli/setup.lua b/src/api-umbrella/cli/setup.lua index db5baed61..7baf22862 100644 --- a/src/api-umbrella/cli/setup.lua +++ b/src/api-umbrella/cli/setup.lua @@ -1,9 +1,7 @@ -local array_includes = require "api-umbrella.utils.array_includes" -local deep_merge_overwrite_arrays = require "api-umbrella.utils.deep_merge_overwrite_arrays" local dir = require "pl.dir" local file = require "pl.file" +local invert_table = require "api-umbrella.utils.invert_table" local lustache = require "lustache" -local lyaml = require "lyaml" local mustache_unescape = require "api-umbrella.utils.mustache_unescape" local path = require "pl.path" local plutils = require "pl.utils" @@ -14,31 +12,6 @@ local tablex = require "pl.tablex" local unistd = require "posix.unistd" local config -local template_config - -local function set_template_config() - local runtime_config_path = 
path.join(config["run_dir"], "runtime_config.yml") - - template_config = tablex.deepcopy(config) - deep_merge_overwrite_arrays(template_config, { - _api_umbrella_config_runtime_file = runtime_config_path, - ["_test_env?"] = (config["app_env"] == "test"), - ["_development_env?"] = (config["app_env"] == "development"), - _mongodb_yaml = lyaml.dump({deep_merge_overwrite_arrays({ - storage = { - dbPath = path.join(config["db_dir"], "mongodb"), - }, - }, config["mongodb"]["embedded_server_config"])}), - _elasticsearch_yaml = lyaml.dump({deep_merge_overwrite_arrays({ - path = { - conf = path.join(config["etc_dir"], "elasticsearch"), - scripts = path.join(config["etc_dir"], "elasticsearch_scripts"), - data = path.join(config["db_dir"], "elasticsearch"), - logs = config["log_dir"], - }, - }, config["elasticsearch"]["embedded_server_config"])}) - }) -end local function permission_check() local effective_uid = unistd.geteuid() @@ -100,7 +73,6 @@ local function prepare() path.join(config["db_dir"], "mongodb"), path.join(config["db_dir"], "rsyslog"), path.join(config["etc_dir"], "trafficserver/snapshots"), - path.join(config["log_dir"], "trafficserver"), path.join(config["root_dir"], "var/trafficserver"), } @@ -183,7 +155,7 @@ local function write_templates() local _, extension = path.splitext(template_path) if extension == ".mustache" then - content = lustache:render(mustache_unescape(content), template_config) + content = lustache:render(mustache_unescape(content), config) end dir.makepath(path.dirname(install_path)) @@ -231,8 +203,39 @@ local function set_permissions() end local function activate_services() - local active_services = dir.getdirectories(path.join(config["_src_root_dir"], "templates/etc/perp")) - tablex.transform(path.basename, active_services) + local available_services = dir.getdirectories(path.join(config["_src_root_dir"], "templates/etc/perp")) + tablex.transform(path.basename, available_services) + available_services = invert_table(available_services) + + local active_services = {} + if config["_service_general_db_enabled?"] then + active_services["mongod"] = 1 + end + if config["_service_log_db_enabled?"] then + active_services["elasticsearch"] = 1 + end + if config["_service_router_enabled?"] then + active_services["geoip-auto-updater"] = 1 + active_services["mora"] = 1 + active_services["nginx"] = 1 + active_services["nginx-reloader"] = 1 + active_services["rsyslog"] = 1 + active_services["trafficserver"] = 1 + end + if config["_service_web_enabled?"] then + active_services["web-delayed-job"] = 1 + active_services["web-puma"] = 1 + end + if config["app_env"] == "development" then + active_services["dev-env-ember-server"] = 1 + end + if config["app_env"] == "test" then + active_services["test-env-mailhog"] = 1 + active_services["test-env-mongo-orchestration"] = 1 + active_services["test-env-nginx"] = 1 + active_services["test-env-openldap"] = 1 + active_services["test-env-unbound"] = 1 + end -- Loop over the perp controlled services and set the sticky permission bit -- for any services that are supposed to be active (this sticky bit is how @@ -243,72 +246,50 @@ local function activate_services() -- Disable any old services that might be installed, but are no longer -- present in templates/etc/perp. - local is_active = array_includes(active_services, service_name) - - -- Disable services according to the broader service groups marked as - -- enabled in api-umbrella.yml's "services" list. 
- if is_active then - if not config["_service_general_db_enabled?"] then - if array_includes({ "mongod" }, service_name) then - is_active = false - end - end + local is_active = false + if available_services[service_name] and active_services[service_name] then + is_active = true + end - if not config["_service_log_db_enabled?"] then - if array_includes({ "elasticsearch" }, service_name) then - is_active = false - end - end + -- Perp's hidden directories don't need the sticky bit. + local is_hidden = (string.find(service_name, ".", 1, true) == 1) + if is_hidden then + is_active = false + end - if not config["_service_hadoop_db_enabled?"] then - if array_includes({ "flume", "kylin", "presto" }, service_name) then - is_active = false - end + -- Create the log directory for svlogd output for this service. + if is_active or service_name == ".boot" then + local service_log_name = service_name + if service_name == ".boot" then + service_log_name = "perpd" end - if not config["_service_router_enabled?"] then - if array_includes({ "geoip-auto-updater", "mora", "nginx", "rsyslog", "trafficserver" }, service_name) then - is_active = false - end + local service_log_dir = path.join(config["log_dir"], service_log_name) + dir.makepath(service_log_dir) + local _, _, log_chmod_err = run_command("chmod 0755 " .. service_log_dir) + if log_chmod_err then + print("chmod failed: ", log_chmod_err) + os.exit(1) end - - if not config["_service_web_enabled?"] then - if array_includes({ "web-delayed-job", "web-puma" }, service_name) then - is_active = false + if config["user"] and config["group"] then + local _, _, log_chown_err = run_command("chown " .. config["user"] .. ":" .. config["group"] .. " " .. service_log_dir) + if log_chown_err then + print("chown failed: ", log_chown_err) + os.exit(1) end end - if not config["_service_nginx_reloader_enabled?"] then - if array_includes({ "nginx-reloader" }, service_name) then - is_active = false + -- Disable the svlogd script if we want all output to go to + -- stdout/stderr. + if config["log"]["destination"] == "console" then + local _, _, err = run_command("chmod -x " .. service_dir .. "/rc.log") + if err then + print("chmod failed: ", err) + os.exit(1) end end end - -- Disable any dev-only services when not running in the dev environment. - if string.find(service_name, "dev-env", 1, true) == 1 then - if config["app_env"] == "development" then - is_active = true - else - is_active = false - end - end - - -- Disable any test-only services when not running in the test environment. - if string.find(service_name, "test-env", 1, true) == 1 then - if config["app_env"] == "test" then - is_active = true - else - is_active = false - end - end - - -- Perp's hidden directories don't need the sticky bit. - local is_hidden = (string.find(service_name, ".", 1, true) == 1) - if is_hidden then - is_active = false - end - -- Set the sticky bit for any active services. if is_active then local _, _, err = run_command("chmod +t " .. service_dir) @@ -328,7 +309,6 @@ end return function() config = read_config({ write = true }) - set_template_config() permission_check() prepare() generate_self_signed_cert() diff --git a/src/api-umbrella/http-api/health.lua b/src/api-umbrella/http-api/health.lua index 2cbd4ef25..ea5189547 100644 --- a/src/api-umbrella/http-api/health.lua +++ b/src/api-umbrella/http-api/health.lua @@ -37,7 +37,7 @@ local function status_response() -- Check to see if the ElasticSearch index aliases have been setup. 
local today = os.date("!%Y-%m", ngx.time()) local alias = "api-umbrella-logs-" .. today - local index = "api-umbrella-logs-" .. config["log_template_version"] .. "-" .. today + local index = "api-umbrella-logs-" .. config["elasticsearch"]["template_version"] .. "-" .. today res, err = httpc:request_uri(config["elasticsearch"]["hosts"][1] .. "/" .. index .. "/_alias/" .. alias) if err then ngx.log(ngx.ERR, "failed to fetch elasticsearch alias details: ", err) diff --git a/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua b/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua index 5897f4831..f3faf2c03 100644 --- a/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua +++ b/src/api-umbrella/proxy/jobs/elasticsearch_setup.lua @@ -66,11 +66,11 @@ local function create_aliases() local aliases = { { alias = "api-umbrella-logs-" .. today, - index = "api-umbrella-logs-" .. config["log_template_version"] .. "-" .. today, + index = "api-umbrella-logs-" .. config["elasticsearch"]["template_version"] .. "-" .. today, }, { alias = "api-umbrella-logs-write-" .. today, - index = "api-umbrella-logs-" .. config["log_template_version"] .. "-" .. today, + index = "api-umbrella-logs-" .. config["elasticsearch"]["template_version"] .. "-" .. today, }, } @@ -79,11 +79,11 @@ local function create_aliases() if tomorrow ~= today then table.insert(aliases, { alias = "api-umbrella-logs-" .. tomorrow, - index = "api-umbrella-logs-" .. config["log_template_version"] .. "-" .. tomorrow, + index = "api-umbrella-logs-" .. config["elasticsearch"]["template_version"] .. "-" .. tomorrow, }) table.insert(aliases, { alias = "api-umbrella-logs-write-" .. tomorrow, - index = "api-umbrella-logs-" .. config["log_template_version"] .. "-" .. tomorrow, + index = "api-umbrella-logs-" .. config["elasticsearch"]["template_version"] .. "-" .. tomorrow, }) end diff --git a/src/api-umbrella/proxy/log_utils.lua b/src/api-umbrella/proxy/log_utils.lua index b2dbe0c5d..8dd4e0900 100644 --- a/src/api-umbrella/proxy/log_utils.lua +++ b/src/api-umbrella/proxy/log_utils.lua @@ -1,8 +1,8 @@ local cjson = require "cjson" local escape_uri_non_ascii = require "api-umbrella.utils.escape_uri_non_ascii" local iconv = require "iconv" +local icu_date = require "icu-date" local logger = require "resty.logger.socket" -local luatz = require "luatz" local mongo = require "api-umbrella.utils.mongo" local plutils = require "pl.utils" local sha256 = require "resty.sha256" @@ -18,7 +18,14 @@ local syslog_facility = 16 -- local0 local syslog_severity = 6 -- info local syslog_priority = (syslog_facility * 8) + syslog_severity local syslog_version = 1 -local timezone = luatz.get_tz(config["analytics"]["timezone"]) + +local ZONE_OFFSET = icu_date.fields.ZONE_OFFSET +local DST_OFFSET = icu_date.fields.DST_OFFSET +local DAY_OF_WEEK = icu_date.fields.DAY_OF_WEEK +-- Setup the date object in the analytics timezone, and set the first day of +-- the week to Mondays for ISO week calculations. +local date = icu_date.new({ zone_id = config["analytics"]["timezone"] }) +date:set_attribute(icu_date.attributes.FIRST_DAY_OF_WEEK, 2) local _M = {} @@ -252,25 +259,23 @@ function _M.set_request_ip_geo_fields(data, ngx_var) end function _M.set_computed_timestamp_fields(data) - local utc_sec = data["timestamp_utc"] / 1000 - local tz_offset = timezone:find_current(utc_sec).gmtoff - local tz_sec = utc_sec + tz_offset - local tz_time = os.date("!%Y-%m-%d %H:%M:00", tz_sec) + -- Generate a string of current timestamp in the analytics timezone. 
+ -- + -- Note that we use os.date instead of icu-date's "format" function, since in + -- some microbenchmarks, this approach is faster. + date:set_millis(data["timestamp_utc"]) + local tz_offset = date:get(ZONE_OFFSET) + date:get(DST_OFFSET) + local tz_time = os.date("!%Y-%m-%d %H:%M:00", (date:get_millis() + tz_offset) / 1000) -- Determine the first day in the ISO week (the most recent Monday). - local tz_week = luatz.gmtime(tz_sec) - if tz_week.wday == 1 then - tz_week.day = tz_week.day - 6 - tz_week:normalize() - elseif tz_week.wday > 2 then - tz_week.day = tz_week.day - tz_week.wday + 2 - tz_week:normalize() - end + date:set(DAY_OF_WEEK, 2) + local week_tz_offset = date:get(ZONE_OFFSET) + date:get(DST_OFFSET) + local tz_week = os.date("!%Y-%m-%d", (date:get_millis() + week_tz_offset) / 1000) - data["timestamp_tz_offset"] = tz_offset * 1000 + data["timestamp_tz_offset"] = tz_offset data["timestamp_tz_year"] = string.sub(tz_time, 1, 4) .. "-01-01" -- YYYY-01-01 data["timestamp_tz_month"] = string.sub(tz_time, 1, 7) .. "-01" -- YYYY-MM-01 - data["timestamp_tz_week"] = tz_week:strftime("%Y-%m-%d") -- YYYY-MM-DD of first day in ISO week. + data["timestamp_tz_week"] = tz_week -- YYYY-MM-DD of first day in ISO week. data["timestamp_tz_date"] = string.sub(tz_time, 1, 10) -- YYYY-MM-DD data["timestamp_tz_hour"] = string.sub(tz_time, 1, 13) .. ":00:00" -- YYYY-MM-DD HH:00:00 data["timestamp_tz_minute"] = tz_time -- YYYY-MM-DD HH:MM:00 diff --git a/src/api-umbrella/web-app/app/controllers/admin/sessions_controller.rb b/src/api-umbrella/web-app/app/controllers/admin/sessions_controller.rb index 4bdfeaa7c..4332f6ada 100644 --- a/src/api-umbrella/web-app/app/controllers/admin/sessions_controller.rb +++ b/src/api-umbrella/web-app/app/controllers/admin/sessions_controller.rb @@ -19,7 +19,7 @@ def auth "analytics_timezone" => ApiUmbrellaConfig[:analytics][:timezone], "enable_beta_analytics" => (ApiUmbrellaConfig[:analytics][:adapter] == "kylin" || (ApiUmbrellaConfig[:analytics][:outputs] && ApiUmbrellaConfig[:analytics][:outputs].include?("kylin"))), "username_is_email" => ApiUmbrellaConfig[:web][:admin][:username_is_email], - "local_auth_enabled" => ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_local_enabled?], + "local_auth_enabled" => ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_enabled][:local], "password_length_min" => ApiUmbrellaConfig[:web][:admin][:password_length_min], "api_umbrella_version" => API_UMBRELLA_VERSION, "admin" => current_admin.as_json.slice( @@ -66,7 +66,7 @@ def first_time_setup_check end def only_for_local_auth - unless(ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_local_enabled?]) + unless(ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_enabled][:local]) raise ActionController::RoutingError.new("Not Found") end end diff --git a/src/api-umbrella/web-app/app/helpers/application_helper.rb b/src/api-umbrella/web-app/app/helpers/application_helper.rb index a1c773b10..d31e67479 100644 --- a/src/api-umbrella/web-app/app/helpers/application_helper.rb +++ b/src/api-umbrella/web-app/app/helpers/application_helper.rb @@ -30,7 +30,7 @@ def omniauth_external_providers end def display_login_form? - ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_local_enabled?] || ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_only_ldap_enabled?] + ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_enabled][:local] || ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_only_ldap_enabled?] 
end def ldap_title diff --git a/src/api-umbrella/web-app/app/models/admin.rb b/src/api-umbrella/web-app/app/models/admin.rb index 045457873..7afde6b90 100644 --- a/src/api-umbrella/web-app/app/models/admin.rb +++ b/src/api-umbrella/web-app/app/models/admin.rb @@ -97,7 +97,7 @@ def self.sorted end def self.needs_first_account? - ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_local_enabled?] && self.unscoped.count == 0 + ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_enabled][:local] && self.unscoped.count == 0 end def group_names @@ -249,7 +249,7 @@ def assign_with_password(params, *options) def send_invite_instructions token = nil - if(ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_local_enabled?]) + if(ApiUmbrellaConfig[:web][:admin][:auth_strategies][:_enabled][:local]) token = set_invite_reset_password_token end diff --git a/src/api-umbrella/web-app/app/views/devise/sessions/new.html.erb b/src/api-umbrella/web-app/app/views/devise/sessions/new.html.erb index ed3c32d34..06ae4cb33 100644 --- a/src/api-umbrella/web-app/app/views/devise/sessions/new.html.erb +++ b/src/api-umbrella/web-app/app/views/devise/sessions/new.html.erb @@ -1,7 +1,7 @@

<%= t(".admin_sign_in") %>