logentries API wrapper
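A single-file bash wrapper around the Logentries (Rapid7 InsightOps) REST API. It lists log streams and labels, pulls log events for a stream over a time range with an optional LEQL where(//) regex query, summarizes Heroku router requests and HTTP 500 errors, and can issue raw authenticated curl requests. It expects curl, jq, fzf, node, and ruby on the PATH, and takes its API key from the LOGENTRIES_API_KEY environment variable or from ~/.logentries.profile (optionally overridden by a per-project .logentriesrc), prompting for the key and saving it to the profile on first run.

Illustrative invocations, assuming the script is saved as an executable named 'logentries' (that filename and the stream name 'my-app' are examples, not part of the gist):

  logentries logs list
  logentries -s my-app -f 2021-07-01 -t 2021-07-02 logs "status=500"
  logentries -j -s my-app errors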
#! /usr/bin/env bash
COLOR_NC='\033[0m' # No Color
COLOR_LGREEN='\033[1;32m'
COLOR_GRAY='\033[1;30m'
COLOR_LGRAY='\033[0;37m'
COLOR_RED='\033[0;31m'
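# Logging helpers: logv writes verbose diagnostics to stderr only when VERBOSE
# is set, masking LOGENTRIES_API_KEY if it appears in the message; logerr and
# log write colored output to stderr; panic logs an error and exits.
# curlv/execv log the command being run (at verbose level) before executing it.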
logv() {
  [[ -z "$VERBOSE" ]] && return 0;
  local msg=$@
  [[ -z $LOGENTRIES_API_KEY ]] || msg=$(echo "$@" | sed "s/$LOGENTRIES_API_KEY/\*\*\*\*\*/" )
  >&2 echo -e "${COLOR_GRAY}$msg${COLOR_NC}" || true
}
logerr() {
  >&2 echo -e "${COLOR_RED}$@${COLOR_NC}"
}
log() {
  >&2 echo -e "${COLOR_LGREEN}$@${COLOR_NC}"
}
panic() {
  logerr "$@"
  exit -1;
}
curlv() {
  execv curl "$@"
}
execv() {
  logv "$@"
  "$@"
}
# Print usage information (plus the per-flag comments from parse_args) and exit.
usage() {
  echo "$0 [command]
  logs list: Lists all log streams
  logs <query>
    Gets the logs for the stream chosen with -s (or the saved local stream)
    -f, -t: UNIX timestamp or parseable datetime string
    query: a regular expression, wrapped in a LEQL where(//) clause
      https://docs.logentries.com/docs/search#section-leql
  labels: lists all created labels
  requests: pulls heroku router request lines, optionally filtered by a regex
  queries: pulls 'ETHON: performed' lines and extracts the request URL
  count: counts requests that match a query, grouped by path/method/status
  errors: pulls HTTP 500 responses and counts the associated URLs
  local: shows or sets the saved log stream for the current directory
  curl: Execute a curl request with the logentries API key set as a header.
" && \
  grep " .)\ #" $0; exit 0;
}
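# Loads configuration into $logentriesrc as a JSON blob: starts with
# ~/.logentries.profile (if present) and deep-merges a project-level
# .logentriesrc on top, so local settings win.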
load_logentriesrc() {
  export logentriesrc='{}'
  if [ -f ~/.logentries.profile ]; then
    logentriesrc=$(cat ~/.logentries.profile)
    logv "loaded ~/.logentries.profile"
  fi
  if [ -f ".logentriesrc" ]; then
    logentriesrc=$( (echo "$logentriesrc"; cat .logentriesrc) | jq --slurp '.[0] * .[1]' )
    logv "loaded .logentriesrc"
  fi
}
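# merge_json_file <file> <json>: shallow-merges the given JSON object into the
# file (creating it if missing), writing through a temp file.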
merge_json_file() {
  filename="$1"
  json="$2"
  (cat $filename 2>/dev/null || echo "{}") | jq ". + $json" > "$filename.tmp"
  mv "$filename.tmp" $filename
}
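# Ensures LOGENTRIES_API_KEY is set: takes it from the environment, then from
# the loaded config, and otherwise prompts for it and saves it to
# ~/.logentries.profile for next time.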
require_env() {
  load_logentriesrc
  if [[ -z "${LOGENTRIES_API_KEY}" ]]; then
    logv "setting LOGENTRIES_API_KEY from ~/.logentries.profile"
    export LOGENTRIES_API_KEY=$(echo "${logentriesrc}" | jq -r '.LOGENTRIES_API_KEY // empty')
    if [[ -z "$LOGENTRIES_API_KEY" ]]; then
      log "Please paste in your logentries API key (go to Settings: API Keys in rapid7 console):"
      read -s password
      [[ -z $password ]] && panic "No api key supplied!"
      merge_json_file ~/.logentries.profile "{LOGENTRIES_API_KEY: \"$password\"}"
      require_env
    fi
  fi
  [[ ! -z "$LOGENTRIES_API_KEY" ]] || panic "Could not load LOGENTRIES_API_KEY"
}
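# get_date_ms: echoes the argument unchanged if it is already a numeric
# (millisecond) timestamp, otherwise parses it with node's Date.parse.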
get_date_ms() {
  if [[ -z "$1" ]]; then
    return 0
  fi
  if [[ $1 =~ ^-?[0-9]+$ ]]; then
    echo "$1"
    return
  fi
  node -e "console.log(Date.parse('$1').toString())"
}
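# parse_log_id: accepts either a log stream GUID (returned as-is) or a stream
# name, which is fuzzy-matched against the `logs list` output via fzf.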
parse_log_id() {
  log_id="$1"
  logv "parse_log_id: ${1}"
  if [[ ! -z "$log_id" ]]; then
    if is_guid "$log_id"; then
      echo "$log_id"
      return
    fi
    log_id=$(fzf_log_stream "$1" | cut -d' ' -f1);
    if [[ ! -z "$log_id" ]]; then
      logv "fzf log id: ${log_id}"
      echo "$log_id"
      return
    fi
  fi
  panic "Unable to parse log ID: ${1}"
  echo "$1"
}
fzf_log_stream() {
  name="$@"
  logs_list | fzf -q "$name" -1 -0
}
HEADERS_FILE=$(mktemp /tmp/logentries-headers.XXXXXX)
function finish {
  # rm $HEADERS_FILE
  echo ""
}
trap finish EXIT
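# api_curl [curl args...] <url>: authenticated request against the Logentries
# REST API, with the URL as the last argument. Response headers are captured;
# on HTTP 202 (long-running query) the returned "Self" link is polled once per
# second, and HTTP 400 or an API error payload aborts with a message.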
api_curl() {
  local url=${@: -1};
  if [[ ! -z "$url" ]]; then set -- "${@:1:${#}-1}"; fi
  resp=$(curlv -s -H "x-api-key: $LOGENTRIES_API_KEY" -H "Accept: application/json" -D "$HEADERS_FILE" "$@" \
    "$url")
  if grep -q 'HTTP/1.1 202' "$HEADERS_FILE"; then
    link_url=$(echo "$resp" | jq -r '.links[] | select(.rel == "Self") | .href')
    logv "long running query - will check ${link_url} every second"
    sleep 1
    api_curl "$@" "$link_url"
  elif grep -q 'HTTP/1.1 400' "$HEADERS_FILE"; then
    panic "Bad request! This usually means your query is bad. $url"
  else
    [[ ! -z "$resp" ]] || panic "No response - this usually means something's wrong with your API key!"
    error=$(echo "$resp" | jq -r '.errorCode // empty')
    if [[ ! -z "$error" ]]; then
      msg=$(echo "$resp" | jq -r '.message')
      panic "API error: ${msg}"
    fi
    echo "$resp"
  fi
}
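# paginate <url>: calls api_curl repeatedly, emitting each page of results and
# following the "Next" link until there are no more pages.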
paginate() {
  next="$@"
  while true; do
    resp=$(api_curl "$next")
    echo "$resp"
    links=$(echo "$resp" | jq -r '.links // empty')
    [[ ! -z "$links" ]] || break
    next=$(echo "$resp" | jq -r '.links[] | select(.rel == "Next") | .href')
    [[ ! -z "$next" ]] || break;
    logv "Next Page: ${next}"
  done
}
is_guid() {
  [[ "$*" =~ ^\{?[A-F0-9a-f]{8}-[A-F0-9a-f]{4}-[A-F0-9a-f]{4}-[A-F0-9a-f]{4}-[A-F0-9a-f]{12}\}?$ ]]
}
url_encode() {
  jq -nr --arg v "$@" '$v|@uri'
}
logs_list() {
  api_curl https://rest.logentries.com/management/logs | jq -r '.logs[] | "\(.id) \(.name) \(.logsets_info[].name)"'
}
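# logs [query...]: pulls events for $log_id between $FROM and $TO (defaulting
# to the last 60 seconds), wrapping any query in a LEQL where(/.../) regex and
# optionally filtering by label, then prints only the event messages.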
logs() {
  [[ ! -z "$log_id" ]] || panic "No log ID given and none set using the local command"
  [[ ! -z "$FROM" ]] || FROM=$(node -e 'console.log((Date.now() - 60 * 1000).toString())') # default last 60 seconds
  [[ ! -z "$TO" ]] || TO=$(node -e 'console.log(Date.now().toString())') # default to now
  query="$*"
  [[ -z "$query" ]] || query=$(url_encode "where(/$query/)")
  extra="&per_page=500"
  [[ -z "$label_query" ]] || extra="$extra&label=$(label_get_id "$label_query")"
  (paginate "https://rest.logentries.com/query/logs/$log_id?from=$FROM&to=$TO&query=$query$extra") |
    jq -r '.events[].message'
}
labels_list() {
  api_curl https://rest.logentries.com/management/labels | jq -rc '.labels[]'
}
label_get_id() {
  [[ ! -z "$1" ]] || panic "No label given!"
  labels_list | jq -r '"\(.id) \(.name)"' | fzf -q "$1" -1 -0 | awk '{ print $1 }'
}
requests() {
  regexp="heroku\\srouter"
  if [[ ! -z "$1" ]]; then
    regexp="$regexp\\s.*$1"
  fi
  logs "$regexp"
}
legacy() {
  logs "Proxying\sLegacy" | awk '{ print $18 }' | sort | uniq -c | sort -r
}
queries() {
  regexp="ETHON\:\\sperformed"
  if [[ ! -z "$1" ]]; then
    regexp="$regexp\\s.*$1"
  fi
  logs "$regexp" | cut -d' ' -f 18 | cut -d "=" -f2-
}
count() {
  requests "$1" | parse_log_line | jq -r '"\(.path) \(.method) \(.status)"' | sort | uniq -c | sort -r
}
errors() {
  count "status=500"
}
saved_list() {
  api_curl "https://rest.logentries.com/query/saved_queries" | jq '.saved_queries[]'
}
set_local() {
  log_stream=$(fzf_log_stream "$@") || panic "No log stream selected"
  log "Setting local log stream to ${log_stream}"
  id=$(echo "$log_stream" | cut -d' ' -f1)
  name=$(echo "$log_stream" | awk '{for (i=2; i<NF; i++) printf $i " "; print $NF}')
  merge_json_file .logentriesrc "{ \"local\": { \"id\": \"$id\", \"name\": \"$name\" } }"
}
get_local() {
  echo "$logentriesrc" | jq -r '.local // empty' | jq -r '"\(.id) \(.name)"'
}
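# parse_log_line: reads log lines from stdin and emits one JSON object per
# line, extracting the leading ISO timestamp plus key=value and
# key="quoted value" fields (status/bytes/connect/service become integers).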
parse_log_line() {
  ruby -e "$(cat <<- 'EOF'
require 'json'
ts = /\d{4}\-\d{2}\-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+[+\-Z](\d{2}:?\d{2}?)?)?/
field = /\s(?<key>\w+)\=(?<value>[^\"\s]+)(?=\s)/
quoted_field = /\s(?<key>\w+)\="(?<value>[^\"]+)"(?=\s)/
integers = ['status', 'bytes', 'connect', 'service']
begin
  ARGF.each_line do |line|
    structured = {
      ts: ts.match(line),
    }
    matches = line.to_enum(:scan, field).map { Regexp.last_match } +
      line.to_enum(:scan, quoted_field).map { Regexp.last_match }
    matches.each do |m|
      key = m['key']
      value = m['value']
      if integers.include?(key)
        value = value.to_i
      end
      structured[key] = value
    end
    puts structured.to_json
  end
rescue => ex
  # piped to HEAD which ended the stream
  return if ex.is_a? Errno::EPIPE
  abort(ex.message)
end
EOF
)"
}
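# parse_urls: scans stdin for http(s) URLs and emits one JSON object per URL
# with its scheme, host, path, and the query string both raw and parsed.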
parse_urls() {
  ruby -e "$(cat <<- 'EOF'
require 'cgi'
require 'json'
require 'uri'
begin
  ARGF.each_line do |line|
    line.scan(URI.regexp(['http', 'https'])).each do |url|
      structured = {
        scheme: url[0],
        host: url[3],
        path: url[6],
        rawQuery: url[7],
        query: url[7] && CGI.parse(url[7]).transform_values!(&:join),
      }
      puts structured.to_json
    end
  end
rescue => ex
  # piped to HEAD which ended the stream
  return if ex.is_a? Errno::EPIPE
  abort(ex.message)
end
EOF
)"
}
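# parse_count_to_json: converts `sort | uniq -c` output of the form
# "<count> <path> <method> <status>" into one JSON object per line.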
parse_count_to_json() {
  awk '{ printf "{\"url\": \"%s\", \"count\": \"%s\", \"method\": \"%s\", \"status\": %d}\n", $2, $1, $3, $4 }'
}
parse_args() {
  OPTIND=1
  local s=$(echo "$1" | tr '[:upper:]' '[:lower:]')
  case "$s" in
    logs|labels|saved|requests|queries|count|errors|local|curl|help|h|\?)
      export subcommand=$s
      OPTIND=2
      ;;
  esac
  # Parse flags
  while getopts ":hvjf:t:s:l:" arg; do
    case $arg in
      v) # Verbose mode - extra output
        VERBOSE=true
        FLAGS="$FLAGS -v"
        ;;
      j) # force JSON output
        JSON=true
        FLAGS="$FLAGS -j"
        ;;
      f) # FROM date
        FROM=$(get_date_ms "$OPTARG")
        FLAGS="$FLAGS -f \"$OPTARG\""
        ;;
      t) # TO date
        TO=$(get_date_ms "$OPTARG")
        FLAGS="$FLAGS -t \"$OPTARG\""
        ;;
      s) # Stream ID
        log_id="$OPTARG"
        FLAGS="$FLAGS -s \"$OPTARG\""
        ;;
      l) # Filter to this Label (ID or Name)
        label_query="$OPTARG"
        FLAGS="$FLAGS -l \"$OPTARG\""
        ;;
      h | *) # Display help.
        usage
        exit 0
        ;;
    esac
  done
  export OPTIND
}
parse_args "$@" && shift $(($OPTIND - 1)) | |
# If they put args before the command like 'bin/contentful -s 1xab migrate -y', try parsing again | |
[[ -z "$subcommand" ]] && parse_args "$@" && shift $(($OPTIND - 1)) | |
set -e | |
case $subcommand in
  help|h|\?)
    usage
    ;;
  version|v)
    echo "$VERSION"
    ;;
esac
require_env
[[ -z "$log_id" ]] || log_id=$(parse_log_id "$log_id")
[[ ! -z "$log_id" ]] || log_id=$(echo "$logentriesrc" | jq -r '.local.id // empty')
format="cat"
case $subcommand in
  logs)
    if [[ "list" == "$1" ]]; then
      logs_list
    else
      if [[ ! -z "$JSON" ]]; then
        format="parse_log_line"
      fi
      logs "$@" | $format
    fi
    ;;
  labels)
    if [[ -z "$JSON" ]]; then
      labels_list | jq -r '"\(.id) \(.name)"'
    else
      labels_list
    fi
    ;;
  requests)
    if [[ ! -z "$JSON" ]]; then
      format="parse_log_line"
    fi
    requests "$@" | $format
    ;;
  queries)
    if [[ ! -z "$JSON" ]]; then
      format="parse_urls"
    fi
    queries "$@" | $format
    ;;
  count)
    if [[ ! -z "$JSON" ]]; then
      format="parse_count_to_json"
    fi
    count "$@" | $format
    ;;
  errors)
    if [[ ! -z "$JSON" ]]; then
      format="parse_count_to_json"
    fi
    errors "$@" | $format
    ;;
  local)
    if [[ $# -eq 0 ]]; then
      get_local "$@"
    else
      set_local "$@"
    fi
    ;;
  curl)
    require_env
    api_curl "$@"
    ;;
  *)
    logerr "Unknown command: '$1'"
    usage
    exit -1
    ;;
esac