mirror of https://github.com/grafana/grafana.git
Merge branch 'graphite-series-by-tags'
commit f591bea696
@@ -0,0 +1,109 @@
+FROM phusion/baseimage:0.9.22
+MAINTAINER Denys Zhdanov <denis.zhdanov@gmail.com>
+
+RUN apt-get -y update \
+  && apt-get -y upgrade \
+  && apt-get -y install vim \
+  nginx \
+  python-dev \
+  python-flup \
+  python-pip \
+  python-ldap \
+  expect \
+  git \
+  memcached \
+  sqlite3 \
+  libffi-dev \
+  libcairo2 \
+  libcairo2-dev \
+  python-cairo \
+  python-rrdtool \
+  pkg-config \
+  nodejs \
+  && rm -rf /var/lib/apt/lists/*
+
+# choose a timezone at build-time
+# use `--build-arg CONTAINER_TIMEZONE=Europe/Brussels` in `docker build`
+ARG CONTAINER_TIMEZONE
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN if [ ! -z "${CONTAINER_TIMEZONE}" ]; \
+  then ln -sf /usr/share/zoneinfo/$CONTAINER_TIMEZONE /etc/localtime && \
+  dpkg-reconfigure -f noninteractive tzdata; \
+  fi
+
+# fix python dependencies (LTS Django and newer memcached/txAMQP)
+RUN pip install --upgrade pip && \
+  pip install django==1.8.18 \
+  python-memcached==1.53 \
+  txAMQP==0.6.2
+
+ARG version=1.0.2
+ARG whisper_version=${version}
+ARG carbon_version=${version}
+ARG graphite_version=${version}
+
+ARG statsd_version=v0.7.2
+
+# install whisper
+RUN git clone -b ${whisper_version} --depth 1 https://github.com/graphite-project/whisper.git /usr/local/src/whisper
+WORKDIR /usr/local/src/whisper
+RUN python ./setup.py install
+
+# install carbon
+RUN git clone -b ${carbon_version} --depth 1 https://github.com/graphite-project/carbon.git /usr/local/src/carbon
+WORKDIR /usr/local/src/carbon
+RUN pip install -r requirements.txt \
+  && python ./setup.py install
+
+# install graphite
+RUN git clone -b ${graphite_version} --depth 1 https://github.com/graphite-project/graphite-web.git /usr/local/src/graphite-web
+WORKDIR /usr/local/src/graphite-web
+RUN pip install -r requirements.txt \
+  && python ./setup.py install
+ADD conf/opt/graphite/conf/*.conf /opt/graphite/conf/
+ADD conf/opt/graphite/webapp/graphite/local_settings.py /opt/graphite/webapp/graphite/local_settings.py
+# ADD conf/opt/graphite/webapp/graphite/app_settings.py /opt/graphite/webapp/graphite/app_settings.py
+WORKDIR /opt/graphite/webapp
+RUN mkdir -p /var/log/graphite/ \
+  && PYTHONPATH=/opt/graphite/webapp django-admin.py collectstatic --noinput --settings=graphite.settings
+
+# install statsd
+RUN git clone -b ${statsd_version} https://github.com/etsy/statsd.git /opt/statsd
+ADD conf/opt/statsd/config.js /opt/statsd/config.js
+
+# config nginx
+RUN rm /etc/nginx/sites-enabled/default
+ADD conf/etc/nginx/nginx.conf /etc/nginx/nginx.conf
+ADD conf/etc/nginx/sites-enabled/graphite-statsd.conf /etc/nginx/sites-enabled/graphite-statsd.conf
+
+# init django admin
+ADD conf/usr/local/bin/django_admin_init.exp /usr/local/bin/django_admin_init.exp
+ADD conf/usr/local/bin/manage.sh /usr/local/bin/manage.sh
+RUN chmod +x /usr/local/bin/manage.sh && /usr/local/bin/django_admin_init.exp
+
+# logging support
+RUN mkdir -p /var/log/carbon /var/log/graphite /var/log/nginx
+ADD conf/etc/logrotate.d/graphite-statsd /etc/logrotate.d/graphite-statsd
+
+# daemons
+ADD conf/etc/service/carbon/run /etc/service/carbon/run
+ADD conf/etc/service/carbon-aggregator/run /etc/service/carbon-aggregator/run
+ADD conf/etc/service/graphite/run /etc/service/graphite/run
+ADD conf/etc/service/statsd/run /etc/service/statsd/run
+ADD conf/etc/service/nginx/run /etc/service/nginx/run
+
+# default conf setup
+ADD conf /etc/graphite-statsd/conf
+ADD conf/etc/my_init.d/01_conf_init.sh /etc/my_init.d/01_conf_init.sh
+
+# cleanup
+RUN apt-get clean \
+  && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+# defaults
+EXPOSE 80 2003-2004 2023-2024 8125/udp 8126
+VOLUME ["/opt/graphite/conf", "/opt/graphite/storage", "/etc/nginx", "/opt/statsd", "/etc/logrotate.d", "/var/log"]
+WORKDIR /
+ENV HOME /root
+CMD ["/sbin/my_init"]
@@ -0,0 +1,22 @@
+Copyright (c) 2013-2016 Nathan Hopkins
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,9 +1,10 @@
 FROM phusion/baseimage:0.9.22
+LABEL maintainer="Denys Zhdanov <denis.zhdanov@gmail.com>"
-MAINTAINER Denys Zhdanov <denis.zhdanov@gmail.com>
+
 
 RUN apt-get -y update \
   && apt-get -y upgrade \
-  && apt-get -y --force-yes install vim \
+  && apt-get -y install vim \
   nginx \
   python-dev \
   python-flup \
@@ -22,38 +23,67 @@ RUN apt-get -y update \
   nodejs \
   && rm -rf /var/lib/apt/lists/*
 
 # choose a timezone at build-time
 # use `--build-arg CONTAINER_TIMEZONE=Europe/Brussels` in `docker build`
 ARG CONTAINER_TIMEZONE
 ENV DEBIAN_FRONTEND noninteractive
 
 RUN if [ ! -z "${CONTAINER_TIMEZONE}" ]; \
   then ln -sf /usr/share/zoneinfo/$CONTAINER_TIMEZONE /etc/localtime && \
   dpkg-reconfigure -f noninteractive tzdata; \
   fi
 
 # fix python dependencies (LTS Django and newer memcached/txAMQP)
-RUN pip install django==1.8.18 \
+RUN pip install --upgrade pip && \
+  pip install django==1.8.18 \
   python-memcached==1.53 \
-  txAMQP==0.6.2 \
-  && pip install --upgrade pip
+  txAMQP==0.6.2
 
 ARG version=1.0.2
 ARG whisper_version=${version}
 ARG carbon_version=${version}
 ARG graphite_version=${version}
 
+RUN echo "Building Version: $version"
+
+ARG whisper_repo=https://github.com/graphite-project/whisper.git
+ARG carbon_repo=https://github.com/graphite-project/carbon.git
+ARG graphite_repo=https://github.com/graphite-project/graphite-web.git
+
+ARG statsd_version=v0.8.0
+
+ARG statsd_repo=https://github.com/etsy/statsd.git
 
 # install whisper
-RUN git clone -b 1.0.2 --depth 1 https://github.com/graphite-project/whisper.git /usr/local/src/whisper
+RUN git clone -b ${whisper_version} --depth 1 ${whisper_repo} /usr/local/src/whisper
 WORKDIR /usr/local/src/whisper
 RUN python ./setup.py install
 
 # install carbon
-RUN git clone -b 1.0.2 --depth 1 https://github.com/graphite-project/carbon.git /usr/local/src/carbon
+RUN git clone -b ${carbon_version} --depth 1 ${carbon_repo} /usr/local/src/carbon
 WORKDIR /usr/local/src/carbon
 RUN pip install -r requirements.txt \
   && python ./setup.py install
 
 # install graphite
-RUN git clone -b 1.0.2 --depth 1 https://github.com/graphite-project/graphite-web.git /usr/local/src/graphite-web
+RUN git clone -b ${graphite_version} --depth 1 ${graphite_repo} /usr/local/src/graphite-web
 WORKDIR /usr/local/src/graphite-web
 RUN pip install -r requirements.txt \
   && python ./setup.py install
 
+# install statsd
+RUN git clone -b ${statsd_version} ${statsd_repo} /opt/statsd
+
+# config graphite
 ADD conf/opt/graphite/conf/*.conf /opt/graphite/conf/
 ADD conf/opt/graphite/webapp/graphite/local_settings.py /opt/graphite/webapp/graphite/local_settings.py
-ADD conf/opt/graphite/webapp/graphite/app_settings.py /opt/graphite/webapp/graphite/app_settings.py
+# ADD conf/opt/graphite/webapp/graphite/app_settings.py /opt/graphite/webapp/graphite/app_settings.py
 WORKDIR /opt/graphite/webapp
 RUN mkdir -p /var/log/graphite/ \
   && PYTHONPATH=/opt/graphite/webapp django-admin.py collectstatic --noinput --settings=graphite.settings
 
-# install statsd
-RUN git clone -b v0.7.2 https://github.com/etsy/statsd.git /opt/statsd
-ADD conf/opt/statsd/config.js /opt/statsd/config.js
+# config statsd
+ADD conf/opt/statsd/config.js /opt/statsd/
 
 # config nginx
 RUN rm /etc/nginx/sites-enabled/default
@@ -63,8 +93,7 @@ ADD conf/etc/nginx/sites-enabled/graphite-statsd.conf /etc/nginx/sites-enabled/g
 # init django admin
 ADD conf/usr/local/bin/django_admin_init.exp /usr/local/bin/django_admin_init.exp
 ADD conf/usr/local/bin/manage.sh /usr/local/bin/manage.sh
-RUN chmod +x /usr/local/bin/manage.sh \
-  && /usr/local/bin/django_admin_init.exp
+RUN chmod +x /usr/local/bin/manage.sh && /usr/local/bin/django_admin_init.exp
 
 # logging support
 RUN mkdir -p /var/log/carbon /var/log/graphite /var/log/nginx
@@ -86,9 +115,10 @@ RUN apt-get clean \
   && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
 
 # defaults
-EXPOSE 80 2003-2004 2023-2024 8125/udp 8126
+EXPOSE 80 2003-2004 2023-2024 8125 8125/udp 8126
 VOLUME ["/opt/graphite/conf", "/opt/graphite/storage", "/etc/nginx", "/opt/statsd", "/etc/logrotate.d", "/var/log"]
 WORKDIR /
 ENV HOME /root
+ENV STATSD_INTERFACE udp
 
 CMD ["/sbin/my_init"]
@@ -12,7 +12,7 @@ graphite_conf_dir_contents=$(find /opt/graphite/conf -mindepth 1 -print -quit)
 graphite_webapp_dir_contents=$(find /opt/graphite/webapp/graphite -mindepth 1 -print -quit)
 graphite_storage_dir_contents=$(find /opt/graphite/storage -mindepth 1 -print -quit)
 if [[ -z $graphite_dir_contents ]]; then
-  git clone -b 1.0.2 --depth 1 https://github.com/graphite-project/graphite-web.git /usr/local/src/graphite-web
+  # git clone -b 1.0.2 --depth 1 https://github.com/graphite-project/graphite-web.git /usr/local/src/graphite-web
   cd /usr/local/src/graphite-web && python ./setup.py install
 fi
 if [[ -z $graphite_storage_dir_contents ]]; then
@@ -8,18 +8,18 @@
 #                Defaults to ../
 # GRAPHITE_CONF_DIR - Configuration directory (where this file lives).
 #                Defaults to $GRAPHITE_ROOT/conf/
-# GRAPHITE_STORAGE_DIR - Storage directory for whipser/rrd/log/pid files.
+# GRAPHITE_STORAGE_DIR - Storage directory for whisper/rrd/log/pid files.
 #                Defaults to $GRAPHITE_ROOT/storage/
 #
 # To change other directory paths, add settings to this file. The following
 # configuration variables are available with these default values:
 #
 # STORAGE_DIR = $GRAPHITE_STORAGE_DIR
-# LOCAL_DATA_DIR = STORAGE_DIR/whisper/
-# WHITELISTS_DIR = STORAGE_DIR/lists/
-# CONF_DIR = STORAGE_DIR/conf/
-# LOG_DIR = STORAGE_DIR/log/
-# PID_DIR = STORAGE_DIR/
+# LOCAL_DATA_DIR = %(STORAGE_DIR)s/whisper/
+# WHITELISTS_DIR = %(STORAGE_DIR)s/lists/
+# CONF_DIR = %(STORAGE_DIR)s/conf/
+# LOG_DIR = %(STORAGE_DIR)s/log/
+# PID_DIR = %(STORAGE_DIR)s/
 #
 # For FHS style directory structures, use:
 #
@@ -30,20 +30,30 @@
 #
 #LOCAL_DATA_DIR = /opt/graphite/storage/whisper/
 
-# Enable daily log rotation. If disabled, a kill -HUP can be used after a manual rotate
+# Specify the database library used to store metric data on disk. Each database
+# may have configurable options to change the behaviour of how it writes to
+# persistent storage.
+#
+# whisper - Fixed-size database, similar in design and purpose to RRD. This is
+# the default storage backend for carbon and the most rigorously tested.
+#
+# ceres - Experimental alternative database that supports storing data in sparse
+# files of arbitrary fixed-size resolutions.
+DATABASE = whisper
+
+# Enable daily log rotation. If disabled, a new file will be opened whenever the log file path no
+# longer exists (i.e. it is removed or renamed)
 ENABLE_LOGROTATION = True
 
 # Specify the user to drop privileges to
-# If this is blank carbon runs as the user that invokes it
+# If this is blank carbon-cache runs as the user that invokes it
 # This user must have write access to the local data directory
 USER =
+#
+# NOTE: The above settings must be set under [relay] and [aggregator]
+# to take effect for those daemons as well
 
 # Limit the size of the cache to avoid swapping or becoming CPU bound.
 # Sorts and serving cache queries gets more expensive as the cache grows.
 # Use the value "inf" (infinity) for an unlimited cache size.
+# value should be an integer number of metric datapoints.
 MAX_CACHE_SIZE = inf
 
 # Limits the number of whisper update_many() calls per second, which effectively
@@ -60,14 +70,30 @@ MAX_UPDATES_PER_SECOND = 500
 # MAX_UPDATES_PER_SECOND_ON_SHUTDOWN = 1000
 
 # Softly limits the number of whisper files that get created each minute.
-# Setting this value low (like at 50) is a good way to ensure your graphite
+# Setting this value low (e.g. 50) is a good way to ensure that your carbon
 # system will not be adversely impacted when a bunch of new metrics are
-# sent to it. The trade off is that it will take much longer for those metrics'
-# database files to all get created and thus longer until the data becomes usable.
-# Setting this value high (like "inf" for infinity) will cause graphite to create
-# the files quickly but at the risk of slowing I/O down considerably for a while.
+# sent to it. The trade off is that any metrics received in excess of this
+# value will be silently dropped, and the whisper file will not be created
+# until such point as a subsequent metric is received and fits within the
+# defined rate limit. Setting this value high (like "inf" for infinity) will
+# cause carbon to create the files quickly but at the risk of increased I/O.
 MAX_CREATES_PER_MINUTE = 50
 
+# Set the minimum timestamp resolution supported by this instance. This allows
+# internal optimisations by overwriting points with equal truncated timestamps
+# in order to limit the number of updates to the database. It defaults to one
+# second.
+MIN_TIMESTAMP_RESOLUTION = 1
+
+# Set the minimum lag in seconds for a point to be written to the database
+# in order to optimize batching. This means that each point will wait at least
+# the duration of this lag before being written. Setting this to 0 disables the feature.
+# This currently only works when using the timesorted write strategy.
+# MIN_TIMESTAMP_LAG = 0
+
+# Set the interface and port for the line (plain text) listener. Setting the
+# interface to 0.0.0.0 listens on all interfaces. Port can be set to 0 to
+# disable this listener if it is not required.
 LINE_RECEIVER_INTERFACE = 0.0.0.0
 LINE_RECEIVER_PORT = 2003
@@ -78,11 +104,23 @@ ENABLE_UDP_LISTENER = False
 UDP_RECEIVER_INTERFACE = 0.0.0.0
 UDP_RECEIVER_PORT = 2003
 
+# Set the interface and port for the pickle listener. Setting the interface to
+# 0.0.0.0 listens on all interfaces. Port can be set to 0 to disable this
+# listener if it is not required.
 PICKLE_RECEIVER_INTERFACE = 0.0.0.0
 PICKLE_RECEIVER_PORT = 2004
 
-# Set to false to disable logging of successful connections
-LOG_LISTENER_CONNECTIONS = True
+# Set the interface and port for the protobuf listener. Setting the interface to
+# 0.0.0.0 listens on all interfaces. Port can be set to 0 to disable this
+# listener if it is not required.
+# PROTOBUF_RECEIVER_INTERFACE = 0.0.0.0
+# PROTOBUF_RECEIVER_PORT = 2005
+
+# Limit the number of open connections the receiver can handle as any time.
+# Default is no limit. Setting up a limit for sites handling high volume
+# traffic may be recommended to avoid running out of TCP memory or having
+# thousands of TCP connections reduce the throughput of the service.
+#MAX_RECEIVER_CONNECTIONS = inf
 
 # Per security concerns outlined in Bug #817247 the pickle receiver
 # will use a more secure and slightly less efficient unpickler.
@@ -98,13 +136,19 @@ CACHE_QUERY_PORT = 7002
 # data until the cache size falls below 95% MAX_CACHE_SIZE.
 USE_FLOW_CONTROL = True
 
-# By default, carbon-cache will log every whisper update and cache hit. This can be excessive and
-# degrade performance if logging on the same volume as the whisper data is stored.
-LOG_UPDATES = False
-LOG_CACHE_HITS = False
-LOG_CACHE_QUEUE_SORTS = True
+# If enabled this setting is used to timeout metric client connection if no
+# metrics have been sent in specified time in seconds
+#METRIC_CLIENT_IDLE_TIMEOUT = None
 
-# The thread that writes metrics to disk can use on of the following strategies
+# By default, carbon-cache will log every whisper update and cache hit.
+# This can be excessive and degrade performance if logging on the same
+# volume as the whisper data is stored.
+LOG_UPDATES = False
+LOG_CREATES = False
+LOG_CACHE_HITS = False
+LOG_CACHE_QUEUE_SORTS = False
+
+# The thread that writes metrics to disk can use one of the following strategies
 # determining the order in which metrics are removed from cache and flushed to
 # disk. The default option preserves the same behavior as has been historically
 # available in version 0.9.10.
@@ -114,6 +158,12 @@ LOG_CACHE_QUEUE_SORTS = True
 # moment of the list's creation. Metrics will then be flushed from the cache to
 # disk in that order.
 #
+# timesorted - All metrics in the list will be looked at and sorted according
+# to the timestamp of their datapoints. The metrics that were least recently
+# written will be written first. This is a hybrid strategy between max and
+# sorted which is particularly adapted to sets of metrics with non-uniform
+# resolutions.
+#
 # max - The writer thread will always pop and flush the metric from cache
 # that has the most datapoints. This will give a strong flush preference to
 # frequently updated metrics and will also reduce random file-io. Infrequently
@@ -152,12 +202,61 @@ WHISPER_FALLOCATE_CREATE = True
 
 # Enabling this option will cause Whisper to lock each Whisper file it writes
 # to with an exclusive lock (LOCK_EX, see: man 2 flock). This is useful when
-# multiple carbon-cache daemons are writing to the same files
+# multiple carbon-cache daemons are writing to the same files.
 # WHISPER_LOCK_WRITES = False
 
+# On systems which has a large number of metrics, an amount of Whisper write(2)'s
+# pageback sometimes cause disk thrashing due to memory shortage, so that abnormal
+# disk reads occur. Enabling this option makes it possible to decrease useless
+# page cache memory by posix_fadvise(2) with POSIX_FADVISE_RANDOM option.
+# WHISPER_FADVISE_RANDOM = False
+
+# By default all nodes stored in Ceres are cached in memory to improve the
+# throughput of reads and writes to underlying slices. Turning this off will
+# greatly reduce memory consumption for databases with millions of metrics, at
+# the cost of a steep increase in disk i/o, approximately an extra two os.stat
+# calls for every read and write. Reasons to do this are if the underlying
+# storage can handle stat() with practically zero cost (SSD, NVMe, zRAM).
+# Valid values are:
+#   all - all nodes are cached
+#   none - node caching is disabled
+# CERES_NODE_CACHING_BEHAVIOR = all
+
+# Ceres nodes can have many slices and caching the right ones can improve
+# performance dramatically. Note that there are many trade-offs to tinkering
+# with this, and unless you are a ceres developer you *really* should not
+# mess with this. Valid values are:
+#   latest - only the most recent slice is cached
+#   all - all slices are cached
+#   none - slice caching is disabled
+# CERES_SLICE_CACHING_BEHAVIOR = latest
+
+# If a Ceres node accumulates too many slices, performance can suffer.
+# This can be caused by intermittently reported data. To mitigate
+# slice fragmentation there is a tolerance for how much space can be
+# wasted within a slice file to avoid creating a new one. That tolerance
+# level is determined by MAX_SLICE_GAP, which is the number of consecutive
+# null datapoints allowed in a slice file.
+# If you set this very low, you will waste less of the *tiny* bit of disk space
+# that this feature wastes, and you will be prone to performance problems
+# caused by slice fragmentation, which can be pretty severe.
+# If you set this really high, you will waste a bit more disk space (each
+# null datapoint wastes 8 bytes, but keep in mind your filesystem's block
+# size). If you suffer slice fragmentation issues, you should increase this or
+# run the ceres-maintenance defrag plugin more often. However you should not
+# set it to be huge because then if a large but allowed gap occurs it has to
+# get filled in, which means instead of a simple 8-byte write to a new file we
+# could end up doing an (8 * MAX_SLICE_GAP)-byte write to the latest slice.
+# CERES_MAX_SLICE_GAP = 80
+
+# Enabling this option will cause Ceres to lock each Ceres file it writes to
+# with an exclusive lock (LOCK_EX, see: man 2 flock). This is useful when
+# multiple carbon-cache daemons are writing to the same files.
+# CERES_LOCK_WRITES = False
+
 # Set this to True to enable whitelisting and blacklisting of metrics in
-# CONF_DIR/whitelist and CONF_DIR/blacklist. If the whitelist is missing or
-# empty, all metrics will pass through
+# CONF_DIR/whitelist.conf and CONF_DIR/blacklist.conf. If the whitelist is
+# missing or empty, all metrics will pass through
 # USE_WHITELIST = False
 
 # By default, carbon itself will log statistics (such as a count,
@@ -203,16 +302,25 @@ WHISPER_FALLOCATE_CREATE = True
 # Example: store everything
 # BIND_PATTERNS = #
 
+# URL of graphite-web instance, this is used to add incoming series to the tag database
+GRAPHITE_URL = http://127.0.0.1:80
+
+# Tag update interval, this specifies how frequently updates to existing series will trigger
+# an update to the tag index, the default setting is once every 100 updates
+# TAG_UPDATE_INTERVAL = 100
+
 # To configure special settings for the carbon-cache instance 'b', uncomment this:
 #[cache:b]
 #LINE_RECEIVER_PORT = 2103
 #PICKLE_RECEIVER_PORT = 2104
 #CACHE_QUERY_PORT = 7102
 # and any other settings you want to customize, defaults are inherited
-# from [carbon] section.
+# from the [cache] section.
 # You can then specify the --instance=b option to manage this instance
 
+#
+# In order to turn off logging of successful connections for the line
+# receiver, set this to False
+# LOG_LISTENER_CONN_SUCCESS = True
+
 [relay]
 LINE_RECEIVER_INTERFACE = 0.0.0.0
@@ -220,9 +328,6 @@ LINE_RECEIVER_PORT = 2013
 PICKLE_RECEIVER_INTERFACE = 0.0.0.0
 PICKLE_RECEIVER_PORT = 2014
 
-# Set to false to disable logging of successful connections
-LOG_LISTENER_CONNECTIONS = True
-
 # Carbon-relay has several options for metric routing controlled by RELAY_METHOD
 #
 # Use relay-rules.conf to route metrics to destinations based on pattern rules
@@ -237,12 +342,24 @@ LOG_LISTENER_CONNECTIONS = True
 # instance.
 # Enable this for carbon-relays that send to a group of carbon-aggregators
 #RELAY_METHOD = aggregated-consistent-hashing
+#
+# You can also use fast-hashing and fast-aggregated-hashing which are in O(1)
+# and will always redirect the metrics to the same destination but do not try
+# to minimize rebalancing when the list of destinations is changing.
 RELAY_METHOD = rules
 
 # If you use consistent-hashing you can add redundancy by replicating every
 # datapoint to more than one machine.
 REPLICATION_FACTOR = 1
 
+# For REPLICATION_FACTOR >=2, set DIVERSE_REPLICAS to True to guarantee replicas
+# across distributed hosts. With this setting disabled, it's possible that replicas
+# may be sent to different caches on the same host. This has been the default
+# behavior since introduction of 'consistent-hashing' relay method.
+# Note that enabling this on an existing pre-0.9.14 cluster will require rebalancing
+# your metrics across the cluster nodes using a tool like Carbonate.
+#DIVERSE_REPLICAS = True
+
 # This is a list of carbon daemons we will send any relayed or
 # generated metrics to. The default provided would send to a single
 # carbon-cache instance on the default port. However if you
@@ -261,20 +378,71 @@ REPLICATION_FACTOR = 1
 # must be defined in this list
 DESTINATIONS = 127.0.0.1:2004
 
-# This defines the maximum "message size" between carbon daemons.
-# You shouldn't need to tune this unless you really know what you're doing.
-MAX_DATAPOINTS_PER_MESSAGE = 500
+# This defines the protocol to use to contact the destination. It can be
+# set to one of "line", "pickle", "udp" and "protobuf". This list can be
+# extended with CarbonClientFactory plugins and defaults to "pickle".
+# DESTINATION_PROTOCOL = pickle
+
+# When using consistent hashing it sometimes makes sense to make
+# the ring dynamic when you don't want to lose points when a
+# single destination is down. Replication is an answer to that
+# but it can be quite expensive.
+# DYNAMIC_ROUTER = False
+
+# Controls the number of connection attempts before marking a
+# destination as down. We usually do one connection attempt per
+# second.
+# DYNAMIC_ROUTER_MAX_RETRIES = 5
+
+# This is the maximum number of datapoints that can be queued up
+# for a single destination. Once this limit is hit, we will
+# stop accepting new data if USE_FLOW_CONTROL is True, otherwise
+# we will drop any subsequently received datapoints.
+MAX_QUEUE_SIZE = 10000
+
+# This defines the maximum "message size" between carbon daemons. If
+# your queue is large, setting this to a lower number will cause the
+# relay to forward smaller discrete chunks of stats, which may prevent
+# overloading on the receiving side after a disconnect.
+MAX_DATAPOINTS_PER_MESSAGE = 500
+
+# Limit the number of open connections the receiver can handle as any time.
+# Default is no limit. Setting up a limit for sites handling high volume
+# traffic may be recommended to avoid running out of TCP memory or having
+# thousands of TCP connections reduce the throughput of the service.
+#MAX_RECEIVER_CONNECTIONS = inf
+
+# Specify the user to drop privileges to
+# If this is blank carbon-relay runs as the user that invokes it
+# USER =
+
+# This is the percentage that the queue must be empty before it will accept
+# more messages. For a larger site, if the queue is very large it makes sense
+# to tune this to allow for incoming stats. So if you have an average
+# flow of 100k stats/minute, and a MAX_QUEUE_SIZE of 3,000,000, it makes sense
+# to allow stats to start flowing when you've cleared the queue to 95% since
+# you should have space to accommodate the next minute's worth of stats
+# even before the relay incrementally clears more of the queue
+QUEUE_LOW_WATERMARK_PCT = 0.8
+
+# To allow for batch efficiency from the pickle protocol and to benefit from
+# other batching advantages, all writes are deferred by putting them into a queue,
+# and then the queue is flushed and sent a small fraction of a second later.
+TIME_TO_DEFER_SENDING = 0.0001
 
 # Set this to False to drop datapoints when any send queue (sending datapoints
 # to a downstream carbon daemon) hits MAX_QUEUE_SIZE. If this is True (the
 # default) then sockets over which metrics are received will temporarily stop accepting
-# data until the send queues fall below 80% MAX_QUEUE_SIZE.
+# data until the send queues fall below QUEUE_LOW_WATERMARK_PCT * MAX_QUEUE_SIZE.
 USE_FLOW_CONTROL = True
 
+# If enabled this setting is used to timeout metric client connection if no
+# metrics have been sent in specified time in seconds
+#METRIC_CLIENT_IDLE_TIMEOUT = None
+
 # Set this to True to enable whitelisting and blacklisting of metrics in
-# CONF_DIR/whitelist and CONF_DIR/blacklist. If the whitelist is missing or
-# empty, all metrics will pass through
+# CONF_DIR/whitelist.conf and CONF_DIR/blacklist.conf. If the whitelist is
+# missing or empty, all metrics will pass through
 # USE_WHITELIST = False
 
 # By default, carbon itself will log statistics (such as a count,
@@ -282,7 +450,40 @@ USE_FLOW_CONTROL = True
 # seconds. Set CARBON_METRIC_INTERVAL to 0 to disable instrumentation
 # CARBON_METRIC_PREFIX = carbon
 # CARBON_METRIC_INTERVAL = 60
+#
+# In order to turn off logging of successful connections for the line
+# receiver, set this to False
+# LOG_LISTENER_CONN_SUCCESS = True
+
+# If you're connecting from the relay to a destination that's over the
+# internet or similarly iffy connection, a backlog can develop because
+# of internet weather conditions, e.g. acks getting lost or similar issues.
+# To deal with that, you can enable USE_RATIO_RESET which will let you
+# re-set the connection to an individual destination. Defaults to being off.
+USE_RATIO_RESET=False
+
+# When there is a small number of stats flowing, it's not desirable to
+# perform any actions based on percentages - it's just too "twitchy".
+MIN_RESET_STAT_FLOW=1000
+
+# When the ratio of stats being sent in a reporting interval is far
+# enough from 1.0, we will disconnect the socket and reconnect to
+# clear out queued stats. The default ratio of 0.9 indicates that 10%
+# of stats aren't being delivered within one CARBON_METRIC_INTERVAL
+# (default of 60 seconds), which can lead to a queue backup. Under
+# some circumstances re-setting the connection can fix this, so
+# set this according to your tolerance, and look in the logs for
+# "resetConnectionForQualityReasons" to observe whether this is kicking
+# in when your sent queue is building up.
+MIN_RESET_RATIO=0.9
+
+# The minimum time between resets. When a connection is re-set, we
+# need to wait before another reset is performed.
+# (2*CARBON_METRIC_INTERVAL) + 1 second is the minimum time needed
+# before stats for the new connection will be available. Setting this
+# below (2*CARBON_METRIC_INTERVAL) + 1 second will result in a lot of
+# reset connections for no good reason.
+MIN_RESET_INTERVAL=121
+
 [aggregator]
 LINE_RECEIVER_INTERFACE = 0.0.0.0
@@ -291,14 +492,17 @@ LINE_RECEIVER_PORT = 2023
 PICKLE_RECEIVER_INTERFACE = 0.0.0.0
 PICKLE_RECEIVER_PORT = 2024
 
-# Set to false to disable logging of successful connections
-LOG_LISTENER_CONNECTIONS = True
-
 # If set true, metric received will be forwarded to DESTINATIONS in addition to
 # the output of the aggregation rules. If set false the carbon-aggregator will
 # only ever send the output of aggregation.
 FORWARD_ALL = True
 
+# Filenames of the configuration files to use for this instance of aggregator.
+# Filenames are relative to CONF_DIR.
+#
+# AGGREGATION_RULES = aggregation-rules.conf
+# REWRITE_RULES = rewrite-rules.conf
+
 # This is a list of carbon daemons we will send any relayed or
 # generated metrics to. The default provided would send to a single
 # carbon-cache instance on the default port. However if you
@@ -330,6 +534,10 @@ MAX_QUEUE_SIZE = 10000
 # data until the send queues fall below 80% MAX_QUEUE_SIZE.
 USE_FLOW_CONTROL = True
 
+# If enabled this setting is used to timeout metric client connection if no
+# metrics have been sent in specified time in seconds
+#METRIC_CLIENT_IDLE_TIMEOUT = None
+
 # This defines the maximum "message size" between carbon daemons.
 # You shouldn't need to tune this unless you really know what you're doing.
 MAX_DATAPOINTS_PER_MESSAGE = 500
@@ -339,6 +547,12 @@ MAX_DATAPOINTS_PER_MESSAGE = 500
 # the past MAX_AGGREGATION_INTERVALS * intervalSize seconds.
 MAX_AGGREGATION_INTERVALS = 5
 
+# Limit the number of open connections the receiver can handle as any time.
+# Default is no limit. Setting up a limit for sites handling high volume
+# traffic may be recommended to avoid running out of TCP memory or having
+# thousands of TCP connections reduce the throughput of the service.
+#MAX_RECEIVER_CONNECTIONS = inf
+
 # By default (WRITE_BACK_FREQUENCY = 0), carbon-aggregator will write back
 # aggregated data points once every rule.frequency seconds, on a per-rule basis.
 # Set this (WRITE_BACK_FREQUENCY = N) to write back all aggregated data points
@@ -348,8 +562,8 @@ MAX_AGGREGATION_INTERVALS = 5
 # WRITE_BACK_FREQUENCY = 0
 
 # Set this to True to enable whitelisting and blacklisting of metrics in
-# CONF_DIR/whitelist and CONF_DIR/blacklist. If the whitelist is missing or
-# empty, all metrics will pass through
+# CONF_DIR/whitelist.conf and CONF_DIR/blacklist.conf. If the whitelist is
+# missing or empty, all metrics will pass through
 # USE_WHITELIST = False
 
 # By default, carbon itself will log statistics (such as a count,
@@ -357,3 +571,24 @@ MAX_AGGREGATION_INTERVALS = 5
 # seconds. Set CARBON_METRIC_INTERVAL to 0 to disable instrumentation
 # CARBON_METRIC_PREFIX = carbon
 # CARBON_METRIC_INTERVAL = 60
+
+# In order to turn off logging of successful connections for the line
+# receiver, set this to False
+# LOG_LISTENER_CONN_SUCCESS = True
+
+# In order to turn off logging of metrics with no corresponding
+# aggregation rules receiver, set this to False
+# LOG_AGGREGATOR_MISSES = False
+
+# Specify the user to drop privileges to
+# If this is blank carbon-aggregator runs as the user that invokes it
+# USER =
+
+# Part of the code, and particularly aggregator rules, need
+# to cache metric names. To avoid leaking too much memory you
+# can tweak the size of this cache. The default allows for 1M
+# different metrics per rule (~200MiB).
+# CACHE_METRIC_NAMES_MAX=1000000
+
+# You can optionally set a ttl to this cache.
+# CACHE_METRIC_NAMES_TTL=600
@@ -40,4 +40,3 @@ aggregationMethod = sum
 pattern = .*
 xFilesFactor = 0.3
 aggregationMethod = average
-
@@ -1,4 +1,23 @@
+# Schema definitions for Whisper files. Entries are scanned in order,
+# and first match wins. This file is scanned for changes every 60 seconds.
+#
+# Definition Syntax:
+#
+#   [name]
+#   pattern = regex
+#   retentions = timePerPoint:timeToStore, timePerPoint:timeToStore, ...
+#
+# Remember: To support accurate aggregation from higher to lower resolution
+#   archives, the precision of a longer retention archive must be
+#   cleanly divisible by precision of next lower retention archive.
+#
+#   Valid: 60s:7d,300s:30d (300/60 = 5)
+#   Invalid: 180s:7d,300s:30d (300/180 = 3.333)
+#
+
+# Carbon's internal metrics. This entry should match what is specified in
+# CARBON_METRIC_PREFIX and CARBON_METRIC_INTERVAL settings
+
 [carbon]
 pattern = ^carbon\..*
 retentions = 1m:31d,10m:1y,1h:5y
@@ -1,3 +1,3 @@
 #!/bin/bash
 PYTHONPATH=/opt/graphite/webapp django-admin.py syncdb --settings=graphite.settings
-PYTHONPATH=/opt/graphite/webapp django-admin.py update_users --settings=graphite.settings
+# PYTHONPATH=/opt/graphite/webapp django-admin.py update_users --settings=graphite.settings
@@ -1,5 +1,8 @@
 graphite:
-  build: blocks/graphite1
+  build:
+    context: blocks/graphite1
+    args:
+      version: master
   ports:
     - "8080:80"
     - "2003:2003"
@@ -1,76 +0,0 @@
-[cache]
-LOCAL_DATA_DIR = /opt/graphite/storage/whisper/
-
-# Specify the user to drop privileges to
-# If this is blank carbon runs as the user that invokes it
-# This user must have write access to the local data directory
-USER =
-
-# Limit the size of the cache to avoid swapping or becoming CPU bound.
-# Sorts and serving cache queries gets more expensive as the cache grows.
-# Use the value "inf" (infinity) for an unlimited cache size.
-MAX_CACHE_SIZE = inf
-
-# Limits the number of whisper update_many() calls per second, which effectively
-# means the number of write requests sent to the disk. This is intended to
-# prevent over-utilizing the disk and thus starving the rest of the system.
-# When the rate of required updates exceeds this, then carbon's caching will
-# take effect and increase the overall throughput accordingly.
-MAX_UPDATES_PER_SECOND = 1000
-
-# Softly limits the number of whisper files that get created each minute.
-# Setting this value low (like at 50) is a good way to ensure your graphite
-# system will not be adversely impacted when a bunch of new metrics are
-# sent to it. The trade off is that it will take much longer for those metrics'
-# database files to all get created and thus longer until the data becomes usable.
-# Setting this value high (like "inf" for infinity) will cause graphite to create
-# the files quickly but at the risk of slowing I/O down considerably for a while.
-MAX_CREATES_PER_MINUTE = inf
-
-LINE_RECEIVER_INTERFACE = 0.0.0.0
-LINE_RECEIVER_PORT = 2003
-
-PICKLE_RECEIVER_INTERFACE = 0.0.0.0
-PICKLE_RECEIVER_PORT = 2004
-
-CACHE_QUERY_INTERFACE = 0.0.0.0
-CACHE_QUERY_PORT = 7002
-
-LOG_UPDATES = False
-
-# Enable AMQP if you want to receive metrics using an amqp broker
-# ENABLE_AMQP = False
-
-# Verbose means a line will be logged for every metric received
-# useful for testing
-# AMQP_VERBOSE = False
-
-# AMQP_HOST = localhost
-# AMQP_PORT = 5672
-# AMQP_VHOST = /
-# AMQP_USER = guest
-# AMQP_PASSWORD = guest
-# AMQP_EXCHANGE = graphite
-
-# Patterns for all of the metrics this machine will store. Read more at
-# http://en.wikipedia.org/wiki/Advanced_Message_Queuing_Protocol#Bindings
-#
-# Example: store all sales, linux servers, and utilization metrics
-# BIND_PATTERNS = sales.#, servers.linux.#, #.utilization
-#
-# Example: store everything
-# BIND_PATTERNS = #
-
-# NOTE: you cannot run both a cache and a relay on the same server
-# with the default configuration, you have to specify distinct
-# interfaces and ports for the listeners.
-
-[relay]
-LINE_RECEIVER_INTERFACE = 0.0.0.0
-LINE_RECEIVER_PORT = 2003
-
-PICKLE_RECEIVER_INTERFACE = 0.0.0.0
-PICKLE_RECEIVER_PORT = 2004
-
-CACHE_SERVERS = server1, server2, server3
-MAX_QUEUE_SIZE = 10000
@@ -1,102 +0,0 @@
-import datetime
-import time
-
-from django.utils.timezone import get_current_timezone
-from django.core.urlresolvers import get_script_prefix
-from django.http import HttpResponse
-from django.shortcuts import render_to_response, get_object_or_404
-from pytz import timezone
-
-from graphite.util import json
-from graphite.events import models
-from graphite.render.attime import parseATTime
-
-
-def to_timestamp(dt):
-    return time.mktime(dt.timetuple())
-
-
-class EventEncoder(json.JSONEncoder):
-    def default(self, obj):
-        if isinstance(obj, datetime.datetime):
-            return to_timestamp(obj)
-        return json.JSONEncoder.default(self, obj)
-
-
-def view_events(request):
-    if request.method == "GET":
-        context = {'events': fetch(request),
-                   'slash': get_script_prefix()}
-        return render_to_response("events.html", context)
-    else:
-        return post_event(request)
-
-
-def detail(request, event_id):
-    e = get_object_or_404(models.Event, pk=event_id)
-    context = {'event': e,
-               'slash': get_script_prefix()}
-    return render_to_response("event.html", context)
-
-
-def post_event(request):
-    if request.method == 'POST':
-        event = json.loads(request.body)
-        assert isinstance(event, dict)
-
-        values = {}
-        values["what"] = event["what"]
-        values["tags"] = event.get("tags", None)
-        values["when"] = datetime.datetime.fromtimestamp(
-            event.get("when", time.time()))
-        if "data" in event:
-            values["data"] = event["data"]
-
-        e = models.Event(**values)
-        e.save()
-
-        return HttpResponse(status=200)
-    else:
-        return HttpResponse(status=405)
-
-
-def get_data(request):
-    if 'jsonp' in request.REQUEST:
-        response = HttpResponse(
-            "%s(%s)" % (request.REQUEST.get('jsonp'),
-                        json.dumps(fetch(request), cls=EventEncoder)),
-            mimetype='text/javascript')
-    else:
-        response = HttpResponse(
-            json.dumps(fetch(request), cls=EventEncoder),
-            mimetype="application/json")
-    return response
-
-
-def fetch(request):
-    # XXX we need to move to USE_TZ=True to get rid of naive-time conversions
-    def make_naive(dt):
-        if 'tz' in request.GET:
-            tz = timezone(request.GET['tz'])
-        else:
-            tz = get_current_timezone()
-        local_dt = dt.astimezone(tz)
-        if hasattr(local_dt, 'normalize'):
-            local_dt = local_dt.normalize()
-        return local_dt.replace(tzinfo=None)
-
-    if request.GET.get("from", None) is not None:
-        time_from = make_naive(parseATTime(request.GET["from"]))
-    else:
-        time_from = datetime.datetime.fromtimestamp(0)
-
-    if request.GET.get("until", None) is not None:
-        time_until = make_naive(parseATTime(request.GET["until"]))
-    else:
-        time_until = datetime.datetime.now()
-
-    tags = request.GET.get("tags", None)
-    if tags is not None:
-        tags = request.GET.get("tags").split(" ")
-
-    return [x.as_dict() for x in
-            models.Event.find_events(time_from, time_until, tags=tags)]
@@ -1,20 +0,0 @@
-[
-  {
-    "pk": 1,
-    "model": "auth.user",
-    "fields": {
-      "username": "admin",
-      "first_name": "",
-      "last_name": "",
-      "is_active": true,
-      "is_superuser": true,
-      "is_staff": true,
-      "last_login": "2011-09-20 17:02:14",
-      "groups": [],
-      "user_permissions": [],
-      "password": "sha1$1b11b$edeb0a67a9622f1f2cfeabf9188a711f5ac7d236",
-      "email": "root@example.com",
-      "date_joined": "2011-09-20 17:02:14"
-    }
-  }
-]
@@ -1,42 +0,0 @@
-# Edit this file to override the default graphite settings, do not edit settings.py
-
-# Turn on debugging and restart apache if you ever see an "Internal Server Error" page
-#DEBUG = True
-
-# Set your local timezone (django will try to figure this out automatically)
-TIME_ZONE = 'UTC'
-
-# Setting MEMCACHE_HOSTS to be empty will turn off use of memcached entirely
-#MEMCACHE_HOSTS = ['127.0.0.1:11211']
-
-# Sometimes you need to do a lot of rendering work but cannot share your storage mount
-#REMOTE_RENDERING = True
-#RENDERING_HOSTS = ['fastserver01','fastserver02']
-#LOG_RENDERING_PERFORMANCE = True
-#LOG_CACHE_PERFORMANCE = True
-
-# If you've got more than one backend server they should all be listed here
-#CLUSTER_SERVERS = []
-
-# Override this if you need to provide documentation specific to your graphite deployment
-#DOCUMENTATION_URL = "http://wiki.mycompany.com/graphite"
-
-# Enable email-related features
-#SMTP_SERVER = "mail.mycompany.com"
-
-# LDAP / ActiveDirectory authentication setup
-#USE_LDAP_AUTH = True
-#LDAP_SERVER = "ldap.mycompany.com"
-#LDAP_PORT = 389
-#LDAP_SEARCH_BASE = "OU=users,DC=mycompany,DC=com"
-#LDAP_BASE_USER = "CN=some_readonly_account,DC=mycompany,DC=com"
-#LDAP_BASE_PASS = "readonly_account_password"
-#LDAP_USER_QUERY = "(username=%s)"  #For Active Directory use "(sAMAccountName=%s)"
-
-# If sqlite won't cut it, configure your real database here (don't forget to run manage.py syncdb!)
-#DATABASE_ENGINE = 'mysql'  # or 'postgres'
-#DATABASE_NAME = 'graphite'
-#DATABASE_USER = 'graphite'
-#DATABASE_PASSWORD = 'graphite-is-awesome'
-#DATABASE_HOST = 'mysql.mycompany.com'
-#DATABASE_PORT = '3306'
@@ -1 +0,0 @@
-grafana:$apr1$4R/20xhC$8t37jPP5dbcLr48btdkU//
@@ -1,70 +0,0 @@
-daemon off;
-user www-data;
-worker_processes 1;
-pid /var/run/nginx.pid;
-
-events {
-  worker_connections 1024;
-}
-
-http {
-  sendfile on;
-  tcp_nopush on;
-  tcp_nodelay on;
-  keepalive_timeout 65;
-  types_hash_max_size 2048;
-  server_tokens off;
-
-  server_names_hash_bucket_size 32;
-
-  include /etc/nginx/mime.types;
-  default_type application/octet-stream;
-
-  access_log /var/log/nginx/access.log;
-  error_log /var/log/nginx/error.log;
-
-  gzip on;
-  gzip_disable "msie6";
-
-  server {
-    listen 80 default_server;
-    server_name _;
-
-    open_log_file_cache max=1000 inactive=20s min_uses=2 valid=1m;
-
-    location / {
-      proxy_pass http://127.0.0.1:8000;
-      proxy_set_header X-Real-IP $remote_addr;
-      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-      proxy_set_header X-Forwarded-Proto $scheme;
-      proxy_set_header X-Forwarded-Server $host;
-      proxy_set_header X-Forwarded-Host $host;
-      proxy_set_header Host $host;
-
-      client_max_body_size 10m;
-      client_body_buffer_size 128k;
-
-      proxy_connect_timeout 90;
-      proxy_send_timeout 90;
-      proxy_read_timeout 90;
-
-      proxy_buffer_size 4k;
-      proxy_buffers 4 32k;
-      proxy_busy_buffers_size 64k;
-      proxy_temp_file_write_size 64k;
-    }
-
-    add_header Access-Control-Allow-Origin "*";
-    add_header Access-Control-Allow-Methods "GET, OPTIONS";
-    add_header Access-Control-Allow-Headers "origin, authorization, accept";
-
-    location /content {
-      alias /opt/graphite/webapp/content;
-    }
-
-    location /media {
-      alias /usr/share/pyshared/django/contrib/admin/media;
-    }
-  }
-}
@@ -1,8 +0,0 @@
-{
-  graphitePort: 2003,
-  graphiteHost: "127.0.0.1",
-  port: 8125,
-  mgmt_port: 8126,
-  backends: ['./backends/graphite'],
-  debug: true
-}
@@ -1,19 +0,0 @@
-[min]
-pattern = \.min$
-xFilesFactor = 0.1
-aggregationMethod = min
-
-[max]
-pattern = \.max$
-xFilesFactor = 0.1
-aggregationMethod = max
-
-[sum]
-pattern = \.count$
-xFilesFactor = 0
-aggregationMethod = sum
-
-[default_average]
-pattern = .*
-xFilesFactor = 0.5
-aggregationMethod = average
@@ -1,16 +0,0 @@
-[carbon]
-pattern = ^carbon\..*
-retentions = 1m:31d,10m:1y,1h:5y
-
-[highres]
-pattern = ^highres.*
-retentions = 1s:1d,1m:7d
-
-[statsd]
-pattern = ^statsd.*
-retentions = 1m:7d,10m:1y
-
-[default]
-pattern = .*
-retentions = 10s:1d,1m:7d,10m:1y
-
@@ -1,26 +0,0 @@
-[supervisord]
-nodaemon = true
-environment = GRAPHITE_STORAGE_DIR='/opt/graphite/storage',GRAPHITE_CONF_DIR='/opt/graphite/conf'
-
-[program:nginx]
-command = /usr/sbin/nginx
-stdout_logfile = /var/log/supervisor/%(program_name)s.log
-stderr_logfile = /var/log/supervisor/%(program_name)s.log
-autorestart = true
-
-[program:carbon-cache]
-;user = www-data
-command = /opt/graphite/bin/carbon-cache.py --debug start
-stdout_logfile = /var/log/supervisor/%(program_name)s.log
-stderr_logfile = /var/log/supervisor/%(program_name)s.log
-autorestart = true
-
-[program:graphite-webapp]
-;user = www-data
-directory = /opt/graphite/webapp
-environment = PYTHONPATH='/opt/graphite/webapp'
-command = /usr/bin/gunicorn_django -b127.0.0.1:8000 -w2 graphite/settings.py
-stdout_logfile = /var/log/supervisor/%(program_name)s.log
-stderr_logfile = /var/log/supervisor/%(program_name)s.log
-autorestart = true
-
@@ -1,5 +1,3 @@
-///<reference path="../../../headers/common.d.ts" />
-
 import _ from 'lodash';
 import $ from 'jquery';
 import coreModule from '../../core_module';
@@ -201,9 +199,9 @@
   }
 
   open() {
-    this.inputElement.show();
-
     this.inputElement.css('width', (Math.max(this.linkElement.width(), 80) + 16) + 'px');
+
+    this.inputElement.show();
     this.inputElement.focus();
 
     this.linkElement.hide();
@@ -48,6 +48,7 @@ function (_, $, coreModule) {
       segment.html = selected.html || selected.value;
       segment.fake = false;
       segment.expandable = selected.expandable;
+      segment.type = selected.type;
     }
     else if (segment.custom !== 'false') {
       segment.value = value;
@@ -103,7 +103,7 @@ function (angular, _, coreModule) {
   };
 
   this.newPlusButton = function() {
-    return new MetricSegment({fake: true, html: '<i class="fa fa-plus "></i>', type: 'plus-button' });
+    return new MetricSegment({fake: true, html: '<i class="fa fa-plus "></i>', type: 'plus-button', cssClass: 'query-part' });
   };
 
   this.newSelectTagValue = function() {
@@ -0,0 +1,34 @@
+import _ from 'lodash';
+
+const versionPattern = /^(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:-([0-9A-Za-z\.]+))?/;
+
+export class SemVersion {
+  major: number;
+  minor: number;
+  patch: number;
+  meta: string;
+
+  constructor(version: string) {
+    let match = versionPattern.exec(version);
+    if (match) {
+      this.major = Number(match[1]);
+      this.minor = Number(match[2] || 0);
+      this.patch = Number(match[3] || 0);
+      this.meta = match[4];
+    }
+  }
+
+  isGtOrEq(version: string): boolean {
+    let compared = new SemVersion(version);
+    // Compare lexicographically: major first, then minor, then patch.
+    // (A plain "no component is smaller" check would wrongly report
+    // 1.0.0 < 0.9.9, since 0 < 9 on the minor component.)
+    if (this.major !== compared.major) { return this.major > compared.major; }
+    if (this.minor !== compared.minor) { return this.minor > compared.minor; }
+    return this.patch >= compared.patch;
+  }
+
+  isValid(): boolean {
+    return _.isNumber(this.major);
+  }
+}
+
+export function isVersionGtOrEq(a: string, b: string): boolean {
+  let a_semver = new SemVersion(a);
+  return a_semver.isGtOrEq(b);
+}
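For reference, a small usage sketch of the helper above (editor's illustration, not part of the commit; the values are hypothetical):

  import {SemVersion, isVersionGtOrEq} from 'app/core/utils/version';

  // Comparison is lexicographic across major, minor, patch:
  isVersionGtOrEq('1.1.0', '1.1');   // true  (1.1.0 >= 1.1)
  isVersionGtOrEq('0.9.16', '1.1');  // false (0 < 1 on the major component)

  // Pre-release metadata after a dash is captured but not compared:
  let v = new SemVersion('1.1.0-rc.1');
  // v.major === 1, v.minor === 1, v.patch === 0, v.meta === 'rc.1'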
@@ -2,12 +2,26 @@
 
 export class GraphiteConfigCtrl {
   static templateUrl = 'public/app/plugins/datasource/graphite/partials/config.html';
+  datasourceSrv: any;
   current: any;
 
   /** @ngInject */
-  constructor($scope) {
+  constructor($scope, datasourceSrv) {
+    this.datasourceSrv = datasourceSrv;
     this.current.jsonData = this.current.jsonData || {};
     this.current.jsonData.graphiteVersion = this.current.jsonData.graphiteVersion || '0.9';
+
+    this.autoDetectGraphiteVersion();
+  }
+
+  autoDetectGraphiteVersion() {
+    this.datasourceSrv.loadDatasource(this.current.name)
+      .then((ds) => {
+        return ds.getVersion();
+      }).then((version) => {
+        this.graphiteVersions.push({name: version, value: version});
+        this.current.jsonData.graphiteVersion = version;
+      });
   }
 
   graphiteVersions = [
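One detail the hunk above leaves implicit: getVersion() (added to datasource.ts further down in this diff) resolves to an empty string when the /version endpoint is unreachable or returns something that does not parse as a version. A defensive variant of the auto-detect step would skip the update in that case. A minimal sketch, not part of the commit:

  autoDetectGraphiteVersion() {
    this.datasourceSrv.loadDatasource(this.current.name)
      .then((ds) => ds.getVersion())
      .then((version) => {
        // getVersion() resolves '' on failure; keep the configured default then
        if (!version) { return; }
        this.graphiteVersions.push({name: version, value: version});
        this.current.jsonData.graphiteVersion = version;
      });
  }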
@@ -2,6 +2,7 @@
 
 import _ from 'lodash';
 import * as dateMath from 'app/core/utils/datemath';
+import {isVersionGtOrEq, SemVersion} from 'app/core/utils/version';
 
 /** @ngInject */
 export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv) {
@@ -9,6 +10,7 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
   this.url = instanceSettings.url;
   this.name = instanceSettings.name;
   this.graphiteVersion = instanceSettings.jsonData.graphiteVersion || '0.9';
+  this.supportsTags = supportsTags(this.graphiteVersion);
   this.cacheTimeout = instanceSettings.cacheTimeout;
   this.withCredentials = instanceSettings.withCredentials;
   this.render_method = instanceSettings.render_method || 'POST';
@@ -217,6 +219,126 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
     });
   };
 
+  this.getTags = function(optionalOptions) {
+    let options = optionalOptions || {};
+
+    let httpOptions: any = {
+      method: 'GET',
+      url: '/tags',
+      // empty params object so the range params below can be attached
+      params: {},
+      // for cancellations
+      requestId: options.requestId,
+    };
+
+    if (options && options.range) {
+      httpOptions.params.from = this.translateTime(options.range.from, false);
+      httpOptions.params.until = this.translateTime(options.range.to, true);
+    }
+
+    return this.doGraphiteRequest(httpOptions).then(results => {
+      return _.map(results.data, tag => {
+        return {
+          text: tag.tag,
+          id: tag.id
+        };
+      });
+    });
+  };
+
+  this.getTagValues = function(tag, optionalOptions) {
+    let options = optionalOptions || {};
+
+    let httpOptions: any = {
+      method: 'GET',
+      url: '/tags/' + tag,
+      // empty params object so the range params below can be attached
+      params: {},
+      // for cancellations
+      requestId: options.requestId,
+    };
+
+    if (options && options.range) {
+      httpOptions.params.from = this.translateTime(options.range.from, false);
+      httpOptions.params.until = this.translateTime(options.range.to, true);
+    }
+
+    return this.doGraphiteRequest(httpOptions).then(results => {
+      if (results.data && results.data.values) {
+        return _.map(results.data.values, value => {
+          return {
+            text: value.value,
+            id: value.id
+          };
+        });
+      } else {
+        return [];
+      }
+    });
+  };
+
+  this.getTagsAutoComplete = (expression, tagPrefix) => {
+    let httpOptions: any = {
+      method: 'GET',
+      url: '/tags/autoComplete/tags',
+      params: {
+        expr: expression
+      }
+    };
+
+    if (tagPrefix) {
+      httpOptions.params.tagPrefix = tagPrefix;
+    }
+
+    return this.doGraphiteRequest(httpOptions).then(results => {
+      if (results.data) {
+        return _.map(results.data, (tag) => {
+          return { text: tag };
+        });
+      } else {
+        return [];
+      }
+    });
+  };
+
+  this.getTagValuesAutoComplete = (expression, tag, valuePrefix) => {
+    let httpOptions: any = {
+      method: 'GET',
+      url: '/tags/autoComplete/values',
+      params: {
+        expr: expression,
+        tag: tag
+      }
+    };
+
+    if (valuePrefix) {
+      httpOptions.params.valuePrefix = valuePrefix;
+    }
+
+    return this.doGraphiteRequest(httpOptions).then(results => {
+      if (results.data) {
+        return _.map(results.data, (value) => {
+          return { text: value };
+        });
+      } else {
+        return [];
+      }
+    });
+  };
+
+  this.getVersion = function() {
+    let httpOptions = {
+      method: 'GET',
+      url: '/version/_', // Prevent last / trimming
+    };
+
+    return this.doGraphiteRequest(httpOptions).then(results => {
+      if (results.data) {
+        let semver = new SemVersion(results.data);
+        return semver.isValid() ? results.data : '';
+      }
+      return '';
+    }).catch(() => {
+      return '';
+    });
+  };
+
   this.testDatasource = function() {
     return this.metricFindQuery('*').then(function () {
       return { status: "success", message: "Data source is working"};
@@ -303,3 +425,7 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv) {
    return clean_options;
  };
}

function supportsTags(version: string): boolean {
  return isVersionGtOrEq(version, '1.1');
}

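For reference, a sketch of the Graphite HTTP endpoints the methods above call, with the response shapes the mapping code expects (URLs relative to the datasource URL; payloads illustrative, inferred from the mapping code):

// GET /tags                                   -> [{tag: 'datacenter', id: 1}, ...]
// GET /tags/datacenter                        -> {values: [{value: 'east', id: 7}, ...]}
// GET /tags/autoComplete/tags?expr=...        -> ['datacenter', 'host']
// GET /tags/autoComplete/values?expr=...&tag= -> ['east', 'west']
// GET /version/_                              -> '1.1.1'

// so, for example:
ds.getTagValues('datacenter').then(values => {
  // values === [{text: 'east', id: 7}, {text: 'west', id: 8}]
});
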
@@ -0,0 +1,981 @@
define([
  'lodash',
  'jquery',
  'app/core/utils/version'
],
function (_, $, version) {
  'use strict';

  var index = [];
  var categories = {
    Combine: [],
    Transform: [],
    Calculate: [],
    Filter: [],
    Special: []
  };

  function addFuncDef(funcDef) {
    funcDef.params = funcDef.params || [];
    funcDef.defaultParams = funcDef.defaultParams || [];

    if (funcDef.category) {
      funcDef.category.push(funcDef);
    }
    index[funcDef.name] = funcDef;
    index[funcDef.shortName || funcDef.name] = funcDef;
  }

  var optionalSeriesRefArgs = [
    { name: 'other', type: 'value_or_series', optional: true },
    { name: 'other', type: 'value_or_series', optional: true },
    { name: 'other', type: 'value_or_series', optional: true },
    { name: 'other', type: 'value_or_series', optional: true },
    { name: 'other', type: 'value_or_series', optional: true }
  ];

  addFuncDef({
    name: 'scaleToSeconds',
    category: categories.Transform,
    params: [{ name: 'seconds', type: 'int' }],
    defaultParams: [1],
  });

  addFuncDef({
    name: 'perSecond',
    category: categories.Transform,
    params: [{ name: "max value", type: "int", optional: true }],
    defaultParams: [],
  });

  addFuncDef({
    name: "holtWintersForecast",
    category: categories.Calculate,
  });

  addFuncDef({
    name: "holtWintersConfidenceBands",
    category: categories.Calculate,
    params: [{ name: "delta", type: 'int' }],
    defaultParams: [3]
  });

  addFuncDef({
    name: "holtWintersAberration",
    category: categories.Calculate,
    params: [{ name: "delta", type: 'int' }],
    defaultParams: [3]
  });

  addFuncDef({
    name: "nPercentile",
    category: categories.Calculate,
    params: [{ name: "Nth percentile", type: 'int' }],
    defaultParams: [95]
  });

  addFuncDef({
    name: 'diffSeries',
    params: optionalSeriesRefArgs,
    defaultParams: ['#A'],
    category: categories.Calculate,
  });

  addFuncDef({
    name: 'stddevSeries',
    params: optionalSeriesRefArgs,
    defaultParams: [''],
    category: categories.Calculate,
  });

  addFuncDef({
    name: 'divideSeries',
    params: optionalSeriesRefArgs,
    defaultParams: ['#A'],
    category: categories.Calculate,
  });

  addFuncDef({
    name: 'multiplySeries',
    params: optionalSeriesRefArgs,
    defaultParams: ['#A'],
    category: categories.Calculate,
  });

  addFuncDef({
    name: 'asPercent',
    params: optionalSeriesRefArgs,
    defaultParams: ['#A'],
    category: categories.Calculate,
  });

  addFuncDef({
    name: 'group',
    params: optionalSeriesRefArgs,
    defaultParams: ['#A', '#B'],
    category: categories.Combine,
  });

  addFuncDef({
    name: 'sumSeries',
    shortName: 'sum',
    category: categories.Combine,
    params: optionalSeriesRefArgs,
    defaultParams: [''],
  });

  addFuncDef({
    name: 'averageSeries',
    shortName: 'avg',
    category: categories.Combine,
    params: optionalSeriesRefArgs,
    defaultParams: [''],
  });

  addFuncDef({
    name: 'rangeOfSeries',
    category: categories.Combine
  });

  addFuncDef({
    name: 'percentileOfSeries',
    category: categories.Combine,
    params: [{ name: 'n', type: 'int' }, { name: 'interpolate', type: 'boolean', options: ['true', 'false'] }],
    defaultParams: [95, 'false']
  });

  addFuncDef({
    name: 'sumSeriesWithWildcards',
    category: categories.Combine,
    params: [
      { name: "node", type: "int" },
      { name: "node", type: "int", optional: true },
      { name: "node", type: "int", optional: true },
      { name: "node", type: "int", optional: true }
    ],
    defaultParams: [3]
  });

  addFuncDef({
    name: 'maxSeries',
    shortName: 'max',
    category: categories.Combine,
  });

  addFuncDef({
    name: 'minSeries',
    shortName: 'min',
    category: categories.Combine,
  });

  addFuncDef({
    name: 'averageSeriesWithWildcards',
    category: categories.Combine,
    params: [
      { name: "node", type: "int" },
      { name: "node", type: "int", optional: true },
    ],
    defaultParams: [3]
  });

  addFuncDef({
    name: "alias",
    category: categories.Special,
    params: [{ name: "alias", type: 'string' }],
    defaultParams: ['alias']
  });

  addFuncDef({
    name: "aliasSub",
    category: categories.Special,
    params: [{ name: "search", type: 'string' }, { name: "replace", type: 'string' }],
    defaultParams: ['', '\\1']
  });

  addFuncDef({
    name: "stacked",
    category: categories.Special,
    params: [{ name: "stack", type: 'string' }],
    defaultParams: ['stacked']
  });

  addFuncDef({
    name: "consolidateBy",
    category: categories.Special,
    params: [
      {
        name: 'function',
        type: 'string',
        options: ['sum', 'average', 'min', 'max']
      }
    ],
    defaultParams: ['max']
  });

  addFuncDef({
    name: "cumulative",
    category: categories.Special,
    params: [],
    defaultParams: []
  });

  addFuncDef({
    name: "groupByNode",
    category: categories.Special,
    params: [
      {
        name: "node",
        type: "int",
        options: [0,1,2,3,4,5,6,7,8,9,10,12]
      },
      {
        name: "function",
        type: "string",
        options: ['sum', 'avg', 'maxSeries']
      }
    ],
    defaultParams: [3, "sum"]
  });

  addFuncDef({
    name: 'aliasByNode',
    category: categories.Special,
    params: [
      { name: "node", type: "int", options: [0,1,2,3,4,5,6,7,8,9,10,12] },
      { name: "node", type: "int", options: [0,-1,-2,-3,-4,-5,-6,-7], optional: true },
      { name: "node", type: "int", options: [0,-1,-2,-3,-4,-5,-6,-7], optional: true },
      { name: "node", type: "int", options: [0,-1,-2,-3,-4,-5,-6,-7], optional: true },
    ],
    defaultParams: [3]
  });

  addFuncDef({
    name: 'substr',
    category: categories.Special,
    params: [
      { name: "start", type: "int", options: [-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8,9,10,12] },
      { name: "stop", type: "int", options: [-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8,9,10,12] },
    ],
    defaultParams: [0, 0]
  });

  addFuncDef({
    name: 'sortByName',
    category: categories.Special,
    params: [{ name: 'natural', type: 'boolean', options: ['true', 'false'], optional: true }],
    defaultParams: ['false']
  });

  addFuncDef({
    name: 'sortByMaxima',
    category: categories.Special
  });

  addFuncDef({
    name: 'sortByMinima',
    category: categories.Special
  });

  addFuncDef({
    name: 'sortByTotal',
    category: categories.Special
  });

  addFuncDef({
    name: 'aliasByMetric',
    category: categories.Special,
  });

  addFuncDef({
    name: 'randomWalk',
    fake: true,
    category: categories.Special,
    params: [{ name: "name", type: "string", }],
    defaultParams: ['randomWalk']
  });

  addFuncDef({
    name: 'countSeries',
    category: categories.Special
  });

  addFuncDef({
    name: 'constantLine',
    category: categories.Special,
    params: [{ name: "value", type: "int", }],
    defaultParams: [10]
  });

  addFuncDef({
    name: 'cactiStyle',
    category: categories.Special,
  });

  addFuncDef({
    name: 'keepLastValue',
    category: categories.Special,
    params: [{ name: "n", type: "int", }],
    defaultParams: [100]
  });

  addFuncDef({
    name: "changed",
    category: categories.Special,
    params: [],
    defaultParams: []
  });

  addFuncDef({
    name: 'scale',
    category: categories.Transform,
    params: [{ name: "factor", type: "int", }],
    defaultParams: [1]
  });

  addFuncDef({
    name: 'offset',
    category: categories.Transform,
    params: [{ name: "amount", type: "int", }],
    defaultParams: [10]
  });

  addFuncDef({
    name: 'transformNull',
    category: categories.Transform,
    params: [{ name: "amount", type: "int", }],
    defaultParams: [0]
  });

  addFuncDef({
    name: 'integral',
    category: categories.Transform,
  });

  addFuncDef({
    name: 'derivative',
    category: categories.Transform,
  });

  addFuncDef({
    name: 'nonNegativeDerivative',
    category: categories.Transform,
    params: [{ name: "max value or 0", type: "int", optional: true }],
    defaultParams: ['']
  });

  addFuncDef({
    name: 'timeShift',
    category: categories.Transform,
    params: [{ name: "amount", type: "select", options: ['1h', '6h', '12h', '1d', '2d', '7d', '14d', '30d'] }],
    defaultParams: ['1d']
  });

  addFuncDef({
    name: 'timeStack',
    category: categories.Transform,
    params: [
      { name: "timeShiftUnit", type: "select", options: ['1h', '6h', '12h', '1d', '2d', '7d', '14d', '30d'] },
      { name: "timeShiftStart", type: "int" },
      { name: "timeShiftEnd", type: "int" }
    ],
    defaultParams: ['1d', 0, 7]
  });

  addFuncDef({
    name: 'summarize',
    category: categories.Transform,
    params: [
      { name: "interval", type: "string" },
      { name: "func", type: "select", options: ['sum', 'avg', 'min', 'max', 'last'] },
      { name: "alignToFrom", type: "boolean", optional: true, options: ['false', 'true'] },
    ],
    defaultParams: ['1h', 'sum', 'false']
  });

  addFuncDef({
    name: 'smartSummarize',
    category: categories.Transform,
    params: [{ name: "interval", type: "string" }, { name: "func", type: "select", options: ['sum', 'avg', 'min', 'max', 'last'] }],
    defaultParams: ['1h', 'sum']
  });

  addFuncDef({
    name: 'absolute',
    category: categories.Transform,
  });

  addFuncDef({
    name: 'hitcount',
    category: categories.Transform,
    params: [{ name: "interval", type: "string" }],
    defaultParams: ['10s']
  });

  addFuncDef({
    name: 'log',
    category: categories.Transform,
    params: [{ name: "base", type: "int" }],
    defaultParams: ['10']
  });

  addFuncDef({
    name: 'averageAbove',
    category: categories.Filter,
    params: [{ name: "n", type: "int", }],
    defaultParams: [25]
  });

  addFuncDef({
    name: 'averageBelow',
    category: categories.Filter,
    params: [{ name: "n", type: "int", }],
    defaultParams: [25]
  });

  addFuncDef({
    name: 'currentAbove',
    category: categories.Filter,
    params: [{ name: "n", type: "int", }],
    defaultParams: [25]
  });

  addFuncDef({
    name: 'currentBelow',
    category: categories.Filter,
    params: [{ name: "n", type: "int", }],
    defaultParams: [25]
  });

  addFuncDef({
    name: 'maximumAbove',
    category: categories.Filter,
    params: [{ name: "value", type: "int" }],
    defaultParams: [0]
  });

  addFuncDef({
    name: 'maximumBelow',
    category: categories.Filter,
    params: [{ name: "value", type: "int" }],
    defaultParams: [0]
  });

  addFuncDef({
    name: 'minimumAbove',
    category: categories.Filter,
    params: [{ name: "value", type: "int" }],
    defaultParams: [0]
  });

  addFuncDef({
    name: 'minimumBelow',
    category: categories.Filter,
    params: [{ name: "value", type: "int" }],
    defaultParams: [0]
  });

  addFuncDef({
    name: 'limit',
    category: categories.Filter,
    params: [{ name: "n", type: "int" }],
    defaultParams: [5]
  });

  addFuncDef({
    name: 'mostDeviant',
    category: categories.Filter,
    params: [{ name: "n", type: "int" }],
    defaultParams: [10]
  });

  addFuncDef({
    name: "exclude",
    category: categories.Filter,
    params: [{ name: "exclude", type: 'string' }],
    defaultParams: ['exclude']
  });

  addFuncDef({
    name: 'highestCurrent',
    category: categories.Filter,
    params: [{ name: "count", type: "int" }],
    defaultParams: [5]
  });

  addFuncDef({
    name: 'highestMax',
    category: categories.Filter,
    params: [{ name: "count", type: "int" }],
    defaultParams: [5]
  });

  addFuncDef({
    name: 'lowestCurrent',
    category: categories.Filter,
    params: [{ name: "count", type: "int" }],
    defaultParams: [5]
  });

  addFuncDef({
    name: 'movingAverage',
    category: categories.Filter,
    params: [{ name: "windowSize", type: "int_or_interval", options: ['5', '7', '10', '5min', '10min', '30min', '1hour'] }],
    defaultParams: [10]
  });

  addFuncDef({
    name: 'movingMedian',
    category: categories.Filter,
    params: [{ name: "windowSize", type: "int_or_interval", options: ['5', '7', '10', '5min', '10min', '30min', '1hour'] }],
    defaultParams: ['5']
  });

  addFuncDef({
    name: 'stdev',
    category: categories.Filter,
    params: [{ name: "n", type: "int" }, { name: "tolerance", type: "int" }],
    defaultParams: [5,0.1]
  });

  addFuncDef({
    name: 'highestAverage',
    category: categories.Filter,
    params: [{ name: "count", type: "int" }],
    defaultParams: [5]
  });

  addFuncDef({
    name: 'lowestAverage',
    category: categories.Filter,
    params: [{ name: "count", type: "int" }],
    defaultParams: [5]
  });

  addFuncDef({
    name: 'removeAbovePercentile',
    category: categories.Filter,
    params: [{ name: "n", type: "int" }],
    defaultParams: [5]
  });

  addFuncDef({
    name: 'removeAboveValue',
    category: categories.Filter,
    params: [{ name: "n", type: "int" }],
    defaultParams: [5]
  });

  addFuncDef({
    name: 'removeBelowPercentile',
    category: categories.Filter,
    params: [{ name: "n", type: "int" }],
    defaultParams: [5]
  });

  addFuncDef({
    name: 'removeBelowValue',
    category: categories.Filter,
    params: [{ name: "n", type: "int" }],
    defaultParams: [5]
  });

  addFuncDef({
    name: 'useSeriesAbove',
    category: categories.Filter,
    params: [
      { name: "value", type: "int" },
      { name: "search", type: "string" },
      { name: "replace", type: "string" }
    ],
    defaultParams: [0, 'search', 'replace']
  });

  ////////////////////
  // Graphite 1.0.x //
  ////////////////////

  addFuncDef({
    name: 'aggregateLine',
    category: categories.Combine,
    params: [{ name: "func", type: "select", options: ['sum', 'avg', 'min', 'max', 'last']}],
    defaultParams: ['avg'],
    version: '1.0'
  });

  addFuncDef({
    name: 'averageOutsidePercentile',
    category: categories.Filter,
    params: [{ name: "n", type: "int", }],
    defaultParams: [95],
    version: '1.0'
  });

  addFuncDef({
    name: 'delay',
    category: categories.Transform,
    params: [{ name: 'steps', type: 'int', }],
    defaultParams: [1],
    version: '1.0'
  });

  addFuncDef({
    name: 'exponentialMovingAverage',
    category: categories.Calculate,
    params: [{ name: 'windowSize', type: 'int_or_interval', options: ['5', '7', '10', '5min', '10min', '30min', '1hour'] }],
    defaultParams: [10],
    version: '1.0'
  });

  addFuncDef({
    name: 'fallbackSeries',
    category: categories.Special,
    params: [{ name: 'fallback', type: 'string' }],
    defaultParams: ['constantLine(0)'],
    version: '1.0'
  });

  addFuncDef({
    name: "grep",
    category: categories.Filter,
    params: [{ name: "grep", type: 'string' }],
    defaultParams: ['grep'],
    version: '1.0'
  });

  addFuncDef({
    name: "groupByNodes",
    category: categories.Special,
    params: [
      {
        name: "function",
        type: "string",
        options: ['sum', 'avg', 'maxSeries']
      },
      { name: "node", type: "int", options: [0,1,2,3,4,5,6,7,8,9,10,12] },
      { name: "node", type: "int", options: [0,-1,-2,-3,-4,-5,-6,-7], optional: true },
      { name: "node", type: "int", options: [0,-1,-2,-3,-4,-5,-6,-7], optional: true },
      { name: "node", type: "int", options: [0,-1,-2,-3,-4,-5,-6,-7], optional: true },
    ],
    defaultParams: ["sum", 3],
    version: '1.0'
  });

  addFuncDef({
    name: 'integralByInterval',
    category: categories.Transform,
    params: [{ name: "intervalUnit", type: "select", options: ['1h', '6h', '12h', '1d', '2d', '7d', '14d', '30d'] }],
    defaultParams: ['1d'],
    version: '1.0'
  });

  addFuncDef({
    name: 'interpolate',
    category: categories.Transform,
    params: [{ name: 'limit', type: 'int', optional: true}],
    defaultParams: [],
    version: '1.0'
  });

  addFuncDef({
    name: 'invert',
    category: categories.Transform,
    version: '1.0'
  });

  addFuncDef({
    name: 'isNonNull',
    category: categories.Combine,
    version: '1.0'
  });

  addFuncDef({
    name: 'linearRegression',
    category: categories.Calculate,
    params: [
      { name: "startSourceAt", type: "select", options: ['-1h', '-6h', '-12h', '-1d', '-2d', '-7d', '-14d', '-30d'], optional: true },
      { name: "endSourceAt", type: "select", options: ['-1h', '-6h', '-12h', '-1d', '-2d', '-7d', '-14d', '-30d'], optional: true }
    ],
    defaultParams: [],
    version: '1.0'
  });

  addFuncDef({
    name: 'mapSeries',
    shortName: 'map',
    params: [{ name: "node", type: 'int' }],
    defaultParams: [3],
    category: categories.Combine,
    version: '1.0'
  });

  addFuncDef({
    name: 'movingMin',
    category: categories.Calculate,
    params: [{ name: 'windowSize', type: 'int_or_interval', options: ['5', '7', '10', '5min', '10min', '30min', '1hour'] }],
    defaultParams: [10],
    version: '1.0'
  });

  addFuncDef({
    name: 'movingMax',
    category: categories.Calculate,
    params: [{ name: 'windowSize', type: 'int_or_interval', options: ['5', '7', '10', '5min', '10min', '30min', '1hour'] }],
    defaultParams: [10],
    version: '1.0'
  });

  addFuncDef({
    name: 'movingSum',
    category: categories.Calculate,
    params: [{ name: 'windowSize', type: 'int_or_interval', options: ['5', '7', '10', '5min', '10min', '30min', '1hour'] }],
    defaultParams: [10],
    version: '1.0'
  });

  addFuncDef({
    name: "multiplySeriesWithWildcards",
    category: categories.Calculate,
    params: [
      { name: "position", type: "int", options: [0,1,2,3,4,5,6,7,8,9,10,12] },
      { name: "position", type: "int", options: [0,-1,-2,-3,-4,-5,-6,-7], optional: true },
      { name: "position", type: "int", options: [0,-1,-2,-3,-4,-5,-6,-7], optional: true },
      { name: "position", type: "int", options: [0,-1,-2,-3,-4,-5,-6,-7], optional: true },
    ],
    defaultParams: [2],
    version: '1.0'
  });

  addFuncDef({
    name: 'offsetToZero',
    category: categories.Transform,
    version: '1.0'
  });

  addFuncDef({
    name: 'pow',
    category: categories.Transform,
    params: [{ name: 'factor', type: 'int' }],
    defaultParams: [10],
    version: '1.0'
  });

  addFuncDef({
    name: 'powSeries',
    category: categories.Transform,
    params: optionalSeriesRefArgs,
    defaultParams: [''],
    version: '1.0'
  });

  addFuncDef({
    name: 'reduceSeries',
    shortName: 'reduce',
    params: [
      { name: "function", type: 'string', options: ['asPercent', 'diffSeries', 'divideSeries'] },
      { name: "reduceNode", type: 'int', options: [0,1,2,3,4,5,6,7,8,9,10,11,12,13] },
      { name: "reduceMatchers", type: 'string' },
      { name: "reduceMatchers", type: 'string' },
    ],
    defaultParams: ['asPercent', 2, 'used_bytes', 'total_bytes'],
    category: categories.Combine,
    version: '1.0'
  });

  addFuncDef({
    name: 'removeBetweenPercentile',
    category: categories.Filter,
    params: [{ name: "n", type: "int", }],
    defaultParams: [95],
    version: '1.0'
  });

  addFuncDef({
    name: 'removeEmptySeries',
    category: categories.Filter,
    version: '1.0'
  });

  addFuncDef({
    name: 'squareRoot',
    category: categories.Transform,
    version: '1.0'
  });

  addFuncDef({
    name: 'timeSlice',
    category: categories.Transform,
    params: [
      { name: "startSliceAt", type: "select", options: ['-1h', '-6h', '-12h', '-1d', '-2d', '-7d', '-14d', '-30d']},
      { name: "endSliceAt", type: "select", options: ['-1h', '-6h', '-12h', '-1d', '-2d', '-7d', '-14d', '-30d'], optional: true }
    ],
    defaultParams: ['-1h'],
    version: '1.0'
  });

  addFuncDef({
    name: 'weightedAverage',
    category: categories.Filter,
    params: [
      { name: 'other', type: 'value_or_series', optional: true },
      { name: "node", type: "int", options: [0,1,2,3,4,5,6,7,8,9,10,12] },
    ],
    defaultParams: ['#A', 4],
    version: '1.0'
  });

  addFuncDef({
    name: 'seriesByTag',
    category: categories.Special,
    params: [
      { name: "tagExpression", type: "string" },
      { name: "tagExpression", type: "string", optional: true },
      { name: "tagExpression", type: "string", optional: true },
      { name: "tagExpression", type: "string", optional: true },
    ],
    version: '1.1'
  });

  addFuncDef({
    name: "groupByTags",
    category: categories.Special,
    params: [
      {
        name: "function",
        type: "string",
        options: ['sum', 'avg', 'maxSeries']
      },
      { name: "tag", type: "string" },
      { name: "tag", type: "string", optional: true },
      { name: "tag", type: "string", optional: true },
      { name: "tag", type: "string", optional: true },
    ],
    defaultParams: ["sum", "tag"],
    version: '1.1'
  });

  addFuncDef({
    name: "aliasByTags",
    category: categories.Special,
    params: [
      { name: "tag", type: "string" },
      { name: "tag", type: "string", optional: true },
      { name: "tag", type: "string", optional: true },
      { name: "tag", type: "string", optional: true },
    ],
    defaultParams: ["tag"],
    version: '1.1'
  });

  _.each(categories, function(funcList, catName) {
    categories[catName] = _.sortBy(funcList, 'name');
  });

  function FuncInstance(funcDef, options) {
    this.def = funcDef;
    this.params = [];

    if (options && options.withDefaultParams) {
      this.params = funcDef.defaultParams.slice(0);
    }

    this.updateText();
  }

  FuncInstance.prototype.render = function(metricExp) {
    var str = this.def.name + '(';
    var parameters = _.map(this.params, function(value, index) {

      var paramType = this.def.params[index].type;
      if (paramType === 'int' || paramType === 'value_or_series' || paramType === 'boolean') {
        return value;
      }
      else if (paramType === 'int_or_interval' && $.isNumeric(value)) {
        return value;
      }

      return "'" + value + "'";

    }.bind(this));

    if (metricExp) {
      parameters.unshift(metricExp);
    }

    return str + parameters.join(', ') + ')';
  };

  FuncInstance.prototype._hasMultipleParamsInString = function(strValue, index) {
    if (strValue.indexOf(',') === -1) {
      return false;
    }

    return this.def.params[index + 1] && this.def.params[index + 1].optional;
  };

  FuncInstance.prototype.updateParam = function(strValue, index) {
    // handle optional parameters
    // if string contains ',' and next param is optional, split and update both
    if (this._hasMultipleParamsInString(strValue, index)) {
      _.each(strValue.split(','), function(partVal, idx) {
        this.updateParam(partVal.trim(), index + idx);
      }.bind(this));
      return;
    }

    if (strValue === '' && this.def.params[index].optional) {
      this.params.splice(index, 1);
    }
    else {
      this.params[index] = strValue;
    }

    this.updateText();
  };

  FuncInstance.prototype.updateText = function () {
    if (this.params.length === 0) {
      this.text = this.def.name + '()';
      return;
    }

    var text = this.def.name + '(';
    text += this.params.join(', ');
    text += ')';
    this.text = text;
  };

  function isVersionRelatedFunction(func, graphiteVersion) {
    // functions without a version requirement are available everywhere
    return !func.version || version.isVersionGtOrEq(graphiteVersion, func.version);
  }

  return {
    createFuncInstance: function(funcDef, options) {
      if (_.isString(funcDef)) {
        if (!index[funcDef]) {
          throw { message: 'Method not found ' + funcDef };
        }
        funcDef = index[funcDef];
      }
      return new FuncInstance(funcDef, options);
    },

    getFuncDef: function(name) {
      return index[name];
    },

    getCategories: function(graphiteVersion) {
      var filteredCategories = {};
      _.each(categories, function(functions, category) {
        var filteredFuncs = _.filter(functions, function(func) {
          return isVersionRelatedFunction(func, graphiteVersion);
        });
        if (filteredFuncs.length) {
          filteredCategories[category] = filteredFuncs;
        }
      });

      return filteredCategories;
    }
  };

});

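A sketch of how these definitions get used (assuming the module above is loaded as gfunc via the app's loader; the metric path is hypothetical):

// createFuncInstance accepts a definition name or the definition object itself:
var func = gfunc.createFuncInstance('sumSeriesWithWildcards', { withDefaultParams: true });
func.params; // [3] - copied from defaultParams
func.render('apps.*.backend.counters.requests.count');
// -> "sumSeriesWithWildcards(apps.*.backend.counters.requests.count, 3)"

// getCategories filters by the connected Graphite version:
gfunc.getCategories('0.9').Special.some(function(f) { return f.name === 'seriesByTag'; }); // false
gfunc.getCategories('1.1').Special.some(function(f) { return f.name === 'seriesByTag'; }); // true
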
@@ -1,4 +1,6 @@
import _ from 'lodash';
import {isVersionGtOrEq} from 'app/core/utils/version';


var index = [];
var categories = {

@@ -968,13 +970,7 @@ FuncInstance.prototype.updateText = function() {
};

function isVersionRelatedFunction(func, graphiteVersion) {
  return isVersionGreaterOrEqual(graphiteVersion, func.version) || !func.version;
}

function isVersionGreaterOrEqual(a, b) {
  var a_num = Number(a);
  var b_num = Number(b);
  return a_num >= b_num;
  return isVersionGtOrEq(graphiteVersion, func.version) || !func.version;
}

export default {

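Why the swap in that hunk matters (a sketch; values hypothetical): the removed helper compared versions with Number(), which cannot parse multi-dot version strings:

Number('1.1') >= Number('1.0');    // true  - worked for two-part versions
Number('1.1.1') >= Number('1.1');  // false - Number('1.1.1') is NaN
isVersionGtOrEq('1.1.1', '1.1');   // true  - SemVersion compares field by field
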
@@ -0,0 +1,284 @@
import _ from 'lodash';
import gfunc from './gfunc';
import {Parser} from './parser';

export default class GraphiteQuery {
  target: any;
  functions: any[];
  segments: any[];
  tags: any[];
  error: any;
  seriesByTagUsed: boolean;
  checkOtherSegmentsIndex: number;
  removeTagValue: string;
  templateSrv: any;
  scopedVars: any;

  /** @ngInject */
  constructor(target, templateSrv?, scopedVars?) {
    this.target = target;
    this.parseTarget();

    this.removeTagValue = '-- remove tag --';
  }

  parseTarget() {
    this.functions = [];
    this.segments = [];
    this.tags = [];
    this.error = null;

    if (this.target.textEditor) {
      return;
    }

    var parser = new Parser(this.target.target);
    var astNode = parser.getAst();
    if (astNode === null) {
      this.checkOtherSegmentsIndex = 0;
      return;
    }

    if (astNode.type === 'error') {
      this.error = astNode.message + " at position: " + astNode.pos;
      this.target.textEditor = true;
      return;
    }

    try {
      this.parseTargetRecursive(astNode, null, 0);
    } catch (err) {
      console.log('error parsing target:', err.message);
      this.error = err.message;
      this.target.textEditor = true;
    }

    this.checkOtherSegmentsIndex = this.segments.length - 1;
    this.checkForSeriesByTag();
  }

  checkForSeriesByTag() {
    let seriesByTagFunc = _.find(this.functions, (func) => func.def.name === 'seriesByTag');
    if (seriesByTagFunc) {
      this.seriesByTagUsed = true;
      seriesByTagFunc.hidden = true;
      let tags = this.splitSeriesByTagParams(seriesByTagFunc);
      this.tags = tags;
    }
  }

  getSegmentPathUpTo(index) {
    var arr = this.segments.slice(0, index);

    return _.reduce(arr, function(result, segment) {
      return result ? (result + "." + segment.value) : segment.value;
    }, "");
  }

  parseTargetRecursive(astNode, func, index) {
    if (astNode === null) {
      return null;
    }

    switch (astNode.type) {
      case 'function':
        var innerFunc = gfunc.createFuncInstance(astNode.name, { withDefaultParams: false });
        _.each(astNode.params, (param, index) => {
          this.parseTargetRecursive(param, innerFunc, index);
        });

        innerFunc.updateText();
        this.functions.push(innerFunc);
        break;
      case 'series-ref':
        this.addFunctionParameter(func, astNode.value, index, this.segments.length > 0);
        break;
      case 'bool':
      case 'string':
      case 'number':
        if ((index - 1) >= func.def.params.length) {
          throw { message: 'invalid number of parameters to method ' + func.def.name };
        }
        var shiftBack = this.isShiftParamsBack(func);
        this.addFunctionParameter(func, astNode.value, index, shiftBack);
        break;
      case 'metric':
        if (this.segments.length > 0) {
          if (astNode.segments.length !== 1) {
            throw { message: 'Multiple metric params not supported, use text editor.' };
          }
          this.addFunctionParameter(func, astNode.segments[0].value, index, true);
          break;
        }

        this.segments = astNode.segments;
    }
  }

  isShiftParamsBack(func) {
    return func.def.name !== 'seriesByTag';
  }

  updateSegmentValue(segment, index) {
    this.segments[index].value = segment.value;
  }

  addSelectMetricSegment() {
    this.segments.push({value: "select metric"});
  }

  addFunction(newFunc) {
    this.functions.push(newFunc);
    this.moveAliasFuncLast();
  }

  moveAliasFuncLast() {
    var aliasFunc = _.find(this.functions, function(func) {
      return func.def.name === 'alias' ||
        func.def.name === 'aliasByNode' ||
        func.def.name === 'aliasByMetric';
    });

    if (aliasFunc) {
      this.functions = _.without(this.functions, aliasFunc);
      this.functions.push(aliasFunc);
    }
  }

  addFunctionParameter(func, value, index, shiftBack) {
    if (shiftBack) {
      index = Math.max(index - 1, 0);
    }
    func.params[index] = value;
  }

  removeFunction(func) {
    this.functions = _.without(this.functions, func);
  }

  updateModelTarget(targets) {
    // render query
    if (!this.target.textEditor) {
      var metricPath = this.getSegmentPathUpTo(this.segments.length);
      this.target.target = _.reduce(this.functions, wrapFunction, metricPath);
    }

    this.updateRenderedTarget(this.target, targets);

    // loop through other queries and update targetFull as needed
    for (const target of targets || []) {
      if (target.refId !== this.target.refId) {
        this.updateRenderedTarget(target, targets);
      }
    }
  }

  updateRenderedTarget(target, targets) {
    // render nested query
    var targetsByRefId = _.keyBy(targets, 'refId');

    // no references to self
    delete targetsByRefId[target.refId];

    var nestedSeriesRefRegex = /\#([A-Z])/g;
    var targetWithNestedQueries = target.target;

    // Keep interpolating until there are no query references
    // The reason for the loop is that the referenced query might contain another reference to another query
    while (targetWithNestedQueries.match(nestedSeriesRefRegex)) {
      var updated = targetWithNestedQueries.replace(nestedSeriesRefRegex, (match, g1) => {
        var t = targetsByRefId[g1];
        if (!t) {
          return match;
        }

        // no circular references
        delete targetsByRefId[g1];
        return t.target;
      });

      if (updated === targetWithNestedQueries) {
        break;
      }

      targetWithNestedQueries = updated;
    }

    delete target.targetFull;
    if (target.target !== targetWithNestedQueries) {
      target.targetFull = targetWithNestedQueries;
    }
  }

  splitSeriesByTagParams(func) {
    const tagPattern = /([^\!=~]+)([\!=~]+)([^\!=~]+)/;
    return _.flatten(_.map(func.params, (param: string) => {
      let matches = tagPattern.exec(param);
      if (matches) {
        let tag = matches.slice(1);
        if (tag.length === 3) {
          return {
            key: tag[0],
            operator: tag[1],
            value: tag[2]
          };
        }
      }
      return [];
    }));
  }

  getSeriesByTagFuncIndex() {
    return _.findIndex(this.functions, (func) => func.def.name === 'seriesByTag');
  }

  getSeriesByTagFunc() {
    let seriesByTagFuncIndex = this.getSeriesByTagFuncIndex();
    if (seriesByTagFuncIndex >= 0) {
      return this.functions[seriesByTagFuncIndex];
    } else {
      return undefined;
    }
  }

  addTag(tag) {
    let newTagParam = renderTagString(tag);
    this.getSeriesByTagFunc().params.push(newTagParam);
    this.tags.push(tag);
  }

  removeTag(index) {
    this.getSeriesByTagFunc().params.splice(index, 1);
    this.tags.splice(index, 1);
  }

  updateTag(tag, tagIndex) {
    this.error = null;

    if (tag.key === this.removeTagValue) {
      this.removeTag(tagIndex);
      return;
    }

    let newTagParam = renderTagString(tag);
    this.getSeriesByTagFunc().params[tagIndex] = newTagParam;
    this.tags[tagIndex] = tag;
  }

  renderTagExpressions(excludeIndex = -1) {
    return _.compact(_.map(this.tags, (tagExpr, index) => {
      // Don't render tag that we want to lookup
      if (index !== excludeIndex) {
        return tagExpr.key + tagExpr.operator + tagExpr.value;
      }
    }));
  }
}

function wrapFunction(target, func) {
  return func.render(target);
}

function renderTagString(tag) {
  return tag.key + tag.operator + tag.value;
}

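A usage sketch of the new model class (target is hypothetical; assumes the existing Parser handles the expression):

import GraphiteQuery from './graphite_query';

let target = {refId: 'A', target: "seriesByTag('app=backend', 'host=~web.*')"};
let query = new GraphiteQuery(target);

query.seriesByTagUsed;        // true - the seriesByTag func is found and hidden
query.tags;                   // [{key: 'app', operator: '=', value: 'backend'},
                              //  {key: 'host', operator: '=~', value: 'web.*'}]
query.renderTagExpressions(); // ['app=backend', 'host=~web.*']
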
@@ -1,17 +1,53 @@
<query-editor-row query-ctrl="ctrl" has-text-edit-mode="true">

  <div class="gf-form" ng-show="ctrl.target.textEditor">
    <input type="text" class="gf-form-input" ng-model="ctrl.target.target" spellcheck="false" ng-blur="ctrl.targetTextChanged()"></input>
  </div>

  <div ng-hide="ctrl.target.textEditor">
    <div class="gf-form-inline">
      <div class="gf-form">
        <label class="gf-form-label width-6 query-keyword">Series</label>
      </div>

      <div ng-repeat="tag in ctrl.queryModel.tags" class="gf-form">
        <gf-form-dropdown model="tag.key" lookup-text="false" allow-custom="false" label-mode="true" css-class="query-segment-key"
                          get-options="ctrl.getTags($index, $query)"
                          on-change="ctrl.tagChanged(tag, $index)">
        </gf-form-dropdown>
        <gf-form-dropdown model="tag.operator" lookup-text="false" allow-custom="false" label-mode="true" css-class="query-segment-operator"
                          get-options="ctrl.getTagOperators()"
                          on-change="ctrl.tagChanged(tag, $index)"
                          min-input-width="30">
        </gf-form-dropdown>
        <gf-form-dropdown model="tag.value" lookup-text="false" allow-custom="false" label-mode="true" css-class="query-segment-value"
                          get-options="ctrl.getTagValues(tag, $index, $query)"
                          on-change="ctrl.tagChanged(tag, $index)">
        </gf-form-dropdown>
        <label class="gf-form-label query-keyword" ng-if="ctrl.showDelimiter($index)">AND</label>
      </div>

      <div ng-repeat="segment in ctrl.segments" role="menuitem" class="gf-form">
        <metric-segment segment="segment" get-options="ctrl.getAltSegments($index)" on-change="ctrl.segmentValueChanged(segment, $index)"></metric-segment>
      </div>

      <div ng-repeat="func in ctrl.functions" class="gf-form">
        <span graphite-func-editor class="gf-form-label query-part"></span>
      <div ng-if="ctrl.queryModel.seriesByTagUsed" ng-repeat="segment in ctrl.addTagSegments" role="menuitem" class="gf-form">
        <metric-segment segment="segment" get-options="ctrl.getTagsAsSegments()" on-change="ctrl.addNewTag(segment)">
        </metric-segment>
      </div>

      <div class="gf-form gf-form--grow">
        <div class="gf-form-label gf-form-label--grow"></div>
      </div>
    </div>

    <div class="gf-form-inline">
      <div class="gf-form">
        <label class="gf-form-label width-6 query-keyword">Functions</label>
      </div>

      <div ng-repeat="func in ctrl.queryModel.functions" class="gf-form">
        <span graphite-func-editor class="gf-form-label query-part" ng-hide="func.hidden"></span>
      </div>

      <div class="gf-form dropdown">

@@ -19,8 +55,8 @@
      </div>

      <div class="gf-form gf-form--grow">
        <div class="gf-form-label gf-form-label--grow"></div>
      </div>
      <div class="gf-form-label gf-form-label--grow"></div>
    </div>
  </div>
</div>

@@ -3,24 +3,39 @@ import './func_editor';

import _ from 'lodash';
import gfunc from './gfunc';
import {Parser} from './parser';
import GraphiteQuery from './graphite_query';
import {QueryCtrl} from 'app/plugins/sdk';
import appEvents from 'app/core/app_events';

const GRAPHITE_TAG_OPERATORS = ['=', '!=', '=~', '!=~'];
const TAG_PREFIX = 'tag: ';

export class GraphiteQueryCtrl extends QueryCtrl {
  static templateUrl = 'partials/query.editor.html';

  functions: any[];
  queryModel: GraphiteQuery;
  segments: any[];
  addTagSegments: any[];
  removeTagValue: string;
  supportsTags: boolean;

  /** @ngInject **/
  constructor($scope, $injector, private uiSegmentSrv, private templateSrv) {
    super($scope, $injector);
    this.supportsTags = this.datasource.supportsTags;

    if (this.target) {
      this.target.target = this.target.target || '';
      this.parseTarget();
      this.queryModel = new GraphiteQuery(this.target, templateSrv);
      this.buildSegments();
    }

    this.removeTagValue = '-- remove tag --';
  }

  parseTarget() {
    this.queryModel.parseTarget();
    this.buildSegments();
  }

  toggleEditorMode() {

@@ -28,107 +43,31 @@
    this.parseTarget();
  }

  parseTarget() {
    this.functions = [];
    this.segments = [];
    this.error = null;
  buildSegments() {
    this.segments = _.map(this.queryModel.segments, segment => {
      return this.uiSegmentSrv.newSegment(segment);
    });

    if (this.target.textEditor) {
      return;
    }
    let checkOtherSegmentsIndex = this.queryModel.checkOtherSegmentsIndex || 0;
    this.checkOtherSegments(checkOtherSegmentsIndex);

    var parser = new Parser(this.target.target);
    var astNode = parser.getAst();
    if (astNode === null) {
      this.checkOtherSegments(0);
      return;
    }

    if (astNode.type === 'error') {
      this.error = astNode.message + " at position: " + astNode.pos;
      this.target.textEditor = true;
      return;
    }

    try {
      this.parseTargetRecursive(astNode, null, 0);
    } catch (err) {
      console.log('error parsing target:', err.message);
      this.error = err.message;
      this.target.textEditor = true;
    }

    this.checkOtherSegments(this.segments.length - 1);
  }

  addFunctionParameter(func, value, index, shiftBack) {
    if (shiftBack) {
      index = Math.max(index - 1, 0);
    }
    func.params[index] = value;
  }

  parseTargetRecursive(astNode, func, index) {
    if (astNode === null) {
      return null;
    }

    switch (astNode.type) {
      case 'function':
        var innerFunc = gfunc.createFuncInstance(astNode.name, { withDefaultParams: false });
        _.each(astNode.params, (param, index) => {
          this.parseTargetRecursive(param, innerFunc, index);
        });

        innerFunc.updateText();
        this.functions.push(innerFunc);
        break;
      case 'series-ref':
        this.addFunctionParameter(func, astNode.value, index, this.segments.length > 0);
        break;
      case 'bool':
      case 'string':
      case 'number':
        if ((index - 1) >= func.def.params.length) {
          throw { message: 'invalid number of parameters to method ' + func.def.name };
        }
        var shiftBack = this.isShiftParamsBack(func);
        this.addFunctionParameter(func, astNode.value, index, shiftBack);
        break;
      case 'metric':
        if (this.segments.length > 0) {
          if (astNode.segments.length !== 1) {
            throw { message: 'Multiple metric params not supported, use text editor.' };
          }
          this.addFunctionParameter(func, astNode.segments[0].value, index, true);
          break;
        }

        this.segments = _.map(astNode.segments, segment => {
          return this.uiSegmentSrv.newSegment(segment);
        });
    if (this.queryModel.seriesByTagUsed) {
      this.fixTagSegments();
    }
  }

  isShiftParamsBack(func) {
    return func.def.name !== 'seriesByTag';
  }

  getSegmentPathUpTo(index) {
    var arr = this.segments.slice(0, index);

    return _.reduce(arr, function(result, segment) {
      return result ? (result + "." + segment.value) : segment.value;
    }, "");
  addSelectMetricSegment() {
    this.queryModel.addSelectMetricSegment();
    this.segments.push(this.uiSegmentSrv.newSelectMetric());
  }

  checkOtherSegments(fromIndex) {
    if (fromIndex === 0) {
      this.segments.push(this.uiSegmentSrv.newSelectMetric());
      this.addSelectMetricSegment();
      return;
    }

    var path = this.getSegmentPathUpTo(fromIndex + 1);
    var path = this.queryModel.getSegmentPathUpTo(fromIndex + 1);
    if (path === "") {
      return Promise.resolve();
    }

@@ -136,12 +75,13 @@
    return this.datasource.metricFindQuery(path).then(segments => {
      if (segments.length === 0) {
        if (path !== '') {
          this.queryModel.segments = this.queryModel.segments.splice(0, fromIndex);
          this.segments = this.segments.splice(0, fromIndex);
          this.segments.push(this.uiSegmentSrv.newSelectMetric());
          this.addSelectMetricSegment();
        }
      } else if (segments[0].expandable) {
        if (this.segments.length === fromIndex) {
          this.segments.push(this.uiSegmentSrv.newSelectMetric());
          this.addSelectMetricSegment();
        } else {
          return this.checkOtherSegments(fromIndex + 1);
        }

@@ -157,12 +97,8 @@
    });
  }

  wrapFunction(target, func) {
    return func.render(target);
  }

  getAltSegments(index) {
    var query = index === 0 ? '*' : this.getSegmentPathUpTo(index) + '.*';
    var query = index === 0 ? '*' : this.queryModel.getSegmentPathUpTo(index) + '.*';
    var options = {range: this.panelCtrl.range, requestId: "get-alt-segments"};

    return this.datasource.metricFindQuery(query, options).then(segments => {

@@ -183,17 +119,44 @@
      // add wildcard option
      altSegments.unshift(this.uiSegmentSrv.newSegment('*'));
      return altSegments;

      if (this.supportsTags && index === 0) {
        this.removeTaggedEntry(altSegments);
        return this.addAltTagSegments(index, altSegments);
      } else {
        return altSegments;
      }
    }).catch(err => {
      return [];
    });
  }

  addAltTagSegments(index, altSegments) {
    return this.getTagsAsSegments().then((tagSegments) => {
      tagSegments = _.map(tagSegments, (segment) => {
        segment.value = TAG_PREFIX + segment.value;
        return segment;
      });
      return altSegments.concat(...tagSegments);
    });
  }

  removeTaggedEntry(altSegments) {
    altSegments = _.remove(altSegments, (s) => s.value === '_tagged');
  }

  segmentValueChanged(segment, segmentIndex) {
    this.error = null;
    this.queryModel.updateSegmentValue(segment, segmentIndex);

    if (this.functions.length > 0 && this.functions[0].def.fake) {
      this.functions = [];
    if (this.queryModel.functions.length > 0 && this.queryModel.functions[0].def.fake) {
      this.queryModel.functions = [];
    }

    if (segment.type === 'tag') {
      let tag = removeTagPrefix(segment.value);
      this.addSeriesByTagFunc(tag);
      return;
    }

    if (segment.expandable) {

@@ -202,81 +165,41 @@
        this.targetChanged();
      });
    } else {
      this.segments = this.segments.splice(0, segmentIndex + 1);
      this.spliceSegments(segmentIndex + 1);
    }

    this.setSegmentFocus(segmentIndex + 1);
    this.targetChanged();
  }

  spliceSegments(index) {
    this.segments = this.segments.splice(0, index);
    this.queryModel.segments = this.queryModel.segments.splice(0, index);
  }

  emptySegments() {
    this.queryModel.segments = [];
    this.segments = [];
  }

  targetTextChanged() {
    this.updateModelTarget();
    this.refresh();
  }

  updateModelTarget() {
    // render query
    if (!this.target.textEditor) {
      var metricPath = this.getSegmentPathUpTo(this.segments.length);
      this.target.target = _.reduce(this.functions, this.wrapFunction, metricPath);
    }

    this.updateRenderedTarget(this.target);

    // loop through other queries and update targetFull as needed
    for (const target of this.panelCtrl.panel.targets || []) {
      if (target.refId !== this.target.refId) {
        this.updateRenderedTarget(target);
      }
    }
  }

  updateRenderedTarget(target) {
    // render nested query
    var targetsByRefId = _.keyBy(this.panelCtrl.panel.targets, 'refId');

    // no references to self
    delete targetsByRefId[target.refId];

    var nestedSeriesRefRegex = /\#([A-Z])/g;
    var targetWithNestedQueries = target.target;

    // Keep interpolating until there are no query references
    // The reason for the loop is that the referenced query might contain another reference to another query
    while (targetWithNestedQueries.match(nestedSeriesRefRegex)) {
      var updated = targetWithNestedQueries.replace(nestedSeriesRefRegex, (match, g1) => {
        var t = targetsByRefId[g1];
        if (!t) {
          return match;
        }

        // no circular references
        delete targetsByRefId[g1];
        return t.target;
      });

      if (updated === targetWithNestedQueries) {
        break;
      }

      targetWithNestedQueries = updated;
    }

    delete target.targetFull;
    if (target.target !== targetWithNestedQueries) {
      target.targetFull = targetWithNestedQueries;
    }
    this.queryModel.updateModelTarget(this.panelCtrl.panel.targets);
  }

  targetChanged() {
    if (this.error) {
    if (this.queryModel.error) {
      return;
    }

    var oldTarget = this.target.target;
    var oldTarget = this.queryModel.target.target;
    this.updateModelTarget();

    if (this.target.target !== oldTarget) {
    if (this.queryModel.target.target !== oldTarget) {
      var lastSegment = this.segments.length > 0 ? this.segments[this.segments.length - 1] : {};
      if (lastSegment.value !== 'select metric') {
        this.panelCtrl.refresh();

@@ -284,39 +207,41 @@
      }
    }

  removeFunction(func) {
    this.functions = _.without(this.functions, func);
    this.targetChanged();
  }

  addFunction(funcDef) {
    var newFunc = gfunc.createFuncInstance(funcDef, { withDefaultParams: true });
    newFunc.added = true;
    this.functions.push(newFunc);

    this.moveAliasFuncLast();
    this.queryModel.addFunction(newFunc);
    this.smartlyHandleNewAliasByNode(newFunc);

    if (this.segments.length === 1 && this.segments[0].fake) {
      this.segments = [];
      this.emptySegments();
    }

    if (!newFunc.params.length && newFunc.added) {
      this.targetChanged();
    }

    if (newFunc.def.name === 'seriesByTag') {
      this.parseTarget();
    }
  }

  moveAliasFuncLast() {
    var aliasFunc = _.find(this.functions, function(func) {
      return func.def.name === 'alias' ||
        func.def.name === 'aliasByNode' ||
        func.def.name === 'aliasByMetric';
    });
  removeFunction(func) {
    this.queryModel.removeFunction(func);
    this.targetChanged();
  }

    if (aliasFunc) {
      this.functions = _.without(this.functions, aliasFunc);
      this.functions.push(aliasFunc);
    }
  addSeriesByTagFunc(tag) {
    let funcDef = gfunc.getFuncDef('seriesByTag');
    let newFunc = gfunc.createFuncInstance(funcDef, { withDefaultParams: false });
    let tagParam = `${tag}=select tag value`;
    newFunc.params = [tagParam];
    this.queryModel.addFunction(newFunc);
    newFunc.added = true;

    this.emptySegments();
    this.targetChanged();
    this.parseTarget();
  }

  smartlyHandleNewAliasByNode(func) {

@@ -325,7 +250,7 @@
    }

    for (var i = 0; i < this.segments.length; i++) {
      if (this.segments[i].value.indexOf('*') >= 0) {
        func.params[0] = i;
        func.added = false;
        this.targetChanged();

@ -333,4 +258,90 @@ export class GraphiteQueryCtrl extends QueryCtrl {
|
|||
      }
    }
  }

  getAllTags() {
    return this.datasource.getTags().then((values) => {
      let altTags = _.map(values, 'text');
      altTags.splice(0, 0, this.removeTagValue);
      return mapToDropdownOptions(altTags);
    });
  }

  getTags(index, tagPrefix) {
    let tagExpressions = this.queryModel.renderTagExpressions(index);
    return this.datasource.getTagsAutoComplete(tagExpressions, tagPrefix)
      .then((values) => {
        let altTags = _.map(values, 'text');
        altTags.splice(0, 0, this.removeTagValue);
        return mapToDropdownOptions(altTags);
      });
  }

  getTagsAsSegments() {
    let tagExpressions = this.queryModel.renderTagExpressions();
    return this.datasource.getTagsAutoComplete(tagExpressions)
      .then((values) => {
        return _.map(values, (val) => {
          return this.uiSegmentSrv.newSegment({value: val.text, type: 'tag', expandable: false});
        });
      });
  }

  getTagOperators() {
    return mapToDropdownOptions(GRAPHITE_TAG_OPERATORS);
  }

  getAllTagValues(tag) {
    let tagKey = tag.key;
    return this.datasource.getTagValues(tagKey).then((values) => {
      let altValues = _.map(values, 'text');
      return mapToDropdownOptions(altValues);
    });
  }

  getTagValues(tag, index, valuePrefix) {
    let tagExpressions = this.queryModel.renderTagExpressions(index);
    let tagKey = tag.key;
    return this.datasource.getTagValuesAutoComplete(tagExpressions, tagKey, valuePrefix).then((values) => {
      let altValues = _.map(values, 'text');
      return mapToDropdownOptions(altValues);
    });
  }

  tagChanged(tag, tagIndex) {
    this.queryModel.updateTag(tag, tagIndex);
    this.targetChanged();
  }

  addNewTag(segment) {
    let newTagKey = segment.value;
    let newTag = {key: newTagKey, operator: '=', value: 'select tag value'};
    this.queryModel.addTag(newTag);
    this.targetChanged();
    this.fixTagSegments();
  }

  removeTag(index) {
    this.queryModel.removeTag(index);
    this.targetChanged();
  }

  fixTagSegments() {
    // Adding a tag with the same name as one just removed misbehaves if a single
    // segment is reused instead of an array, so always rebuild the segment array.
    this.addTagSegments = [this.uiSegmentSrv.newPlusButton()];
  }

  showDelimiter(index) {
    return index !== this.queryModel.tags.length - 1;
  }
}
function mapToDropdownOptions(results) {
  return _.map(results, (value) => {
    return {text: value, value: value};
  });
}

function removeTagPrefix(value: string): string {
  return value.replace(TAG_PREFIX, '');
}
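Reviewer note: the autocomplete methods above all follow one shape: ask the datasource for candidate tags or values (optionally narrowed by the tag expressions already in the query), then map the results into dropdown options. A hedged sketch with a stubbed datasource (the `ds` stub is assumed for illustration, not part of this change):

// Sketch: how tag dropdowns get their options. `ds` stands in for the real
// datasource; only the {text} result shape matters here.
const ds = {
  getTagValues: (key: string) => Promise.resolve([{text: 'server1'}, {text: 'server2'}]),
};

function mapToDropdownOptions(results: string[]) {
  return results.map((value) => ({text: value, value: value}));
}

ds.getTagValues('host').then((values) => {
  const options = mapToDropdownOptions(values.map((v) => v.text));
  // => [{text: 'server1', value: 'server1'}, {text: 'server2', value: 'server2'}]
});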
@@ -48,7 +48,7 @@ describe('GraphiteQueryCtrl', function() {
    });

    it('should parse expression and build function model', function() {
-      expect(ctx.ctrl.functions.length).to.be(2);
+      expect(ctx.ctrl.queryModel.functions.length).to.be(2);
    });
  });

@@ -61,7 +61,7 @@ describe('GraphiteQueryCtrl', function() {
    });

    it('should add function with correct node number', function() {
-      expect(ctx.ctrl.functions[0].params[0]).to.be(2);
+      expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(2);
    });

    it('should update target', function() {

@@ -99,7 +99,7 @@ describe('GraphiteQueryCtrl', function() {
    });

    it('should add both series refs as params', function() {
-      expect(ctx.ctrl.functions[0].params.length).to.be(2);
+      expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(2);
    });
  });

@@ -115,7 +115,7 @@ describe('GraphiteQueryCtrl', function() {
    });

    it('should add function param', function() {
-      expect(ctx.ctrl.functions[0].params.length).to.be(1);
+      expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1);
    });
  });

@@ -131,7 +131,7 @@ describe('GraphiteQueryCtrl', function() {
    });

    it('should have correct func params', function() {
-      expect(ctx.ctrl.functions[0].params.length).to.be(1);
+      expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1);
    });
  });
|
@ -210,4 +210,113 @@ describe('GraphiteQueryCtrl', function() {
|
|||
});
|
||||
});
|
||||
|
||||
describe('when adding seriesByTag function', function() {
|
||||
beforeEach(function() {
|
||||
ctx.ctrl.target.target = '';
|
||||
ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{expandable: false}]));
|
||||
ctx.ctrl.parseTarget();
|
||||
ctx.ctrl.addFunction(gfunc.getFuncDef('seriesByTag'));
|
||||
});
|
||||
|
||||
it('should update functions', function() {
|
||||
expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).to.be(0);
|
||||
});
|
||||
|
||||
it('should update seriesByTagUsed flag', function() {
|
||||
expect(ctx.ctrl.queryModel.seriesByTagUsed).to.be(true);
|
||||
});
|
||||
|
||||
it('should update target', function() {
|
||||
expect(ctx.ctrl.target.target).to.be('seriesByTag()');
|
||||
});
|
||||
|
||||
it('should call refresh', function() {
|
||||
expect(ctx.panelCtrl.refresh.called).to.be(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when parsing seriesByTag function', function() {
|
||||
beforeEach(function() {
|
||||
ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')";
|
||||
ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{expandable: false}]));
|
||||
ctx.ctrl.parseTarget();
|
||||
});
|
||||
|
||||
it('should add tags', function() {
|
||||
const expected = [
|
||||
{key: 'tag1', operator: '=', value: 'value1'},
|
||||
{key: 'tag2', operator: '!=~', value: 'value2'}
|
||||
];
|
||||
expect(ctx.ctrl.queryModel.tags).to.eql(expected);
|
||||
});
|
||||
|
||||
it('should add plus button', function() {
|
||||
expect(ctx.ctrl.addTagSegments.length).to.be(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when tag added', function() {
|
||||
beforeEach(function() {
|
||||
ctx.ctrl.target.target = "seriesByTag()";
|
||||
ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{expandable: false}]));
|
||||
ctx.ctrl.parseTarget();
|
||||
ctx.ctrl.addNewTag({value: 'tag1'});
|
||||
});
|
||||
|
||||
it('should update tags with default value', function() {
|
||||
const expected = [
|
||||
{key: 'tag1', operator: '=', value: 'select tag value'}
|
||||
];
|
||||
expect(ctx.ctrl.queryModel.tags).to.eql(expected);
|
||||
});
|
||||
|
||||
it('should update target', function() {
|
||||
const expected = "seriesByTag('tag1=select tag value')";
|
||||
expect(ctx.ctrl.target.target).to.eql(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when tag changed', function() {
|
||||
beforeEach(function() {
|
||||
ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')";
|
||||
ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{expandable: false}]));
|
||||
ctx.ctrl.parseTarget();
|
||||
ctx.ctrl.tagChanged({key: 'tag1', operator: '=', value: 'new_value'}, 0);
|
||||
});
|
||||
|
||||
it('should update tags', function() {
|
||||
const expected = [
|
||||
{key: 'tag1', operator: '=', value: 'new_value'},
|
||||
{key: 'tag2', operator: '!=~', value: 'value2'}
|
||||
];
|
||||
expect(ctx.ctrl.queryModel.tags).to.eql(expected);
|
||||
});
|
||||
|
||||
it('should update target', function() {
|
||||
const expected = "seriesByTag('tag1=new_value', 'tag2!=~value2')";
|
||||
expect(ctx.ctrl.target.target).to.eql(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when tag removed', function() {
|
||||
beforeEach(function() {
|
||||
ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')";
|
||||
ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{expandable: false}]));
|
||||
ctx.ctrl.parseTarget();
|
||||
ctx.ctrl.removeTag(0);
|
||||
});
|
||||
|
||||
it('should update tags', function() {
|
||||
const expected = [
|
||||
{key: 'tag2', operator: '!=~', value: 'value2'}
|
||||
];
|
||||
expect(ctx.ctrl.queryModel.tags).to.eql(expected);
|
||||
});
|
||||
|
||||
it('should update target', function() {
|
||||
const expected = "seriesByTag('tag2!=~value2')";
|
||||
expect(ctx.ctrl.target.target).to.eql(expected);
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
|||
|
|
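Reviewer note: the 'when parsing seriesByTag function' specs above pin down how a tag expression splits into {key, operator, value}. A toy parser that reproduces those expectations (not the query-model implementation; the operator list is assumed from GRAPHITE_TAG_OPERATORS):

// Toy sketch: split a tag expression at the first operator, checking longer
// operators first so '!=~' is not mistaken for '!='.
const OPERATORS = ['!=~', '=~', '!=', '='];

function parseTagExpression(expr: string): {key: string, operator: string, value: string} | null {
  for (const op of OPERATORS) {
    const idx = expr.indexOf(op);
    if (idx > 0) {
      return {key: expr.slice(0, idx), operator: op, value: expr.slice(idx + op.length)};
    }
  }
  return null;
}

// parseTagExpression('tag2!=~value2')
// => {key: 'tag2', operator: '!=~', value: 'value2'}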
@@ -0,0 +1,55 @@
import {describe, beforeEach, it, expect} from 'test/lib/common';

import {SemVersion, isVersionGtOrEq} from 'app/core/utils/version';

describe("SemVersion", () => {
  let version = '1.0.0-alpha.1';

  describe('parsing', () => {
    it('should parse version properly', () => {
      let semver = new SemVersion(version);
      expect(semver.major).to.be(1);
      expect(semver.minor).to.be(0);
      expect(semver.patch).to.be(0);
      expect(semver.meta).to.be('alpha.1');
    });
  });

  describe('comparing', () => {
    beforeEach(() => {
      version = '3.4.5';
    });

    it('should detect greater version properly', () => {
      let semver = new SemVersion(version);
      let cases = [
        {value: '3.4.5', expected: true},
        {value: '3.4.4', expected: true},
        {value: '3.4.6', expected: false},
        {value: '4', expected: false},
        {value: '3.5', expected: false},
      ];
      cases.forEach((testCase) => {
        expect(semver.isGtOrEq(testCase.value)).to.be(testCase.expected);
      });
    });
  });

  describe('isVersionGtOrEq', () => {
    it('should compare versions properly (a >= b)', () => {
      let cases = [
        {values: ['3.4.5', '3.4.5'], expected: true},
        {values: ['3.4.5', '3.4.4'], expected: true},
        {values: ['3.4.5', '3.4.6'], expected: false},
        {values: ['3.4', '3.4.0'], expected: true},
        {values: ['3', '3.0.0'], expected: true},
        {values: ['3.1.1-beta1', '3.1'], expected: true},
        {values: ['3.4.5', '4'], expected: false},
        {values: ['3.4.5', '3.5'], expected: false},
      ];
      cases.forEach((testCase) => {
        expect(isVersionGtOrEq(testCase.values[0], testCase.values[1])).to.be(testCase.expected);
      });
    });
  });
});
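Reviewer note: the cases above encode a specific comparison semantics: compare major, then minor, then patch; missing minor/patch parts count as 0; pre-release metadata after '-' is ignored. A minimal sketch of that rule (hypothetical, not the shipped app/core/utils/version implementation):

// Sketch of the >= comparison the specs encode.
function isGtOrEqSketch(a: string, b: string): boolean {
  // Drop any '-meta' suffix, split on dots, and treat absent parts as 0.
  const parse = (v: string) => v.split('-')[0].split('.').map((n) => parseInt(n, 10) || 0);
  const [pa, pb] = [parse(a), parse(b)];
  for (let i = 0; i < 3; i++) {
    const x = pa[i] || 0;
    const y = pb[i] || 0;
    if (x !== y) {
      return x > y;
    }
  }
  return true; // all three parts equal
}

// isGtOrEqSketch('3.1.1-beta1', '3.1') => true
// isGtOrEqSketch('3.4.5', '4') => false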