update docs

This commit is contained in:
Zhang Peng 2019-10-24 18:15:59 +08:00
parent 34458c1c1c
commit 90908bb378
7 changed files with 246 additions and 58 deletions

View File

@ -0,0 +1,152 @@
#!/usr/bin/env bash
##############################################################################
# console color
BLACK="\033[1;30m"
RED="\033[1;31m"
GREEN="\033[1;32m"
YELLOW="\033[1;33m"
BLUE="\033[1;34m"
PURPLE="\033[1;35m"
CYAN="\033[1;36m"
RESET="$(tput sgr0)"
##############################################################################
JAVA_OPTS=""
APP_OPTS=""
packageJavaOpts() {
# GC OPTS
JAVA_OPTS="${JAVA_OPTS} -server -Xms8g -Xmx16g -Xss512k"
JAVA_OPTS="${JAVA_OPTS} -XX:+UseParallelOldGC -XX:+UseAdaptiveSizePolicy -XX:MaxGCPauseMillis=150"
JAVA_OPTS="${JAVA_OPTS} -Djava.security.egd=file:/dev/./urandom"
# DEBUG OPTS
if [[ ${debug} == "on" ]]; then
# Remote Debug
JAVA_OPTS="${JAVA_OPTS} -Xdebug -Xnoagent -Djava.compiler=NONE"
JAVA_OPTS="${JAVA_OPTS} -Xrunjdwp:transport=dt_socket,address=28889,server=y,suspend=n"
# GC LOG
JAVA_OPTS="${JAVA_OPTS} -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps"
JAVA_OPTS="${JAVA_OPTS} -verbose:gc -Xloggc:${LOG_PATH}/${APP_NAME}.gc.log"
JAVA_OPTS="${JAVA_OPTS} -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=100M"
# Heap Dump
JAVA_OPTS="${JAVA_OPTS} -XX:-OmitStackTraceInFastThrow -XX:+HeapDumpOnOutOfMemoryError"
JAVA_OPTS="${JAVA_OPTS} -XX:HeapDumpPath=${LOG_PATH}/${APP_NAME}.heapdump.hprof"
# JMX OPTS
IP=`ip addr|grep "inet "|grep -v 127.0.0.1|awk '{print $2}'|cut -d/ -f1`
JAVA_OPTS="${JAVA_OPTS} -Dcom.sun.management.jmxremote=true"
JAVA_OPTS="${JAVA_OPTS} -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false"
JAVA_OPTS="${JAVA_OPTS} -Djava.rmi.server.hostname=${IP} -Dcom.sun.management.jmxremote.port=18889"
fi
# APP OPTS
JAVA_OPTS="${JAVA_OPTS} -Dsun.net.inetaddr.ttl=60 -Djava.net.preferIPv4Stack=true"
JAVA_OPTS="${JAVA_OPTS} -Dspring.profiles.active=${profile} -Dfile.encoding=UTF-8"
# CLASSPATH
APP_OPTS=" -classpath lib/* -Dlogging.config=file:./config/logback.dev.xml --spring.config.location=classpath:/,classpath:/config/,file:./,file:./config/"
}
# Check whether the service is already running
pid=""
checkStarted() {
pid=`ps -ef | grep java | grep "${APP_NAME}" | grep -v grep | awk '{print $2}'`
if [[ -n "${pid}" ]]; then
return 0
else
return 1
fi
}
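# Callers rely on the return code, e.g. "if checkStarted; then ...", and read the matched PID from the global ${pid}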
main() {
case "${oper}" in
start )
startServer
;;
stop )
stopServer
;;
restart )
stopServer
sleep 5
startServer
;;
* )
echo "Invalid oper: ${oper}."
exit 1
esac
exit 0
}
stopServer() {
echo -n "stopping server: "
if checkStarted ;then
kill -9 ${pid}
printf "${GREEN}\n${APP_NAME} is stopped.${RESET}\n"
else
printf "${RED}\n${APP_NAME} fail to stop.${RESET}\n"
fi
}
startServer() {
printf "${BLUE}starting ${APP_NAME}...${RESET}\n"
if checkStarted ;then
printf "${YELLOW}[WARN] ${APP_NAME} already started!${RESET}\n"
printf "PID: ${pid}\n"
exit 1
fi
packageJavaOpts
printf "${CYAN}JVM OPTS:\n ${JAVA_OPTS}${RESET}\n"
if [[ ! -f "${LOG_PATH}/start.out" ]]; then
touch "${LOG_PATH}/start.out"
fi
nohup java ${JAVA_OPTS} -jar ${ROOT_DIR}/../spring-boot-app.jar ${APP_OPTS} >> ${LOG_PATH}/start.out 2>&1 &
printf "${GREEN}\n${APP_NAME} is started.${RESET}\n"
}
######################################## MAIN ########################################
# Set environment variables
export LANG="zh_CN.UTF-8"
ROOT_DIR=$(pwd)
APP_NAME=spring-boot-app
LOG_PATH=${ROOT_DIR}/../logs
mkdir -p ${LOG_PATH}
declare -a serial
serial=(start stop restart)
echo -n "请选择操作可选值start|stop|restart"
read oper
if ! echo "${serial[@]}" | grep -q ${oper}; then
echo "请选择正确操作可选值start|stop|restart"
exit 1
fi
if [[ ${oper} == "start" ]] || [[ "${oper}" == "restart" ]]; then
declare -a serial2
serial2=(prod dev test)
echo -n "选择 profile可选值prod|dev|test"
read profile
if ! echo "${serial2[@]}" | grep -q ${profile}; then
echo "请选择正确 profile可选值prod|dev|test"
exit 1
fi
declare -a serial3
serial3=(on off)
echo -n "是否启动 debug 模式可选值on|off"
read debug
if ! echo "${serial3[@]}" | grep -q ${debug}; then
echo "是否启动 debug 模式可选值on|off"
exit 1
fi
fi
main
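The script reads its answers from stdin, so it can also be driven non-interactively by piping them in. A minimal sketch, assuming the script is saved as server.sh (hypothetical name) and run from its expected working directory:
printf 'start\ndev\noff\n' | bash server.sh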

View File

@ -63,7 +63,7 @@ checkOsVersion() {
menus=( "配置系统" "安装软件" "退出" )
main() {
PS3="请输入命令编号:"
select item in ${menus[@]}
select item in "${menus[@]}"
do
case ${item} in
"配置系统")

View File

@ -12,7 +12,7 @@ EOF
menus=( "替换yum镜像" "安装基本的命令工具" "安装常用libs" "系统配置" "全部执行" "退出" )
main() {
PS3="请输入命令编号:"
select item in ${menus[@]}
select item in "${menus[@]}"
do
case ${item} in
"替换yum镜像")

View File

@ -0,0 +1,35 @@
#!/usr/bin/env bash
# ----------------------------------------------------------------------------------
# console color
BLACK="\033[1;30m"
RED="\033[1;31m"
GREEN="\033[1;32m"
YELLOW="\033[1;33m"
BLUE="\033[1;34m"
PURPLE="\033[1;35m"
CYAN="\033[1;36m"
RESET="$(tput sgr0)"
# ----------------------------------------------------------------------------------
printf "${PURPLE}"
cat << EOF
# ----------------------------------------------------------------------------------
# Arthas installation script
# @author: Zhang Peng
# ----------------------------------------------------------------------------------
EOF
printf "${RESET}"
printf "${BLUE}>>>>>>>> begin.\n${RESET}"
root=/opt/arthas
if [[ -n $1 ]]; then
root=$1
fi
mkdir -p ${root}
curl -o ${root}/arthas-boot.jar https://alibaba.github.io/arthas/arthas-boot.jar
printf "${GREEN}[OK]\n${RESET}"
printf "${BLUE}<<<<<<<< end.\n${RESET}"

View File

@ -1,7 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
<settings xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/SETTINGS/1.0.0"
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
<!-- 使用 aliyun maven 仓库加速下载 -->
<mirrors>
@ -18,4 +19,4 @@
<mirrorOf>central</mirrorOf>
</mirror>
</mirrors>
</settings>
</settings>

View File

@ -14,54 +14,54 @@
filebeat.prospectors:
# Each - is a prospector. Most options can be set at the prospector level, so
# you can use different prospectors for various configurations.
# Below are the prospector specific configurations.
- type: log
# Change to true to enable this prospector configuration.
enabled: true
# Paths that should be crawled and fetched. Glob based paths.
paths:
#- /var/log/*.log
#- c:\programdata\elasticsearch\logs\*
- /home/zp/log/*.log
# Exclude lines. A list of regular expressions to match. It drops the lines that are
# matching any regular expression from the list.
#exclude_lines: ['^DBG']
# Include lines. A list of regular expressions to match. It exports the lines that are
# matching any regular expression from the list.
#include_lines: ['^ERR', '^WARN']
# Exclude files. A list of regular expressions to match. Filebeat drops the files that
# are matching any regular expression from the list. By default, no files are dropped.
#exclude_files: ['.gz$']
# Optional additional fields. These fields can be freely picked
# to add additional information to the crawled log files for filtering
#fields:
# level: debug
# review: 1
### Multiline options
# Mutiline can be used for log messages spanning multiple lines. This is common
# for Java Stack Traces or C-Line Continuation
# The regexp Pattern that has to be matched. The example pattern matches all lines starting with [
#multiline.pattern: ^\[
# Defines if the pattern set under pattern should be negated or not. Default is false.
#multiline.negate: false
# Match can be set to "after" or "before". It is used to define if lines should be append to a pattern
# that was (not) matched before or after or as long as a pattern is not matched based on negate.
# Note: After is the equivalent to previous and before is the equivalent to to next in Logstash
#multiline.match: after
#============================= Filebeat modules ===============================
@ -123,25 +123,25 @@ setup.kibana:
# IPv6 addresses should always be defined as: https://[2001:db8::1]:5601
host: "192.168.28.11:5601"
#============================= Elastic Cloud ==================================
# These settings simplify using filebeat with the Elastic Cloud (https://cloud.elastic.co/).
# The cloud.id setting overwrites the `output.elasticsearch.hosts` and
# `setup.kibana.host` options.
# You can find the `cloud.id` in the Elastic Cloud web UI.
#cloud.id:
# The cloud.auth setting overwrites the `output.elasticsearch.username` and
# `output.elasticsearch.password` settings. The format is `<user>:<pass>`.
#cloud.auth:
#================================ Outputs =====================================
# Configure what output to use when sending the data collected by the beat.
#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
# Array of hosts to connect to.
#hosts: ["192.168.28.11:9200"]

View File

@ -3,7 +3,7 @@
<!-- logback中一共有5种有效级别分别是TRACE、DEBUG、INFO、WARN、ERROR优先级依次从低到高 -->
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<property name="FILE_NAME" value="javatool"/>
<property name="FILE_NAME" value="javatool" />
<!-- 将记录日志打印到控制台 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
@ -45,12 +45,12 @@
<!-- logger begin -->
<!-- 本项目的日志记录,分级打印 -->
<logger name="io.github.zp" level="TRACE">
<appender-ref ref="ELK-TCP"/>
<appender-ref ref="ALL"/>
<appender-ref ref="ELK-TCP" />
<appender-ref ref="ALL" />
</logger>
<root level="TRACE">
<appender-ref ref="STDOUT"/>
<appender-ref ref="STDOUT" />
</root>
<!-- logger end -->