From 9b82dbbbdb263f37ca06a054c1e1176122662610 Mon Sep 17 00:00:00 2001
From: Cao Xueliang
Date: Thu, 27 Mar 2025 15:15:20 +0800
Subject: [PATCH] modify containers to adapt to k8s deployment

Signed-off-by: Cao Xueliang
---
 container/auto-submit/prepare               |  31 +--
 container/build                             |  19 +-
 container/compass-ci-web/prepare            |  22 ++
 container/conserver/Dockerfile              |   4 +-
 container/data-api/prepare                  |  22 ++
 container/extract-stats/Dockerfile          |   2 +-
 container/extract-stats/prepare             |  21 ++
 container/extract-stats/start               |   2 +-
 container/git-mirror/repositories           |   4 +
 container/k8s-start-check/Dockerfile        |  27 +++
 container/k8s-start-check/build             |  15 ++
 container/k8s-start-check/openEuler.repo    |  26 +++
 container/k8s-start-check/prepare           |  14 ++
 container/k8s-start-check/requirements.txt  |  37 +++
 container/kibana-base/Dockerfile            |  23 ++
 container/kibana-base/kibana.yml            |   8 +
 container/kibana-logging/Dockerfile         |  19 +-
 container/kibana/Dockerfile                 |  21 +-
 container/lifecycle/prepare                 |  18 ++
 container/lifecycle/repositories            |   4 +
 container/logging-es-base/Dockerfile        | 110 +++++++++
 container/logging-es-base/elasticsearch.yml |  12 +
 container/logging-es/Dockerfile             | 108 +--------
 container/redis-base/Dockerfile             |  15 +-
 container/redis-base/redis.conf             |  64 ++++++
 container/redis/build                       |   4 +-
 container/result-webdav-base/Dockerfile     |  39 ++++
 container/result-webdav-base/build          |  13 ++
 container/result-webdav/Dockerfile          |  25 +-
 container/result-webdav/gpg-agent.conf      |   2 +
 container/result-webdav/nginx.conf          |   4 +-
 container/result-webdav/start               |   4 +-
 container/scheduler/my-start                |   2 +-
 container/scheduler/start                   |   2 +-
 container/serial-logging/prepare            |  14 ++
 container/serial-logging/repositories       |   4 +
 container/web-backend/Dockerfile            |  15 +-
 container/web-backend/prepare               |  28 +++
 sparrow/3-code/dev-env                      |   2 +-
 src/lib/data_api.rb                         |   1 -
 src/lib/etcd_client.cr                      |   9 +-
 src/lib/web_backend.rb                      |  14 --
 src/libpy/constants.py                      | 119 ++++++++++
 src/libpy/es_client.py                      | 241 +++++++++++++++-----
 src/libpy/etcd_client.py                    |  84 +++++++
 src/libpy/k8s_start_check.py                |  98 ++++++++
 src/libpy/redis_client.py                   |  49 ++++
 src/libpy/single_class.py                   |  16 ++
 src/monitoring/parse_serial_logs.cr         |  58 +----
 src/scheduler/redis_client.cr               |   4 +-
 src/scheduler/scheduler.cr                  |  22 +-
 51 files changed, 1188 insertions(+), 333 deletions(-)
 create mode 100755 container/compass-ci-web/prepare
 create mode 100755 container/data-api/prepare
 create mode 100755 container/extract-stats/prepare
 create mode 100644 container/git-mirror/repositories
 create mode 100644 container/k8s-start-check/Dockerfile
 create mode 100755 container/k8s-start-check/build
 create mode 100644 container/k8s-start-check/openEuler.repo
 create mode 100755 container/k8s-start-check/prepare
 create mode 100644 container/k8s-start-check/requirements.txt
 create mode 100644 container/kibana-base/Dockerfile
 create mode 100644 container/kibana-base/kibana.yml
 create mode 100755 container/lifecycle/prepare
 create mode 100644 container/lifecycle/repositories
 create mode 100644 container/logging-es-base/Dockerfile
 create mode 100644 container/logging-es-base/elasticsearch.yml
 create mode 100644 container/redis-base/redis.conf
 create mode 100644 container/result-webdav-base/Dockerfile
 create mode 100755 container/result-webdav-base/build
 create mode 100644 container/result-webdav/gpg-agent.conf
 create mode 100755 container/serial-logging/prepare
 create mode 100644 container/serial-logging/repositories
 create mode 100755 container/web-backend/prepare
 create mode 100644 src/libpy/constants.py
 create mode 100644 src/libpy/etcd_client.py
 create mode 100644 src/libpy/k8s_start_check.py
 create mode 100644 src/libpy/redis_client.py
 create mode 100644 src/libpy/single_class.py

diff --git a/container/auto-submit/prepare b/container/auto-submit/prepare
index cfc74ad2d..bba108296 100755
--- a/container/auto-submit/prepare
+++ b/container/auto-submit/prepare
@@ -5,23 +5,28 @@ CURRENT_PATH=$(dirname `readlink -f $0`)
 PROJECT_PATH=$(dirname $(dirname $CURRENT_PATH))
 
 declare -A REPOS=(
-["compass-ci"]="https://gitee.com/openeuler/compass-ci"
-["lkp-tests"]="https://gitee.com/compass-ci/lkp-tests"
+    ["compass-ci"]="https://gitee.com/openeuler/compass-ci"
+    ["lkp-tests"]="https://gitee.com/compass-ci/lkp-tests"
 )
 
 process_repos() {
-	for repo_dir in "${!REPOS[@]}"; do
-		local full_path="$CURRENT_PATH/$repo_dir"
+    for repo_dir in "${!REPOS[@]}"; do
+        local full_path="$CURRENT_PATH/$repo_dir"
 
-		if [[ -d "$full_path" ]]; then
-			rm -rf "$full_path"
-		fi
+        if [ -d "$full_path" ]; then
+            rm -rf "$full_path"
+        fi
 
-		if ! git clone --depth=1 "${REPOS[$repo_dir]}" "$full_path"; then
-			echo "ERROR: Failed to clone ${REPOS[$repo_dir]}" >&2
-			exit 1
-		fi
-	done
+        clone_opt="--depth=1"
+        if [ "$repo_dir" = "lkp-tests" ]; then
+            clone_opt=""
+        fi
+
+        if ! git clone ${clone_opt} "${REPOS[$repo_dir]}" "$full_path"; then
+            echo "ERROR: Failed to clone ${REPOS[$repo_dir]}" >&2
+            exit 1
+        fi
+    done
 }
 
-process_repos
+#process_repos

diff --git a/container/build b/container/build
index e948e4c3e..51ec6888b 100755
--- a/container/build
+++ b/container/build
@@ -14,6 +14,7 @@ BASE_IMAGE_URL=${BASE_IMAGE_URL:-"https://eulermaker.compass-ci.openeuler.openat
 
 CURRENT_PATH=$(dirname `readlink -f $0`)
 PROJECT_PATH=$(dirname $(dirname `readlink -f $0`))
+PROJECT_NAME=$(basename "$PROJECT_PATH")
 IAMGE_PATH=$CURRENT_PATH/_images
 
 . $PROJECT_PATH/container/defconfig.sh
@@ -82,19 +83,16 @@ case "$app_name" in
         [ "$arch" = "aarch64" ] && arch=arm64
         [ "$arch" = "x86_64" ] && arch=amd64
         build_args=(
-            --build-arg ARCH=$arch
             --build-arg VERSION=$version
         )
         ;;
     redis)
         build_args=(
-            --build-arg ARCH=$arch
             --build-arg PASSWD=$REDIS_PASSWD
         )
         ;;
     kibana|es)
         build_args=(
-            --build-arg ARCH=$arch
             --build-arg USER=$ES_SUPER_USER
             --build-arg PASSWORD=$ES_SUPER_PASSWORD
             --build-arg MEMORY=$available_memory
         )
         ;;
@@ -102,29 +100,26 @@ case "$app_name" in
     kibana-logging|logging-es)
         build_args=(
-            --build-arg ARCH=$arch
             --build-arg USER=$LOGGING_ES_USER
             --build-arg PASSWORD=$LOGGING_ES_PASSWORD
             --build-arg MEMORY=$available_memory
         )
         ;;
-    *)
-        build_args=(
-            --build-arg ARCH=$arch
-        )
-        ;;
 esac
 
 docker build --network=host $build_opt \
-    -t $app_name:$tag \
     -t $app_name:$tag_latest \
-    "${build_args[@]}" .
+    "${build_args[@]}" \
+    --build-arg ARCH=$arch \
+    --build-arg PROJECT_NAME=$PROJECT_NAME .
 
 popd
 
 if [ $PACK_FLAG == "true" ]; then
     echo "Packing image..."
-    docker save -o $image_path/$app_name-$tag-$arch.tar $app_name:$tag
+    [ "$arch" = "arm64" ] && arch=aarch64
+    [ "$arch" = "amd64" ] && arch=x86_64
+    docker save -o $image_path/$app_name-$tag-$arch.tar $app_name:$tag_latest
     gzip -9f $image_path/$app_name-$tag-$arch.tar
 fi
 }

diff --git a/container/compass-ci-web/prepare b/container/compass-ci-web/prepare
new file mode 100755
index 000000000..cb9eea841
--- /dev/null
+++ b/container/compass-ci-web/prepare
@@ -0,0 +1,22 @@
+#!/bin/bash
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+set -e
+
+CURRENT_PATH=$(dirname `readlink -f $0`)
+PROJECT_PATH=$(dirname $(dirname $CURRENT_PATH))
+. $PROJECT_PATH/container/defconfig.sh
+
+load_cci_defaults
+
+[[ $BASE_RESULT_URL ]] || BASE_RESULT_URL='https://api.compass-ci.openeuler.org'
+[[ $BASE_WEB_BACKEND_URL ]] || BASE_WEB_BACKEND_URL='/web_backend'
+
+if [ -d "$CURRENT_PATH/compass-ci-web" ]; then
+    rm -rf $CURRENT_PATH/compass-ci-web
+fi
+
+git clone https://gitee.com/openeuler/compass-ci-web
+sed -i "s#export const BASEURLRESULT = 'https://api.compass-ci.openeuler.org';#export const BASEURLRESULT = '$BASE_RESULT_URL';#g" $CURRENT_PATH/compass-ci-web/src/utils/baseUrl.js
+sed -i "s#const BASEURL = 'https://api.compass-ci.openeuler.org/web_backend';#export const BASEURL = '$BASE_WEB_BACKEND_URL';#g" $CURRENT_PATH/compass-ci-web/src/utils/axios.utils.js

diff --git a/container/conserver/Dockerfile b/container/conserver/Dockerfile
index 20afa0759..dc6f4a518 100644
--- a/container/conserver/Dockerfile
+++ b/container/conserver/Dockerfile
@@ -9,7 +9,7 @@ RUN sed -ri.origin 's|^https?://dl-cdn.alpinelinux.org|http://mirrors.huaweiclou
 RUN apk update
 RUN apk add gcc make g++ bash ipmitool
 
-RUN wget https://ghproxy.com/https://github.com/bstansell/conserver/releases/download/v8.2.4/conserver-8.2.4.tar.gz && \
+RUN wget https://gh-proxy.com/https://github.com/bstansell/conserver/releases/download/v8.2.4/conserver-8.2.4.tar.gz && \
     tar -xf conserver-8.2.4.tar.gz && \
     cd conserver-8.2.4 && \
     ./configure && make && make install && mkdir -p /var/consoles/
@@ -17,5 +17,5 @@ RUN wget https://ghproxy.com/https://github.com/bstansell/conserver/releases/dow
 RUN echo "console 782/tcp conserver" >> /etc/services
 COPY ipmi-sol /usr/local/bin/
 COPY conserver /usr/local/bin/
-COPY conserver.cf /etc/conserver.cf
+COPY conserver-head.cf /etc/conserver.cf
 ENTRYPOINT ["/usr/local/bin/conserver"]

diff --git a/container/data-api/prepare b/container/data-api/prepare
new file mode 100755
index 000000000..cecd0dab1
--- /dev/null
+++ b/container/data-api/prepare
@@ -0,0 +1,22 @@
+#!/bin/bash
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+set -e
+
+CURRENT_PATH=$(dirname `readlink -f $0`)
+PROJECT_PATH=$(dirname $(dirname $CURRENT_PATH))
+. $PROJECT_PATH/container/defconfig.sh
+
+load_cci_defaults
+
+if [ -d "$CURRENT_PATH/compass-ci" ]; then
+    rm -rf $CURRENT_PATH/compass-ci
+fi
+
+if [ -d "$CURRENT_PATH/lkp-tests" ]; then
+    rm -rf $CURRENT_PATH/lkp-tests
+fi
+
+git clone https://gitee.com/openeuler/compass-ci
+git clone https://gitee.com/compass-ci/lkp-tests

diff --git a/container/extract-stats/Dockerfile b/container/extract-stats/Dockerfile
index b92ef75b5..1b2b8c02f 100644
--- a/container/extract-stats/Dockerfile
+++ b/container/extract-stats/Dockerfile
@@ -11,7 +11,7 @@ RUN sed -ri.origin 's|^https?://dl-cdn.alpinelinux.org|http://mirrors.huaweiclou
 
 RUN umask 002 && \
     gem sources -r https://rubygems.org/ -a https://gems.ruby-china.com/ && \
-    gem install rest-client activesupport git json yaml elasticsearch:7.11.1 terminal-table
+    gem install securerandom:0.3.2 rest-client activesupport:7.1.5.1 git:1.19.0 json yaml elasticsearch:7.11.1 terminal-table
 
 ENV RUNTIME_DIR /c/cci/extract

diff --git a/container/extract-stats/prepare b/container/extract-stats/prepare
new file mode 100755
index 000000000..3c8eb8869
--- /dev/null
+++ b/container/extract-stats/prepare
@@ -0,0 +1,21 @@
+#!/bin/bash
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+set -e
+
+CURRENT_PATH=$(dirname `readlink -f $0`)
+PROJECT_PATH=$(dirname $(dirname $CURRENT_PATH))
+
+
+for tmp in compass-ci lkp-tests extract-stats; do
+
+    if [ -e "$CURRENT_PATH/$tmp" ]; then
+        rm -rf "$CURRENT_PATH/$tmp"
+    fi
+done
+
+$PROJECT_PATH/container/compile .
+
+git clone --depth=1 https://gitee.com/openeuler/compass-ci
+git clone --depth=1 https://gitee.com/compass-ci/lkp-tests

diff --git a/container/extract-stats/start b/container/extract-stats/start
index a0bedfa08..9d4cc60bc 100755
--- a/container/extract-stats/start
+++ b/container/extract-stats/start
@@ -12,7 +12,7 @@ start_pod
 names = Set.new %w[
   REDIS_HOST
   REDIS_PORT
-  REDIS_PASSWD
+  REDIS_PASSWORD
   ES_HOST
   ES_PORT
   ES_USER

diff --git a/container/git-mirror/repositories b/container/git-mirror/repositories
new file mode 100644
index 000000000..cfaf6237a
--- /dev/null
+++ b/container/git-mirror/repositories
@@ -0,0 +1,4 @@
+http://mirrors.huaweicloud.com/alpine/v3.18/main
+http://mirrors.huaweicloud.com/alpine/v3.18/community
+http://mirrors.huaweicloud.com/alpine/v3.8/main
+http://mirrors.huaweicloud.com/alpine/v3.8/community
\ No newline at end of file

diff --git a/container/k8s-start-check/Dockerfile b/container/k8s-start-check/Dockerfile
new file mode 100644
index 000000000..0fd7ea6ab
--- /dev/null
+++ b/container/k8s-start-check/Dockerfile
@@ -0,0 +1,27 @@
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+FROM openeuler/openeuler:22.03
+
+ARG PROJECT_NAME
+
+MAINTAINER He Shoucheng <15695901908@163.com>
+
+COPY openEuler.repo /etc/yum.repos.d/
+
+RUN yum clean all && \
+    yum update -y && \
+    yum install -y ca-certificates python3-3.9.9 shadow && \
+    yum install -y git util-linux python3-pip
+
+ENV PIP_SOURCE https://mirrors.huaweicloud.com/repository/pypi/simple/
+
+RUN groupadd -g 1999 committer && useradd -m -u 1090 -g 1999 lkp
+
+COPY requirements.txt .
+
+RUN umask 002 && pip3 install -r ./requirements.txt -i ${PIP_SOURCE}
+
+COPY --chown=1090 ${PROJECT_NAME} /opt/${PROJECT_NAME}
+
+WORKDIR /opt/${PROJECT_NAME}/src/libpy

diff --git a/container/k8s-start-check/build b/container/k8s-start-check/build
new file mode 100755
index 000000000..1abf20444
--- /dev/null
+++ b/container/k8s-start-check/build
@@ -0,0 +1,15 @@
+#!/bin/sh
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+. ../defconfig.sh
+
+load_cci_defaults
+
+docker_skip_rebuild "k8s-start-check"
+
+download_repo "cbs" "$git_branch_cbs"
+
+docker build -t k8s-start-check .
+
+push_image_remote "k8s-start-check"

diff --git a/container/k8s-start-check/openEuler.repo b/container/k8s-start-check/openEuler.repo
new file mode 100644
index 000000000..ddcf95954
--- /dev/null
+++ b/container/k8s-start-check/openEuler.repo
@@ -0,0 +1,26 @@
+[openEuler-source]
+name=openEuler-source
+baseurl=https://repo.huaweicloud.com/openeuler/openEuler-22.03-LTS/source/
+enabled=1
+gpgcheck=1
+gpgkey=https://repo.huaweicloud.com/openeuler/openEuler-22.03-LTS/source/RPM-GPG-KEY-openEuler
+
+[openEuler-os]
+name=openEuler-os
+baseurl=https://repo.huaweicloud.com/openeuler/openEuler-22.03-LTS/OS/$basearch/
+enabled=1
+gpgcheck=1
+gpgkey=https://repo.huaweicloud.com/openeuler/openEuler-22.03-LTS/OS/$basearch/RPM-GPG-KEY-openEuler
+
+[openEuler-everything]
+name=openEuler-everything
+baseurl=https://repo.huaweicloud.com/openeuler/openEuler-22.03-LTS/everything/$basearch/
+enabled=1
+gpgcheck=1
+gpgkey=https://repo.huaweicloud.com/openeuler/openEuler-22.03-LTS/everything/$basearch/RPM-GPG-KEY-openEuler
+
+[openEuler-EPOL]
+name=openEuler-epol
+baseurl=https://repo.huaweicloud.com/openeuler/openEuler-22.03-LTS/EPOL/main/$basearch/
+enabled=1
+gpgcheck=0

diff --git a/container/k8s-start-check/prepare b/container/k8s-start-check/prepare
new file mode 100755
index 000000000..000f0c4d6
--- /dev/null
+++ b/container/k8s-start-check/prepare
@@ -0,0 +1,14 @@
+#!/bin/bash
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+set -e
+
+CURRENT_PATH=$(dirname `readlink -f $0`)
+PROJECT_PATH=$(dirname $(dirname $(dirname `readlink -f $0`)))
+PROJECT_NAME=$(basename "$PROJECT_PATH")
+
+[ -d "$CURRENT_PATH/$PROJECT_NAME" ] && rm -rf "$CURRENT_PATH/$PROJECT_NAME"
+
+mkdir -p $CURRENT_PATH/$PROJECT_NAME/src
+cp -r $PROJECT_PATH/src/libpy $CURRENT_PATH/$PROJECT_NAME/src

diff --git a/container/k8s-start-check/requirements.txt b/container/k8s-start-check/requirements.txt
new file mode 100644
index 000000000..2c277bb0d
--- /dev/null
+++ b/container/k8s-start-check/requirements.txt
@@ -0,0 +1,37 @@
+grpcio==1.44.0
+aniso8601==9.0.1
+apscheduler==3.6.0
+attrs==21.4.0
+certifi==2022.6.15
+Cheetah3==3.2.5
+click==8.1.3
+elasticsearch==7.8.0
+flasgger==0.9.5
+Flask==1.1.2
+Flask-RESTful==0.3.9
+func_timeout==4.3.5
+gevent==21.8.0
+greenlet==1.1.2
+gitpython==3.1.18
+giturlparse==0.10.0
+gunicorn==20.1.0
+itsdangerous==2.0.1
+Jinja2==2.11.3
+jsonschema==4.6.0
+MarkupSafe==2.0.1
+mistune==2.0.2
+pika==1.3.0
+pyrsistent==0.18.1
+pytz==2022.1
+PyYAML==6.0
+redis==3.5.3
+redis-py-cluster==2.1.3
+requests==2.28.0
+schema==0.7.5
+six==1.16.0
+urllib3==1.26.9
+Werkzeug==2.0.2
+zope.event==4.5.0
+zope.interface==5.4.0
+etcd3==0.12.0
+protobuf==3.20.1

diff --git a/container/kibana-base/Dockerfile b/container/kibana-base/Dockerfile
new file mode 100644
index 000000000..8f11df2f2
--- /dev/null
+++ b/container/kibana-base/Dockerfile
@@ -0,0 +1,23 @@
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+FROM openeuler/openeuler:22.03
+
+ARG ARCH
+
+MAINTAINER Wu Zhende
+
+RUN yum clean all && \
+    yum -y update && \
+    yum -y install tar gzip java-11-openjdk findutils
+
+RUN curl https://artifacts.elastic.co/downloads/kibana/kibana-7.11.1-linux-${ARCH}.tar.gz -o kibana-7.11.1-linux-${ARCH}.tar.gz && \
+    tar -xzvf kibana-7.11.1-linux-${ARCH}.tar.gz -C /usr/share/ && \
+    rm -f kibana-7.11.1-linux-${ARCH}.tar.gz && \
+    mv /usr/share/kibana-7.11.1-linux-${ARCH} /usr/share/kibana && \
+    chmod -R a+rx /usr/share/kibana
+
+COPY kibana.yml /usr/share/kibana/config/
+WORKDIR /usr/share/kibana
+
+RUN sed -i 's/server.host: "0"/server.host: "0.0.0.0"/' config/kibana.yml

diff --git a/container/kibana-base/kibana.yml b/container/kibana-base/kibana.yml
new file mode 100644
index 000000000..423a1401c
--- /dev/null
+++ b/container/kibana-base/kibana.yml
@@ -0,0 +1,8 @@
+server.host: "0.0.0.0"
+elasticsearch.username:
+elasticsearch.password:
+i18n.locale: "zh-CN"
+xpack.reporting.enabled: true
+server.name: kibana
+elasticsearch.hosts: [ "http://192.168.71.63:9202" ]
+monitoring.ui.container.elasticsearch.enabled: true
\ No newline at end of file

diff --git a/container/kibana-logging/Dockerfile b/container/kibana-logging/Dockerfile
index 75888bd44..cf0379f81 100644
--- a/container/kibana-logging/Dockerfile
+++ b/container/kibana-logging/Dockerfile
@@ -1,33 +1,18 @@
 # SPDX-License-Identifier: MulanPSL-2.0+
 # Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
 
-ARG BASE_IMAGE
-
-FROM openeuler/openeuler:22.03
+FROM kibana-base:latest
 
 ARG USER
 ARG PASSWORD
-ARG ARCH
-
 MAINTAINER Wu Zhende
 
-RUN yum clean all && \
-    yum -y update && \
-    yum -y install tar gzip java-11-openjdk findutils
-
-RUN curl https://artifacts.elastic.co/downloads/kibana/kibana-7.11.1-linux-${ARCH}.tar.gz -o kibana-7.11.1-linux-${ARCH}.tar.gz && \
-    tar -xzvf kibana-7.11.1-linux-${ARCH}.tar.gz -C /usr/share/ && \
-    rm -f kibana-7.11.1-linux-${ARCH}.tar.gz && \
-    mv /usr/share/kibana-7.11.1-linux-${ARCH} /usr/share/kibana && \
-    chmod -R a+rx /usr/share/kibana
-
-COPY kibana.yml /usr/share/kibana/config/
 WORKDIR /usr/share/kibana
 
-RUN sed -i 's/server.host: "0"/server.host: "0.0.0.0"/' config/kibana.yml
 RUN sed -i '/^elasticsearch.username/c \elasticsearch.username: '"${USER}"'' config/kibana.yml
 RUN sed -i '/^elasticsearch.password/c \elasticsearch.password: '"${PASSWORD}"'' config/kibana.yml
+RUN sed -i '6i\i18n.locale: "zh-CN"' config/kibana.yml
 
 EXPOSE 5601
 CMD ["bin/kibana", "--allow-root"]

diff --git a/container/kibana/Dockerfile b/container/kibana/Dockerfile
index 952d0ab9c..3a6963dce 100644
--- a/container/kibana/Dockerfile
+++ b/container/kibana/Dockerfile
@@ -1,29 +1,20 @@
 # SPDX-License-Identifier: MulanPSL-2.0+
 # Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
 
-FROM openeuler/openeuler:22.03
+FROM kibana-base:latest
 
 MAINTAINER Wu Zhende
 
 ARG USER
 ARG PASSWORD
-ARG ARCH
 
-RUN yum clean all && \
-    yum -y update && \
-    yum -y install tar gzip java-11-openjdk findutils
-
-RUN curl https://artifacts.elastic.co/downloads/kibana/kibana-7.11.1-linux-${ARCH}.tar.gz -o kibana-7.11.1-linux-${ARCH}.tar.gz && \
-    tar -xzvf kibana-7.11.1-linux-${ARCH}.tar.gz -C /usr/share/ && \
-    rm -f kibana-7.11.1-linux-${ARCH}.tar.gz && \
-    mv /usr/share/kibana-7.11.1-linux-${ARCH} /usr/share/kibana && \
-    chmod -R a+rx /usr/share/kibana
-
-COPY kibana.yml /usr/share/kibana/config/
 WORKDIR /usr/share/kibana
 
-RUN sed -i '/^elasticsearch.username/c \elasticsearch.username: '"${USER}"'' config/kibana.yml
-RUN sed -i '/^elasticsearch.password/c \elasticsearch.password: '"${PASSWORD}"'' config/kibana.yml
+RUN sed -i '/^elasticsearch.username/c \elasticsearch.username: '"${USER}"'' config/kibana.yml && \
+    sed -i '/^elasticsearch.password/c \elasticsearch.password: '"${PASSWORD}"'' config/kibana.yml && \
+    sed -i '6i\i18n.locale: "zh-CN"' config/kibana.yml && \
+    sed -i '6ixpack.reporting.enabled: true' config/kibana.yml
+
 
 EXPOSE 5601
 CMD ["bin/kibana", "--allow-root"]

diff --git a/container/lifecycle/prepare b/container/lifecycle/prepare
new file mode 100755
index 000000000..c7bdc7a2f
--- /dev/null
+++ b/container/lifecycle/prepare
@@ -0,0 +1,18 @@
+#!/bin/bash
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+set -e
+
+CURRENT_PATH=$(dirname `readlink -f $0`)
+PROJECT_PATH=$(dirname $(dirname $CURRENT_PATH))
+
+
+for tmp in lifecycle; do
+
+    if [ -e "$CURRENT_PATH/$tmp" ]; then
+        rm -rf $CURRENT_PATH/$tmp
+    fi
+done
+
+$PROJECT_PATH/container/compile .

diff --git a/container/lifecycle/repositories b/container/lifecycle/repositories
new file mode 100644
index 000000000..f70737cfc
--- /dev/null
+++ b/container/lifecycle/repositories
@@ -0,0 +1,4 @@
+http://mirrors.huaweicloud.com/alpine/v3.16/main
+http://mirrors.huaweicloud.com/alpine/v3.16/community
+http://mirrors.huaweicloud.com/alpine/v3.13/main
+http://mirrors.huaweicloud.com/alpine/v3.13/community

diff --git a/container/logging-es-base/Dockerfile b/container/logging-es-base/Dockerfile
new file mode 100644
index 000000000..52b7174ed
--- /dev/null
+++ b/container/logging-es-base/Dockerfile
@@ -0,0 +1,110 @@
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+FROM openeuler/openeuler:22.03
+MAINTAINER Wu Zhende
+
+ARG ARCH
+
+RUN yum clean all && \
+    yum -y update && \
+    yum -y install tar gzip shadow
+
+RUN groupadd -r -g 1090 es && useradd -r -u 1090 -g es es
+
+RUN curl -o elasticsearch-7.11.1-linux-x86_64.tar.gz https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-7.11.1-linux-${ARCH}.tar.gz && \
+    tar -xzvf elasticsearch-7.11.1-linux-x86_64.tar.gz -C /usr/share && \
+    rm -f elasticsearch-7.11.1-linux-x86_64.tar.gz && \
+    mv /usr/share/elasticsearch-7.11.1 /usr/share/elasticsearch && \
+    chmod -R a+xr /usr/share/elasticsearch/ && \
+    rm /usr/share/elasticsearch/bin/elasticsearch-sql-cli && \
+    rm /usr/share/elasticsearch/bin/elasticsearch-sql-cli-7.11.1.jar && \
+    rm /usr/share/elasticsearch/modules/x-pack-core/log4j-1.2-api-2.11.1.jar && \
+    rm /usr/share/elasticsearch/modules/x-pack-identity-provider/log4j-slf4j-impl-2.11.1.jar && \
+    rm /usr/share/elasticsearch/modules/x-pack-security/log4j-slf4j-impl-2.11.1.jar && \
+    rm /usr/share/elasticsearch/lib/log4j-api-2.11.1.jar && \
+    rm /usr/share/elasticsearch/lib/log4j-core-2.11.1.jar && \
+    curl https://repo1.maven.org/maven2/org/apache/logging/log4j/log4j-1.2-api/2.20.0/log4j-1.2-api-2.20.0.jar -o /usr/share/elasticsearch/modules/x-pack-core/log4j-1.2-api-2.20.0.jar && \
+    curl https://repo1.maven.org/maven2/org/apache/logging/log4j/log4j-slf4j-impl/2.20.0/log4j-slf4j-impl-2.20.0.jar -o /usr/share/elasticsearch/modules/x-pack-identity-provider/log4j-slf4j-impl-2.20.0.jar && \
+    cp /usr/share/elasticsearch/modules/x-pack-identity-provider/log4j-slf4j-impl-2.20.0.jar /usr/share/elasticsearch/modules/x-pack-security/ && \
+    curl https://repo1.maven.org/maven2/org/apache/logging/log4j/log4j-api/2.20.0/log4j-api-2.20.0.jar -o /usr/share/elasticsearch/lib/log4j-api-2.20.0.jar && \
+    curl https://repo1.maven.org/maven2/org/apache/logging/log4j/log4j-core/2.20.0/log4j-core-2.20.0.jar -o /usr/share/elasticsearch/lib/log4j-core-2.20.0.jar && \
+    rm -f /usr/share/elasticsearch/modules/ingest-geoip/jackson-annotations-2.10.4.jar && \
+    rm -f /usr/share/elasticsearch/modules/ingest-geoip/jackson-databind-2.10.4.jar && \
+    rm -f /usr/share/elasticsearch/lib/jackson-core-2.10.4.jar && \
+    rm -f /usr/share/elasticsearch/lib/jackson-dataformat-yaml-2.10.4.jar && \
+    rm -f /usr/share/elasticsearch/lib/jackson-dataformat-smile-2.10.4.jar && \
+    rm -f /usr/share/elasticsearch/lib/jackson-dataformat-cbor-2.10.4.jar && \
+    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.15.2/jackson-annotations-2.15.2.jar -o /usr/share/elasticsearch/modules/ingest-geoip/jackson-annotations-2.15.2.jar && \
+    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.15.2/jackson-core-2.15.2.jar -o /usr/share/elasticsearch/lib/jackson-core-2.15.2.jar && \
+    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.15.2/jackson-databind-2.15.2.jar -o /usr/share/elasticsearch/modules/ingest-geoip/jackson-databind-2.15.2.jar && \
+    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.15.2/jackson-dataformat-yaml-2.15.2.jar -o /usr/share/elasticsearch/lib/jackson-dataformat-yaml-2.15.2.jar && \
+    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.15.2/jackson-dataformat-smile-2.15.2.jar -o /usr/share/elasticsearch/lib/jackson-dataformat-smile-2.15.2.jar && \
+    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.15.2/jackson-dataformat-cbor-2.15.2.jar -o /usr/share/elasticsearch/lib/jackson-dataformat-cbor-2.15.2.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-buffer-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-common-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-resolver-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-handler-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-transport-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-codec-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-codec-http-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-buffer-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-common-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-resolver-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-handler-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-transport-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-codec-4.1.49.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-codec-http-4.1.49.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-buffer/4.1.94.Final/netty-buffer-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-buffer-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-buffer/4.1.94.Final/netty-buffer-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-buffer-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-common/4.1.94.Final/netty-common-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-common-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-common/4.1.94.Final/netty-common-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-common-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-resolver/4.1.94.Final/netty-resolver-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-resolver-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-resolver/4.1.94.Final/netty-resolver-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-resolver-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-handler/4.1.94.Final/netty-handler-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-handler-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-handler/4.1.94.Final/netty-handler-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-handler-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-transport/4.1.94.Final/netty-transport-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-transport-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-transport/4.1.94.Final/netty-transport-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-transport-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-codec/4.1.94.Final/netty-codec-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-codec-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-codec/4.1.94.Final/netty-codec-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-codec-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-codec-http/4.1.94.Final/netty-codec-http-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-codec-http-4.1.94.Final.jar && \
+    curl https://repo1.maven.org/maven2/io/netty/netty-codec-http/4.1.94.Final/netty-codec-http-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-codec-http-4.1.94.Final.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-core/httpclient-4.5.10.jar && \
+    rm -f /usr/share/elasticsearch/modules/reindex/httpclient-4.5.10.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-identity-provider/httpclient-cache-4.5.10.jar && \
+    rm -f /usr/share/elasticsearch/modules/kibana/httpclient-4.5.10.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-fleet/httpclient-4.5.10.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-security/httpclient-cache-4.5.10.jar && \
+    curl https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.14/httpclient-4.5.14.jar -o /httpclient-4.5.14.jar && \
+    curl https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient-cache/4.5.14/httpclient-cache-4.5.14.jar -o /httpclient-cache-4.5.14.jar && \
+    cp /httpclient-4.5.14.jar /usr/share/elasticsearch/modules/x-pack-core/ && \
+    cp /httpclient-4.5.14.jar /usr/share/elasticsearch/modules/reindex/ && \
+    cp /httpclient-4.5.14.jar /usr/share/elasticsearch/modules/kibana/ && \
+    cp /httpclient-4.5.14.jar /usr/share/elasticsearch/modules/x-pack-fleet/ && \
+    cp /httpclient-cache-4.5.14.jar /usr/share/elasticsearch/modules/x-pack-identity-provider/ && \
+    cp /httpclient-cache-4.5.14.jar /usr/share/elasticsearch/modules/x-pack-security/ && \
+    rm -f /httpclient-4.5.14.jar && \
+    rm -f /httpclient-cache-4.5.14.jar && \
+    rm -f /usr/share/elasticsearch/lib/snakeyaml-1.26.jar && \
+    curl https://repo1.maven.org/maven2/org/yaml/snakeyaml/2.0/snakeyaml-2.0.jar -o /usr/share/elasticsearch/lib/snakeyaml-2.0.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-identity-provider/xmlsec-2.1.4.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-security/xmlsec-2.1.4.jar && \
+    curl https://repo1.maven.org/maven2/org/apache/santuario/xmlsec/2.1.8/xmlsec-2.1.8.jar -o /usr/share/elasticsearch/modules/x-pack-identity-provider/xmlsec-2.1.8.jar && \
+    cp /usr/share/elasticsearch/modules/x-pack-identity-provider/xmlsec-2.1.8.jar /usr/share/elasticsearch/modules/x-pack-security/ && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-identity-provider/guava-19.0.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-watcher/guava-27.1-jre.jar && \
+    rm -f /usr/share/elasticsearch/modules/x-pack-security/guava-19.0.jar && \
+    curl https://repo1.maven.org/maven2/com/google/guava/guava/32.1.1-jre/guava-32.1.1-jre.jar -o /usr/share/elasticsearch/modules/x-pack-identity-provider/guava-32.1.1-jre.jar && \
+    cp /usr/share/elasticsearch/modules/x-pack-identity-provider/guava-32.1.1-jre.jar /usr/share/elasticsearch/modules/x-pack-watcher/ && \
+    cp /usr/share/elasticsearch/modules/x-pack-identity-provider/guava-32.1.1-jre.jar /usr/share/elasticsearch/modules/x-pack-security/
+
+RUN mkdir /usr/share/elasticsearch/tmp && \
+    chown -R es:es /usr/share/elasticsearch
+
+COPY elasticsearch.yml /usr/share/elasticsearch/config/
+
+WORKDIR /usr/share/elasticsearch
+
+ENV PATH /usr/share/elasticsearch/bin:$PATH
+ENV ES_TMPDIR /usr/share/elasticsearch/tmp
+

diff --git a/container/logging-es-base/elasticsearch.yml b/container/logging-es-base/elasticsearch.yml
new file mode 100644
index 000000000..28570c60b
--- /dev/null
+++ b/container/logging-es-base/elasticsearch.yml
@@ -0,0 +1,12 @@
+network.host: 0.0.0.0
+path.data: /srv/es/logging-es
+node.name: node-1
+cluster.initial_master_nodes: ["node-1"]
+indices.memory.index_buffer_size: 20%
+thread_pool.write.queue_size: 2000
+xpack.security.enabled: false
+xpack.license.self_generated.type: basic
+xpack.security.transport.ssl.enabled: false
+#xpack.security.transport.ssl.verification_mode: certificate
+#xpack.security.transport.ssl.keystore.path: certs/elastic-certificates.p12
+#xpack.security.transport.ssl.truststore.path: certs/elastic-certificates.p12

diff --git a/container/logging-es/Dockerfile b/container/logging-es/Dockerfile
index 6e5e2723b..dec3f53ce 100644
--- a/container/logging-es/Dockerfile
+++ b/container/logging-es/Dockerfile
@@ -1,118 +1,20 @@
 # SPDX-License-Identifier: MulanPSL-2.0+
 # Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
 
-ARG BASE_IMAGE
-FROM openeuler/openeuler:22.03
+FROM logging-es-base:latest
 
 MAINTAINER Wu Zhende
 
-ARG MEMORY
 ARG USER
 ARG PASSWORD
-ARG ARCH
-
-RUN yum clean all && \
-    yum -y update && \
-    yum -y install tar gzip shadow
-
-RUN groupadd -r -g 1090 es && useradd -r -u 1090 -g es es
-
-RUN curl -o elasticsearch-7.11.1-linux-x86_64.tar.gz https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-7.11.1-linux-${ARCH}.tar.gz && \
-    tar -xzvf elasticsearch-7.11.1-linux-x86_64.tar.gz -C /usr/share && \
-    rm -f elasticsearch-7.11.1-linux-x86_64.tar.gz && \
-    mv /usr/share/elasticsearch-7.11.1 /usr/share/elasticsearch && \
-    chmod -R a+xr /usr/share/elasticsearch/ && \
-    echo "-Xms${MEMORY}m" >> /usr/share/elasticsearch/config/jvm.options && \
-    echo "-Xmx${MEMORY}m" >> /usr/share/elasticsearch/config/jvm.options && \
-    rm /usr/share/elasticsearch/bin/elasticsearch-sql-cli && \
-    rm /usr/share/elasticsearch/bin/elasticsearch-sql-cli-7.11.1.jar && \
-    rm /usr/share/elasticsearch/modules/x-pack-core/log4j-1.2-api-2.11.1.jar && \
-    rm /usr/share/elasticsearch/modules/x-pack-identity-provider/log4j-slf4j-impl-2.11.1.jar && \
-    rm /usr/share/elasticsearch/modules/x-pack-security/log4j-slf4j-impl-2.11.1.jar && \
-    rm /usr/share/elasticsearch/lib/log4j-api-2.11.1.jar && \
-    rm /usr/share/elasticsearch/lib/log4j-core-2.11.1.jar && \
-    curl https://repo1.maven.org/maven2/org/apache/logging/log4j/log4j-1.2-api/2.20.0/log4j-1.2-api-2.20.0.jar -o /usr/share/elasticsearch/modules/x-pack-core/log4j-1.2-api-2.20.0.jar && \
-    curl https://repo1.maven.org/maven2/org/apache/logging/log4j/log4j-slf4j-impl/2.20.0/log4j-slf4j-impl-2.20.0.jar -o /usr/share/elasticsearch/modules/x-pack-identity-provider/log4j-slf4j-impl-2.20.0.jar && \
-    cp /usr/share/elasticsearch/modules/x-pack-identity-provider/log4j-slf4j-impl-2.20.0.jar /usr/share/elasticsearch/modules/x-pack-security/ && \
-    curl https://repo1.maven.org/maven2/org/apache/logging/log4j/log4j-api/2.20.0/log4j-api-2.20.0.jar -o /usr/share/elasticsearch/lib/log4j-api-2.20.0.jar && \
-    curl https://repo1.maven.org/maven2/org/apache/logging/log4j/log4j-core/2.20.0/log4j-core-2.20.0.jar -o /usr/share/elasticsearch/lib/log4j-core-2.20.0.jar && \
-    rm -f /usr/share/elasticsearch/modules/ingest-geoip/jackson-annotations-2.10.4.jar && \
-    rm -f /usr/share/elasticsearch/modules/ingest-geoip/jackson-databind-2.10.4.jar && \
-    rm -f /usr/share/elasticsearch/lib/jackson-core-2.10.4.jar && \
-    rm -f /usr/share/elasticsearch/lib/jackson-dataformat-yaml-2.10.4.jar && \
-    rm -f /usr/share/elasticsearch/lib/jackson-dataformat-smile-2.10.4.jar && \
-    rm -f /usr/share/elasticsearch/lib/jackson-dataformat-cbor-2.10.4.jar && \
-    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.15.2/jackson-annotations-2.15.2.jar -o /usr/share/elasticsearch/modules/ingest-geoip/jackson-annotations-2.15.2.jar && \
-    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.15.2/jackson-core-2.15.2.jar -o /usr/share/elasticsearch/lib/jackson-core-2.15.2.jar && \
-    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.15.2/jackson-databind-2.15.2.jar -o /usr/share/elasticsearch/modules/ingest-geoip/jackson-databind-2.15.2.jar && \
-    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.15.2/jackson-dataformat-yaml-2.15.2.jar -o /usr/share/elasticsearch/lib/jackson-dataformat-yaml-2.15.2.jar && \
-    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.15.2/jackson-dataformat-smile-2.15.2.jar -o /usr/share/elasticsearch/lib/jackson-dataformat-smile-2.15.2.jar && \
-    curl https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.15.2/jackson-dataformat-cbor-2.15.2.jar -o /usr/share/elasticsearch/lib/jackson-dataformat-cbor-2.15.2.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-buffer-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-common-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-resolver-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-handler-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-transport-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-codec-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-core/netty-codec-http-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-buffer-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-common-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-resolver-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-handler-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-transport-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-codec-4.1.49.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/transport-netty4/netty-codec-http-4.1.49.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-buffer/4.1.94.Final/netty-buffer-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-buffer-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-buffer/4.1.94.Final/netty-buffer-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-buffer-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-common/4.1.94.Final/netty-common-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-common-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-common/4.1.94.Final/netty-common-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-common-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-resolver/4.1.94.Final/netty-resolver-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-resolver-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-resolver/4.1.94.Final/netty-resolver-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-resolver-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-handler/4.1.94.Final/netty-handler-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-handler-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-handler/4.1.94.Final/netty-handler-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-handler-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-transport/4.1.94.Final/netty-transport-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-transport-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-transport/4.1.94.Final/netty-transport-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-transport-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-codec/4.1.94.Final/netty-codec-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-codec-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-codec/4.1.94.Final/netty-codec-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-codec-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-codec-http/4.1.94.Final/netty-codec-http-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/x-pack-core/netty-codec-http-4.1.94.Final.jar && \
-    curl https://repo1.maven.org/maven2/io/netty/netty-codec-http/4.1.94.Final/netty-codec-http-4.1.94.Final.jar -o /usr/share/elasticsearch/modules/transport-netty4/netty-codec-http-4.1.94.Final.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-core/httpclient-4.5.10.jar && \
-    rm -f /usr/share/elasticsearch/modules/reindex/httpclient-4.5.10.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-identity-provider/httpclient-cache-4.5.10.jar && \
-    rm -f /usr/share/elasticsearch/modules/kibana/httpclient-4.5.10.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-fleet/httpclient-4.5.10.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-security/httpclient-cache-4.5.10.jar && \
-    curl https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.14/httpclient-4.5.14.jar -o /httpclient-4.5.14.jar && \
-    curl https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient-cache/4.5.14/httpclient-cache-4.5.14.jar -o /httpclient-cache-4.5.14.jar && \
-    cp /httpclient-4.5.14.jar /usr/share/elasticsearch/modules/x-pack-core/ && \
-    cp /httpclient-4.5.14.jar /usr/share/elasticsearch/modules/reindex/ && \
-    cp /httpclient-4.5.14.jar /usr/share/elasticsearch/modules/kibana/ && \
-    cp /httpclient-4.5.14.jar /usr/share/elasticsearch/modules/x-pack-fleet/ && \
-    cp /httpclient-cache-4.5.14.jar /usr/share/elasticsearch/modules/x-pack-identity-provider/ && \
-    cp /httpclient-cache-4.5.14.jar /usr/share/elasticsearch/modules/x-pack-security/ && \
-    rm -f /httpclient-4.5.14.jar && \
-    rm -f /httpclient-cache-4.5.14.jar && \
-    rm -f /usr/share/elasticsearch/lib/snakeyaml-1.26.jar && \
-    curl https://repo1.maven.org/maven2/org/yaml/snakeyaml/2.0/snakeyaml-2.0.jar -o /usr/share/elasticsearch/lib/snakeyaml-2.0.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-identity-provider/xmlsec-2.1.4.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-security/xmlsec-2.1.4.jar && \
-    curl https://repo1.maven.org/maven2/org/apache/santuario/xmlsec/2.1.8/xmlsec-2.1.8.jar -o /usr/share/elasticsearch/modules/x-pack-identity-provider/xmlsec-2.1.8.jar && \
-    cp /usr/share/elasticsearch/modules/x-pack-identity-provider/xmlsec-2.1.8.jar /usr/share/elasticsearch/modules/x-pack-security/ && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-identity-provider/guava-19.0.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-watcher/guava-27.1-jre.jar && \
-    rm -f /usr/share/elasticsearch/modules/x-pack-security/guava-19.0.jar && \
-    curl https://repo1.maven.org/maven2/com/google/guava/guava/32.1.1-jre/guava-32.1.1-jre.jar -o /usr/share/elasticsearch/modules/x-pack-identity-provider/guava-32.1.1-jre.jar && \
-    cp /usr/share/elasticsearch/modules/x-pack-identity-provider/guava-32.1.1-jre.jar /usr/share/elasticsearch/modules/x-pack-watcher/ && \
-    cp /usr/share/elasticsearch/modules/x-pack-identity-provider/guava-32.1.1-jre.jar /usr/share/elasticsearch/modules/x-pack-security/
-
-RUN mkdir /usr/share/elasticsearch/tmp && \
-    chown -R es:es /usr/share/elasticsearch
-
-COPY elasticsearch.yml /usr/share/elasticsearch/config/
+ARG MEMORY
 
 WORKDIR /usr/share/elasticsearch
 
 RUN ./bin/elasticsearch-users useradd ${USER} -p ${PASSWORD} -r superuser
 
+RUN echo "-Xms${MEMORY}m" >> /usr/share/elasticsearch/config/jvm.options && \
+    echo "-Xmx${MEMORY}m" >> /usr/share/elasticsearch/config/jvm.options
+
 ENV PATH /usr/share/elasticsearch/bin:$PATH
 ENV ES_TMPDIR /usr/share/elasticsearch/tmp

diff --git a/container/redis-base/Dockerfile b/container/redis-base/Dockerfile
index 07c469ada..0a92046d6 100644
--- a/container/redis-base/Dockerfile
+++ b/container/redis-base/Dockerfile
@@ -5,10 +5,15 @@ FROM alpine:3.18
 
 RUN sed -ri.origin 's|^https?://dl-cdn.alpinelinux.org|http://mirrors.huaweicloud.com|g' /etc/apk/repositories
 
-RUN apk add --no-cache 'redis'
+RUN apk update --no-cache && \
+    apk upgrade && \
+    apk add --no-cache 'redis'
 
-RUN sed -i 's:dir /var/lib/redis:dir /srv/redis:' /etc/redis.conf; \
-    sed -i 's:protected-mode yes:protected-mode no:' /etc/redis.conf; \
-    sed -i 's:bind 127.0.0.1:#bind 127.0.0.1:' /etc/redis.conf; \
+COPY redis.conf /etc/
 
-RUN apk del gcc cpp rpcgen gdb || exit 0
+VOLUME /srv/redis
+WORKDIR /srv/redis
+
+RUN apk del gcc rpcgen gdb
+
+EXPOSE 6379

diff --git a/container/redis-base/redis.conf b/container/redis-base/redis.conf
new file mode 100644
index 000000000..dfcf771fc
--- /dev/null
+++ b/container/redis-base/redis.conf
@@ -0,0 +1,64 @@
+protected-mode no
+port 6379
+tcp-backlog 511
+unixsocket /run/redis/redis.sock
+unixsocketperm 770
+timeout 0
+tcp-keepalive 300
+supervised no
+loglevel notice
+logfile /var/log/redis/redis.log
+databases 16
+always-show-logo no
+save 900 1
+save 300 10
+save 60 10000
+stop-writes-on-bgsave-error yes
+rdbcompression yes
+rdbchecksum yes
+dbfilename dump.rdb
+dir /srv/redis
+replica-serve-stale-data yes
+replica-read-only yes
+repl-diskless-sync no
+repl-diskless-sync-delay 5
+repl-disable-tcp-nodelay no
+replica-priority 100
+lazyfree-lazy-eviction no
+lazyfree-lazy-expire no
+lazyfree-lazy-server-del no
+replica-lazy-flush no
+appendonly no
+appendfilename "appendonly.aof"
+appendfsync everysec
+no-appendfsync-on-rewrite no
+auto-aof-rewrite-percentage 100
+auto-aof-rewrite-min-size 64mb
+aof-load-truncated yes
+aof-use-rdb-preamble yes
+lua-time-limit 5000
+slowlog-log-slower-than 10000
+slowlog-max-len 128
+latency-monitor-threshold 0
+notify-keyspace-events ""
+hash-max-ziplist-entries 512
+hash-max-ziplist-value 64
+list-max-ziplist-size -2
+list-compress-depth 0
+set-max-intset-entries 512
+zset-max-ziplist-entries 128
+zset-max-ziplist-value 64
+hll-sparse-max-bytes 3000
+stream-node-max-bytes 4096
+stream-node-max-entries 100
+activerehashing yes
+client-output-buffer-limit normal 0 0 0
+client-output-buffer-limit replica 256mb 64mb 60
+client-output-buffer-limit pubsub 32mb 8mb 60
+hz 10
+dynamic-hz yes
+aof-rewrite-incremental-fsync yes
+rdb-save-incremental-fsync yes
+cluster-enabled yes
+cluster-config-file nodes-6379.conf
+cluster-node-timeout 5000

diff --git a/container/redis/build b/container/redis/build
index 8e9af8918..4d57195e3 100755
--- a/container/redis/build
+++ b/container/redis/build
@@ -6,13 +6,13 @@ require 'set'
 require_relative '../defconfig.rb'
 
 names = Set.new %w[
-  REDIS_PASSWD
+  REDIS_PASSWORD
 ]
 
 defaults = relevant_service_authentication(names)
 
 docker_skip_rebuild "redis507n:alpine311"
 
-system "docker build -t redis507n:alpine311 --build-arg PASSWD=#{defaults['REDIS_PASSWD']} ."
+system "docker build -t redis507n:alpine311 --build-arg PASSWD=#{defaults['REDIS_PASSWORD']} ."
 
 push_image_remote "redis507n:alpine311"

diff --git a/container/result-webdav-base/Dockerfile b/container/result-webdav-base/Dockerfile
new file mode 100644
index 000000000..78022e9a8
--- /dev/null
+++ b/container/result-webdav-base/Dockerfile
@@ -0,0 +1,39 @@
+# Origin: https://hub.docker.com/r/openresty/openresty
+# Copyright (C) 2016-2020 Eric D. Evan Wies
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+FROM openresty/openresty:1.21.4.1-alpine-fat
+
+ARG ARCH
+
+RUN sed -ri.origin 's|^https?://dl-cdn.alpinelinux.org|http://mirrors.huaweicloud.com|g' /etc/apk/repositories
+RUN apk update --no-cache && \
+    apk upgrade && \
+    apk add --no-cache bash rpm pinentry gpg gpg-agent gcc lua-dev git
+
+RUN git clone https://gh-proxy.com/https://github.com/PythonEngineer007/lua-resty-redis-cluster.git /lua-resty-redis-cluster
+
+RUN cp /lua-resty-redis-cluster/lib/redis_slot.c /usr/local/openresty/lualib/
+
+RUN cp /lua-resty-redis-cluster/lib/resty/rediscluster.lua /usr/local/openresty/lualib/resty/
+
+RUN cd /usr/local/openresty/lualib && gcc redis_slot.c -fPIC -shared -o libredis_slot.so
+
+RUN rm -rf /lua-resty-redis-cluster
+
+RUN if [ "$ARCH" = "aarch64" ]; \
+then wget https://gitee.com/openeuler/signatrust/releases/download/v1.0.0-rc.2/client_linux_aarch64_musl.tar.gz -P / &>/dev/null && \
+    cd / && \
+    tar xvf client_linux_aarch64_musl.tar.gz && \
+    mv client /usr/bin && \
+    rm -f client_linux_aarch64_musl.tar.gz; \
+elif [ "$ARCH" = "x86_64" ]; \
+then wget https://gitee.com/openeuler/signatrust/releases/download/v1.0.0-rc.2/client_linux_x86_64.tar.gz -P / &>/dev/null && \
+    cd / && \
+    tar xvf client_linux_x86_64.tar.gz && \
+    mv client /usr/bin && \
+    rm -f client_linux_x86_64.tar.gz; \
+fi
+
+RUN apk del rpcgen gdb

diff --git a/container/result-webdav-base/build b/container/result-webdav-base/build
new file mode 100755
index 000000000..6f233f31c
--- /dev/null
+++ b/container/result-webdav-base/build
@@ -0,0 +1,13 @@
+#!/bin/bash
+# Origin: https://hub.docker.com/r/openresty/openresty
+# Copyright (C) 2016-2020 Eric D. Evan Wies
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+. ../defconfig.sh
+
+docker_skip_rebuild "result-webdav-base"
+
+docker build --no-cache -t result-webdav-base --build-arg ARCH=$(arch) .
+
+push_image_remote "result-webdav-base"

diff --git a/container/result-webdav/Dockerfile b/container/result-webdav/Dockerfile
index 6269aa42b..416d31159 100644
--- a/container/result-webdav/Dockerfile
+++ b/container/result-webdav/Dockerfile
@@ -3,33 +3,12 @@
 # SPDX-License-Identifier: MulanPSL-2.0+
 # Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
 
-FROM openresty/openresty:1.21.4.1-alpine-fat
+FROM result-webdav-base
 
-ARG ARCH
-
-RUN sed -ri.origin 's|^https?://dl-cdn.alpinelinux.org|http://mirrors.huaweicloud.com|g' /etc/apk/repositories
-RUN apk update --no-cache && \
-    apk upgrade && \
-    apk add --no-cache bash rpm pinentry gpg gpg-agent gcc lua-dev git
-RUN mkdir -p /var/log/nginx/
 RUN adduser lkp -u 1090 -g 1090 -D
 
 COPY upload.lua /usr/local/openresty/nginx/conf/lua/upload.lua
 COPY nginx.conf /usr/local/openresty/nginx/conf/nginx.conf
-COPY openresty.sh /usr/local/bin/
+COPY ./openresty.sh /usr/local/bin/
 
 RUN chown lkp:lkp /usr/local/openresty/nginx /run /var/run
-
-RUN git clone https://github.com/PythonEngineer007/lua-resty-redis-cluster.git /lua-resty-redis-cluster
-
-RUN cp /lua-resty-redis-cluster/lib/redis_slot.c /usr/local/openresty/lualib/
-
-RUN cp /lua-resty-redis-cluster/lib/resty/rediscluster.lua /usr/local/openresty/lualib/resty/
-
-RUN cd /usr/local/openresty/lualib && gcc redis_slot.c -fPIC -shared -o libredis_slot.so
-
-RUN rm -rf /lua-resty-redis-cluster
-
-RUN apk del rpcgen gdb
-
-CMD ["sh", "-c", "umask 002 && /usr/local/bin/openresty.sh"]

diff --git a/container/result-webdav/gpg-agent.conf b/container/result-webdav/gpg-agent.conf
new file mode 100644
index 000000000..e08635905
--- /dev/null
+++ b/container/result-webdav/gpg-agent.conf
@@ -0,0 +1,2 @@
+allow-preset-passphrase
+pinentry-program /usr/bin/pinentry-tty

diff --git a/container/result-webdav/nginx.conf b/container/result-webdav/nginx.conf
index cb01159ce..1eb2243d8 100644
--- a/container/result-webdav/nginx.conf
+++ b/container/result-webdav/nginx.conf
@@ -6,7 +6,7 @@ worker_processes auto;
 pid /tmp/nginx.pid;
 env REDIS_HOST;
 env REDIS_PORT;
-env REDIS_PASSWD;
+env REDIS_PASSWORD;
 
 events {}
 
@@ -87,7 +87,7 @@ http {
 
             redis_host = os.getenv("REDIS_HOST")
             redis_port = os.getenv("REDIS_PORT")
-            redis_pass = os.getenv("REDIS_PASSWD")
+            redis_pass = os.getenv("REDIS_PASSWORD")
 
             local redis = require "resty.redis"
             local red = redis:new()

diff --git a/container/result-webdav/start b/container/result-webdav/start
index c0acb61e4..01e2e0e49 100755
--- a/container/result-webdav/start
+++ b/container/result-webdav/start
@@ -7,7 +7,7 @@
 
 : ${REDIS_HOST:=172.17.0.1}
 : ${REDIS_PORT:=6379}
-: ${REDIS_PASSWD:=default}
+: ${REDIS_PASSWORD:=default}
 
 load_cci_defaults
 load_service_authentication
@@ -21,7 +21,7 @@ cmd=(
     -p 3080:3080
     -e REDIS_HOST=$REDIS_HOST
    -e REDIS_PORT=$REDIS_PORT
-    -e REDIS_PASSWD=$REDIS_PASSWD
+    -e REDIS_PASSWORD=$REDIS_PASSWORD
     -v $PWD/nginx.conf:/usr/local/openresty/nginx/conf/nginx.conf
     -v /etc/localtime:/etc/localtime:ro
     -v /srv/result/:/srv/result/

diff --git a/container/scheduler/my-start b/container/scheduler/my-start
index 4fbb88369..6fe1edeb6 100755
--- a/container/scheduler/my-start
+++ b/container/scheduler/my-start
@@ -11,7 +11,7 @@ names = Set.new %w[
   SCHED_PORT
   REDIS_HOST
   REDIS_PORT
-  REDIS_PASSWD
+  REDIS_PASSWORD
   ETCD_USER
   ETCD_PASSWORD
   ES_HOST

diff --git a/container/scheduler/start b/container/scheduler/start
index dac562da5..fb3adac8e 100755
--- a/container/scheduler/start
+++ b/container/scheduler/start
@@ -14,7 +14,7 @@ names = Set.new %w[
   SCHED_NODES
   REDIS_HOST
   REDIS_PORT
-  REDIS_PASSWD
+  REDIS_PASSWORD
   ETCD_USER
   ETCD_PASSWORD
   ES_HOST

diff --git a/container/serial-logging/prepare b/container/serial-logging/prepare
new file mode 100755
index 000000000..c8252d656
--- /dev/null
+++ b/container/serial-logging/prepare
@@ -0,0 +1,14 @@
+#!/bin/bash
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+set -e
+
+CURRENT_PATH=$(dirname `readlink -f $0`)
+PROJECT_PATH=$(dirname $(dirname $CURRENT_PATH))
+
+if [ -d "$CURRENT_PATH/serial-logging" ]; then
+    rm -rf $CURRENT_PATH/serial-logging
+fi
+
+$PROJECT_PATH/container/compile .

diff --git a/container/serial-logging/repositories b/container/serial-logging/repositories
new file mode 100644
index 000000000..f70737cfc
--- /dev/null
+++ b/container/serial-logging/repositories
@@ -0,0 +1,4 @@
+http://mirrors.huaweicloud.com/alpine/v3.16/main
+http://mirrors.huaweicloud.com/alpine/v3.16/community
+http://mirrors.huaweicloud.com/alpine/v3.13/main
+http://mirrors.huaweicloud.com/alpine/v3.13/community

diff --git a/container/web-backend/Dockerfile b/container/web-backend/Dockerfile
index dc444f3d7..4f674dbae 100644
--- a/container/web-backend/Dockerfile
+++ b/container/web-backend/Dockerfile
@@ -2,17 +2,28 @@
 
 FROM debian:bookworm
 
+ARG PROJECT_NAME
+
+MAINTAINER Lu Weitao
+
 ENV DEBIAN_FRONTEND noninteractive
 
 COPY conf/sources.list /etc/apt/
 RUN rm /etc/apt/sources.list.d/*
 
-RUN apt-get update && \
+RUN apt-get clean && \
+    apt-get update && \
     apt-get install -y git uuid-runtime ruby-json ruby-rails ruby-sinatra ruby-jwt ruby-rest-client curl && \
     gem install public_suffix -v 4.0.7 && \
     gem sources -r https://rubygems.org/ -a https://gems.ruby-china.com/ && \
-    gem install elasticsearch:7.11.1 redis git terminal-table
+    gem install google-protobuf:3.25.0 elasticsearch:7.11.1 redis git terminal-table etcdv3:0.11.6
+
+COPY ${PROJECT_NAME} /c/${PROJECT_NAME}
+COPY lkp-tests /c/lkp-tests
 
 RUN groupadd -g 1090 lkp && useradd -m -u 1090 -g 1090 lkp
 
+RUN chown -R lkp:lkp /c/${PROJECT_NAME} /c/lkp-tests && chmod -R 750 /c/${PROJECT_NAME} /c/lkp-tests
+
 RUN apt-get remove gcc gdb cpp -y
+
+WORKDIR /c/${PROJECT_NAME}/container/web-backend/

diff --git a/container/web-backend/prepare b/container/web-backend/prepare
new file mode 100755
index 000000000..76d5d4bf2
--- /dev/null
+++ b/container/web-backend/prepare
@@ -0,0 +1,28 @@
+#!/bin/bash
+set -e
+
+CURRENT_PATH=$(dirname `readlink -f $0`)
+PROJECT_PATH=$(dirname $(dirname $CURRENT_PATH))
+PROJECT_NAME=$(basename "$PROJECT_PATH")
+
+declare -A REPOS=(
+["${PROJECT_NAME}"]="https://gitee.com/openeuler/${PROJECT_NAME}"
+["lkp-tests"]="https://gitee.com/compass-ci/lkp-tests"
+)
+
+process_repos() {
+    for repo_dir in "${!REPOS[@]}"; do
+        local full_path="$CURRENT_PATH/$repo_dir"
+
+        if [[ -d "$full_path" ]]; then
+            rm -rf "$full_path"
+        fi
+
+        if ! git clone --depth=1 "${REPOS[$repo_dir]}" "$full_path"; then
+            echo "ERROR: Failed to clone ${REPOS[$repo_dir]}" >&2
+            exit 1
+        fi
+    done
+}
+
+process_repos

diff --git a/sparrow/3-code/dev-env b/sparrow/3-code/dev-env
index 4fc74df0b..77e75e847 100755
--- a/sparrow/3-code/dev-env
+++ b/sparrow/3-code/dev-env
@@ -66,7 +66,7 @@ DOCKER_REGISTRY_USER: $DOCKER_REGISTRY_USER
 DOCKER_REGISTRY_PASSWORD: $DOCKER_REGISTRY_PASSWORD
 ETCD_USER: root
 ETCD_PASSWORD: $passwd
-REDIS_PASSWD: $passwd
+REDIS_PASSWORD: $passwd
 EOF
 
 chmod 664 /etc/compass-ci/passwd.yaml

diff --git a/src/lib/data_api.rb b/src/lib/data_api.rb
index 980188488..942360f55 100644
--- a/src/lib/data_api.rb
+++ b/src/lib/data_api.rb
@@ -12,7 +12,6 @@ end
 
 def es_search(index, params)
   begin
-    check_xss(params)
     result = EsDataApi.search(index, params)
   rescue StandardError => e
     error_msg = { 'error_msg' => e.message }

diff --git a/src/lib/etcd_client.cr b/src/lib/etcd_client.cr
index 9776f8ba7..987dd8dc5 100644
--- a/src/lib/etcd_client.cr
+++ b/src/lib/etcd_client.cr
@@ -11,13 +11,8 @@ class EtcdClient
     port = (ENV.has_key?("ETCD_PORT") ? ENV["ETCD_PORT"].to_i32 : ETCD_PORT)
     version = (ENV.has_key?("ETCD_VERSION") ? ENV["ETCD_VERSION"] : ETCD_VERSION)
     user = ENV["ETCD_USER"]?
-    @etcd = Etcd.client(host, port, version)
-    #unless user.nil? || user.empty?
-    #  password = ENV["ETCD_PASSWORD"]
-    #  @etcd = Etcd.client(host, port, user, password, version)
-    #else
-    #  @etcd = Etcd.client(host, port, version)
-    #end
+    password = ENV["ETCD_PASSWORD"]
+    @etcd = Etcd.client(host, port, user, password, version)
   end
 
   def close

diff --git a/src/lib/web_backend.rb b/src/lib/web_backend.rb
index cd28e78bd..7ef038079 100644
--- a/src/lib/web_backend.rb
+++ b/src/lib/web_backend.rb
@@ -253,9 +253,6 @@ def get_compare_body(params)
 end
 
 def compare(params)
-  payload = auth(params)
-  params[:my_account] = payload['my_account'] if payload and payload['my_account']
-
   begin
     body = get_compare_body(params)
   rescue StandardError => e
@@ -411,9 +408,6 @@ def get_jobs_body(params)
 end
 
 def get_jobs(params)
-  payload = auth(params)
-  params[:my_account] = payload['my_account'] if payload and payload['my_account']
-
   begin
     body = get_jobs_body(params)
   rescue StandardError => e
@@ -538,8 +532,6 @@ def get_job_field(params)
 end
 
 def performance_result(data, params)
-  payload = auth(params)
-
   begin
     request_body = JSON.parse(data)
     incorrect_input = check_performance_result(request_body)
@@ -742,9 +734,6 @@ def get_jobs_stats(params)
 end
 
 def group_jobs_stats(params)
-  payload = auth(params)
-  params[:my_account] = payload['my_account'] if payload and payload['my_account']
-
   begin
     body = get_jobs_stats(params)
   rescue StandardError => e
@@ -766,9 +755,6 @@ end
 # -------------------------------------------------------------------------------------------
 
 def get_job_error(params)
-  payload = auth(params)
-  params[:my_account] = payload['my_account'] if payload and payload['my_account']
-
   begin
     body = job_error_body(params)
   rescue StandardError => e

diff --git a/src/libpy/constants.py b/src/libpy/constants.py
new file mode 100644
index 000000000..58a9ef298
--- /dev/null
+++ b/src/libpy/constants.py
@@ -0,0 +1,119 @@
+#! /usr/bin/env python
+# coding=utf-8
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2020-2020. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
diff --git a/src/libpy/es_client.py b/src/libpy/es_client.py
index 16dd0f0e3..202327415 100644
--- a/src/libpy/es_client.py
+++ b/src/libpy/es_client.py
@@ -12,22 +12,28 @@
 # See the Mulan PSL v2 for more details.
 # Author: He Shoucheng
 # Create: 2022-06-23
-# Update: 2025-01-11
 # ******************************************************************************/
 
 import os
 import traceback
-import json
 
 from elasticsearch import Elasticsearch
 
+from src.libpy.single_class import SingleClass
 
-class EsClient():
+
+class EsClient(metaclass=SingleClass):
     def __init__(self):
         hosts = ["http://{0}:{1}".format(os.getenv("ES_HOST", '172.17.0.1'), os.getenv("ES_PORT", 9200))]
         self.es_handler = Elasticsearch(hosts=hosts, http_auth=(os.getenv("ES_USER"), os.getenv("ES_PASSWORD")),
                                         timeout=3600)
 
+    def get_cluster_health(self) -> dict:
+        """
+        :return: {}
+        """
+        return self.es_handler.cluster.health()
+
     def search_by_id(self, index: str, doc_id: str) -> dict:
         """
         :param index:
@@ -36,72 +42,205 @@ class EsClient():
         """
         return self.es_handler.get(index=index, id=doc_id, ignore=404)
 
-    def update_by_id(self, index: str, doc_id: str, body: dict) -> dict:
+    def delete_by_id(self, index: str, doc_id: str) -> dict:
         """
-        Update a document by its ID.
-        :param index: Index name
-        :param doc_id: Document ID
-        :param body: Document content to update (as a dictionary)
-        :return: Response from the update operation
+        :param index:
+        :param doc_id:
+        :return: {}
         """
-        return self.es_handler.index(index=index, id=doc_id, body=body)
+        return self.es_handler.delete(index=index, id=doc_id, ignore=409, refresh=True)
 
-    def delete_by_id(self, index: str, doc_id: str) -> dict:
+    def delete_by_query(self, index: str, query: dict = None) -> dict:
+        """
+        :param index:
+        :param query:
+        :return: {}
+        """
+        return self.es_handler.delete_by_query(index=index, body=query, ignore=409, refresh=True)
+
+    def insert_document_with_id(self, index: str, doc_id: str, document: dict):
         """
-        Delete a document by its ID.
-        :param index: Index name
-        :param doc_id: Document ID
-        :return: Response from the delete operation
+        Creates a new document in the index. Returns a 409 response when a document
+        with the same ID already exists in the index; an ID must be supplied.
+        :param doc_id:
+        :param index:
+        :param document:
+        :return:
         """
-        return self.es_handler.delete(index=index, id=doc_id, ignore=404)
+        return self.es_handler.index(index=index, id=doc_id, body=document, ignore=[400, 409], refresh=True)
 
-    def create_index(self, index: str, body: dict = None) -> dict:
+    def insert_document(self, index: str, document: dict):
         """
-        Create an index.
-        :param index: Index name
-        :param body: Index mappings and settings (optional)
-        :return: Response from the index creation operation
+        Creates a document in an index; no ID needs to be supplied (one is generated)
+        :param index:
+        :param document:
+        :return:
         """
-        return self.es_handler.indices.create(index=index, body=body, ignore=400)  # ignore=400 to ignore errors if the index already exists
+        return self.es_handler.index(index=index, body=document, ignore=[400, 409], refresh=True)
 
-    def delete_index(self, index: str) -> dict:
+    def update_document_by_id(self, index: str, doc_id: str, document: dict):
         """
-        Delete an index.
-        :param index: Index name
-        :return: Response from the delete operation
+        Updates a document in an index; the document ID must be supplied
+        :param doc_id:
+        :param index:
+        :param document:
+        :return:
         """
-        return self.es_handler.indices.delete(index=index, ignore=404)  # ignore=404 to ignore errors if the index does not exist
+        doc = {"doc": document}
+        return self.es_handler.update(index=index, id=doc_id, body=doc, refresh=True)
 
-    def bulk_index(self, index: str, data: list) -> dict:
+    def search_raw(self, index: str, query_body: dict) -> dict:
         """
-        Perform bulk indexing of documents.
-        :param index: Index name
-        :param data: List of documents, formatted as [{"id": "1", "name": "John Doe"}, {"id": "2", "name": "Jane Smith"}]
-        :return: Response from the bulk operation
+        :param index:
+        :param query_body:
+        :return:
         """
-        actions = []
-        for item in data:
-            actions.append({"index": {"_index": index, "_id": item.get("id")}})
-            actions.append(item)
-        return self.es_handler.bulk(index=index, body=actions)
+        return self.es_handler.search(index=index, body=query_body)
 
-    def search_by_query(self, index: str, query: dict) -> dict:
+    def search(self, index: str, query_body: dict = None, source: list = None, size: int = None, should: dict = None):
         """
-        Search for documents based on a query.
-        :param index: Index name
-        :param query: Query conditions (as a dictionary)
-        :return: Query results
+        :param index:
+        :param query_body:
+        :param source:
+        :param size:
+        :param should:
+        :return:
         """
-        # Perform the search query
-        response = self.es_handler.search(index=index, body=query)
-        # Extract relevant information from the response
-        total_hits = response['hits']['total']['value']  # Total number of hits
-        hits = response['hits']['hits']  # List of documents
+        if not source:
+            source = []
 
-        # Format the results
-        results = {
-            "documents": [hit["_source"] for hit in hits]  # Extract the '_source' field
+        if not size:
+            size = 10
+
+        if not query_body:
+            return self.es_handler.search(index=index, _source=source, size=10000).get("hits").get("hits")
+
+        musts = []
+        for key, value in query_body.items():
+            musts.append({
+                "term": {
+                    key: value
+                }
+            })
+        shoulds = []
+        if should:
+            for k, v in should.items():
+                shoulds.append({
+                    "terms": {
+                        k: v
+                    }
+                })
+        final_query_body = {
+            "query":
+                {
+                    "bool": {
+                        "must": musts,
+                        "should": shoulds,
+                        "minimum_should_match": len(shoulds)
+                    }
+                }
         }
-        return results["documents"]
+        return self.es_handler.search(index=index, body=final_query_body, _source=source, size=size
+                                      ).get("hits").get("hits")
 
+    def search_one(self, index: str, query_body: dict, sorted_key: str, order_by: str = "asc",
+                   source: list = None, should: dict = None, exists: list = None, return_matches: bool = False):
+        """
+        :param index:
+        :param query_body:
+        :param sorted_key:
+        :param order_by:
+        :param source:
+        :param should:
+        :param exists:
+        :param return_matches:
+        :return:
+            success:
+                {'_id': '58fd230e-0354-11ed-bbb0-0242ac11003e',
+                 '_index': 'builds',
+                 '_score': None,
+                 '_source': {'build_id': '58fd230e-0354-11ed-bbb0-0242ac11003e',
+                             'build_target': {'architecture': 'aarch64',
+                                              'os_variant': 'openeuler:20.03-LTS-SP1'},
+                             ...
+                             'status': 'init'},
+                 '_type': '_doc',
+                 'sort': [1657760832347]}
+            fail:
+                {}
+        """
+        musts = []
+        if query_body:
+            for key, value in query_body.items():
+                musts.append({
+                    "term": {
+                        key: value
+                    }
+                })
+        if exists:
+            for exists_key in exists:
+                musts.append({
+                    "exists": {
+                        "field": exists_key
+                    }
+                })
+        shoulds = []
+        if should:
+            for k, v in should.items():
+                shoulds.append({
+                    "terms": {
+                        k: v
+                    }
+                })
+        final_query_body = {
+            "query": {
+                "bool": {
+                    "must": musts,
+                    "should": shoulds,
+                    "minimum_should_match": len(shoulds)
+                }
+            },
+            "sort": [
+                {
+                    sorted_key: {
+                        "order": order_by
+                    }
+                }
+            ]
+        }
+        if not source:
+            source = []
+        result = self.es_handler.search(
+            index=index, body=final_query_body, _source=source, ignore=400).get("hits", {}).get("hits", [])
+        if result:
+            if return_matches:
+                return result
+            return result[0]
+        return {}
+
+    def search_count(self, index: str, query_body: dict, should: dict = None):
+        musts = []
+        for key, value in query_body.items():
+            musts.append({
+                "term": {
+                    key: value
+                }
+            })
+        shoulds = []
+        if should:
+            for k, v in should.items():
+                shoulds.append({
+                    "terms": {
+                        k: v
+                    }
+                })
+        final_query_body = {
+            "query": {
+                "bool": {
+                    "must": musts,
+                    "should": shoulds,
+                    "minimum_should_match": len(shoulds)
+                }
+            },
+        }
+        return self.es_handler.count(index=index, body=final_query_body).get("count")
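For reference, EsClient.search and search_count build a bool query with one term clause per query_body entry and one terms clause per should entry; because minimum_should_match is set to the number of should clauses, every should entry is effectively mandatory. A minimal usage sketch, assuming a reachable cluster with ES_USER/ES_PASSWORD in the environment and the "builds" index shown in the search_one docstring:

    from src.libpy.es_client import EsClient

    es = EsClient()  # SingleClass makes this a process-wide singleton
    hits = es.search("builds", {"status": "init"},
                     should={"build_target.architecture": ["aarch64", "x86_64"]})

    # The equivalent raw request, sent through search_raw:
    raw = {"query": {"bool": {
        "must": [{"term": {"status": "init"}}],
        "should": [{"terms": {"build_target.architecture": ["aarch64", "x86_64"]}}],
        "minimum_should_match": 1}}}
    same = es.search_raw("builds", raw)["hits"]["hits"]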
diff --git a/src/libpy/etcd_client.py b/src/libpy/etcd_client.py
new file mode 100644
index 000000000..78d8df93b
--- /dev/null
+++ b/src/libpy/etcd_client.py
@@ -0,0 +1,84 @@
+import os
+
+from etcd3 import Etcd3Client, etcdrpc, utils, client, events
+
+from src.libpy.constants import ETCD_HOST, ETCD_PORT
+
+
+class _Etcd3ClientPatch(object):
+    def build_get_range_request(self, key,
+                                range_end=None,
+                                limit=None,
+                                sort_order=None,
+                                sort_target='key',
+                                serializable=False,
+                                keys_only=False,
+                                min_create_revision=None):
+        range_request = etcdrpc.RangeRequest()
+        range_request.key = utils.to_bytes(key)
+        range_request.keys_only = keys_only
+        if range_end is not None:
+            range_request.range_end = utils.to_bytes(range_end)
+        if limit is not None:
+            range_request.limit = limit
+        if min_create_revision is not None:
+            range_request.min_create_revision = min_create_revision
+
+        if sort_order is None:
+            range_request.sort_order = etcdrpc.RangeRequest.NONE
+        elif sort_order == 'ascend':
+            range_request.sort_order = etcdrpc.RangeRequest.ASCEND
+        elif sort_order == 'descend':
+            range_request.sort_order = etcdrpc.RangeRequest.DESCEND
+        else:
+            raise ValueError('unknown sort order: "{}"'.format(sort_order))
+
+        if sort_target is None or sort_target == 'key':
+            range_request.sort_target = etcdrpc.RangeRequest.KEY
+        elif sort_target == 'version':
+            range_request.sort_target = etcdrpc.RangeRequest.VERSION
+        elif sort_target == 'create':
+            range_request.sort_target = etcdrpc.RangeRequest.CREATE
+        elif sort_target == 'mod':
+            range_request.sort_target = etcdrpc.RangeRequest.MOD
+        elif sort_target == 'value':
+            range_request.sort_target = etcdrpc.RangeRequest.VALUE
+        else:
+            raise ValueError('sort_target must be one of "key", '
+                             '"version", "create", "mod" or "value"')
+
+        range_request.serializable = serializable
+
+        return range_request
+
+
+# Monkey-patch the stock implementation to support limit and min_create_revision
+Etcd3Client._build_get_range_request = _Etcd3ClientPatch.build_get_range_request
+
+
+class EtcdClient:
+    def __init__(self):
+        self.client = client(host=ETCD_HOST, port=ETCD_PORT,
+                             user=os.getenv("ETCD_USER"), password=os.getenv("ETCD_PASSWORD"))
+
+    def get_prefix(self, key_prefix, *args, **kwargs):
+        return self.client.get_prefix(key_prefix, *args, **kwargs)
+
+    def get(self, key):
+        return self.client.get(key)
+
+    def put(self, key, value):
+        return self.client.put(key, value)
+
+    def delete(self, key):
+        return self.client.delete(key)
+
+    def watch_rpmbuild(self, start_revision=None):
+        return self.client.watch_prefix("update_jobs/", start_revision=start_revision)
+
+    @staticmethod
+    def is_delete_event(event):
+        return isinstance(event, events.DeleteEvent)
+
+    def watch_prefix(self, prefix_key, start_revision=None):
+        return self.client.watch_prefix(prefix_key, start_revision=start_revision)
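The patched builder can be exercised directly to confirm that limit (and min_create_revision) now reach the RangeRequest. Whether get_prefix accepts them as keyword arguments depends on the installed etcd3 version forwarding extra kwargs through to the request builder, so treat the last two lines below as a sketch under that assumption:

    from src.libpy.etcd_client import EtcdClient

    etcd = EtcdClient()
    # The monkey-patched builder populates the new fields:
    req = etcd.client._build_get_range_request("update_jobs/", limit=100)
    print(req.limit)  # 100

    # Works only if this etcd3 build forwards kwargs from get_prefix:
    for value, meta in etcd.get_prefix("update_jobs/", limit=100):
        print(meta.key, value)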
diff --git a/src/libpy/k8s_start_check.py b/src/libpy/k8s_start_check.py
new file mode 100644
index 000000000..75d94e6ab
--- /dev/null
+++ b/src/libpy/k8s_start_check.py
@@ -0,0 +1,98 @@
+import os
+import sys
+import pika
+import time
+import argparse
+
+sys.path.append(os.path.abspath("../../"))
+
+from src.libpy.es_client import EsClient
+from src.libpy.redis_client import RedisClient
+from src.libpy.etcd_client import EtcdClient
+from src.libpy.constants import MQ_HOST, MQ_PORT
+
+def wait_for_service(service_name, check_function, delay=2):
+    while True:
+        try:
+            print(f"accessing {service_name}.", flush=True)
+            check_function()
+            print(f"{service_name} is available.", flush=True)
+            return
+        except Exception as e:
+            print(f"Waiting for {service_name}... {e}", flush=True)
+            time.sleep(delay)
+
+def access_redis():
+    _redis = RedisClient()
+    test_key = 'test_key'
+    test_value = 'test_value'
+
+    _redis.set(test_key, test_value)
+    value = _redis.get(test_key)
+
+    if value == test_value:
+        print("Inserted into Redis and verified:", value, flush=True)
+    else:
+        raise Exception("Data verification failed for Redis.")
+
+def access_rabbitmq():
+    # Initialize up front so the finally block cannot hit an unbound name
+    connection = None
+    try:
+        connection = pika.BlockingConnection(pika.ConnectionParameters(MQ_HOST, port=MQ_PORT))
+        channel = connection.channel()
+        channel.queue_declare(queue='k8s_test_queue')
+
+        test_value = 'test_value'
+        channel.basic_publish(exchange='', routing_key='k8s_test_queue', body=test_value)
+        print("Sent to RabbitMQ:", test_value, flush=True)
+    except Exception as e:
+        raise Exception(f"Data verification failed for rabbitmq: {e}")
+    finally:
+        if connection is not None:
+            print("Close connection.")
+            connection.close()
+
+def access_etcd():
+    _etcd = EtcdClient()
+    test_key = 'test_key'
+    test_value = 'test_value'
+    _etcd.put(test_key, test_value)
+    value, metadata = _etcd.get(test_key)
+
+    value = value.decode("utf-8")
+    if value == test_value:
+        print("Inserted into etcd and verified:", value, flush=True)
+    else:
+        raise Exception("Data verification failed for etcd: got {}, expected {}".format(value, test_value))
+
+def access_es():
+    _es = EsClient()
+    health_info = _es.get_cluster_health()
+    status = health_info.get("status")
+    if status in ["green", "yellow"]:
+        print(f"Elasticsearch is available with status: {status}", flush=True)
+    else:
+        raise Exception(f"Elasticsearch is unavailable with status: {status}")
+
+
+def main():
+    parser = argparse.ArgumentParser(description="Access different services.")
+    parser.add_argument("service", choices=["es", "redis", "etcd", "rabbitmq", "all"], help="The service to access")
+    args = parser.parse_args()
+    service = args.service
+    if service == "es":
+        wait_for_service(service, access_es)
+    elif service == "redis":
+        wait_for_service(service, access_redis)
+    elif service == "etcd":
+        wait_for_service(service, access_etcd)
+    elif service == "rabbitmq":
+        wait_for_service(service, access_rabbitmq)
+    else:
+        wait_for_service(service, access_es)
+        wait_for_service(service, access_etcd)
+        wait_for_service(service, access_redis)
+        wait_for_service(service, access_rabbitmq)
+
+if __name__ == "__main__":
+    main()
diff --git a/src/libpy/redis_client.py b/src/libpy/redis_client.py
new file mode 100644
index 000000000..e49c5292b
--- /dev/null
+++ b/src/libpy/redis_client.py
@@ -0,0 +1,49 @@
+import os
+from rediscluster import RedisCluster
+
+from src.libpy.constants import REDIS_HOST, REDIS_PORT
+
+
+class RedisClient:
+    def __init__(self):
+        startup_nodes = [{"host": REDIS_HOST, "port": REDIS_PORT}]
+        self.client = RedisCluster(startup_nodes=startup_nodes, decode_responses=True)
+
+    def set(self, key, value):
+        return self.client.set(key, value)
+
+    def get(self, key):
+        return self.client.get(key)
+
+    def delete(self, key):
+        return self.client.delete(key)
+
+    def incr(self, key):
+        return self.client.incr(key)
+
+    def hget(self, name, key):
+        return self.client.hget(name, key)
+
+    def hset(self, name, key, value):
+        return self.client.hset(name, key, value)
+
+    def hgetall(self, key):
+        return self.client.hgetall(key)
+
+    def exists(self, name):
+        return self.client.exists(name)
+
+    def set_expire(self, name, timeout):
+        return self.client.expire(name, timeout)
+
+    def set_add(self, name, *values):
+        return self.client.sadd(name, *values)
+
+    def is_set_member(self, name, value):
+        return self.client.sismember(name, value)
+
+    def set_remove(self, name, *values):
+        return self.client.srem(name, *values)
+
+    def get_match_keys(self, match):
+        return self.client.scan_iter(match)
diff --git a/src/libpy/single_class.py b/src/libpy/single_class.py
new file mode 100644
index 000000000..dd82eb9f1
--- /dev/null
+++ b/src/libpy/single_class.py
@@ -0,0 +1,16 @@
+"""
+usage:
+    class NewClass(metaclass=SingleClass):
+        ...
+"""
+
+
+class SingleClass(type):
+    def __init__(self, *args, **kwargs):
+        self.__instance = None
+        super().__init__(*args, **kwargs)
+
+    def __call__(self, *args, **kwargs):
+        if self.__instance is None:
+            self.__instance = super().__call__(*args, **kwargs)
+        return self.__instance
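SingleClass caches the first instance of each class that uses it and returns that instance on every later call, which is how EsClient above avoids opening a new Elasticsearch connection per construction. A self-contained check (Demo is a throwaway class used only for illustration):

    from src.libpy.single_class import SingleClass

    class Demo(metaclass=SingleClass):
        def __init__(self):
            self.created = True

    a = Demo()
    b = Demo()
    assert a is b  # __call__ returned the cached instance the second time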
+""" + + +class SingleClass(type): + def __init__(self, *args, **kwargs): + self.__instance = None + super().__init__(*args, **kwargs) + + def __call__(self, *args, **kwargs): + if self.__instance is None: + self.__instance = super().__call__(*args, **kwargs) + return self.__instance diff --git a/src/monitoring/parse_serial_logs.cr b/src/monitoring/parse_serial_logs.cr index d2947d3b2..3e9eda589 100644 --- a/src/monitoring/parse_serial_logs.cr +++ b/src/monitoring/parse_serial_logs.cr @@ -137,6 +137,8 @@ class SerialParser delete_host(msg, host, END_PATTERNS) return if check_save + job_id = match_job_id(msg) + job = find_job(job_id) return if dump_cache(job, msg, host) cache_dmesg(msg, host) @@ -153,68 +155,24 @@ class SerialParser def match_job_id(msg) matched = msg["message"].to_s.match(/.*\/job_initrd_tmpfs\/(?.*?)\//) - if matched - return matched.named_captures["job_id"] - end - - matched = msg["message"].to_s.match(/ipxe will boot job id=(?[0-9]+), /) - if matched - return matched.named_captures["job_id"] - end - - # [ 422.557639][ T5645] /usr/bin/wget -q --timeout=1800 --tries=1 --local-encoding=UTF-8 http://172.168.131.113:3000/~lkp/cgi-bin/lkp-jobfile-append-var?job_file=/lkp/scheduled/job.yaml&job_id=crystal.5240046&job_state=running -O /dev/null^M - matched = msg.to_s.match(/ http:.*&job_id=(?[0-9]+)&/) - if matched - return matched.named_captures["job_id"] - end - - return nil - end + return unless matched - def match_result_root(msg) - # remove "^M" in serial log: - # result_root=/result/mugen/2024-12-11/taishan200-2280-2s48p-256g--a17/openeuler-2^M - # 2.03-LTS-SP4-aarch64/libguestfs/10241211102818490^M - line = msg["message"].to_s.delete("\r") - - # [ 421.102575][ T5645] RESULT_ROOT=/result/lmbench3/2022-04-01/taishan200-2280-2s48p-256g--a1010/openeuler-22.03-LTS-iso-aarch64/development-1-SELECT-4294967297/crystal.5240046^M - matched = line.match(/RESULT_ROOT=(?.*)$/) - if matched - return matched.named_captures["rr"] - end - - # [ 422.515716][ T5645] result_service: raw_upload, RESULT_MNT: /172.168.131.113/result, RESULT_ROOT: /172.168.131.113/result/lmbench3/2022-04-01/taishan200-2280-2s48p-256g--a1010/openeuler-22.03-LTS-iso-aarch64/development-1-SELECT-4294967297/crystal.5240046, TMP_RESULT_ROOT: /tmp/lkp/result^M - matched = line.match(/RESULT_ROOT: \/.*(?\/result\/.*), /) - if matched - return matched.named_captures["rr"] - end - - return nil + matched.named_captures["job_id"] end def find_job(job_id) return unless job_id - Elasticsearch::Client.new.get_job(job_id) + Elasticsearch::Client.new.get_job_content(job_id) end def dump_cache(job, msg, host) - return if @host2rt.has_key?(host) - - job_id = match_job_id(msg) - job = find_job(job_id) - if job - result_root = File.join("/srv", job.result_root) - else - # continue to work when ES DB is down - result_root = match_result_root(msg) - end - return unless result_root - return unless File.exists? "/srv/#{result_root}/job.yaml" + return unless job + result_root = File.join("/srv", job["result_root"].to_s) @host2rt[host] = result_root - f = File.new("/srv/#{result_root}/dmesg", "a") + f = File.new("#{result_root}/dmesg", "a") f.puts @host2head[host].join("\n") f.puts msg["message"] f.flush diff --git a/src/scheduler/redis_client.cr b/src/scheduler/redis_client.cr index 56e663314..ac856c184 100644 --- a/src/scheduler/redis_client.cr +++ b/src/scheduler/redis_client.cr @@ -14,7 +14,7 @@ class RedisClient HOST = (ENV.has_key?("REDIS_HOST") ? ENV["REDIS_HOST"] : JOB_REDIS_HOST) PORT = (ENV.has_key?("REDIS_PORT") ? 
ENV["REDIS_PORT"] : JOB_REDIS_PORT).to_i32 IS_CLUSTER = (ENV.has_key?("IS_CLUSTER") ? ENV["IS_CLUSTER"] : false) - PASSWD = ENV["REDIS_PASSWD"] + PASSWD = ENV["REDIS_PASSWORD"] @@size = 25 def self.instance @@ -23,7 +23,7 @@ class RedisClient def initialize(host = HOST, port = PORT, passwd = PASSWD) if IS_CLUSTER - @client = Redis::Cluster.new(URI.parse("redis://:#{passwd}@#{host}:#{port}")) + @client = Redis::Cluster.new(URI.parse("redis://#{host}:#{port}")) else @client = Redis::Client.new(URI.parse("redis://:#{URI.encode_www_form(passwd)}@#{host}:#{port}")) end diff --git a/src/scheduler/scheduler.cr b/src/scheduler/scheduler.cr index a6ffed07c..7f1fd9116 100644 --- a/src/scheduler/scheduler.cr +++ b/src/scheduler/scheduler.cr @@ -111,8 +111,6 @@ module Scheduler env.create_socket(socket) sched = env.sched - # XXX: port cbs - # spawn sched.get_job_boot_content spawn sched.find_job_boot socket.on_message do |msg| @@ -320,10 +318,10 @@ module Scheduler # /boot.xxx/host/${hostname} # /boot.yyy/mac/${mac} get "/scheduler/boot.:boot_type/:parameter/:value" do |env| - env.sched.find_job_boot + env.sched.hw_find_job_boot end - ws "/scheduler/ws/boot.:boot_type/:parameter/:value" do |socket, env| + ws "/scheduler/ws/boot.:boot_type" do |socket, env| env.set "ws", true env.set "ws_state", "normal" env.create_socket(socket) @@ -339,7 +337,6 @@ module Scheduler socket.on_close do env.set "ws_state", "close" sched.etcd_close - spawn env.watch_channel.send("close") if env.get?("watch_state") == "watching" env.log.info({ "from" => env.request.remote_address.to_s, "message" => "socket on closed" @@ -502,4 +499,19 @@ module Scheduler post "/scheduler/repo/set-srpm-info" do |env| env.sched.set_srpm_info end + + # curl -X PUT "http://localhost:3000/scheduler/register-host2redis?type=dc&arch=aarch64&...." + put "/scheduler/register-host2redis" do |env| + env.sched.register_host2redis + end + + get "/scheduler/heart-beat" do |env| + status = env.sched.heart_beat + {"status_code" => status}.to_json + rescue e + env.log.warn({ + "message" => e.to_s, + "error_message" => e.inspect_with_backtrace.to_s + }.to_json) + end end -- Gitee