Commit b8f88d4

ci: enable only build openocd jobs
1 parent: d40061a · commit: b8f88d4

1 file changed
.gitlab-ci.yml

Lines changed: 0 additions & 318 deletions
@@ -2,16 +2,6 @@ stages:
   - pre_check
   - static_analyzers
   - build_openocd
-  - run_test_pipelines
-  - build_nuttx
-  - test_host
-  - test_nuttx
-  - results
-  - deploy
-  - pre_release
-  - release_stage1
-  - release_submit
-  - update_idf_tools

 image: $CI_DOCKER_REGISTRY/openocd-ci-env:1

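After this commit the stages list at the top of .gitlab-ci.yml keeps only the build-related stages; reconstructed from the context lines above, the surviving block is:

stages:
  - pre_check
  - static_analyzers
  - build_openocd

image: $CI_DOCKER_REGISTRY/openocd-ci-env:1
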
@@ -55,319 +45,11 @@ variables:
   ARCHIVE_EXT: "tar.gz"
   RELEASE_DESC: "New release"

-# prefix should be like a $CI_PROJECT_NAME, but we cannot use variable here
-.release_tag_filter: &release_tag_filter
-  only:
-    - /^v[0-9].*$/
-    - /^openocd-esp32-.*$/
-
-.release_binaries: &release_binaries
-  needs:
-    - job: build_linux
-    - job: build_linux_armhf
-    - job: build_linux_armel
-    - job: build_linux_arm64
-    - job: build_windows_win32
-    - job: build_windows_win64
-    - job: macos_codesign
-    #- job: build_windows_arm64
-
-.release_submit_action: &release_submit_action
-  image: espressif/github-hub:2
-  when: manual
-  allow_failure: true
-  before_script:
-    - set -o errexit; set -o pipefail; set -o nounset
-    - test "${DEBUG_SHELL:-''}" = "1" && set -x
-    - git remote remove github || true
-    - git remote add github ${GH_REPO_HTTPS}
-  variables:
-    GIT_STRATEGY: fetch
-    GH_REL_TAG: ${CI_COMMIT_TAG}
-    SHA256_FILE: openocd-esp32-${CI_COMMIT_TAG}-checksum.sha256
-
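
The three hidden keys deleted above (.release_tag_filter, .release_binaries, .release_submit_action) are YAML anchors; the release jobs removed further down pulled them in with "<<:" merge keys. A minimal sketch of that pattern, with a made-up job name, just to show what the merge expands to:

# Illustration only: "example_release_job" is hypothetical; the real consumers
# (release_tag_draft, Release_tag_submit, ...) appear later in this diff.
example_release_job:
  <<: *release_tag_filter      # merges the tag-based "only:" filter
  <<: *release_submit_action   # merges image, when, allow_failure, before_script, variables
  script:
    - hub release create -m "${RELEASE_DESC}" ${GH_REL_TAG}
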
 before_script:
   - set -o errexit; set -o pipefail; set -o nounset
   - test "${DEBUG_SHELL:-''}" = "1" && set -x

-pipeline_variables:
-  stage: pre_check
-  tags:
-    - build
-  variables:
-    GIT_STRATEGY: none
-  script:
-    - >
-      echo "TRIGGERED_BY_GDB_PIPELINE_BRANCH=${TRIGGERED_BY_GDB_PIPELINE_BRANCH:-}" >> variables.env;
-      if [[ ! -z ${TRIGGERED_BY_GDB_PIPELINE_BRANCH:-} ]]; then
-      echo "CI_FULL_RUN=0" >> variables.env;
-      elif [[ $CI_PIPELINE_SOURCE != "push" || $CI_COMMIT_BRANCH == "master" ]]; then
-      echo "CI_FULL_RUN=1" >> variables.env;
-      else
-      api_call="https://${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/67/merge_requests?source_branch=${CI_COMMIT_REF_NAME}&state=opened";
-      echo "${api_call}";
-      OPEN_MR=$(curl --header "PRIVATE-TOKEN: ${ESPCI_TOKEN}" "${api_call}");
-      echo $OPEN_MR;
-      ready=$(echo "$OPEN_MR" | python3 -c "import json; x=json.loads(input()); print(1 if len(x) and 'labels' in x[0] and 'ready_to_merge' in x[0]['labels'] else 0)");
-      echo "CI_FULL_RUN=${ready}" >> variables.env;
-      fi
-  artifacts:
-    reports:
-      dotenv: variables.env
-
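
The pipeline_variables job deleted above computed CI_FULL_RUN and published it through a dotenv artifact report, which is how later jobs saw it as an ordinary variable (mr_auto_approve, further down, consumed it via dependencies:). A minimal sketch of such a consumer, with a made-up job name:

check_full_run:                # hypothetical job, for illustration only
  stage: results
  dependencies:
    - pipeline_variables       # pulls in the dotenv report written to variables.env
  script:
    - echo "CI_FULL_RUN=${CI_FULL_RUN}"
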
-build_macos_strip:
-  stage: build_openocd
-  tags: [ "darwin" ]
-  needs: [build_macos, build_macos_arm64]
-  artifacts:
-    paths:
-      - dist
-  script:
-    - cd dist
-    - dist_names="dist_name_macos dist_name_macos-arm64"
-    - ocd_bin=tmp/openocd-esp32/bin/openocd
-    - >
-      for dist_file in $dist_names; do
-      mkdir tmp;
-      tar -xzf `cat $dist_file` -C tmp;
-      otool -l $ocd_bin | grep -A 5 LC_SYMTAB;
-      ls -lh $ocd_bin;
-      strip -S $ocd_bin;
-      otool -l $ocd_bin | grep -A 5 LC_SYMTAB;
-      ls -lh $ocd_bin;
-      tar -czf `cat $dist_file` -C tmp .;
-      rm -rf tmp;
-      done
-
-macos_codesign:
-  stage: pre_release
-  <<: *release_tag_filter
-  when: on_success
-  resource_group: macos_codesign
-  tags: [ "darwin", "codesign" ]
-  # list all jobs that produces macos distros
-  needs: [build_macos_strip]
-  artifacts:
-    paths:
-      - ${DIST_ART_DIR}
-  variables:
-    # directory with distro archives
-    DIST_ART_DIR: dist
-    # command to unarchive distro
-    UNARCHIVE_TOOL: "tar xzf"
-    # URL to macos codesign repo
-    NOTARIZATION_SCRIPTS_GIT: "${CI_SERVER_PROTOCOL}://gitlab-ci-token:${CI_JOB_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/espressif/macos_codesign_notarization.git"
-  script:
-    - git clone -q --depth=1 ${NOTARIZATION_SCRIPTS_GIT} -b ${CI_COMMIT_REF_NAME} ||
-      git clone -q --depth=1 ${NOTARIZATION_SCRIPTS_GIT}
-    - ./macos_codesign_notarization/run.sh
-
-release_tag_draft:
-  stage: release_stage1
-  tags: [ "amd64", "internet" ]
-  <<: *release_tag_filter
-  <<: *release_submit_action
-  <<: *release_binaries
-  script:
-    - git remote remove github || true
-    - git remote add github ${GH_REPO_HTTPS}
-    - hub release show ${GH_REL_TAG} || { echo "Please create a release on GitHub with ${GH_REL_TAG} tag at first"; exit 1; }
-    # List of archives
-    - DIST_DIR=dist
-    - FILES=$(find ${DIST_DIR} -name dist_name_\* -exec cat {} \+)
-    - cd ${DIST_DIR}
-    - ls -l $FILES
-    # Generate checksum file
-    - >
-      for n in $FILES; do
-      sz=$(stat -c%s "${n}") >> ${SHA256_FILE};
-      printf "# %s: %s bytes\n" "${n}" "${sz}" >> ${SHA256_FILE};
-      sha256sum -b "${n}" >> ${SHA256_FILE};
-      done
-    # Append FILES with checksum file
-    - FILES=$(echo -e "${FILES}\n${SHA256_FILE}")
-    - ls -l $FILES
-    # Upload archives
-    - for n in ${FILES}; do hub release edit -m "" -a "${n}" "${GH_REL_TAG}"; done
-
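
The checksum loop above emits one printf comment line and one sha256sum -b line per archive, so the published openocd-esp32-${CI_COMMIT_TAG}-checksum.sha256 file is shaped roughly like this (names, sizes, and digests are placeholders):

# <archive-1>.tar.gz: <size in bytes> bytes
<sha256 digest> *<archive-1>.tar.gz
# <archive-2>.tar.gz: <size in bytes> bytes
<sha256 digest> *<archive-2>.tar.gz
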
-Release_tag_submit:
-  stage: release_submit
-  tags: [ "amd64", "internet" ]
-  <<: *release_tag_filter
-  <<: *release_submit_action
-  dependencies: []
-  script:
-    - hub release create -m "${RELEASE_DESC}" ${GH_REL_TAG}
-
-Pre-Release_tag_submit:
-  stage: release_submit
-  tags: [ "amd64", "internet" ]
-  <<: *release_tag_filter
-  <<: *release_submit_action
-  dependencies: []
-  script:
-    - hub release create --prerelease -m "${RELEASE_DESC}" ${GH_REL_TAG}
-
-Delete_tag_release:
-  stage: release_submit
-  tags: [ "amd64", "internet" ]
-  <<: *release_tag_filter
-  <<: *release_submit_action
-  dependencies: []
-  script:
-    - hub release delete ${GH_REL_TAG}
-
-test_idf_examples:
-  stage: pre_release
-  allow_failure: true
-  rules:
-    - if: $CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "run_idf_tests"
-      when: always
-    - if: $CI_COMMIT_BRANCH == "master"
-      when: manual
-  variables:
-    PARENT_PIPELINE_ID: $CI_PIPELINE_ID
-  trigger:
-    include: .gitlab/ci/idf-examples.yml
-    strategy: depend
-  needs:
-    - job: build_linux_armhf
-    - job: build_linux_arm64
-
-create_reports:
-  stage: results
-  tags:
-    - build
-  coverage: '/lines\.*:.*.*\s+(\d+\.\d+%)/'
-  artifacts:
-    paths:
-      - dist/lcov_all_report.tar.gz
-      - cov_infos/metrics.txt
-      - cov_infos/cobertura.xml
-    reports:
-      metrics: cov_infos/metrics.txt
-      coverage_report:
-        coverage_format: cobertura
-        path: cov_infos/cobertura.xml
-      junit:
-        - "*/results/*.xml"
-    when: always
-    expire_in: 1 week
-  when: always
-  expire_in: 1 week
-  needs:
-    - job: 5.1.x_run_test_pipeline
-    - job: 5.2.x_run_test_pipeline
-    - job: 5.3.x_run_test_pipeline
-    - job: 5.4.x_run_test_pipeline
-    - job: 5.5.x_run_test_pipeline
-    - job: master_run_test_pipeline
-    # NuttX
-    - job: test_nuttx_esp32
-    - job: test_nuttx_esp32s2
-    - job: test_nuttx_esp32s3
-    - job: test_nuttx_esp32c3
-    - job: test_nuttx_esp32c6
-    - job: test_nuttx_esp32h2
-  when: always
-  script:
-    # Fetch artifacts from downstream
-    - >
-      api_call="https://${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/67/pipelines/${CI_PIPELINE_ID}/bridges";
-      echo "${api_call}";
-      BRIDGES=$(curl --header "PRIVATE-TOKEN: ${ESPCI_TOKEN}" "${api_call}");
-      CHILD_IDS=$(echo "$BRIDGES" | python3 -c "import json; x=json.loads(input()); print(' '.join(str(y['downstream_pipeline']['id']) for y in x if y['name'].endswith('_run_test_pipeline')))");
-      for child in $CHILD_IDS; do
-      api_call="https://${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/67/pipelines/${child}/jobs";
-      echo "${api_call}";
-      JOBS=$(curl --header "PRIVATE-TOKEN: ${ESPCI_TOKEN}" "${api_call}");
-      TEST_IDS=$(echo "$JOBS" | python3 -c "import json; x=json.loads(input()); print(' '.join(str(y['id']) for y in x if y['name'].startswith('tests_') and y['status'] != 'skipped'))");
-      for test in $TEST_IDS; do
-      api_call="https://${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/67/jobs/${test}/artifacts";
-      echo "${api_call}";
-      curl --header "PRIVATE-TOKEN: ${ESPCI_TOKEN}" "${api_call}" -o artifacts.zip;
-      unzip artifacts.zip || echo "No valid artifacts for ${test}";
-      done;
-      done;
-    - mkdir -p cov_infos
-    - mkdir -p dist
-    # Below lines copies all .info files into cov_infos folder
-    - >
-      folder_list=$(ls -d build_test_app*);
-      for each_folder in $folder_list ;
-      do
-      lcov --gcov-tool ${PWD}/${each_folder}/esp_cov_files/gcov --capture --directory ${each_folder}/esp_cov_files --output-file ${each_folder}/${each_folder}.info;
-      done
-    - cp `find . -wholename "./build_test_app*/*.info" -size +0` cov_infos
-    - ls -la cov_infos/
-    # Creating a html report of coverage files.
-    - genhtml --ignore-errors source cov_infos/*.info -o lcov_html_report/
-    - tar czf dist/lcov_all_report.tar.gz lcov_html_report/
-    # Below lines collecting all coverage file names with '-a' flag for lcov merge command.
-    - >
-      FILES="" ;
-      for each_file in cov_infos/*.info ;
-      do
-      FILES+=" -a ${each_file}" ;
-      done
-    - lcov ${FILES} -o cov_infos/merged.info
-    # Line in below creates a txt file from merged coverage file which includes coverage percentages.
-    - lcov --rc lcov_list_width=150 --list cov_infos/merged.info > cov_infos/metrics_input.txt
-    - python3 tools/list_to_metrics.py --file cov_infos/metrics_input.txt
-    - lcov_cobertura cov_infos/merged.info -o cov_infos/cobertura.xml
-
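
The last loop above only accumulates repeated "-a <tracefile>" flags in FILES, so the merge step expands to a single lcov invocation of this shape (tracefile names are illustrative):

lcov -a cov_infos/build_test_app1.info -a cov_infos/build_test_app2.info -o cov_infos/merged.info
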
-mr_auto_approve:
-  stage: results
-  tags:
-    - build
-  dependencies:
-    - pipeline_variables
-  when: always
-  script:
-    - >
-      if [[ -z ${TRIGGERED_BY_GDB_PIPELINE_BRANCH:-} && $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH != "master" ]]; then
-      api_call="https://${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/67/merge_requests?source_branch=${CI_COMMIT_REF_NAME}&state=opened";
-      echo "${api_call}";
-      OPEN_MR=$(curl --header "PRIVATE-TOKEN: ${ESPCI_TOKEN}" "${api_call}");
-      echo $OPEN_MR;
-      iid=$(echo "$OPEN_MR" | python3 -c "import json; x=json.loads(input()); print(x[0]['iid'] if len(x) and 'iid' in x[0] else 0)");
-      target=$(echo "$OPEN_MR" | python3 -c "import json; x=json.loads(input()); print(x[0]['target_branch'] if len(x) and 'target_branch' in x[0] else '')");
-      if [[ $iid != "0" && $target == "master" ]]; then
-      action="unapprove"
-      if [[ $CI_FULL_RUN == "1" ]]; then
-      api_call="https://${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/67/pipelines/${CI_PIPELINE_ID}/jobs?scope[]=failed";
-      echo "${api_call}";
-      FAILED_JOBS=$(curl --header "PRIVATE-TOKEN: ${ESPCI_TOKEN}" "${api_call}");
-      echo $FAILED_JOBS
-      if [[ $FAILED_JOBS == "[]" ]]; then
-      action="approve"
-      fi
-      fi
-      api_call="https://${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/67/merge_requests/${iid}/${action}";
-      echo "${api_call}";
-      curl --request POST --header "PRIVATE-TOKEN: ${ESPCI_TOKEN}" "${api_call}";
-      fi
-      fi
-
-update_idf_tools:
-  stage: update_idf_tools
-  when: manual
-  allow_failure: true
-  <<: *release_tag_filter
-  variables:
-    TOOL_NAME: openocd
-    TOOL_MEMBERS: openocd-esp32
-    TOOL_VERSION: ${CI_COMMIT_TAG}
-    TOOL_SHA256_URL: https://github.com/espressif/openocd-esp32/releases/download/${CI_COMMIT_TAG}/openocd-esp32-${CI_COMMIT_TAG}-checksum.sha256
-  trigger:
-    project: idf/idf-tools-updater
-    strategy: depend
-
 include:
   - '.gitlab/ci/util.yml'
   - '.gitlab/ci/build.yml'
-  - '.gitlab/ci/test-template.yml'
   - '.gitlab/ci/pre-check.yml'
-  - '.gitlab/ci/nuttx.yml'
-  - '.gitlab/ci/host-test.yml'
-  - '.gitlab/ci/run-pipeline.yml'
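
With the test-template, nuttx, host-test, and run-pipeline fragments dropped, the include list that survives this commit is (reconstructed from the context lines above):

include:
  - '.gitlab/ci/util.yml'
  - '.gitlab/ci/build.yml'
  - '.gitlab/ci/pre-check.yml'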
