
Commit f129e907 authored by Éloïs

Merge branch 'AutoRelease' into '1.6'

Automatic release management

See merge request nodes/typescript/duniter!1240
parents 4fd9397f 99b7110f
.gitignore
@@ -18,6 +18,9 @@ gui/nw
vagrant/*.log
vagrant/duniter
# Python compiled
*.pyc
# Releases
/work
*.deb
.gitlab-ci.yml
@@ -2,96 +2,102 @@ stages:
- github-sync
- build
- test
- releases
- releases-page
- package
- prerelease
- release
push_to_github:
stage: github-sync
variables:
GIT_STRATEGY: none
tags:
- redshift
script:
- rm -rf ./*
- rm -rf .git
- git clone --mirror $CI_REPOSITORY_URL .
- git remote add github $GITHUB_URL_AND_KEY
- git config --global user.email "contact@duniter.org"
- git config --global user.name "Duniter"
# Job would fail if we don't remove refs about pull requests
- bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
- mv packed-refs-new packed-refs
- bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
build:
stage: build
stage: github-sync
variables:
GIT_STRATEGY: none
tags:
- redshift
script:
- rm -rf ./*
- rm -rf .git
- git clone --mirror $CI_REPOSITORY_URL .
- git remote add github $GITHUB_URL_AND_KEY
- git config --global user.email "contact@duniter.org"
- git config --global user.name "Duniter"
# Job would fail if we don't remove refs about pull requests
- bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
- mv packed-refs-new packed-refs
- bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
only:
- nodes/typescript/duniter
.nvm_env: &nvm_env
tags:
- redshift
before_script:
- export NVM_DIR="$HOME/.nvm"
- . "$NVM_DIR/nvm.sh"
build:
<<: *nvm_env
stage: build
script:
- yarn
test:
<<: *nvm_env
stage: test
tags:
- redshift
before_script:
- export NVM_DIR="$HOME/.nvm"
- . "$NVM_DIR/nvm.sh"
script:
- yarn
- yarn test
releases:test:
stage: releases
.build_releases: &build_releases
stage: package
allow_failure: false
image: duniter/release-builder:v1.0.1
tags:
- redshift-duniter-builder
variables:
DAY: $(date +%Y%m%d)
HOUR: $(date +%H%M)
SEC: $(date +%S)
when: manual
artifacts:
paths: &releases_artifacts
- work/bin/
releases:test:
<<: *build_releases
script:
- bash "release/arch/linux/build-lin.sh" "$(date +%Y%m%d).$(date +%H%M).$(date +%S)"
artifacts:
paths:
- work/bin/
expire_in: 8h
when: manual
paths: *releases_artifacts
expire_in: 4h
except:
- tags
releases:
stage: releases
image: duniter/release-builder:v1.0.1
tags:
- redshift-duniter-builder
releases:x64:
<<: *build_releases
script:
- bash "release/arch/linux/build-lin.sh" "${CI_COMMIT_TAG#v}"
artifacts:
paths:
- work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb
- work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz
- work/bin/duniter-server-${CI_COMMIT_TAG}-linux-x64.deb
expire_in: 8h
when: manual
paths: *releases_artifacts
expire_in: 2 weeks
only:
- tags
- master
releases-message:
stage: releases-page
- tags
.release_jobs: &release_jobs
image: tensorflow/tensorflow:latest-py3
tags:
- redshift-duniter-builder
variables:
JOB_ARTIFACTS: 'releases'
EXPECTED_ARTIFACTS: '["work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb","work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz","work/bin/duniter-server-${CI_COMMIT_TAG}-linux-x64.deb"]'
script:
- python3 .gitlab/releaser.py
when: manual
- python3 .gitlab/releaser
only:
- tags
- master
- tags
prerelease:
<<: *release_jobs
stage: prerelease
variables:
RELEASE_BIN_DIR: work/bin/
SOURCE_EXT: '["tar.gz", "zip"]'
publish:
<<: *release_jobs
stage: release
variables:
RELEASE_BIN_DIR: work/bin/
WIKI_RELEASE: Releases
allow_failure: false
when: manual
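The EXPECTED_ARTIFACTS variable passed to the releaser must be a JSON array serialized into a single string: the original .gitlab/releaser.py shown further down reads it with json.loads and aborts with an example of the correct format when parsing fails. A minimal sketch of a value it would accept, with hypothetical file names:
import json, os
# Hypothetical value; the format matches the script's own error message.
os.environ['EXPECTED_ARTIFACTS'] = '["work/bin/demo-linux-x64.deb","work/bin/demo-linux-x64.tar.gz"]'
expected = json.loads(os.environ['EXPECTED_ARTIFACTS'])  # -> list of artifact paths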
{% block prerelease %}
# :gift: Pre-release
[Go to Pipeline page :arrow_forward:](https://git.duniter.org/sveyret/duniter/pipelines/{{pipeline}})
{% endblock %}
{% block release %}
# :white_check_mark: Release
{% endblock %}
{% block notebody %}
<placeholder content="end-title" />
<placeholder content="note">
{{current_message}}
</placeholder>
# Downloads
## Downloads
| Category | Arch | Type | Size | File |
|----------|------|------|------|------|
{% for artifact in artifacts %}
***
[{{artifact.icon}} {{artifact.name}}]({{artifact.url}})
_{{artifact.size}}_
***
| {{artifact.category}} | {{artifact.arch}} | {{artifact.type}} | {{artifact.size}} | [{{artifact.icon}} {{artifact.name}}]({{artifact.url}}) |
{% endfor %}
{% endblock %}
{% block previouswiki %}
## {{tag}}
{{body}}
{% endblock %}
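The <placeholder content="note"> markers above wrap the {{current_message}} block; the PlaceHolder helper at the end of this diff can extract the text between such markers from an already rendered page. A small sketch of that extraction (the body text is hypothetical):
from placeholder import PlaceHolder
text = '<placeholder content="note">\nNotes written by hand\n</placeholder>'
PlaceHolder('note').get_content(text)  # -> '\nNotes written by hand\n'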
#!/usr/bin/python3
'''
This module overwrites the release note in GitLab for the current project.
It expects to find the following environment variables:
- CI_PROJECT_URL - Automatically set by gitlab-ci
- CI_COMMIT_TAG - Automatically set by gitlab-ci
- CI_PROJECT_ID - Automatically set by gitlab-ci
- RELEASER_TOKEN - Token used by the technical user
- JOB_ARTIFACTS - Name of the job that produced all artifacts (set manually)
- EXPECTED_ARTIFACTS - JSON list of all generated artifacts (set manually)
'''
import math
import urllib.request
import urllib.error
import json
import os
import jinja2
def convert_size(size_bytes):
'''Print proper size'''
if size_bytes == 0:
return '0B'
size_name = ('B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB')
i = int(math.floor(math.log(size_bytes, 1024)))
power = math.pow(1024, i)
size = round(size_bytes / power, 2)
return '%s %s' % (size, size_name[i])
def get_current_message():
'''Get current release message'''
ci_project_id = os.environ['CI_PROJECT_ID']
ci_commit_tag = os.environ['CI_COMMIT_TAG']
tag_url = 'https://git.duniter.org/api/v4/projects/'
tag_url += ci_project_id
tag_url += '/repository/tags/'
tag_url += ci_commit_tag
request = urllib.request.Request(tag_url)
response = urllib.request.urlopen(request)
response_data = response.read().decode()
data = json.loads(response_data)
if data['release'] is None:
return False, ''
else:
return True, data['release']['description'].split('# Downloads')[0]
def build_artifact_url(artifact, source):
'''Given an artifact name, builds the url to download it'''
job_artifacts = os.environ['JOB_ARTIFACTS']
ci_project_url = os.environ['CI_PROJECT_URL']
ci_commit_tag = os.environ['CI_COMMIT_TAG']
if source:
source_url = ci_project_url
source_url += '/repository/'
source_url += ci_commit_tag
source_url += '/archive.'
source_url += artifact
return source_url
else:
artifact_url = ci_project_url
artifact_url += '/-/jobs/artifacts/'
artifact_url += ci_commit_tag
artifact_url += '/raw/'
artifact_url += artifact
artifact_url += '?job='
artifact_url += job_artifacts
return artifact_url
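# Illustration only (all values hypothetical): with CI_PROJECT_URL set to
# https://git.duniter.org/nodes/typescript/duniter, CI_COMMIT_TAG=v1.6.0 and JOB_ARTIFACTS=releases,
# build_artifact_url('work/bin/foo.deb', False) would return
# https://git.duniter.org/nodes/typescript/duniter/-/jobs/artifacts/v1.6.0/raw/work/bin/foo.deb?job=releases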
def get_artifact_weight(location):
'''Retrieve size of artifacts'''
size = os.path.getsize(location)
return convert_size(int(size))
def build_compiled_message(current_message):
'''Create a new release message using the release template'''
expected_artifacts = os.environ['EXPECTED_ARTIFACTS']
try:
expected_artifacts = json.loads(expected_artifacts)
except json.decoder.JSONDecodeError:
print('CRITICAL EXPECTED_ARTIFACTS environment variable JSON probably malformed')
print('CRITICAL Correct : \'["test_linux.txt","test_windows.txt"]\' ')
print('CRITICAL Not Correct: "[\'test_linux.txt\',\'test_windows.txt\']" ')
exit(1)
artifacts_list = []
for artifact in expected_artifacts:
artifact_dict = {
'name': artifact.split('/')[-1],
'url': build_artifact_url(artifact, False),
'size': get_artifact_weight(artifact),
'icon': ':package:'
}
artifacts_list.append(artifact_dict)
j2_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(
os.path.dirname(os.path.abspath(__file__))
),
trim_blocks=True
)
# pylint: disable=maybe-no-member
template = j2_env.get_template('release_template.md')
return template.render(
current_message=current_message,
artifacts=artifacts_list
)
def send_compiled_message(exists_release, compiled_message):
'''Send to gitlab new message'''
releaser_token = os.environ['RELEASER_TOKEN']
ci_project_id = os.environ['CI_PROJECT_ID']
ci_commit_tag = os.environ['CI_COMMIT_TAG']
release_url = 'https://git.duniter.org/api/v4/projects/'
release_url += ci_project_id
release_url += '/repository/tags/'
release_url += ci_commit_tag
release_url += '/release'
if exists_release:
# We need to send a PUT request
method = 'PUT'
else:
# We need to send a POST request
method = 'POST'
send_data = {
'tag_name':ci_commit_tag,
'description':compiled_message
}
send_data_serialized = json.dumps(send_data).encode('utf-8')
request = urllib.request.Request(release_url, data=send_data_serialized, method=method)
request.add_header('Private-Token', releaser_token)
request.add_header('Content-Type', 'application/json')
response = urllib.request.urlopen(request)
def main():
'''Execute main scenario'''
exists_release, current_message = get_current_message()
compiled_message = build_compiled_message(current_message)
send_compiled_message(exists_release, compiled_message)
print('Artifacts uploaded successfully')
main()
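Before rebuilding the page, the old script keeps any hand-written part of the release note by splitting the existing description on the '# Downloads' marker (see get_current_message above); a quick sketch of that split with a hypothetical description:
description = 'Manually written notes\n# Downloads\n| old table |'
kept = description.split('# Downloads')[0]  # -> 'Manually written notes\n'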
'''
This module is meant to add release notes in GitLab for the current project.
It expects to find the following environment variables:
- CI_PROJECT_URL - Automatically set by gitlab-ci
- CI_PROJECT_ID - Automatically set by gitlab-ci
- CI_COMMIT_TAG - Automatically set by gitlab-ci
- CI_PIPELINE_ID - Automatically set by gitlab-ci
- RELEASE_BIN_DIR - Directory where releases are to be found
- SOURCE_EXT - Source extensions (pre-release only)
- WIKI_RELEASE - Wiki page where releases are stored (release only)
- RELEASER_TOKEN - Token used by technical user
'''
from releaser import Releaser
Releaser().release()
class Artifact:
'''
An artifact to be uploaded.
'''
def __init__(self, file_name, category, arch, dtype, icon):
'''
:param file_name: The name of the artifact file (may have directory).
:param category: The category (OS, distrib) for the artifact.
:param arch: The architecture name.
:param dtype: The delivery type (either server or desktop).
:param icon: The name of the icon to be used for artifact representation.
:type file_name: str
:type category: str
:type arch: str
:type dtype: str
:type icon: str
'''
self.file_name = file_name
self.category = category
self.arch = arch
self.dtype = dtype
self.icon = icon
def __lt__(self, other):
if not isinstance(other, Artifact): raise TypeError()
return self.category < other.category or \
(self.category == other.category and self.arch < other.arch) or \
(self.category == other.category and self.arch == other.arch and self.dtype < other.dtype)
def __le__(self, other):
if not isinstance(other, Artifact): raise TypeError()
return self.category < other.category or \
(self.category == other.category and self.arch < other.arch) or \
(self.category == other.category and self.arch == other.arch and self.dtype <= other.dtype)
def __eq__(self, other):
if not isinstance(other, Artifact): raise TypeError()
return self.category == other.category and self.arch == other.arch and self.dtype == other.dtype
def __ne__(self, other):
if not isinstance(other, Artifact): raise TypeError()
return self.category != other.category or self.arch != other.arch or self.dtype != other.dtype
def __gt__(self, other):
if not isinstance(other, Artifact): raise TypeError()
return self.category > other.category or \
(self.category == other.category and self.arch > other.arch) or \
(self.category == other.category and self.arch == other.arch and self.dtype > other.dtype)
def __ge__(self, other):
if not isinstance(other, Artifact): raise TypeError()
return self.category > other.category or \
(self.category == other.category and self.arch > other.arch) or \
(self.category == other.category and self.arch == other.arch and self.dtype >= other.dtype)
def to_dict(self):
'''
:return: A dictionary containing artifact data.
:rtype: dict
'''
return {
'name': self.file_name.split('/')[-1],
'category': self.category,
'arch': self.arch,
'type': self.dtype,
'url': self._build_url(),
'size': self._get_size(),
'icon': ':{}:'.format(self.icon)
}
def _get_size(self):
'''
:return: The size of the artifact.
:rtype: FSItemSize
'''
raise NotImplementedError()
def _build_url(self):
'''
:return: The URL which can be used to get this artifact.
:rtype: str
'''
raise NotImplementedError()
import json
import os
from artifact import Artifact
from fsitemsize import FSItemSize
class BinArtifact(Artifact):
'''
A binary artifact.
'''
def __init__(self, folder, desc_file, desc_ext):
'''
:param folder: The folder where files can be found.
:param desc_file: The name of the description file.
:param desc_ext: The extension of the description file.
:type folder: str
:type desc_file: str
:type desc_ext: str
'''
try:
description = json.load(open(desc_file))
except json.decoder.JSONDecodeError:
print('CRITICAL Description file {} could not be read'.format(desc_file))
exit(1)
self.tag = description['version']
self.job = description['job']
file_name = desc_file[:-len(desc_ext)]
Artifact.__init__(self, file_name, description['category'], description['arch'], description['type'], 'package')
def _get_size(self):
return FSItemSize(int(os.path.getsize(self.file_name)))
def _build_url(self):
return '{}/-/jobs/artifacts/{}/raw/{}?job={}'.format(
os.environ['CI_PROJECT_URL'], self.tag, self.file_name, self.job)
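BinArtifact derives the binary path by stripping the description extension from the description file name, and takes the remaining metadata from that JSON description. A sketch of the fields such a description file must provide, written here as the equivalent Python dict (all values hypothetical):
# Equivalent of a hypothetical work/bin/duniter-server-vX.Y.Z-linux-x64.deb.desc file:
example_description = {
    'version': 'vX.Y.Z',    # tag used in the artifact URL
    'job': 'releases:x64',  # CI job that produced the binary
    'category': 'Linux',
    'arch': 'x64',
    'type': 'server',
}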
import math
class FSItemSize:
'''
The size of a file system item.
'''
def __init__(self, bsize = None):
'''
:param bsize: Size of item in bytes.
:type bsize: int
'''
self.bsize = bsize
def __str__(self):
'''
:return: Human readable size.
:rtype: str
'''
if self.bsize is None:
return '(unknown)'
elif self.bsize == 0:
return '0 B'
size_name = ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB')
i = int(math.floor(math.log(self.bsize, 1024)))
power = math.pow(1024, i)
size = round(self.bsize / power, 2)
return '{} {}'.format(size, size_name[i])
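A quick usage sketch of the formatting above (assuming the module layout shown in this diff):
from fsitemsize import FSItemSize
print(FSItemSize())      # -> (unknown)
print(FSItemSize(0))     # -> 0 B
print(FSItemSize(2048))  # -> 2.0 KiB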
import urllib.request
from projectapi import ProjectApi
class Job(ProjectApi):
'''
Job data API.
'''
def __init__(self, job_id):
'''
:param job_id: The job id.
:type job_id: int
'''
ProjectApi.__init__(self, '/jobs/{}'.format(job_id))
def keep_artifacts(self):
'''
Force artifacts to be kept forever.
'''
request = self.build_request('/artifacts/keep', method='POST')
urllib.request.urlopen(request)
import json
import os
import urllib.request
from projectapi import ProjectApi
class Pipeline(ProjectApi):
'''
Pipeline data API.
'''
def __init__(self):
ProjectApi.__init__(self, '/pipelines/{}'.format(os.environ['CI_PIPELINE_ID']))
def find_job_id(self, job_name):
'''
Find the id corresponding to given job name in the pipeline.
:param job_name: The job name.
:type job_name: str
:return: The identifier.
:rtype: int
'''
request = self.build_request('/jobs')
response = urllib.request.urlopen(request)
response_data = response.read().decode()
for job in json.loads(response_data):
if job['name'] == job_name: return job['id']
print('CRITICAL No job with given name {} found'.format(job_name))
exit(1)
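Pipeline.find_job_id and Job.keep_artifacts combine naturally to pin the artifacts of a named job forever; a minimal sketch of that combination (the job name is hypothetical, and this is not necessarily how the Releaser class wires it up):
from job import Job
from pipeline import Pipeline
job_id = Pipeline().find_job_id('releases:x64')  # hypothetical job name
Job(job_id).keep_artifacts()                     # POST .../jobs/<id>/artifacts/keep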
class PlaceHolder:
'''
Placeholder tags in Markdown texts.
'''
__PLACEHOLDER_PART = '<placeholder'
__PLACEHOLDER_START = '<placeholder content="{}">'
__PLACEHOLDER_STOP = '</placeholder>'
__PLACEHOLDER_FULL = '<placeholder content="{}" />'
def __init__(self, content_id):
'''
:param content_id: The identifier to be used for placeholder content.
:type content_id: str
'''
self.ph_start = PlaceHolder.__PLACEHOLDER_START.format(content_id)
self.ph_stop = PlaceHolder.__PLACEHOLDER_STOP
self.ph_full = PlaceHolder.__PLACEHOLDER_FULL.format(content_id)
def get_content(self, text):
'''
:param text: The text in which to extract content.
:type text: str
:return: The content between placeholder markers.
:rtype: str
'''
pos = text.find(self.ph_start)
if pos >= 0:
text = text[pos + len(self.ph_start):]
pos = text.find(self.ph_stop)
if pos >= 0: text = text[:pos]
return text
def get_before(self, text, keep_mark=False):
'''
:param text: The text in which to extract content.
:param keep_mark: If True, the marker is kept in the final text.
:type text: str