Commit 8f9abf02 authored by Alexander Khabarov, committed by Joshua Leivers

feat: add plugin implementation

parent 944802c4
Merge request !1: Prepare for OSS release
{
"extends": ["@commitlint/config-conventional"]
}
/node_modules/
/coverage/
/tap.xml
/dist/
include:
  - project: "ci/templates"
    ref: v2.1.1
    file:
      - "/.templates.yml"
oci:
  extends: .oci
  parallel:
    matrix:
      - NAME: node
        VERSION:
          - 18
install:
  extends: .node-install
  parallel:
    matrix:
      - VERSION:
          - 18
icon:
  extends: .icon
lint:
  extends: .node-lint
test:
  extends: .node-test
  image: $CI_REGISTRY_IMAGE/node:$VERSION-$CI_COMMIT_SHA
  parallel:
    matrix:
      - VERSION:
          - 18
  script:
    - |
      set -euo pipefail
      npx c8 --reporter cobertura ava --tap | tee /dev/fd/2 | npx tap-junit -s @$CI_PROJECT_PATH -o .
  artifacts:
    reports:
      junit: tap.xml
      coverage_report:
        coverage_format: cobertura
        path: coverage/cobertura-coverage.xml
semantic-release:
  extends: .semantic-release
  variables:
    VERSION: 18
    NPM_TOKEN: "${CI_JOB_TOKEN}"
  before_script:
    - npm config set -- "@${CI_PROJECT_ROOT_NAMESPACE}:registry"
      "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/npm/"
    - npm config set -- "${CI_API_V4_URL#https?}/packages/npm/:_authToken"
      '\${NPM_TOKEN}'
/_/
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx commitlint --edit
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npm run lint
{
"proseWrap": "always",
"overrides": [
{
"files": "*.svg",
"options": {
"parser": "html"
}
}
]
}
# Internet Systems Consortium license
Copyright (c) 2023, ARM Limited
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
# @semantic-release/archive
> [**semantic-release**](https://github.com/semantic-release/semantic-release)
> plugin to create archives.
| Step | Description |
| ------------------ | --------------------------------------- |
| `verifyConditions` | Verify that the input paths exist. |
| `prepare` | Create an archive from the input paths. |
## Getting Started
```sh
npm config set always-auth true
npm install --save-dev @semantic-release/archive
```
Add the following to `.releaserc.yaml`:
```yaml
plugins:
  - path: "@semantic-release/archive"
    inputs:
      - path1
      - path2
```
## Configuration
### `inputs`
The paths to add to the archive.
These can be directories and/or filepaths.
### `archive`
The path where the archive should be created.
Defaults to `archive.tar.gz`.
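For example, to bundle build output into a zip file instead of the default tarball (the paths and file name below are illustrative), combine both options in `.releaserc.yaml`:

```yaml
plugins:
  - path: "@semantic-release/archive"
    inputs:
      - dist
      - README.md
    archive: release.zip
```

The extension of `archive` selects the format; `.tar`, `.tar.gz`, `.tgz`, and `.zip` are supported.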
ARG CI_TEMPLATES_REGISTRY_IMAGE
ARG CI_TEMPLATES_IMAGE_TAG
ARG VERSION
FROM ${CI_TEMPLATES_REGISTRY_IMAGE}/node:${VERSION}-${CI_TEMPLATES_IMAGE_TAG}
USER ci
<svg width="24px" height="24px" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
<path transform="translate(12 12)" d="M-3 -10v10l-7 8h20l-7 -8v-10Z" />
</svg>
{
"name": "@semantic-release/archive",
"version": "1.0.0",
"description": "A `semantic release` plugin for creating archives",
"exports": {
"import": "./plugin.mjs",
"require": "./plugin.js"
},
"main": "plugin.js",
"module": "plugin.mjs",
"files": [
"plugin.js",
"plugin.mjs"
],
"scripts": {
"lint": "xo",
"fix": "xo --fix",
"test": "c8 ava",
"prepare": "is-ci || husky install"
},
"repository": {
"type": "git",
"url": "git@gitlab.gpu.arm.com:semantic-release/archive.git"
},
"keywords": [
"tar",
"tarball",
"archive",
"semantic-release"
],
"author": "",
"license": "ISC",
"dependencies": {
"@semantic-release/error": "^3",
"archiver": "^5.3.1",
"debug": "^4"
},
"peerDependencies": {
"semantic-release": ">=19"
},
"devDependencies": {
"@commitlint/cli": "^17",
"@commitlint/config-conventional": "^17",
"@ltd/j-toml": "^1",
"@semantic-release/changelog": "^6",
"@semantic-release/commit-analyzer": "^9",
"@semantic-release/exec": "^6",
"@semantic-release/git": "^10",
"@semantic-release/gitlab": "^9",
"@semantic-release/release-notes-generator": "^10",
"ava": "^5",
"c8": "^7",
"clear-module": "^4",
"eslint": "^8",
"express": "^4",
"husky": "^8",
"is-ci": "^3",
"prettier": "^2",
"semantic-release": "^20",
"sinon": "^15",
"stream-buffers": "^3",
"tap-junit": "^5",
"tempy": "^3",
"test-listen": "^1",
"xo": "^0.53"
},
"ava": {
"files": [
"test/**/*",
"!test/fixture"
],
"utilizeParallelBuilds": false
},
"c8": {
"100": true,
"include": [
"plugin.mjs",
"plugin.js"
],
"reporter": [
"text",
"html",
"cobertura"
],
"all": true
},
"release": {
"extends": [
"@semantic-release/config-release-channels",
"@semantic-release/config-gitlab-npm"
]
}
}
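// CommonJS entry point: each hook lazily imports the ESM implementation from plugin.mjs and forwards its arguments.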
async function verifyConditions(pluginConfig, context) {
const {verifyConditions: shim} = await import('./plugin.mjs');
return shim(pluginConfig, context);
}
async function prepare(pluginConfig, context) {
const {prepare: shim} = await import('./plugin.mjs');
return shim(pluginConfig, context);
}
// eslint-disable-next-line unicorn/prefer-module
module.exports = {
verifyConditions,
prepare,
};
import {lstatSync, createWriteStream} from 'node:fs';
import {access} from 'node:fs/promises';
import SemanticReleaseError from '@semantic-release/error';
import archiver from 'archiver';
import dbg from 'debug';
const debug = dbg('semantic-release:archive');
const DEFAULT_ARCHIVE_PATH = 'archive.tar.gz';
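// Supported archive extensions mapped to archiver format names and options (maximum compression for gzip and zip).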
const COMPRESSION = {
'.tar': ['tar', {gzip: false}],
'.tar.gz': ['tar', {gzip: true, gzipOptions: {level: 9}}],
'.tgz': ['tar', {gzip: true, gzipOptions: {level: 9}}],
'.zip': ['zip', {zlib: {level: 9}}],
};
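// Wrap each argument in a fenced code block for use in SemanticReleaseError details.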
function verbatim(...args) {
const backticks = '```';
return args
.map(a => `${backticks}\n${a}\n${backticks}`)
.join('\n');
}
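// Return the archive extension, treating `.tar.*` as a compound extension, e.g. 'dist.tar.gz' -> '.tar.gz', 'dist.zip' -> '.zip'.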
function extension(path) {
const components = path.split('.');
const [tar, ext] = components.slice(-2);
if (components.length > 2 && tar === 'tar') {
return `.${tar}.${ext}`;
}
return `.${ext}`;
}
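// verifyConditions step: validate the plugin configuration and check that every input path exists.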
export async function verifyConditions(pluginConfig, context) {
const {logger} = context;
const {inputs, archive = DEFAULT_ARCHIVE_PATH} = pluginConfig;
debug('Validating input paths');
// Make sure we have the correct shape for the paths
if (!Array.isArray(inputs)) {
throw new SemanticReleaseError(
'`inputs` must be a list',
'EARCHIVECFG',
verbatim(inputs),
);
}
if (inputs.length === 0) {
throw new SemanticReleaseError(
'`inputs` must be non-empty',
'EARCHIVECFG',
verbatim(inputs),
);
}
if (typeof archive !== 'string') {
throw new SemanticReleaseError(
'`archive` is not a string',
'EARCHIVECFG',
verbatim(archive),
);
}
if (!Object.keys(COMPRESSION).includes(extension(archive))) {
throw new SemanticReleaseError(
'`archive` has unsupported extension',
'EARCHIVECFG',
`${extension(archive)} not in ${Object.keys(COMPRESSION).join(', ')}`,
);
}
// Make sure that the input paths exist
const exist = await Promise.allSettled(inputs.map(p => access(p)));
const missing = inputs.filter((_, i) => exist[i].status === 'rejected');
if (missing.length > 0) {
throw new SemanticReleaseError(
`Input paths do not exist: \`${missing.join(', ')}\``,
'EARCHIVECFG',
verbatim(missing),
);
}
logger.success('Input paths are valid');
}
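// prepare step: write the configured archive once the next release version is known.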
export async function prepare(pluginConfig, context) {
const {logger} = context;
const {nextRelease: {version}} = context;
const {inputs, archive = DEFAULT_ARCHIVE_PATH} = pluginConfig;
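// Stream the archive to disk, resolving when the output file closes and rejecting on archiver warnings or errors.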
await new Promise((resolve, reject) => {
const archiveStream = archiver(...COMPRESSION[extension(archive)]);
const output = createWriteStream(archive);
output.on('close', resolve);
archiveStream.on('warning', reject).on('error', reject).pipe(output);
for (const path of inputs) {
if (lstatSync(path).isDirectory()) {
archiveStream.directory(path, path, {path});
} else {
archiveStream.file(path, {path});
}
}
archiveStream.finalize();
});
logger.success('Created an archive for `%s`', version);
}
// eslint-disable-next-line unicorn/prefer-module
const test = require('ava');
test('Can load the CommonJS module', async t => {
// eslint-disable-next-line unicorn/prefer-module
const {verifyConditions, prepare} = require('../plugin.js');
t.is(typeof verifyConditions, 'function');
t.is(typeof prepare, 'function');
const cfg = {};
const ctx = {};
await t.throwsAsync(verifyConditions(cfg, ctx));
await t.throwsAsync(prepare(cfg, ctx));
});
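// Tests for the ESM plugin: each test gets its own plugin config, a stubbed logger and a temporary working directory.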
import {env} from 'node:process';
import test from 'ava';
import {stub} from 'sinon';
import clearModule from 'clear-module';
import {WritableStreamBuffer} from 'stream-buffers';
import {temporaryDirectory} from 'tempy';
test.beforeEach(async t => {
clearModule('../plugin.mjs');
t.context.m = await import('../plugin.mjs');
t.context.cfg = {
inputs: ['test/data/file.txt', 'test/data/folder'],
};
const environ = Object.fromEntries(Object.entries(env));
t.context.log = stub();
t.context.ctx = {
cwd: temporaryDirectory(),
env: environ,
options: {},
stdout: new WritableStreamBuffer(),
stderr: new WritableStreamBuffer(),
logger: {
log: t.context.log,
success: t.context.log,
warn: t.context.log,
},
};
});
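// Macro: verifyConditions must reject with a SemanticReleaseError carrying the given code and a message matching the regex.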
const failure = test.macro({
// eslint-disable-next-line max-params
async exec(t, code, regex, before, after) {
if (before) {
await before(t);
}
const error = await t.throwsAsync(
t.context.m.verifyConditions(t.context.cfg, t.context.ctx),
);
t.is(error.name, 'SemanticReleaseError', `${error}`);
t.is(error.code, code, `${error}`);
t.regex(error.message, regex, `${error}`);
if (after) {
await after(t);
}
},
title: (message, code, regex) =>
message ?? `Throws \`${code}\` matching ${regex.source}`,
});
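// Macro: verifyConditions and prepare both complete without throwing for the given setup.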
const success = test.macro({
async exec(t, before, after) {
if (before) {
await before(t);
}
await t.notThrowsAsync(
t.context.m.verifyConditions(t.context.cfg, t.context.ctx),
);
t.context.ctx.nextRelease = {version: '1.0.0'};
await t.notThrowsAsync(t.context.m.prepare(t.context.cfg, t.context.ctx));
if (after) {
await after(t);
}
},
title: message =>
message ? `Successfully runs with ${message}` : 'Successfully runs',
});
test(failure, 'EARCHIVECFG', /`inputs` must be a list/, async t => {
t.context.cfg.inputs = {};
});
test(failure, 'EARCHIVECFG', /`inputs` must be non-empty/, async t => {
t.context.cfg.inputs = [];
});
test(failure, 'EARCHIVECFG', /Input paths do not exist: `.+`/, async t => {
t.context.cfg.inputs = ['/this/does/not/exist'];
});
test(failure, 'EARCHIVECFG', /`archive` is not a string/, async t => {
t.context.cfg.archive = 0;
});
test(failure, 'EARCHIVECFG', /`archive` has unsupported extension/, async t => {
t.context.cfg.archive = 'archive.undefined';
});
test(success);