diff --git a/dev-test/backends/forgejo/config.yml b/dev-test/backends/forgejo/config.yml
new file mode 100644
index 000000000000..63d485beda27
--- /dev/null
+++ b/dev-test/backends/forgejo/config.yml
@@ -0,0 +1,65 @@
+backend:
+ name: forgejo
+ app_id: a582de8c-2459-4e5f-b671-80f99a0592cc
+ branch: master
+ repo: owner/repo
+
+media_folder: static/media
+public_folder: /media
+collections:
+ - name: posts
+ label: Posts
+ label_singular: 'Post'
+ folder: content/posts
+ create: true
+ slug: '{{year}}-{{month}}-{{day}}-{{slug}}'
+ fields:
+ - label: Template
+ name: template
+ widget: hidden
+ default: post
+ - label: Title
+ name: title
+ widget: string
+ - label: 'Cover Image'
+ name: 'image'
+ widget: 'image'
+ required: false
+ - label: Publish Date
+ name: date
+ widget: datetime
+ format: 'YYYY-MM-DDTHH:mm'
+ default: '1970-01-01T01:00'
+ - label: Description
+ name: description
+ widget: text
+ - label: Category
+ name: category
+ widget: string
+ - label: Body
+ name: body
+ widget: markdown
+ - label: Tags
+ name: tags
+ widget: list
+ - name: pages
+ label: Pages
+ label_singular: 'Page'
+ folder: content/pages
+ create: true
+ slug: '{{slug}}'
+ fields:
+ - label: Template
+ name: template
+ widget: hidden
+ default: page
+ - label: Title
+ name: title
+ widget: string
+ - label: Draft
+ name: draft
+ widget: boolean
+ default: true
+ - label: Body
+ name: body
+ widget: markdown
diff --git a/dev-test/backends/forgejo/index.html b/dev-test/backends/forgejo/index.html
new file mode 100644
index 000000000000..dc20859bd218
--- /dev/null
+++ b/dev-test/backends/forgejo/index.html
@@ -0,0 +1,41 @@
+
+
+
+
+
+ Decap CMS Development Test
+
+
+
+
+
+
diff --git a/package-lock.json b/package-lock.json
index 36c5cefdb527..2929885c9130 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -11698,6 +11698,10 @@
"resolved": "packages/decap-cms-backend-bitbucket",
"link": true
},
+ "node_modules/decap-cms-backend-forgejo": {
+ "resolved": "packages/decap-cms-backend-forgejo",
+ "link": true
+ },
"node_modules/decap-cms-backend-git-gateway": {
"resolved": "packages/decap-cms-backend-git-gateway",
"link": true
@@ -33866,6 +33870,7 @@
"decap-cms-backend-aws-cognito-github-proxy": "^3.5.0",
"decap-cms-backend-azure": "^3.4.0",
"decap-cms-backend-bitbucket": "^3.3.0",
+ "decap-cms-backend-forgejo": "^3.3.0",
"decap-cms-backend-git-gateway": "^3.5.0",
"decap-cms-backend-gitea": "^3.3.0",
"decap-cms-backend-github": "^3.5.0",
@@ -34003,6 +34008,25 @@
"url": "https://github.com/sponsors/isaacs"
}
},
+ "packages/decap-cms-backend-forgejo": {
+ "version": "3.3.0",
+ "license": "MIT",
+ "dependencies": {
+ "js-base64": "^3.0.0",
+ "semaphore": "^1.1.0"
+ },
+ "peerDependencies": {
+ "@emotion/react": "^11.11.1",
+ "@emotion/styled": "^11.11.0",
+ "decap-cms-lib-auth": "^3.0.0",
+ "decap-cms-lib-util": "^3.0.0",
+ "decap-cms-ui-default": "^3.0.0",
+ "immutable": "^3.7.6",
+ "lodash": "^4.17.11",
+ "prop-types": "^15.7.2",
+ "react": "^19.1.0"
+ }
+ },
"packages/decap-cms-backend-git-gateway": {
"version": "3.5.0",
"license": "MIT",
diff --git a/packages/decap-cms-app/package.json b/packages/decap-cms-app/package.json
index a75d2dff6fe4..ac902d306f70 100644
--- a/packages/decap-cms-app/package.json
+++ b/packages/decap-cms-app/package.json
@@ -39,6 +39,7 @@
+ "decap-cms-backend-bitbucket": "^3.3.0",
+ "decap-cms-backend-forgejo": "^3.3.0",
+ "decap-cms-backend-git-gateway": "^3.5.0",
+ "decap-cms-backend-gitea": "^3.3.0",
+ "decap-cms-backend-github": "^3.5.0",
"decap-cms-backend-gitlab": "^3.4.0",
"decap-cms-backend-proxy": "^3.3.0",
@@ -77,4 +78,4 @@
"react-dom": "^19.1.0"
},
"incrementToForceBump": 2
-}
+}
\ No newline at end of file
diff --git a/packages/decap-cms-app/src/extensions.js b/packages/decap-cms-app/src/extensions.js
index eef9ebd77063..8e51c5dcda0d 100644
--- a/packages/decap-cms-app/src/extensions.js
+++ b/packages/decap-cms-app/src/extensions.js
@@ -6,6 +6,7 @@ import { AwsCognitoGitHubProxyBackend } from 'decap-cms-backend-aws-cognito-gith
import { GitHubBackend } from 'decap-cms-backend-github';
import { GitLabBackend } from 'decap-cms-backend-gitlab';
import { GiteaBackend } from 'decap-cms-backend-gitea';
+import { ForgejoBackend } from 'decap-cms-backend-forgejo';
import { GitGatewayBackend } from 'decap-cms-backend-git-gateway';
import { BitbucketBackend } from 'decap-cms-backend-bitbucket';
import { TestBackend } from 'decap-cms-backend-test';
@@ -38,6 +39,7 @@ CMS.registerBackend('aws-cognito-github-proxy', AwsCognitoGitHubProxyBackend);
CMS.registerBackend('github', GitHubBackend);
CMS.registerBackend('gitlab', GitLabBackend);
CMS.registerBackend('gitea', GiteaBackend);
+CMS.registerBackend('forgejo', ForgejoBackend);
CMS.registerBackend('bitbucket', BitbucketBackend);
CMS.registerBackend('test-repo', TestBackend);
CMS.registerBackend('proxy', ProxyBackend);
diff --git a/packages/decap-cms-backend-forgejo/CHANGELOG.md b/packages/decap-cms-backend-forgejo/CHANGELOG.md
new file mode 100644
index 000000000000..420e6f23d0e3
--- /dev/null
+++ b/packages/decap-cms-backend-forgejo/CHANGELOG.md
@@ -0,0 +1 @@
+# Change Log
diff --git a/packages/decap-cms-backend-forgejo/package.json b/packages/decap-cms-backend-forgejo/package.json
new file mode 100644
index 000000000000..ed69588e8e3a
--- /dev/null
+++ b/packages/decap-cms-backend-forgejo/package.json
@@ -0,0 +1,36 @@
+{
+ "name": "decap-cms-backend-forgejo",
+ "description": "Forgejo backend for Decap CMS",
+ "version": "3.3.0",
+ "repository": "https://github.com/decaporg/decap-cms/tree/main/packages/decap-cms-backend-forgejo",
+ "bugs": "https://github.com/decaporg/decap-cms/issues",
+ "license": "MIT",
+ "module": "dist/esm/index.js",
+ "main": "dist/decap-cms-backend-forgejo.js",
+ "keywords": [
+ "decap-cms",
+ "backend",
+ "forgejo"
+ ],
+ "sideEffects": false,
+ "scripts": {
+ "develop": "npm run build:esm -- --watch",
+ "build": "cross-env NODE_ENV=production webpack",
+ "build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
+ },
+ "dependencies": {
+ "js-base64": "^3.0.0",
+ "semaphore": "^1.1.0"
+ },
+ "peerDependencies": {
+ "@emotion/react": "^11.11.1",
+ "@emotion/styled": "^11.11.0",
+ "decap-cms-lib-auth": "^3.0.0",
+ "decap-cms-lib-util": "^3.0.0",
+ "decap-cms-ui-default": "^3.0.0",
+ "immutable": "^3.7.6",
+ "lodash": "^4.17.11",
+ "prop-types": "^15.7.2",
+ "react": "^19.1.0"
+ }
+}
\ No newline at end of file
diff --git a/packages/decap-cms-backend-forgejo/src/API.ts b/packages/decap-cms-backend-forgejo/src/API.ts
new file mode 100644
index 000000000000..ca24d18a82de
--- /dev/null
+++ b/packages/decap-cms-backend-forgejo/src/API.ts
@@ -0,0 +1,1045 @@
+import { Base64 } from 'js-base64';
+import trimStart from 'lodash/trimStart';
+import trim from 'lodash/trim';
+import result from 'lodash/result';
+import partial from 'lodash/partial';
+import {
+ APIError,
+ basename,
+ branchFromContentKey,
+ CMS_BRANCH_PREFIX,
+ DEFAULT_PR_BODY,
+ EditorialWorkflowError,
+ generateContentKey,
+ getAllResponses,
+ isCMSLabel,
+ labelToStatus,
+ localForage,
+ MERGE_COMMIT_MESSAGE,
+ parseContentKey,
+ readFileMetadata,
+ requestWithBackoff,
+ statusToLabel,
+ unsentRequest,
+} from 'decap-cms-lib-util';
+
+import type {
+ ApiRequest,
+ AssetProxy,
+ DataFile,
+ FetchError,
+ PersistOptions,
+} from 'decap-cms-lib-util';
+import type { Semaphore } from 'semaphore';
+import type {
+ FilesResponse,
+ ForgejoBranch,
+ ForgejoChangedFile,
+ ForgejoCompareResponse,
+ ForgejoLabel,
+ ForgejoPullRequest,
+ ForgejoRepository,
+ ForgejoUser,
+ GitGetBlobResponse,
+ GitGetTreeResponse,
+ ReposListCommitsResponse,
+} from './types';
+
+export const API_NAME = 'Forgejo';
+
+export const MOCK_PULL_REQUEST = -1;
+
+export interface Config {
+ apiRoot?: string;
+ token?: string;
+ branch?: string;
+ repo?: string;
+ originRepo?: string;
+ useOpenAuthoring?: boolean;
+ cmsLabelPrefix?: string;
+ initialWorkflowStatus?: string;
+}
+
+enum FileOperation {
+ CREATE = 'create',
+ DELETE = 'delete',
+ UPDATE = 'update',
+}
+
+export interface ChangeFileOperation {
+ content?: string;
+ from_path?: string;
+ path: string;
+ operation: FileOperation;
+ sha?: string;
+}
+
+interface MetaDataObjects {
+ entry: { path: string; sha: string };
+ files: MediaFile[];
+}
+
+export interface Metadata {
+ type: string;
+ objects: MetaDataObjects;
+ branch: string;
+ status: string;
+ collection: string;
+ commitMessage: string;
+ version?: string;
+ user: string;
+ title?: string;
+ description?: string;
+ timeStamp: string;
+}
+
+export interface BlobArgs {
+ sha: string;
+ repoURL: string;
+ parseText: boolean;
+}
+
+type Param = string | number | undefined;
+
+export type Options = RequestInit & {
+ params?: Record | string[]>;
+};
+
+type MediaFile = {
+ sha: string;
+ path: string;
+};
+
+export default class API {
+ apiRoot: string;
+ token: string;
+ branch: string;
+ repo: string;
+ originRepo: string;
+ repoOwner: string;
+ repoName: string;
+ originRepoOwner: string;
+ originRepoName: string;
+ repoURL: string;
+ originRepoURL: string;
+ useOpenAuthoring: boolean;
+ cmsLabelPrefix: string;
+ initialWorkflowStatus: string;
+
+ _userPromise?: Promise;
+ _metadataSemaphore?: Semaphore;
+
+ commitAuthor?: {};
+
+ constructor(config: Config) {
+ this.apiRoot = config.apiRoot || 'https://v14.next.forgejo.org/api/v1';
+ this.token = config.token || '';
+ this.branch = config.branch || 'master';
+ this.repo = config.repo || '';
+ this.originRepo = config.originRepo || this.repo;
+ this.useOpenAuthoring = !!config.useOpenAuthoring;
+ this.cmsLabelPrefix = config.cmsLabelPrefix || '';
+ this.initialWorkflowStatus = config.initialWorkflowStatus || 'draft';
+ this.repoURL = `/repos/${this.repo}`;
+ this.originRepoURL = `/repos/${this.originRepo}`;
+
+ const [repoParts, originRepoParts] = [this.repo.split('/'), this.originRepo.split('/')];
+ this.repoOwner = repoParts[0];
+ this.repoName = repoParts[1];
+
+ this.originRepoOwner = originRepoParts[0];
+ this.originRepoName = originRepoParts[1];
+ }
+
+ static DEFAULT_COMMIT_MESSAGE = 'Automatically generated by Static CMS';
+
+ user(): Promise<{ full_name: string; login: string; avatar_url: string }> {
+ if (!this._userPromise) {
+ this._userPromise = this.getUser();
+ }
+ return this._userPromise;
+ }
+
+ getUser() {
+ return this.request('/user') as Promise;
+ }
+
+ async hasWriteAccess() {
+ try {
+ const result: ForgejoRepository = await this.request(this.repoURL);
+ // update config repoOwner to avoid case sensitivity issues with Forgejo
+ this.repoOwner = result.owner.login;
+ return result.permissions.push;
+ } catch (error) {
+ console.error('Problem fetching repo data from Forgejo');
+ throw error;
+ }
+ }
+
+ reset() {
+ // no op
+ }
+
+ requestHeaders(headers = {}) {
+ const baseHeader: Record = {
+ 'Content-Type': 'application/json; charset=utf-8',
+ ...headers,
+ };
+
+ if (this.token) {
+ baseHeader.Authorization = `token ${this.token}`;
+ return Promise.resolve(baseHeader);
+ }
+
+ return Promise.resolve(baseHeader);
+ }
+
+ async parseJsonResponse(response: Response) {
+ const json = await response.json();
+ if (!response.ok) {
+ return Promise.reject(json);
+ }
+ return json;
+ }
+
+ urlFor(path: string, options: Options) {
+ const params = [];
+ if (options.params) {
+ for (const key in options.params) {
+ params.push(`${key}=${encodeURIComponent(options.params[key] as string)}`);
+ }
+ }
+ if (params.length) {
+ path += `?${params.join('&')}`;
+ }
+ return this.apiRoot + path;
+ }
+
+ parseResponse(response: Response) {
+ const contentType = response.headers.get('Content-Type');
+ if (contentType && contentType.match(/json/)) {
+ return this.parseJsonResponse(response);
+ }
+ const textPromise = response.text().then(text => {
+ if (!response.ok) {
+ return Promise.reject(text);
+ }
+ return text;
+ });
+ return textPromise;
+ }
+
+ handleRequestError(error: FetchError, responseStatus: number) {
+ throw new APIError(error.message, responseStatus, API_NAME);
+ }
+
+ buildRequest(req: ApiRequest) {
+ return req;
+ }
+
+ async request(
+ path: string,
+ options: Options = {},
+ parser = (response: Response) => this.parseResponse(response),
+ ) {
+ options = { cache: 'no-cache', ...options };
+ const headers = await this.requestHeaders(options.headers || {});
+ const url = this.urlFor(path, options);
+ let responseStatus = 500;
+
+ try {
+ const req = unsentRequest.fromFetchArguments(url, {
+ ...options,
+ headers,
+ }) as unknown as ApiRequest;
+ const response = await requestWithBackoff(this, req);
+ responseStatus = response.status;
+ const parsedResponse = await parser(response);
+ return parsedResponse;
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ } catch (error: any) {
+ return this.handleRequestError(error, responseStatus);
+ }
+ }
+
+ nextUrlProcessor() {
+ return (url: string) => url;
+ }
+
+ async requestAllPages(url: string, options: Options = {}) {
+ options = { cache: 'no-cache', ...options };
+ const headers = await this.requestHeaders(options.headers || {});
+ const processedURL = this.urlFor(url, options);
+ const allResponses = await getAllResponses(
+ processedURL,
+ { ...options, headers },
+ 'next',
+ this.nextUrlProcessor(),
+ );
+ const pages: T[][] = await Promise.all(
+ allResponses.map((res: Response) => this.parseResponse(res)),
+ );
+ return ([] as T[]).concat(...pages);
+ }
+
+ generateContentKey(collectionName: string, slug: string) {
+ const contentKey = generateContentKey(collectionName, slug);
+ if (!this.useOpenAuthoring) {
+ return contentKey;
+ }
+ return `${this.repo}/${contentKey}`;
+ }
+
+ parseContentKey(contentKey: string) {
+ if (!this.useOpenAuthoring) {
+ return parseContentKey(contentKey);
+ }
+
+ const repoPrefix = `${this.repo}/`;
+ // Some content keys may be prefixed with the origin repo instead of the fork repo.
+ const originRepoPrefix = this.originRepo ? `${this.originRepo}/` : null;
+
+ let keyToParse = contentKey;
+
+ if (contentKey.startsWith(repoPrefix)) {
+ keyToParse = contentKey.slice(repoPrefix.length);
+ } else if (originRepoPrefix && contentKey.startsWith(originRepoPrefix)) {
+ keyToParse = contentKey.slice(originRepoPrefix.length);
+ }
+
+ return parseContentKey(keyToParse);
+ }
+
+ async readFile(
+ path: string,
+ sha?: string | null,
+ {
+ branch = this.branch,
+ repoURL = this.repoURL,
+ parseText = true,
+ }: {
+ branch?: string;
+ repoURL?: string;
+ parseText?: boolean;
+ } = {},
+ ) {
+ if (!sha) {
+ sha = await this.getFileSha(path, { repoURL, branch });
+ }
+ const content = await this.fetchBlobContent({
+ sha: sha as string,
+ repoURL,
+ parseText,
+ });
+ return content;
+ }
+
+ async readFileMetadata(path: string, sha: string | null | undefined) {
+ const fetchFileMetadata = async () => {
+ try {
+ const result: ReposListCommitsResponse = await this.request(
+ `${this.originRepoURL}/commits`,
+ {
+ params: { path, sha: this.branch, stat: 'false' },
+ },
+ );
+ const { commit } = result[0];
+ return {
+ author: commit.author.name || commit.author.email,
+ updatedOn: commit.author.date,
+ };
+ } catch (e) {
+ return { author: '', updatedOn: '' };
+ }
+ };
+ const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
+ return fileMetadata;
+ }
+
+ async fetchBlobContent({ sha, repoURL, parseText }: BlobArgs) {
+ const result: GitGetBlobResponse = await this.request(`${repoURL}/git/blobs/${sha}`, {
+ cache: 'force-cache',
+ });
+
+ if (parseText) {
+ // treat content as a utf-8 string
+ const content = Base64.decode(result.content);
+ return content;
+ } else {
+ // treat content as binary and convert to blob
+ const content = Base64.atob(result.content);
+ const byteArray = new Uint8Array(content.length);
+ for (let i = 0; i < content.length; i++) {
+ byteArray[i] = content.charCodeAt(i);
+ }
+ const blob = new Blob([byteArray]);
+ return blob;
+ }
+ }
+
+ async listFiles(
+ path: string,
+ { repoURL = this.repoURL, branch = this.branch, depth = 1 } = {},
+ folderSupport?: boolean,
+ ): Promise<{ type: string; id: string; name: string; path: string; size: number }[]> {
+ const folder = trim(path, '/');
+ const hasFolder = Boolean(folder);
+ try {
+ const branchInfo = (await this.request(
+ `${repoURL}/branches/${encodeURIComponent(branch)}`,
+ )) as ForgejoBranch;
+ const treeSha = branchInfo.commit.id;
+ const useRecursive = depth > 1 || hasFolder;
+ const result: GitGetTreeResponse = await this.request(
+ `${repoURL}/git/trees/${encodeURIComponent(treeSha)}`,
+ {
+ // Use recursive tree when we need to filter by folder or deeper depth.
+ params: useRecursive ? { recursive: 1 } : {},
+ },
+ );
+ return (
+ result.tree
+ // filter only files and/or folders up to the required depth
+ .filter(file => {
+ if ((!folderSupport ? file.type === 'blob' : true) && file.path) {
+ if (!hasFolder) {
+ return file.path.split('/').length <= depth;
+ }
+
+ const relativePath = file.path.startsWith(`${folder}/`)
+ ? file.path.slice(folder.length + 1)
+ : file.path;
+ if (!relativePath) {
+ return false;
+ }
+ return relativePath.split('/').length <= depth;
+ }
+ return false;
+ })
+ .map(file => {
+ const relativePath =
+ hasFolder && file.path.startsWith(`${folder}/`)
+ ? file.path.slice(folder.length + 1)
+ : file.path;
+ return {
+ type: file.type,
+ id: file.sha,
+ name: basename(file.path),
+ path: hasFolder ? `${folder}/${relativePath}` : file.path,
+ size: file.size!,
+ };
+ })
+ );
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ } catch (err: any) {
+ if (err && err.status === 404) {
+ console.info('[StaticCMS] This 404 was expected and handled appropriately.');
+ return [];
+ } else {
+ throw err;
+ }
+ }
+ }
+
+ async persistFiles(dataFiles: DataFile[], mediaFiles: AssetProxy[], options: PersistOptions) {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ const files: (DataFile | AssetProxy)[] = mediaFiles.concat(dataFiles as any);
+ const operations = await this.getChangeFileOperations(files, this.branch);
+ return this.changeFiles(operations, options);
+ }
+
+ async changeFiles(operations: ChangeFileOperation[], options: PersistOptions) {
+ return (await this.request(`${this.repoURL}/contents`, {
+ method: 'POST',
+ body: JSON.stringify({
+ branch: this.branch,
+ files: operations,
+ message: options.commitMessage,
+ }),
+ })) as FilesResponse;
+ }
+
+ async getChangeFileOperations(files: (DataFile | AssetProxy)[], branch: string) {
+ const items: ChangeFileOperation[] = await Promise.all(
+ files.map(async file => {
+ const content = await result(
+ file,
+ 'toBase64',
+ partial(this.toBase64, (file as DataFile).raw),
+ );
+ let sha;
+ let operation;
+ let from_path;
+ let path = trimStart(file.path, '/');
+ try {
+ sha = await this.getFileSha(file.path, { branch });
+ operation = FileOperation.UPDATE;
+ const newPath = 'newPath' in file ? (file as DataFile).newPath : undefined;
+ from_path = newPath && path;
+ path = newPath ? trimStart(newPath, '/') : path;
+ } catch {
+ sha = undefined;
+ operation = FileOperation.CREATE;
+ }
+
+ return {
+ operation,
+ content,
+ path,
+ from_path,
+ sha,
+ } as ChangeFileOperation;
+ }),
+ );
+ return items;
+ }
+
+ async getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {
+ // Normalize path by removing leading slash if present
+ const normalizedPath = path.startsWith('/') ? path.slice(1) : path;
+ const encodedPath = normalizedPath
+ .split('/')
+ .map(segment => encodeURIComponent(segment))
+ .join('/');
+ const result = (await this.request(`${repoURL}/contents/${encodedPath}`, {
+ params: { ref: branch },
+ })) as { sha?: string };
+
+ if (result?.sha) {
+ return result.sha;
+ }
+
+ throw new APIError('Not Found', 404, API_NAME);
+ }
+
+ async deleteFiles(paths: string[], message: string) {
+ if (this.useOpenAuthoring) {
+ throw new APIError(
+ 'Cannot delete published entries as an Open Authoring user!',
+ 403,
+ API_NAME,
+ );
+ }
+
+ const operations: ChangeFileOperation[] = await Promise.all(
+ paths.map(async path => {
+ const sha = await this.getFileSha(path);
+
+ return {
+ operation: FileOperation.DELETE,
+ path,
+ sha,
+ } as ChangeFileOperation;
+ }),
+ );
+ return this.changeFiles(operations, { commitMessage: message });
+ }
+
+ toBase64(str: string) {
+ return Promise.resolve(Base64.encode(str));
+ }
+
+ async getBranch(branchName: string): Promise {
+ return this.request(`${this.repoURL}/branches/${encodeURIComponent(branchName)}`);
+ }
+
+ async getDefaultBranch(): Promise {
+ return this.getBranch(this.branch);
+ }
+
+ async createBranch(
+ branchName: string,
+ oldBranchName: string = this.branch,
+ ): Promise {
+ return this.request(`${this.repoURL}/branches`, {
+ method: 'POST',
+ body: JSON.stringify({
+ new_branch_name: branchName,
+ old_ref_name: oldBranchName,
+ }),
+ });
+ }
+
+ async deleteBranch(branchName: string): Promise {
+ await this.request(`${this.repoURL}/branches/${encodeURIComponent(branchName)}`, {
+ method: 'DELETE',
+ });
+ }
+
+ async getPullRequests(
+ state: 'open' | 'closed' | 'all' = 'open',
+ head?: string,
+ ): Promise {
+ const params: Record = { state };
+ const pullRequests = (await this.request(`${this.originRepoURL}/pulls`, {
+ params,
+ })) as ForgejoPullRequest[];
+ if (!head) {
+ return pullRequests;
+ }
+
+ return pullRequests.filter(pr => {
+ const label = pr.head?.label;
+ if (label) {
+ return label === head;
+ }
+ const repoOwner = pr.head?.repo?.owner?.login;
+ const ref = pr.head?.ref;
+ if (repoOwner && ref) {
+ return `${repoOwner}:${ref}` === head;
+ }
+ return false;
+ });
+ }
+
+ async getOpenAuthoringPullRequest(
+ branch: string,
+ pullRequests: ForgejoPullRequest[],
+ ): Promise<{ pullRequest: ForgejoPullRequest; branch: ForgejoBranch }> {
+ // we can't use labels when using open authoring
+ // since the contributor doesn't have access to set labels
+ // a branch without a pr (or a closed pr) means a 'draft' entry
+ // a branch with an opened pr means a 'pending_review' entry
+ const data = await this.getBranch(branch).catch(() => {
+ throw new EditorialWorkflowError('content is not under editorial workflow', true);
+ });
+ // since we get all (open and closed) pull requests by branch name, make sure to filter by head sha
+ const pullRequest = pullRequests.filter(pr => pr.head.sha === data.commit.id)[0];
+ if (!pullRequest) {
+ // if no pull request is found for the branch we return a mocked one
+ const mockPR: ForgejoPullRequest = {
+ number: MOCK_PULL_REQUEST,
+ state: 'open',
+ labels: [
+ {
+ name: statusToLabel(this.initialWorkflowStatus, this.cmsLabelPrefix),
+ } as ForgejoLabel,
+ ],
+ head: { ref: branch, sha: data.commit.id },
+ };
+ return {
+ pullRequest: mockPR,
+ branch: data,
+ };
+ }
+
+ // Filter out CMS labels for open authoring
+ const nonCmsLabels = pullRequest.labels.filter(l => !isCMSLabel(l.name, this.cmsLabelPrefix));
+
+ // Add synthetic CMS label based on PR state
+ const cmsLabel =
+ pullRequest.state === 'closed'
+ ? { name: statusToLabel(this.initialWorkflowStatus, this.cmsLabelPrefix) }
+ : { name: statusToLabel('pending_review', this.cmsLabelPrefix) };
+
+ const updatedPullRequest: ForgejoPullRequest = {
+ ...pullRequest,
+ labels: [...nonCmsLabels, cmsLabel as ForgejoLabel],
+ };
+
+ return { pullRequest: updatedPullRequest, branch: data };
+ }
+
+ async getBranchPullRequest(branchName: string): Promise {
+ if (this.useOpenAuthoring) {
+ const headRef = await this.getHeadReference(branchName);
+ const pullRequests = await this.getPullRequests('all', headRef);
+ const result = await this.getOpenAuthoringPullRequest(branchName, pullRequests);
+ return result.pullRequest;
+ }
+
+ const pullRequests = await this.getPullRequests('open', `${this.repoOwner}:${branchName}`);
+ const cmsPullRequests = pullRequests.filter(pr =>
+ pr.labels.some(l => isCMSLabel(l.name, this.cmsLabelPrefix)),
+ );
+ if (cmsPullRequests.length > 0) {
+ return cmsPullRequests[0];
+ }
+ throw new EditorialWorkflowError('content is not under editorial workflow', true);
+ }
+
+ async getHeadReference(head: string) {
+ return `${this.repoOwner}:${head}`;
+ }
+
+ async createPR(
+ title: string,
+ head: string,
+ body: string = DEFAULT_PR_BODY,
+ ): Promise {
+ return this.request(`${this.originRepoURL}/pulls`, {
+ method: 'POST',
+ body: JSON.stringify({
+ title,
+ head: await this.getHeadReference(head),
+ base: this.branch,
+ body,
+ }),
+ });
+ }
+
+ async updatePR(number: number, state: 'open' | 'closed'): Promise {
+ return this.request(`${this.originRepoURL}/pulls/${number}`, {
+ method: 'PATCH',
+ body: JSON.stringify({ state }),
+ });
+ }
+
+ async closePR(number: number): Promise {
+ return this.updatePR(number, 'closed');
+ }
+
+ async mergePR(pullRequest: ForgejoPullRequest): Promise {
+ await this.request(`${this.originRepoURL}/pulls/${pullRequest.number}/merge`, {
+ method: 'POST',
+ body: JSON.stringify({
+ Do: 'merge',
+ MergeMessageField: MERGE_COMMIT_MESSAGE,
+ }),
+ });
+ }
+
+ async getPullRequestFiles(number: number): Promise {
+ if (number === MOCK_PULL_REQUEST) {
+ return [];
+ }
+ return this.request(`${this.originRepoURL}/pulls/${number}/files`);
+ }
+
+ async getDifferences(from: string, to: string): Promise {
+ // For OA, try the fork repo first, then fall back to origin
+ const repoURL = this.useOpenAuthoring ? this.repoURL : this.originRepoURL;
+ try {
+ return await this.request(
+ `${repoURL}/compare/${encodeURIComponent(from)}...${encodeURIComponent(to)}`,
+ );
+ } catch (e) {
+ if (this.useOpenAuthoring) {
+ // Retry with origin repo
+ return this.request(
+ `${this.originRepoURL}/compare/${encodeURIComponent(from)}...${encodeURIComponent(to)}`,
+ );
+ }
+ throw e;
+ }
+ }
+
+ async updatePullRequestLabels(number: number, labels: number[]): Promise {
+ return this.request(`${this.originRepoURL}/issues/${number}/labels`, {
+ method: 'PUT',
+ body: JSON.stringify({ labels }),
+ });
+ }
+
+ async getLabels(): Promise {
+ return this.request(`${this.originRepoURL}/labels`);
+ }
+
+ async createLabel(name: string, color = '0052cc'): Promise {
+ return this.request(`${this.originRepoURL}/labels`, {
+ method: 'POST',
+ body: JSON.stringify({ name, color }),
+ });
+ }
+
+ async getOrCreateLabel(name: string): Promise {
+ const labels = await this.getLabels();
+ const existing = labels.find(l => l.name === name);
+ if (existing) {
+ return existing;
+ }
+ return this.createLabel(name);
+ }
+
+ async setPullRequestStatus(pullRequest: ForgejoPullRequest, status: string): Promise {
+ // Skip label updates for open authoring as contributors don't have permission
+ // Also skip for mock PRs (no real PR exists yet)
+ if (this.useOpenAuthoring || pullRequest.number === MOCK_PULL_REQUEST) {
+ return;
+ }
+
+ const newLabel = statusToLabel(status, this.cmsLabelPrefix);
+
+ // Get or create the new status label
+ const label = await this.getOrCreateLabel(newLabel);
+
+ if (typeof label.id !== 'number') {
+ throw new Error(
+ `Status label "${label.name}" returned from getOrCreateLabel is missing a numeric id`,
+ );
+ }
+
+ // Get current labels and filter out old CMS labels and labels without ids
+ const currentLabels = pullRequest.labels
+ .filter(l => !isCMSLabel(l.name, this.cmsLabelPrefix))
+ .filter(l => typeof l.id === 'number')
+ .map(l => l.id as number);
+
+ // Add the new status label
+ await this.updatePullRequestLabels(pullRequest.number, [...currentLabels, label.id]);
+ }
+
+ async getOpenAuthoringBranches(): Promise {
+ const branches: ForgejoBranch[] = await this.requestAllPages(`${this.repoURL}/branches`);
+ const prefix = `${CMS_BRANCH_PREFIX}/${this.repo}/`;
+ return branches.filter(b => b.name.startsWith(prefix));
+ }
+
+ filterOpenAuthoringBranches = async (branch: string) => {
+ try {
+ const pullRequest = await this.getBranchPullRequest(branch);
+ const { state: currentState, merged_at: mergedAt } = pullRequest as ForgejoPullRequest;
+ if (pullRequest.number !== MOCK_PULL_REQUEST && currentState === 'closed' && mergedAt) {
+ // PR was merged, delete the branch
+ await this.deleteBranch(branch);
+ return { branch, filter: false };
+ } else {
+ return { branch, filter: true };
+ }
+ } catch (e) {
+ // Only filter out branches for expected "not found / not under workflow" errors.
+ // For other errors (e.g. transient network/API issues), keep the branch.
+ if (e instanceof APIError && e.status === 404) {
+ return { branch, filter: false };
+ }
+ if (e instanceof EditorialWorkflowError) {
+ return { branch, filter: false };
+ }
+ return { branch, filter: true };
+ }
+ };
+
+ async listUnpublishedBranches(): Promise {
+ if (this.useOpenAuthoring) {
+ // OA branches can exist without a PR
+ const cmsBranches = await this.getOpenAuthoringBranches();
+ let branches = cmsBranches.map(b => b.name);
+ const branchesWithFilter = await Promise.all(
+ branches.map(b => this.filterOpenAuthoringBranches(b)),
+ );
+ branches = branchesWithFilter.filter(b => b.filter).map(b => b.branch);
+ return branches;
+ }
+
+ // Standard mode: filter PRs by CMS labels
+ const pullRequests = await this.getPullRequests('open');
+ const cmsBranches = pullRequests
+ .filter(
+ pr =>
+ pr.head.ref.startsWith(`${CMS_BRANCH_PREFIX}/`) &&
+ pr.labels.some(l => isCMSLabel(l.name, this.cmsLabelPrefix)),
+ )
+ .map(pr => pr.head.ref);
+
+ return cmsBranches;
+ }
+
+ async retrieveUnpublishedEntryData(contentKey: string) {
+ const branch = branchFromContentKey(contentKey);
+ let pullRequest: ForgejoPullRequest;
+ let branchData: ForgejoBranch | null = null;
+
+ if (this.useOpenAuthoring) {
+ const headRef = await this.getHeadReference(branch);
+ const pullRequests = await this.getPullRequests('all', headRef);
+ const openAuthoringResult = await this.getOpenAuthoringPullRequest(branch, pullRequests);
+ pullRequest = openAuthoringResult.pullRequest;
+ branchData = openAuthoringResult.branch;
+ } else {
+ pullRequest = await this.getBranchPullRequest(branch);
+ }
+
+ // Try getDifferences first (provides SHAs), fall back to getPullRequestFiles
+ let diffs: { path: string; newFile: boolean; id: string }[];
+ try {
+ const headRef = await this.getHeadReference(branch);
+ const compareResult = await this.getDifferences(this.branch, headRef);
+ diffs = compareResult.files.map(file => ({
+ path: file.filename,
+ newFile: file.status === 'added',
+ id: file.sha || '',
+ }));
+ } catch (e) {
+ const files = await this.getPullRequestFiles(pullRequest.number);
+ diffs = files.map(file => ({
+ path: file.filename,
+ newFile: file.status === 'added',
+ id: '',
+ }));
+ }
+
+ // Both OA and standard PRs now have synthetic CMS labels, so use unified label-based lookup
+ const statusLabel = pullRequest.labels.find(l => isCMSLabel(l.name, this.cmsLabelPrefix));
+ const status = statusLabel
+ ? labelToStatus(statusLabel.name, this.cmsLabelPrefix)
+ : this.initialWorkflowStatus;
+
+ const { collection, slug } = this.parseContentKey(contentKey);
+
+ return {
+ collection,
+ slug,
+ status,
+ diffs,
+ updatedAt:
+ pullRequest?.updated_at ||
+ branchData?.commit?.author?.date ||
+ branchData?.commit?.committer?.date ||
+ new Date().toISOString(),
+ pullRequestAuthor: pullRequest?.user?.login || branchData?.commit?.author?.name || 'Unknown',
+ };
+ }
+
+ async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
+ const contentKey = this.generateContentKey(collection, slug);
+ const branch = branchFromContentKey(contentKey);
+ const pullRequest = await this.getBranchPullRequest(branch);
+
+ if (!this.useOpenAuthoring) {
+ await this.setPullRequestStatus(pullRequest, newStatus);
+ return;
+ }
+
+ // Open authoring path
+ if (newStatus === 'pending_publish') {
+ throw new Error('Open Authoring entries may not be set to the status "pending_publish".');
+ }
+
+ if (pullRequest.number !== MOCK_PULL_REQUEST) {
+ const { state } = pullRequest;
+ if (state === 'open' && newStatus === 'draft') {
+ await this.closePR(pullRequest.number);
+ }
+ if (state === 'closed' && newStatus === 'pending_review') {
+ await this.updatePR(pullRequest.number, 'open');
+ }
+ } else if (newStatus === 'pending_review') {
+ // Mock PR: create a real PR
+ const diff = await this.getDifferences(this.branch, await this.getHeadReference(branch));
+ const title = diff.commits[0]?.commit?.message || API.DEFAULT_COMMIT_MESSAGE;
+ await this.createPR(title, branch);
+ }
+ }
+
+ async deleteUnpublishedEntry(collection: string, slug: string) {
+ const contentKey = this.generateContentKey(collection, slug);
+ const branch = branchFromContentKey(contentKey);
+
+ try {
+ const pullRequest = await this.getBranchPullRequest(branch);
+ if (pullRequest.number !== MOCK_PULL_REQUEST) {
+ await this.closePR(pullRequest.number);
+ }
+ } catch (e) {
+ // Only ignore expected errors (e.g. no PR / not under editorial workflow).
+ if (e instanceof EditorialWorkflowError || (e instanceof APIError && e.status === 404)) {
+ // PR might not exist or entry is not under editorial workflow; continue to delete branch.
+ } else {
+ // Unexpected error: rethrow so we don't delete the branch in an unknown state.
+ throw e;
+ }
+ }
+
+ await this.deleteBranch(branch);
+ }
+
+ async publishUnpublishedEntry(collection: string, slug: string) {
+ const contentKey = this.generateContentKey(collection, slug);
+ const branch = branchFromContentKey(contentKey);
+
+ const pullRequest = await this.getBranchPullRequest(branch);
+ if (pullRequest.number === MOCK_PULL_REQUEST) {
+ throw new APIError('Cannot publish entry without a pull request', 400, API_NAME);
+ }
+ await this.mergePR(pullRequest);
+ await this.deleteBranch(branch);
+ }
+
+ async editorialWorkflowGit(
+ files: (DataFile | AssetProxy)[],
+ slug: string,
+ collection: string,
+ options: PersistOptions,
+ ) {
+ const contentKey = this.generateContentKey(collection, slug);
+ const branch = branchFromContentKey(contentKey);
+
+ let branchExists = false;
+ try {
+ await this.getBranch(branch);
+ branchExists = true;
+ } catch (e) {
+ // Only treat a 404 "not found" as the branch not existing; rethrow other errors.
+ if (!(e instanceof APIError && e.status === 404)) {
+ throw e;
+ }
+ }
+
+ if (!branchExists) {
+ // Create the branch from the default branch
+ await this.createBranch(branch, this.branch);
+ }
+
+ // Persist files to the branch
+ const operations = await this.getChangeFileOperations(files, branch);
+ await this.changeFilesOnBranch(operations, options, branch);
+
+ // For open authoring, don't create a PR - entries start as branch-only (draft).
+ // PRs are created later via updateUnpublishedEntryStatus when moving to pending_review.
+ if (!branchExists && !this.useOpenAuthoring) {
+ const pr = await this.createPR(options.commitMessage, branch);
+ const status = options.status || this.initialWorkflowStatus;
+ await this.setPullRequestStatus(pr, status);
+ }
+ }
+
/**
 * Commits a batch of file operations (create/update/delete) to the given
 * branch in a single request via the repo contents endpoint.
 */
async changeFilesOnBranch(
  operations: ChangeFileOperation[],
  options: PersistOptions,
  branch: string,
) {
  const body = JSON.stringify({
    branch,
    files: operations,
    message: options.commitMessage,
  });
  const response = await this.request(`${this.repoURL}/contents`, { method: 'POST', body });
  return response as FilesResponse;
}
+
+ // Open Authoring (Fork) Support
+ async forkExists(): Promise {
+ try {
+ const repoName = this.originRepo.split('/')[1];
+ const userRepoPath = `/repos/${this.repoOwner}/${repoName}`;
+ const repo = (await this.request(userRepoPath)) as ForgejoRepository;
+
+ // Check if it's a fork and the parent is the origin repo
+ const forkExists: boolean =
+ repo.fork === true &&
+ !!repo.parent &&
+ repo.parent.full_name.toLowerCase() === this.originRepo.toLowerCase();
+ return forkExists;
+ } catch {
+ return false;
+ }
+ }
+
+ async createFork(): Promise {
+ return this.request(`${this.originRepoURL}/forks`, {
+ method: 'POST',
+ }) as Promise;
+ }
+
+ async mergeUpstream(): Promise {
+ try {
+ await this.request(`${this.repoURL}/sync_fork`, {
+ method: 'POST',
+ });
+ } catch (error) {
+ // continue without syncing - user will need to sync manually
+ console.warn('Failed to sync fork with upstream:', error);
+ }
+ }
+}
diff --git a/packages/decap-cms-backend-forgejo/src/AuthenticationPage.js b/packages/decap-cms-backend-forgejo/src/AuthenticationPage.js
new file mode 100644
index 000000000000..1b4c7b68c283
--- /dev/null
+++ b/packages/decap-cms-backend-forgejo/src/AuthenticationPage.js
@@ -0,0 +1,195 @@
+import React from 'react';
+import PropTypes from 'prop-types';
+import styled from '@emotion/styled';
+import { PkceAuthenticator } from 'decap-cms-lib-auth';
+import { AuthenticationPage, Icon } from 'decap-cms-ui-default';
+
// Icon rendered inside the login button, spaced away from the button label.
const LoginButtonIcon = styled(Icon)`
  margin-right: 18px;
`;

// Vertical layout for the fork-approval prompt shown during Open Authoring login.
const ForkApprovalContainer = styled.div`
  display: flex;
  flex-flow: column nowrap;
  justify-content: space-around;
  flex-grow: 0.2;
`;
// Stacks the approve/refuse fork action buttons vertically, centered.
const ForkButtonsContainer = styled.div`
  display: flex;
  flex-flow: column nowrap;
  justify-content: space-around;
  align-items: center;
`;
+
/**
 * Login page for the Forgejo backend. Performs PKCE OAuth against the
 * configured Forgejo instance and, when Open Authoring is enabled, walks the
 * user through finding/creating a fork before completing login.
 *
 * NOTE(review): several JSX spans in this class appear to have been stripped
 * by text extraction (empty lines where elements belong); the surviving
 * expressions are kept byte-identical below and marked — restore the markup
 * from upstream before shipping.
 */
export default class ForgejoAuthenticationPage extends React.Component {
  static propTypes = {
    inProgress: PropTypes.bool,
    config: PropTypes.object.isRequired,
    onLogin: PropTypes.func.isRequired,
    t: PropTypes.func.isRequired,
    backend: PropTypes.object,
  };

  // Holds loginError, findingFork, requestingFork, approveFork, refuseFork.
  state = {};

  componentDidMount() {
    // Manually validate PropTypes - React 19 breaking change
    PropTypes.checkPropTypes(
      ForgejoAuthenticationPage.propTypes,
      this.props,
      'prop',
      'ForgejoAuthenticationPage',
    );

    // Defaults to the public v14.next.forgejo.org instance when no base_url
    // is configured on the backend.
    const { base_url = 'https://v14.next.forgejo.org', app_id = '' } = this.props.config.backend;
    this.auth = new PkceAuthenticator({
      base_url,
      auth_endpoint: 'login/oauth/authorize',
      app_id,
      auth_token_endpoint: 'login/oauth/access_token',
      auth_token_endpoint_content_type: 'application/json; charset=utf-8',
    });
    // Complete authentication if we were redirected back to from the provider.
    this.auth.completeAuth((err, data) => {
      if (err) {
        this.setState({ loginError: err.toString() });
        return;
      } else if (data) {
        const { open_authoring: openAuthoring = false } = this.props.config.backend;
        if (openAuthoring) {
          // Open Authoring: ensure a fork exists before finishing login.
          return this.loginWithOpenAuthoring(data)
            .then(() => this.props.onLogin(data))
            .catch(error => {
              this.setState({
                loginError: error && error.toString ? error.toString() : String(error),
                findingFork: false,
                requestingFork: false,
              });
            });
        }
        this.props.onLogin(data);
      }
    });
  }

  // Returns a promise that resolves when the user approves creating a fork
  // and rejects when they decline. The resolve/reject callbacks are stashed
  // in state so the render layer can wire them to the approve/refuse buttons.
  getPermissionToFork = () => {
    return new Promise((resolve, reject) => {
      this.setState({
        requestingFork: true,
        approveFork: () => {
          this.setState({ requestingFork: false });
          resolve();
        },
        refuseFork: () => {
          this.setState({ requestingFork: false });
          reject(new Error('User declined to create a fork'));
        },
      });
    });
  };

  // Open Authoring login: delegates fork discovery/creation to the backend,
  // passing getPermissionToFork so the user is prompted before a fork is made.
  loginWithOpenAuthoring(data) {
    const { backend } = this.props;

    if (!backend) {
      const error = new Error('Backend prop is required for open authoring login flow');
      return Promise.reject(error);
    }

    this.setState({ findingFork: true });
    return backend
      .authenticateWithFork({ userData: data, getPermissionToFork: this.getPermissionToFork })
      .catch(err => {
        console.error(err);
        throw err;
      })
      .finally(() => {
        // Clear all transient fork-flow state regardless of outcome.
        this.setState({
          findingFork: false,
          requestingFork: false,
          approveFork: undefined,
          refuseFork: undefined,
        });
      });
  }

  // Click handler for the login button: runs the PKCE flow, then the same
  // Open Authoring branch as the redirect-completion path above.
  handleLogin = e => {
    e.preventDefault();
    const { open_authoring: openAuthoring = false } = this.props.config.backend;
    this.auth.authenticate({ scope: 'repository' }, (err, data) => {
      if (err) {
        this.setState({ loginError: err.toString() });
        return;
      }
      if (openAuthoring) {
        return this.loginWithOpenAuthoring(data)
          .then(() => this.props.onLogin(data))
          .catch(error => {
            this.setState({
              loginError: error && error.toString ? error.toString() : String(error),
              findingFork: false,
              requestingFork: false,
            });
          });
      }
      this.props.onLogin(data);
    });
  };

  // Renders either a "logging in" message or the login button content.
  renderLoginButton = () => {
    const { inProgress, t } = this.props;
    return inProgress || this.state.findingFork ? (
      t('auth.loggingIn')
    ) : (
      // NOTE(review): JSX elements stripped by extraction here (likely a
      // fragment wrapping a LoginButtonIcon and the label) — restore upstream.

      {t('auth.loginWithForgejo')}

    );
  };

  // Builds the render args for AuthenticationPage: a full-page fork-approval
  // prompt while requestingFork is set, otherwise just the login button.
  getAuthenticationPageRenderArgs() {
    const { requestingFork } = this.state;

    if (requestingFork) {
      const { approveFork, refuseFork } = this.state;
      return {
        renderPageContent: ({ LoginButton, TextButton, showAbortButton }) => (
          // NOTE(review): JSX elements stripped by extraction here (likely
          // ForkApprovalContainer / ForkButtonsContainer wrapping the text and
          // LoginButton/TextButton actions) — restore upstream.

          Open Authoring is enabled: we need to use a fork on your Forgejo account. (If a fork
          already exists, we'll use that.)

          Fork the repo
          {showAbortButton && (
            Don't fork the repo
          )}

        ),
      };
    }

    return {
      renderButtonContent: this.renderLoginButton,
    };
  }

  render() {
    const { config, t } = this.props;
    const authenticationPageRenderArgs = this.getAuthenticationPageRenderArgs();
    return (
      // NOTE(review): the AuthenticationPage element was stripped by
      // extraction — it presumably receives config, t, onLogin/handleLogin,
      // loginError and authenticationPageRenderArgs; restore upstream.

    );
  }
}
diff --git a/packages/decap-cms-backend-forgejo/src/__tests__/API.spec.js b/packages/decap-cms-backend-forgejo/src/__tests__/API.spec.js
new file mode 100644
index 000000000000..d82c09ec2d43
--- /dev/null
+++ b/packages/decap-cms-backend-forgejo/src/__tests__/API.spec.js
@@ -0,0 +1,1422 @@
+import { Base64 } from 'js-base64';
+import { APIError, EditorialWorkflowError } from 'decap-cms-lib-util';
+
+import API, { MOCK_PULL_REQUEST } from '../API';
+
+global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));
+
+describe('forgejo API', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
// Replaces api.request with a stub that dispatches on the path (query string
// ignored) to a map of handler functions; unknown paths reject.
function mockAPI(api, responses) {
  api.request = jest.fn().mockImplementation((path, options = {}) => {
    const normalizedPath = path.split('?')[0];
    const handler = responses[normalizedPath];
    if (typeof handler !== 'function') {
      return Promise.reject(new Error(`No response for path '${normalizedPath}'`));
    }
    return Promise.resolve(handler(options));
  });
}
+
// Round-trip coverage for content keys: standard keys are "collection/slug",
// Open Authoring keys are prefixed with the contributor's "owner/repo".
describe('generateContentKey and parseContentKey', () => {
  it('should generate standard content key without OA', () => {
    const api = new API({ branch: 'master', repo: 'owner/repo' });
    const key = api.generateContentKey('posts', 'my-post');
    expect(key).toEqual('posts/my-post');
  });

  it('should generate OA content key with repo prefix', () => {
    const api = new API({
      branch: 'master',
      repo: 'contributor/repo',
      originRepo: 'owner/repo',
      useOpenAuthoring: true,
    });
    const key = api.generateContentKey('posts', 'my-post');
    expect(key).toEqual('contributor/repo/posts/my-post');
  });

  it('should parse standard content key without OA', () => {
    const api = new API({ branch: 'master', repo: 'owner/repo' });
    const result = api.parseContentKey('posts/my-post');
    expect(result).toEqual({ collection: 'posts', slug: 'my-post' });
  });

  it('should parse OA content key by stripping repo prefix', () => {
    const api = new API({
      branch: 'master',
      repo: 'contributor/repo',
      originRepo: 'owner/repo',
      useOpenAuthoring: true,
    });
    const result = api.parseContentKey('contributor/repo/posts/my-post');
    expect(result).toEqual({ collection: 'posts', slug: 'my-post' });
  });
});
+
// Head references use the "owner:branch" form required for cross-repo PRs.
describe('getHeadReference', () => {
  it('should return owner:branch format', async () => {
    const api = new API({ branch: 'master', repo: 'owner/repo' });
    const ref = await api.getHeadReference('cms/posts/test');
    expect(ref).toEqual('owner:cms/posts/test');
  });
});
+
// Editorial-workflow persistence: branch creation, file commits, and PR
// creation (skipped for existing branches and for Open Authoring drafts).
describe('editorialWorkflowGit', () => {
  it('should create PR with correct branch when publishing with editorial workflow', async () => {
    const api = new API({
      branch: 'master',
      repo: 'owner/my-repo',
    });

    // Mock getBranch to indicate branch doesn't exist yet
    api.getBranch = jest.fn().mockRejectedValue(new APIError('Branch not found', 404, 'Forgejo'));
    api.createBranch = jest.fn().mockResolvedValue({ name: 'cms/posts/entry' });

    const changeOperations = [{ operation: 'create', path: 'content.md', content: 'test' }];
    api.getChangeFileOperations = jest.fn().mockResolvedValue(changeOperations);
    api.changeFilesOnBranch = jest.fn().mockResolvedValue({});

    const newPr = { number: 1, labels: [], head: { ref: 'cms/posts/entry' } };
    api.createPR = jest.fn().mockResolvedValue(newPr);
    api.setPullRequestStatus = jest.fn().mockResolvedValue();

    const files = [{ path: 'content.md', raw: 'test content' }];
    const options = { commitMessage: 'Add entry', status: 'draft' };

    await api.editorialWorkflowGit(files, 'entry', 'posts', options);

    expect(api.getBranch).toHaveBeenCalledWith('cms/posts/entry');
    expect(api.createBranch).toHaveBeenCalledWith('cms/posts/entry', 'master');
    expect(api.getChangeFileOperations).toHaveBeenCalledWith(files, 'cms/posts/entry');
    expect(api.changeFilesOnBranch).toHaveBeenCalledWith(
      changeOperations,
      options,
      'cms/posts/entry',
    );
    expect(api.createPR).toHaveBeenCalledWith('Add entry', 'cms/posts/entry');
    expect(api.setPullRequestStatus).toHaveBeenCalledWith(newPr, 'draft');
  });

  it('should not create branch if it already exists', async () => {
    const api = new API({
      branch: 'master',
      repo: 'owner/my-repo',
    });

    // Mock getBranch to indicate branch already exists
    api.getBranch = jest.fn().mockResolvedValue({ name: 'cms/posts/entry' });
    api.createBranch = jest.fn();

    const changeOperations = [{ operation: 'update', path: 'content.md', content: 'updated' }];
    api.getChangeFileOperations = jest.fn().mockResolvedValue(changeOperations);
    api.changeFilesOnBranch = jest.fn().mockResolvedValue({});

    api.createPR = jest.fn();
    api.setPullRequestStatus = jest.fn();

    const files = [{ path: 'content.md', raw: 'updated content' }];
    const options = { commitMessage: 'Update entry' };

    await api.editorialWorkflowGit(files, 'entry', 'posts', options);

    expect(api.getBranch).toHaveBeenCalledWith('cms/posts/entry');
    expect(api.createBranch).not.toHaveBeenCalled();
    expect(api.createPR).not.toHaveBeenCalled();
    expect(api.setPullRequestStatus).not.toHaveBeenCalled();
  });

  it('should not create PR for open authoring (branch-only draft)', async () => {
    const api = new API({
      branch: 'master',
      repo: 'contributor/repo',
      originRepo: 'owner/repo',
      useOpenAuthoring: true,
    });

    api.getBranch = jest.fn().mockRejectedValue(new APIError('Branch not found', 404, 'Forgejo'));
    api.createBranch = jest.fn().mockResolvedValue({ name: 'cms/contributor/repo/posts/entry' });

    const changeOperations = [{ operation: 'create', path: 'content.md', content: 'test' }];
    api.getChangeFileOperations = jest.fn().mockResolvedValue(changeOperations);
    api.changeFilesOnBranch = jest.fn().mockResolvedValue({});

    api.createPR = jest.fn();
    api.setPullRequestStatus = jest.fn();

    const files = [{ path: 'content.md', raw: 'test content' }];
    const options = { commitMessage: 'Add entry', status: 'draft' };

    await api.editorialWorkflowGit(files, 'entry', 'posts', options);

    expect(api.createBranch).toHaveBeenCalled();
    expect(api.createPR).not.toHaveBeenCalled();
    expect(api.setPullRequestStatus).not.toHaveBeenCalled();
  });
});
+
// Low-level request(): URL construction against the default instance,
// auth headers, error mapping, and async requestHeaders overrides.
// NOTE(review): fixture branch names 'gt-pages' below look like typos of
// 'gh-pages' — harmless to the assertions, but confirm intent upstream.
describe('request', () => {
  const fetch = jest.fn();
  beforeEach(() => {
    global.fetch = fetch;
  });

  afterEach(() => {
    jest.clearAllMocks();
  });

  it('should fetch url with authorization header', async () => {
    const api = new API({ branch: 'gh-pages', repo: 'owner/my-repo', token: 'token' });

    fetch.mockResolvedValue({
      text: jest.fn().mockResolvedValue('some response'),
      ok: true,
      status: 200,
      headers: { get: () => '' },
    });
    const result = await api.request('/some-path');
    expect(result).toEqual('some response');
    expect(fetch).toHaveBeenCalledTimes(1);
    expect(fetch).toHaveBeenCalledWith(
      'https://v14.next.forgejo.org/api/v1/some-path',
      expect.objectContaining({
        cache: 'no-cache',
        headers: {
          Authorization: 'token token',
          'Content-Type': 'application/json; charset=utf-8',
        },
      }),
    );
  });

  it('should throw error on not ok response', async () => {
    const api = new API({ branch: 'gt-pages', repo: 'owner/my-repo', token: 'token' });

    fetch.mockResolvedValue({
      text: jest.fn().mockResolvedValue({ message: 'some error' }),
      ok: false,
      status: 404,
      headers: { get: () => '' },
    });

    await expect(api.request('some-path')).rejects.toThrow(
      expect.objectContaining({
        message: 'some error',
        name: 'API_ERROR',
        status: 404,
        api: 'Forgejo',
      }),
    );
  });

  it('should allow overriding requestHeaders to return a promise ', async () => {
    const api = new API({ branch: 'gt-pages', repo: 'owner/my-repo', token: 'token' });

    api.requestHeaders = jest.fn().mockResolvedValue({
      Authorization: 'promise-token',
      'Content-Type': 'application/json; charset=utf-8',
    });

    fetch.mockResolvedValue({
      text: jest.fn().mockResolvedValue('some response'),
      ok: true,
      status: 200,
      headers: { get: () => '' },
    });
    const result = await api.request('/some-path');
    expect(result).toEqual('some response');
    expect(fetch).toHaveBeenCalledTimes(1);
    expect(fetch).toHaveBeenCalledWith(
      'https://v14.next.forgejo.org/api/v1/some-path',
      expect.objectContaining({
        cache: 'no-cache',
        headers: {
          Authorization: 'promise-token',
          'Content-Type': 'application/json; charset=utf-8',
        },
      }),
    );
  });
});
+
// persistFiles: existing files are looked up for their SHA (update op),
// missing ones become create ops; all are committed via one POST /contents.
describe('persistFiles', () => {
  it('should create a new commit', async () => {
    const api = new API({ branch: 'master', repo: 'owner/repo' });

    const responses = {
      '/repos/owner/repo/contents/content/posts/update-post.md': () => {
        return { sha: 'old-sha' };
      },

      '/repos/owner/repo/contents': () => ({
        commit: { sha: 'new-sha' },
        files: [
          {
            path: 'content/posts/new-post.md',
          },
          {
            path: 'content/posts/update-post.md',
          },
        ],
      }),
    };
    mockAPI(api, responses);

    const entry = {
      dataFiles: [
        {
          slug: 'entry',
          path: 'content/posts/new-post.md',
          raw: 'content',
        },
        {
          slug: 'entry',
          sha: 'old-sha',
          path: 'content/posts/update-post.md',
          raw: 'content',
        },
      ],
      assets: [],
    };
    await expect(
      api.persistFiles(entry.dataFiles, entry.assets, {
        commitMessage: 'commitMessage',
        newEntry: true,
      }),
    ).resolves.toEqual({
      commit: { sha: 'new-sha' },
      files: [
        {
          path: 'content/posts/new-post.md',
        },
        {
          path: 'content/posts/update-post.md',
        },
      ],
    });

    expect(api.request).toHaveBeenCalledTimes(3);

    expect(api.request.mock.calls[0]).toEqual([
      '/repos/owner/repo/contents/content/posts/new-post.md',
      { params: { ref: 'master' } },
    ]);

    expect(api.request.mock.calls[1]).toEqual([
      '/repos/owner/repo/contents/content/posts/update-post.md',
      { params: { ref: 'master' } },
    ]);

    expect(api.request.mock.calls[2]).toEqual([
      '/repos/owner/repo/contents',
      {
        method: 'POST',
        body: JSON.stringify({
          branch: 'master',
          files: [
            {
              operation: 'create',
              content: Base64.encode(entry.dataFiles[0].raw),
              path: entry.dataFiles[0].path,
            },
            {
              operation: 'update',
              content: Base64.encode(entry.dataFiles[1].raw),
              path: entry.dataFiles[1].path,
              sha: entry.dataFiles[1].sha,
            },
          ],
          message: 'commitMessage',
        }),
      },
    ]);
  });
});
+
// deleteFiles: each target file's SHA is fetched, then a single batch POST
// issues delete operations; Open Authoring users are rejected with 403.
describe('deleteFiles', () => {
  it('should check if files exist and delete them', async () => {
    const api = new API({ branch: 'master', repo: 'owner/repo' });

    const responses = {
      '/repos/owner/repo/contents/content/posts/delete-post-1.md': () => {
        return { sha: 'old-sha-1' };
      },
      '/repos/owner/repo/contents/content/posts/delete-post-2.md': () => {
        return { sha: 'old-sha-2' };
      },

      '/repos/owner/repo/contents': () => ({
        commit: { sha: 'new-sha' },
        files: [
          {
            path: 'content/posts/delete-post-1.md',
          },
          {
            path: 'content/posts/delete-post-2.md',
          },
        ],
      }),
    };
    mockAPI(api, responses);

    const deleteFiles = ['content/posts/delete-post-1.md', 'content/posts/delete-post-2.md'];

    await api.deleteFiles(deleteFiles, 'commitMessage');

    expect(api.request).toHaveBeenCalledTimes(3);

    expect(api.request.mock.calls[0]).toEqual([
      '/repos/owner/repo/contents/content/posts/delete-post-1.md',
      { params: { ref: 'master' } },
    ]);

    expect(api.request.mock.calls[1]).toEqual([
      '/repos/owner/repo/contents/content/posts/delete-post-2.md',
      { params: { ref: 'master' } },
    ]);

    expect(api.request.mock.calls[2]).toEqual([
      '/repos/owner/repo/contents',
      {
        method: 'POST',
        body: JSON.stringify({
          branch: 'master',
          files: [
            {
              operation: 'delete',
              path: deleteFiles[0],
              sha: 'old-sha-1',
            },
            {
              operation: 'delete',
              path: deleteFiles[1],
              sha: 'old-sha-2',
            },
          ],
          message: 'commitMessage',
        }),
      },
    ]);
  });

  it('should reject delete for open authoring users', async () => {
    const api = new API({
      branch: 'master',
      repo: 'contributor/repo',
      originRepo: 'owner/repo',
      useOpenAuthoring: true,
    });

    await expect(api.deleteFiles(['content/posts/post.md'], 'delete post')).rejects.toMatchObject(
      {
        message: 'Cannot delete published entries as an Open Authoring user!',
        status: 403,
      },
    );
  });
});
+
// listFiles: resolves the branch head commit, fetches the recursive git tree,
// then filters blobs by folder depth (optionally including tree entries).
// NOTE(review): the branch/PR/label tests from 'should create branch' onward
// are unrelated to listFiles and presumably belong in their own describe
// blocks — confirm before reorganizing.
describe('listFiles', () => {
  it('should get files by depth', async () => {
    const api = new API({ branch: 'master', repo: 'owner/repo' });

    const tree = [
      {
        path: 'posts/post.md',
        sha: 'sha-post',
        size: 10,
        type: 'blob',
      },
      {
        path: 'posts/dir1',
        sha: 'sha-dir1',
        size: 0,
        type: 'tree',
      },
      {
        path: 'posts/dir1/nested-post.md',
        sha: 'sha-nested-1',
        size: 20,
        type: 'blob',
      },
      {
        path: 'posts/dir1/dir2',
        sha: 'sha-dir2',
        size: 0,
        type: 'tree',
      },
      {
        path: 'posts/dir1/dir2/nested-post.md',
        sha: 'sha-nested-2',
        size: 30,
        type: 'blob',
      },
    ];
    api.request = jest
      .fn()
      .mockResolvedValueOnce({ commit: { id: 'sha123' } })
      .mockResolvedValueOnce({ tree });

    // depth 1: only direct children of the folder
    await expect(api.listFiles('posts', { depth: 1 })).resolves.toEqual([
      {
        id: 'sha-post',
        size: 10,
        path: 'posts/post.md',
        type: 'blob',
        name: 'post.md',
      },
    ]);
    expect(api.request).toHaveBeenCalledTimes(2);
    expect(api.request).toHaveBeenNthCalledWith(1, '/repos/owner/repo/branches/master');
    expect(api.request).toHaveBeenNthCalledWith(2, '/repos/owner/repo/git/trees/sha123', {
      params: { recursive: 1 },
    });

    // depth 2: includes files one directory level down
    jest.clearAllMocks();
    api.request = jest
      .fn()
      .mockResolvedValueOnce({ commit: { id: 'sha123' } })
      .mockResolvedValueOnce({ tree });
    await expect(api.listFiles('posts', { depth: 2 })).resolves.toEqual([
      {
        id: 'sha-post',
        size: 10,
        path: 'posts/post.md',
        type: 'blob',
        name: 'post.md',
      },
      {
        id: 'sha-nested-1',
        size: 20,
        path: 'posts/dir1/nested-post.md',
        type: 'blob',
        name: 'nested-post.md',
      },
    ]);
    expect(api.request).toHaveBeenCalledTimes(2);
    expect(api.request).toHaveBeenNthCalledWith(1, '/repos/owner/repo/branches/master');
    expect(api.request).toHaveBeenNthCalledWith(2, '/repos/owner/repo/git/trees/sha123', {
      params: { recursive: 1 },
    });

    // depth 3: includes files two directory levels down
    jest.clearAllMocks();
    api.request = jest
      .fn()
      .mockResolvedValueOnce({ commit: { id: 'sha123' } })
      .mockResolvedValueOnce({ tree });
    await expect(api.listFiles('posts', { depth: 3 })).resolves.toEqual([
      {
        id: 'sha-post',
        size: 10,
        path: 'posts/post.md',
        type: 'blob',
        name: 'post.md',
      },
      {
        id: 'sha-nested-1',
        size: 20,
        path: 'posts/dir1/nested-post.md',
        type: 'blob',
        name: 'nested-post.md',
      },
      {
        id: 'sha-nested-2',
        size: 30,
        path: 'posts/dir1/dir2/nested-post.md',
        type: 'blob',
        name: 'nested-post.md',
      },
    ]);
    expect(api.request).toHaveBeenCalledTimes(2);
    expect(api.request).toHaveBeenNthCalledWith(1, '/repos/owner/repo/branches/master');
    expect(api.request).toHaveBeenNthCalledWith(2, '/repos/owner/repo/git/trees/sha123', {
      params: { recursive: 1 },
    });
  });
  it('should get files and folders', async () => {
    const api = new API({ branch: 'master', repo: 'owner/repo' });

    const tree = [
      {
        path: 'media/image.png',
        sha: 'sha-image',
        size: 50,
        type: 'blob',
      },
      {
        path: 'media/dir1',
        sha: 'sha-media-dir1',
        size: 0,
        type: 'tree',
      },
      {
        path: 'media/dir1/nested-image.png',
        sha: 'sha-media-nested-1',
        size: 60,
        type: 'blob',
      },
      {
        path: 'media/dir1/dir2',
        sha: 'sha-media-dir2',
        size: 0,
        type: 'tree',
      },
      {
        path: 'media/dir1/dir2/nested-image.png',
        sha: 'sha-media-nested-2',
        size: 70,
        type: 'blob',
      },
    ];
    api.request = jest
      .fn()
      .mockResolvedValueOnce({ commit: { id: 'sha123' } })
      .mockResolvedValueOnce({ tree });

    // folderSupport=true: tree entries at depth 1 are returned too
    await expect(api.listFiles('media', {}, true)).resolves.toEqual([
      {
        id: 'sha-image',
        size: 50,
        path: 'media/image.png',
        type: 'blob',
        name: 'image.png',
      },
      {
        id: 'sha-media-dir1',
        size: 0,
        path: 'media/dir1',
        type: 'tree',
        name: 'dir1',
      },
    ]);
    expect(api.request).toHaveBeenCalledTimes(2);
    expect(api.request).toHaveBeenNthCalledWith(1, '/repos/owner/repo/branches/master');
    expect(api.request).toHaveBeenNthCalledWith(2, '/repos/owner/repo/git/trees/sha123', {
      params: { recursive: 1 },
    });
  });

  it('should create branch', async () => {
    const api = new API({ branch: 'gh-pages', repo: 'owner/my-repo', token: 'token' });
    api.request = jest.fn().mockResolvedValue({ name: 'cms/new-branch' });

    await expect(api.createBranch('cms/new-branch', 'master')).resolves.toEqual({
      name: 'cms/new-branch',
    });
    expect(api.request).toHaveBeenCalledWith('/repos/owner/my-repo/branches', {
      method: 'POST',
      body: JSON.stringify({
        new_branch_name: 'cms/new-branch',
        old_ref_name: 'master',
      }),
    });
  });

  it('should create pull request with owner:branch head format', async () => {
    const api = new API({ branch: 'gh-pages', repo: 'owner/my-repo', token: 'token' });
    api.request = jest.fn().mockResolvedValue({ number: 1 });

    await expect(
      api.createPR('title', 'cms/new-branch', 'Check out the changes!'),
    ).resolves.toEqual({ number: 1 });
    expect(api.request).toHaveBeenCalledWith('/repos/owner/my-repo/pulls', {
      method: 'POST',
      body: JSON.stringify({
        title: 'title',
        head: 'owner:cms/new-branch',
        base: 'gh-pages',
        body: 'Check out the changes!',
      }),
    });
  });

  it('should get pull requests', async () => {
    const api = new API({ branch: 'gh-pages', repo: 'owner/my-repo', token: 'token' });
    api.request = jest.fn().mockResolvedValue([{ number: 1, head: { label: 'head' } }]);

    await expect(api.getPullRequests('open', 'head')).resolves.toEqual([
      { number: 1, head: { label: 'head' } },
    ]);
    expect(api.request).toHaveBeenCalledWith('/repos/owner/my-repo/pulls', {
      params: { state: 'open' },
    });
  });

  it('should list unpublished branches (standard mode)', async () => {
    const api = new API({
      branch: 'gh-pages',
      repo: 'owner/my-repo',
      token: 'token',
      cmsLabelPrefix: 'decap-cms/',
    });
    // Only cms/* branches carrying a CMS-prefixed label should be listed.
    api.request = jest.fn().mockResolvedValue([
      { head: { ref: 'cms/branch1' }, labels: [{ name: 'decap-cms/draft' }] },
      { head: { ref: 'other/branch' }, labels: [{ name: 'decap-cms/draft' }] },
      { head: { ref: 'cms/branch2' }, labels: [{ name: 'decap-cms/pending_review' }] },
      { head: { ref: 'cms/branch3' }, labels: [{ name: 'other-label' }] },
    ]);

    await expect(api.listUnpublishedBranches()).resolves.toEqual(['cms/branch1', 'cms/branch2']);
    expect(api.request).toHaveBeenCalledWith('/repos/owner/my-repo/pulls', {
      params: { state: 'open' },
    });
  });

  it('should list unpublished branches (OA mode) from fork branches', async () => {
    const api = new API({
      branch: 'master',
      repo: 'contributor/repo',
      originRepo: 'owner/repo',
      token: 'token',
      useOpenAuthoring: true,
    });

    // Mock getOpenAuthoringBranches
    api.getOpenAuthoringBranches = jest
      .fn()
      .mockResolvedValue([
        { name: 'cms/contributor/repo/posts/entry1' },
        { name: 'cms/contributor/repo/posts/entry2' },
      ]);

    // Mock filterOpenAuthoringBranches to allow all
    api.filterOpenAuthoringBranches = jest
      .fn()
      .mockImplementation(branch => Promise.resolve({ branch, filter: true }));

    const result = await api.listUnpublishedBranches();

    expect(result).toEqual([
      'cms/contributor/repo/posts/entry1',
      'cms/contributor/repo/posts/entry2',
    ]);
    expect(api.getOpenAuthoringBranches).toHaveBeenCalled();
  });

  it('should update pull request labels', async () => {
    const api = new API({ branch: 'gh-pages', repo: 'owner/my-repo', token: 'token' });
    api.request = jest.fn().mockResolvedValue([{ id: 1, name: 'label' }]);

    await expect(api.updatePullRequestLabels(1, [1])).resolves.toEqual([
      { id: 1, name: 'label' },
    ]);
    expect(api.request).toHaveBeenCalledWith('/repos/owner/my-repo/issues/1/labels', {
      method: 'PUT',
      body: JSON.stringify({ labels: [1] }),
    });
  });

  it('should get labels', async () => {
    const api = new API({ branch: 'gh-pages', repo: 'owner/my-repo', token: 'token' });
    api.request = jest.fn().mockResolvedValue([
      { id: 1, name: 'label1' },
      { id: 2, name: 'label2' },
    ]);

    await expect(api.getLabels()).resolves.toEqual([
      { id: 1, name: 'label1' },
      { id: 2, name: 'label2' },
    ]);
    expect(api.request).toHaveBeenCalledWith('/repos/owner/my-repo/labels');
  });

  it('should create label', async () => {
    const api = new API({ branch: 'gh-pages', repo: 'owner/my-repo', token: 'token' });
    api.request = jest.fn().mockResolvedValue({ id: 1, name: 'new-label', color: '0052cc' });

    await expect(api.createLabel('new-label', '0052cc')).resolves.toEqual({
      id: 1,
      name: 'new-label',
      color: '0052cc',
    });
    expect(api.request).toHaveBeenCalledWith('/repos/owner/my-repo/labels', {
      method: 'POST',
      body: JSON.stringify({ name: 'new-label', color: '0052cc' }),
    });
  });

  it('should get or create label when label exists', async () => {
    const api = new API({ branch: 'gh-pages', repo: 'owner/my-repo', token: 'token' });
    const existingLabel = { id: 1, name: 'existing-label', color: '0052cc' };
    api.getLabels = jest.fn().mockResolvedValue([existingLabel]);
    api.createLabel = jest.fn();

    await expect(api.getOrCreateLabel('existing-label')).resolves.toEqual(existingLabel);
    expect(api.getLabels).toHaveBeenCalledTimes(1);
    expect(api.createLabel).not.toHaveBeenCalled();
  });

  it('should get or create label when label does not exist', async () => {
    const api = new API({ branch: 'gh-pages', repo: 'owner/my-repo', token: 'token' });
    const newLabel = { id: 2, name: 'new-label', color: '0052cc' };
    api.getLabels = jest.fn().mockResolvedValue([{ id: 1, name: 'other-label' }]);
    api.createLabel = jest.fn().mockResolvedValue(newLabel);

    await expect(api.getOrCreateLabel('new-label')).resolves.toEqual(newLabel);
    expect(api.getLabels).toHaveBeenCalledTimes(1);
    expect(api.createLabel).toHaveBeenCalledTimes(1);
    expect(api.createLabel).toHaveBeenCalledWith('new-label');
  });

  it('should set pull request status', async () => {
    const api = new API({
      branch: 'gh-pages',
      repo: 'owner/my-repo',
      token: 'token',
      cmsLabelPrefix: 'decap-cms/',
    });
    const pullRequest = {
      number: 1,
      labels: [
        { id: 1, name: 'decap-cms/draft' },
        { id: 2, name: 'other-label' },
      ],
    };

    const newLabel = { id: 3, name: 'decap-cms/pending_review' };
    api.getOrCreateLabel = jest.fn().mockResolvedValue(newLabel);
    api.updatePullRequestLabels = jest
      .fn()
      .mockResolvedValue([newLabel, { id: 2, name: 'other-label' }]);

    // Old CMS label (id 1) is dropped, non-CMS label kept, new status added.
    await api.setPullRequestStatus(pullRequest, 'pending_review');

    expect(api.getOrCreateLabel).toHaveBeenCalledTimes(1);
    expect(api.getOrCreateLabel).toHaveBeenCalledWith('decap-cms/pending_review');

    expect(api.updatePullRequestLabels).toHaveBeenCalledTimes(1);
    expect(api.updatePullRequestLabels).toHaveBeenCalledWith(1, [2, 3]);
  });
});
+
+ describe('retrieveUnpublishedEntryData', () => {
+ it('should retrieve unpublished entry data', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'owner/repo',
+ token: 'token',
+ cmsLabelPrefix: 'decap-cms/',
+ });
+
+ const pullRequest = {
+ number: 1,
+ updated_at: '2024-01-01T00:00:00Z',
+ user: { login: 'testuser' },
+ labels: [{ id: 1, name: 'decap-cms/pending_review' }],
+ };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(pullRequest);
+
+ const compareResult = {
+ files: [
+ { filename: 'content/posts/test.md', status: 'added', sha: 'sha1' },
+ { filename: 'static/img/test.jpg', status: 'modified', sha: 'sha2' },
+ ],
+ commits: [],
+ total_commits: 1,
+ };
+ api.getDifferences = jest.fn().mockResolvedValue(compareResult);
+
+ const result = await api.retrieveUnpublishedEntryData('posts/test');
+
+ expect(api.getBranchPullRequest).toHaveBeenCalledWith('cms/posts/test');
+ expect(result).toEqual({
+ collection: 'posts',
+ slug: 'test',
+ status: 'pending_review',
+ diffs: [
+ { path: 'content/posts/test.md', newFile: true, id: 'sha1' },
+ { path: 'static/img/test.jpg', newFile: false, id: 'sha2' },
+ ],
+ updatedAt: '2024-01-01T00:00:00Z',
+ pullRequestAuthor: 'testuser',
+ });
+ });
+
+ it('should fall back to getPullRequestFiles when getDifferences fails', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'owner/repo',
+ token: 'token',
+ cmsLabelPrefix: 'decap-cms/',
+ });
+
+ const pullRequest = {
+ number: 1,
+ updated_at: '2024-01-01T00:00:00Z',
+ user: { login: 'testuser' },
+ labels: [{ id: 1, name: 'decap-cms/pending_review' }],
+ };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(pullRequest);
+ api.getDifferences = jest.fn().mockRejectedValue(new Error('compare failed'));
+
+ const files = [{ filename: 'content/posts/test.md', status: 'added' }];
+ api.getPullRequestFiles = jest.fn().mockResolvedValue(files);
+
+ const result = await api.retrieveUnpublishedEntryData('posts/test');
+
+ expect(result.diffs).toEqual([{ path: 'content/posts/test.md', newFile: true, id: '' }]);
+ });
+
+ it('should default to initialWorkflowStatus when no CMS label found', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'owner/repo',
+ token: 'token',
+ cmsLabelPrefix: 'decap-cms/',
+ });
+
+ const pullRequest = {
+ number: 1,
+ updated_at: '2024-01-01T00:00:00Z',
+ user: { login: 'testuser' },
+ labels: [{ id: 2, name: 'other-label' }],
+ };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(pullRequest);
+ api.getDifferences = jest
+ .fn()
+ .mockResolvedValue({ files: [], commits: [], total_commits: 0 });
+
+ const result = await api.retrieveUnpublishedEntryData('posts/test');
+
+ expect(result.status).toEqual('draft');
+ });
+ });
+
+ describe('updateUnpublishedEntryStatus', () => {
+ it('should update unpublished entry status (standard mode)', async () => {
+ const api = new API({ branch: 'master', repo: 'owner/repo', token: 'token' });
+
+ const pullRequest = {
+ number: 1,
+ labels: [{ id: 1, name: 'decap-cms/draft' }],
+ };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(pullRequest);
+ api.setPullRequestStatus = jest.fn().mockResolvedValue();
+
+ await api.updateUnpublishedEntryStatus('posts', 'test', 'pending_review');
+
+ expect(api.getBranchPullRequest).toHaveBeenCalledWith('cms/posts/test');
+ expect(api.setPullRequestStatus).toHaveBeenCalledWith(pullRequest, 'pending_review');
+ });
+
+ it('should reject pending_publish for open authoring', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ });
+
+ const pullRequest = { number: 1, state: 'open', labels: [] };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(pullRequest);
+
+ await expect(
+ api.updateUnpublishedEntryStatus('posts', 'test', 'pending_publish'),
+ ).rejects.toThrow('Open Authoring entries may not be set to the status "pending_publish".');
+ });
+
+ it('should close PR when OA entry moves to draft', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ });
+
+ const pullRequest = { number: 5, state: 'open', labels: [] };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(pullRequest);
+ api.closePR = jest.fn().mockResolvedValue({});
+
+ await api.updateUnpublishedEntryStatus('posts', 'test', 'draft');
+
+ expect(api.closePR).toHaveBeenCalledWith(5);
+ });
+
+ it('should re-open PR when OA entry moves to pending_review', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ });
+
+ const pullRequest = { number: 5, state: 'closed', labels: [] };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(pullRequest);
+ api.updatePR = jest.fn().mockResolvedValue({});
+
+ await api.updateUnpublishedEntryStatus('posts', 'test', 'pending_review');
+
+ expect(api.updatePR).toHaveBeenCalledWith(5, 'open');
+ });
+
+ it('should create PR from mock PR when OA entry moves to pending_review', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ });
+
+ const mockPR = { number: MOCK_PULL_REQUEST, state: 'open', labels: [] };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(mockPR);
+ api.getDifferences = jest.fn().mockResolvedValue({
+ commits: [{ commit: { message: 'Add new post' } }],
+ files: [],
+ total_commits: 1,
+ });
+ api.createPR = jest.fn().mockResolvedValue({ number: 10 });
+
+ await api.updateUnpublishedEntryStatus('posts', 'test', 'pending_review');
+
+ expect(api.getDifferences).toHaveBeenCalled();
+ expect(api.createPR).toHaveBeenCalledWith('Add new post', expect.stringContaining('cms/'));
+ });
+ });
+
+ describe('deleteUnpublishedEntry', () => {
+ it('should delete unpublished entry by closing PR and deleting branch', async () => {
+ const api = new API({ branch: 'master', repo: 'owner/repo', token: 'token' });
+
+ const pullRequest = { number: 1 };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(pullRequest);
+ api.closePR = jest.fn().mockResolvedValue();
+ api.deleteBranch = jest.fn().mockResolvedValue();
+
+ await api.deleteUnpublishedEntry('posts', 'test');
+
+ expect(api.getBranchPullRequest).toHaveBeenCalledWith('cms/posts/test');
+ expect(api.closePR).toHaveBeenCalledWith(1);
+ expect(api.deleteBranch).toHaveBeenCalledWith('cms/posts/test');
+ });
+
+ it('should delete branch even if PR does not exist', async () => {
+ const api = new API({ branch: 'master', repo: 'owner/repo', token: 'token' });
+
+ api.getBranchPullRequest = jest
+ .fn()
+ .mockRejectedValue(new APIError('PR not found', 404, 'Forgejo'));
+ api.closePR = jest.fn();
+ api.deleteBranch = jest.fn().mockResolvedValue();
+
+ await api.deleteUnpublishedEntry('posts', 'test');
+
+ expect(api.closePR).not.toHaveBeenCalled();
+ expect(api.deleteBranch).toHaveBeenCalledWith('cms/posts/test');
+ });
+ });
+
+ describe('publishUnpublishedEntry', () => {
+ it('should publish unpublished entry by merging PR and deleting branch', async () => {
+ const api = new API({ branch: 'master', repo: 'owner/repo', token: 'token' });
+
+ const pullRequest = { number: 1 };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(pullRequest);
+ api.mergePR = jest.fn().mockResolvedValue();
+ api.deleteBranch = jest.fn().mockResolvedValue();
+
+ await api.publishUnpublishedEntry('posts', 'test');
+
+ expect(api.getBranchPullRequest).toHaveBeenCalledWith('cms/posts/test');
+ expect(api.mergePR).toHaveBeenCalledWith(pullRequest);
+ expect(api.deleteBranch).toHaveBeenCalledWith('cms/posts/test');
+ });
+ });
+
+ describe('getBranchPullRequest', () => {
+ it('should get open pull request with CMS labels for branch (standard mode)', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'owner/my-repo',
+ token: 'token',
+ cmsLabelPrefix: 'decap-cms/',
+ });
+
+ const openPR = {
+ number: 1,
+ head: { ref: 'cms/posts/test' },
+ state: 'open',
+ labels: [{ name: 'decap-cms/draft' }],
+ };
+ api.getPullRequests = jest.fn().mockResolvedValue([openPR]);
+
+ const result = await api.getBranchPullRequest('cms/posts/test');
+
+ expect(result).toEqual(openPR);
+ expect(api.getPullRequests).toHaveBeenCalledWith('open', 'owner:cms/posts/test');
+ });
+
+ it('should throw EditorialWorkflowError if no CMS-labeled PR found (standard mode)', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'owner/my-repo',
+ token: 'token',
+ cmsLabelPrefix: 'decap-cms/',
+ });
+
+ // PR exists but has no CMS label
+ const pr = {
+ number: 1,
+ head: { ref: 'cms/posts/test' },
+ state: 'open',
+ labels: [{ name: 'other-label' }],
+ };
+ api.getPullRequests = jest.fn().mockResolvedValue([pr]);
+
+ await expect(api.getBranchPullRequest('cms/posts/test')).rejects.toThrow(
+ 'content is not under editorial workflow',
+ );
+ });
+
+ it('should delegate to getOpenAuthoringPullRequest for OA mode', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ token: 'token',
+ useOpenAuthoring: true,
+ });
+
+ const mockPR = {
+ number: MOCK_PULL_REQUEST,
+ state: 'open',
+ labels: [],
+ head: { ref: 'cms/test', sha: 'sha123' },
+ };
+ api.getPullRequests = jest.fn().mockResolvedValue([]);
+ api.getOpenAuthoringPullRequest = jest.fn().mockResolvedValue({
+ pullRequest: mockPR,
+ branch: { commit: { id: 'sha123' } },
+ });
+
+ const result = await api.getBranchPullRequest('cms/test');
+
+ expect(result).toEqual(mockPR);
+ expect(api.getPullRequests).toHaveBeenCalledWith('all', 'contributor:cms/test');
+ });
+ });
+
+ describe('mergePR', () => {
+ it('should merge pull request', async () => {
+ const api = new API({ branch: 'master', repo: 'owner/my-repo', token: 'token' });
+ api.request = jest.fn().mockResolvedValue({});
+
+ const pullRequest = { number: 1 };
+ await api.mergePR(pullRequest);
+
+ expect(api.request).toHaveBeenCalledWith('/repos/owner/my-repo/pulls/1/merge', {
+ method: 'POST',
+ body: JSON.stringify({
+ Do: 'merge',
+ MergeMessageField: 'Automatically generated. Merged on Decap CMS.',
+ }),
+ });
+ });
+ });
+
+ describe('closePR', () => {
+ it('should close pull request', async () => {
+ const api = new API({ branch: 'master', repo: 'owner/my-repo', token: 'token' });
+ api.updatePR = jest.fn().mockResolvedValue({ number: 1, state: 'closed' });
+
+ const result = await api.closePR(1);
+
+ expect(api.updatePR).toHaveBeenCalledWith(1, 'closed');
+ expect(result).toEqual({ number: 1, state: 'closed' });
+ });
+ });
+
+ describe('deleteBranch', () => {
+ it('should delete branch', async () => {
+ const api = new API({ branch: 'master', repo: 'owner/my-repo', token: 'token' });
+ api.request = jest.fn().mockResolvedValue({});
+
+ await api.deleteBranch('cms/posts/test');
+
+ expect(api.request).toHaveBeenCalledWith('/repos/owner/my-repo/branches/cms%2Fposts%2Ftest', {
+ method: 'DELETE',
+ });
+ });
+ });
+
+ describe('forkExists', () => {
+ it('should return true when fork exists with matching parent', async () => {
+ const api = new API({ branch: 'master', repo: 'user/repo', originRepo: 'owner/repo' });
+ const mockRepo = {
+ fork: true,
+ parent: { full_name: 'owner/repo' },
+ };
+ api.request = jest.fn().mockResolvedValue(mockRepo);
+
+ const result = await api.forkExists();
+
+ expect(result).toBe(true);
+ expect(api.request).toHaveBeenCalledWith('/repos/user/repo');
+ });
+
+ it('should return false when repo is not a fork', async () => {
+ const api = new API({ branch: 'master', repo: 'user/repo', originRepo: 'owner/repo' });
+ api.request = jest.fn().mockResolvedValue({ fork: false });
+
+ const result = await api.forkExists();
+
+ expect(result).toBe(false);
+ });
+
+ it('should return false when parent does not match origin repo', async () => {
+ const api = new API({ branch: 'master', repo: 'user/repo', originRepo: 'owner/repo' });
+ api.request = jest.fn().mockResolvedValue({
+ fork: true,
+ parent: { full_name: 'other/repo' },
+ });
+
+ const result = await api.forkExists();
+
+ expect(result).toBe(false);
+ });
+
+ it('should handle case-insensitive parent comparison', async () => {
+ const api = new API({ branch: 'master', repo: 'user/repo', originRepo: 'owner/repo' });
+ api.request = jest.fn().mockResolvedValue({
+ fork: true,
+ parent: { full_name: 'OWNER/REPO' },
+ });
+
+ const result = await api.forkExists();
+
+ expect(result).toBe(true);
+ });
+ });
+
+ describe('createFork', () => {
+ it('should create fork', async () => {
+ const api = new API({ branch: 'master', repo: 'user/repo', originRepo: 'owner/repo' });
+ api.request = jest.fn().mockResolvedValue({ full_name: 'user/repo' });
+
+ const result = await api.createFork();
+
+ expect(result).toEqual({ full_name: 'user/repo' });
+ expect(api.request).toHaveBeenCalledWith('/repos/owner/repo/forks', {
+ method: 'POST',
+ });
+ });
+ });
+
+ describe('getOpenAuthoringPullRequest', () => {
+ it('should return mock PR with initial status label when no PR exists', async () => {
+ const api = new API({ branch: 'master', repo: 'user/repo' });
+ api.getBranch = jest.fn().mockResolvedValue({
+ commit: { id: 'sha123' },
+ });
+
+ const result = await api.getOpenAuthoringPullRequest('cms/test', []);
+
+ expect(result.pullRequest.number).toBe(-1);
+ expect(result.pullRequest.head.sha).toBe('sha123');
+ // Default cmsLabelPrefix is '' which maps to 'decap-cms/' via getLabelPrefix
+ expect(result.pullRequest.labels).toEqual(
+ expect.arrayContaining([expect.objectContaining({ name: 'decap-cms/draft' })]),
+ );
+ expect(result.branch.commit.id).toBe('sha123');
+ });
+
+ it('should add synthetic pending_review label for open PR', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'user/repo',
+ cmsLabelPrefix: 'decap-cms/',
+ });
+ const pullRequest = {
+ number: 1,
+ head: { sha: 'sha123' },
+ state: 'open',
+ labels: [
+ { id: 1, name: 'decap-cms/draft' },
+ { id: 2, name: 'bug' },
+ ],
+ };
+ api.getBranch = jest.fn().mockResolvedValue({
+ commit: { id: 'sha123' },
+ });
+
+ const result = await api.getOpenAuthoringPullRequest('cms/test', [pullRequest]);
+
+ expect(result.pullRequest.number).toBe(1);
+ // CMS labels filtered out, synthetic pending_review added, plus non-CMS labels kept
+ expect(result.pullRequest.labels).toEqual([
+ { id: 2, name: 'bug' },
+ { name: 'decap-cms/pending_review' },
+ ]);
+ });
+
+ it('should add synthetic draft label for closed PR', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'user/repo',
+ cmsLabelPrefix: 'decap-cms/',
+ });
+ const pullRequest = {
+ number: 1,
+ head: { sha: 'sha123' },
+ state: 'closed',
+ labels: [],
+ };
+ api.getBranch = jest.fn().mockResolvedValue({
+ commit: { id: 'sha123' },
+ });
+
+ const result = await api.getOpenAuthoringPullRequest('cms/test', [pullRequest]);
+
+ expect(result.pullRequest.labels).toEqual([{ name: 'decap-cms/draft' }]);
+ });
+ });
+
+ describe('getDifferences', () => {
+ it('should call compare endpoint', async () => {
+ const api = new API({ branch: 'master', repo: 'owner/repo', token: 'token' });
+ const compareResult = { files: [], commits: [], total_commits: 0 };
+ api.request = jest.fn().mockResolvedValue(compareResult);
+
+ const result = await api.getDifferences('master', 'owner:cms/posts/test');
+
+ expect(result).toEqual(compareResult);
+ expect(api.request).toHaveBeenCalledWith(
+ '/repos/owner/repo/compare/master...owner%3Acms%2Fposts%2Ftest',
+ );
+ });
+
+ it('should retry with origin repo for OA on failure', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ token: 'token',
+ });
+ const compareResult = { files: [], commits: [], total_commits: 0 };
+ api.request = jest
+ .fn()
+ .mockRejectedValueOnce(new Error('not found'))
+ .mockResolvedValueOnce(compareResult);
+
+ const result = await api.getDifferences('master', 'contributor:cms/test');
+
+ expect(result).toEqual(compareResult);
+ expect(api.request).toHaveBeenCalledTimes(2);
+ // First call to fork repo
+ expect(api.request.mock.calls[0][0]).toContain('/repos/contributor/repo/compare/');
+ // Second call to origin repo
+ expect(api.request.mock.calls[1][0]).toContain('/repos/owner/repo/compare/');
+ });
+ });
+
+ describe('filterOpenAuthoringBranches', () => {
+ it('should filter out merged PRs and delete their branches', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ });
+
+ const mergedPR = {
+ number: 1,
+ state: 'closed',
+ merged_at: '2024-01-01T00:00:00Z',
+ labels: [],
+ };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(mergedPR);
+ api.deleteBranch = jest.fn().mockResolvedValue();
+
+ const result = await api.filterOpenAuthoringBranches('cms/contributor/repo/posts/entry');
+
+ expect(result).toEqual({ branch: 'cms/contributor/repo/posts/entry', filter: false });
+ expect(api.deleteBranch).toHaveBeenCalledWith('cms/contributor/repo/posts/entry');
+ });
+
+ it('should keep branches with unmerged PRs', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ });
+
+ const openPR = { number: 1, state: 'open', merged_at: null, labels: [] };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(openPR);
+
+ const result = await api.filterOpenAuthoringBranches('cms/contributor/repo/posts/entry');
+
+ expect(result).toEqual({ branch: 'cms/contributor/repo/posts/entry', filter: true });
+ });
+
+ it('should keep branches with mock PRs (no real PR)', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ });
+
+ const mockPR = { number: MOCK_PULL_REQUEST, state: 'open', merged_at: null, labels: [] };
+ api.getBranchPullRequest = jest.fn().mockResolvedValue(mockPR);
+
+ const result = await api.filterOpenAuthoringBranches('cms/contributor/repo/posts/entry');
+
+ expect(result).toEqual({ branch: 'cms/contributor/repo/posts/entry', filter: true });
+ });
+
+ it('should filter out branches on 404 errors', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ });
+
+ const notFoundError = new APIError('Not found', 404, 'Forgejo');
+ api.getBranchPullRequest = jest.fn().mockRejectedValue(notFoundError);
+
+ const result = await api.filterOpenAuthoringBranches('cms/contributor/repo/posts/entry');
+
+ expect(result).toEqual({ branch: 'cms/contributor/repo/posts/entry', filter: false });
+ });
+
+ it('should filter out branches on EditorialWorkflowError', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ });
+
+ const workflowError = new EditorialWorkflowError(
+ 'content is not under editorial workflow',
+ true,
+ );
+ api.getBranchPullRequest = jest.fn().mockRejectedValue(workflowError);
+
+ const result = await api.filterOpenAuthoringBranches('cms/contributor/repo/posts/entry');
+
+ expect(result).toEqual({ branch: 'cms/contributor/repo/posts/entry', filter: false });
+ });
+
+ it('should keep branches on transient network errors', async () => {
+ const api = new API({
+ branch: 'master',
+ repo: 'contributor/repo',
+ originRepo: 'owner/repo',
+ useOpenAuthoring: true,
+ });
+
+ const networkError = new APIError('Network error', 500, 'Forgejo');
+ api.getBranchPullRequest = jest.fn().mockRejectedValue(networkError);
+
+ const result = await api.filterOpenAuthoringBranches('cms/contributor/repo/posts/entry');
+
+ expect(result).toEqual({ branch: 'cms/contributor/repo/posts/entry', filter: true });
+ });
+ });
+});
diff --git a/packages/decap-cms-backend-forgejo/src/__tests__/implementation.spec.js b/packages/decap-cms-backend-forgejo/src/__tests__/implementation.spec.js
new file mode 100644
index 000000000000..446bc1f0de5f
--- /dev/null
+++ b/packages/decap-cms-backend-forgejo/src/__tests__/implementation.spec.js
@@ -0,0 +1,452 @@
+import { Cursor, CURSOR_COMPATIBILITY_SYMBOL } from 'decap-cms-lib-util';
+
+import ForgejoImplementation from '../implementation';
+
+jest.spyOn(console, 'error').mockImplementation(() => {});
+
+describe('forgejo backend implementation', () => {
+ const config = {
+ backend: {
+ repo: 'owner/repo',
+ api_root: 'https://v14.next.forgejo.org/api/v1',
+ },
+ };
+
+ const createObjectURL = jest.fn();
+ global.URL = {
+ createObjectURL,
+ };
+
+ createObjectURL.mockReturnValue('displayURL');
+
+ beforeAll(() => {
+ // eslint-disable-next-line @typescript-eslint/no-empty-function
+ });
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ afterAll(() => {
+ jest.restoreAllMocks();
+ });
+
+ describe('persistMedia', () => {
+ const persistFiles = jest.fn();
+ const mockAPI = {
+ persistFiles,
+ };
+
+ persistFiles.mockImplementation((_, files) => {
+ files.forEach((file, index) => {
+ file.sha = index;
+ });
+ });
+
+ it('should persist media file', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ forgejoImplementation.api = mockAPI;
+
+ const mediaFile = {
+ fileObj: { size: 100, name: 'image.png' },
+ path: '/media/image.png',
+ };
+
+ expect.assertions(5);
+ await expect(
+ forgejoImplementation.persistMedia(mediaFile, { commitMessage: 'Persisting media' }),
+ ).resolves.toEqual({
+ id: 0,
+ name: 'image.png',
+ size: 100,
+ displayURL: 'displayURL',
+ path: 'media/image.png',
+ });
+
+ expect(persistFiles).toHaveBeenCalledTimes(1);
+ expect(persistFiles).toHaveBeenCalledWith([], [mediaFile], {
+ commitMessage: 'Persisting media',
+ });
+ expect(createObjectURL).toHaveBeenCalledTimes(1);
+ expect(createObjectURL).toHaveBeenCalledWith(mediaFile.fileObj);
+ });
+
+ it('should log and throw error on "persistFiles" error', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ forgejoImplementation.api = mockAPI;
+
+ const error = new Error('failed to persist files');
+ persistFiles.mockRejectedValue(error);
+
+ const mediaFile = {
+ fileObj: { size: 100 },
+ path: '/media/image.png',
+ };
+
+ expect.assertions(5);
+ await expect(
+ forgejoImplementation.persistMedia(mediaFile, { commitMessage: 'Persisting media' }),
+ ).rejects.toThrowError(error);
+
+ expect(persistFiles).toHaveBeenCalledTimes(1);
+ expect(createObjectURL).toHaveBeenCalledTimes(0);
+ expect(console.error).toHaveBeenCalledTimes(1);
+ expect(console.error).toHaveBeenCalledWith(error);
+ });
+ });
+
+ describe('entriesByFolder', () => {
+ const listFiles = jest.fn();
+ const readFile = jest.fn();
+ const readFileMetadata = jest.fn(() => Promise.resolve({ author: '', updatedOn: '' }));
+
+ const mockAPI = {
+ listFiles,
+ readFile,
+ readFileMetadata,
+ originRepoURL: 'originRepoURL',
+ };
+
+ it('should return entries and cursor', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ forgejoImplementation.api = mockAPI;
+
+ const files = [];
+ const count = 1501;
+ for (let i = 0; i < count; i++) {
+ const id = `${i}`.padStart(`${count}`.length, '0');
+ files.push({
+ id,
+ path: `posts/post-${id}.md`,
+ });
+ }
+
+ listFiles.mockResolvedValue(files);
+ readFile.mockImplementation((_path, id) => Promise.resolve(`${id}`));
+
+ const expectedEntries = files
+ .slice(0, 20)
+ .map(({ id, path }) => ({ data: id, file: { path, id, author: '', updatedOn: '' } }));
+
+ const expectedCursor = Cursor.create({
+ actions: ['next', 'last'],
+ meta: { page: 1, count, pageSize: 20, pageCount: 76 },
+ data: { files },
+ });
+
+      // Expose the expected cursor on the entries array via the compatibility symbol,
+      // mirroring what entriesByFolder does for legacy pagination consumers.
+ expectedEntries[CURSOR_COMPATIBILITY_SYMBOL] = expectedCursor;
+
+ const result = await forgejoImplementation.entriesByFolder('posts', 'md', 1);
+
+ expect(result).toEqual(expectedEntries);
+ expect(listFiles).toHaveBeenCalledTimes(1);
+ expect(listFiles).toHaveBeenCalledWith('posts', { depth: 1, repoURL: 'originRepoURL' });
+ expect(readFile).toHaveBeenCalledTimes(20);
+ });
+ });
+
+ describe('traverseCursor', () => {
+ const listFiles = jest.fn();
+ const readFile = jest.fn((_path, id) => Promise.resolve(`${id}`));
+ const readFileMetadata = jest.fn(() => Promise.resolve({}));
+
+ const mockAPI = {
+ listFiles,
+ readFile,
+ originRepoURL: 'originRepoURL',
+ readFileMetadata,
+ };
+
+    // Shared fixture: 1501 files => 76 pages at pageSize 20; ids are zero-padded for stable ordering.
+ const files = [];
+ const count = 1501;
+ for (let i = 0; i < count; i++) {
+ const id = `${i}`.padStart(`${count}`.length, '0');
+ files.push({
+ id,
+ path: `posts/post-${id}.md`,
+ });
+ }
+
+ it('should handle next action', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ forgejoImplementation.api = mockAPI;
+
+ const cursor = Cursor.create({
+ actions: ['next', 'last'],
+ meta: { page: 1, count, pageSize: 20, pageCount: 76 },
+ data: { files },
+ });
+
+ const expectedEntries = files
+ .slice(20, 40)
+ .map(({ id, path }) => ({ data: id, file: { path, id } }));
+
+ const expectedCursor = Cursor.create({
+ actions: ['prev', 'first', 'next', 'last'],
+ meta: { page: 2, count, pageSize: 20, pageCount: 76 },
+ data: { files },
+ });
+
+ const result = await forgejoImplementation.traverseCursor(cursor, 'next');
+
+ expect(result).toEqual({
+ entries: expectedEntries,
+ cursor: expectedCursor,
+ });
+ });
+
+ it('should handle prev action', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ forgejoImplementation.api = mockAPI;
+
+ const cursor = Cursor.create({
+ actions: ['prev', 'first', 'next', 'last'],
+ meta: { page: 2, count, pageSize: 20, pageCount: 76 },
+ data: { files },
+ });
+
+ const expectedEntries = files
+ .slice(0, 20)
+ .map(({ id, path }) => ({ data: id, file: { path, id } }));
+
+ const expectedCursor = Cursor.create({
+ actions: ['next', 'last'],
+ meta: { page: 1, count, pageSize: 20, pageCount: 76 },
+ data: { files },
+ });
+
+ const result = await forgejoImplementation.traverseCursor(cursor, 'prev');
+
+ expect(result).toEqual({
+ entries: expectedEntries,
+ cursor: expectedCursor,
+ });
+ });
+
+ it('should handle last action', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ forgejoImplementation.api = mockAPI;
+
+ const cursor = Cursor.create({
+ actions: ['next', 'last'],
+ meta: { page: 1, count, pageSize: 20, pageCount: 76 },
+ data: { files },
+ });
+
+ const expectedEntries = files
+ .slice(1500)
+ .map(({ id, path }) => ({ data: id, file: { path, id } }));
+
+ const expectedCursor = Cursor.create({
+ actions: ['prev', 'first'],
+ meta: { page: 76, count, pageSize: 20, pageCount: 76 },
+ data: { files },
+ });
+
+ const result = await forgejoImplementation.traverseCursor(cursor, 'last');
+
+ expect(result).toEqual({
+ entries: expectedEntries,
+ cursor: expectedCursor,
+ });
+ });
+
+ it('should handle first action', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ forgejoImplementation.api = mockAPI;
+
+ const cursor = Cursor.create({
+ actions: ['prev', 'first'],
+ meta: { page: 76, count, pageSize: 20, pageCount: 76 },
+ data: { files },
+ });
+
+ const expectedEntries = files
+ .slice(0, 20)
+ .map(({ id, path }) => ({ data: id, file: { path, id } }));
+
+ const expectedCursor = Cursor.create({
+ actions: ['next', 'last'],
+ meta: { page: 1, count, pageSize: 20, pageCount: 76 },
+ data: { files },
+ });
+
+ const result = await forgejoImplementation.traverseCursor(cursor, 'first');
+
+ expect(result).toEqual({
+ entries: expectedEntries,
+ cursor: expectedCursor,
+ });
+ });
+ });
+
+ describe('editorial workflow', () => {
+ it('should list unpublished entries', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ forgejoImplementation.api = {
+ listUnpublishedBranches: jest.fn().mockResolvedValue(['cms/branch1', 'cms/branch2']),
+ };
+
+ await expect(forgejoImplementation.unpublishedEntries()).resolves.toEqual([
+ 'branch1',
+ 'branch2',
+ ]);
+ });
+
+ it('should get unpublished entry', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ forgejoImplementation.api = {
+ generateContentKey: jest.fn().mockReturnValue('collection/slug'),
+ retrieveUnpublishedEntryData: jest
+ .fn()
+ .mockResolvedValue({ slug: 'slug', status: 'draft' }),
+ };
+
+ await expect(
+ forgejoImplementation.unpublishedEntry({ collection: 'collection', slug: 'slug' }),
+ ).resolves.toEqual({
+ slug: 'slug',
+ status: 'draft',
+ });
+ expect(forgejoImplementation.api.retrieveUnpublishedEntryData).toHaveBeenCalledWith(
+ 'collection/slug',
+ );
+ });
+
+ it('should get unpublished entry data file', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ forgejoImplementation.api = {
+ generateContentKey: jest.fn().mockReturnValue('collection/slug'),
+ readFile: jest.fn().mockResolvedValue('file-content'),
+ };
+
+ await expect(
+ forgejoImplementation.unpublishedEntryDataFile(
+ 'collection',
+ 'slug',
+ 'path/to/file',
+ 'sha-123',
+ ),
+ ).resolves.toEqual('file-content');
+ });
+
+ it('should get unpublished entry media file', async () => {
+ const forgejoImplementation = new ForgejoImplementation(config);
+ const blob = new Blob(['content']);
+ forgejoImplementation.api = {
+ generateContentKey: jest.fn().mockReturnValue('collection/slug'),
+ readFile: jest.fn().mockResolvedValue(blob),
+ };
+
+ const result = await forgejoImplementation.unpublishedEntryMediaFile(
+ 'collection',
+ 'slug',
+ 'path/to/image.png',
+ 'sha-456',
+ );
+
+ expect(result.name).toBe('image.png');
+ expect(result.file).toEqual(expect.any(File));
+ });
+ });
+
+ describe('open authoring', () => {
+ describe('authenticateWithFork', () => {
+ it('should use origin repo if user is maintainer', async () => {
+ const forgejoImplementation = new ForgejoImplementation(
+ {
+ ...config,
+ backend: { ...config.backend, open_authoring: true },
+ },
+ { useWorkflow: true },
+ );
+
+ forgejoImplementation.userIsOriginMaintainer = jest.fn().mockResolvedValue(true);
+ forgejoImplementation.currentUser = jest.fn().mockResolvedValue({ login: 'user' });
+ forgejoImplementation.api = {
+ forkExists: jest.fn(),
+ mergeUpstream: jest.fn(),
+ createFork: jest.fn(),
+ };
+
+ await forgejoImplementation.authenticateWithFork({
+ userData: { token: 'token' },
+ getPermissionToFork: jest.fn(),
+ });
+
+ expect(forgejoImplementation.repo).toBe('owner/repo');
+ expect(forgejoImplementation.useOpenAuthoring).toBe(false);
+ });
+
+ it('should create fork if user is contributor', async () => {
+ const mockForkExists = jest.fn().mockResolvedValue(false);
+ const mockCreateFork = jest.fn().mockResolvedValue({ full_name: 'contributor/repo' });
+ const mockMergeUpstream = jest.fn();
+
+ const forgejoImplementation = new ForgejoImplementation(
+ {
+ ...config,
+ backend: { ...config.backend, open_authoring: true },
+ },
+ {
+ useWorkflow: true,
+ API: {
+ forkExists: mockForkExists,
+ createFork: mockCreateFork,
+ mergeUpstream: mockMergeUpstream,
+ },
+ },
+ );
+
+ forgejoImplementation.userIsOriginMaintainer = jest.fn().mockResolvedValue(false);
+ forgejoImplementation.currentUser = jest.fn().mockResolvedValue({ login: 'contributor' });
+ forgejoImplementation.pollUntilForkExists = jest.fn().mockResolvedValue(undefined);
+
+ await forgejoImplementation.authenticateWithFork({
+ userData: { token: 'token' },
+ getPermissionToFork: jest.fn().mockResolvedValue(),
+ });
+
+ expect(forgejoImplementation.repo).toBe('contributor/repo');
+ expect(forgejoImplementation.useOpenAuthoring).toBe(true);
+ expect(mockCreateFork).toHaveBeenCalled();
+ });
+
+ it('should sync existing fork if one exists', async () => {
+ const mockForkExists = jest.fn().mockResolvedValue(true);
+ const mockMergeUpstream = jest.fn().mockResolvedValue(undefined);
+ const mockCreateFork = jest.fn();
+
+ const forgejoImplementation = new ForgejoImplementation(
+ {
+ ...config,
+ backend: { ...config.backend, open_authoring: true },
+ },
+ {
+ useWorkflow: true,
+ API: {
+ forkExists: mockForkExists,
+ mergeUpstream: mockMergeUpstream,
+ createFork: mockCreateFork,
+ },
+ },
+ );
+
+ forgejoImplementation.userIsOriginMaintainer = jest.fn().mockResolvedValue(false);
+ forgejoImplementation.currentUser = jest.fn().mockResolvedValue({ login: 'contributor' });
+
+ await forgejoImplementation.authenticateWithFork({
+ userData: { token: 'token' },
+ getPermissionToFork: jest.fn(),
+ });
+
+ expect(forgejoImplementation.repo).toBe('contributor/repo');
+ expect(forgejoImplementation.useOpenAuthoring).toBe(true);
+ expect(mockMergeUpstream).toHaveBeenCalled();
+ expect(mockCreateFork).not.toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/packages/decap-cms-backend-forgejo/src/implementation.tsx b/packages/decap-cms-backend-forgejo/src/implementation.tsx
new file mode 100644
index 000000000000..fcd36f5ebbf9
--- /dev/null
+++ b/packages/decap-cms-backend-forgejo/src/implementation.tsx
@@ -0,0 +1,646 @@
+import React from 'react';
+import { stripIndent } from 'common-tags';
+import trimStart from 'lodash/trimStart';
+import semaphore from 'semaphore';
+import {
+ asyncLock,
+ basename,
+ blobToFileObj,
+ Cursor,
+ CURSOR_COMPATIBILITY_SYMBOL,
+ entriesByFiles,
+ entriesByFolder,
+ filterByExtension,
+ getBlobSHA,
+ getMediaAsBlob,
+ getMediaDisplayURL,
+ runWithLock,
+ unsentRequest,
+ unpublishedEntries,
+ contentKeyFromBranch,
+ branchFromContentKey,
+} from 'decap-cms-lib-util';
+
+import API, { API_NAME } from './API';
+import AuthenticationPage from './AuthenticationPage';
+
+import type {
+ AssetProxy,
+ AsyncLock,
+ Config,
+ Credentials,
+ DisplayURL,
+ Entry,
+ Implementation,
+ ImplementationFile,
+ PersistOptions,
+ User,
+} from 'decap-cms-lib-util';
+import type { Semaphore } from 'semaphore';
+import type { ForgejoUser } from './types';
+
+const MAX_CONCURRENT_DOWNLOADS = 10;
+
+type ApiFile = { id: string; type: string; name: string; path: string; size: number };
+
+const { fetchWithTimeout: fetch } = unsentRequest;
+
+export default class Forgejo implements Implementation {
+  // Serializes transactional operations (persist/publish/delete).
+  lock: AsyncLock;
+  api: API | null;
+  options: {
+    proxied: boolean;
+    API: API | null;
+    useWorkflow?: boolean;
+    initialWorkflowStatus: string;
+  };
+  // Upstream repo ("owner/repo"); in open authoring, `repo` may be a fork of it.
+  originRepo: string;
+  repo?: string;
+  branch: string;
+  apiRoot: string;
+  mediaFolder?: string;
+  token: string | null;
+  openAuthoringEnabled: boolean;
+  useOpenAuthoring?: boolean;
+  alwaysForkEnabled: boolean;
+  cmsLabelPrefix: string;
+  initialWorkflowStatus: string;
+  // Per-token caches; cleared on login/logout to avoid stale identity data.
+  _currentUserPromise?: Promise<ForgejoUser>;
+  _userIsOriginMaintainerPromises?: {
+    [key: string]: Promise<boolean>;
+  };
+  _mediaDisplayURLSem?: Semaphore;
+
+ constructor(config: Config, options = {}) {
+ this.options = {
+ proxied: false,
+ API: null,
+ useWorkflow: false,
+ initialWorkflowStatus: '',
+ ...options,
+ };
+
+ if (
+ !this.options.proxied &&
+ (config.backend.repo === null || config.backend.repo === undefined)
+ ) {
+ throw new Error('The Forgejo backend needs a "repo" in the backend configuration.');
+ }
+
+ this.api = this.options.API || null;
+ this.openAuthoringEnabled = config.backend.open_authoring || false;
+ if (this.openAuthoringEnabled) {
+ if (!this.options.useWorkflow) {
+ throw new Error(
+ 'backend.open_authoring is true but publish_mode is not set to editorial_workflow.',
+ );
+ }
+ // In open authoring mode, defer setting this.repo until after fork selection
+ this.originRepo = config.backend.repo || '';
+ } else {
+ this.repo = this.originRepo = config.backend.repo || '';
+ }
+ this.alwaysForkEnabled = config.backend.always_fork || false;
+ this.branch = config.backend.branch?.trim() || 'master';
+ this.apiRoot = config.backend.api_root || 'https://v14.next.forgejo.org/api/v1';
+ this.token = '';
+ this.mediaFolder = config.media_folder;
+ this.cmsLabelPrefix = config.backend.cms_label_prefix || '';
+ this.initialWorkflowStatus = this.options.initialWorkflowStatus || 'draft';
+ this.lock = asyncLock();
+ }
+
+ isGitBackend() {
+ return true;
+ }
+
+ async status() {
+ const auth =
+ (await this.api
+ ?.user()
+ .then(user => !!user)
+ .catch(e => {
+ console.warn('[StaticCMS] Failed getting Forgejo user', e);
+ return false;
+ })) || false;
+
+ return { auth: { status: auth }, api: { status: true, statusPage: '' } };
+ }
+
+ authComponent() {
+ const wrappedAuthenticationPage = (props: Record) => (
+
+ );
+ wrappedAuthenticationPage.displayName = 'AuthenticationPage';
+ return wrappedAuthenticationPage;
+ }
+
+ async currentUser({ token }: { token: string }) {
+ if (!this._currentUserPromise) {
+ this._currentUserPromise = fetch(`${this.apiRoot}/user`, {
+ headers: {
+ Authorization: `token ${token}`,
+ },
+ }).then(res => res.json());
+ }
+ return this._currentUserPromise;
+ }
+
+ async userIsOriginMaintainer({
+ username: usernameArg,
+ token,
+ }: {
+ username?: string;
+ token: string;
+ }) {
+ const username = usernameArg || (await this.currentUser({ token })).login;
+ this._userIsOriginMaintainerPromises = this._userIsOriginMaintainerPromises || {};
+ if (!this._userIsOriginMaintainerPromises[username]) {
+ this._userIsOriginMaintainerPromises[username] = fetch(
+ `${this.apiRoot}/repos/${this.originRepo}/collaborators/${username}/permission`,
+ {
+ headers: {
+ Authorization: `token ${token}`,
+ },
+ },
+ )
+ .then(res => res.json())
+ .then(({ permission }) => permission === 'admin' || permission === 'write');
+ }
+ return this._userIsOriginMaintainerPromises[username];
+ }
+
+ async pollUntilForkExists({ repo, token }: { repo: string; token: string }) {
+ const initialPollDelay = 250; // milliseconds
+ const maxPollDelay = 2000; // milliseconds
+ const maxWaitMs = 60000; // overall timeout in milliseconds
+ const startTime = Date.now();
+
+ let pollDelay = initialPollDelay;
+ let repoExists = false;
+
+ while (!repoExists && Date.now() - startTime < maxWaitMs) {
+ const response = await fetch(`${this.apiRoot}${repo}`, {
+ headers: { Authorization: `token ${token}` },
+ });
+
+ if (response.ok) {
+ repoExists = true;
+ } else if (response.status === 404) {
+ repoExists = false;
+ } else {
+ // For non-404, non-OK responses, fail fast instead of looping indefinitely.
+ throw new Error(
+ `Error while checking for fork existence: ${response.status} ${response.statusText}`,
+ );
+ }
+
+ // wait between polls if the repo does not yet exist
+ if (!repoExists) {
+ await new Promise(resolve => setTimeout(resolve, pollDelay));
+ // simple backoff up to a maximum delay
+ pollDelay = Math.min(pollDelay * 2, maxPollDelay);
+ }
+ }
+
+ if (!repoExists) {
+ throw new Error('Timed out waiting for fork to be created.');
+ }
+ }
+
+  /**
+   * Open Authoring login. Origin maintainers keep using the origin repo
+   * directly (unless always_fork is set); everyone else is routed to a
+   * personal fork, which is created (after user consent) or synced upstream.
+   */
+  async authenticateWithFork({
+    userData,
+    getPermissionToFork,
+  }: {
+    userData: User;
+    getPermissionToFork: () => Promise<void> | void;
+  }) {
+    if (!this.openAuthoringEnabled) {
+      throw new Error('Cannot authenticate with fork; Open Authoring is turned off.');
+    }
+    const token = userData.token as string;
+
+    // Clear cached user data when token changes to prevent stale data across logins
+    this._currentUserPromise = undefined;
+    this._userIsOriginMaintainerPromises = {};
+
+    // Origin maintainers should be able to use the CMS normally. If alwaysFork
+    // is enabled we always fork (and avoid the origin maintainer check)
+    if (!this.alwaysForkEnabled && (await this.userIsOriginMaintainer({ token }))) {
+      this.repo = this.originRepo;
+      this.useOpenAuthoring = false;
+      return Promise.resolve();
+    }
+
+    // If a fork exists merge it with upstream, otherwise create a new fork.
+    const currentUser = await this.currentUser({ token });
+    const repoName = this.originRepo.split('/')[1];
+    this.repo = `${currentUser.login}/${repoName}`;
+    this.useOpenAuthoring = true;
+
+    // Initialize the API for fork operations. Always recreate so token/repo
+    // are current — unless a mock API was injected for testing.
+    if (!this.options.API) {
+      const apiCtor = API;
+      this.api = new apiCtor({
+        token,
+        branch: this.branch,
+        repo: this.repo,
+        originRepo: this.originRepo,
+        apiRoot: this.apiRoot,
+        useOpenAuthoring: this.useOpenAuthoring,
+        cmsLabelPrefix: this.cmsLabelPrefix,
+        initialWorkflowStatus: this.initialWorkflowStatus,
+      });
+    }
+
+    if (await this.api!.forkExists()) {
+      await this.api!.mergeUpstream();
+      return Promise.resolve();
+    } else {
+      // Ask the user before creating a fork on their behalf.
+      await getPermissionToFork();
+
+      const fork = await this.api!.createFork();
+      // Fork creation is asynchronous server-side; wait until it is reachable.
+      return this.pollUntilForkExists({ repo: `/repos/${fork.full_name}`, token });
+    }
+  }
+
+ restoreUser(user: User) {
+ return this.openAuthoringEnabled
+ ? this.authenticateWithFork({
+ userData: user,
+ // no-op: restoreUser doesn't need fork approval UX
+ // eslint-disable-next-line @typescript-eslint/no-empty-function
+ getPermissionToFork: () => {},
+ }).then(() => this.authenticate(user))
+ : this.authenticate(user);
+ }
+
+  /**
+   * Token login. Builds the API client (honoring an injected mock, consistent
+   * with authenticateWithFork), verifies the user has write access to the
+   * effective repo, and returns the display user.
+   * @throws when the repo is unreachable or the user lacks write access.
+   */
+  async authenticate(state: Credentials) {
+    this.token = state.token as string;
+
+    // Clear cached user data when token changes to prevent stale data across logins
+    this._currentUserPromise = undefined;
+    this._userIsOriginMaintainerPromises = {};
+
+    // Respect an injected API (tests) — mirrors the guard in authenticateWithFork.
+    const apiCtor = API;
+    this.api =
+      this.options.API ||
+      new apiCtor({
+        token: this.token,
+        branch: this.branch,
+        repo: this.repo,
+        originRepo: this.originRepo,
+        apiRoot: this.apiRoot,
+        useOpenAuthoring: this.useOpenAuthoring,
+        cmsLabelPrefix: this.cmsLabelPrefix,
+        initialWorkflowStatus: this.initialWorkflowStatus,
+      });
+    const user = await this.api!.user();
+    const isCollab = await this.api!.hasWriteAccess().catch(error => {
+      error.message = stripIndent`
+        Repo "${this.repo}" not found.
+
+        Please ensure the repo information is spelled correctly.
+
+        If the repo is private, make sure you're logged into a Forgejo account with access.
+
+        If your repo is under an organization, ensure the organization has granted access to Static
+        CMS.
+      `;
+      throw error;
+    });
+
+    // Unauthorized user
+    if (!isCollab) {
+      throw new Error('Your Forgejo user account does not have access to this repo.');
+    }
+
+    // Authorized user
+    return {
+      name: user.full_name,
+      login: user.login,
+      avatar_url: user.avatar_url,
+      token: state.token as string,
+    };
+  }
+
+ logout() {
+ this.token = null;
+
+ // Clear cached user data on logout
+ this._currentUserPromise = undefined;
+ this._userIsOriginMaintainerPromises = {};
+
+ if (this.api && this.api.reset && typeof this.api.reset === 'function') {
+ return this.api.reset();
+ }
+ }
+
+ getToken() {
+ return Promise.resolve(this.token);
+ }
+
+ getCursorAndFiles = (files: ApiFile[], page: number) => {
+ const pageSize = 20;
+ const count = files.length;
+ const pageCount = Math.ceil(files.length / pageSize);
+
+ const actions = [] as string[];
+ if (page > 1) {
+ actions.push('prev');
+ actions.push('first');
+ }
+ if (page < pageCount) {
+ actions.push('next');
+ actions.push('last');
+ }
+
+ const cursor = Cursor.create({
+ actions,
+ meta: { page, count, pageSize, pageCount },
+ data: { files },
+ });
+ const pageFiles = files.slice((page - 1) * pageSize, page * pageSize);
+ return { cursor, files: pageFiles };
+ };
+
+  // Lists the first page of entries in `folder` matching `extension`, and
+  // attaches a pagination cursor for traverseCursor.
+  async entriesByFolder(folder: string, extension: string, depth: number) {
+    // Reads go through the origin repo (a fork may lag behind upstream).
+    const repoURL = this.api!.originRepoURL;
+
+    let cursor: Cursor;
+
+    const listFiles = () =>
+      this.api!.listFiles(folder, {
+        repoURL,
+        depth,
+      }).then(files => {
+        const filtered = files.filter(file => filterByExtension(file, extension));
+        const result = this.getCursorAndFiles(filtered, 1);
+        cursor = result.cursor;
+        return result.files;
+      });
+
+    const readFile = (path: string, id: string | null | undefined) =>
+      this.api!.readFile(path, id, { repoURL }) as Promise<string>;
+
+    const files = await entriesByFolder(
+      listFiles,
+      readFile,
+      this.api!.readFileMetadata.bind(this.api),
+      API_NAME,
+    );
+    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+    // @ts-ignore
+    files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
+    return files;
+  }
+
+  // Like entriesByFolder but returns ALL matching entries (no pagination).
+  async allEntriesByFolder(folder: string, extension: string, depth: number) {
+    const repoURL = this.api!.originRepoURL;
+
+    const listFiles = () =>
+      this.api!.listFiles(folder, {
+        repoURL,
+        depth,
+      }).then(files => files.filter(file => filterByExtension(file, extension)));
+
+    const readFile = (path: string, id: string | null | undefined) => {
+      return this.api!.readFile(path, id, { repoURL }) as Promise<string>;
+    };
+
+    const files = await entriesByFolder(
+      listFiles,
+      readFile,
+      this.api!.readFileMetadata.bind(this.api),
+      API_NAME,
+    );
+    return files;
+  }
+
+  // Fetches a fixed set of files (file-collection entries). Unreadable files
+  // resolve to an empty string rather than failing the whole batch.
+  entriesByFiles(files: ImplementationFile[]) {
+    const repoURL = this.api!.repoURL;
+
+    const readFile = (path: string, id: string | null | undefined) =>
+      this.api!.readFile(path, id, { repoURL }).catch(() => '') as Promise<string>;
+
+    return entriesByFiles(files, readFile, this.api!.readFileMetadata.bind(this.api), API_NAME);
+  }
+
+ // Fetches a single entry.
+ getEntry(path: string) {
+ const repoURL = this.api!.originRepoURL;
+ return this.api!.readFile(path, null, { repoURL })
+ .then(data => ({
+ file: { path, id: null },
+ data: data as string,
+ }))
+ .catch(() => ({ file: { path, id: null }, data: '' }));
+ }
+
+ async getMedia(mediaFolder = this.mediaFolder, folderSupport?: boolean) {
+ if (!mediaFolder) {
+ return [];
+ }
+ return this.api!.listFiles(mediaFolder, undefined, folderSupport).then(files =>
+ files.map(({ id, name, size, path, type }) => {
+ return { id, name, size, displayURL: { id, path }, path, isDirectory: type === 'tree' };
+ }),
+ );
+ }
+
+ async getMediaFile(path: string) {
+ const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
+
+ const name = basename(path);
+ const fileObj = blobToFileObj(name, blob);
+ const url = URL.createObjectURL(fileObj);
+ const id = await getBlobSHA(blob);
+
+ return {
+ id,
+ displayURL: url,
+ path,
+ name,
+ size: fileObj.size,
+ file: fileObj,
+ url,
+ };
+ }
+
+ getMediaDisplayURL(displayURL: DisplayURL) {
+ this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
+ return getMediaDisplayURL(
+ displayURL,
+ this.api!.readFile.bind(this.api!),
+ this._mediaDisplayURLSem,
+ );
+ }
+
+ persistEntry(entry: Entry, options: PersistOptions) {
+ // persistEntry is a transactional operation
+ return runWithLock(
+ this.lock,
+ () => {
+ if (options.useWorkflow) {
+ const slug = entry.dataFiles[0].slug;
+ const collection = options.collectionName as string;
+ const files = [...entry.dataFiles, ...entry.assets];
+ return this.api!.editorialWorkflowGit(files, slug, collection, options);
+ }
+ return this.api!.persistFiles(entry.dataFiles, entry.assets, options);
+ },
+ 'Failed to acquire persist entry lock',
+ );
+ }
+
+ async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
+ try {
+ await this.api!.persistFiles([], [mediaFile], options);
+ const { sha, path, fileObj } = mediaFile as AssetProxy & { sha: string };
+ const displayURL = URL.createObjectURL(fileObj as Blob);
+ return {
+ id: sha,
+ name: fileObj!.name,
+ size: fileObj!.size,
+ displayURL,
+ path: trimStart(path, '/'),
+ };
+ } catch (error) {
+ console.error(error);
+ throw error;
+ }
+ }
+
+ async deleteFiles(paths: string[], commitMessage: string): Promise {
+ await this.api!.deleteFiles(paths, commitMessage);
+ }
+
+  // Moves through the client-side pagination produced by getCursorAndFiles:
+  // the full file list travels inside the cursor, so paging needs no new
+  // listing request — only the entries for the target page are fetched.
+  async traverseCursor(cursor: Cursor, action: string) {
+    const meta = cursor.meta!;
+    const files = cursor.data!.get('files')!.toJS() as ApiFile[];
+
+    let result: { cursor: Cursor; files: ApiFile[] };
+    switch (action) {
+      case 'first': {
+        result = this.getCursorAndFiles(files, 1);
+        break;
+      }
+      case 'last': {
+        result = this.getCursorAndFiles(files, meta.get('pageCount'));
+        break;
+      }
+      case 'next': {
+        result = this.getCursorAndFiles(files, meta.get('page') + 1);
+        break;
+      }
+      case 'prev': {
+        result = this.getCursorAndFiles(files, meta.get('page') - 1);
+        break;
+      }
+      default: {
+        result = this.getCursorAndFiles(files, 1);
+        break;
+      }
+    }
+
+    const readFile = (path: string, id: string | null | undefined) =>
+      this.api!.readFile(path, id, { repoURL: this.api!.originRepoURL }).catch(
+        () => '',
+      ) as Promise<string>;
+
+    const entries = await entriesByFiles(
+      result.files,
+      readFile,
+      this.api!.readFileMetadata.bind(this.api),
+      API_NAME,
+    );
+
+    return {
+      entries,
+      cursor: result.cursor,
+    };
+  }
+
+ async unpublishedEntries() {
+ const listEntriesKeys = () =>
+ this.api!.listUnpublishedBranches().then(branches =>
+ branches.map(branch => contentKeyFromBranch(branch)),
+ );
+
+ const ids = await unpublishedEntries(listEntriesKeys);
+ return ids;
+ }
+
+ async unpublishedEntry({
+ id,
+ collection,
+ slug,
+ }: {
+ id?: string;
+ collection?: string;
+ slug?: string;
+ }) {
+ if (id) {
+ const data = await this.api!.retrieveUnpublishedEntryData(id);
+ return data;
+ } else if (collection && slug) {
+ const contentKey = this.api!.generateContentKey(collection, slug);
+ const data = await this.api!.retrieveUnpublishedEntryData(contentKey);
+ return data;
+ } else {
+ throw new Error('Missing unpublished entry id or collection and slug');
+ }
+ }
+
+ async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
+ const contentKey = this.api!.generateContentKey(collection, slug);
+ const branch = branchFromContentKey(contentKey);
+ const data = (await this.api!.readFile(path, id, { branch })) as string;
+ return data;
+ }
+
+ async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
+ const contentKey = this.api!.generateContentKey(collection, slug);
+ const branch = branchFromContentKey(contentKey);
+ const blob = (await this.api!.readFile(path, id, { branch, parseText: false })) as Blob;
+ const name = basename(path);
+ const fileObj = blobToFileObj(name, blob);
+ return {
+ id,
+ name,
+ path,
+ size: fileObj.size,
+ displayURL: URL.createObjectURL(fileObj),
+ file: fileObj,
+ };
+ }
+
+ updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
+ return runWithLock(
+ this.lock,
+ () => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
+ 'Failed to acquire update entry status lock',
+ );
+ }
+
+ publishUnpublishedEntry(collection: string, slug: string) {
+ return runWithLock(
+ this.lock,
+ () => this.api!.publishUnpublishedEntry(collection, slug),
+ 'Failed to acquire publish entry lock',
+ );
+ }
+
+ deleteUnpublishedEntry(collection: string, slug: string) {
+ return runWithLock(
+ this.lock,
+ () => this.api!.deleteUnpublishedEntry(collection, slug),
+ 'Failed to acquire delete entry lock',
+ );
+ }
+
+ async getDeployPreview() {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ return {} as any;
+ }
+}
diff --git a/packages/decap-cms-backend-forgejo/src/index.ts b/packages/decap-cms-backend-forgejo/src/index.ts
new file mode 100644
index 000000000000..57529fd7a2ff
--- /dev/null
+++ b/packages/decap-cms-backend-forgejo/src/index.ts
@@ -0,0 +1,10 @@
+import ForgejoBackend from './implementation';
+import API from './API';
+import AuthenticationPage from './AuthenticationPage';
+
+export const DecapCmsBackendForgejo = {
+ ForgejoBackend,
+ API,
+ AuthenticationPage,
+};
+export { API, AuthenticationPage, ForgejoBackend };
diff --git a/packages/decap-cms-backend-forgejo/src/types.ts b/packages/decap-cms-backend-forgejo/src/types.ts
new file mode 100644
index 000000000000..4db17106da73
--- /dev/null
+++ b/packages/decap-cms-backend-forgejo/src/types.ts
@@ -0,0 +1,344 @@
+export type ForgejoUser = {
+ active: boolean;
+ avatar_url: string;
+ created: string;
+ description: string;
+ email: string;
+ followers_count: number;
+ following_count: number;
+ full_name: string;
+ id: number;
+ is_admin: boolean;
+ language: string;
+ last_login: string;
+ location: string;
+ login: string;
+ login_name?: string;
+ prohibit_login: boolean;
+ restricted: boolean;
+ starred_repos_count: number;
+ visibility: string;
+ website: string;
+};
+
+export type ForgejoTeam = {
+  can_create_org_repo: boolean;
+  description: string;
+  id: number;
+  includes_all_repositories: boolean;
+  name: string;
+  organization: ForgejoOrganization;
+  permission: string;
+  // Enabled unit names, e.g. 'repo.code', 'repo.issues'.
+  units: Array<string>;
+  // Per-unit permission map (unit name -> permission level);
+  // TODO confirm value type against the Forgejo API schema.
+  units_map: Map<string, string>;
+};
+
+export type ForgejoOrganization = {
+ avatar_url: string;
+ description: string;
+ full_name: string;
+ id: number;
+ location: string;
+ name: string;
+ repo_admin_change_team_access: boolean;
+ username: string;
+ visibility: string;
+ website: string;
+};
+
+type CommitUser = {
+ date: string;
+ email: string;
+ name: string;
+};
+
+type CommitMeta = {
+ created: string;
+ sha: string;
+ url: string;
+};
+
+type PayloadUser = {
+ email: string;
+ name: string;
+ username: string;
+};
+
+type PayloadCommitVerification = {
+ payload: string;
+ reason: string;
+ signature: string;
+ signer: PayloadUser;
+ verified: boolean;
+};
+
+type ReposListCommitsResponseItemCommit = {
+ author: CommitUser;
+ committer: CommitUser;
+ message: string;
+ tree: CommitMeta;
+ url: string;
+ verification: PayloadCommitVerification;
+};
+
+type ForgejoRepositoryPermissions = {
+ admin: boolean;
+ pull: boolean;
+ push: boolean;
+};
+
+type ForgejoRepositoryExternalTracker = {
+ external_tracker_format: string;
+ external_tracker_regexp_pattern: string;
+ external_tracker_style: string;
+ external_tracker_url: string;
+};
+
+type ForgejoRepositoryExternalWiki = {
+ external_wiki_url: string;
+};
+
+type ForgejoRepositoryInternalTracker = {
+ allow_only_contributors_to_track_time: boolean;
+ enable_issue_dependencies: boolean;
+ enable_time_tracker: boolean;
+};
+
+type ForgejoRepositoryRepoTransfer = {
+  description: string;
+  doer: ForgejoUser;
+  recipient: ForgejoUser;
+  teams: Array<ForgejoTeam>;
+  // NOTE(review): these two fields duplicate ForgejoRepositoryInternalTracker
+  // and look like a copy-paste remnant — confirm against the Forgejo API.
+  enable_issue_dependencies: boolean;
+  enable_time_tracker: boolean;
+};
+
+export type ForgejoRepository = {
+ allow_merge_commits: boolean;
+ allow_rebase: boolean;
+ allow_rebase_explicit: boolean;
+ allow_rebase_update: boolean;
+ allow_squash_merge: boolean;
+ archived: boolean;
+ avatar_url: string;
+ clone_url: string;
+ created_at: string;
+ default_branch: string;
+ default_delete_branch_after_merge: boolean;
+ default_merge_style: boolean;
+ description: string;
+ empty: boolean;
+ external_tracker: ForgejoRepositoryExternalTracker;
+ external_wiki: ForgejoRepositoryExternalWiki;
+ fork: boolean;
+ forks_count: number;
+ full_name: string;
+ has_issues: boolean;
+ has_projects: boolean;
+ has_pull_requests: boolean;
+ has_wiki: boolean;
+ html_url: string;
+ id: number;
+ ignore_whitespace_conflicts: boolean;
+ internal: boolean;
+ internal_tracker: ForgejoRepositoryInternalTracker;
+ language: string;
+ languages_url: string;
+ mirror: boolean;
+ mirror_interval: string;
+ mirror_updated: string;
+ name: string;
+ open_issues_count: number;
+ open_pr_counter: number;
+ original_url: string;
+ owner: ForgejoUser;
+ parent?: { full_name: string } | null;
+ permissions: ForgejoRepositoryPermissions;
+ private: boolean;
+ release_counter: number;
+ repo_transfer: ForgejoRepositoryRepoTransfer;
+ size: number;
+ ssh_url: string;
+ stars_count: number;
+ template: boolean;
+ updated_at: string;
+ watchers_count: number;
+ website: string;
+};
+
+type ReposListCommitsResponseItemCommitAffectedFiles = {
+ filename: string;
+};
+
+type ReposListCommitsResponseItemCommitStats = {
+ additions: number;
+ deletions: number;
+ total: number;
+};
+
+type ReposListCommitsResponseItem = {
+  author: ForgejoUser;
+  commit: ReposListCommitsResponseItemCommit;
+  committer: ForgejoUser;
+  created: string;
+  files: Array<ReposListCommitsResponseItemCommitAffectedFiles>;
+  html_url: string;
+  parents: Array<CommitMeta>;
+  sha: string;
+  stats: ReposListCommitsResponseItemCommitStats;
+  url: string;
+};
+
+export type ReposListCommitsResponse = Array;
+
+export type GitGetBlobResponse = {
+ content: string;
+ encoding: string;
+ sha: string;
+ size: number;
+ url: string;
+};
+
+type GitGetTreeResponseTreeItem = {
+ mode: string;
+ path: string;
+ sha: string;
+ size?: number;
+ type: string;
+ url: string;
+};
+
+export type GitGetTreeResponse = {
+  page: number;
+  sha: string;
+  total_count: number;
+  tree: Array<GitGetTreeResponseTreeItem>;
+  truncated: boolean;
+  url: string;
+};
+
+type FileLinksResponse = {
+ git: string;
+ html: string;
+ self: string;
+};
+
+type ContentsResponse = {
+ _links: FileLinksResponse;
+ content?: string | null;
+ download_url: string;
+ encoding?: string | null;
+ git_url: string;
+ html_url: string;
+ last_commit_sha: string;
+ name: string;
+ path: string;
+ sha: string;
+ size: number;
+ submodule_git_url?: string | null;
+ target?: string | null;
+ type: string;
+ url: string;
+};
+
+type FileCommitResponse = {
+  author: CommitUser;
+  committer: CommitUser;
+  created: string;
+  html_url: string;
+  message: string;
+  parents: Array<CommitMeta>;
+  sha: string;
+  tree: CommitMeta;
+  url: string;
+};
+
+export type FilesResponse = {
+  commit: FileCommitResponse;
+  content: Array<ContentsResponse>;
+  verification: PayloadCommitVerification;
+};
+
+// Editorial Workflow Types
+
+export type ForgejoLabel = {
+ id?: number;
+ name: string;
+ color?: string;
+ description?: string;
+ url?: string;
+};
+
+export type ForgejoBranch = {
+ commit: {
+ id: string;
+ message: string;
+ url: string;
+ author: CommitUser;
+ committer: CommitUser;
+ };
+ name: string;
+ protected: boolean;
+};
+
+export type ForgejoPullRequestHead = {
+ label?: string;
+ ref: string;
+ sha: string;
+ repo?: ForgejoRepository;
+};
+
+export type ForgejoPullRequestBase = {
+ label?: string;
+ ref?: string;
+ sha?: string;
+ repo?: ForgejoRepository;
+};
+
+// ForgejoPullRequest represents a pull request from the Forgejo API.
+// Many fields are optional to accommodate:
+// 1. Mock PRs used for open authoring branch-only drafts (MOCK_PULL_REQUEST)
+// 2. Partial API responses that may not include all fields
+export type ForgejoPullRequest = {
+ id?: number;
+ number: number;
+ state: 'open' | 'closed';
+ title?: string;
+ body?: string;
+ user?: ForgejoUser;
+ labels: ForgejoLabel[];
+ head: ForgejoPullRequestHead;
+ base?: ForgejoPullRequestBase;
+ merged?: boolean;
+ merged_at?: string | null;
+ updated_at?: string;
+ created_at?: string;
+};
+
+export type ForgejoChangedFile = {
+ filename: string;
+ status: 'added' | 'removed' | 'modified' | 'renamed';
+ additions: number;
+ deletions: number;
+ changes: number;
+ previous_filename?: string;
+ sha?: string;
+};
+
+export type ForgejoCompareCommit = {
+ sha: string;
+ commit: {
+ message: string;
+ author: {
+ name: string;
+ email: string;
+ date: string;
+ };
+ };
+};
+
+export type ForgejoCompareResponse = {
+ commits: ForgejoCompareCommit[];
+ files: ForgejoChangedFile[];
+ total_commits: number;
+};
diff --git a/packages/decap-cms-backend-forgejo/webpack.config.js b/packages/decap-cms-backend-forgejo/webpack.config.js
new file mode 100644
index 000000000000..42edd361d4a7
--- /dev/null
+++ b/packages/decap-cms-backend-forgejo/webpack.config.js
@@ -0,0 +1,3 @@
+const { getConfig } = require('../../scripts/webpack.js');
+
+module.exports = getConfig();
diff --git a/packages/decap-cms-core/index.d.ts b/packages/decap-cms-core/index.d.ts
index f29c08c4e2e6..040cfcafd1bf 100644
--- a/packages/decap-cms-core/index.d.ts
+++ b/packages/decap-cms-core/index.d.ts
@@ -10,6 +10,7 @@ declare module 'decap-cms-core' {
| 'github'
| 'gitlab'
| 'gitea'
+ | 'forgejo'
| 'bitbucket'
| 'test-repo'
| 'proxy';
diff --git a/packages/decap-cms-core/src/types/redux.ts b/packages/decap-cms-core/src/types/redux.ts
index 3c059b4801fc..39fc19266086 100644
--- a/packages/decap-cms-core/src/types/redux.ts
+++ b/packages/decap-cms-core/src/types/redux.ts
@@ -1,6 +1,6 @@
import type { Action } from 'redux';
import type { StaticallyTypedRecord } from './immutable';
-import type { Map, List, OrderedMap, Set } from 'immutable';
+import type { List, Map, OrderedMap, Set } from 'immutable';
import type { FILES, FOLDER } from '../constants/collectionTypes';
import type { MediaFile as BackendMediaFile } from '../backend';
import type { Auth } from '../reducers/auth';
@@ -18,6 +18,7 @@ export type CmsBackendType =
| 'github'
| 'gitlab'
| 'gitea'
+ | 'forgejo'
| 'bitbucket'
| 'test-repo'
| 'proxy';
@@ -485,7 +486,11 @@ export type Config = StaticallyTypedRecord<{
}>;
type PagesObject = {
- [collection: string]: { isFetching: boolean; page: number; ids: List };
+ [collection: string]: {
+ isFetching: boolean;
+ page: number;
+ ids: List;
+ };
};
type Pages = StaticallyTypedRecord;
diff --git a/packages/decap-cms-locales/src/de/index.js b/packages/decap-cms-locales/src/de/index.js
index 0fd385cea27f..f16feff92d9d 100644
--- a/packages/decap-cms-locales/src/de/index.js
+++ b/packages/decap-cms-locales/src/de/index.js
@@ -8,6 +8,7 @@ const de = {
loginWithGitHub: 'Mit GitHub einloggen',
loginWithGitLab: 'Mit GitLab einloggen',
loginWithGitea: 'Mit Gitea einloggen',
+ loginWithForgejo: 'Mit Forgejo einloggen',
errors: {
email: 'Stellen Sie sicher, Ihre E-Mail-Adresse einzugeben.',
password: 'Bitte geben Sie Ihr Passwort ein.',
diff --git a/packages/decap-cms-locales/src/en/index.js b/packages/decap-cms-locales/src/en/index.js
index b41cd36de2fd..b35f10d613bf 100644
--- a/packages/decap-cms-locales/src/en/index.js
+++ b/packages/decap-cms-locales/src/en/index.js
@@ -8,6 +8,7 @@ const en = {
loginWithGitHub: 'Login with GitHub',
loginWithGitLab: 'Login with GitLab',
loginWithGitea: 'Login with Gitea',
+ loginWithForgejo: 'Login with Forgejo',
errors: {
email: 'Make sure to enter your email.',
password: 'Please enter your password.',
diff --git a/packages/decap-cms-ui-default/src/Icon/images/_index.js b/packages/decap-cms-ui-default/src/Icon/images/_index.js
index 80408dfeeb02..6755d0c1e25d 100644
--- a/packages/decap-cms-ui-default/src/Icon/images/_index.js
+++ b/packages/decap-cms-ui-default/src/Icon/images/_index.js
@@ -17,6 +17,7 @@ import iconFolder from './folder.svg';
import iconGithub from './github.svg';
import iconGitlab from './gitlab.svg';
import iconGitea from './gitea.svg';
+import iconForgejo from './forgejo.svg';
import iconGrid from './grid.svg';
import iconH1 from './h1.svg';
import iconH2 from './h2.svg';
@@ -69,6 +70,7 @@ const images = {
github: iconGithub,
gitlab: iconGitlab,
gitea: iconGitea,
+ forgejo: iconForgejo,
grid: iconGrid,
h1: iconH1,
h2: iconH2,
diff --git a/packages/decap-cms-ui-default/src/Icon/images/forgejo.svg b/packages/decap-cms-ui-default/src/Icon/images/forgejo.svg
new file mode 100644
index 000000000000..7b6125387195
--- /dev/null
+++ b/packages/decap-cms-ui-default/src/Icon/images/forgejo.svg
@@ -0,0 +1,30 @@
+
\ No newline at end of file
diff --git a/scripts/pack-and-install.sh b/scripts/pack-and-install.sh
index 94f421ffc6a8..3d519f5ef5b2 100755
--- a/scripts/pack-and-install.sh
+++ b/scripts/pack-and-install.sh
@@ -28,6 +28,7 @@ PACKAGES=(
"decap-cms-backend-bitbucket"
"decap-cms-backend-git-gateway"
"decap-cms-backend-gitea"
+ "decap-cms-backend-forgejo"
"decap-cms-backend-github"
"decap-cms-backend-gitlab"
"decap-cms-backend-proxy"