Merge branch 'develop' of https://github.com/TriliumNext/Notes into style/next/forms
.github/workflows/playwright.yml  (vendored, 6 changed lines)
@@ -1,9 +1,9 @@
name: Playwright Tests
on:
  push:
    branches: [ main, master ]
    branches: [ develop ]
  pull_request:
    branches: [ main, master ]
    branches: [ develop ]
jobs:
  test:
    timeout-minutes: 60
@@ -20,7 +20,7 @@ jobs:
      - name: Run Playwright tests
        run: npx playwright test
      - uses: actions/upload-artifact@v4
        if: always()
        if: ${{ !cancelled() }}
        with:
          name: playwright-report
          path: playwright-report/

.gitignore  (vendored, 2 changed lines)
@@ -9,12 +9,12 @@ po-*/

*.db
!integration-tests/db/document.db
!integration-tests/db/config.ini
integration-tests/db/log
integration-tests/db/sessions
integration-tests/db/backup
integration-tests/db/session_secret.txt

config.ini
cert.key
cert.crt
server-package.json

.prettierrc  (14 changed lines)
@@ -3,12 +3,20 @@
    "tabWidth": 4,
    "useTabs": false,
    "semi": true,
    "singleQuote": true,
    "singleQuote": false,
    "quoteProps": "as-needed",
    "trailingComma": "none",
    "bracketSpacing": false,
    "bracketSpacing": true,
    "arrowParens": "always",
    "proseWrap": "preserve",
    "htmlWhitespaceSensitivity": "css",
    "endOfLine": "lf"
    "endOfLine": "lf",
    "overrides": [
        {
            "files": ["*.json"],
            "options": {
                "tabWidth": 2
            }
        }
    ]
}

.vscode/extensions.json  (vendored, 5 changed lines)
@@ -1,6 +1,3 @@
{
    "recommendations": [
        "lokalise.i18n-ally",
        "editorconfig.editorconfig"
    ]
    "recommendations": ["lokalise.i18n-ally", "editorconfig.editorconfig"]
}

.vscode/launch.json  (vendored, 8 changed lines)
@@ -14,11 +14,9 @@
                "TRILIUM_ENV": "dev",
                "TRILIUM_DATA_DIR": "./data"
            },
            "skipFiles": [
                "<node_internals>/**"
            ],
            "skipFiles": ["<node_internals>/**"],
            "type": "node",
            "outputCapture": "std",
        },
            "outputCapture": "std"
        }
    ]
}

.vscode/settings.json  (vendored, 11 changed lines)
@@ -5,10 +5,7 @@
    "typescript.tsdk": "node_modules/typescript/lib",
    "i18n-ally.sourceLanguage": "en",
    "i18n-ally.keystyle": "nested",
    "i18n-ally.localesPaths": [
        "./src/public/translations",
        "./translations"
    ],
    "i18n-ally.localesPaths": ["./src/public/translations", "./translations"],
    "[jsonc]": {
        "editor.defaultFormatter": "vscode.json-language-features"
    },
@@ -18,10 +15,8 @@
    "[typescript]": {
        "editor.defaultFormatter": "vscode.typescript-language-features"
    },
    "github-actions.workflows.pinned.workflows": [
        ".github/workflows/nightly.yml"
    ],
    "github-actions.workflows.pinned.workflows": [".github/workflows/nightly.yml"],
    "[css]": {
        "editor.defaultFormatter": "vscode.css-language-features"
    },
    }
}

.vscode/snippets.code-snippets  (vendored, 4 changed lines)
@@ -19,8 +19,6 @@
    "JQuery HTMLElement field": {
        "scope": "typescript",
        "prefix": "jqf",
        "body": [
            "private $${1:name}!: JQuery<HTMLElement>;"
        ]
        "body": ["private $${1:name}!: JQuery<HTMLElement>;"]
    }
}

Dockerfile  (12 changed lines)
@@ -1,5 +1,5 @@
# Build stage
FROM node:22.12.0-bullseye-slim AS builder
FROM node:22.13.0-bullseye-slim AS builder

# Configure build dependencies in a single layer
RUN apt-get update && apt-get install -y --no-install-recommends \
@@ -23,7 +23,6 @@ COPY server-package.json package.json
# Build and cleanup in a single layer
RUN cp -R build/src/* src/. && \
    cp build/docker_healthcheck.js . && \
    rm -r build && \
    rm docker_healthcheck.ts && \
    npm install && \
    npm run webpack && \
@@ -31,11 +30,14 @@ RUN cp -R build/src/* src/. && \
    npm cache clean --force && \
    cp src/public/app/share.js src/public/app-dist/. && \
    cp -r src/public/app/doc_notes src/public/app-dist/. && \
    rm -rf src/public/app && \
    rm src/services/asset_path.ts
    rm -rf src/public/app/* && \
    mkdir -p src/public/app/services && \
    cp -r build/src/public/app/services/mime_type_definitions.js src/public/app/services/mime_type_definitions.js && \
    rm src/services/asset_path.ts && \
    rm -r build

# Runtime stage
FROM node:22.12.0-bullseye-slim
FROM node:22.13.0-bullseye-slim

# Install only runtime dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \

@@ -1,5 +1,5 @@
# Build stage
FROM node:22.12.0-alpine AS builder
FROM node:22.13.0-alpine AS builder

# Configure build dependencies
RUN apk add --no-cache --virtual .build-dependencies \
@@ -22,7 +22,6 @@ COPY server-package.json package.json
# Build and cleanup in a single layer
RUN cp -R build/src/* src/. && \
    cp build/docker_healthcheck.js . && \
    rm -r build && \
    rm docker_healthcheck.ts && \
    npm install && \
    npm run webpack && \
@@ -31,10 +30,13 @@ RUN cp -R build/src/* src/. && \
    cp src/public/app/share.js src/public/app-dist/. && \
    cp -r src/public/app/doc_notes src/public/app-dist/. && \
    rm -rf src/public/app && \
    rm src/services/asset_path.ts
    mkdir -p src/public/app/services && \
    cp -r build/src/public/app/services/mime_type_definitions.js src/public/app/services/mime_type_definitions.js && \
    rm src/services/asset_path.ts && \
    rm -r build

# Runtime stage
FROM node:22.12.0-alpine
FROM node:22.13.0-alpine

# Install runtime dependencies
RUN apk add --no-cache su-exec shadow

@@ -65,7 +65,7 @@ const copy = async () => {
        "node_modules/katex/dist/contrib/mhchem.min.js",
        "node_modules/katex/dist/contrib/auto-render.min.js",
        "node_modules/@highlightjs/cdn-assets/highlight.min.js",
        "node_modules/@mind-elixir/node-menu/dist/node-menu.umd.cjs",
        "node_modules/@mind-elixir/node-menu/dist/node-menu.umd.cjs"
    ];

    for (const file of nodeModulesFile) {
@@ -87,7 +87,6 @@ const copy = async () => {
        "node_modules/panzoom/dist/",
        "node_modules/i18next/",
        "node_modules/i18next-http-backend/",
        "node_modules/eslint/bin/",
        "node_modules/jsplumb/dist/",
        "node_modules/vanilla-js-wheel-zoom/dist/",
        "node_modules/mark.js/dist/",

@@ -1,7 +1,7 @@
#!/usr/bin/env node

import anonymizationService from '../src/services/anonymization.js';
import fs from 'fs';
import path from 'path';
import anonymizationService from "../src/services/anonymization.js";
import fs from "fs";
import path from "path";

fs.writeFileSync(path.resolve(__dirname, 'tpl', 'anonymize-database.sql'), anonymizationService.getFullAnonymizationScript());
fs.writeFileSync(path.resolve(__dirname, "tpl", "anonymize-database.sql"), anonymizationService.getFullAnonymizationScript());

@@ -13,11 +13,15 @@ async function fetchNote(noteId = null) {
    return await resp.json();
}

document.addEventListener('DOMContentLoaded', () => {
    const toggleMenuButton = document.getElementById('toggleMenuButton');
    const layout = document.getElementById('layout');
document.addEventListener(
    "DOMContentLoaded",
    () => {
        const toggleMenuButton = document.getElementById("toggleMenuButton");
        const layout = document.getElementById("layout");

        if (toggleMenuButton && layout) {
            toggleMenuButton.addEventListener('click', () => layout.classList.toggle('showMenu'));
            toggleMenuButton.addEventListener("click", () => layout.classList.toggle("showMenu"));
        }
}, false);
    },
    false
);

@@ -1,6 +1,8 @@
/* !!!!!! TRILIUM CUSTOM CHANGES !!!!!! */

.printed-content .ck-widget__selection-handle, .printed-content .ck-widget__type-around { /* gets rid of triangles: https://github.com/zadam/trilium/issues/1129 */
.printed-content .ck-widget__selection-handle,
.printed-content .ck-widget__type-around {
    /* gets rid of triangles: https://github.com/zadam/trilium/issues/1129 */
    display: none;
}

@@ -59,7 +61,7 @@
.ck-content .table table td,
.ck-content .table table th {
    min-width: 2em;
    padding: .4em;
    padding: 0.4em;
    border: 1px solid hsl(0, 0%, 75%);
}
/* @ckeditor/ckeditor5-table/theme/table.css */
@@ -83,8 +85,8 @@
    text-align: center;
    color: var(--ck-color-selector-caption-text);
    background-color: var(--ck-color-selector-caption-background);
    padding: .6em;
    font-size: .75em;
    padding: 0.6em;
    font-size: 0.75em;
    outline-offset: -1px;
}
/* @ckeditor/ckeditor5-page-break/theme/pagebreak.css */
@@ -98,7 +100,7 @@
}
/* @ckeditor/ckeditor5-page-break/theme/pagebreak.css */
.ck-content .page-break::after {
    content: '';
    content: "";
    position: absolute;
    border-bottom: 2px dashed hsl(0, 0%, 77%);
    width: 100%;
@@ -107,7 +109,7 @@
.ck-content .page-break__label {
    position: relative;
    z-index: 1;
    padding: .3em .6em;
    padding: 0.3em 0.6em;
    display: block;
    text-transform: uppercase;
    border: 1px solid hsl(0, 0%, 77%);
@@ -158,7 +160,7 @@
    margin-left: 0;
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-content[dir=rtl] .todo-list .todo-list__label > input {
.ck-content[dir="rtl"] .todo-list .todo-list__label > input {
    left: 0;
    margin-right: 0;
    right: -25px;
@@ -169,7 +171,7 @@
    display: block;
    position: absolute;
    box-sizing: border-box;
    content: '';
    content: "";
    width: 100%;
    height: 100%;
    border: 1px solid hsl(0, 0%, 20%);
@@ -182,7 +184,7 @@
    position: absolute;
    box-sizing: content-box;
    pointer-events: none;
    content: '';
    content: "";
    left: calc(var(--ck-todo-list-checkmark-size) / 3);
    top: calc(var(--ck-todo-list-checkmark-size) / 5.3);
    width: calc(var(--ck-todo-list-checkmark-size) / 5.3);
@@ -206,20 +208,21 @@
    vertical-align: middle;
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-content .todo-list .todo-list__label.todo-list__label_without-description input[type=checkbox] {
.ck-content .todo-list .todo-list__label.todo-list__label_without-description input[type="checkbox"] {
    position: absolute;
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-editor__editable.ck-content .todo-list .todo-list__label > input,
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable=false] > input {
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable="false"] > input {
    cursor: pointer;
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-editor__editable.ck-content .todo-list .todo-list__label > input:hover::before, .ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable=false] > input:hover::before {
.ck-editor__editable.ck-content .todo-list .todo-list__label > input:hover::before,
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable="false"] > input:hover::before {
    box-shadow: 0 0 0 5px hsla(0, 0%, 0%, 0.1);
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable=false] > input {
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable="false"] > input {
    -webkit-appearance: none;
    display: inline-block;
    position: relative;
@@ -233,18 +236,18 @@
    margin-left: 0;
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-editor__editable.ck-content[dir=rtl] .todo-list .todo-list__label > span[contenteditable=false] > input {
.ck-editor__editable.ck-content[dir="rtl"] .todo-list .todo-list__label > span[contenteditable="false"] > input {
    left: 0;
    margin-right: 0;
    right: -25px;
    margin-left: -15px;
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable=false] > input::before {
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable="false"] > input::before {
    display: block;
    position: absolute;
    box-sizing: border-box;
    content: '';
    content: "";
    width: 100%;
    height: 100%;
    border: 1px solid hsl(0, 0%, 20%);
@@ -252,12 +255,12 @@
    transition: 250ms ease-in-out box-shadow;
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable=false] > input::after {
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable="false"] > input::after {
    display: block;
    position: absolute;
    box-sizing: content-box;
    pointer-events: none;
    content: '';
    content: "";
    left: calc(var(--ck-todo-list-checkmark-size) / 3);
    top: calc(var(--ck-todo-list-checkmark-size) / 5.3);
    width: calc(var(--ck-todo-list-checkmark-size) / 5.3);
@@ -268,16 +271,16 @@
    transform: rotate(45deg);
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable=false] > input[checked]::before {
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable="false"] > input[checked]::before {
    background: hsl(126, 64%, 41%);
    border-color: hsl(126, 64%, 41%);
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable=false] > input[checked]::after {
.ck-editor__editable.ck-content .todo-list .todo-list__label > span[contenteditable="false"] > input[checked]::after {
    border-color: hsl(0, 0%, 100%);
}
/* @ckeditor/ckeditor5-list/theme/todolist.css */
.ck-editor__editable.ck-content .todo-list .todo-list__label.todo-list__label_without-description input[type=checkbox] {
.ck-editor__editable.ck-content .todo-list .todo-list__label.todo-list__label_without-description input[type="checkbox"] {
    position: absolute;
}
/* @ckeditor/ckeditor5-list/theme/list.css */
@@ -379,8 +382,8 @@
    word-break: break-word;
    color: var(--ck-color-image-caption-text);
    background-color: var(--ck-color-image-caption-background);
    padding: .6em;
    font-size: .75em;
    padding: 0.6em;
    font-size: 0.75em;
    outline-offset: -1px;
}
/* @ckeditor/ckeditor5-image/theme/imagestyle.css */
@@ -488,16 +491,16 @@
/* @ckeditor/ckeditor5-basic-styles/theme/code.css */
.ck-content code {
    background-color: hsla(0, 0%, 78%, 0.3);
    padding: .15em;
    padding: 0.15em;
    border-radius: 2px;
}
/* @ckeditor/ckeditor5-font/theme/fontsize.css */
.ck-content .text-tiny {
    font-size: .7em;
    font-size: 0.7em;
}
/* @ckeditor/ckeditor5-font/theme/fontsize.css */
.ck-content .text-small {
    font-size: .85em;
    font-size: 0.85em;
}
/* @ckeditor/ckeditor5-font/theme/fontsize.css */
.ck-content .text-big {

bin/docs/assets/v0.63.6/libraries/normalize.min.css  (vendored, 148 changed lines)
@@ -1,2 +1,148 @@
/*! normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */html{line-height:1.15;-webkit-text-size-adjust:100%}body{margin:0}main{display:block}h1{font-size:2em;margin:.67em 0}hr{box-sizing:content-box;height:0;overflow:visible}pre{font-family:monospace,monospace;font-size:1em}a{background-color:transparent}abbr[title]{border-bottom:none;text-decoration:underline;text-decoration:underline dotted}b,strong{font-weight:bolder}code,kbd,samp{font-family:monospace,monospace;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}img{border-style:none}button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;line-height:1.15;margin:0}button,input{overflow:visible}button,select{text-transform:none}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button}[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner,button::-moz-focus-inner{border-style:none;padding:0}[type=button]:-moz-focusring,[type=reset]:-moz-focusring,[type=submit]:-moz-focusring,button:-moz-focusring{outline:1px dotted ButtonText}fieldset{padding:.35em .75em .625em}legend{box-sizing:border-box;color:inherit;display:table;max-width:100%;padding:0;white-space:normal}progress{vertical-align:baseline}textarea{overflow:auto}[type=checkbox],[type=radio]{box-sizing:border-box;padding:0}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}details{display:block}summary{display:list-item}template{display:none}[hidden]{display:none}
/*! normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */
html {
    line-height: 1.15;
    -webkit-text-size-adjust: 100%;
}
body {
    margin: 0;
}
main {
    display: block;
}
h1 {
    font-size: 2em;
    margin: 0.67em 0;
}
hr {
    box-sizing: content-box;
    height: 0;
    overflow: visible;
}
pre {
    font-family: monospace, monospace;
    font-size: 1em;
}
a {
    background-color: transparent;
}
abbr[title] {
    border-bottom: none;
    text-decoration: underline;
    text-decoration: underline dotted;
}
b,
strong {
    font-weight: bolder;
}
code,
kbd,
samp {
    font-family: monospace, monospace;
    font-size: 1em;
}
small {
    font-size: 80%;
}
sub,
sup {
    font-size: 75%;
    line-height: 0;
    position: relative;
    vertical-align: baseline;
}
sub {
    bottom: -0.25em;
}
sup {
    top: -0.5em;
}
img {
    border-style: none;
}
button,
input,
optgroup,
select,
textarea {
    font-family: inherit;
    font-size: 100%;
    line-height: 1.15;
    margin: 0;
}
button,
input {
    overflow: visible;
}
button,
select {
    text-transform: none;
}
[type="button"],
[type="reset"],
[type="submit"],
button {
    -webkit-appearance: button;
}
[type="button"]::-moz-focus-inner,
[type="reset"]::-moz-focus-inner,
[type="submit"]::-moz-focus-inner,
button::-moz-focus-inner {
    border-style: none;
    padding: 0;
}
[type="button"]:-moz-focusring,
[type="reset"]:-moz-focusring,
[type="submit"]:-moz-focusring,
button:-moz-focusring {
    outline: 1px dotted ButtonText;
}
fieldset {
    padding: 0.35em 0.75em 0.625em;
}
legend {
    box-sizing: border-box;
    color: inherit;
    display: table;
    max-width: 100%;
    padding: 0;
    white-space: normal;
}
progress {
    vertical-align: baseline;
}
textarea {
    overflow: auto;
}
[type="checkbox"],
[type="radio"] {
    box-sizing: border-box;
    padding: 0;
}
[type="number"]::-webkit-inner-spin-button,
[type="number"]::-webkit-outer-spin-button {
    height: auto;
}
[type="search"] {
    -webkit-appearance: textfield;
    outline-offset: -2px;
}
[type="search"]::-webkit-search-decoration {
    -webkit-appearance: none;
}
::-webkit-file-upload-button {
    -webkit-appearance: button;
    font: inherit;
}
details {
    display: block;
}
summary {
    display: list-item;
}
template {
    display: none;
}
[hidden] {
    display: none;
}
/*# sourceMappingURL=normalize.min.css.map */

@@ -1,5 +1,5 @@
body {
    font-family: 'Lucida Grande', 'Lucida Sans Unicode', arial, sans-serif;
    font-family: "Lucida Grande", "Lucida Sans Unicode", arial, sans-serif;
    line-height: 1.5;
}

@@ -4,15 +4,11 @@ import fs from "fs";
function getBuildDate() {
    const now = new Date();
    now.setMilliseconds(0);
    return now
        .toISOString()
        .replace(".000", "");
    return now.toISOString().replace(".000", "");
}

function getGitRevision() {
    return child_process.execSync('git log -1 --format="%H"')
        .toString("utf-8")
        .trimEnd();
    return child_process.execSync('git log -1 --format="%H"').toString("utf-8").trimEnd();
}

const output = `\

@@ -20,11 +20,7 @@ function processVersion(version) {
    version = version.replace("-beta", "");

    // Add the nightly suffix, plus the date.
    const referenceDate = new Date()
        .toISOString()
        .substring(2, 19)
        .replace(/[-:]*/g, "")
        .replace("T", "-");
    const referenceDate = new Date().toISOString().substring(2, 19).replace(/[-:]*/g, "").replace("T", "-");
    version = `${version}-test-${referenceDate}`;

    return version;

@@ -12,7 +12,5 @@ function onFileChanged(sourceFile: string) {

const sourceDir = "src/public";

chokidar
    .watch(sourceDir)
    .on("change", onFileChanged);
chokidar.watch(sourceDir).on("change", onFileChanged);
console.log(`Watching for changes to ${sourceDir}...`);

@@ -1,6 +1,6 @@
module.exports = () => {
    const sql = require('../../src/services/sql');
    const utils = require('../../src/services/utils');
    const sql = require("../../src/services/sql");
    const utils = require("../../src/services/utils");

    const existingBlobIds = new Set();

@@ -11,7 +11,7 @@ module.exports = () => {
        if (!existingBlobIds.has(blobId)) {
            existingBlobIds.add(blobId);

            sql.insert('blobs', {
            sql.insert("blobs", {
                blobId,
                content: row.content,
                dateModified: row.dateModified,
@@ -24,7 +24,7 @@ module.exports = () => {
            sql.execute("DELETE FROM entity_changes WHERE entityName = 'note_contents' AND entityId = ?", [row.noteId]);
        }

        sql.execute('UPDATE notes SET blobId = ? WHERE noteId = ?', [blobId, row.noteId]);
        sql.execute("UPDATE notes SET blobId = ? WHERE noteId = ?", [blobId, row.noteId]);
    }

    for (const noteRevisionId of sql.getColumn(`SELECT noteRevisionId FROM note_revision_contents`)) {
@@ -34,7 +34,7 @@ module.exports = () => {
        if (!existingBlobIds.has(blobId)) {
            existingBlobIds.add(blobId);

            sql.insert('blobs', {
            sql.insert("blobs", {
                blobId,
                content: row.content,
                dateModified: row.utcDateModified,
@@ -47,7 +47,7 @@ module.exports = () => {
            sql.execute("DELETE FROM entity_changes WHERE entityName = 'note_revision_contents' AND entityId = ?", [row.noteId]);
        }

        sql.execute('UPDATE note_revisions SET blobId = ? WHERE noteRevisionId = ?', [blobId, row.noteRevisionId]);
        sql.execute("UPDATE note_revisions SET blobId = ? WHERE noteRevisionId = ?", [blobId, row.noteRevisionId]);
    }

    const notesWithoutBlobIds = sql.getColumn("SELECT noteId FROM notes WHERE blobId IS NULL");

@@ -1,9 +1,9 @@
module.exports = () => {
    const beccaLoader = require('../../src/becca/becca_loader');
    const becca = require('../../src/becca/becca');
    const cls = require('../../src/services/cls');
    const log = require('../../src/services/log');
    const sql = require('../../src/services/sql');
    const beccaLoader = require("../../src/becca/becca_loader");
    const becca = require("../../src/becca/becca");
    const cls = require("../../src/services/cls");
    const log = require("../../src/services/log");
    const sql = require("../../src/services/sql");

    cls.init(() => {
        // emergency disabling of image compression since it appears to make problems in migration to 0.61
@@ -18,8 +18,7 @@ module.exports = () => {
                if (attachment) {
                    log.info(`Auto-converted note '${note.noteId}' into attachment '${attachment.attachmentId}'.`);
                }
            }
            catch (e) {
            } catch (e) {
                log.error(`Cannot convert note '${note.noteId}' to attachment: ${e.message} ${e.stack}`);
            }
        }

@@ -1,8 +1,8 @@
import http from "http";
import ini from "ini";
import fs from "fs";
import dataDir from './src/services/data_dir.js';
const config = ini.parse(fs.readFileSync(dataDir.CONFIG_INI_PATH, 'utf-8'));
import dataDir from "./src/services/data_dir.js";
const config = ini.parse(fs.readFileSync(dataDir.CONFIG_INI_PATH, "utf-8"));

if (config.Network.https) {
    // built-in TLS (terminated by trilium) is not supported yet, PRs are welcome
@@ -10,12 +10,12 @@ if (config.Network.https) {
    process.exit(0);
}

import port from './src/services/port.js';
import host from './src/services/host.js';
import port from "./src/services/port.js";
import host from "./src/services/host.js";

const options: http.RequestOptions = { timeout: 2000 };

const callback: (res: http.IncomingMessage) => void = res => {
const callback: (res: http.IncomingMessage) => void = (res) => {
    console.log(`STATUS: ${res.statusCode}`);
    if (res.statusCode === 200) {
        process.exit(0);
@@ -26,16 +26,18 @@ const callback: (res: http.IncomingMessage) => void = res => {

let request;

if (port !== 0) { // TCP socket.
if (port !== 0) {
    // TCP socket.
    const url = `http://${host}:${port}/api/health-check`;
    request = http.request(url, options, callback);
} else { // Unix socket.
} else {
    // Unix socket.
    options.socketPath = host;
    options.path = '/api/health-check';
    options.path = "/api/health-check";
    request = http.request(options, callback);
}

request.on("error", err => {
request.on("error", (err) => {
    console.log("ERROR");
    process.exit(1);
});

@@ -1,33 +1,37 @@
#!/usr/bin/env node

import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import dumpService from './inc/dump.js';
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import dumpService from "./inc/dump.js";

yargs(hideBin(process.argv))
    .command('$0 <path_to_document> <target_directory>', 'dump the contents of document.db into the target directory', (yargs) => {
    .command(
        "$0 <path_to_document> <target_directory>",
        "dump the contents of document.db into the target directory",
        (yargs) => {
            return yargs
                .option('path_to_document', { alias: 'p', describe: 'path to the document.db', type: 'string', demandOption: true })
                .option('target_directory', { alias: 't', describe: 'path of the directory into which the notes should be dumped', type: 'string', demandOption: true });
    }, (argv) => {
                .option("path_to_document", { alias: "p", describe: "path to the document.db", type: "string", demandOption: true })
                .option("target_directory", { alias: "t", describe: "path of the directory into which the notes should be dumped", type: "string", demandOption: true });
        },
        (argv) => {
            try {
                dumpService.dumpDocument(argv.path_to_document, argv.target_directory, {
                    includeDeleted: argv.includeDeleted,
                    password: argv.password
                });
            }
            catch (e) {
            } catch (e) {
                console.error(`Unrecoverable error:`, e);
                process.exit(1);
            }
        }
    )
    .option("password", {
        type: "string",
        description: "Set password to be able to decrypt protected notes."
    })
    .option('password', {
        type: 'string',
        description: 'Set password to be able to decrypt protected notes.'
    })
    .option('include-deleted', {
        type: 'boolean',
    .option("include-deleted", {
        type: "boolean",
        default: false,
        description: 'If set to true, dump also deleted notes.'
        description: "If set to true, dump also deleted notes."
    })
    .parse();

@@ -1,6 +1,6 @@
import crypto from 'crypto';
import sql from './sql.js';
import decryptService from './decrypt.js';
import crypto from "crypto";
import sql from "./sql.js";
import decryptService from "./decrypt.js";

function getDataKey(password: any) {
    if (!password) {
@@ -10,26 +10,24 @@ function getDataKey(password: any) {
    try {
        const passwordDerivedKey = getPasswordDerivedKey(password);

        const encryptedDataKey = getOption('encryptedDataKey');
        const encryptedDataKey = getOption("encryptedDataKey");

        const decryptedDataKey = decryptService.decrypt(passwordDerivedKey, encryptedDataKey, 16);

        return decryptedDataKey;
    }
    catch (e: any) {
    } catch (e: any) {
        throw new Error(`Cannot read data key, the entered password might be wrong. The underlying error: '${e.message}', stack:\n${e.stack}`);
    }
}

function getPasswordDerivedKey(password: any) {
    const salt = getOption('passwordDerivedKeySalt');
    const salt = getOption("passwordDerivedKeySalt");

    return getScryptHash(password, salt);
}

function getScryptHash(password: any, salt: any) {
    const hashed = crypto.scryptSync(password, salt, 32,
        { N: 16384, r: 8, p: 1 });
    const hashed = crypto.scryptSync(password, salt, 32, { N: 16384, r: 8, p: 1 });

    return hashed;
}

@@ -1,4 +1,4 @@
import crypto from 'crypto';
import crypto from "crypto";

function decryptString(dataKey: any, cipherText: any) {
    const buffer = decrypt(dataKey, cipherText);
@@ -7,9 +7,9 @@ function decryptString(dataKey: any, cipherText: any) {
        return null;
    }

    const str = buffer.toString('utf-8');
    const str = buffer.toString("utf-8");

    if (str === 'false') {
    if (str === "false") {
        throw new Error("Could not decrypt string.");
    }

@@ -26,12 +26,12 @@ function decrypt(key: any, cipherText: any, ivLength = 13) {
    }

    try {
        const cipherTextBufferWithIv = Buffer.from(cipherText.toString(), 'base64');
        const cipherTextBufferWithIv = Buffer.from(cipherText.toString(), "base64");
        const iv = cipherTextBufferWithIv.slice(0, ivLength);

        const cipherTextBuffer = cipherTextBufferWithIv.slice(ivLength);

        const decipher = crypto.createDecipheriv('aes-128-cbc', pad(key), pad(iv));
        const decipher = crypto.createDecipheriv("aes-128-cbc", pad(key), pad(iv));

        const decryptedBytes = Buffer.concat([decipher.update(cipherTextBuffer), decipher.final()]);

@@ -45,14 +45,12 @@ function decrypt(key: any, cipherText: any, ivLength = 13) {
        }

        return payload;
    }
    catch (e: any) {
    } catch (e: any) {
        // recovery from https://github.com/zadam/trilium/issues/510
        if (e.message?.includes("WRONG_FINAL_BLOCK_LENGTH") || e.message?.includes("wrong final block length")) {
            console.log("Caught WRONG_FINAL_BLOCK_LENGTH, returning cipherText instead");
            return cipherText;
        }
        else {
        } else {
            throw e;
        }
    }
@@ -61,8 +59,7 @@ function decrypt(key: any, cipherText: any, ivLength = 13) {
function pad(data: any) {
    if (data.length > 16) {
        data = data.slice(0, 16);
    }
    else if (data.length < 16) {
    } else if (data.length < 16) {
        const zeros = Array(16 - data.length).fill(0);

        data = Buffer.concat([data, Buffer.from(zeros)]);
@@ -82,7 +79,7 @@ function arraysIdentical(a: any, b: any) {

function shaArray(content: any) {
    // we use this as simple checksum and don't rely on its security so SHA-1 is good enough
    return crypto.createHash('sha1').update(content).digest();
    return crypto.createHash("sha1").update(content).digest();
}

export default {

@@ -1,11 +1,11 @@
import fs from 'fs';
import sanitize from 'sanitize-filename';
import sql from './sql.js';
import decryptService from './decrypt.js';
import dataKeyService from './data_key.js';
import extensionService from './extension.js';
import fs from "fs";
import sanitize from "sanitize-filename";
import sql from "./sql.js";
import decryptService from "./decrypt.js";
import dataKeyService from "./data_key.js";
import extensionService from "./extension.js";

function dumpDocument(documentPath: string, targetPath: string, options: { password: any; includeDeleted: any; }) {
function dumpDocument(documentPath: string, targetPath: string, options: { password: any; includeDeleted: any }) {
    const stats = {
        succeeded: 0,
        failed: 0,
@@ -22,7 +22,7 @@ function dumpDocument(documentPath: string, targetPath: string, options: { passw
    const existingPaths: Record<string, any> = {};
    const noteIdToPath: Record<string, any> = {};

    dumpNote(targetPath, 'root');
    dumpNote(targetPath, "root");

    printDumpResults(stats, options);

@@ -56,10 +56,10 @@ function dumpDocument(documentPath: string, targetPath: string, options: { passw
                safeTitle = safeTitle.substring(0, 20);
            }

            childTargetPath = targetPath + '/' + safeTitle;
            childTargetPath = targetPath + "/" + safeTitle;

            for (let i = 1; i < 100000 && childTargetPath in existingPaths; i++) {
                childTargetPath = targetPath + '/' + safeTitle + '_' + i;
                childTargetPath = targetPath + "/" + safeTitle + "_" + i;
            }

            existingPaths[childTargetPath] = true;
@@ -93,8 +93,7 @@ function dumpDocument(documentPath: string, targetPath: string, options: { passw
            }

            noteIdToPath[noteId] = childTargetPath;
        }
        catch (e: any) {
        } catch (e: any) {
            console.error(`DUMPERROR: Writing '${noteId}' failed with error '${e.message}':\n${e.stack}`);

            stats.failed++;
@@ -104,13 +103,12 @@ function dumpDocument(documentPath: string, targetPath: string, options: { passw

        if (childNoteIds.length > 0) {
            if (childTargetPath === fileNameWithPath) {
                childTargetPath += '_dir';
                childTargetPath += "_dir";
            }

            try {
                fs.mkdirSync(childTargetPath as string, { recursive: true });
            }
            catch (e: any) {
            } catch (e: any) {
                console.error(`DUMPERROR: Creating directory ${childTargetPath} failed with error '${e.message}'`);
            }

@@ -122,12 +120,12 @@ function dumpDocument(documentPath: string, targetPath: string, options: { passw
}

function printDumpResults(stats: any, options: any) {
    console.log('\n----------------------- STATS -----------------------');
    console.log('Successfully dumpted notes: ', stats.succeeded.toString().padStart(5, ' '));
    console.log('Protected notes: ', stats.protected.toString().padStart(5, ' '), options.password ? '' : '(skipped)');
    console.log('Failed notes: ', stats.failed.toString().padStart(5, ' '));
    console.log('Deleted notes: ', stats.deleted.toString().padStart(5, ' '), options.includeDeleted ? "(dumped)" : "(at least, skipped)");
    console.log('-----------------------------------------------------');
    console.log("\n----------------------- STATS -----------------------");
    console.log("Successfully dumpted notes: ", stats.succeeded.toString().padStart(5, " "));
    console.log("Protected notes: ", stats.protected.toString().padStart(5, " "), options.password ? "" : "(skipped)");
    console.log("Failed notes: ", stats.failed.toString().padStart(5, " "));
    console.log("Deleted notes: ", stats.deleted.toString().padStart(5, " "), options.includeDeleted ? "(dumped)" : "(at least, skipped)");
    console.log("-----------------------------------------------------");

    if (!options.password && stats.protected > 0) {
        console.log("\nWARNING: protected notes are present in the document but no password has been provided. Protected notes have not been dumped.");
@@ -140,12 +138,10 @@ function isContentEmpty(content: any) {
    }

    if (typeof content === "string") {
        return !content.trim() || content.trim() === '<p></p>';
    }
    else if (Buffer.isBuffer(content)) {
        return !content.trim() || content.trim() === "<p></p>";
    } else if (Buffer.isBuffer(content)) {
        return content.length === 0;
    }
    else {
    } else {
        return false;
    }
}

@@ -5,15 +5,17 @@ function getFileName(note: any, childTargetPath: string, safeTitle: string) {
    let existingExtension = path.extname(safeTitle).toLowerCase();
    let newExtension;

    if (note.type === 'text') {
        newExtension = 'html';
    } else if (note.mime === 'application/x-javascript' || note.mime === 'text/javascript') {
        newExtension = 'js';
    } else if (existingExtension.length > 0) { // if the page already has an extension, then we'll just keep it
    if (note.type === "text") {
        newExtension = "html";
    } else if (note.mime === "application/x-javascript" || note.mime === "text/javascript") {
        newExtension = "js";
    } else if (existingExtension.length > 0) {
        // if the page already has an extension, then we'll just keep it
        newExtension = null;
    } else {
        if (note.mime?.toLowerCase()?.trim() === "image/jpg") { // image/jpg is invalid but pretty common
            newExtension = 'jpg';
        if (note.mime?.toLowerCase()?.trim() === "image/jpg") {
            // image/jpg is invalid but pretty common
            newExtension = "jpg";
        } else {
            newExtension = mimeTypes.extension(note.mime) || "dat";
        }

@@ -2,7 +2,9 @@ import Database, { Database as DatabaseType } from "better-sqlite3";

let dbConnection: DatabaseType;

const openDatabase = (documentPath: string) => { dbConnection = new Database(documentPath, { readonly: true }) };
const openDatabase = (documentPath: string) => {
    dbConnection = new Database(documentPath, { readonly: true });
};

const getRow = (query: string, params: string[] = []): Record<string, any> => dbConnection.prepare(query).get(params) as Record<string, any>;
const getRows = (query: string, params = []) => dbConnection.prepare(query).all(params);

e2e/i18n.spec.ts  (new file, 51 lines)
@@ -0,0 +1,51 @@
import { test, expect, Page } from "@playwright/test";
import App from "./support/app";

test("Displays translation on desktop", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();

    await expect(page.locator("#left-pane .quick-search input"))
        .toHaveAttribute("placeholder", "Quick search");
});

test("Displays translation on mobile", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto({ isMobile: true });

    await expect(page.locator("#mobile-sidebar-wrapper .quick-search input"))
        .toHaveAttribute("placeholder", "Quick search");
});

test("Displays translations in Settings", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();
    await app.closeAllTabs();
    await app.goToSettings();
    await app.noteTree.getByText("Appearance").click();

    await expect(app.currentNoteSplit).toContainText("Localization");
    await expect(app.currentNoteSplit).toContainText("Language");
});

test("User can change language from settings", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();

    await app.closeAllTabs();
    await app.goToSettings();
    await app.noteTree.getByText("Appearance").click();

    // Check that the default value (English) is set.
    await expect(app.currentNoteSplit).toContainText("Theme");
    const languageCombobox = await app.currentNoteSplit.getByRole("combobox").first();
    await expect(languageCombobox).toHaveValue("en");

    // Select Chinese and ensure the translation is set.
    await languageCombobox.selectOption("cn");
    await expect(app.currentNoteSplit).toContainText("主题");

    // Select English again.
    await languageCombobox.selectOption("en");
    await expect(app.currentNoteSplit).toContainText("Language");
});

e2e/layout/tab_bar.spec.ts  (new file, 59 lines)
@@ -0,0 +1,59 @@
import { test, expect } from "@playwright/test";
import App from "../support/app";

const NOTE_TITLE = "Trilium Integration Test DB";

test("Can drag tabs around", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();

    // [1]: Trilium Integration Test DB note
    await app.closeAllTabs();
    await app.clickNoteOnNoteTreeByTitle(NOTE_TITLE);
    await expect(app.getActiveTab()).toContainText(NOTE_TITLE);

    // [1] [2] [3]
    await app.addNewTab();
    await app.addNewTab();

    let tab = app.getTab(0);

    // Drag the first tab at the end
    await tab.dragTo(app.getTab(2), { targetPosition: { x: 50, y: 0 }});

    tab = app.getTab(2);
    await expect(tab).toContainText(NOTE_TITLE);

    // Drag the tab to the left
    await tab.dragTo(app.getTab(0), { targetPosition: { x: 50, y: 0 }});
    await expect(app.getTab(0)).toContainText(NOTE_TITLE);
});

test("Can drag tab to new window", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();

    await app.closeAllTabs();
    await app.clickNoteOnNoteTreeByTitle(NOTE_TITLE);
    const tab = app.getTab(0);
    await expect(tab).toContainText(NOTE_TITLE);

    const popupPromise = page.waitForEvent("popup");

    const tabPos = await tab.boundingBox();
    if (tabPos) {
        const x = tabPos.x + tabPos.width / 2;
        const y = tabPos.y + tabPos.height / 2;
        await page.mouse.move(x, y);
        await page.mouse.down();
        await page.mouse.move(x, y + tabPos.height + 100, { steps: 5 });
        await page.mouse.up();
    } else {
        test.fail(true, "Unable to determine tab position");
    }

    // Wait for the popup to show
    const popup = await popupPromise;
    const popupApp = new App(popup, context);
    await expect(popupApp.getActiveTab()).toHaveText(NOTE_TITLE);
});

e2e/note_types/code.spec.ts  (new file, 45 lines)
@@ -0,0 +1,45 @@
import { test, expect, Page } from "@playwright/test";
import App from "../support/app";

test("Displays lint warnings for backend script", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();
    await app.closeAllTabs();
    await app.goToNoteInNewTab("Backend script with lint warnings");

    const codeEditor = app.currentNoteSplit.locator(".CodeMirror");

    // Expect two warning signs in the gutter.
    expect(codeEditor.locator(".CodeMirror-gutter-wrapper .CodeMirror-lint-marker-warning")).toHaveCount(2);

    // Hover over hello
    await codeEditor.getByText("hello").first().hover();
    await expectTooltip(page, "'hello' is defined but never used.");

    // Hover over world
    await codeEditor.getByText("world").first().hover();
    await expectTooltip(page, "'world' is defined but never used.");
});

test("Displays lint errors for backend script", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();
    await app.closeAllTabs();
    await app.goToNoteInNewTab("Backend script with lint errors");

    const codeEditor = app.currentNoteSplit.locator(".CodeMirror");

    // Expect two warning signs in the gutter.
    const errorMarker = codeEditor.locator(".CodeMirror-gutter-wrapper .CodeMirror-lint-marker-error");
    await expect(errorMarker).toHaveCount(1);

    // Hover over hello
    await errorMarker.hover();
    await expectTooltip(page, "Parsing error: Unexpected token world");
});

async function expectTooltip(page: Page, tooltip: string) {
    await expect(page.locator(".CodeMirror-lint-tooltip:visible", {
        "hasText": tooltip
    })).toBeVisible();
}

e2e/note_types/mermaid.spec.ts  (new file, 22 lines)
@@ -0,0 +1,22 @@
import { test, expect, Page } from "@playwright/test";
import App from "../support/app";

test("displays simple map", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();
    await app.goToNoteInNewTab("Sample mindmap");

    expect(app.currentNoteSplit).toContainText("Hello world");
    expect(app.currentNoteSplit).toContainText("1");
    expect(app.currentNoteSplit).toContainText("1a");
});

test("displays note settings", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();
    await app.goToNoteInNewTab("Sample mindmap");

    await app.currentNoteSplit.getByText("Hello world").click({ force: true });
    const nodeMenu = app.currentNoteSplit.locator(".node-menu");
    expect(nodeMenu).toBeVisible();
});

e2e/note_types/text.spec.ts  (new file, 51 lines)
@@ -0,0 +1,51 @@
import { test, expect, Page } from "@playwright/test";
import App from "../support/app";

test("Table of contents is displayed", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();
    await app.closeAllTabs();
    await app.goToNoteInNewTab("Table of contents");

    await expect(app.sidebar).toContainText("Table of Contents");
    const rootList = app.sidebar.locator(".toc-widget > span > ol");

    // Heading 1.1
    // Heading 1.1
    // Heading 1.2
    // Heading 2
    // Heading 2.1
    // Heading 2.2
    // Heading 2.2.1
    // Heading 2.2.1.1
    // Heading 2.2.11.1

    await expect(rootList.locator("> li")).toHaveCount(2);
    await expect(rootList.locator("> li").first()).toHaveText("Heading 1");
    await expect(rootList.locator("> ol").first().locator("> li").first()).toHaveText("Heading 1.1");
    await expect(rootList.locator("> ol").first().locator("> li").nth(1)).toHaveText("Heading 1.2");

    // Heading 2 has a Katex equation, check if it's rendered.
    await expect(rootList.locator("> li").nth(1)).toContainText("Heading 2");
    await expect(rootList.locator("> li").nth(1).locator(".katex")).toBeAttached();

    await expect(rootList.locator("> ol")).toHaveCount(2);
    await expect(rootList.locator("> ol").nth(1).locator("> li")).toHaveCount(2);
    await expect(rootList.locator("> ol").nth(1).locator("> ol")).toHaveCount(1);
    await expect(rootList.locator("> ol").nth(1).locator("> ol > ol")).toHaveCount(1);
    await expect(rootList.locator("> ol").nth(1).locator("> ol > ol > ol")).toHaveCount(1);
});

test("Highlights list is displayed", async ({ page, context }) => {
    const app = new App(page, context);
    await app.goto();
    await app.closeAllTabs();
    await app.goToNoteInNewTab("Highlights list");

    await expect(app.sidebar).toContainText("Highlights List");
    const rootList = app.sidebar.locator(".highlights-list ol");
    let index=0;
    for (const highlightedEl of [ "Bold 1", "Italic 1", "Underline 1", "Colored text 1", "Background text 1", "Bold 2", "Italic 2", "Underline 2", "Colored text 2", "Background text 2" ]) {
        await expect(rootList.locator("li").nth(index++)).toContainText(highlightedEl);
    }
});

e2e/support/app.ts  (new file, 78 lines)
@@ -0,0 +1,78 @@
import { expect, Locator, Page } from "@playwright/test";
import type { BrowserContext } from "@playwright/test";

interface GotoOpts {
    isMobile?: boolean;
}

export default class App {
    readonly page: Page;
    readonly context: BrowserContext;

    readonly tabBar: Locator;
    readonly noteTree: Locator;
    readonly currentNoteSplit: Locator;
    readonly sidebar: Locator;

    constructor(page: Page, context: BrowserContext) {
        this.page = page;
        this.context = context;

        this.tabBar = page.locator(".tab-row-widget-container");
        this.noteTree = page.locator(".tree-wrapper");
        this.currentNoteSplit = page.locator(".note-split:not(.hidden-ext)")
        this.sidebar = page.locator("#right-pane");
    }

    async goto(opts: GotoOpts = {}) {
        await this.context.addCookies([
            {
                url: "http://127.0.0.1:8082",
                name: "trilium-device",
                value: opts.isMobile ? "mobile" : "desktop"
            }
        ]);

        await this.page.goto("/", { waitUntil: "networkidle" });

        // Wait for the page to load.
        await expect(this.page.locator(".tree"))
            .toContainText("Trilium Integration Test");
        await this.closeAllTabs();
    }

    async goToNoteInNewTab(noteTitle: string) {
        const autocomplete = this.currentNoteSplit.locator(".note-autocomplete");
        await autocomplete.fill(noteTitle);
        await autocomplete.press("ArrowDown");
        await autocomplete.press("Enter");
    }

    async goToSettings() {
        await this.page.locator(".launcher-button.bx-cog").click();
    }

    getTab(tabIndex: number) {
        return this.tabBar.locator(".note-tab-wrapper").nth(tabIndex);
    }

    getActiveTab() {
        return this.tabBar.locator(".note-tab[active]");
    }

    async closeAllTabs() {
        await this.getTab(0).click({ button: "right" });
        await this.page.waitForTimeout(500); // TODO: context menu won't dismiss otherwise
        await this.page.getByText("Close all tabs").click({ force: true });
        await this.page.waitForTimeout(500); // TODO: context menu won't dismiss otherwise
    }

    async addNewTab() {
        await this.page.locator('[data-trigger-command="openNewTab"]').click();
    }

    async clickNoteOnNoteTreeByTitle(title: string) {
        this.noteTree.getByText(title).click();
    }

}

@@ -1,4 +1,4 @@
import { initializeTranslations } from "./src/services/i18n.js";

await initializeTranslations();
await import("./electron.js")
await import("./electron.js");

@@ -12,7 +12,7 @@ import sourceMapSupport from "source-map-support";
sourceMapSupport.install();

// Prevent Trilium starting twice on first install and on uninstall for the Windows installer.
if ((await import('electron-squirrel-startup')).default) {
if ((await import("electron-squirrel-startup")).default) {
    process.exit(0);
}

@@ -24,9 +24,7 @@ appIconService.installLocalAppIcon();
electronDl({ saveAs: true });

// needed for excalidraw export https://github.com/zadam/trilium/issues/4271
electron.app.commandLine.appendSwitch(
    "enable-experimental-web-platform-features"
);
electron.app.commandLine.appendSwitch("enable-experimental-web-platform-features");

// Quit when all windows are closed, except on macOS. There, it's common
// for applications and their menu bar to stay active until the user quits
@@ -70,4 +68,4 @@ electron.app.on("will-quit", () => {
// this is to disable electron warning spam in the dev console (local development only)
process.env["ELECTRON_DISABLE_SECURITY_WARNINGS"] = "true";

await import('./src/main.js');
await import("./src/main.js");

@@ -1,5 +1,5 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
const path = require("path");
|
||||
const fs = require("fs-extra");
|
||||
|
||||
const APP_NAME = "TriliumNext Notes";
|
||||
|
||||
@@ -18,15 +18,16 @@ module.exports = {
|
||||
"translations/",
|
||||
"node_modules/@highlightjs/cdn-assets/styles"
|
||||
],
|
||||
afterComplete: [(buildPath, _electronVersion, platform, _arch, callback) => {
|
||||
afterComplete: [
|
||||
(buildPath, _electronVersion, platform, _arch, callback) => {
|
||||
const extraResources = getExtraResourcesForPlatform();
|
||||
for (const resource of extraResources) {
|
||||
const baseName = path.basename(resource);
|
||||
let sourcePath;
|
||||
if (platform === 'darwin') {
|
||||
sourcePath = path.join(buildPath, `${APP_NAME}.app`, 'Contents', 'Resources', baseName);
|
||||
if (platform === "darwin") {
|
||||
sourcePath = path.join(buildPath, `${APP_NAME}.app`, "Contents", "Resources", baseName);
|
||||
} else {
|
||||
sourcePath = path.join(buildPath, 'resources', baseName);
|
||||
sourcePath = path.join(buildPath, "resources", baseName);
|
||||
}
|
||||
let destPath;
|
||||
|
||||
@@ -39,16 +40,17 @@ module.exports = {
|
||||
// Copy files from resources folder to root
|
||||
fs.move(sourcePath, destPath)
|
||||
.then(() => callback())
|
||||
.catch(err => callback(err));
|
||||
.catch((err) => callback(err));
|
||||
}
|
||||
}]
|
||||
}
|
||||
]
|
||||
},
|
||||
rebuildConfig: {
|
||||
force: true
|
||||
},
|
||||
makers: [
|
||||
{
|
||||
name: '@electron-forge/maker-deb',
|
||||
name: "@electron-forge/maker-deb",
|
||||
config: {
|
||||
options: {
|
||||
icon: "./images/app-icons/png/128x128.png",
|
||||
@@ -57,7 +59,7 @@ module.exports = {
|
||||
}
|
||||
},
|
||||
{
|
||||
name: '@electron-forge/maker-squirrel',
|
||||
name: "@electron-forge/maker-squirrel",
|
||||
config: {
|
||||
iconUrl: "https://raw.githubusercontent.com/TriliumNext/Notes/develop/images/app-icons/icon.ico",
|
||||
setupIcon: "./images/app-icons/icon.ico",
|
||||
@@ -65,48 +67,44 @@ module.exports = {
|
||||
}
|
||||
},
|
||||
{
|
||||
name: '@electron-forge/maker-dmg',
|
||||
name: "@electron-forge/maker-dmg",
|
||||
config: {
|
||||
icon: "./images/app-icons/icon.icns",
|
||||
icon: "./images/app-icons/icon.icns"
|
||||
}
|
||||
},
|
||||
{
|
||||
name: '@electron-forge/maker-zip',
|
||||
name: "@electron-forge/maker-zip",
|
||||
config: {
|
||||
options: {
|
||||
iconUrl: "https://raw.githubusercontent.com/TriliumNext/Notes/develop/images/app-icons/icon.ico",
|
||||
icon: "./images/app-icons/icon.ico",
|
||||
icon: "./images/app-icons/icon.ico"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
plugins: [
|
||||
{
|
||||
name: '@electron-forge/plugin-auto-unpack-natives',
|
||||
config: {},
|
||||
},
|
||||
],
|
||||
name: "@electron-forge/plugin-auto-unpack-natives",
|
||||
config: {}
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
|
||||
function getExtraResourcesForPlatform() {
|
||||
let resources = [
|
||||
'dump-db/',
|
||||
'./bin/tpl/anonymize-database.sql'
|
||||
];
|
||||
const scripts = ['trilium-portable', 'trilium-safe-mode', 'trilium-no-cert-check']
|
||||
let resources = ["dump-db/", "./bin/tpl/anonymize-database.sql"];
|
||||
const scripts = ["trilium-portable", "trilium-safe-mode", "trilium-no-cert-check"];
|
||||
switch (process.platform) {
|
||||
case 'win32':
|
||||
case "win32":
|
||||
for (const script of scripts) {
|
||||
resources.push(`./bin/tpl/${script}.bat`)
|
||||
resources.push(`./bin/tpl/${script}.bat`);
|
||||
}
|
||||
break;
|
||||
case 'darwin':
|
||||
case "darwin":
|
||||
break;
|
||||
case 'linux':
|
||||
resources.push("images/app-icons/png/256x256.png")
|
||||
case "linux":
|
||||
resources.push("images/app-icons/png/256x256.png");
|
||||
for (const script of scripts) {
|
||||
resources.push(`./bin/tpl/${script}.sh`)
|
||||
resources.push(`./bin/tpl/${script}.sh`);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
|
||||
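The list built by getExtraResourcesForPlatform() is presumably handed to Electron Forge's packager elsewhere in this file (that part is outside the hunks shown); a sketch of the pairing, using Forge's documented extraResource option, with the rest of packagerConfig omitted:

// Illustrative only – the real packagerConfig lives outside the hunks shown here.
const packagerConfig = {
    // Forge/electron-packager copies each entry into the packaged app's resources folder;
    // the afterComplete hook above then moves them next to the executable.
    extraResource: getExtraResourcesForPlatform()
};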
@@ -1,6 +1,6 @@
|
||||
import { test as setup, expect } from '@playwright/test';
|
||||
import { test as setup, expect } from "@playwright/test";
|
||||
|
||||
const authFile = 'playwright/.auth/user.json';
|
||||
const authFile = "playwright/.auth/user.json";
|
||||
|
||||
const ROOT_URL = "http://localhost:8082";
|
||||
const LOGIN_PASSWORD = "demo1234";
|
||||
|
||||
29
integration-tests/db/config.ini
Normal file
@@ -0,0 +1,29 @@
|
||||
[General]
|
||||
# Instance name can be used to distinguish between different instances using backend api.getInstanceName()
|
||||
instanceName=
|
||||
|
||||
# set to true to allow using Trilium without authentication (makes sense for server build only, desktop build doesn't need password)
|
||||
noAuthentication=true
|
||||
|
||||
# set to true to disable backups (e.g. because of limited space on server)
|
||||
noBackup=false
|
||||
|
||||
# Disable automatically generating desktop icon
|
||||
# noDesktopIcon=true
|
||||
|
||||
[Network]
|
||||
# host setting is relevant only for web deployments - set the host on which the server will listen
|
||||
# host=0.0.0.0
|
||||
# port setting is relevant only for web deployments, desktop builds run on a fixed port (changeable with TRILIUM_PORT environment variable)
|
||||
port=8080
|
||||
# true for TLS/SSL/HTTPS (secure), false for HTTP (insecure).
|
||||
https=false
|
||||
# path to certificate (run "bash bin/generate-cert.sh" to generate self-signed certificate). Relevant only if https=true
|
||||
certPath=
|
||||
keyPath=
|
||||
# setting to give trust to reverse proxies, a comma-separated list of trusted rev. proxy IPs can be specified (CIDR notation is permitted),
|
||||
# alternatively 'true' will make use of the leftmost IP in X-Forwarded-For, ultimately an integer can be used to tell about the number of hops between
|
||||
# Trilium (which is hop 0) and the first trusted rev. proxy.
|
||||
# once set, expressjs will use the X-Forwarded-For header set by the rev. proxy to determinate the real IPs of clients.
|
||||
# expressjs shortcuts are supported: loopback(127.0.0.1/8, ::1/128), linklocal(169.254.0.0/16, fe80::/10), uniquelocal(10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16, fc00::/7)
|
||||
trustedReverseProxy=false
|
||||
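The trustedReverseProxy comments mirror Express's "trust proxy" setting; the sketch below shows how each accepted value typically maps onto that setting (the actual wiring inside Trilium is not part of this diff, so this is illustrative only):

import express from "express";

const app = express();

// trustedReverseProxy=false          -> no reverse proxy is trusted (default)
app.set("trust proxy", false);
// trustedReverseProxy=true           -> use the leftmost IP from X-Forwarded-For
app.set("trust proxy", true);
// comma-separated ranges/shortcuts   -> trust only the listed proxies
app.set("trust proxy", "loopback, 10.0.0.0/8");
// integer                            -> trust this many hops in front of the app (the app itself is hop 0)
app.set("trust proxy", 2);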
Binary file not shown.
@@ -1,9 +1,9 @@
|
||||
import { test, expect } from '@playwright/test';
|
||||
import { test, expect } from "@playwright/test";
|
||||
|
||||
test("Can duplicate note with broken links", async ({ page }) => {
|
||||
await page.goto(`http://localhost:8082/#2VammGGdG6Ie`);
|
||||
await page.locator('.tree-wrapper .fancytree-active').getByText('Note map').click({ button: 'right' });
|
||||
await page.getByText('Duplicate subtree').click();
|
||||
await page.locator(".tree-wrapper .fancytree-active").getByText("Note map").click({ button: "right" });
|
||||
await page.getByText("Duplicate subtree").click();
|
||||
await expect(page.locator(".toast-body")).toBeHidden();
|
||||
await expect(page.locator('.tree-wrapper').getByText('Note map (dup)')).toBeVisible();
|
||||
await expect(page.locator(".tree-wrapper").getByText("Note map (dup)")).toBeVisible();
|
||||
});
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
import { test, expect } from '@playwright/test';
|
||||
import { test, expect } from "@playwright/test";
|
||||
|
||||
test('has title', async ({ page }) => {
|
||||
await page.goto('https://playwright.dev/');
|
||||
test("has title", async ({ page }) => {
|
||||
await page.goto("https://playwright.dev/");
|
||||
|
||||
// Expect a title "to contain" a substring.
|
||||
await expect(page).toHaveTitle(/Playwright/);
|
||||
});
|
||||
|
||||
test('get started link', async ({ page }) => {
|
||||
await page.goto('https://playwright.dev/');
|
||||
test("get started link", async ({ page }) => {
|
||||
await page.goto("https://playwright.dev/");
|
||||
|
||||
// Click the get started link.
|
||||
await page.getByRole('link', { name: 'Get started' }).click();
|
||||
await page.getByRole("link", { name: "Get started" }).click();
|
||||
|
||||
// Expects page to have a heading with the name of Installation.
|
||||
await expect(page.getByRole('heading', { name: 'Installation' })).toBeVisible();
|
||||
await expect(page.getByRole("heading", { name: "Installation" })).toBeVisible();
|
||||
});
|
||||
|
||||
@@ -1,23 +1,23 @@
|
||||
import test, { expect } from "@playwright/test";
|
||||
|
||||
test('Help popup', async ({ page }) => {
|
||||
await page.goto('http://localhost:8082');
|
||||
await page.getByText('Trilium Integration Test DB').click();
|
||||
test("Help popup", async ({ page }) => {
|
||||
await page.goto("http://localhost:8082");
|
||||
await page.getByText("Trilium Integration Test DB").click();
|
||||
|
||||
await page.locator('body').press('F1');
|
||||
await page.getByRole('link', { name: 'online↗' }).click();
|
||||
expect((await page.waitForEvent('popup')).url()).toBe("https://triliumnext.github.io/Docs/")
|
||||
await page.locator("body").press("F1");
|
||||
await page.getByRole("link", { name: "online↗" }).click();
|
||||
expect((await page.waitForEvent("popup")).url()).toBe("https://triliumnext.github.io/Docs/");
|
||||
});
|
||||
|
||||
test('Complete help in search', async ({ page }) => {
|
||||
await page.goto('http://localhost:8082');
|
||||
test("Complete help in search", async ({ page }) => {
|
||||
await page.goto("http://localhost:8082");
|
||||
|
||||
// Clear all tabs
|
||||
await page.locator('.note-tab:first-of-type').locator("div").nth(1).click({ button: 'right' });
|
||||
await page.getByText('Close all tabs').click();
|
||||
await page.locator(".note-tab:first-of-type").locator("div").nth(1).click({ button: "right" });
|
||||
await page.getByText("Close all tabs").click();
|
||||
|
||||
await page.locator('#launcher-container').getByRole('button', { name: '' }).first().click();
|
||||
await page.getByRole('cell', { name: ' ' }).locator('span').first().click();
|
||||
await page.getByRole('button', { name: 'complete help on search syntax' }).click();
|
||||
expect((await page.waitForEvent('popup')).url()).toBe("https://triliumnext.github.io/Docs/Wiki/search.html");
|
||||
await page.locator("#launcher-container").getByRole("button", { name: "" }).first().click();
|
||||
await page.getByRole("cell", { name: " " }).locator("span").first().click();
|
||||
await page.getByRole("button", { name: "complete help on search syntax" }).click();
|
||||
expect((await page.waitForEvent("popup")).url()).toBe("https://triliumnext.github.io/Docs/Wiki/search.html");
|
||||
});
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
import test, { expect } from "@playwright/test";
|
||||
|
||||
test("User can change language from settings", async ({ page }) => {
|
||||
await page.goto('http://localhost:8082');
|
||||
|
||||
// Clear all tabs
|
||||
await page.locator('.note-tab:first-of-type').locator("div").nth(1).click({ button: 'right' });
|
||||
await page.getByText('Close all tabs').click();
|
||||
|
||||
// Go to options -> Appearance
|
||||
await page.locator('#launcher-pane div').filter({ hasText: 'Options Open New Window' }).getByRole('button').click();
|
||||
await page.locator('#launcher-pane').getByText('Options').click();
|
||||
await page.locator('#center-pane').getByText('Appearance').click();
|
||||
|
||||
// Check that the default value (English) is set.
|
||||
await expect(page.locator('#center-pane')).toContainText('Theme');
|
||||
const languageCombobox = await page.getByRole('combobox').first();
|
||||
await expect(languageCombobox).toHaveValue("en");
|
||||
|
||||
// Select Chinese and ensure the translation is set.
|
||||
languageCombobox.selectOption("cn");
|
||||
await expect(page.locator('#center-pane')).toContainText('主题');
|
||||
|
||||
// Select English again.
|
||||
languageCombobox.selectOption("en");
|
||||
});
|
||||
|
||||
test("Restores language on start-up on desktop", async ({ page, context }) => {
|
||||
await page.goto('http://localhost:8082');
|
||||
await expect(page.locator('#launcher-pane').first()).toContainText("Open New Window");
|
||||
});
|
||||
|
||||
test("Restores language on start-up on mobile", async ({ page, context }) => {
|
||||
await context.addCookies([
|
||||
{
|
||||
url: "http://localhost:8082",
|
||||
name: "trilium-device",
|
||||
value: "mobile"
|
||||
}
|
||||
]);
|
||||
await page.goto('http://localhost:8082');
|
||||
await expect(page.locator('#launcher-pane div').first()).toContainText("Open New Window");
|
||||
});
|
||||
@@ -1,4 +1,4 @@
|
||||
import { test, expect } from '@playwright/test';
|
||||
import { test, expect } from "@playwright/test";
|
||||
|
||||
const ROOT_URL = "http://localhost:8080";
|
||||
const LOGIN_PASSWORD = "eliandoran";
|
||||
@@ -12,7 +12,6 @@ test("Can insert equations", async ({ page }) => {
|
||||
// .click();
|
||||
|
||||
const activeNote = page.locator(".component.note-split:visible");
|
||||
const noteContent = activeNote
|
||||
.locator(".note-detail-editable-text-editor")
|
||||
const noteContent = activeNote.locator(".note-detail-editable-text-editor");
|
||||
await noteContent.press("Ctrl+M");
|
||||
});
|
||||
|
||||
@@ -1,21 +1,21 @@
|
||||
import test, { expect } from "@playwright/test";
|
||||
|
||||
test("Native Title Bar not displayed on web", async ({ page }) => {
|
||||
await page.goto('http://localhost:8082/#root/_hidden/_options/_optionsAppearance');
|
||||
await expect(page.getByRole('heading', { name: 'Theme' })).toBeVisible();
|
||||
await expect(page.getByRole('heading', { name: 'Native Title Bar (requires' })).toBeHidden();
|
||||
await page.goto("http://localhost:8082/#root/_hidden/_options/_optionsAppearance");
|
||||
await expect(page.getByRole("heading", { name: "Theme" })).toBeVisible();
|
||||
await expect(page.getByRole("heading", { name: "Native Title Bar (requires" })).toBeHidden();
|
||||
});
|
||||
|
||||
test("Tray settings not displayed on web", async ({ page }) => {
|
||||
await page.goto('http://localhost:8082/#root/_hidden/_options/_optionsOther');
|
||||
await expect(page.getByRole('heading', { name: 'Note Erasure Timeout' })).toBeVisible();
|
||||
await expect(page.getByRole('heading', { name: 'Tray' })).toBeHidden();
|
||||
await page.goto("http://localhost:8082/#root/_hidden/_options/_optionsOther");
|
||||
await expect(page.getByRole("heading", { name: "Note Erasure Timeout" })).toBeVisible();
|
||||
await expect(page.getByRole("heading", { name: "Tray" })).toBeHidden();
|
||||
});
|
||||
|
||||
test("Spellcheck settings not displayed on web", async ({ page }) => {
|
||||
await page.goto('http://localhost:8082/#root/_hidden/_options/_optionsSpellcheck');
|
||||
await expect(page.getByRole('heading', { name: 'Spell Check' })).toBeVisible();
|
||||
await expect(page.getByRole('heading', { name: 'Tray' })).toBeHidden();
|
||||
await expect(page.getByText('These options apply only for desktop builds')).toBeVisible();
|
||||
await expect(page.getByText('Enable spellcheck')).toBeHidden();
|
||||
await page.goto("http://localhost:8082/#root/_hidden/_options/_optionsSpellcheck");
|
||||
await expect(page.getByRole("heading", { name: "Spell Check" })).toBeVisible();
|
||||
await expect(page.getByRole("heading", { name: "Tray" })).toBeHidden();
|
||||
await expect(page.getByText("These options apply only for desktop builds")).toBeVisible();
|
||||
await expect(page.getByText("Enable spellcheck")).toBeHidden();
|
||||
});
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import test, { expect } from "@playwright/test";
|
||||
|
||||
test("Renders on desktop", async ({ page, context }) => {
|
||||
await page.goto('http://localhost:8082');
|
||||
await expect(page.locator('.tree')).toContainText('Trilium Integration Test');
|
||||
await page.goto("http://localhost:8082");
|
||||
await expect(page.locator(".tree")).toContainText("Trilium Integration Test");
|
||||
});
|
||||
|
||||
test("Renders on mobile", async ({ page, context }) => {
|
||||
@@ -13,6 +13,6 @@ test("Renders on mobile", async ({ page, context }) => {
|
||||
value: "mobile"
|
||||
}
|
||||
]);
|
||||
await page.goto('http://localhost:8082');
|
||||
await expect(page.locator('.tree')).toContainText('Trilium Integration Test');
|
||||
await page.goto("http://localhost:8082");
|
||||
await expect(page.locator(".tree")).toContainText("Trilium Integration Test");
|
||||
});
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import { test, expect } from '@playwright/test';
|
||||
import { test, expect } from "@playwright/test";
|
||||
|
||||
const expectedVersion = "0.90.3";
|
||||
|
||||
test("Displays update badge when there is a version available", async ({ page }) => {
|
||||
await page.goto("http://localhost:8080");
|
||||
await page.getByRole('button', { name: '' }).click();
|
||||
await page.getByRole("button", { name: "" }).click();
|
||||
await page.getByText(`Version ${expectedVersion} is available,`).click();
|
||||
|
||||
const page1 = await page.waitForEvent('popup');
|
||||
const page1 = await page.waitForEvent("popup");
|
||||
expect(page1.url()).toBe(`https://github.com/TriliumNext/Notes/releases/tag/v${expectedVersion}`);
|
||||
});
|
||||
|
||||
2
libraries/ckeditor/ckeditor.js
vendored

File diff suppressed because one or more lines are too long
100
libraries/codemirror/eslint.js
Normal file
@@ -0,0 +1,100 @@
|
||||
// CodeMirror, copyright (c) by Marijn Haverbeke and others
|
||||
// Distributed under an MIT license: http://codemirror.net/LICENSE
|
||||
|
||||
(function(mod) {
|
||||
if (typeof exports == "object" && typeof module == "object") // CommonJS
|
||||
mod(require("../../lib/codemirror"));
|
||||
else if (typeof define == "function" && define.amd) // AMD
|
||||
define(["../../lib/codemirror"], mod);
|
||||
else // Plain browser env
|
||||
mod(CodeMirror);
|
||||
})(function(CodeMirror) {
|
||||
"use strict";
|
||||
|
||||
async function validatorHtml(text, options) {
|
||||
const result = /<script[^>]*>([\s\S]+)<\/script>/ig.exec(text);
|
||||
|
||||
if (result !== null) {
|
||||
// preceding code is copied over but any (non-newline) character is replaced with space
|
||||
// this will preserve line numbers etc.
|
||||
const prefix = text.substr(0, result.index).replace(/./g, " ");
|
||||
|
||||
const js = prefix + result[1];
|
||||
|
||||
return await validatorJavaScript(js, options);
|
||||
}
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
async function validatorJavaScript(text, options) {
|
||||
if (glob.isMobile()
|
||||
|| glob.getActiveContextNote() == null
|
||||
|| glob.getActiveContextNote().mime === 'application/json') {
|
||||
// eslint doesn't seem to validate pure JSON well
|
||||
return [];
|
||||
}
|
||||
|
||||
await glob.requireLibrary(glob.ESLINT);
|
||||
|
||||
if (text.length > 20000) {
|
||||
console.log("Skipping linting because of large size: ", text.length);
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
const errors = new eslint().verify(text, {
|
||||
root: true,
|
||||
parserOptions: {
|
||||
ecmaVersion: "latest"
|
||||
},
|
||||
extends: ['eslint:recommended', 'airbnb-base'],
|
||||
env: {
|
||||
'browser': true,
|
||||
'node': true
|
||||
},
|
||||
rules: {
|
||||
'import/no-unresolved': 'off',
|
||||
'func-names': 'off',
|
||||
'comma-dangle': ['warn'],
|
||||
'padded-blocks': 'off',
|
||||
'linebreak-style': 'off',
|
||||
'class-methods-use-this': 'off',
|
||||
'no-unused-vars': ['warn', { vars: 'local', args: 'after-used' }],
|
||||
'no-nested-ternary': 'off',
|
||||
'no-underscore-dangle': ['error', {'allow': ['_super', '_lookupFactory']}]
|
||||
},
|
||||
globals: {
|
||||
"api": "readonly"
|
||||
}
|
||||
});
|
||||
|
||||
console.log(errors);
|
||||
|
||||
const result = [];
|
||||
if (errors) {
|
||||
parseErrors(errors, result);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeMirror.registerHelper("lint", "javascript", validatorJavaScript);
|
||||
CodeMirror.registerHelper("lint", "html", validatorHtml);
|
||||
|
||||
function parseErrors(errors, output) {
|
||||
for (const error of errors) {
|
||||
const startLine = error.line - 1;
|
||||
const endLine = error.endLine !== undefined ? error.endLine - 1 : startLine;
|
||||
const startCol = error.column - 1;
|
||||
const endCol = error.endColumn !== undefined ? error.endColumn - 1 : startCol + 1;
|
||||
|
||||
output.push({
|
||||
message: error.message,
|
||||
severity: error.severity === 1 ? "warning" : "error",
|
||||
from: CodeMirror.Pos(startLine, startCol),
|
||||
to: CodeMirror.Pos(endLine, endCol)
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
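The two registerHelper calls above plug into CodeMirror 5's lint addon; a minimal sketch of how an editor instance would pick them up (the element id and mode are illustrative, and addon/lint/lint.js is assumed to be loaded alongside CodeMirror):

// Assumes lib/codemirror.js and addon/lint/lint.js are already on the page.
const editor = CodeMirror(document.getElementById("code"), {
    mode: "javascript",
    gutters: ["CodeMirror-lint-markers"], // column that shows the warning/error markers
    lint: true                            // invokes the registered "lint" helper for the current mode
});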
112883
libraries/eslint/eslint.js
Normal file
File diff suppressed because one or more lines are too long
@@ -5,6 +5,6 @@
|
||||
// Then probably can change webpack comand to
|
||||
// "webpack": "cross-env NODE_OPTIONS=--import=ts-node/esm webpack -c webpack.config.ts",
|
||||
|
||||
import { register } from 'node:module';
|
||||
import { pathToFileURL } from 'node:url';
|
||||
register('ts-node/esm', pathToFileURL('./'));
|
||||
import { register } from "node:module";
|
||||
import { pathToFileURL } from "node:url";
|
||||
register("ts-node/esm", pathToFileURL("./"));
|
||||
|
||||
@@ -3,10 +3,7 @@
|
||||
"ignore": [".git", "node_modules/**/node_modules", "src/public/"],
|
||||
"verbose": false,
|
||||
"exec": "tsx",
|
||||
"watch": [
|
||||
"src/",
|
||||
"translations/"
|
||||
],
|
||||
"watch": ["src/", "translations/"],
|
||||
"signal": "SIGTERM",
|
||||
"env": {
|
||||
"NODE_ENV": "development"
|
||||
|
||||
796
package-lock.json
generated
File diff suppressed because it is too large
14
package.json
@@ -45,7 +45,7 @@
|
||||
"prepare-dist": "rimraf ./dist && tsc && tsx ./bin/copy-dist.ts",
|
||||
"watch-dist": "tsx ./bin/watch-dist.ts",
|
||||
"update-build-info": "tsx bin/update-build-info.ts",
|
||||
"integration-edit-db": "cross-env TRILIUM_INTEGRATION_TEST=edit TRILIUM_PORT=8081 TRILIUM_DATA_DIR=./integration-tests/db nodemon src/main.ts",
|
||||
"integration-edit-db": "cross-env TRILIUM_INTEGRATION_TEST=edit TRILIUM_PORT=8081 TRILIUM_ENV=dev TRILIUM_DATA_DIR=./integration-tests/db nodemon src/main.ts",
|
||||
"integration-mem-db": "cross-env TRILIUM_INTEGRATION_TEST=memory TRILIUM_PORT=8082 TRILIUM_DATA_DIR=./integration-tests/db nodemon src/main.ts",
|
||||
"integration-mem-db-dev": "cross-env TRILIUM_INTEGRATION_TEST=memory TRILIUM_PORT=8082 TRILIUM_ENV=dev TRILIUM_DATA_DIR=./integration-tests/db nodemon src/main.ts",
|
||||
"generate-document": "cross-env nodemon src/tools/generate_document.ts 1000",
|
||||
@@ -78,13 +78,13 @@
|
||||
"dayjs": "1.11.13",
|
||||
"dayjs-plugin-utc": "0.1.2",
|
||||
"debounce": "2.2.0",
|
||||
"draggabilly": "3.0.0",
|
||||
"ejs": "3.1.10",
|
||||
"electron-debug": "4.1.0",
|
||||
"electron-dl": "4.0.0",
|
||||
"electron-squirrel-startup": "1.0.1",
|
||||
"electron-window-state": "5.0.3",
|
||||
"escape-html": "1.0.3",
|
||||
"eslint": "9.17.0",
|
||||
"express": "4.21.2",
|
||||
"express-rate-limit": "7.5.0",
|
||||
"express-session": "1.18.1",
|
||||
@@ -95,7 +95,7 @@
|
||||
"html2plaintext": "2.1.4",
|
||||
"http-proxy-agent": "7.0.2",
|
||||
"https-proxy-agent": "7.0.6",
|
||||
"i18next": "24.2.0",
|
||||
"i18next": "24.2.1",
|
||||
"i18next-fs-backend": "2.6.0",
|
||||
"i18next-http-backend": "3.0.1",
|
||||
"image-type": "5.2.0",
|
||||
@@ -107,12 +107,12 @@
|
||||
"jquery": "3.7.1",
|
||||
"jquery-hotkeys": "0.2.2",
|
||||
"jquery.fancytree": "2.38.4",
|
||||
"jsdom": "25.0.1",
|
||||
"jsdom": "26.0.0",
|
||||
"jsplumb": "2.15.6",
|
||||
"katex": "0.16.19",
|
||||
"knockout": "3.5.1",
|
||||
"mark.js": "8.11.1",
|
||||
"marked": "15.0.5",
|
||||
"marked": "15.0.6",
|
||||
"mermaid": "11.4.1",
|
||||
"mime-types": "2.1.35",
|
||||
"mind-elixir": "4.3.5",
|
||||
@@ -189,7 +189,7 @@
|
||||
"@types/xml2js": "0.4.14",
|
||||
"@types/yargs": "17.0.33",
|
||||
"cross-env": "7.0.3",
|
||||
"electron": "33.2.1",
|
||||
"electron": "33.3.1",
|
||||
"electron-packager": "17.1.2",
|
||||
"electron-rebuild": "3.2.9",
|
||||
"esm": "3.2.25",
|
||||
@@ -205,7 +205,7 @@
|
||||
"tslib": "2.8.1",
|
||||
"tsx": "4.19.2",
|
||||
"typedoc": "0.27.6",
|
||||
"typescript": "5.7.2",
|
||||
"typescript": "5.7.3",
|
||||
"webpack": "5.97.1",
|
||||
"webpack-cli": "6.0.1",
|
||||
"webpack-dev-middleware": "7.4.2"
|
||||
|
||||
@@ -1,17 +1,20 @@
|
||||
import { defineConfig, devices } from '@playwright/test';
|
||||
|
||||
const SERVER_URL = 'http://127.0.0.1:8082';
|
||||
|
||||
/**
|
||||
* Read environment variables from file.
|
||||
* https://github.com/motdotla/dotenv
|
||||
*/
|
||||
// import dotenv from 'dotenv';
|
||||
// import path from 'path';
|
||||
// dotenv.config({ path: path.resolve(__dirname, '.env') });
|
||||
|
||||
/**
|
||||
* See https://playwright.dev/docs/test-configuration.
|
||||
*/
|
||||
export default defineConfig({
|
||||
testDir: './integration-tests',
|
||||
testDir: './e2e',
|
||||
/* Run tests in files in parallel */
|
||||
fullyParallel: true,
|
||||
/* Fail the build on CI if you accidentally left test.only in the source code. */
|
||||
@@ -25,36 +28,29 @@ export default defineConfig({
|
||||
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
|
||||
use: {
|
||||
/* Base URL to use in actions like `await page.goto('/')`. */
|
||||
// baseURL: 'http://127.0.0.1:3000',
|
||||
baseURL: SERVER_URL,
|
||||
|
||||
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
|
||||
trace: 'on-first-retry',
|
||||
},
|
||||
|
||||
webServer: {
|
||||
command: "npm run integration-mem-db",
|
||||
url: "http://127.0.0.1:8082",
|
||||
reuseExistingServer: true,
|
||||
stdout: "ignore",
|
||||
stderr: "pipe"
|
||||
},
|
||||
|
||||
/* Configure projects for major browsers */
|
||||
projects: [
|
||||
{
|
||||
name: "setup",
|
||||
testMatch: /.*\.setup\.ts/
|
||||
name: 'chromium',
|
||||
use: { ...devices['Desktop Chrome'] },
|
||||
},
|
||||
|
||||
{
|
||||
name: "firefox",
|
||||
use: {
|
||||
...devices[ "Desktop Firefox" ],
|
||||
storageState: "playwright/.auth/user.json"
|
||||
},
|
||||
dependencies: [ "setup" ]
|
||||
name: 'firefox',
|
||||
use: { ...devices['Desktop Firefox'] },
|
||||
},
|
||||
|
||||
// {
|
||||
// name: 'webkit',
|
||||
// use: { ...devices['Desktop Safari'] },
|
||||
// },
|
||||
|
||||
/* Test against mobile viewports. */
|
||||
// {
|
||||
// name: 'Mobile Chrome',
|
||||
@@ -77,9 +73,9 @@ export default defineConfig({
|
||||
],
|
||||
|
||||
/* Run your local dev server before starting the tests */
|
||||
// webServer: {
|
||||
// command: 'npm run start',
|
||||
// url: 'http://127.0.0.1:3000',
|
||||
webServer: {
|
||||
command: 'npm run integration-mem-db-dev',
|
||||
url: SERVER_URL,
|
||||
// reuseExistingServer: !process.env.CI,
|
||||
// },
|
||||
},
|
||||
});
|
||||
|
||||
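Since baseURL now points at SERVER_URL, specs can navigate with relative paths instead of hard-coding http://localhost:8082; an illustrative test (not part of this commit):

import { test, expect } from "@playwright/test";

test("opens the note tree via the configured baseURL", async ({ page }) => {
    // Resolved against baseURL (http://127.0.0.1:8082) from playwright.config.ts.
    await page.goto("/");
    await expect(page.locator(".tree")).toContainText("Trilium Integration Test");
});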
@@ -1,29 +1,24 @@
|
||||
import * as attributeParser from '../src/public/app/services/attribute_parser.js';
|
||||
import * as attributeParser from "../src/public/app/services/attribute_parser.js";
|
||||
|
||||
import {describe, it, expect, execute} from './mini_test.js';
|
||||
import { describe, it, expect, execute } from "./mini_test.js";
|
||||
|
||||
describe("Lexing", () => {
|
||||
it("simple label", () => {
|
||||
expect(attributeParser.lex("#label").map((t: any) => t.text))
|
||||
.toEqual(["#label"]);
|
||||
expect(attributeParser.lex("#label").map((t: any) => t.text)).toEqual(["#label"]);
|
||||
});
|
||||
|
||||
it("simple label with trailing spaces", () => {
|
||||
expect(attributeParser.lex(" #label ").map((t: any) => t.text))
|
||||
.toEqual(["#label"]);
|
||||
expect(attributeParser.lex(" #label ").map((t: any) => t.text)).toEqual(["#label"]);
|
||||
});
|
||||
|
||||
it("inherited label", () => {
|
||||
expect(attributeParser.lex("#label(inheritable)").map((t: any) => t.text))
|
||||
.toEqual(["#label", "(", "inheritable", ")"]);
|
||||
expect(attributeParser.lex("#label(inheritable)").map((t: any) => t.text)).toEqual(["#label", "(", "inheritable", ")"]);
|
||||
|
||||
expect(attributeParser.lex("#label ( inheritable ) ").map((t: any) => t.text))
|
||||
.toEqual(["#label", "(", "inheritable", ")"]);
|
||||
expect(attributeParser.lex("#label ( inheritable ) ").map((t: any) => t.text)).toEqual(["#label", "(", "inheritable", ")"]);
|
||||
});
|
||||
|
||||
it("label with value", () => {
|
||||
expect(attributeParser.lex("#label=Hallo").map((t: any) => t.text))
|
||||
.toEqual(["#label", "=", "Hallo"]);
|
||||
expect(attributeParser.lex("#label=Hallo").map((t: any) => t.text)).toEqual(["#label", "=", "Hallo"]);
|
||||
});
|
||||
|
||||
it("label with value", () => {
|
||||
@@ -33,19 +28,15 @@ describe("Lexing", () => {
|
||||
});
|
||||
|
||||
it("relation with value", () => {
|
||||
expect(attributeParser.lex('~relation=#root/RclIpMauTOKS/NFi2gL4xtPxM').map((t: any) => t.text))
|
||||
.toEqual(["~relation", "=", "#root/RclIpMauTOKS/NFi2gL4xtPxM"]);
|
||||
expect(attributeParser.lex("~relation=#root/RclIpMauTOKS/NFi2gL4xtPxM").map((t: any) => t.text)).toEqual(["~relation", "=", "#root/RclIpMauTOKS/NFi2gL4xtPxM"]);
|
||||
});
|
||||
|
||||
it("use quotes to define value", () => {
|
||||
expect(attributeParser.lex("#'label a'='hello\"` world'").map((t: any) => t.text))
|
||||
.toEqual(["#label a", "=", 'hello"` world']);
|
||||
expect(attributeParser.lex("#'label a'='hello\"` world'").map((t: any) => t.text)).toEqual(["#label a", "=", 'hello"` world']);
|
||||
|
||||
expect(attributeParser.lex('#"label a" = "hello\'` world"').map((t: any) => t.text))
|
||||
.toEqual(["#label a", "=", "hello'` world"]);
|
||||
expect(attributeParser.lex('#"label a" = "hello\'` world"').map((t: any) => t.text)).toEqual(["#label a", "=", "hello'` world"]);
|
||||
|
||||
expect(attributeParser.lex('#`label a` = `hello\'" world`').map((t: any) => t.text))
|
||||
.toEqual(["#label a", "=", "hello'\" world"]);
|
||||
expect(attributeParser.lex("#`label a` = `hello'\" world`").map((t: any) => t.text)).toEqual(["#label a", "=", "hello'\" world"]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -54,8 +45,8 @@ describe("Parser", () => {
|
||||
const attrs = attributeParser.parse(["#token"].map((t: any) => ({ text: t })));
|
||||
|
||||
expect(attrs.length).toEqual(1);
|
||||
expect(attrs[0].type).toEqual('label');
|
||||
expect(attrs[0].name).toEqual('token');
|
||||
expect(attrs[0].type).toEqual("label");
|
||||
expect(attrs[0].name).toEqual("token");
|
||||
expect(attrs[0].isInheritable).toBeFalsy();
|
||||
expect(attrs[0].value).toBeFalsy();
|
||||
});
|
||||
@@ -64,8 +55,8 @@ describe("Parser", () => {
|
||||
const attrs = attributeParser.parse(["#token", "(", "inheritable", ")"].map((t: any) => ({ text: t })));
|
||||
|
||||
expect(attrs.length).toEqual(1);
|
||||
expect(attrs[0].type).toEqual('label');
|
||||
expect(attrs[0].name).toEqual('token');
|
||||
expect(attrs[0].type).toEqual("label");
|
||||
expect(attrs[0].name).toEqual("token");
|
||||
expect(attrs[0].isInheritable).toBeTruthy();
|
||||
expect(attrs[0].value).toBeFalsy();
|
||||
});
|
||||
@@ -74,8 +65,8 @@ describe("Parser", () => {
|
||||
const attrs = attributeParser.parse(["#token", "=", "val"].map((t: any) => ({ text: t })));
|
||||
|
||||
expect(attrs.length).toEqual(1);
|
||||
expect(attrs[0].type).toEqual('label');
|
||||
expect(attrs[0].name).toEqual('token');
|
||||
expect(attrs[0].type).toEqual("label");
|
||||
expect(attrs[0].name).toEqual("token");
|
||||
expect(attrs[0].value).toEqual("val");
|
||||
});
|
||||
|
||||
@@ -83,29 +74,26 @@ describe("Parser", () => {
|
||||
let attrs = attributeParser.parse(["~token", "=", "#root/RclIpMauTOKS/NFi2gL4xtPxM"].map((t: any) => ({ text: t })));
|
||||
|
||||
expect(attrs.length).toEqual(1);
|
||||
expect(attrs[0].type).toEqual('relation');
|
||||
expect(attrs[0].type).toEqual("relation");
|
||||
expect(attrs[0].name).toEqual("token");
|
||||
expect(attrs[0].value).toEqual('NFi2gL4xtPxM');
|
||||
expect(attrs[0].value).toEqual("NFi2gL4xtPxM");
|
||||
|
||||
attrs = attributeParser.parse(["~token", "=", "#NFi2gL4xtPxM"].map((t: any) => ({ text: t })));
|
||||
|
||||
expect(attrs.length).toEqual(1);
|
||||
expect(attrs[0].type).toEqual('relation');
|
||||
expect(attrs[0].type).toEqual("relation");
|
||||
expect(attrs[0].name).toEqual("token");
|
||||
expect(attrs[0].value).toEqual('NFi2gL4xtPxM');
|
||||
expect(attrs[0].value).toEqual("NFi2gL4xtPxM");
|
||||
});
|
||||
});
|
||||
|
||||
describe("error cases", () => {
|
||||
it("error cases", () => {
|
||||
expect(() => attributeParser.lexAndParse('~token'))
|
||||
.toThrow('Relation "~token" in "~token" should point to a note.');
|
||||
expect(() => attributeParser.lexAndParse("~token")).toThrow('Relation "~token" in "~token" should point to a note.');
|
||||
|
||||
expect(() => attributeParser.lexAndParse("#a&b/s"))
|
||||
.toThrow(`Attribute name "a&b/s" contains disallowed characters, only alphanumeric characters, colon and underscore are allowed.`);
|
||||
expect(() => attributeParser.lexAndParse("#a&b/s")).toThrow(`Attribute name "a&b/s" contains disallowed characters, only alphanumeric characters, colon and underscore are allowed.`);
|
||||
|
||||
expect(() => attributeParser.lexAndParse("#"))
|
||||
.toThrow(`Attribute name is empty, please fill the name.`);
|
||||
expect(() => attributeParser.lexAndParse("#")).toThrow(`Attribute name is empty, please fill the name.`);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -47,8 +47,7 @@ export function expect(val: any) {
|
||||
toThrow: (errorMessage: any) => {
|
||||
try {
|
||||
val();
|
||||
}
|
||||
catch (e: any) {
|
||||
} catch (e: any) {
|
||||
if (e.message !== errorMessage) {
|
||||
console.trace("toThrow caught exception, but messages differ");
|
||||
console.error(`expected: ${errorMessage}`);
|
||||
@@ -66,7 +65,7 @@ export function expect(val: any) {
|
||||
console.error(`got: [none]`);
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export function execute() {
|
||||
@@ -74,8 +73,7 @@ export function execute() {
|
||||
|
||||
if (errorCount) {
|
||||
console.log(`!!!${errorCount} tests failed!!!`);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
console.log("All tests passed!");
|
||||
}
|
||||
}
|
||||
|
||||
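For reference, the mini_test harness above is consumed the same way the spec files elsewhere in this diff use it; a minimal, self-contained usage sketch:

import { describe, it, expect, execute } from "./mini_test.js";

describe("example suite", () => {
    it("compares values", () => {
        expect(1 + 1).toEqual(2);
    });

    it("verifies expected exceptions", () => {
        expect(() => { throw new Error("boom"); }).toThrow("boom");
    });
});

// Runs the registered suites and prints the failure count, or "All tests passed!".
execute();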
@@ -1,4 +1,4 @@
|
||||
import sanitizeAttributeName from "../src/services/sanitize_attribute_name"
|
||||
import sanitizeAttributeName from "../src/services/sanitize_attribute_name";
|
||||
import { describe, it, execute, expect } from "./mini_test";
|
||||
|
||||
// fn value, expected value
|
||||
@@ -17,27 +17,23 @@ const testCases: [fnValue: string, expectedValue: string][] = [
|
||||
["ε", "ε"],
|
||||
["attribute ε", "attribute_ε"],
|
||||
|
||||
|
||||
// special characters
|
||||
["test/name", "test_name"],
|
||||
["test%name", "test_name"],
|
||||
["\/", "_"],
|
||||
|
||||
// empty string
|
||||
["", "unnamed"],
|
||||
]
|
||||
|
||||
|
||||
["", "unnamed"]
|
||||
];
|
||||
|
||||
describe("sanitizeAttributeName unit tests", () => {
|
||||
|
||||
testCases.forEach(testCase => {
|
||||
testCases.forEach((testCase) => {
|
||||
return it(`'${testCase[0]}' should return '${testCase[1]}'`, () => {
|
||||
const [value, expected] = testCase;
|
||||
const actual = sanitizeAttributeName(value);
|
||||
expect(actual).toEqual(expected);
|
||||
})
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
execute()
|
||||
execute();
|
||||
|
||||
@@ -3,127 +3,61 @@ import { describe, it, execute, expect } from "../mini_test.ts";
|
||||
|
||||
const testCases: [fnValue: Parameters<typeof formatDownloadTitle>, expectedValue: ReturnType<typeof formatDownloadTitle>][] = [
|
||||
// empty fileName tests
|
||||
[
|
||||
["", "text", ""],
|
||||
"untitled.html"
|
||||
],
|
||||
[["", "text", ""], "untitled.html"],
|
||||
|
||||
[
|
||||
["", "canvas", ""],
|
||||
"untitled.json"
|
||||
],
|
||||
[["", "canvas", ""], "untitled.json"],
|
||||
|
||||
[
|
||||
["", null, ""],
|
||||
"untitled"
|
||||
],
|
||||
[["", null, ""], "untitled"],
|
||||
|
||||
// json extension from type tests
|
||||
[
|
||||
["test_file", "canvas", ""],
|
||||
"test_file.json"
|
||||
],
|
||||
[["test_file", "canvas", ""], "test_file.json"],
|
||||
|
||||
[
|
||||
["test_file", "relationMap", ""],
|
||||
"test_file.json"
|
||||
],
|
||||
[["test_file", "relationMap", ""], "test_file.json"],
|
||||
|
||||
[
|
||||
["test_file", "search", ""],
|
||||
"test_file.json"
|
||||
],
|
||||
[["test_file", "search", ""], "test_file.json"],
|
||||
|
||||
// extension based on mime type
|
||||
[
|
||||
["test_file", null, "text/csv"],
|
||||
"test_file.csv"
|
||||
],
|
||||
[["test_file", null, "text/csv"], "test_file.csv"],
|
||||
|
||||
[
|
||||
["test_file_wo_ext", "image", "image/svg+xml"],
|
||||
"test_file_wo_ext.svg"
|
||||
],
|
||||
[["test_file_wo_ext", "image", "image/svg+xml"], "test_file_wo_ext.svg"],
|
||||
|
||||
[
|
||||
["test_file_wo_ext", "file", "application/json"],
|
||||
"test_file_wo_ext.json"
|
||||
],
|
||||
[["test_file_wo_ext", "file", "application/json"], "test_file_wo_ext.json"],
|
||||
|
||||
[
|
||||
["test_file_w_fake_ext.ext", "image", "image/svg+xml"],
|
||||
"test_file_w_fake_ext.ext.svg"
|
||||
],
|
||||
[["test_file_w_fake_ext.ext", "image", "image/svg+xml"], "test_file_w_fake_ext.ext.svg"],
|
||||
|
||||
[
|
||||
["test_file_w_correct_ext.svg", "image", "image/svg+xml"],
|
||||
"test_file_w_correct_ext.svg"
|
||||
],
|
||||
[["test_file_w_correct_ext.svg", "image", "image/svg+xml"], "test_file_w_correct_ext.svg"],
|
||||
|
||||
[
|
||||
["test_file_w_correct_ext.svgz", "image", "image/svg+xml"],
|
||||
"test_file_w_correct_ext.svgz"
|
||||
],
|
||||
[["test_file_w_correct_ext.svgz", "image", "image/svg+xml"], "test_file_w_correct_ext.svgz"],
|
||||
|
||||
[
|
||||
["test_file.zip", "file", "application/zip"],
|
||||
"test_file.zip"
|
||||
],
|
||||
[["test_file.zip", "file", "application/zip"], "test_file.zip"],
|
||||
|
||||
[
|
||||
["test_file", "file", "application/zip"],
|
||||
"test_file.zip"
|
||||
],
|
||||
[["test_file", "file", "application/zip"], "test_file.zip"],
|
||||
|
||||
// application/octet-stream tests
|
||||
[
|
||||
["test_file", "file", "application/octet-stream"],
|
||||
"test_file"
|
||||
],
|
||||
[["test_file", "file", "application/octet-stream"], "test_file"],
|
||||
|
||||
[
|
||||
["test_file.zip", "file", "application/octet-stream"],
|
||||
"test_file.zip"
|
||||
],
|
||||
[["test_file.zip", "file", "application/octet-stream"], "test_file.zip"],
|
||||
|
||||
[
|
||||
["test_file.unknown", null, "application/octet-stream"],
|
||||
"test_file.unknown"
|
||||
],
|
||||
[["test_file.unknown", null, "application/octet-stream"], "test_file.unknown"],
|
||||
|
||||
// sanitized filename tests
|
||||
[
|
||||
["test/file", null, "application/octet-stream"],
|
||||
"testfile"
|
||||
],
|
||||
[["test/file", null, "application/octet-stream"], "testfile"],
|
||||
|
||||
[
|
||||
["test:file.zip", "file", "application/zip"],
|
||||
"testfile.zip"
|
||||
],
|
||||
[["test:file.zip", "file", "application/zip"], "testfile.zip"],
|
||||
|
||||
[
|
||||
[":::", "file", "application/zip"],
|
||||
".zip"
|
||||
],
|
||||
|
||||
[
|
||||
[":::a", "file", "application/zip"],
|
||||
"a.zip"
|
||||
],
|
||||
]
|
||||
[[":::", "file", "application/zip"], ".zip"],
|
||||
|
||||
[[":::a", "file", "application/zip"], "a.zip"]
|
||||
];
|
||||
|
||||
describe("utils/formatDownloadTitle unit tests", () => {
|
||||
|
||||
testCases.forEach(testCase => {
|
||||
testCases.forEach((testCase) => {
|
||||
return it(`With args '${JSON.stringify(testCase[0])}' it should return '${testCase[1]}'`, () => {
|
||||
const [value, expected] = testCase;
|
||||
const actual = formatDownloadTitle(...value);
|
||||
expect(actual).toEqual(expected);
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
})
|
||||
|
||||
execute()
|
||||
execute();
|
||||
|
||||
@@ -8,14 +8,9 @@ etapi.describeEtapi("import", () => {
|
||||
xit("import", async () => {
|
||||
const scriptDir = path.dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
const zipFileBuffer = fs.readFileSync(
|
||||
path.resolve(scriptDir, "test-export.zip")
|
||||
);
|
||||
const zipFileBuffer = fs.readFileSync(path.resolve(scriptDir, "test-export.zip"));
|
||||
|
||||
const response = await etapi.postEtapiContent(
|
||||
"notes/root/import",
|
||||
zipFileBuffer
|
||||
);
|
||||
const response = await etapi.postEtapiContent("notes/root/import", zipFileBuffer);
|
||||
expect(response.status).toEqual(201);
|
||||
|
||||
const { note, branch } = await response.json();
|
||||
@@ -23,9 +18,7 @@ etapi.describeEtapi("import", () => {
|
||||
expect(note.title).toEqual("test-export");
|
||||
expect(branch.parentNoteId).toEqual("root");
|
||||
|
||||
const content = await (
|
||||
await etapi.getEtapiContent(`notes/${note.noteId}/content`)
|
||||
).text();
|
||||
const content = await (await etapi.getEtapiContent(`notes/${note.noteId}/content`)).text();
|
||||
expect(content).toContain("test export content");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
describe("Notes", () => {
|
||||
it("zzz", () => {
|
||||
|
||||
});
|
||||
it("zzz", () => {});
|
||||
});
|
||||
|
||||
@@ -8,7 +8,7 @@ etapi.describeEtapi("notes", () => {
|
||||
type: "text",
|
||||
title: "Hello World!",
|
||||
content: "Content",
|
||||
prefix: "Custom prefix",
|
||||
prefix: "Custom prefix"
|
||||
});
|
||||
|
||||
expect(note.title).toEqual("Hello World!");
|
||||
@@ -18,9 +18,7 @@ etapi.describeEtapi("notes", () => {
|
||||
const rNote = await etapi.getEtapi(`notes/${note.noteId}`);
|
||||
expect(rNote.title).toEqual("Hello World!");
|
||||
|
||||
const rContent = await (
|
||||
await etapi.getEtapiContent(`notes/${note.noteId}/content`)
|
||||
).text();
|
||||
const rContent = await (await etapi.getEtapiContent(`notes/${note.noteId}/content`)).text();
|
||||
expect(rContent).toEqual("Content");
|
||||
|
||||
const rBranch = await etapi.getEtapi(`branches/${branch.branchId}`);
|
||||
@@ -33,7 +31,7 @@ etapi.describeEtapi("notes", () => {
|
||||
parentNoteId: "root",
|
||||
type: "text",
|
||||
title: "Hello World!",
|
||||
content: "Content",
|
||||
content: "Content"
|
||||
});
|
||||
|
||||
await etapi.patchEtapi(`notes/${note.noteId}`, {
|
||||
@@ -41,7 +39,7 @@ etapi.describeEtapi("notes", () => {
|
||||
type: "code",
|
||||
mime: "text/apl",
|
||||
dateCreated: "2000-01-01 12:34:56.999+0200",
|
||||
utcDateCreated: "2000-01-01 10:34:56.999Z",
|
||||
utcDateCreated: "2000-01-01 10:34:56.999Z"
|
||||
});
|
||||
|
||||
const rNote = await etapi.getEtapi(`notes/${note.noteId}`);
|
||||
@@ -57,14 +55,12 @@ etapi.describeEtapi("notes", () => {
|
||||
parentNoteId: "root",
|
||||
type: "text",
|
||||
title: "Hello World!",
|
||||
content: "Content",
|
||||
content: "Content"
|
||||
});
|
||||
|
||||
await etapi.putEtapiContent(`notes/${note.noteId}/content`, "new content");
|
||||
|
||||
const rContent = await (
|
||||
await etapi.getEtapiContent(`notes/${note.noteId}/content`)
|
||||
).text();
|
||||
const rContent = await (await etapi.getEtapiContent(`notes/${note.noteId}/content`)).text();
|
||||
expect(rContent).toEqual("new content");
|
||||
});
|
||||
|
||||
@@ -73,16 +69,14 @@ etapi.describeEtapi("notes", () => {
|
||||
parentNoteId: "root",
|
||||
type: "file",
|
||||
title: "Hello World!",
|
||||
content: "ZZZ",
|
||||
content: "ZZZ"
|
||||
});
|
||||
|
||||
const updatedContent = crypto.randomBytes(16);
|
||||
|
||||
await etapi.putEtapiContent(`notes/${note.noteId}/content`, updatedContent);
|
||||
|
||||
const rContent = await (
|
||||
await etapi.getEtapiContent(`notes/${note.noteId}/content`)
|
||||
).arrayBuffer();
|
||||
const rContent = await (await etapi.getEtapiContent(`notes/${note.noteId}/content`)).arrayBuffer();
|
||||
expect(Buffer.from(new Uint8Array(rContent))).toEqual(updatedContent);
|
||||
});
|
||||
|
||||
@@ -91,7 +85,7 @@ etapi.describeEtapi("notes", () => {
|
||||
parentNoteId: "root",
|
||||
type: "text",
|
||||
title: "Hello World!",
|
||||
content: "Content",
|
||||
content: "Content"
|
||||
});
|
||||
|
||||
await etapi.deleteEtapi(`notes/${note.noteId}`);
|
||||
|
||||
@@ -4,16 +4,11 @@ import BAttribute from "../../src/becca/entities/battribute.js";
|
||||
import becca from "../../src/becca/becca.js";
|
||||
import randtoken from "rand-token";
|
||||
import SearchResult from "../../src/services/search/search_result.js";
|
||||
import { NoteType } from "../../src/becca/entities/rows.js";
|
||||
import type { NoteType } from "../../src/becca/entities/rows.js";
|
||||
randtoken.generator({ source: "crypto" });
|
||||
|
||||
function findNoteByTitle(
|
||||
searchResults: Array<SearchResult>,
|
||||
title: string
|
||||
): BNote | undefined {
|
||||
return searchResults
|
||||
.map((sr) => becca.notes[sr.noteId])
|
||||
.find((note) => note.title === title);
|
||||
function findNoteByTitle(searchResults: Array<SearchResult>, title: string): BNote | undefined {
|
||||
return searchResults.map((sr) => becca.notes[sr.noteId]).find((note) => note.title === title);
|
||||
}
|
||||
|
||||
class NoteBuilder {
|
||||
@@ -29,7 +24,7 @@ class NoteBuilder {
|
||||
type: "label",
|
||||
isInheritable,
|
||||
name,
|
||||
value,
|
||||
value
|
||||
});
|
||||
|
||||
return this;
|
||||
@@ -41,7 +36,7 @@ class NoteBuilder {
|
||||
noteId: this.note.noteId,
|
||||
type: "relation",
|
||||
name,
|
||||
value: targetNote.noteId,
|
||||
value: targetNote.noteId
|
||||
});
|
||||
|
||||
return this;
|
||||
@@ -53,7 +48,7 @@ class NoteBuilder {
|
||||
noteId: childNoteBuilder.note.noteId,
|
||||
parentNoteId: this.note.noteId,
|
||||
prefix,
|
||||
notePosition: 10,
|
||||
notePosition: 10
|
||||
});
|
||||
|
||||
return this;
|
||||
@@ -70,7 +65,7 @@ function note(title: string, extraParams = {}) {
|
||||
noteId: id(),
|
||||
title: title,
|
||||
type: "text" as NoteType,
|
||||
mime: "text/html",
|
||||
mime: "text/html"
|
||||
},
|
||||
extraParams
|
||||
);
|
||||
@@ -83,5 +78,5 @@ function note(title: string, extraParams = {}) {
|
||||
export default {
|
||||
NoteBuilder,
|
||||
findNoteByTitle,
|
||||
note,
|
||||
note
|
||||
};
|
||||
|
||||
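A hedged sketch of how a search spec can use the builder helpers above; the module path is an assumption, and only note(), .label(), .relation(), .child() and findNoteByTitle() come from this file:

import noteTestUtils from "./becca_mocking.js"; // assumed filename for the helper module above

// Build a tiny in-memory tree: Czechia -> Prague, plus a relation from Austria.
const czechia = noteTestUtils.note("Czechia")
    .label("capital", "Prague")                 // label attribute on the note
    .child(noteTestUtils.note("Prague"));       // branch under Czechia with notePosition 10

const austria = noteTestUtils.note("Austria")
    .relation("neighbor", czechia.note);        // relation attribute pointing at Czechia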
@@ -2,131 +2,78 @@ import lex from "../../src/services/search/services/lex.js";
|
||||
|
||||
describe("Lexer fulltext", () => {
|
||||
it("simple lexing", () => {
|
||||
expect(lex("hello world").fulltextTokens.map((t) => t.token)).toEqual([
|
||||
"hello",
|
||||
"world",
|
||||
]);
|
||||
expect(lex("hello world").fulltextTokens.map((t) => t.token)).toEqual(["hello", "world"]);
|
||||
|
||||
expect(lex("hello, world").fulltextTokens.map((t) => t.token)).toEqual([
|
||||
"hello",
|
||||
"world",
|
||||
]);
|
||||
expect(lex("hello, world").fulltextTokens.map((t) => t.token)).toEqual(["hello", "world"]);
|
||||
});
|
||||
|
||||
it("use quotes to keep words together", () => {
|
||||
expect(
|
||||
lex("'hello world' my friend").fulltextTokens.map((t) => t.token)
|
||||
).toEqual(["hello world", "my", "friend"]);
|
||||
expect(lex("'hello world' my friend").fulltextTokens.map((t) => t.token)).toEqual(["hello world", "my", "friend"]);
|
||||
|
||||
expect(
|
||||
lex('"hello world" my friend').fulltextTokens.map((t) => t.token)
|
||||
).toEqual(["hello world", "my", "friend"]);
|
||||
expect(lex('"hello world" my friend').fulltextTokens.map((t) => t.token)).toEqual(["hello world", "my", "friend"]);
|
||||
|
||||
expect(
|
||||
lex("`hello world` my friend").fulltextTokens.map((t) => t.token)
|
||||
).toEqual(["hello world", "my", "friend"]);
|
||||
expect(lex("`hello world` my friend").fulltextTokens.map((t) => t.token)).toEqual(["hello world", "my", "friend"]);
|
||||
});
|
||||
|
||||
it("you can use different quotes and other special characters inside quotes", () => {
|
||||
expect(
|
||||
lex("'i can use \" or ` or #~=*' without problem").fulltextTokens.map(
|
||||
(t) => t.token
|
||||
)
|
||||
).toEqual(['i can use " or ` or #~=*', "without", "problem"]);
|
||||
expect(lex("'i can use \" or ` or #~=*' without problem").fulltextTokens.map((t) => t.token)).toEqual(['i can use " or ` or #~=*', "without", "problem"]);
|
||||
});
|
||||
|
||||
it("I can use backslash to escape quotes", () => {
|
||||
expect(lex('hello \\"world\\"').fulltextTokens.map((t) => t.token)).toEqual(
|
||||
["hello", '"world"']
|
||||
);
|
||||
expect(lex('hello \\"world\\"').fulltextTokens.map((t) => t.token)).toEqual(["hello", '"world"']);
|
||||
|
||||
expect(lex("hello \\'world\\'").fulltextTokens.map((t) => t.token)).toEqual(
|
||||
["hello", "'world'"]
|
||||
);
|
||||
expect(lex("hello \\'world\\'").fulltextTokens.map((t) => t.token)).toEqual(["hello", "'world'"]);
|
||||
|
||||
expect(lex("hello \\`world\\`").fulltextTokens.map((t) => t.token)).toEqual(
|
||||
["hello", "`world`"]
|
||||
);
|
||||
expect(lex("hello \\`world\\`").fulltextTokens.map((t) => t.token)).toEqual(["hello", "`world`"]);
|
||||
|
||||
expect(
|
||||
lex('"hello \\"world\\"').fulltextTokens.map((t) => t.token)
|
||||
).toEqual(['hello "world"']);
|
||||
expect(lex('"hello \\"world\\"').fulltextTokens.map((t) => t.token)).toEqual(['hello "world"']);
|
||||
|
||||
expect(
|
||||
lex("'hello \\'world\\''").fulltextTokens.map((t) => t.token)
|
||||
).toEqual(["hello 'world'"]);
|
||||
expect(lex("'hello \\'world\\''").fulltextTokens.map((t) => t.token)).toEqual(["hello 'world'"]);
|
||||
|
||||
expect(
|
||||
lex("`hello \\`world\\``").fulltextTokens.map((t) => t.token)
|
||||
).toEqual(["hello `world`"]);
|
||||
expect(lex("`hello \\`world\\``").fulltextTokens.map((t) => t.token)).toEqual(["hello `world`"]);
|
||||
|
||||
expect(lex("\\#token").fulltextTokens.map((t) => t.token)).toEqual([
|
||||
"#token",
|
||||
]);
|
||||
expect(lex("\\#token").fulltextTokens.map((t) => t.token)).toEqual(["#token"]);
|
||||
});
|
||||
|
||||
it("quote inside a word does not have a special meaning", () => {
|
||||
const lexResult = lex("d'Artagnan is dead #hero = d'Artagnan");
|
||||
|
||||
expect(lexResult.fulltextTokens.map((t) => t.token)).toEqual([
|
||||
"d'artagnan",
|
||||
"is",
|
||||
"dead",
|
||||
]);
|
||||
expect(lexResult.fulltextTokens.map((t) => t.token)).toEqual(["d'artagnan", "is", "dead"]);
|
||||
|
||||
expect(lexResult.expressionTokens.map((t) => t.token)).toEqual([
|
||||
"#hero",
|
||||
"=",
|
||||
"d'artagnan",
|
||||
]);
|
||||
expect(lexResult.expressionTokens.map((t) => t.token)).toEqual(["#hero", "=", "d'artagnan"]);
|
||||
});
|
||||
|
||||
it("if quote is not ended then it's just one long token", () => {
|
||||
expect(lex("'unfinished quote").fulltextTokens.map((t) => t.token)).toEqual(
|
||||
["unfinished quote"]
|
||||
);
|
||||
expect(lex("'unfinished quote").fulltextTokens.map((t) => t.token)).toEqual(["unfinished quote"]);
|
||||
});
|
||||
|
||||
it("parenthesis and symbols in fulltext section are just normal characters", () => {
|
||||
expect(
|
||||
lex("what's u=p <b(r*t)h>").fulltextTokens.map((t) => t.token)
|
||||
).toEqual(["what's", "u=p", "<b(r*t)h>"]);
|
||||
expect(lex("what's u=p <b(r*t)h>").fulltextTokens.map((t) => t.token)).toEqual(["what's", "u=p", "<b(r*t)h>"]);
|
||||
});
|
||||
|
||||
it("operator characters in expressions are separate tokens", () => {
|
||||
expect(
|
||||
lex("# abc+=-def**-+d").expressionTokens.map((t) => t.token)
|
||||
).toEqual(["#", "abc", "+=-", "def", "**-+", "d"]);
|
||||
expect(lex("# abc+=-def**-+d").expressionTokens.map((t) => t.token)).toEqual(["#", "abc", "+=-", "def", "**-+", "d"]);
|
||||
});
|
||||
|
||||
it("escaping special characters", () => {
|
||||
expect(lex("hello \\#\\~\\'").fulltextTokens.map((t) => t.token)).toEqual([
|
||||
"hello",
|
||||
"#~'",
|
||||
]);
|
||||
expect(lex("hello \\#\\~\\'").fulltextTokens.map((t) => t.token)).toEqual(["hello", "#~'"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Lexer expression", () => {
|
||||
it("simple attribute existence", () => {
|
||||
expect(
|
||||
lex("#label ~relation").expressionTokens.map((t) => t.token)
|
||||
).toEqual(["#label", "~relation"]);
|
||||
expect(lex("#label ~relation").expressionTokens.map((t) => t.token)).toEqual(["#label", "~relation"]);
|
||||
});
|
||||
|
||||
it("simple label operators", () => {
|
||||
expect(lex("#label*=*text").expressionTokens.map((t) => t.token)).toEqual([
|
||||
"#label",
|
||||
"*=*",
|
||||
"text",
|
||||
]);
|
||||
expect(lex("#label*=*text").expressionTokens.map((t) => t.token)).toEqual(["#label", "*=*", "text"]);
|
||||
});
|
||||
|
||||
it("simple label operator with in quotes", () => {
|
||||
expect(lex("#label*=*'text'").expressionTokens).toEqual([
|
||||
{ token: "#label", inQuotes: false, startIndex: 0, endIndex: 5 },
|
||||
{ token: "*=*", inQuotes: false, startIndex: 6, endIndex: 8 },
|
||||
{ token: "text", inQuotes: true, startIndex: 10, endIndex: 13 },
|
||||
{ token: "text", inQuotes: true, startIndex: 10, endIndex: 13 }
|
||||
]);
|
||||
});
|
||||
|
||||
@@ -134,7 +81,7 @@ describe("Lexer expression", () => {
|
||||
expect(lex("#label*=*text").expressionTokens).toEqual([
|
||||
{ token: "#label", inQuotes: false, startIndex: 0, endIndex: 5 },
|
||||
{ token: "*=*", inQuotes: false, startIndex: 6, endIndex: 8 },
|
||||
{ token: "text", inQuotes: false, startIndex: 9, endIndex: 12 },
|
||||
{ token: "text", inQuotes: false, startIndex: 9, endIndex: 12 }
|
||||
]);
|
||||
});
|
||||
|
||||
@@ -143,34 +90,22 @@ describe("Lexer expression", () => {
|
||||
{ token: "#label", inQuotes: false, startIndex: 0, endIndex: 5 },
|
||||
{ token: "=", inQuotes: false, startIndex: 7, endIndex: 7 },
|
||||
// weird case for empty strings which ends up with endIndex < startIndex :-(
|
||||
{ token: "", inQuotes: true, startIndex: 10, endIndex: 9 },
|
||||
{ token: "", inQuotes: true, startIndex: 10, endIndex: 9 }
|
||||
]);
|
||||
});
|
||||
|
||||
it("note. prefix also separates fulltext from expression", () => {
|
||||
expect(
|
||||
lex(`hello fulltext note.labels.capital = Prague`).expressionTokens.map(
|
||||
(t) => t.token
|
||||
)
|
||||
).toEqual(["note", ".", "labels", ".", "capital", "=", "prague"]);
|
||||
expect(lex(`hello fulltext note.labels.capital = Prague`).expressionTokens.map((t) => t.token)).toEqual(["note", ".", "labels", ".", "capital", "=", "prague"]);
|
||||
});
|
||||
|
||||
it("note. prefix in quotes will note start expression", () => {
|
||||
expect(
|
||||
lex(`hello fulltext "note.txt"`).expressionTokens.map((t) => t.token)
|
||||
).toEqual([]);
|
||||
expect(lex(`hello fulltext "note.txt"`).expressionTokens.map((t) => t.token)).toEqual([]);
|
||||
|
||||
expect(
|
||||
lex(`hello fulltext "note.txt"`).fulltextTokens.map((t) => t.token)
|
||||
).toEqual(["hello", "fulltext", "note.txt"]);
|
||||
expect(lex(`hello fulltext "note.txt"`).fulltextTokens.map((t) => t.token)).toEqual(["hello", "fulltext", "note.txt"]);
|
||||
});
|
||||
|
||||
it("complex expressions with and, or and parenthesis", () => {
|
||||
expect(
|
||||
lex(`# (#label=text OR #second=text) AND ~relation`).expressionTokens.map(
|
||||
(t) => t.token
|
||||
)
|
||||
).toEqual([
|
||||
expect(lex(`# (#label=text OR #second=text) AND ~relation`).expressionTokens.map((t) => t.token)).toEqual([
|
||||
"#",
|
||||
"(",
|
||||
"#label",
|
||||
@@ -182,16 +117,12 @@ describe("Lexer expression", () => {
|
||||
"text",
|
||||
")",
|
||||
"and",
|
||||
"~relation",
|
||||
"~relation"
|
||||
]);
|
||||
});
|
||||
|
||||
it("dot separated properties", () => {
|
||||
expect(
|
||||
lex(
|
||||
`# ~author.title = 'Hugh Howey' AND note.'book title' = 'Silo'`
|
||||
).expressionTokens.map((t) => t.token)
|
||||
).toEqual([
|
||||
expect(lex(`# ~author.title = 'Hugh Howey' AND note.'book title' = 'Silo'`).expressionTokens.map((t) => t.token)).toEqual([
|
||||
"#",
|
||||
"~author",
|
||||
".",
|
||||
@@ -203,54 +134,29 @@ describe("Lexer expression", () => {
|
||||
".",
|
||||
"book title",
|
||||
"=",
|
||||
"silo",
|
||||
"silo"
|
||||
]);
|
||||
});
|
||||
|
||||
it("negation of label and relation", () => {
|
||||
expect(
|
||||
lex(`#!capital ~!neighbor`).expressionTokens.map((t) => t.token)
|
||||
).toEqual(["#!capital", "~!neighbor"]);
|
||||
expect(lex(`#!capital ~!neighbor`).expressionTokens.map((t) => t.token)).toEqual(["#!capital", "~!neighbor"]);
|
||||
});
|
||||
|
||||
it("negation of sub-expression", () => {
|
||||
expect(
|
||||
lex(`# not(#capital) and note.noteId != "root"`).expressionTokens.map(
|
||||
(t) => t.token
|
||||
)
|
||||
).toEqual([
|
||||
"#",
|
||||
"not",
|
||||
"(",
|
||||
"#capital",
|
||||
")",
|
||||
"and",
|
||||
"note",
|
||||
".",
|
||||
"noteid",
|
||||
"!=",
|
||||
"root",
|
||||
]);
|
||||
expect(lex(`# not(#capital) and note.noteId != "root"`).expressionTokens.map((t) => t.token)).toEqual(["#", "not", "(", "#capital", ")", "and", "note", ".", "noteid", "!=", "root"]);
|
||||
});
|
||||
|
||||
it("order by multiple labels", () => {
|
||||
expect(lex(`# orderby #a,#b`).expressionTokens.map((t) => t.token)).toEqual(
|
||||
["#", "orderby", "#a", ",", "#b"]
|
||||
);
|
||||
expect(lex(`# orderby #a,#b`).expressionTokens.map((t) => t.token)).toEqual(["#", "orderby", "#a", ",", "#b"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Lexer invalid queries and edge cases", () => {
|
||||
it("concatenated attributes", () => {
|
||||
expect(lex("#label~relation").expressionTokens.map((t) => t.token)).toEqual(
|
||||
["#label", "~relation"]
|
||||
);
|
||||
expect(lex("#label~relation").expressionTokens.map((t) => t.token)).toEqual(["#label", "~relation"]);
|
||||
});
|
||||
|
||||
it("trailing escape \\", () => {
|
||||
expect(lex("abc \\").fulltextTokens.map((t) => t.token)).toEqual([
|
||||
"abc",
|
||||
"\\",
|
||||
]);
|
||||
expect(lex("abc \\").fulltextTokens.map((t) => t.token)).toEqual(["abc", "\\"]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,25 +1,11 @@
import handleParens from "../../src/services/search/services/handle_parens.js";
import { TokenStructure } from "../../src/services/search/services/types.js";
import type { TokenStructure } from "../../src/services/search/services/types.js";

describe("Parens handler", () => {
it("handles parens", () => {
const input = ["(", "hello", ")", "and", "(", "(", "pick", "one", ")", "and", "another", ")"]
.map(token => ({token}));
const input = ["(", "hello", ")", "and", "(", "(", "pick", "one", ")", "and", "another", ")"].map((token) => ({ token }));

const actual: TokenStructure = [
[
{token: "hello"}
],
{token: "and"},
[
[
{token: "pick"},
{token: "one"}
],
{token: "and"},
{token: "another"}
]
];
const actual: TokenStructure = [[{ token: "hello" }], { token: "and" }, [[{ token: "pick" }, { token: "one" }], { token: "and" }, { token: "another" }]];

expect(handleParens(input)).toEqual(actual);
});

@@ -1,12 +1,274 @@
// @ts-nocheck
// There are many issues with the types of the parser e.g. "parse" function returns "Expression"
// but we access properties like "subExpressions" which is not defined in the "Expression" class.

import AndExp from "../../src/services/search/expressions/and.js";
import AttributeExistsExp from "../../src/services/search/expressions/attribute_exists.js";
import Expression from "../../src/services/search/expressions/expression.js";
import LabelComparisonExp from "../../src/services/search/expressions/label_comparison.js";
import NotExp from "../../src/services/search/expressions/not.js";
import NoteContentFulltextExp from "../../src/services/search/expressions/note_content_fulltext.js";
import NoteFlatTextExp from "../../src/services/search/expressions/note_flat_text.js";
import OrExp from "../../src/services/search/expressions/or.js";
import OrderByAndLimitExp from "../../src/services/search/expressions/order_by_and_limit.js";
import PropertyComparisonExp from "../../src/services/search/expressions/property_comparison.js";
import SearchContext from "../../src/services/search/search_context.js";
import parse from "../../src/services/search/services/parse.js";
import { default as parseInternal, type ParseOpts } from "../../src/services/search/services/parse.js";

function tokens(toks: Array<string>, cur = 0): Array<any> {
describe("Parser", () => {
it("fulltext parser without content", () => {
const rootExp = parse({
fulltextTokens: tokens(["hello", "hi"]),
expressionTokens: [],
searchContext: new SearchContext()
}, AndExp);

expectExpression(rootExp.subExpressions[0], PropertyComparisonExp);
const orExp = expectExpression(rootExp.subExpressions[2], OrExp);
const flatTextExp = expectExpression(orExp.subExpressions[0], NoteFlatTextExp);
expect(flatTextExp.tokens).toEqual(["hello", "hi"]);
});
||||
|
||||
it("fulltext parser with content", () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: tokens(["hello", "hi"]),
|
||||
expressionTokens: [],
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
const orExp = expectExpression(rootExp.subExpressions[2], OrExp);
|
||||
|
||||
const firstSub = expectExpression(orExp.subExpressions[0], NoteFlatTextExp);
|
||||
expect(firstSub.tokens).toEqual(["hello", "hi"]);
|
||||
|
||||
const secondSub = expectExpression(orExp.subExpressions[1], NoteContentFulltextExp);
|
||||
expect(secondSub.tokens).toEqual(["hello", "hi"]);
|
||||
});
|
||||
|
||||
it("simple label comparison", () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["#mylabel", "=", "text"]),
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
const labelComparisonExp = expectExpression(rootExp.subExpressions[2], LabelComparisonExp);
|
||||
expect(labelComparisonExp.attributeType).toEqual("label");
|
||||
expect(labelComparisonExp.attributeName).toEqual("mylabel");
|
||||
expect(labelComparisonExp.comparator).toBeTruthy();
|
||||
});
|
||||
|
||||
it("simple attribute negation", () => {
|
||||
let rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["#!mylabel"]),
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
let notExp = expectExpression(rootExp.subExpressions[2], NotExp);
|
||||
let attributeExistsExp = expectExpression(notExp.subExpression, AttributeExistsExp);
|
||||
expect(attributeExistsExp.attributeType).toEqual("label");
|
||||
expect(attributeExistsExp.attributeName).toEqual("mylabel");
|
||||
|
||||
rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["~!myrelation"]),
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
notExp = expectExpression(rootExp.subExpressions[2], NotExp);
|
||||
attributeExistsExp = expectExpression(notExp.subExpression, AttributeExistsExp);
|
||||
expect(attributeExistsExp.attributeType).toEqual("relation");
|
||||
expect(attributeExistsExp.attributeName).toEqual("myrelation");
|
||||
});
|
||||
|
||||
it("simple label AND", () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["#first", "=", "text", "and", "#second", "=", "text"]),
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
const andExp = expectExpression(rootExp.subExpressions[2], AndExp);
|
||||
const [firstSub, secondSub] = expectSubexpressions(andExp, LabelComparisonExp, LabelComparisonExp);
|
||||
|
||||
expect(firstSub.attributeName).toEqual("first");
|
||||
expect(secondSub.attributeName).toEqual("second");
|
||||
});
|
||||
|
||||
it("simple label AND without explicit AND", () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["#first", "=", "text", "#second", "=", "text"]),
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
const andExp = expectExpression(rootExp.subExpressions[2], AndExp);
|
||||
const [firstSub, secondSub] = expectSubexpressions(andExp, LabelComparisonExp, LabelComparisonExp);
|
||||
|
||||
expect(firstSub.attributeName).toEqual("first");
|
||||
expect(secondSub.attributeName).toEqual("second");
|
||||
});
|
||||
|
||||
it("simple label OR", () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["#first", "=", "text", "or", "#second", "=", "text"]),
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
const orExp = expectExpression(rootExp.subExpressions[2], OrExp);
|
||||
const [firstSub, secondSub] = expectSubexpressions(orExp, LabelComparisonExp, LabelComparisonExp);
|
||||
expect(firstSub.attributeName).toEqual("first");
|
||||
expect(secondSub.attributeName).toEqual("second");
|
||||
});
|
||||
|
||||
it("fulltext and simple label", () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: tokens(["hello"]),
|
||||
expressionTokens: tokens(["#mylabel", "=", "text"]),
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
const [firstSub, _, thirdSub, fourth] = expectSubexpressions(rootExp, PropertyComparisonExp, undefined, OrExp, LabelComparisonExp);
|
||||
|
||||
expect(firstSub.propertyName).toEqual("isArchived");
|
||||
|
||||
const noteFlatTextExp = expectExpression(thirdSub.subExpressions[0], NoteFlatTextExp);
|
||||
expect(noteFlatTextExp.tokens).toEqual(["hello"]);
|
||||
|
||||
expect(fourth.attributeName).toEqual("mylabel");
|
||||
});
|
||||
|
||||
it("label sub-expression", () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["#first", "=", "text", "or", ["#second", "=", "text", "and", "#third", "=", "text"]]),
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
const orExp = expectExpression(rootExp.subExpressions[2], OrExp);
|
||||
const [firstSub, secondSub] = expectSubexpressions(orExp, LabelComparisonExp, AndExp);
|
||||
|
||||
expect(firstSub.attributeName).toEqual("first");
|
||||
|
||||
const [firstSubSub, secondSubSub] = expectSubexpressions(secondSub, LabelComparisonExp, LabelComparisonExp);
|
||||
expect(firstSubSub.attributeName).toEqual("second");
|
||||
expect(secondSubSub.attributeName).toEqual("third");
|
||||
});
|
||||
|
||||
it("label sub-expression without explicit operator", () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["#first", ["#second", "or", "#third"], "#fourth"]),
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
const andExp = expectExpression(rootExp.subExpressions[2], AndExp);
|
||||
const [firstSub, secondSub, thirdSub] = expectSubexpressions(andExp, AttributeExistsExp, OrExp, AttributeExistsExp);
|
||||
|
||||
expect(firstSub.attributeName).toEqual("first");
|
||||
|
||||
const [firstSubSub, secondSubSub] = expectSubexpressions(secondSub, AttributeExistsExp, AttributeExistsExp);
|
||||
expect(firstSubSub.attributeName).toEqual("second");
|
||||
expect(secondSubSub.attributeName).toEqual("third");
|
||||
|
||||
expect(thirdSub.attributeName).toEqual("fourth");
|
||||
});
|
||||
|
||||
it("parses limit without order by", () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: tokens(["hello", "hi"]),
|
||||
expressionTokens: [],
|
||||
searchContext: new SearchContext({ limit: 2 })
|
||||
}, OrderByAndLimitExp);
|
||||
|
||||
expect(rootExp.limit).toBe(2);
|
||||
expect(rootExp.subExpression).toBeInstanceOf(AndExp);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Invalid expressions", () => {
|
||||
it("incomplete comparison", () => {
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
parseInternal({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["#first", "="]),
|
||||
searchContext
|
||||
});
|
||||
|
||||
expect(searchContext.error).toEqual('Misplaced or incomplete expression "="');
|
||||
});
|
||||
|
||||
it("comparison between labels is impossible", () => {
|
||||
let searchContext = new SearchContext();
|
||||
searchContext.originalQuery = "#first = #second";
|
||||
|
||||
parseInternal({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["#first", "=", "#second"]),
|
||||
searchContext
|
||||
});
|
||||
|
||||
expect(searchContext.error).toEqual(`Error near token "#second" in "#first = #second", it's possible to compare with constant only.`);
|
||||
|
||||
searchContext = new SearchContext();
|
||||
searchContext.originalQuery = "#first = note.relations.second";
|
||||
|
||||
parseInternal({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["#first", "=", "note", ".", "relations", "second"]),
|
||||
searchContext
|
||||
});
|
||||
|
||||
expect(searchContext.error).toEqual(`Error near token "note" in "#first = note.relations.second", it's possible to compare with constant only.`);
|
||||
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: [
|
||||
{ token: "#first", inQuotes: false },
|
||||
{ token: "=", inQuotes: false },
|
||||
{ token: "#second", inQuotes: true }
|
||||
],
|
||||
searchContext: new SearchContext()
|
||||
}, AndExp);
|
||||
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
const labelComparisonExp = expectExpression(rootExp.subExpressions[2], LabelComparisonExp);
|
||||
expect(labelComparisonExp.attributeType).toEqual("label");
|
||||
expect(labelComparisonExp.attributeName).toEqual("first");
|
||||
expect(labelComparisonExp.comparator).toBeTruthy();
|
||||
});
|
||||
|
||||
it("searching by relation without note property", () => {
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
parseInternal({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(["~first", "=", "text", "-", "abc"]),
|
||||
searchContext
|
||||
});
|
||||
|
||||
expect(searchContext.error).toEqual('Relation can be compared only with property, e.g. ~relation.title=hello in ""');
|
||||
});
|
||||
});
|
||||
|
type ClassType<T extends Expression> = new (...args: any[]) => T;

function tokens(toks: (string | string[])[], cur = 0): Array<any> {
return toks.map((arg) => {
if (Array.isArray(arg)) {
return tokens(arg, cur);
@@ -17,303 +279,77 @@ function tokens(toks: Array<string>, cur = 0): Array<any> {
token: arg,
inQuotes: false,
startIndex: cur - arg.length,
endIndex: cur - 1,
endIndex: cur - 1
};
}
});
}

function assertIsArchived(exp: Expression) {
expect(exp.constructor.name).toEqual('PropertyComparisonExp');
expect(exp.propertyName).toEqual('isArchived');
expect(exp.operator).toEqual('=');
expect(exp.comparedValue).toEqual('false');
function assertIsArchived(_exp: Expression) {
const exp = expectExpression(_exp, PropertyComparisonExp);
expect(exp.propertyName).toEqual("isArchived");
expect(exp.operator).toEqual("=");
expect(exp.comparedValue).toEqual("false");
}
||||
|
||||
describe('Parser', () => {
|
||||
it('fulltext parser without content', () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: tokens(['hello', 'hi']),
|
||||
expressionTokens: [],
|
||||
searchContext: new SearchContext({ excludeArchived: true }),
|
||||
});
|
||||
/**
|
||||
* Parses the corresponding {@link Expression} from plain text, while also expecting the resulting expression to be of the given type.
|
||||
*
|
||||
* @param opts the options for parsing.
|
||||
* @param type the expected type of the expression.
|
||||
* @returns the expression typecasted to the expected type.
|
||||
*/
|
||||
function parse<T extends Expression>(opts: ParseOpts, type: ClassType<T>) {
|
||||
return expectExpression(parseInternal(opts), type);
|
||||
}
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
expect(rootExp.subExpressions[0].constructor.name).toEqual('PropertyComparisonExp');
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('OrExp');
|
||||
expect(rootExp.subExpressions[2].subExpressions[0].constructor.name).toEqual('NoteFlatTextExp');
|
||||
expect(rootExp.subExpressions[2].subExpressions[0].tokens).toEqual(['hello', 'hi']);
|
||||
});
|
||||
/**
|
||||
* Expects the given {@link Expression} to be of the given type.
|
||||
*
|
||||
* @param exp an instance of an {@link Expression}.
|
||||
* @param type a type class such as {@link AndExp}, {@link OrExp}, etc.
|
||||
* @returns the same expression typecasted to the expected type.
|
||||
*/
|
||||
function expectExpression<T extends Expression>(exp: Expression, type: ClassType<T>) {
|
||||
expect(exp).toBeInstanceOf(type);
|
||||
return exp as T;
|
||||
}
|
||||
|
||||
it('fulltext parser with content', () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: tokens(['hello', 'hi']),
|
||||
expressionTokens: [],
|
||||
searchContext: new SearchContext(),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('OrExp');
|
||||
|
||||
const subs = rootExp.subExpressions[2].subExpressions;
|
||||
|
||||
expect(subs[0].constructor.name).toEqual('NoteFlatTextExp');
|
||||
expect(subs[0].tokens).toEqual(['hello', 'hi']);
|
||||
|
||||
expect(subs[1].constructor.name).toEqual('NoteContentFulltextExp');
|
||||
expect(subs[1].tokens).toEqual(['hello', 'hi']);
|
||||
});
|
||||
|
||||
it('simple label comparison', () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['#mylabel', '=', 'text']),
|
||||
searchContext: new SearchContext(),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(rootExp.subExpressions[2].attributeType).toEqual('label');
|
||||
expect(rootExp.subExpressions[2].attributeName).toEqual('mylabel');
|
||||
expect(rootExp.subExpressions[2].comparator).toBeTruthy();
|
||||
});
|
||||
|
||||
it('simple attribute negation', () => {
|
||||
let rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['#!mylabel']),
|
||||
searchContext: new SearchContext(),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('NotExp');
|
||||
expect(rootExp.subExpressions[2].subExpression.constructor.name).toEqual('AttributeExistsExp');
|
||||
expect(rootExp.subExpressions[2].subExpression.attributeType).toEqual('label');
|
||||
expect(rootExp.subExpressions[2].subExpression.attributeName).toEqual('mylabel');
|
||||
|
||||
rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['~!myrelation']),
|
||||
searchContext: new SearchContext(),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('NotExp');
|
||||
expect(rootExp.subExpressions[2].subExpression.constructor.name).toEqual('AttributeExistsExp');
|
||||
expect(rootExp.subExpressions[2].subExpression.attributeType).toEqual('relation');
|
||||
expect(rootExp.subExpressions[2].subExpression.attributeName).toEqual('myrelation');
|
||||
});
|
||||
|
||||
it('simple label AND', () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['#first', '=', 'text', 'and', '#second', '=', 'text']),
|
||||
searchContext: new SearchContext(true),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('AndExp');
|
||||
const [firstSub, secondSub] = rootExp.subExpressions[2].subExpressions;
|
||||
|
||||
expect(firstSub.constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(firstSub.attributeName).toEqual('first');
|
||||
|
||||
expect(secondSub.constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(secondSub.attributeName).toEqual('second');
|
||||
});
|
||||
|
||||
it('simple label AND without explicit AND', () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['#first', '=', 'text', '#second', '=', 'text']),
|
||||
searchContext: new SearchContext(),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('AndExp');
|
||||
const [firstSub, secondSub] = rootExp.subExpressions[2].subExpressions;
|
||||
|
||||
expect(firstSub.constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(firstSub.attributeName).toEqual('first');
|
||||
|
||||
expect(secondSub.constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(secondSub.attributeName).toEqual('second');
|
||||
});
|
||||
|
||||
it('simple label OR', () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['#first', '=', 'text', 'or', '#second', '=', 'text']),
|
||||
searchContext: new SearchContext(),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('OrExp');
|
||||
const [firstSub, secondSub] = rootExp.subExpressions[2].subExpressions;
|
||||
|
||||
expect(firstSub.constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(firstSub.attributeName).toEqual('first');
|
||||
|
||||
expect(secondSub.constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(secondSub.attributeName).toEqual('second');
|
||||
});
|
||||
|
||||
it('fulltext and simple label', () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: tokens(['hello']),
|
||||
expressionTokens: tokens(['#mylabel', '=', 'text']),
|
||||
searchContext: new SearchContext({ excludeArchived: true }),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
const [firstSub, secondSub, thirdSub, fourth] = rootExp.subExpressions;
|
||||
|
||||
expect(firstSub.constructor.name).toEqual('PropertyComparisonExp');
|
||||
expect(firstSub.propertyName).toEqual('isArchived');
|
||||
|
||||
expect(thirdSub.constructor.name).toEqual('OrExp');
|
||||
expect(thirdSub.subExpressions[0].constructor.name).toEqual('NoteFlatTextExp');
|
||||
expect(thirdSub.subExpressions[0].tokens).toEqual(['hello']);
|
||||
|
||||
expect(fourth.constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(fourth.attributeName).toEqual('mylabel');
|
||||
});
|
||||
|
||||
it('label sub-expression', () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['#first', '=', 'text', 'or', ['#second', '=', 'text', 'and', '#third', '=', 'text']]),
|
||||
searchContext: new SearchContext(),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('OrExp');
|
||||
const [firstSub, secondSub] = rootExp.subExpressions[2].subExpressions;
|
||||
|
||||
expect(firstSub.constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(firstSub.attributeName).toEqual('first');
|
||||
|
||||
expect(secondSub.constructor.name).toEqual('AndExp');
|
||||
const [firstSubSub, secondSubSub] = secondSub.subExpressions;
|
||||
|
||||
expect(firstSubSub.constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(firstSubSub.attributeName).toEqual('second');
|
||||
|
||||
expect(secondSubSub.constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(secondSubSub.attributeName).toEqual('third');
|
||||
});
|
||||
|
||||
it('label sub-expression without explicit operator', () => {
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['#first', ['#second', 'or', '#third'], '#fourth']),
|
||||
searchContext: new SearchContext(),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('AndExp');
|
||||
const [firstSub, secondSub, thirdSub] = rootExp.subExpressions[2].subExpressions;
|
||||
|
||||
expect(firstSub.constructor.name).toEqual('AttributeExistsExp');
|
||||
expect(firstSub.attributeName).toEqual('first');
|
||||
|
||||
expect(secondSub.constructor.name).toEqual('OrExp');
|
||||
const [firstSubSub, secondSubSub] = secondSub.subExpressions;
|
||||
|
||||
expect(firstSubSub.constructor.name).toEqual('AttributeExistsExp');
|
||||
expect(firstSubSub.attributeName).toEqual('second');
|
||||
|
||||
expect(secondSubSub.constructor.name).toEqual('AttributeExistsExp');
|
||||
expect(secondSubSub.attributeName).toEqual('third');
|
||||
|
||||
expect(thirdSub.constructor.name).toEqual('AttributeExistsExp');
|
||||
expect(thirdSub.attributeName).toEqual('fourth');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid expressions', () => {
|
||||
it('incomplete comparison', () => {
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['#first', '=']),
|
||||
searchContext,
|
||||
});
|
||||
|
||||
expect(searchContext.error).toEqual('Misplaced or incomplete expression "="');
|
||||
});
|
||||
|
||||
it('comparison between labels is impossible', () => {
|
||||
let searchContext = new SearchContext();
|
||||
searchContext.originalQuery = '#first = #second';
|
||||
|
||||
parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['#first', '=', '#second']),
|
||||
searchContext,
|
||||
});
|
||||
|
||||
expect(searchContext.error).toEqual(
|
||||
`Error near token "#second" in "#first = #second", it's possible to compare with constant only.`
|
||||
);
|
||||
|
||||
searchContext = new SearchContext();
|
||||
searchContext.originalQuery = '#first = note.relations.second';
|
||||
|
||||
parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['#first', '=', 'note', '.', 'relations', 'second']),
|
||||
searchContext,
|
||||
});
|
||||
|
||||
expect(searchContext.error).toEqual(
|
||||
`Error near token "note" in "#first = note.relations.second", it's possible to compare with constant only.`
|
||||
);
|
||||
|
||||
const rootExp = parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: [
|
||||
{ token: '#first', inQuotes: false },
|
||||
{ token: '=', inQuotes: false },
|
||||
{ token: '#second', inQuotes: true },
|
||||
],
|
||||
searchContext: new SearchContext(),
|
||||
});
|
||||
|
||||
expect(rootExp.constructor.name).toEqual('AndExp');
|
||||
assertIsArchived(rootExp.subExpressions[0]);
|
||||
|
||||
expect(rootExp.subExpressions[2].constructor.name).toEqual('LabelComparisonExp');
|
||||
expect(rootExp.subExpressions[2].attributeType).toEqual('label');
|
||||
expect(rootExp.subExpressions[2].attributeName).toEqual('first');
|
||||
expect(rootExp.subExpressions[2].comparator).toBeTruthy();
|
||||
});
|
||||
|
||||
it('searching by relation without note property', () => {
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
parse({
|
||||
fulltextTokens: [],
|
||||
expressionTokens: tokens(['~first', '=', 'text', '-', 'abc']),
|
||||
searchContext,
|
||||
});
|
||||
|
||||
expect(searchContext.error).toEqual('Relation can be compared only with property, e.g. ~relation.title=hello in ""');
|
||||
});
|
||||
});
|
||||
/**
|
||||
* For an {@link AndExp}, it goes through all its subexpressions (up to fourth) and checks their type and returns them as a typecasted array.
|
||||
* Each subexpression can have their own type.
|
||||
*
|
||||
* @param exp the expression containing one or more subexpressions.
|
||||
* @param firstType the type of the first subexpression.
|
||||
* @param secondType the type of the second subexpression.
|
||||
* @param thirdType the type of the third subexpression.
|
||||
* @param fourthType the type of the fourth subexpression.
|
||||
* @returns an array of all the subexpressions (in order) typecasted to their expected type.
|
||||
*/
|
||||
function expectSubexpressions<FirstT extends Expression,
|
||||
SecondT extends Expression,
|
||||
ThirdT extends Expression,
|
||||
FourthT extends Expression>(
|
||||
exp: AndExp,
|
||||
firstType: ClassType<FirstT>,
|
||||
secondType?: ClassType<SecondT>,
|
||||
thirdType?: ClassType<ThirdT>,
|
||||
fourthType?: ClassType<FourthT>): [ FirstT, SecondT, ThirdT, FourthT ]
|
||||
{
|
||||
expectExpression(exp.subExpressions[0], firstType);
|
||||
if (secondType) {
|
||||
expectExpression(exp.subExpressions[1], secondType);
|
||||
}
|
||||
if (thirdType) {
|
||||
expectExpression(exp.subExpressions[2], thirdType);
|
||||
}
|
||||
if (fourthType) {
|
||||
expectExpression(exp.subExpressions[3], fourthType);
|
||||
}
|
||||
return [
|
||||
exp.subExpressions[0] as FirstT,
|
||||
exp.subExpressions[1] as SecondT,
|
||||
exp.subExpressions[2] as ThirdT,
|
||||
exp.subExpressions[3] as FourthT
|
||||
]
|
||||
}
|
||||
|
@@ -6,195 +6,177 @@ import dateUtils from "../../src/services/date_utils.js";
import becca from "../../src/becca/becca.js";
import becca_mocking from "./becca_mocking.js";

describe('Search', () => {
describe("Search", () => {
let rootNote: any;

beforeEach(() => {
becca.reset();

rootNote = new becca_mocking.NoteBuilder(new BNote({ noteId: 'root', title: 'root', type: 'text' }));
rootNote = new becca_mocking.NoteBuilder(new BNote({ noteId: "root", title: "root", type: "text" }));
new BBranch({
branchId: 'none_root',
noteId: 'root',
parentNoteId: 'none',
notePosition: 10,
branchId: "none_root",
noteId: "root",
parentNoteId: "none",
notePosition: 10
});
});

xit('simple path match', () => {
rootNote.child(becca_mocking.note('Europe').child(becca_mocking.note('Austria')));
xit("simple path match", () => {
rootNote.child(becca_mocking.note("Europe").child(becca_mocking.note("Austria")));

const searchContext = new SearchContext();
const searchResults = searchService.findResultsWithQuery('europe austria', searchContext);
const searchResults = searchService.findResultsWithQuery("europe austria", searchContext);

expect(searchResults.length).toEqual(1);
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
});

||||
xit('normal search looks also at attributes', () => {
|
||||
const austria = becca_mocking.note('Austria');
|
||||
const vienna = becca_mocking.note('Vienna');
|
||||
xit("normal search looks also at attributes", () => {
|
||||
const austria = becca_mocking.note("Austria");
|
||||
const vienna = becca_mocking.note("Vienna");
|
||||
|
||||
rootNote.child(austria.relation('capital', vienna.note)).child(vienna.label('inhabitants', '1888776'));
|
||||
rootNote.child(austria.relation("capital", vienna.note)).child(vienna.label("inhabitants", "1888776"));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
let searchResults = searchService.findResultsWithQuery('capital', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("capital", searchContext);
|
||||
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('inhabitants', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("inhabitants", searchContext);
|
||||
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Vienna')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Vienna")).toBeTruthy();
|
||||
});
|
||||
|
||||
xit('normal search looks also at type and mime', () => {
|
||||
rootNote
|
||||
.child(becca_mocking.note('Effective Java', { type: 'book', mime: '' }))
|
||||
.child(becca_mocking.note('Hello World.java', { type: 'code', mime: 'text/x-java' }));
|
||||
xit("normal search looks also at type and mime", () => {
|
||||
rootNote.child(becca_mocking.note("Effective Java", { type: "book", mime: "" })).child(becca_mocking.note("Hello World.java", { type: "code", mime: "text/x-java" }));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
let searchResults = searchService.findResultsWithQuery('book', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("book", searchContext);
|
||||
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Effective Java')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Effective Java")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('text', searchContext); // should match mime
|
||||
searchResults = searchService.findResultsWithQuery("text", searchContext); // should match mime
|
||||
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Hello World.java')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Hello World.java")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('java', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("java", searchContext);
|
||||
|
||||
expect(searchResults.length).toEqual(2);
|
||||
});
|
||||
|
||||
xit('only end leafs are results', () => {
|
||||
rootNote.child(becca_mocking.note('Europe').child(becca_mocking.note('Austria')));
|
||||
xit("only end leafs are results", () => {
|
||||
rootNote.child(becca_mocking.note("Europe").child(becca_mocking.note("Austria")));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
const searchResults = searchService.findResultsWithQuery('europe', searchContext);
|
||||
const searchResults = searchService.findResultsWithQuery("europe", searchContext);
|
||||
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Europe')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Europe")).toBeTruthy();
|
||||
});
|
||||
|
||||
xit('only end leafs are results', () => {
|
||||
rootNote.child(becca_mocking.note('Europe').child(becca_mocking.note('Austria').label('capital', 'Vienna')));
|
||||
xit("only end leafs are results", () => {
|
||||
rootNote.child(becca_mocking.note("Europe").child(becca_mocking.note("Austria").label("capital", "Vienna")));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
const searchResults = searchService.findResultsWithQuery('Vienna', searchContext);
|
||||
const searchResults = searchService.findResultsWithQuery("Vienna", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
});
|
||||
|
||||
it('label comparison with short syntax', () => {
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.child(becca_mocking.note('Austria').label('capital', 'Vienna'))
|
||||
.child(becca_mocking.note('Czech Republic').label('capital', 'Prague'))
|
||||
);
|
||||
it("label comparison with short syntax", () => {
|
||||
rootNote.child(becca_mocking.note("Europe").child(becca_mocking.note("Austria").label("capital", "Vienna")).child(becca_mocking.note("Czech Republic").label("capital", "Prague")));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('#capital=Vienna', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("#capital=Vienna", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
|
||||
// case sensitivity:
|
||||
searchResults = searchService.findResultsWithQuery('#CAPITAL=VIENNA', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("#CAPITAL=VIENNA", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('#caPItal=vienNa', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("#caPItal=vienNa", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
});
|
||||
|
||||
it('label comparison with full syntax', () => {
|
||||
it("label comparison with full syntax", () => {
|
||||
rootNote.child(becca_mocking.note("Europe").child(becca_mocking.note("Austria").label("capital", "Vienna")).child(becca_mocking.note("Czech Republic").label("capital", "Prague")));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery("# note.labels.capital=Prague", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Czech Republic")).toBeTruthy();
|
||||
});
|
||||
|
||||
it("numeric label comparison", () => {
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.child(becca_mocking.note('Austria').label('capital', 'Vienna'))
|
||||
.child(becca_mocking.note('Czech Republic').label('capital', 'Prague'))
|
||||
.note("Europe")
|
||||
.label("country", "", true)
|
||||
.child(becca_mocking.note("Austria").label("population", "8859000"))
|
||||
.child(becca_mocking.note("Czech Republic").label("population", "10650000"))
|
||||
);
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('# note.labels.capital=Prague', searchContext);
|
||||
const searchResults = searchService.findResultsWithQuery("#country #population >= 10000000", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Czech Republic')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Czech Republic")).toBeTruthy();
|
||||
});
|
||||
|
||||
it('numeric label comparison', () => {
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.label('country', '', true)
|
||||
.child(becca_mocking.note('Austria').label('population', '8859000'))
|
||||
.child(becca_mocking.note('Czech Republic').label('population', '10650000'))
|
||||
);
|
||||
xit("inherited label comparison", () => {
|
||||
rootNote.child(becca_mocking.note("Europe").label("country", "", true).child(becca_mocking.note("Austria")).child(becca_mocking.note("Czech Republic")));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
const searchResults = searchService.findResultsWithQuery('#country #population >= 10000000', searchContext);
|
||||
const searchResults = searchService.findResultsWithQuery("austria #country", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Czech Republic')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
});
|
||||
|
||||
xit('inherited label comparison', () => {
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.label('country', '', true)
|
||||
.child(becca_mocking.note('Austria'))
|
||||
.child(becca_mocking.note('Czech Republic'))
|
||||
);
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
const searchResults = searchService.findResultsWithQuery('austria #country', searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
});
|
||||
|
||||
it('numeric label comparison fallback to string comparison', () => {
|
||||
it("numeric label comparison fallback to string comparison", () => {
|
||||
// dates should not be coerced into numbers which would then give wrong numbers
|
||||
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.label('country', '', true)
|
||||
.child(becca_mocking.note('Austria').label('established', '1955-07-27'))
|
||||
.child(becca_mocking.note('Czech Republic').label('established', '1993-01-01'))
|
||||
.child(becca_mocking.note('Hungary').label('established', '1920-06-04'))
|
||||
.note("Europe")
|
||||
.label("country", "", true)
|
||||
.child(becca_mocking.note("Austria").label("established", "1955-07-27"))
|
||||
.child(becca_mocking.note("Czech Republic").label("established", "1993-01-01"))
|
||||
.child(becca_mocking.note("Hungary").label("established", "1920-06-04"))
|
||||
);
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('#established <= "1955-01-01"', searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Hungary')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Hungary")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('#established > "1955-01-01"', searchContext);
|
||||
expect(searchResults.length).toEqual(2);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Czech Republic')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Czech Republic")).toBeTruthy();
|
||||
});
|
||||
|
||||
it('smart date comparisons', () => {
|
||||
it("smart date comparisons", () => {
|
||||
// dates should not be coerced into numbers which would then give wrong numbers
|
||||
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('My note', { dateCreated: dateUtils.localNowDateTime() })
|
||||
.label('year', new Date().getFullYear().toString())
|
||||
.label('month', dateUtils.localNowDate().substr(0, 7))
|
||||
.label('date', dateUtils.localNowDate())
|
||||
.label('dateTime', dateUtils.localNowDateTime())
|
||||
.note("My note", { dateCreated: dateUtils.localNowDateTime() })
|
||||
.label("year", new Date().getFullYear().toString())
|
||||
.label("month", dateUtils.localNowDate().substr(0, 7))
|
||||
.label("date", dateUtils.localNowDate())
|
||||
.label("dateTime", dateUtils.localNowDateTime())
|
||||
);
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
@@ -206,263 +188,258 @@ describe('Search', () => {
|
||||
.toEqual(expectedResultCount);
|
||||
|
||||
if (expectedResultCount === 1) {
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'My note')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "My note")).toBeTruthy();
|
||||
}
|
||||
}
|
||||
|
||||
test('#year = YEAR', 1);
|
||||
test("#year = YEAR", 1);
|
||||
test("#year = 'YEAR'", 0);
|
||||
test('#year >= YEAR', 1);
|
||||
test('#year <= YEAR', 1);
|
||||
test('#year < YEAR+1', 1);
|
||||
test('#year < YEAR + 1', 1);
|
||||
test('#year < year + 1', 1);
|
||||
test('#year > YEAR+1', 0);
|
||||
test("#year >= YEAR", 1);
|
||||
test("#year <= YEAR", 1);
|
||||
test("#year < YEAR+1", 1);
|
||||
test("#year < YEAR + 1", 1);
|
||||
test("#year < year + 1", 1);
|
||||
test("#year > YEAR+1", 0);
|
||||
|
||||
test('#month = MONTH', 1);
|
||||
test('#month = month', 1);
|
||||
test("#month = MONTH", 1);
|
||||
test("#month = month", 1);
|
||||
test("#month = 'MONTH'", 0);
|
||||
|
||||
test('note.dateCreated =* month', 2);
|
||||
test("note.dateCreated =* month", 2);
|
||||
|
||||
test('#date = TODAY', 1);
|
||||
test('#date = today', 1);
|
||||
test("#date = TODAY", 1);
|
||||
test("#date = today", 1);
|
||||
test("#date = 'today'", 0);
|
||||
test('#date > TODAY', 0);
|
||||
test('#date > TODAY-1', 1);
|
||||
test('#date > TODAY - 1', 1);
|
||||
test('#date < TODAY+1', 1);
|
||||
test('#date < TODAY + 1', 1);
|
||||
test("#date > TODAY", 0);
|
||||
test("#date > TODAY-1", 1);
|
||||
test("#date > TODAY - 1", 1);
|
||||
test("#date < TODAY+1", 1);
|
||||
test("#date < TODAY + 1", 1);
|
||||
test("#date < 'TODAY + 1'", 1);
|
||||
|
||||
test('#dateTime <= NOW+10', 1);
|
||||
test('#dateTime <= NOW + 10', 1);
|
||||
test('#dateTime < NOW-10', 0);
|
||||
test('#dateTime >= NOW-10', 1);
|
||||
test('#dateTime < NOW-10', 0);
|
||||
test("#dateTime <= NOW+10", 1);
|
||||
test("#dateTime <= NOW + 10", 1);
|
||||
test("#dateTime < NOW-10", 0);
|
||||
test("#dateTime >= NOW-10", 1);
|
||||
test("#dateTime < NOW-10", 0);
|
||||
});
|
||||
|
||||
it('logical or', () => {
|
||||
it("logical or", () => {
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.label('country', '', true)
|
||||
.child(becca_mocking.note('Austria').label('languageFamily', 'germanic'))
|
||||
.child(becca_mocking.note('Czech Republic').label('languageFamily', 'slavic'))
|
||||
.child(becca_mocking.note('Hungary').label('languageFamily', 'finnougric'))
|
||||
.note("Europe")
|
||||
.label("country", "", true)
|
||||
.child(becca_mocking.note("Austria").label("languageFamily", "germanic"))
|
||||
.child(becca_mocking.note("Czech Republic").label("languageFamily", "slavic"))
|
||||
.child(becca_mocking.note("Hungary").label("languageFamily", "finnougric"))
|
||||
);
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
const searchResults = searchService.findResultsWithQuery('#languageFamily = slavic OR #languageFamily = germanic', searchContext);
|
||||
const searchResults = searchService.findResultsWithQuery("#languageFamily = slavic OR #languageFamily = germanic", searchContext);
|
||||
expect(searchResults.length).toEqual(2);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Czech Republic')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Czech Republic")).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
});
|
||||
|
||||
it('fuzzy attribute search', () => {
|
||||
it("fuzzy attribute search", () => {
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.label('country', '', true)
|
||||
.child(becca_mocking.note('Austria').label('languageFamily', 'germanic'))
|
||||
.child(becca_mocking.note('Czech Republic').label('languageFamily', 'slavic'))
|
||||
.note("Europe")
|
||||
.label("country", "", true)
|
||||
.child(becca_mocking.note("Austria").label("languageFamily", "germanic"))
|
||||
.child(becca_mocking.note("Czech Republic").label("languageFamily", "slavic"))
|
||||
);
|
||||
|
||||
let searchContext = new SearchContext({ fuzzyAttributeSearch: false });
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('#language', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("#language", searchContext);
|
||||
expect(searchResults.length).toEqual(0);
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('#languageFamily=ger', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("#languageFamily=ger", searchContext);
|
||||
expect(searchResults.length).toEqual(0);
|
||||
|
||||
searchContext = new SearchContext({ fuzzyAttributeSearch: true });
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('#language', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("#language", searchContext);
|
||||
expect(searchResults.length).toEqual(2);
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('#languageFamily=ger', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("#languageFamily=ger", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
});
|
||||
|
||||
it('filter by note property', () => {
|
||||
rootNote.child(becca_mocking.note('Europe').child(becca_mocking.note('Austria')).child(becca_mocking.note('Czech Republic')));
|
||||
it("filter by note property", () => {
|
||||
rootNote.child(becca_mocking.note("Europe").child(becca_mocking.note("Austria")).child(becca_mocking.note("Czech Republic")));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
const searchResults = searchService.findResultsWithQuery('# note.title =* czech', searchContext);
|
||||
const searchResults = searchService.findResultsWithQuery("# note.title =* czech", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Czech Republic')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Czech Republic")).toBeTruthy();
|
||||
});
|
||||
|
||||
it("filter by note's parent", () => {
|
||||
rootNote
|
||||
.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.child(becca_mocking.note('Austria'))
|
||||
.child(becca_mocking.note('Czech Republic').child(becca_mocking.note('Prague')))
|
||||
.note("Europe")
|
||||
.child(becca_mocking.note("Austria"))
|
||||
.child(becca_mocking.note("Czech Republic").child(becca_mocking.note("Prague")))
|
||||
)
|
||||
.child(becca_mocking.note('Asia').child(becca_mocking.note('Taiwan')));
|
||||
.child(becca_mocking.note("Asia").child(becca_mocking.note("Taiwan")));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('# note.parents.title = Europe', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("# note.parents.title = Europe", searchContext);
|
||||
expect(searchResults.length).toEqual(2);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Czech Republic')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Czech Republic")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('# note.parents.title = Asia', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("# note.parents.title = Asia", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Taiwan')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Taiwan")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('# note.parents.parents.title = Europe', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("# note.parents.parents.title = Europe", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Prague')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Prague")).toBeTruthy();
|
||||
});
|
||||
|
||||
it("filter by note's ancestor", () => {
|
||||
rootNote
|
||||
.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.child(becca_mocking.note('Austria'))
|
||||
.child(becca_mocking.note('Czech Republic').child(becca_mocking.note('Prague').label('city')))
|
||||
.note("Europe")
|
||||
.child(becca_mocking.note("Austria"))
|
||||
.child(becca_mocking.note("Czech Republic").child(becca_mocking.note("Prague").label("city")))
|
||||
)
|
||||
.child(becca_mocking.note('Asia').child(becca_mocking.note('Taiwan').child(becca_mocking.note('Taipei').label('city'))));
|
||||
.child(becca_mocking.note("Asia").child(becca_mocking.note("Taiwan").child(becca_mocking.note("Taipei").label("city"))));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('#city AND note.ancestors.title = Europe', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("#city AND note.ancestors.title = Europe", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Prague')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Prague")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('#city AND note.ancestors.title = Asia', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("#city AND note.ancestors.title = Asia", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Taipei')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Taipei")).toBeTruthy();
|
||||
});
|
||||
|
||||
it("filter by note's child", () => {
|
||||
rootNote
|
||||
.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.child(becca_mocking.note('Austria').child(becca_mocking.note('Vienna')))
|
||||
.child(becca_mocking.note('Czech Republic').child(becca_mocking.note('Prague')))
|
||||
.note("Europe")
|
||||
.child(becca_mocking.note("Austria").child(becca_mocking.note("Vienna")))
|
||||
.child(becca_mocking.note("Czech Republic").child(becca_mocking.note("Prague")))
|
||||
)
|
||||
.child(becca_mocking.note('Oceania').child(becca_mocking.note('Australia')));
|
||||
.child(becca_mocking.note("Oceania").child(becca_mocking.note("Australia")));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('# note.children.title =* Aust', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("# note.children.title =* Aust", searchContext);
|
||||
expect(searchResults.length).toEqual(2);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Europe')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Oceania')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Europe")).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Oceania")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery(
|
||||
'# note.children.title =* Aust AND note.children.title *= republic',
|
||||
searchContext
|
||||
);
|
||||
searchResults = searchService.findResultsWithQuery("# note.children.title =* Aust AND note.children.title *= republic", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Europe')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Europe")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('# note.children.children.title = Prague', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("# note.children.children.title = Prague", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Europe')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Europe")).toBeTruthy();
|
||||
});
|
||||
|
||||
it("filter by relation's note properties using short syntax", () => {
|
||||
const austria = becca_mocking.note('Austria');
|
||||
const portugal = becca_mocking.note('Portugal');
|
||||
const austria = becca_mocking.note("Austria");
|
||||
const portugal = becca_mocking.note("Portugal");
|
||||
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.note("Europe")
|
||||
.child(austria)
|
||||
.child(becca_mocking.note('Czech Republic').relation('neighbor', austria.note))
|
||||
.child(becca_mocking.note("Czech Republic").relation("neighbor", austria.note))
|
||||
.child(portugal)
|
||||
.child(becca_mocking.note('Spain').relation('neighbor', portugal.note))
|
||||
.child(becca_mocking.note("Spain").relation("neighbor", portugal.note))
|
||||
);
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('# ~neighbor.title = Austria', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("# ~neighbor.title = Austria", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Czech Republic')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Czech Republic")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('# ~neighbor.title = Portugal', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("# ~neighbor.title = Portugal", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Spain')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Spain")).toBeTruthy();
|
||||
});
|
||||
|
||||
it("filter by relation's note properties using long syntax", () => {
|
||||
const austria = becca_mocking.note('Austria');
|
||||
const portugal = becca_mocking.note('Portugal');
|
||||
const austria = becca_mocking.note("Austria");
|
||||
const portugal = becca_mocking.note("Portugal");
|
||||
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.note("Europe")
|
||||
.child(austria)
|
||||
.child(becca_mocking.note('Czech Republic').relation('neighbor', austria.note))
|
||||
.child(becca_mocking.note("Czech Republic").relation("neighbor", austria.note))
|
||||
.child(portugal)
|
||||
.child(becca_mocking.note('Spain').relation('neighbor', portugal.note))
|
||||
.child(becca_mocking.note("Spain").relation("neighbor", portugal.note))
|
||||
);
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
const searchResults = searchService.findResultsWithQuery('# note.relations.neighbor.title = Austria', searchContext);
|
||||
const searchResults = searchService.findResultsWithQuery("# note.relations.neighbor.title = Austria", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Czech Republic')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Czech Republic")).toBeTruthy();
|
||||
});
|
||||
|
||||
it('filter by multiple level relation', () => {
|
||||
const austria = becca_mocking.note('Austria');
|
||||
const slovakia = becca_mocking.note('Slovakia');
|
||||
const italy = becca_mocking.note('Italy');
|
||||
const ukraine = becca_mocking.note('Ukraine');
|
||||
it("filter by multiple level relation", () => {
|
||||
const austria = becca_mocking.note("Austria");
|
||||
const slovakia = becca_mocking.note("Slovakia");
|
||||
const italy = becca_mocking.note("Italy");
|
||||
const ukraine = becca_mocking.note("Ukraine");
|
||||
|
||||
rootNote.child(
|
||||
becca_mocking
|
||||
.note('Europe')
|
||||
.child(austria.relation('neighbor', italy.note).relation('neighbor', slovakia.note))
|
||||
.child(becca_mocking.note('Czech Republic').relation('neighbor', austria.note).relation('neighbor', slovakia.note))
|
||||
.child(slovakia.relation('neighbor', ukraine.note))
|
||||
.note("Europe")
|
||||
.child(austria.relation("neighbor", italy.note).relation("neighbor", slovakia.note))
|
||||
.child(becca_mocking.note("Czech Republic").relation("neighbor", austria.note).relation("neighbor", slovakia.note))
|
||||
.child(slovakia.relation("neighbor", ukraine.note))
|
||||
.child(ukraine)
|
||||
);
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('# note.relations.neighbor.relations.neighbor.title = Italy', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("# note.relations.neighbor.relations.neighbor.title = Italy", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Czech Republic')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Czech Republic")).toBeTruthy();
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('# note.relations.neighbor.relations.neighbor.title = Ukraine', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("# note.relations.neighbor.relations.neighbor.title = Ukraine", searchContext);
|
||||
expect(searchResults.length).toEqual(2);
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Czech Republic')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, 'Austria')).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Czech Republic")).toBeTruthy();
|
||||
expect(becca_mocking.findNoteByTitle(searchResults, "Austria")).toBeTruthy();
|
||||
});
|
||||
|
||||
it('test note properties', () => {
|
||||
const austria = becca_mocking.note('Austria');
|
||||
it("test note properties", () => {
|
||||
const austria = becca_mocking.note("Austria");
|
||||
|
||||
austria.relation('myself', austria.note);
|
||||
austria.label('capital', 'Vienna');
|
||||
austria.label('population', '8859000');
|
||||
austria.relation("myself", austria.note);
|
||||
austria.label("capital", "Vienna");
|
||||
austria.label("population", "8859000");
|
||||
|
||||
rootNote
|
||||
.child(becca_mocking.note('Asia'))
|
||||
.child(
|
||||
becca_mocking.note('Europe').child(austria.child(becca_mocking.note('Vienna')).child(becca_mocking.note('Sebastian Kurz')))
|
||||
)
|
||||
.child(becca_mocking.note('Mozart').child(austria));
|
||||
.child(becca_mocking.note("Asia"))
|
||||
.child(becca_mocking.note("Europe").child(austria.child(becca_mocking.note("Vienna")).child(becca_mocking.note("Sebastian Kurz"))))
|
||||
.child(becca_mocking.note("Mozart").child(austria));
|
||||
|
||||
austria.note.isProtected = false;
|
||||
austria.note.dateCreated = '2020-05-14 12:11:42.001+0200';
|
||||
austria.note.dateModified = '2020-05-14 13:11:42.001+0200';
|
||||
austria.note.utcDateCreated = '2020-05-14 10:11:42.001Z';
|
||||
austria.note.utcDateModified = '2020-05-14 11:11:42.001Z';
|
||||
austria.note.dateCreated = "2020-05-14 12:11:42.001+0200";
|
||||
austria.note.dateModified = "2020-05-14 13:11:42.001+0200";
|
||||
austria.note.utcDateCreated = "2020-05-14 10:11:42.001Z";
|
||||
austria.note.utcDateModified = "2020-05-14 11:11:42.001Z";
|
||||
// austria.note.contentLength = 1001;
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
@@ -472,141 +449,130 @@ describe('Search', () => {
|
||||
expect(searchResults.length).toEqual(expectedResultCount);
|
||||
}
|
||||
|
||||
test('type', 'text', 7);
|
||||
test('TYPE', 'TEXT', 7);
|
||||
test('type', 'code', 0);
|
||||
test("type", "text", 7);
|
||||
test("TYPE", "TEXT", 7);
|
||||
test("type", "code", 0);
|
||||
|
||||
test('mime', 'text/html', 6);
|
||||
test('mime', 'application/json', 0);
|
||||
test("mime", "text/html", 6);
|
||||
test("mime", "application/json", 0);
|
||||
|
||||
test('isProtected', 'false', 7);
|
||||
test('isProtected', 'FALSE', 7);
|
||||
test('isProtected', 'true', 0);
|
||||
test('isProtected', 'TRUE', 0);
|
||||
test("isProtected", "false", 7);
|
||||
test("isProtected", "FALSE", 7);
|
||||
test("isProtected", "true", 0);
|
||||
test("isProtected", "TRUE", 0);
|
||||
|
||||
test('dateCreated', "'2020-05-14 12:11:42.001+0200'", 1);
|
||||
test('dateCreated', 'wrong', 0);
|
||||
test("dateCreated", "'2020-05-14 12:11:42.001+0200'", 1);
|
||||
test("dateCreated", "wrong", 0);
|
||||
|
||||
test('dateModified', "'2020-05-14 13:11:42.001+0200'", 1);
|
||||
test('dateModified', 'wrong', 0);
|
||||
test("dateModified", "'2020-05-14 13:11:42.001+0200'", 1);
|
||||
test("dateModified", "wrong", 0);
|
||||
|
||||
test('utcDateCreated', "'2020-05-14 10:11:42.001Z'", 1);
|
||||
test('utcDateCreated', 'wrong', 0);
|
||||
test("utcDateCreated", "'2020-05-14 10:11:42.001Z'", 1);
|
||||
test("utcDateCreated", "wrong", 0);
|
||||
|
||||
test('utcDateModified', "'2020-05-14 11:11:42.001Z'", 1);
|
||||
test('utcDateModified', 'wrong', 0);
|
||||
test("utcDateModified", "'2020-05-14 11:11:42.001Z'", 1);
|
||||
test("utcDateModified", "wrong", 0);
|
||||
|
||||
test('parentCount', '2', 1);
|
||||
test('parentCount', '3', 0);
|
||||
test("parentCount", "2", 1);
|
||||
test("parentCount", "3", 0);
|
||||
|
||||
test('childrenCount', '2', 1);
|
||||
test('childrenCount', '10', 0);
|
||||
test("childrenCount", "2", 1);
|
||||
test("childrenCount", "10", 0);
|
||||
|
||||
test('attributeCount', '3', 1);
|
||||
test('attributeCount', '4', 0);
|
||||
test("attributeCount", "3", 1);
|
||||
test("attributeCount", "4", 0);
|
||||
|
||||
test('labelCount', '2', 1);
|
||||
test('labelCount', '3', 0);
|
||||
test("labelCount", "2", 1);
|
||||
test("labelCount", "3", 0);
|
||||
|
||||
test('relationCount', '1', 1);
|
||||
test('relationCount', '2', 0);
|
||||
test("relationCount", "1", 1);
|
||||
test("relationCount", "2", 0);
|
||||
});
|
||||
|
||||
it('test order by', () => {
|
||||
const italy = becca_mocking.note('Italy').label('capital', 'Rome');
|
||||
const slovakia = becca_mocking.note('Slovakia').label('capital', 'Bratislava');
|
||||
const austria = becca_mocking.note('Austria').label('capital', 'Vienna');
|
||||
const ukraine = becca_mocking.note('Ukraine').label('capital', 'Kiev');
|
||||
it("test order by", () => {
|
||||
const italy = becca_mocking.note("Italy").label("capital", "Rome");
|
||||
const slovakia = becca_mocking.note("Slovakia").label("capital", "Bratislava");
|
||||
const austria = becca_mocking.note("Austria").label("capital", "Vienna");
|
||||
const ukraine = becca_mocking.note("Ukraine").label("capital", "Kiev");
|
||||
|
||||
rootNote.child(becca_mocking.note('Europe').child(ukraine).child(slovakia).child(austria).child(italy));
|
||||
rootNote.child(becca_mocking.note("Europe").child(ukraine).child(slovakia).child(austria).child(italy));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('# note.parents.title = Europe orderBy note.title', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("# note.parents.title = Europe orderBy note.title", searchContext);
|
||||
expect(searchResults.length).toEqual(4);
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual('Austria');
|
||||
expect(becca.notes[searchResults[1].noteId].title).toEqual('Italy');
|
||||
expect(becca.notes[searchResults[2].noteId].title).toEqual('Slovakia');
|
||||
expect(becca.notes[searchResults[3].noteId].title).toEqual('Ukraine');
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual("Austria");
|
||||
expect(becca.notes[searchResults[1].noteId].title).toEqual("Italy");
|
||||
expect(becca.notes[searchResults[2].noteId].title).toEqual("Slovakia");
|
||||
expect(becca.notes[searchResults[3].noteId].title).toEqual("Ukraine");
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('# note.parents.title = Europe orderBy note.labels.capital', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("# note.parents.title = Europe orderBy note.labels.capital", searchContext);
|
||||
expect(searchResults.length).toEqual(4);
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual('Slovakia');
|
||||
expect(becca.notes[searchResults[1].noteId].title).toEqual('Ukraine');
|
||||
expect(becca.notes[searchResults[2].noteId].title).toEqual('Italy');
|
||||
expect(becca.notes[searchResults[3].noteId].title).toEqual('Austria');
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual("Slovakia");
|
||||
expect(becca.notes[searchResults[1].noteId].title).toEqual("Ukraine");
|
||||
expect(becca.notes[searchResults[2].noteId].title).toEqual("Italy");
|
||||
expect(becca.notes[searchResults[3].noteId].title).toEqual("Austria");
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('# note.parents.title = Europe orderBy note.labels.capital DESC', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("# note.parents.title = Europe orderBy note.labels.capital DESC", searchContext);
|
||||
expect(searchResults.length).toEqual(4);
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual('Austria');
|
||||
expect(becca.notes[searchResults[1].noteId].title).toEqual('Italy');
|
||||
expect(becca.notes[searchResults[2].noteId].title).toEqual('Ukraine');
|
||||
expect(becca.notes[searchResults[3].noteId].title).toEqual('Slovakia');
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual("Austria");
|
||||
expect(becca.notes[searchResults[1].noteId].title).toEqual("Italy");
|
||||
expect(becca.notes[searchResults[2].noteId].title).toEqual("Ukraine");
|
||||
expect(becca.notes[searchResults[3].noteId].title).toEqual("Slovakia");
|
||||
|
||||
searchResults = searchService.findResultsWithQuery(
|
||||
'# note.parents.title = Europe orderBy note.labels.capital DESC limit 2',
|
||||
searchContext
|
||||
);
|
||||
searchResults = searchService.findResultsWithQuery("# note.parents.title = Europe orderBy note.labels.capital DESC limit 2", searchContext);
|
||||
expect(searchResults.length).toEqual(2);
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual('Austria');
|
||||
expect(becca.notes[searchResults[1].noteId].title).toEqual('Italy');
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual("Austria");
|
||||
expect(becca.notes[searchResults[1].noteId].title).toEqual("Italy");
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('# note.parents.title = Europe orderBy #capital DESC limit 1', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("# note.parents.title = Europe orderBy #capital DESC limit 1", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('# note.parents.title = Europe orderBy #capital DESC limit 1000', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("# note.parents.title = Europe orderBy #capital DESC limit 1000", searchContext);
|
||||
expect(searchResults.length).toEqual(4);
|
||||
});
|
||||
|
||||
it('test not(...)', () => {
|
||||
const italy = becca_mocking.note('Italy').label('capital', 'Rome');
|
||||
const slovakia = becca_mocking.note('Slovakia').label('capital', 'Bratislava');
|
||||
it("test not(...)", () => {
|
||||
const italy = becca_mocking.note("Italy").label("capital", "Rome");
|
||||
const slovakia = becca_mocking.note("Slovakia").label("capital", "Bratislava");
|
||||
|
||||
rootNote.child(becca_mocking.note('Europe').child(slovakia).child(italy));
|
||||
rootNote.child(becca_mocking.note("Europe").child(slovakia).child(italy));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('# not(#capital) and note.noteId != root', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("# not(#capital) and note.noteId != root", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual('Europe');
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual("Europe");
|
||||
|
||||
searchResults = searchService.findResultsWithQuery('#!capital and note.noteId != root', searchContext);
|
||||
searchResults = searchService.findResultsWithQuery("#!capital and note.noteId != root", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual('Europe');
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual("Europe");
|
||||
});
|
||||
|
||||
xit('test note.text *=* something', () => {
|
||||
const italy = becca_mocking.note('Italy').label('capital', 'Rome');
|
||||
const slovakia = becca_mocking.note('Slovakia').label('capital', 'Bratislava');
|
||||
xit("test note.text *=* something", () => {
|
||||
const italy = becca_mocking.note("Italy").label("capital", "Rome");
|
||||
const slovakia = becca_mocking.note("Slovakia").label("capital", "Bratislava");
|
||||
|
||||
rootNote.child(becca_mocking.note('Europe').child(slovakia).child(italy));
|
||||
rootNote.child(becca_mocking.note("Europe").child(slovakia).child(italy));
|
||||
|
||||
const searchContext = new SearchContext();
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('# note.text *=* vaki and note.noteId != root', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("# note.text *=* vaki and note.noteId != root", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual('Slovakia');
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual("Slovakia");
|
||||
});
|
||||
|
||||
xit('test that fulltext does not match archived notes', () => {
|
||||
const italy = becca_mocking.note('Italy').label('capital', 'Rome');
|
||||
const slovakia = becca_mocking.note('Slovakia').label('capital', 'Bratislava');
|
||||
xit("test that fulltext does not match archived notes", () => {
|
||||
const italy = becca_mocking.note("Italy").label("capital", "Rome");
|
||||
const slovakia = becca_mocking.note("Slovakia").label("capital", "Bratislava");
|
||||
|
||||
rootNote
|
||||
.child(
|
||||
becca_mocking
|
||||
.note('Reddit')
|
||||
.label('archived', '', true)
|
||||
.child(becca_mocking.note('Post X'))
|
||||
.child(becca_mocking.note('Post Y'))
|
||||
)
|
||||
.child(becca_mocking.note('Reddit is bad'));
|
||||
rootNote.child(becca_mocking.note("Reddit").label("archived", "", true).child(becca_mocking.note("Post X")).child(becca_mocking.note("Post Y"))).child(becca_mocking.note("Reddit is bad"));
|
||||
|
||||
const searchContext = new SearchContext({ includeArchivedNotes: false });
|
||||
|
||||
let searchResults = searchService.findResultsWithQuery('reddit', searchContext);
|
||||
let searchResults = searchService.findResultsWithQuery("reddit", searchContext);
|
||||
expect(searchResults.length).toEqual(1);
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual('Reddit is bad');
|
||||
expect(becca.notes[searchResults[0].noteId].title).toEqual("Reddit is bad");
|
||||
});
|
||||
|
||||
// FIXME: test what happens when we order without any filter criteria
|
||||
|
||||
@@ -5,77 +5,74 @@ import SearchContext from "../../src/services/search/search_context.js";
|
||||
|
||||
const dsc = new SearchContext();
|
||||
|
||||
describe('Value extractor', () => {
|
||||
describe("Value extractor", () => {
|
||||
beforeEach(() => {
|
||||
becca.reset();
|
||||
});
|
||||
|
||||
it('simple title extraction', async () => {
|
||||
const europe = becca_mocking.note('Europe').note;
|
||||
it("simple title extraction", async () => {
|
||||
const europe = becca_mocking.note("Europe").note;
|
||||
|
||||
const valueExtractor = new ValueExtractor(dsc, ['note', 'title']);
|
||||
const valueExtractor = new ValueExtractor(dsc, ["note", "title"]);
|
||||
|
||||
expect(valueExtractor.validate()).toBeFalsy();
|
||||
expect(valueExtractor.extract(europe)).toEqual('Europe');
|
||||
expect(valueExtractor.extract(europe)).toEqual("Europe");
|
||||
});
|
||||
|
||||
it('label extraction', async () => {
|
||||
const austria = becca_mocking.note('Austria').label('Capital', 'Vienna').note;
|
||||
it("label extraction", async () => {
|
||||
const austria = becca_mocking.note("Austria").label("Capital", "Vienna").note;
|
||||
|
||||
let valueExtractor = new ValueExtractor(dsc, ['note', 'labels', 'capital']);
|
||||
let valueExtractor = new ValueExtractor(dsc, ["note", "labels", "capital"]);
|
||||
|
||||
expect(valueExtractor.validate()).toBeFalsy();
|
||||
expect(valueExtractor.extract(austria)).toEqual('Vienna');
|
||||
expect(valueExtractor.extract(austria)).toEqual("Vienna");
|
||||
|
||||
valueExtractor = new ValueExtractor(dsc, ['#capital']);
|
||||
valueExtractor = new ValueExtractor(dsc, ["#capital"]);
|
||||
|
||||
expect(valueExtractor.validate()).toBeFalsy();
|
||||
expect(valueExtractor.extract(austria)).toEqual('Vienna');
|
||||
expect(valueExtractor.extract(austria)).toEqual("Vienna");
|
||||
});
|
||||
|
||||
it('parent/child property extraction', async () => {
|
||||
const vienna = becca_mocking.note('Vienna');
|
||||
const europe = becca_mocking.note('Europe').child(becca_mocking.note('Austria').child(vienna));
|
||||
it("parent/child property extraction", async () => {
|
||||
const vienna = becca_mocking.note("Vienna");
|
||||
const europe = becca_mocking.note("Europe").child(becca_mocking.note("Austria").child(vienna));
|
||||
|
||||
let valueExtractor = new ValueExtractor(dsc, ['note', 'children', 'children', 'title']);
|
||||
let valueExtractor = new ValueExtractor(dsc, ["note", "children", "children", "title"]);
|
||||
|
||||
expect(valueExtractor.validate()).toBeFalsy();
|
||||
expect(valueExtractor.extract(europe.note)).toEqual('Vienna');
|
||||
expect(valueExtractor.extract(europe.note)).toEqual("Vienna");
|
||||
|
||||
valueExtractor = new ValueExtractor(dsc, ['note', 'parents', 'parents', 'title']);
|
||||
valueExtractor = new ValueExtractor(dsc, ["note", "parents", "parents", "title"]);
|
||||
|
||||
expect(valueExtractor.validate()).toBeFalsy();
|
||||
expect(valueExtractor.extract(vienna.note)).toEqual('Europe');
|
||||
expect(valueExtractor.extract(vienna.note)).toEqual("Europe");
|
||||
});
|
||||
|
||||
it('extract through relation', async () => {
|
||||
const czechRepublic = becca_mocking.note('Czech Republic').label('capital', 'Prague');
|
||||
const slovakia = becca_mocking.note('Slovakia').label('capital', 'Bratislava');
|
||||
const austria = becca_mocking.note('Austria').relation('neighbor', czechRepublic.note).relation('neighbor', slovakia.note);
|
||||
it("extract through relation", async () => {
|
||||
const czechRepublic = becca_mocking.note("Czech Republic").label("capital", "Prague");
|
||||
const slovakia = becca_mocking.note("Slovakia").label("capital", "Bratislava");
|
||||
const austria = becca_mocking.note("Austria").relation("neighbor", czechRepublic.note).relation("neighbor", slovakia.note);
|
||||
|
||||
let valueExtractor = new ValueExtractor(dsc, ['note', 'relations', 'neighbor', 'labels', 'capital']);
|
||||
let valueExtractor = new ValueExtractor(dsc, ["note", "relations", "neighbor", "labels", "capital"]);
|
||||
|
||||
expect(valueExtractor.validate()).toBeFalsy();
|
||||
expect(valueExtractor.extract(austria.note)).toEqual('Prague');
|
||||
expect(valueExtractor.extract(austria.note)).toEqual("Prague");
|
||||
|
||||
valueExtractor = new ValueExtractor(dsc, ['~neighbor', 'labels', 'capital']);
|
||||
valueExtractor = new ValueExtractor(dsc, ["~neighbor", "labels", "capital"]);
|
||||
|
||||
expect(valueExtractor.validate()).toBeFalsy();
|
||||
expect(valueExtractor.extract(austria.note)).toEqual('Prague');
|
||||
expect(valueExtractor.extract(austria.note)).toEqual("Prague");
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid value extractor property path', () => {
it('each path must start with "note" (or label/relation)', () => expect(new ValueExtractor(dsc, ['neighbor']).validate()).toBeTruthy());
describe("Invalid value extractor property path", () => {
it('each path must start with "note" (or label/relation)', () => expect(new ValueExtractor(dsc, ["neighbor"]).validate()).toBeTruthy());

it('extra path element after terminal label', () =>
expect(new ValueExtractor(dsc, ['~neighbor', 'labels', 'capital', 'noteId']).validate()).toBeTruthy());
it("extra path element after terminal label", () => expect(new ValueExtractor(dsc, ["~neighbor", "labels", "capital", "noteId"]).validate()).toBeTruthy());

it('extra path element after terminal title', () =>
expect(new ValueExtractor(dsc, ['note', 'title', 'isProtected']).validate()).toBeTruthy());
it("extra path element after terminal title", () => expect(new ValueExtractor(dsc, ["note", "title", "isProtected"]).validate()).toBeTruthy());

it('relation name and note property is missing', () => expect(new ValueExtractor(dsc, ['note', 'relations']).validate()).toBeTruthy());
it("relation name and note property is missing", () => expect(new ValueExtractor(dsc, ["note", "relations"]).validate()).toBeTruthy());

it('relation is specified but target note property is not specified', () =>
expect(new ValueExtractor(dsc, ['note', 'relations', 'myrel']).validate()).toBeTruthy());
it("relation is specified but target note property is not specified", () => expect(new ValueExtractor(dsc, ["note", "relations", "myrel"]).validate()).toBeTruthy());
});

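To make the path rules above concrete, here is a hypothetical sketch (not part of the patch) of evaluating a well-formed path with the same API the specs exercise:

// Illustrative only: validate() returns a falsy value for a well-formed path, per the specs above.
const capitalExtractor = new ValueExtractor(dsc, ["note", "labels", "capital"]);
if (!capitalExtractor.validate()) {
    // austriaNote stands for a mocked note carrying a #capital=Vienna label, as in the earlier tests.
    console.log(capitalExtractor.extract(austriaNote)); // "Vienna"
}
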
@@ -2,28 +2,20 @@ import child_process from "child_process";

let etapiAuthToken: string | undefined;

const getEtapiAuthorizationHeader = (): string =>
"Basic " + Buffer.from(`etapi:${etapiAuthToken}`).toString("base64");
const getEtapiAuthorizationHeader = (): string => "Basic " + Buffer.from(`etapi:${etapiAuthToken}`).toString("base64");

const PORT: string = "9999";
const HOST: string = "http://localhost:" + PORT;

type SpecDefinitionsFunc = () => void;

function describeEtapi(
description: string,
specDefinitions: SpecDefinitionsFunc
): void {
function describeEtapi(description: string, specDefinitions: SpecDefinitionsFunc): void {
describe(description, () => {
let appProcess: ReturnType<typeof child_process.spawn>;

beforeAll(async () => {
beforeAll(async () => {});

});

afterAll(() => {

});
afterAll(() => {});

specDefinitions();
});
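For orientation, a hypothetical sketch (not part of the patch) of how a spec might use describeEtapi; it assumes getEtapiResponse from the same module is in scope and that the spawned server serves the root note:

// Illustrative usage of describeEtapi; endpoint and expectation are assumptions.
describeEtapi("notes endpoint", () => {
    it("returns the root note", async () => {
        const response = await getEtapiResponse("notes/root");
        expect(response.status).toBe(200);
    });
});
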
|
||||
@@ -33,8 +25,8 @@ async function getEtapiResponse(url: string): Promise<Response> {
return await fetch(`${HOST}/etapi/${url}`, {
method: "GET",
headers: {
Authorization: getEtapiAuthorizationHeader(),
},
Authorization: getEtapiAuthorizationHeader()
}
});
}

@@ -47,8 +39,8 @@ async function getEtapiContent(url: string): Promise<Response> {
|
||||
const response = await fetch(`${HOST}/etapi/${url}`, {
|
||||
method: "GET",
|
||||
headers: {
|
||||
Authorization: getEtapiAuthorizationHeader(),
|
||||
},
|
||||
Authorization: getEtapiAuthorizationHeader()
|
||||
}
|
||||
});
|
||||
|
||||
checkStatus(response);
|
||||
@@ -56,32 +48,26 @@ async function getEtapiContent(url: string): Promise<Response> {
|
||||
return response;
|
||||
}
|
||||
|
||||
async function postEtapi(
|
||||
url: string,
|
||||
data: Record<string, unknown> = {}
|
||||
): Promise<any> {
|
||||
async function postEtapi(url: string, data: Record<string, unknown> = {}): Promise<any> {
|
||||
const response = await fetch(`${HOST}/etapi/${url}`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: getEtapiAuthorizationHeader(),
|
||||
Authorization: getEtapiAuthorizationHeader()
|
||||
},
|
||||
body: JSON.stringify(data),
|
||||
body: JSON.stringify(data)
|
||||
});
|
||||
return await processEtapiResponse(response);
|
||||
}
|
||||
|
||||
async function postEtapiContent(
|
||||
url: string,
|
||||
data: BodyInit
|
||||
): Promise<Response> {
|
||||
async function postEtapiContent(url: string, data: BodyInit): Promise<Response> {
|
||||
const response = await fetch(`${HOST}/etapi/${url}`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/octet-stream",
|
||||
Authorization: getEtapiAuthorizationHeader(),
|
||||
Authorization: getEtapiAuthorizationHeader()
|
||||
},
|
||||
body: data,
|
||||
body: data
|
||||
});
|
||||
|
||||
checkStatus(response);
|
||||
@@ -89,32 +75,26 @@ async function postEtapiContent(
|
||||
return response;
|
||||
}
|
||||
|
||||
async function putEtapi(
|
||||
url: string,
|
||||
data: Record<string, unknown> = {}
|
||||
): Promise<any> {
|
||||
async function putEtapi(url: string, data: Record<string, unknown> = {}): Promise<any> {
|
||||
const response = await fetch(`${HOST}/etapi/${url}`, {
|
||||
method: "PUT",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: getEtapiAuthorizationHeader(),
|
||||
Authorization: getEtapiAuthorizationHeader()
|
||||
},
|
||||
body: JSON.stringify(data),
|
||||
body: JSON.stringify(data)
|
||||
});
|
||||
return await processEtapiResponse(response);
|
||||
}
|
||||
|
||||
async function putEtapiContent(
|
||||
url: string,
|
||||
data?: BodyInit
|
||||
): Promise<Response> {
|
||||
async function putEtapiContent(url: string, data?: BodyInit): Promise<Response> {
|
||||
const response = await fetch(`${HOST}/etapi/${url}`, {
|
||||
method: "PUT",
|
||||
headers: {
|
||||
"Content-Type": "application/octet-stream",
|
||||
Authorization: getEtapiAuthorizationHeader(),
|
||||
Authorization: getEtapiAuthorizationHeader()
|
||||
},
|
||||
body: data,
|
||||
body: data
|
||||
});
|
||||
|
||||
checkStatus(response);
|
||||
@@ -122,17 +102,14 @@ async function putEtapiContent(
|
||||
return response;
|
||||
}
|
||||
|
||||
async function patchEtapi(
|
||||
url: string,
|
||||
data: Record<string, unknown> = {}
|
||||
): Promise<any> {
|
||||
async function patchEtapi(url: string, data: Record<string, unknown> = {}): Promise<any> {
|
||||
const response = await fetch(`${HOST}/etapi/${url}`, {
|
||||
method: "PATCH",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: getEtapiAuthorizationHeader(),
|
||||
Authorization: getEtapiAuthorizationHeader()
|
||||
},
|
||||
body: JSON.stringify(data),
|
||||
body: JSON.stringify(data)
|
||||
});
|
||||
return await processEtapiResponse(response);
|
||||
}
|
||||
@@ -141,8 +118,8 @@ async function deleteEtapi(url: string): Promise<any> {
|
||||
const response = await fetch(`${HOST}/etapi/${url}`, {
|
||||
method: "DELETE",
|
||||
headers: {
|
||||
Authorization: getEtapiAuthorizationHeader(),
|
||||
},
|
||||
Authorization: getEtapiAuthorizationHeader()
|
||||
}
|
||||
});
|
||||
return await processEtapiResponse(response);
|
||||
}
|
||||
@@ -173,5 +150,5 @@ export default {
putEtapi,
putEtapiContent,
patchEtapi,
deleteEtapi,
deleteEtapi
};
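As a minimal smoke-test sketch (an illustration only), the exported helpers might be used like this from a spec; the create-note payload shape and returned fields are assumptions, not part of this patch, and the helpers are assumed to be in scope (e.g., destructured from the default export):

// Hypothetical smoke test built on the helpers defined above.
it("creates a note via ETAPI and reads it back", async () => {
    const created = await postEtapi("create-note", {
        parentNoteId: "root",
        type: "text",
        title: "Smoke test note",
        content: "<p>hello</p>"
    });

    const response = await getEtapiResponse(`notes/${created.note.noteId}`);
    expect(response.status).toBe(200);
});
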
|
||||
|
||||
@@ -1,6 +1,9 @@
{
"spec_dir": "spec",
"spec_files": ["./**/*.spec.ts"],
"spec_dir": "",
"spec_files": [
"spec/**/*.spec.ts",
"src/**/*.spec.ts"
],
"helpers": ["helpers/**/*.js"],
"stopSpecOnExpectationFailure": false,
"random": true

@@ -5,8 +5,7 @@ describe("Utils", () => {
expect(trimIndentation`\
Hello
world
123`
).toBe(`\
123`).toBe(`\
Hello
world
123`);

@@ -3,7 +3,7 @@ export function trimIndentation(strings: TemplateStringsArray) {

// Count the number of spaces on the first line.
let numSpaces = 0;
while (str.charAt(numSpaces) == ' ' && numSpaces < str.length) {
while (str.charAt(numSpaces) == " " && numSpaces < str.length) {
numSpaces++;
}

@@ -12,7 +12,7 @@ export function trimIndentation(strings: TemplateStringsArray) {
const output = [];
for (let i = 0; i < lines.length; i++) {
let numSpacesLine = 0;
while (str.charAt(numSpacesLine) == ' ' && numSpacesLine < str.length) {
while (str.charAt(numSpacesLine) == " " && numSpacesLine < str.length) {
numSpacesLine++;
}
output.push(lines[i].substring(numSpacesLine));

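As a minimal usage sketch (an illustration, not part of the patch), the tagged template above is called like this; the exact de-indentation result is assumed to mirror the Utils spec shown earlier:

// Hypothetical call site for trimIndentation.
const text = trimIndentation`\
        Hello
        world`;
console.log(text); // expected, per the Utils spec: "Hello\nworld"
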
@@ -6,7 +6,7 @@ sqlInit.dbReady.then(async () => {
try {
console.log("Starting anonymization...");

const resp = await anonymizationService.createAnonymizedCopy('full');
const resp = await anonymizationService.createAnonymizedCopy("full");

if (resp.success) {
console.log(`Anonymized file has been saved to: ${resp.anonymizedFilePath}`);
@@ -15,8 +15,7 @@ sqlInit.dbReady.then(async () => {
} else {
console.log("Anonymization failed.");
}
}
catch (e: any) {
} catch (e: any) {
console.error(e.message, e.stack);
}

36
src/app.ts
@@ -16,8 +16,8 @@ import { startScheduledCleanup } from "./services/erase.js";
import sql_init from "./services/sql_init.js";
import { t } from "i18next";

await import('./services/handlers.js');
await import('./becca/becca_loader.js');
await import("./services/handlers.js");
await import("./becca/becca_loader.js");

const app = express();

@@ -27,8 +27,8 @@ const scriptDir = dirname(fileURLToPath(import.meta.url));
sql_init.initializeDb();

// view engine setup
app.set('views', path.join(scriptDir, 'views'));
app.set('view engine', 'ejs');
app.set("views", path.join(scriptDir, "views"));
app.set("view engine", "ejs");

app.use((req, res, next) => {
res.locals.t = t;

@@ -39,21 +39,23 @@ if (!utils.isElectron()) {
|
||||
app.use(compression()); // HTTP compression
|
||||
}
|
||||
|
||||
app.use(helmet({
|
||||
app.use(
|
||||
helmet({
|
||||
hidePoweredBy: false, // errors out in electron
|
||||
contentSecurityPolicy: false,
|
||||
crossOriginEmbedderPolicy: false
|
||||
}));
|
||||
})
|
||||
);
|
||||
|
||||
app.use(express.text({ limit: '500mb' }));
|
||||
app.use(express.json({ limit: '500mb' }));
|
||||
app.use(express.raw({ limit: '500mb' }));
|
||||
app.use(express.text({ limit: "500mb" }));
|
||||
app.use(express.json({ limit: "500mb" }));
|
||||
app.use(express.raw({ limit: "500mb" }));
|
||||
app.use(express.urlencoded({ extended: false }));
|
||||
app.use(cookieParser());
|
||||
app.use(express.static(path.join(scriptDir, 'public/root')));
|
||||
app.use(`/manifest.webmanifest`, express.static(path.join(scriptDir, 'public/manifest.webmanifest')));
|
||||
app.use(`/robots.txt`, express.static(path.join(scriptDir, 'public/robots.txt')));
|
||||
app.use(`/icon.png`, express.static(path.join(scriptDir, 'public/icon.png')));
|
||||
app.use(express.static(path.join(scriptDir, "public/root")));
|
||||
app.use(`/manifest.webmanifest`, express.static(path.join(scriptDir, "public/manifest.webmanifest")));
|
||||
app.use(`/robots.txt`, express.static(path.join(scriptDir, "public/robots.txt")));
|
||||
app.use(`/icon.png`, express.static(path.join(scriptDir, "public/icon.png")));
|
||||
app.use(sessionParser);
|
||||
app.use(favicon(`${scriptDir}/../images/app-icons/icon.ico`));
|
||||
|
||||
@@ -66,17 +68,17 @@ error_handlers.register(app);
|
||||
await import("./services/sync.js");
|
||||
|
||||
// triggers backup timer
|
||||
await import('./services/backup.js');
|
||||
await import("./services/backup.js");
|
||||
|
||||
// trigger consistency checks timer
|
||||
await import('./services/consistency_checks.js');
|
||||
await import("./services/consistency_checks.js");
|
||||
|
||||
await import('./services/scheduler.js');
|
||||
await import("./services/scheduler.js");
|
||||
|
||||
startScheduledCleanup();
|
||||
|
||||
if (utils.isElectron()) {
|
||||
(await import('@electron/remote/main/index.js')).initialize();
|
||||
(await import("@electron/remote/main/index.js")).initialize();
|
||||
}
|
||||
|
||||
export default app;
|
||||
|
||||
@@ -8,7 +8,7 @@ import BAttribute from "./entities/battribute.js";
|
||||
import BBranch from "./entities/bbranch.js";
|
||||
import BRevision from "./entities/brevision.js";
|
||||
import BAttachment from "./entities/battachment.js";
|
||||
import { AttachmentRow, BlobRow, RevisionRow } from './entities/rows.js';
|
||||
import type { AttachmentRow, BlobRow, RevisionRow } from "./entities/rows.js";
|
||||
import BBlob from "./entities/bblob.js";
|
||||
import BRecentNote from "./entities/brecent_note.js";
|
||||
import AbstractBeccaEntity from "./entities/abstract_becca_entity.js";
|
||||
@@ -55,13 +55,13 @@ export default class Becca {
|
||||
}
|
||||
|
||||
getRoot() {
|
||||
return this.getNote('root');
|
||||
return this.getNote("root");
|
||||
}
|
||||
|
||||
findAttributes(type: string, name: string): BAttribute[] {
|
||||
name = name.trim().toLowerCase();
|
||||
|
||||
if (name.startsWith('#') || name.startsWith('~')) {
|
||||
if (name.startsWith("#") || name.startsWith("~")) {
|
||||
name = name.substr(1);
|
||||
}
|
||||
|
||||
@@ -177,8 +177,7 @@ export default class Becca {
|
||||
WHERE attachmentId = ? AND isDeleted = 0`
|
||||
: `SELECT * FROM attachments WHERE attachmentId = ? AND isDeleted = 0`;
|
||||
|
||||
return sql.getRows<AttachmentRow>(query, [attachmentId])
|
||||
.map(row => new BAttachment(row))[0];
|
||||
return sql.getRows<AttachmentRow>(query, [attachmentId]).map((row) => new BAttachment(row))[0];
|
||||
}
|
||||
|
||||
getAttachmentOrThrow(attachmentId: string, opts: AttachmentOpts = {}): BAttachment {
|
||||
@@ -190,8 +189,7 @@ export default class Becca {
|
||||
}
|
||||
|
||||
getAttachments(attachmentIds: string[]): BAttachment[] {
|
||||
return sql.getManyRows<AttachmentRow>("SELECT * FROM attachments WHERE attachmentId IN (???) AND isDeleted = 0", attachmentIds)
|
||||
.map(row => new BAttachment(row));
|
||||
return sql.getManyRows<AttachmentRow>("SELECT * FROM attachments WHERE attachmentId IN (???) AND isDeleted = 0", attachmentIds).map((row) => new BAttachment(row));
|
||||
}
|
||||
|
||||
getBlob(entity: { blobId?: string }): BBlob | null {
|
||||
@@ -220,18 +218,13 @@ export default class Becca {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (entityName === 'revisions') {
|
||||
if (entityName === "revisions") {
|
||||
return this.getRevision(entityId);
|
||||
} else if (entityName === 'attachments') {
|
||||
} else if (entityName === "attachments") {
|
||||
return this.getAttachment(entityId);
|
||||
}
|
||||
|
||||
const camelCaseEntityName = entityName.toLowerCase().replace(/(_[a-z])/g,
|
||||
group =>
|
||||
group
|
||||
.toUpperCase()
|
||||
.replace('_', '')
|
||||
);
|
||||
const camelCaseEntityName = entityName.toLowerCase().replace(/(_[a-z])/g, (group) => group.toUpperCase().replace("_", ""));
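// Illustrative aside, not part of the patch: this mapping turns a snake_case entity
// name into the camelCase collection property, e.g. "recent_notes" -> "recentNotes".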
|
||||
|
||||
if (!(camelCaseEntityName in this)) {
|
||||
throw new Error(`Unknown entity name '${camelCaseEntityName}' (original argument '${entityName}')`);
|
||||
@@ -242,12 +235,12 @@ export default class Becca {
|
||||
|
||||
getRecentNotesFromQuery(query: string, params: string[] = []): BRecentNote[] {
|
||||
const rows = sql.getRows<BRecentNote>(query, params);
|
||||
return rows.map(row => new BRecentNote(row));
|
||||
return rows.map((row) => new BRecentNote(row));
|
||||
}
|
||||
|
||||
getRevisionsFromQuery(query: string, params: string[] = []): BRevision[] {
|
||||
const rows = sql.getRows<RevisionRow>(query, params);
|
||||
return rows.map(row => new BRevision(row));
|
||||
return rows.map((row) => new BRevision(row));
|
||||
}
|
||||
|
||||
/** Should be called when the set of all non-skeleton notes changes (added/removed) */
|
||||
|
||||
@@ -11,7 +11,7 @@ import BOption from "./entities/boption.js";
|
||||
import BEtapiToken from "./entities/betapi_token.js";
|
||||
import cls from "../services/cls.js";
|
||||
import entityConstructor from "../becca/entity_constructor.js";
|
||||
import { AttributeRow, BranchRow, EtapiTokenRow, NoteRow, OptionRow } from './entities/rows.js';
|
||||
import type { AttributeRow, BranchRow, EtapiTokenRow, NoteRow, OptionRow } from "./entities/rows.js";
|
||||
import AbstractBeccaEntity from "./entities/abstract_becca_entity.js";
|
||||
import ws from "../services/ws.js";
|
||||
|
||||
@@ -119,13 +119,13 @@ eventService.subscribeBeccaLoader(eventService.ENTITY_CHANGED, ({ entityName, en
|
||||
* It should be therefore treated as a row.
|
||||
*/
|
||||
function postProcessEntityUpdate(entityName: string, entityRow: any) {
|
||||
if (entityName === 'notes') {
|
||||
if (entityName === "notes") {
|
||||
noteUpdated(entityRow);
|
||||
} else if (entityName === 'branches') {
|
||||
} else if (entityName === "branches") {
|
||||
branchUpdated(entityRow);
|
||||
} else if (entityName === 'attributes') {
|
||||
} else if (entityName === "attributes") {
|
||||
attributeUpdated(entityRow);
|
||||
} else if (entityName === 'note_reordering') {
|
||||
} else if (entityName === "note_reordering") {
|
||||
noteReorderingUpdated(entityRow);
|
||||
}
|
||||
}
|
||||
@@ -135,13 +135,13 @@ eventService.subscribeBeccaLoader([eventService.ENTITY_DELETED, eventService.ENT
|
||||
return;
|
||||
}
|
||||
|
||||
if (entityName === 'notes') {
|
||||
if (entityName === "notes") {
|
||||
noteDeleted(entityId);
|
||||
} else if (entityName === 'branches') {
|
||||
} else if (entityName === "branches") {
|
||||
branchDeleted(entityId);
|
||||
} else if (entityName === 'attributes') {
|
||||
} else if (entityName === "attributes") {
|
||||
attributeDeleted(entityId);
|
||||
} else if (entityName === 'etapi_tokens') {
|
||||
} else if (entityName === "etapi_tokens") {
|
||||
etapiTokenDeleted(entityId);
|
||||
}
|
||||
});
|
||||
@@ -162,9 +162,8 @@ function branchDeleted(branchId: string) {
|
||||
const childNote = becca.notes[branch.noteId];
|
||||
|
||||
if (childNote) {
|
||||
childNote.parents = childNote.parents.filter(parent => parent.noteId !== branch.parentNoteId);
|
||||
childNote.parentBranches = childNote.parentBranches
|
||||
.filter(parentBranch => parentBranch.branchId !== branch.branchId);
|
||||
childNote.parents = childNote.parents.filter((parent) => parent.noteId !== branch.parentNoteId);
|
||||
childNote.parentBranches = childNote.parentBranches.filter((parentBranch) => parentBranch.branchId !== branch.branchId);
|
||||
|
||||
if (childNote.parents.length > 0) {
|
||||
// subtree notes might lose some inherited attributes
|
||||
@@ -175,7 +174,7 @@ function branchDeleted(branchId: string) {
|
||||
const parentNote = becca.notes[branch.parentNoteId];
|
||||
|
||||
if (parentNote) {
|
||||
parentNote.children = parentNote.children.filter(child => child.noteId !== branch.noteId);
|
||||
parentNote.children = parentNote.children.filter((child) => child.noteId !== branch.noteId);
|
||||
}
|
||||
|
||||
delete becca.childParentToBranch[`${branch.noteId}-${branch.parentNoteId}`];
|
||||
@@ -230,12 +229,12 @@ function attributeDeleted(attributeId: string) {
|
||||
note.invalidateThisCache();
|
||||
}
|
||||
|
||||
note.ownedAttributes = note.ownedAttributes.filter(attr => attr.attributeId !== attribute.attributeId);
|
||||
note.ownedAttributes = note.ownedAttributes.filter((attr) => attr.attributeId !== attribute.attributeId);
|
||||
|
||||
const targetNote = attribute.targetNote;
|
||||
|
||||
if (targetNote) {
|
||||
targetNote.targetRelations = targetNote.targetRelations.filter(rel => rel.attributeId !== attribute.attributeId);
|
||||
targetNote.targetRelations = targetNote.targetRelations.filter((rel) => rel.attributeId !== attribute.attributeId);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -244,7 +243,7 @@ function attributeDeleted(attributeId: string) {
|
||||
const key = `${attribute.type}-${attribute.name.toLowerCase()}`;
|
||||
|
||||
if (key in becca.attributeIndex) {
|
||||
becca.attributeIndex[key] = becca.attributeIndex[key].filter(attr => attr.attributeId !== attribute.attributeId);
|
||||
becca.attributeIndex[key] = becca.attributeIndex[key].filter((attr) => attr.attributeId !== attribute.attributeId);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -282,8 +281,7 @@ function etapiTokenDeleted(etapiTokenId: string) {
|
||||
eventService.subscribeBeccaLoader(eventService.ENTER_PROTECTED_SESSION, () => {
|
||||
try {
|
||||
becca.decryptProtectedNotes();
|
||||
}
|
||||
catch (e: any) {
|
||||
} catch (e: any) {
|
||||
log.error(`Could not decrypt protected notes: ${e.message} ${e.stack}`);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -37,7 +37,7 @@ function getNoteTitle(childNoteId: string, parentNoteId?: string) {
|
||||
|
||||
const branch = parentNote ? becca.getBranchFromChildAndParent(childNote.noteId, parentNote.noteId) : null;
|
||||
|
||||
return `${(branch && branch.prefix) ? `${branch.prefix} - ` : ''}${title}`;
|
||||
return `${branch && branch.prefix ? `${branch.prefix} - ` : ""}${title}`;
|
||||
}
|
||||
|
||||
function getNoteTitleArrayForPath(notePathArray: string[]) {
|
||||
@@ -51,7 +51,7 @@ function getNoteTitleArrayForPath(notePathArray: string[]) {
|
||||
|
||||
const titles = [];
|
||||
|
||||
let parentNoteId = 'root';
|
||||
let parentNoteId = "root";
|
||||
let hoistedNotePassed = false;
|
||||
|
||||
// this is a notePath from outside of hoisted subtree, so the full title path needs to be returned
|
||||
@@ -79,7 +79,7 @@ function getNoteTitleArrayForPath(notePathArray: string[]) {
|
||||
function getNoteTitleForPath(notePathArray: string[]) {
|
||||
const titles = getNoteTitleArrayForPath(notePathArray);
|
||||
|
||||
return titles.join(' / ');
|
||||
return titles.join(" / ");
|
||||
}
|
||||
|
||||
export default {
|
||||
|
||||
@@ -9,7 +9,7 @@ import cls from "../../services/cls.js";
|
||||
import log from "../../services/log.js";
|
||||
import protectedSessionService from "../../services/protected_session.js";
|
||||
import blobService from "../../services/blob.js";
|
||||
import Becca, { ConstructorData } from '../becca-interface.js';
|
||||
import Becca, { type ConstructorData } from "../becca-interface.js";
|
||||
import becca from "../becca.js";
|
||||
|
||||
interface ContentOpts {
|
||||
@@ -23,7 +23,6 @@ interface ContentOpts {
|
||||
* @type T the same entity type needed for self-reference in {@link ConstructorData}.
|
||||
*/
|
||||
abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
|
||||
utcDateModified?: string;
|
||||
dateCreated?: string;
|
||||
dateModified?: string;
|
||||
@@ -35,7 +34,7 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
blobId?: string;
|
||||
|
||||
protected beforeSaving(opts?: {}) {
|
||||
const constructorData = (this.constructor as unknown as ConstructorData<T>);
|
||||
const constructorData = this.constructor as unknown as ConstructorData<T>;
|
||||
if (!(this as any)[constructorData.primaryKeyName]) {
|
||||
(this as any)[constructorData.primaryKeyName] = utils.newEntityId();
|
||||
}
|
||||
@@ -50,19 +49,19 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
}
|
||||
|
||||
protected putEntityChange(isDeleted: boolean) {
|
||||
const constructorData = (this.constructor as unknown as ConstructorData<T>);
|
||||
const constructorData = this.constructor as unknown as ConstructorData<T>;
|
||||
entityChangesService.putEntityChange({
|
||||
entityName: constructorData.entityName,
|
||||
entityId: (this as any)[constructorData.primaryKeyName],
|
||||
hash: this.generateHash(isDeleted),
|
||||
isErased: false,
|
||||
utcDateChanged: this.getUtcDateChanged(),
|
||||
isSynced: constructorData.entityName !== 'options' || !!this.isSynced
|
||||
isSynced: constructorData.entityName !== "options" || !!this.isSynced
|
||||
});
|
||||
}
|
||||
|
||||
generateHash(isDeleted?: boolean): string {
|
||||
const constructorData = (this.constructor as unknown as ConstructorData<T>);
|
||||
const constructorData = this.constructor as unknown as ConstructorData<T>;
|
||||
let contentToHash = "";
|
||||
|
||||
for (const propertyName of constructorData.hashedProperties) {
|
||||
@@ -102,7 +101,7 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
* Saves entity - executes SQL, but doesn't commit the transaction on its own
|
||||
*/
|
||||
save(opts?: {}): this {
|
||||
const constructorData = (this.constructor as unknown as ConstructorData<T>);
|
||||
const constructorData = this.constructor as unknown as ConstructorData<T>;
|
||||
const entityName = constructorData.entityName;
|
||||
const primaryKeyName = constructorData.primaryKeyName;
|
||||
|
||||
@@ -115,7 +114,7 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
sql.transactional(() => {
|
||||
sql.upsert(entityName, primaryKeyName, pojo);
|
||||
|
||||
if (entityName === 'recent_notes') {
|
||||
if (entityName === "recent_notes") {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -144,7 +143,7 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
opts.forceFrontendReload = !!opts.forceFrontendReload;
|
||||
|
||||
if (content === null || content === undefined) {
|
||||
const constructorData = (this.constructor as unknown as ConstructorData<T>);
|
||||
const constructorData = this.constructor as unknown as ConstructorData<T>;
|
||||
throw new Error(`Cannot set null content to ${constructorData.primaryKeyName} '${(this as any)[constructorData.primaryKeyName]}'`);
|
||||
}
|
||||
|
||||
@@ -206,9 +205,7 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
if (this.isProtected) {
|
||||
// a "random" prefix makes sure that the calculated hash/blobId is different for a decrypted/encrypted content
|
||||
const encryptedPrefixSuffix = "t$[nvQg7q)&_ENCRYPTED_?M:Bf&j3jr_";
|
||||
return Buffer.isBuffer(unencryptedContent)
|
||||
? Buffer.concat([Buffer.from(encryptedPrefixSuffix), unencryptedContent])
|
||||
: `${encryptedPrefixSuffix}${unencryptedContent}`;
|
||||
return Buffer.isBuffer(unencryptedContent) ? Buffer.concat([Buffer.from(encryptedPrefixSuffix), unencryptedContent]) : `${encryptedPrefixSuffix}${unencryptedContent}`;
|
||||
} else {
|
||||
return unencryptedContent;
|
||||
}
|
||||
@@ -222,7 +219,7 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
* notes/attachments), but the trade-off comes out clearly positive.
|
||||
*/
|
||||
const newBlobId = utils.hashedBlobId(unencryptedContentForHashCalculation);
|
||||
const blobNeedsInsert = !sql.getValue('SELECT 1 FROM blobs WHERE blobId = ?', [newBlobId]);
|
||||
const blobNeedsInsert = !sql.getValue("SELECT 1 FROM blobs WHERE blobId = ?", [newBlobId]);
|
||||
|
||||
if (!blobNeedsInsert) {
|
||||
return newBlobId;
|
||||
@@ -242,7 +239,7 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
const hash = blobService.calculateContentHash(pojo);
|
||||
|
||||
entityChangesService.putEntityChange({
|
||||
entityName: 'blobs',
|
||||
entityName: "blobs",
|
||||
entityId: newBlobId,
|
||||
hash: hash,
|
||||
isErased: false,
|
||||
@@ -254,7 +251,7 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
});
|
||||
|
||||
eventService.emit(eventService.ENTITY_CHANGED, {
|
||||
entityName: 'blobs',
|
||||
entityName: "blobs",
|
||||
entity: this
|
||||
});
|
||||
|
||||
@@ -265,7 +262,7 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
const row = sql.getRow<{ content: string | Buffer }>(`SELECT content FROM blobs WHERE blobId = ?`, [this.blobId]);
|
||||
|
||||
if (!row) {
|
||||
const constructorData = (this.constructor as unknown as ConstructorData<T>);
|
||||
const constructorData = this.constructor as unknown as ConstructorData<T>;
|
||||
throw new Error(`Cannot find content for ${constructorData.primaryKeyName} '${(this as any)[constructorData.primaryKeyName]}', blobId '${this.blobId}'`);
|
||||
}
|
||||
|
||||
@@ -278,21 +275,22 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
* This is a low-level method, for notes and branches use `note.deleteNote()` and 'branch.deleteBranch()` instead.
|
||||
*/
|
||||
markAsDeleted(deleteId: string | null = null) {
|
||||
const constructorData = (this.constructor as unknown as ConstructorData<T>);
|
||||
const constructorData = this.constructor as unknown as ConstructorData<T>;
|
||||
const entityId = (this as any)[constructorData.primaryKeyName];
|
||||
const entityName = constructorData.entityName;
|
||||
|
||||
this.utcDateModified = dateUtils.utcNowDateTime();
|
||||
|
||||
sql.execute(`UPDATE ${entityName} SET isDeleted = 1, deleteId = ?, utcDateModified = ?
|
||||
sql.execute(
|
||||
`UPDATE ${entityName} SET isDeleted = 1, deleteId = ?, utcDateModified = ?
|
||||
WHERE ${constructorData.primaryKeyName} = ?`,
|
||||
[deleteId, this.utcDateModified, entityId]);
|
||||
[deleteId, this.utcDateModified, entityId]
|
||||
);
|
||||
|
||||
if (this.dateModified) {
|
||||
this.dateModified = dateUtils.localNowDateTime();
|
||||
|
||||
sql.execute(`UPDATE ${entityName} SET dateModified = ? WHERE ${constructorData.primaryKeyName} = ?`,
|
||||
[this.dateModified, entityId]);
|
||||
sql.execute(`UPDATE ${entityName} SET dateModified = ? WHERE ${constructorData.primaryKeyName} = ?`, [this.dateModified, entityId]);
|
||||
}
|
||||
|
||||
log.info(`Marking ${entityName} ${entityId} as deleted`);
|
||||
@@ -303,15 +301,17 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
|
||||
}
|
||||
|
||||
markAsDeletedSimple() {
|
||||
const constructorData = (this.constructor as unknown as ConstructorData<T>);
|
||||
const constructorData = this.constructor as unknown as ConstructorData<T>;
|
||||
const entityId = (this as any)[constructorData.primaryKeyName];
|
||||
const entityName = constructorData.entityName;
|
||||
|
||||
this.utcDateModified = dateUtils.utcNowDateTime();
|
||||
|
||||
sql.execute(`UPDATE ${entityName} SET isDeleted = 1, utcDateModified = ?
|
||||
sql.execute(
|
||||
`UPDATE ${entityName} SET isDeleted = 1, utcDateModified = ?
|
||||
WHERE ${constructorData.primaryKeyName} = ?`,
|
||||
[this.utcDateModified, entityId]);
|
||||
[this.utcDateModified, entityId]
|
||||
);
|
||||
|
||||
log.info(`Marking ${entityName} ${entityId} as deleted`);
|
||||
|
||||
|
||||
@@ -6,14 +6,14 @@ import AbstractBeccaEntity from "./abstract_becca_entity.js";
|
||||
import sql from "../../services/sql.js";
|
||||
import protectedSessionService from "../../services/protected_session.js";
|
||||
import log from "../../services/log.js";
|
||||
import { AttachmentRow } from './rows.js';
|
||||
import type { AttachmentRow } from "./rows.js";
|
||||
import BNote from "./bnote.js";
|
||||
import BBranch from "./bbranch.js";
|
||||
import noteService from "../../services/notes.js";
|
||||
|
||||
const attachmentRoleToNoteTypeMapping = {
|
||||
'image': 'image',
|
||||
'file': 'file'
|
||||
image: "image",
|
||||
file: "file"
|
||||
};
|
||||
|
||||
interface ContentOpts {
|
||||
@@ -31,9 +31,15 @@ interface ContentOpts {
|
||||
* larger amounts of data and generally not accessible to the user.
|
||||
*/
|
||||
class BAttachment extends AbstractBeccaEntity<BAttachment> {
|
||||
static get entityName() { return "attachments"; }
|
||||
static get primaryKeyName() { return "attachmentId"; }
|
||||
static get hashedProperties() { return ["attachmentId", "ownerId", "role", "mime", "title", "blobId", "utcDateScheduledForErasureSince"]; }
|
||||
static get entityName() {
|
||||
return "attachments";
|
||||
}
|
||||
static get primaryKeyName() {
|
||||
return "attachmentId";
|
||||
}
|
||||
static get hashedProperties() {
|
||||
return ["attachmentId", "ownerId", "role", "mime", "title", "blobId", "utcDateScheduledForErasureSince"];
|
||||
}
|
||||
|
||||
noteId?: number;
|
||||
attachmentId?: string;
|
||||
@@ -102,13 +108,15 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
|
||||
}
|
||||
|
||||
isContentAvailable() {
|
||||
return !this.attachmentId // new attachment which was not encrypted yet
|
||||
|| !this.isProtected
|
||||
|| protectedSessionService.isProtectedSessionAvailable()
|
||||
return (
|
||||
!this.attachmentId || // new attachment which was not encrypted yet
|
||||
!this.isProtected ||
|
||||
protectedSessionService.isProtectedSessionAvailable()
|
||||
);
|
||||
}
|
||||
|
||||
getTitleOrProtected() {
|
||||
return this.isContentAvailable() ? this.title : '[protected]';
|
||||
return this.isContentAvailable() ? this.title : "[protected]";
|
||||
}
|
||||
|
||||
decrypt() {
|
||||
@@ -121,8 +129,7 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
|
||||
try {
|
||||
this.title = protectedSessionService.decryptString(this.title) || "";
|
||||
this.isDecrypted = true;
|
||||
}
|
||||
catch (e: any) {
|
||||
} catch (e: any) {
|
||||
log.error(`Could not decrypt attachment ${this.attachmentId}: ${e.message} ${e.stack}`);
|
||||
}
|
||||
}
|
||||
@@ -136,22 +143,22 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
|
||||
this._setContent(content, opts);
|
||||
}
|
||||
|
||||
convertToNote(): { note: BNote, branch: BBranch } {
|
||||
convertToNote(): { note: BNote; branch: BBranch } {
|
||||
// TODO: can this ever be "search"?
|
||||
if (this.type as string === 'search') {
|
||||
if ((this.type as string) === "search") {
|
||||
throw new Error(`Note of type search cannot have child notes`);
|
||||
}
|
||||
|
||||
if (!this.getNote()) {
|
||||
throw new Error("Cannot find note of this attachment. It is possible that this is note revision's attachment. " +
|
||||
"Converting note revision's attachments to note is not (yet) supported.");
|
||||
throw new Error("Cannot find note of this attachment. It is possible that this is note revision's attachment. " + "Converting note revision's attachments to note is not (yet) supported.");
|
||||
}
|
||||
|
||||
if (!(this.role in attachmentRoleToNoteTypeMapping)) {
|
||||
throw new Error(`Mapping from attachment role '${this.role}' to note's type is not defined`);
|
||||
}
|
||||
|
||||
if (!this.isContentAvailable()) { // isProtected is the same for attachment
|
||||
if (!this.isContentAvailable()) {
|
||||
// isProtected is the same for attachment
|
||||
throw new Error(`Cannot convert protected attachment outside of protected session`);
|
||||
}
|
||||
|
||||
@@ -168,7 +175,7 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {

const parentNote = this.getNote();

if (this.role === 'image' && parentNote.type === 'text') {
if (this.role === "image" && parentNote.type === "text") {
const origContent = parentNote.getContent();

if (typeof origContent !== "string") {
@@ -191,7 +198,7 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
}

getFileName() {
const type = this.role === 'image' ? 'image' : 'file';
const type = this.role === "image" ? "image" : "file";

return utils.formatDownloadTitle(this.title, type, this.mime);
}
@@ -200,9 +207,14 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
super.beforeSaving();

if (this.position === undefined || this.position === null) {
this.position = 10 + sql.getValue<number>(`SELECT COALESCE(MAX(position), 0)
this.position =
10 +
sql.getValue<number>(
`SELECT COALESCE(MAX(position), 0)
FROM attachments
WHERE ownerId = ?`, [this.noteId]);
WHERE ownerId = ?`,
[this.noteId]
);
}

this.dateModified = dateUtils.localNowDateTime();
@@ -234,8 +246,7 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
if (pojo.isProtected) {
if (this.isDecrypted) {
pojo.title = protectedSessionService.encrypt(pojo.title || "") || undefined;
}
else {
} else {
// updating protected note outside of protected session means we will keep original ciphertexts
delete pojo.title;
}

@@ -5,7 +5,7 @@ import AbstractBeccaEntity from "./abstract_becca_entity.js";
import dateUtils from "../../services/date_utils.js";
import promotedAttributeDefinitionParser from "../../services/promoted_attribute_definition_parser.js";
import sanitizeAttributeName from "../../services/sanitize_attribute_name.js";
import { AttributeRow, AttributeType } from './rows.js';
import type { AttributeRow, AttributeType } from "./rows.js";

interface SavingOpts {
skipValidation?: boolean;
@@ -16,9 +16,15 @@ interface SavingOpts {
* and relation (representing named relationship between source and target note)
*/
class BAttribute extends AbstractBeccaEntity<BAttribute> {
static get entityName() { return "attributes"; }
static get primaryKeyName() { return "attributeId"; }
static get hashedProperties() { return ["attributeId", "noteId", "type", "name", "value", "isInheritable"]; }
static get entityName() {
return "attributes";
}
static get primaryKeyName() {
return "attributeId";
}
static get hashedProperties() {
return ["attributeId", "noteId", "type", "name", "value", "isInheritable"];
}

attributeId!: string;
noteId!: string;
@@ -40,16 +46,7 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
}

updateFromRow(row: AttributeRow) {
this.update([
row.attributeId,
row.noteId,
row.type,
row.name,
row.value,
row.isInheritable,
row.position,
row.utcDateModified
]);
this.update([row.attributeId, row.noteId, row.type, row.name, row.value, row.isInheritable, row.position, row.utcDateModified]);
}

update([attributeId, noteId, type, name, value, isInheritable, position, utcDateModified]: any) {
@@ -97,22 +94,22 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
throw new Error(`Invalid empty name in attribute '${this.attributeId}' of note '${this.noteId}'`);
}

if (this.type === 'relation' && !(this.value in this.becca.notes)) {
if (this.type === "relation" && !(this.value in this.becca.notes)) {
throw new Error(`Cannot save relation '${this.name}' of note '${this.noteId}' since it targets not existing note '${this.value}'.`);
}
}

get isAffectingSubtree() {
return this.isInheritable
|| (this.type === 'relation' && ['template', 'inherit'].includes(this.name));
return this.isInheritable || (this.type === "relation" && ["template", "inherit"].includes(this.name));
}

get targetNoteId() { // alias
return this.type === 'relation' ? this.value : undefined;
get targetNoteId() {
// alias
return this.type === "relation" ? this.value : undefined;
}

isAutoLink() {
return this.type === 'relation' && ['internalLink', 'imageLink', 'relationMapLink', 'includeNoteLink'].includes(this.name);
return this.type === "relation" && ["internalLink", "imageLink", "relationMapLink", "includeNoteLink"].includes(this.name);
}

get note() {
@@ -120,7 +117,7 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
}

get targetNote() {
if (this.type === 'relation') {
if (this.type === "relation") {
return this.becca.notes[this.value];
}
}
@@ -136,7 +133,7 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
}

getTargetNote() {
if (this.type !== 'relation') {
if (this.type !== "relation") {
throw new Error(`Attribute '${this.attributeId}' is not a relation.`);
}

@@ -148,7 +145,7 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
}

isDefinition() {
return this.type === 'label' && (this.name.startsWith('label:') || this.name.startsWith('relation:'));
return this.type === "label" && (this.name.startsWith("label:") || this.name.startsWith("relation:"));
}

getDefinition() {
@@ -156,9 +153,9 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
}

getDefinedName() {
if (this.type === 'label' && this.name.startsWith('label:')) {
if (this.type === "label" && this.name.startsWith("label:")) {
return this.name.substr(6);
} else if (this.type === 'label' && this.name.startsWith('relation:')) {
} else if (this.type === "label" && this.name.startsWith("relation:")) {
return this.name.substr(9);
} else {
return this.name;
@@ -182,7 +179,8 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
}

if (this.position === undefined || this.position === null) {
const maxExistingPosition = this.getNote().getAttributes()
const maxExistingPosition = this.getNote()
.getAttributes()
.reduce((maxPosition, attr) => Math.max(maxPosition, attr.position || 0), 0);

this.position = maxExistingPosition + 10;

@@ -1,11 +1,17 @@
import AbstractBeccaEntity from "./abstract_becca_entity.js";
import { BlobRow } from "./rows.js";
import type { BlobRow } from "./rows.js";

// TODO: Why this does not extend the abstract becca?
class BBlob extends AbstractBeccaEntity<BBlob> {
static get entityName() { return "blobs"; }
static get primaryKeyName() { return "blobId"; }
static get hashedProperties() { return ["blobId", "content"]; }
static get entityName() {
return "blobs";
}
static get primaryKeyName() {
return "blobId";
}
static get hashedProperties() {
return ["blobId", "content"];
}

content!: string | Buffer;
contentLength!: number;

@@ -7,7 +7,7 @@ import utils from "../../services/utils.js";
import TaskContext from "../../services/task_context.js";
import cls from "../../services/cls.js";
import log from "../../services/log.js";
import { BranchRow } from './rows.js';
import type { BranchRow } from "./rows.js";
import handlers from "../../services/handlers.js";

/**
@@ -18,10 +18,16 @@ import handlers from "../../services/handlers.js";
* Always check noteId instead.
*/
class BBranch extends AbstractBeccaEntity<BBranch> {
static get entityName() { return "branches"; }
static get primaryKeyName() { return "branchId"; }
static get entityName() {
return "branches";
}
static get primaryKeyName() {
return "branchId";
}
// notePosition is not part of hash because it would produce a lot of updates in case of reordering
static get hashedProperties() { return ["branchId", "noteId", "parentNoteId", "prefix"]; }
static get hashedProperties() {
return ["branchId", "noteId", "parentNoteId", "prefix"];
}

branchId?: string;
noteId!: string;
@@ -42,15 +48,7 @@ class BBranch extends AbstractBeccaEntity<BBranch> {
}

updateFromRow(row: BranchRow) {
this.update([
row.branchId,
row.noteId,
row.parentNoteId,
row.prefix,
row.notePosition,
row.isExpanded,
row.utcDateModified
]);
this.update([row.branchId, row.noteId, row.parentNoteId, row.prefix, row.notePosition, row.isExpanded, row.utcDateModified]);
}

update([branchId, noteId, parentNoteId, prefix, notePosition, isExpanded, utcDateModified]: any) {
@@ -78,7 +76,7 @@ class BBranch extends AbstractBeccaEntity<BBranch> {
childNote.parentBranches.push(this);
}

if (this.noteId === 'root') {
if (this.noteId === "root") {
return;
}

@@ -109,7 +107,7 @@ class BBranch extends AbstractBeccaEntity<BBranch> {

/** @returns root branch will have undefined parent, all other branches have to have a parent note */
get parentNote(): BNote | undefined {
if (!(this.parentNoteId in this.becca.notes) && this.parentNoteId !== 'none') {
if (!(this.parentNoteId in this.becca.notes) && this.parentNoteId !== "none") {
// entities can come out of order in sync/import, create skeleton which will be filled later
this.becca.addNote(this.parentNoteId, new BNote({ noteId: this.parentNoteId }));
}
@@ -118,7 +116,7 @@ class BBranch extends AbstractBeccaEntity<BBranch> {
}

get isDeleted() {
return (this.branchId == undefined || !(this.branchId in this.becca.branches));
return this.branchId == undefined || !(this.branchId in this.becca.branches);
}

/**
@@ -129,7 +127,7 @@ class BBranch extends AbstractBeccaEntity<BBranch> {
* of deletion should not act as a clone.
*/
get isWeak() {
return ['_share', '_lbBookmarks'].includes(this.parentNoteId);
return ["_share", "_lbBookmarks"].includes(this.parentNoteId);
}

/**
@@ -145,7 +143,7 @@ class BBranch extends AbstractBeccaEntity<BBranch> {
}

if (!taskContext) {
taskContext = new TaskContext('no-progress-reporting');
taskContext = new TaskContext("no-progress-reporting");
}

taskContext.increaseProgressCount();
@@ -157,13 +155,11 @@ class BBranch extends AbstractBeccaEntity<BBranch> {

if (parentBranches.length === 1 && parentBranches[0] === this) {
// needs to be run before branches and attributes are deleted and thus attached relations disappear
handlers.runAttachedRelations(note, 'runOnNoteDeletion', note);
handlers.runAttachedRelations(note, "runOnNoteDeletion", note);
}
}

if (this.noteId === 'root'
|| this.noteId === cls.getHoistedNoteId()) {

if (this.noteId === "root" || this.noteId === cls.getHoistedNoteId()) {
throw new Error("Can't delete root or hoisted branch/note");
}

@@ -203,8 +199,7 @@ class BBranch extends AbstractBeccaEntity<BBranch> {
note.markAsDeleted(deleteId);

return true;
}
else {
} else {
return false;
}
}
@@ -225,8 +220,9 @@ class BBranch extends AbstractBeccaEntity<BBranch> {
continue;
}

if (maxNotePos < childBranch.notePosition
&& childBranch.noteId !== '_hidden' // hidden has a very large notePosition to always stay last
if (
maxNotePos < childBranch.notePosition &&
childBranch.noteId !== "_hidden" // hidden has a very large notePosition to always stay last
) {
maxNotePos = childBranch.notePosition;
}

@@ -1,6 +1,6 @@
"use strict";

import { EtapiTokenRow } from "./rows.js";
import type { EtapiTokenRow } from "./rows.js";

import dateUtils from "../../services/date_utils.js";
import AbstractBeccaEntity from "./abstract_becca_entity.js";
@@ -15,9 +15,15 @@ import AbstractBeccaEntity from "./abstract_becca_entity.js";
* from tokenHash and token.
*/
class BEtapiToken extends AbstractBeccaEntity<BEtapiToken> {
static get entityName() { return "etapi_tokens"; }
static get primaryKeyName() { return "etapiTokenId"; }
static get hashedProperties() { return ["etapiTokenId", "name", "tokenHash", "utcDateCreated", "utcDateModified", "isDeleted"]; }
static get entityName() {
return "etapi_tokens";
}
static get primaryKeyName() {
return "etapiTokenId";
}
static get hashedProperties() {
return ["etapiTokenId", "name", "tokenHash", "utcDateCreated", "utcDateModified", "isDeleted"];
}

etapiTokenId?: string;
name!: string;
@@ -66,7 +72,7 @@ class BEtapiToken extends AbstractBeccaEntity<BEtapiToken> {
utcDateCreated: this.utcDateCreated,
utcDateModified: this.utcDateModified,
isDeleted: this.isDeleted
}
};
}

beforeSaving() {

@@ -14,18 +14,18 @@ import TaskContext from "../../services/task_context.js";
|
||||
import dayjs from "dayjs";
|
||||
import utc from "dayjs/plugin/utc.js";
|
||||
import eventService from "../../services/events.js";
|
||||
import { AttachmentRow, AttributeType, NoteRow, NoteType, RevisionRow } from './rows.js';
|
||||
import type { AttachmentRow, AttributeType, NoteRow, NoteType, RevisionRow } from "./rows.js";
|
||||
import BBranch from "./bbranch.js";
|
||||
import BAttribute from "./battribute.js";
|
||||
import { NotePojo } from '../becca-interface.js';
|
||||
import type { NotePojo } from "../becca-interface.js";
|
||||
import searchService from "../../services/search/services/search.js";
|
||||
import cloningService, { CloneResponse } from "../../services/cloning.js";
|
||||
import cloningService, { type CloneResponse } from "../../services/cloning.js";
|
||||
import noteService from "../../services/notes.js";
|
||||
import handlers from "../../services/handlers.js";
|
||||
dayjs.extend(utc);
|
||||
|
||||
const LABEL = 'label';
|
||||
const RELATION = 'relation';
|
||||
const LABEL = "label";
|
||||
const RELATION = "relation";
|
||||
|
||||
interface NotePathRecord {
|
||||
isArchived: boolean;
|
||||
@@ -47,7 +47,7 @@ interface AttachmentOpts {
|
||||
|
||||
interface Relationship {
|
||||
parentNoteId: string;
|
||||
childNoteId: string
|
||||
childNoteId: string;
|
||||
}
|
||||
|
||||
interface ConvertOpts {
|
||||
@@ -59,9 +59,15 @@ interface ConvertOpts {
|
||||
* Trilium's main entity, which can represent text note, image, code note, file attachment etc.
|
||||
*/
|
||||
class BNote extends AbstractBeccaEntity<BNote> {
|
||||
static get entityName() { return "notes"; }
|
||||
static get primaryKeyName() { return "noteId"; }
|
||||
static get hashedProperties() { return ["noteId", "title", "isProtected", "type", "mime", "blobId"]; }
|
||||
static get entityName() {
|
||||
return "notes";
|
||||
}
|
||||
static get primaryKeyName() {
|
||||
return "noteId";
|
||||
}
|
||||
static get hashedProperties() {
|
||||
return ["noteId", "title", "isProtected", "type", "mime", "blobId"];
|
||||
}
|
||||
|
||||
noteId!: string;
|
||||
title!: string;
|
||||
@@ -105,18 +111,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
updateFromRow(row: Partial<NoteRow>) {
|
||||
this.update([
|
||||
row.noteId,
|
||||
row.title,
|
||||
row.type,
|
||||
row.mime,
|
||||
row.isProtected,
|
||||
row.blobId,
|
||||
row.dateCreated,
|
||||
row.dateModified,
|
||||
row.utcDateCreated,
|
||||
row.utcDateModified
|
||||
]);
|
||||
this.update([row.noteId, row.title, row.type, row.mime, row.isProtected, row.blobId, row.dateCreated, row.dateModified, row.utcDateCreated, row.utcDateModified]);
|
||||
}
|
||||
|
||||
update([noteId, title, type, mime, isProtected, blobId, dateCreated, dateModified, utcDateCreated, utcDateModified]: any) {
|
||||
@@ -164,13 +159,15 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
isContentAvailable() {
|
||||
return !this.noteId // new note which was not encrypted yet
|
||||
|| !this.isProtected
|
||||
|| protectedSessionService.isProtectedSessionAvailable()
|
||||
return (
|
||||
!this.noteId || // new note which was not encrypted yet
|
||||
!this.isProtected ||
|
||||
protectedSessionService.isProtectedSessionAvailable()
|
||||
);
|
||||
}
|
||||
|
||||
getTitleOrProtected() {
|
||||
return this.isContentAvailable() ? this.title : '[protected]';
|
||||
return this.isContentAvailable() ? this.title : "[protected]";
|
||||
}
|
||||
|
||||
getParentBranches() {
|
||||
@@ -181,7 +178,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
* Returns <i>strong</i> (as opposed to <i>weak</i>) parent branches. See isWeak for details.
|
||||
*/
|
||||
getStrongParentBranches() {
|
||||
return this.getParentBranches().filter(branch => !branch.isWeak);
|
||||
return this.getParentBranches().filter((branch) => !branch.isWeak);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -204,8 +201,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
getChildBranches(): BBranch[] {
|
||||
return this.children
|
||||
.map(childNote => this.becca.getBranchFromChildAndParent(childNote.noteId, this.noteId)) as BBranch[];
|
||||
return this.children.map((childNote) => this.becca.getBranchFromChildAndParent(childNote.noteId, this.noteId)) as BBranch[];
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -237,8 +233,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
getJsonContentSafely() {
|
||||
try {
|
||||
return this.getJsonContent();
|
||||
}
|
||||
catch (e) {
|
||||
} catch (e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -250,7 +245,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
setJsonContent(content: {}) {
|
||||
this.setContent(JSON.stringify(content, null, '\t'));
|
||||
this.setContent(JSON.stringify(content, null, "\t"));
|
||||
}
|
||||
|
||||
get dateCreatedObj() {
|
||||
@@ -271,7 +266,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
/** @returns true if this note is the root of the note tree. Root note has "root" noteId */
|
||||
isRoot() {
|
||||
return this.noteId === 'root';
|
||||
return this.noteId === "root";
|
||||
}
|
||||
|
||||
/** @returns true if this note is of application/json content type */
|
||||
@@ -281,22 +276,20 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
/** @returns true if this note is JavaScript (code or attachment) */
|
||||
isJavaScript() {
|
||||
return (this.type === "code" || this.type === "file" || this.type === 'launcher')
|
||||
&& (this.mime.startsWith("application/javascript")
|
||||
|| this.mime === "application/x-javascript"
|
||||
|| this.mime === "text/javascript");
|
||||
return (
|
||||
(this.type === "code" || this.type === "file" || this.type === "launcher") &&
|
||||
(this.mime.startsWith("application/javascript") || this.mime === "application/x-javascript" || this.mime === "text/javascript")
|
||||
);
|
||||
}
|
||||
|
||||
/** @returns true if this note is HTML */
|
||||
isHtml() {
|
||||
return ["code", "file", "render"].includes(this.type)
|
||||
&& this.mime === "text/html";
|
||||
return ["code", "file", "render"].includes(this.type) && this.mime === "text/html";
|
||||
}
|
||||
|
||||
/** @returns true if this note is an image */
|
||||
isImage() {
|
||||
return this.type === 'image'
|
||||
|| (this.type === 'file' && this.mime?.startsWith('image/'));
|
||||
return this.type === "image" || (this.type === "file" && this.mime?.startsWith("image/"));
|
||||
}
|
||||
|
||||
/** @deprecated use hasStringContent() instead */
|
||||
@@ -311,15 +304,15 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
/** @returns JS script environment - either "frontend" or "backend" */
|
||||
getScriptEnv() {
|
||||
if (this.isHtml() || (this.isJavaScript() && this.mime.endsWith('env=frontend'))) {
|
||||
if (this.isHtml() || (this.isJavaScript() && this.mime.endsWith("env=frontend"))) {
|
||||
return "frontend";
|
||||
}
|
||||
|
||||
if (this.type === 'render') {
|
||||
if (this.type === "render") {
|
||||
return "frontend";
|
||||
}
|
||||
|
||||
if (this.isJavaScript() && this.mime.endsWith('env=backend')) {
|
||||
if (this.isJavaScript() && this.mime.endsWith("env=backend")) {
|
||||
return "backend";
|
||||
}
|
||||
|
||||
@@ -343,15 +336,12 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
if (type && name) {
|
||||
return this.__attributeCache.filter(attr => attr.name === name && attr.type === type);
|
||||
}
|
||||
else if (type) {
|
||||
return this.__attributeCache.filter(attr => attr.type === type);
|
||||
}
|
||||
else if (name) {
|
||||
return this.__attributeCache.filter(attr => attr.name === name);
|
||||
}
|
||||
else {
|
||||
return this.__attributeCache.filter((attr) => attr.name === name && attr.type === type);
|
||||
} else if (type) {
|
||||
return this.__attributeCache.filter((attr) => attr.type === type);
|
||||
} else if (name) {
|
||||
return this.__attributeCache.filter((attr) => attr.name === name);
|
||||
} else {
|
||||
return this.__attributeCache;
|
||||
}
|
||||
}
|
||||
@@ -372,7 +362,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
const newPath = [...path, this.noteId];
|
||||
|
||||
// inheritable attrs on root are typically not intended to be applied to hidden subtree #3537
|
||||
if (this.noteId !== 'root' && this.noteId !== '_hidden') {
|
||||
if (this.noteId !== "root" && this.noteId !== "_hidden") {
|
||||
for (const parentNote of this.parents) {
|
||||
parentAttributes.push(...parentNote.__getInheritableAttributes(newPath));
|
||||
}
|
||||
@@ -380,15 +370,17 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
const templateAttributes = [];
|
||||
|
||||
for (const ownedAttr of parentAttributes) { // parentAttributes so we process also inherited templates
|
||||
if (ownedAttr.type === 'relation' && ['template', 'inherit'].includes(ownedAttr.name)) {
|
||||
for (const ownedAttr of parentAttributes) {
|
||||
// parentAttributes so we process also inherited templates
|
||||
if (ownedAttr.type === "relation" && ["template", "inherit"].includes(ownedAttr.name)) {
|
||||
const templateNote = this.becca.notes[ownedAttr.value];
|
||||
|
||||
if (templateNote) {
|
||||
templateAttributes.push(
|
||||
...templateNote.__getAttributes(newPath)
|
||||
...templateNote
|
||||
.__getAttributes(newPath)
|
||||
// template attr is used as a marker for templates, but it's not meant to be inherited
|
||||
.filter(attr => !(attr.type === 'label' && (attr.name === 'template' || attr.name === 'workspacetemplate')))
|
||||
.filter((attr) => !(attr.type === "label" && (attr.name === "template" || attr.name === "workspacetemplate")))
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -431,38 +423,31 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
__validateTypeName(type?: string | null, name?: string | null) {
|
||||
if (type && type !== 'label' && type !== 'relation') {
|
||||
if (type && type !== "label" && type !== "relation") {
|
||||
throw new Error(`Unrecognized attribute type '${type}'. Only 'label' and 'relation' are possible values.`);
|
||||
}
|
||||
|
||||
if (name) {
|
||||
const firstLetter = name.charAt(0);
|
||||
if (firstLetter === '#' || firstLetter === '~') {
|
||||
if (firstLetter === "#" || firstLetter === "~") {
|
||||
throw new Error(`Detect '#' or '~' in the attribute's name. In the API, attribute names should be set without these characters.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
hasAttribute(type: string, name: string, value: string | null = null): boolean {
|
||||
return !!this.getAttributes().find(attr =>
|
||||
attr.name === name
|
||||
&& (value === undefined || value === null || attr.value === value)
|
||||
&& attr.type === type
|
||||
);
|
||||
return !!this.getAttributes().find((attr) => attr.name === name && (value === undefined || value === null || attr.value === value) && attr.type === type);
|
||||
}
|
||||
|
||||
getAttributeCaseInsensitive(type: string, name: string, value?: string | null) {
|
||||
name = name.toLowerCase();
|
||||
value = value ? value.toLowerCase() : null;
|
||||
|
||||
return this.getAttributes().find(
|
||||
attr => attr.name.toLowerCase() === name
|
||||
&& (!value || attr.value.toLowerCase() === value)
|
||||
&& attr.type === type);
|
||||
return this.getAttributes().find((attr) => attr.name.toLowerCase() === name && (!value || attr.value.toLowerCase() === value) && attr.type === type);
|
||||
}
|
||||
|
||||
getRelationTarget(name: string) {
|
||||
const relation = this.getAttributes().find(attr => attr.name === name && attr.type === 'relation');
|
||||
const relation = this.getAttributes().find((attr) => attr.name === name && attr.type === "relation");
|
||||
|
||||
return relation ? relation.targetNote : null;
|
||||
}
|
||||
@@ -487,7 +472,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
return false;
|
||||
}
|
||||
|
||||
return label && label.value !== 'false';
|
||||
return label && label.value !== "false";
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -600,7 +585,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
getAttribute(type: string, name: string): BAttribute | null {
|
||||
const attributes = this.getAttributes();
|
||||
|
||||
return attributes.find(attr => attr.name === name && attr.type === type) || null;
|
||||
return attributes.find((attr) => attr.name === name && attr.type === type) || null;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -638,7 +623,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
* @returns all note's label values, including inherited ones
|
||||
*/
|
||||
getLabelValues(name: string): string[] {
|
||||
return this.getLabels(name).map(l => l.value);
|
||||
return this.getLabels(name).map((l) => l.value);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -654,7 +639,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
* @returns all note's label values, excluding inherited ones
|
||||
*/
|
||||
getOwnedLabelValues(name: string): string[] {
|
||||
return this.getOwnedAttributes(LABEL, name).map(l => l.value);
|
||||
return this.getOwnedAttributes(LABEL, name).map((l) => l.value);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -686,18 +671,14 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
this.__validateTypeName(type, name);
|
||||
|
||||
if (type && name && value !== undefined && value !== null) {
|
||||
return this.ownedAttributes.filter(attr => attr.name === name && attr.value === value && attr.type === type);
|
||||
}
|
||||
else if (type && name) {
|
||||
return this.ownedAttributes.filter(attr => attr.name === name && attr.type === type);
|
||||
}
|
||||
else if (type) {
|
||||
return this.ownedAttributes.filter(attr => attr.type === type);
|
||||
}
|
||||
else if (name) {
|
||||
return this.ownedAttributes.filter(attr => attr.name === name);
|
||||
}
|
||||
else {
|
||||
return this.ownedAttributes.filter((attr) => attr.name === name && attr.value === value && attr.type === type);
|
||||
} else if (type && name) {
|
||||
return this.ownedAttributes.filter((attr) => attr.name === name && attr.type === type);
|
||||
} else if (type) {
|
||||
return this.ownedAttributes.filter((attr) => attr.type === type);
|
||||
} else if (name) {
|
||||
return this.ownedAttributes.filter((attr) => attr.name === name);
|
||||
} else {
|
||||
return this.ownedAttributes;
|
||||
}
|
||||
}
|
||||
@@ -714,7 +695,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
get isArchived() {
|
||||
return this.hasAttribute('label', 'archived');
|
||||
return this.hasAttribute("label", "archived");
|
||||
}
|
||||
|
||||
areAllNotePathsArchived() {
|
||||
@@ -734,7 +715,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
hasInheritableArchivedLabel() {
|
||||
for (const attr of this.getAttributes()) {
|
||||
if (attr.name === 'archived' && attr.type === LABEL && attr.isInheritable) {
|
||||
if (attr.name === "archived" && attr.type === LABEL && attr.isInheritable) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -755,9 +736,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
});
|
||||
|
||||
this.parents = this.parentBranches
|
||||
.map(branch => branch.parentNote)
|
||||
.filter(note => !!note) as BNote[];
|
||||
this.parents = this.parentBranches.map((branch) => branch.parentNote).filter((note) => !!note) as BNote[];
|
||||
}
|
||||
|
||||
sortChildren() {
|
||||
@@ -771,7 +750,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
const aBranch = becca.getBranchFromChildAndParent(a.noteId, this.noteId);
|
||||
const bBranch = becca.getBranchFromChildAndParent(b.noteId, this.noteId);
|
||||
|
||||
return ((aBranch?.notePosition || 0) - (bBranch?.notePosition || 0)) || 0;
|
||||
return (aBranch?.notePosition || 0) - (bBranch?.notePosition || 0) || 0;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -796,13 +775,13 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
for (const attr of this.getAttributes()) {
|
||||
// it's best to use space as separator since spaces are filtered from the search string by the tokenization into words
|
||||
this.__flatTextCache += `${attr.type === 'label' ? '#' : '~'}${attr.name}`;
|
||||
this.__flatTextCache += `${attr.type === "label" ? "#" : "~"}${attr.name}`;
|
||||
|
||||
if (attr.value) {
|
||||
this.__flatTextCache += `=${attr.value}`;
|
||||
}
|
||||
|
||||
this.__flatTextCache += ' ';
|
||||
this.__flatTextCache += " ";
|
||||
}
|
||||
|
||||
this.__flatTextCache = utils.normalize(this.__flatTextCache);
|
||||
@@ -835,7 +814,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
for (const targetRelation of this.targetRelations) {
|
||||
if (targetRelation.name === 'template' || targetRelation.name === 'inherit') {
|
||||
if (targetRelation.name === "template" || targetRelation.name === "inherit") {
|
||||
const note = targetRelation.note;
|
||||
|
||||
if (note) {
|
||||
@@ -846,17 +825,15 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
getRelationDefinitions() {
|
||||
return this.getLabels()
|
||||
.filter(l => l.name.startsWith("relation:"));
|
||||
return this.getLabels().filter((l) => l.name.startsWith("relation:"));
|
||||
}
|
||||
|
||||
getLabelDefinitions() {
|
||||
return this.getLabels()
|
||||
.filter(l => l.name.startsWith("relation:"));
|
||||
return this.getLabels().filter((l) => l.name.startsWith("relation:"));
|
||||
}
|
||||
|
||||
isInherited() {
|
||||
return !!this.targetRelations.find(rel => rel.name === 'template' || rel.name === 'inherit');
|
||||
return !!this.targetRelations.find((rel) => rel.name === "template" || rel.name === "inherit");
|
||||
}
|
||||
|
||||
getSubtreeNotesIncludingTemplated(): BNote[] {
|
||||
@@ -864,7 +841,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
function inner(note: BNote) {
|
||||
// _hidden is not counted as subtree for the purpose of inheritance
|
||||
if (set.has(note) || note.noteId === '_hidden') {
|
||||
if (set.has(note) || note.noteId === "_hidden") {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -875,7 +852,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
for (const targetRelation of note.targetRelations) {
|
||||
if (targetRelation.name === 'template' || targetRelation.name === 'inherit') {
|
||||
if (targetRelation.name === "template" || targetRelation.name === "inherit") {
|
||||
const targetNote = targetRelation.note;
|
||||
|
||||
if (targetNote) {
|
||||
@@ -891,26 +868,23 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
getSearchResultNotes(): BNote[] {
|
||||
if (this.type !== 'search') {
|
||||
if (this.type !== "search") {
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
const result = searchService.searchFromNote(this);
|
||||
const becca = this.becca;
|
||||
return (result.searchResultNoteIds)
|
||||
.map(resultNoteId => becca.notes[resultNoteId])
|
||||
.filter(note => !!note);
|
||||
}
|
||||
catch (e: any) {
|
||||
return result.searchResultNoteIds.map((resultNoteId) => becca.notes[resultNoteId]).filter((note) => !!note);
|
||||
} catch (e: any) {
|
||||
log.error(`Could not resolve search note ${this.noteId}: ${e.message}`);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
getSubtree({ includeArchived = true, includeHidden = false, resolveSearch = false } = {}): {
|
||||
notes: BNote[],
|
||||
relationships: Relationship[]
|
||||
notes: BNote[];
|
||||
relationships: Relationship[];
|
||||
} {
|
||||
const noteSet = new Set<BNote>();
|
||||
const relationships: Relationship[] = []; // list of tuples parentNoteId -> childNoteId
|
||||
@@ -920,14 +894,13 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
for (const resultNote of searchNote.getSearchResultNotes()) {
|
||||
addSubtreeNotesInner(resultNote, searchNote);
|
||||
}
|
||||
}
|
||||
catch (e: any) {
|
||||
} catch (e: any) {
|
||||
log.error(`Could not resolve search note ${searchNote?.noteId}: ${e.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
function addSubtreeNotesInner(note: BNote, parentNote: BNote | null = null) {
|
||||
if (note.noteId === '_hidden' && !includeHidden) {
|
||||
if (note.noteId === "_hidden" && !includeHidden) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -949,12 +922,11 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
noteSet.add(note);
|
||||
|
||||
if (note.type === 'search') {
|
||||
if (note.type === "search") {
|
||||
if (resolveSearch) {
|
||||
resolveSearchNote(note);
|
||||
}
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
for (const childNote of note.children) {
|
||||
addSubtreeNotesInner(childNote, note);
|
||||
}
|
||||
@@ -971,9 +943,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
/** @returns includes the subtree root note as well */
|
||||
getSubtreeNoteIds({ includeArchived = true, includeHidden = false, resolveSearch = false } = {}) {
|
||||
return this.getSubtree({includeArchived, includeHidden, resolveSearch})
|
||||
.notes
|
||||
.map(note => note.noteId);
|
||||
return this.getSubtree({ includeArchived, includeHidden, resolveSearch }).notes.map((note) => note.noteId);
|
||||
}
|
||||
|
||||
/** @deprecated use getSubtreeNoteIds() instead */
|
||||
@@ -990,31 +960,31 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
get labelCount() {
|
||||
return this.getAttributes().filter(attr => attr.type === 'label').length;
|
||||
return this.getAttributes().filter((attr) => attr.type === "label").length;
|
||||
}
|
||||
|
||||
get ownedLabelCount() {
|
||||
return this.ownedAttributes.filter(attr => attr.type === 'label').length;
|
||||
return this.ownedAttributes.filter((attr) => attr.type === "label").length;
|
||||
}
|
||||
|
||||
get relationCount() {
|
||||
return this.getAttributes().filter(attr => attr.type === 'relation' && !attr.isAutoLink()).length;
|
||||
return this.getAttributes().filter((attr) => attr.type === "relation" && !attr.isAutoLink()).length;
|
||||
}
|
||||
|
||||
get relationCountIncludingLinks() {
|
||||
return this.getAttributes().filter(attr => attr.type === 'relation').length;
|
||||
return this.getAttributes().filter((attr) => attr.type === "relation").length;
|
||||
}
|
||||
|
||||
get ownedRelationCount() {
|
||||
return this.ownedAttributes.filter(attr => attr.type === 'relation' && !attr.isAutoLink()).length;
|
||||
return this.ownedAttributes.filter((attr) => attr.type === "relation" && !attr.isAutoLink()).length;
|
||||
}
|
||||
|
||||
get ownedRelationCountIncludingLinks() {
|
||||
return this.ownedAttributes.filter(attr => attr.type === 'relation').length;
|
||||
return this.ownedAttributes.filter((attr) => attr.type === "relation").length;
|
||||
}
|
||||
|
||||
get targetRelationCount() {
|
||||
return this.targetRelations.filter(attr => !attr.isAutoLink()).length;
|
||||
return this.targetRelations.filter((attr) => !attr.isAutoLink()).length;
|
||||
}
|
||||
|
||||
get targetRelationCountIncludingLinks() {
|
||||
@@ -1055,7 +1025,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
getAncestorNoteIds(): string[] {
|
||||
return this.getAncestors().map(note => note.noteId);
|
||||
return this.getAncestors().map((note) => note.noteId);
|
||||
}
|
||||
|
||||
hasAncestor(ancestorNoteId: string): boolean {
|
||||
@@ -1069,7 +1039,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
isInHiddenSubtree() {
|
||||
return this.noteId === '_hidden' || this.hasAncestor('_hidden');
|
||||
return this.noteId === "_hidden" || this.hasAncestor("_hidden");
|
||||
}
|
||||
|
||||
getTargetRelations() {
|
||||
@@ -1082,7 +1052,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
const arr: BNote[] = [this];
|
||||
|
||||
for (const targetRelation of this.targetRelations) {
|
||||
if (targetRelation.name === 'template' || targetRelation.name === 'inherit') {
|
||||
if (targetRelation.name === "template" || targetRelation.name === "inherit") {
|
||||
const note = targetRelation.note;
|
||||
|
||||
if (note) {
|
||||
@@ -1109,8 +1079,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
getRevisions(): BRevision[] {
|
||||
return sql.getRows<RevisionRow>("SELECT * FROM revisions WHERE noteId = ? ORDER BY revisions.utcDateCreated ASC", [this.noteId])
|
||||
.map(row => new BRevision(row));
|
||||
return sql.getRows<RevisionRow>("SELECT * FROM revisions WHERE noteId = ? ORDER BY revisions.utcDateCreated ASC", [this.noteId]).map((row) => new BRevision(row));
|
||||
}
|
||||
|
||||
getAttachments(opts: AttachmentOpts = {}) {
|
||||
@@ -1126,8 +1095,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
ORDER BY position`
|
||||
: `SELECT * FROM attachments WHERE ownerId = ? AND isDeleted = 0 ORDER BY position`;
|
||||
|
||||
return sql.getRows<AttachmentRow>(query, [this.noteId])
|
||||
.map(row => new BAttachment(row));
|
||||
return sql.getRows<AttachmentRow>(query, [this.noteId]).map((row) => new BAttachment(row));
|
||||
}
|
||||
|
||||
getAttachmentById(attachmentId: string, opts: AttachmentOpts = {}) {
|
||||
@@ -1140,24 +1108,27 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
WHERE ownerId = ? AND attachmentId = ? AND isDeleted = 0`
|
||||
: `SELECT * FROM attachments WHERE ownerId = ? AND attachmentId = ? AND isDeleted = 0`;
|
||||
|
||||
return sql.getRows<AttachmentRow>(query, [this.noteId, attachmentId])
|
||||
.map(row => new BAttachment(row))[0];
|
||||
return sql.getRows<AttachmentRow>(query, [this.noteId, attachmentId]).map((row) => new BAttachment(row))[0];
|
||||
}
|
||||
|
||||
getAttachmentsByRole(role: string): BAttachment[] {
|
||||
return sql.getRows<AttachmentRow>(`
|
||||
return sql
|
||||
.getRows<AttachmentRow>(
|
||||
`
|
||||
SELECT attachments.*
|
||||
FROM attachments
|
||||
WHERE ownerId = ?
|
||||
AND role = ?
|
||||
AND isDeleted = 0
|
||||
ORDER BY position`, [this.noteId, role])
|
||||
.map(row => new BAttachment(row));
|
||||
ORDER BY position`,
|
||||
[this.noteId, role]
|
||||
)
|
||||
.map((row) => new BAttachment(row));
|
||||
}
|
||||
|
||||
getAttachmentByTitle(title: string): BAttachment | undefined {
|
||||
// cannot use SQL to filter by title since it can be encrypted
|
||||
return this.getAttachments().filter(attachment => attachment.title === title)[0];
|
||||
return this.getAttachments().filter((attachment) => attachment.title === title)[0];
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1166,15 +1137,16 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
* @returns array of notePaths (each represented by array of noteIds constituting the particular note path)
|
||||
*/
|
||||
getAllNotePaths(): string[][] {
|
||||
if (this.noteId === 'root') {
|
||||
return [['root']];
|
||||
if (this.noteId === "root") {
|
||||
return [["root"]];
|
||||
}
|
||||
|
||||
const parentNotes = this.getParentNotes();
|
||||
|
||||
const notePaths = parentNotes.length === 1
|
||||
const notePaths =
|
||||
parentNotes.length === 1
|
||||
? parentNotes[0].getAllNotePaths() // optimization for the most common case
|
||||
: parentNotes.flatMap(parentNote => parentNote.getAllNotePaths());
|
||||
: parentNotes.flatMap((parentNote) => parentNote.getAllNotePaths());
|
||||
|
||||
for (const notePath of notePaths) {
|
||||
notePath.push(this.noteId);
|
||||
@@ -1183,14 +1155,14 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
return notePaths;
|
||||
}
|
||||
|
||||
getSortedNotePathRecords(hoistedNoteId: string = 'root'): NotePathRecord[] {
|
||||
const isHoistedRoot = hoistedNoteId === 'root';
|
||||
getSortedNotePathRecords(hoistedNoteId: string = "root"): NotePathRecord[] {
|
||||
const isHoistedRoot = hoistedNoteId === "root";
|
||||
|
||||
const notePaths = this.getAllNotePaths().map(path => ({
|
||||
const notePaths = this.getAllNotePaths().map((path) => ({
|
||||
notePath: path,
|
||||
isInHoistedSubTree: isHoistedRoot || path.includes(hoistedNoteId),
|
||||
isArchived: path.some(noteId => this.becca.notes[noteId].isArchived),
|
||||
isHidden: path.includes('_hidden')
|
||||
isArchived: path.some((noteId) => this.becca.notes[noteId].isArchived),
|
||||
isHidden: path.includes("_hidden")
|
||||
}));
|
||||
|
||||
notePaths.sort((a, b) => {
|
||||
@@ -1213,7 +1185,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
*
|
||||
* @return array of noteIds constituting the particular note path
|
||||
*/
|
||||
getBestNotePath(hoistedNoteId: string = 'root'): string[] {
|
||||
getBestNotePath(hoistedNoteId: string = "root"): string[] {
|
||||
return this.getSortedNotePathRecords(hoistedNoteId)[0]?.notePath;
|
||||
}
|
||||
|
||||
@@ -1222,7 +1194,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
*
|
||||
* @return serialized note path (e.g. 'root/a1h315/js725h')
|
||||
*/
|
||||
getBestNotePathString(hoistedNoteId: string = 'root'): string {
|
||||
getBestNotePathString(hoistedNoteId: string = "root"): string {
|
||||
const notePath = this.getBestNotePath(hoistedNoteId);
|
||||
|
||||
return notePath?.join("/");
|
||||
@@ -1232,14 +1204,14 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
* @return boolean - true if there's no non-hidden path, note is not cloned to the visible tree
|
||||
*/
|
||||
isHiddenCompletely() {
|
||||
if (this.noteId === 'root') {
|
||||
if (this.noteId === "root") {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const parentNote of this.parents) {
|
||||
if (parentNote.noteId === 'root') {
|
||||
if (parentNote.noteId === "root") {
|
||||
return false;
|
||||
} else if (parentNote.noteId === '_hidden') {
|
||||
} else if (parentNote.noteId === "_hidden") {
|
||||
continue;
|
||||
} else if (!parentNote.isHiddenCompletely()) {
|
||||
return false;
|
||||
@@ -1255,7 +1227,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
isDescendantOfNote(ancestorNoteId: string): boolean {
|
||||
const notePaths = this.getAllNotePaths();
|
||||
|
||||
return notePaths.some(path => path.includes(ancestorNoteId));
|
||||
return notePaths.some((path) => path.includes(ancestorNoteId));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1267,7 +1239,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
*/
|
||||
setAttribute(type: AttributeType, name: string, value?: string) {
|
||||
const attributes = this.getOwnedAttributes();
|
||||
const attr = attributes.find(attr => attr.type === type && attr.name === name);
|
||||
const attr = attributes.find((attr) => attr.type === type && attr.name === name);
|
||||
|
||||
value = value?.toString() || "";
|
||||
|
||||
@@ -1276,8 +1248,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
attr.value = value;
|
||||
attr.save();
|
||||
}
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
new BAttribute({
|
||||
noteId: this.noteId,
|
||||
type: type,
|
||||
@@ -1354,8 +1325,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
toggleAttribute(type: AttributeType, enabled: boolean, name: string, value?: string) {
|
||||
if (enabled) {
|
||||
this.setAttribute(type, name, value);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
this.removeAttribute(type, name, value);
|
||||
}
|
||||
}
|
||||
@@ -1443,11 +1413,11 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
isEligibleForConversionToAttachment(opts: ConvertOpts = { autoConversion: false }) {
|
||||
if (this.type !== 'image' || !this.isContentAvailable() || this.hasChildren() || this.getParentBranches().length !== 1) {
|
||||
if (this.type !== "image" || !this.isContentAvailable() || this.hasChildren() || this.getParentBranches().length !== 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const targetRelations = this.getTargetRelations().filter(relation => relation.name === 'imageLink');
|
||||
const targetRelations = this.getTargetRelations().filter((relation) => relation.name === "imageLink");
|
||||
|
||||
if (opts.autoConversion && targetRelations.length === 0) {
|
||||
return false;
|
||||
@@ -1460,7 +1430,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
if (referencingNote && parentNote !== referencingNote) {
|
||||
return false;
|
||||
} else if (parentNote.type !== 'text' || !parentNote.isContentAvailable()) {
|
||||
} else if (parentNote.type !== "text" || !parentNote.isContentAvailable()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -1491,7 +1461,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
|
||||
const parentNote = this.getParentNotes()[0];
|
||||
const attachment = parentNote.saveAttachment({
|
||||
role: 'image',
|
||||
role: "image",
|
||||
mime: this.mime,
|
||||
title: this.title,
|
||||
content: content
|
||||
@@ -1532,11 +1502,11 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
}
|
||||
|
||||
if (!taskContext) {
|
||||
taskContext = new TaskContext('no-progress-reporting');
|
||||
taskContext = new TaskContext("no-progress-reporting");
|
||||
}
|
||||
|
||||
// needs to be run before branches and attributes are deleted and thus attached relations disappear
|
||||
handlers.runAttachedRelations(this, 'runOnNoteDeletion', this);
|
||||
handlers.runAttachedRelations(this, "runOnNoteDeletion", this);
|
||||
taskContext.noteDeletionHandlerTriggered = true;
|
||||
|
||||
for (const branch of this.getParentBranches()) {
|
||||
@@ -1551,15 +1521,14 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
this.__flatTextCache = null;
|
||||
|
||||
this.isDecrypted = true;
|
||||
}
|
||||
catch (e: any) {
|
||||
} catch (e: any) {
|
||||
log.error(`Could not decrypt note ${this.noteId}: ${e.message} ${e.stack}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
isLaunchBarConfig() {
|
||||
return this.type === 'launcher' || ['_lbRoot', '_lbAvailableLaunchers', '_lbVisibleLaunchers'].includes(this.noteId);
|
||||
return this.type === "launcher" || ["_lbRoot", "_lbAvailableLaunchers", "_lbVisibleLaunchers"].includes(this.noteId);
|
||||
}
|
||||
|
||||
isOptions() {
|
||||
@@ -1576,7 +1545,8 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
return sql.transactional(() => {
|
||||
let noteContent = this.getContent();
|
||||
|
||||
const revision = new BRevision({
|
||||
const revision = new BRevision(
|
||||
{
|
||||
noteId: this.noteId,
|
||||
// title and text should be decrypted now
|
||||
title: this.title,
|
||||
@@ -1588,7 +1558,9 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
utcDateModified: dateUtils.utcNowDateTime(),
|
||||
dateLastEdited: this.dateModified,
|
||||
dateCreated: dateUtils.localNowDateTime()
|
||||
}, true);
|
||||
},
|
||||
true
|
||||
);
|
||||
|
||||
revision.save(); // to generate revisionId, which is then used to save attachments
|
||||
|
||||
@@ -1602,19 +1574,20 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
revisionAttachment.ownerId = revision.revisionId;
|
||||
revisionAttachment.setContent(noteAttachment.getContent(), { forceSave: true });
|
||||
|
||||
if (this.type === 'text' && typeof noteContent === "string") {
|
||||
if (this.type === "text" && typeof noteContent === "string") {
|
||||
// content is rewritten to point to the revision attachments
|
||||
noteContent = noteContent.replaceAll(`attachments/${noteAttachment.attachmentId}`,
|
||||
`attachments/${revisionAttachment.attachmentId}`);
|
||||
noteContent = noteContent.replaceAll(`attachments/${noteAttachment.attachmentId}`, `attachments/${revisionAttachment.attachmentId}`);
|
||||
|
||||
noteContent = noteContent.replaceAll(new RegExp(`href="[^"]*attachmentId=${noteAttachment.attachmentId}[^"]*"`, 'gi'),
|
||||
`href="api/attachments/${revisionAttachment.attachmentId}/download"`);
|
||||
noteContent = noteContent.replaceAll(
|
||||
new RegExp(`href="[^"]*attachmentId=${noteAttachment.attachmentId}[^"]*"`, "gi"),
|
||||
`href="api/attachments/${revisionAttachment.attachmentId}/download"`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
revision.setContent(noteContent);
|
||||
|
||||
this.eraseExcessRevisionSnapshots()
|
||||
this.eraseExcessRevisionSnapshots();
|
||||
return revision;
|
||||
});
|
||||
}
|
||||
@@ -1625,14 +1598,14 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
// label has a higher priority
|
||||
let revisionSnapshotNumberLimit = parseInt(this.getLabelValue("versioningLimit") ?? "");
|
||||
if (!Number.isInteger(revisionSnapshotNumberLimit)) {
|
||||
revisionSnapshotNumberLimit = parseInt(optionService.getOption('revisionSnapshotNumberLimit'));
|
||||
revisionSnapshotNumberLimit = parseInt(optionService.getOption("revisionSnapshotNumberLimit"));
|
||||
}
|
||||
if (revisionSnapshotNumberLimit >= 0) {
|
||||
const revisions = this.getRevisions();
|
||||
if (revisions.length - revisionSnapshotNumberLimit > 0) {
|
||||
const revisionIds = revisions
|
||||
.slice(0, revisions.length - revisionSnapshotNumberLimit)
|
||||
.map(revision => revision.revisionId)
|
||||
.map((revision) => revision.revisionId)
|
||||
.filter((id): id is string => id !== undefined);
|
||||
eraseService.eraseRevisions(revisionIds);
|
||||
}
|
||||
@@ -1643,20 +1616,22 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
* @param matchBy - choose by which property we detect if to update an existing attachment.
|
||||
* Supported values are either 'attachmentId' (default) or 'title'
|
||||
*/
|
||||
saveAttachment({attachmentId, role, mime, title, content, position}: AttachmentRow, matchBy = 'attachmentId') {
|
||||
if (!['attachmentId', 'title'].includes(matchBy)) {
|
||||
saveAttachment({ attachmentId, role, mime, title, content, position }: AttachmentRow, matchBy = "attachmentId") {
|
||||
if (!["attachmentId", "title"].includes(matchBy)) {
|
||||
throw new Error(`Unsupported value '${matchBy}' for matchBy param, has to be either 'attachmentId' or 'title'.`);
|
||||
}
|
||||
|
||||
let attachment;
|
||||
|
||||
if (matchBy === 'title' && title) {
|
||||
if (matchBy === "title" && title) {
|
||||
attachment = this.getAttachmentByTitle(title);
|
||||
} else if (matchBy === 'attachmentId' && attachmentId) {
|
||||
} else if (matchBy === "attachmentId" && attachmentId) {
|
||||
attachment = this.becca.getAttachmentOrThrow(attachmentId);
|
||||
}
|
||||
|
||||
attachment = attachment || new BAttachment({
|
||||
attachment =
|
||||
attachment ||
|
||||
new BAttachment({
|
||||
ownerId: this.noteId,
|
||||
title,
|
||||
role,
|
||||
@@ -1706,8 +1681,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
if (pojo.isProtected) {
|
||||
if (this.isDecrypted && pojo.title) {
|
||||
pojo.title = protectedSessionService.encrypt(pojo.title) || undefined;
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// updating protected note outside of protected session means we will keep original ciphertexts
|
||||
delete pojo.title;
|
||||
}
|
||||
|
||||
@@ -2,15 +2,21 @@
|
||||
|
||||
import dateUtils from "../../services/date_utils.js";
|
||||
import AbstractBeccaEntity from "./abstract_becca_entity.js";
|
||||
import { OptionRow } from './rows.js';
|
||||
import type { OptionRow } from "./rows.js";
|
||||
|
||||
/**
|
||||
* Option represents a name-value pair, either directly configurable by the user or some system property.
|
||||
*/
|
||||
class BOption extends AbstractBeccaEntity<BOption> {
|
||||
static get entityName() { return "options"; }
|
||||
static get primaryKeyName() { return "name"; }
|
||||
static get hashedProperties() { return ["name", "value"]; }
|
||||
static get entityName() {
|
||||
return "options";
|
||||
}
|
||||
static get primaryKeyName() {
|
||||
return "name";
|
||||
}
|
||||
static get hashedProperties() {
|
||||
return ["name", "value"];
|
||||
}
|
||||
|
||||
name!: string;
|
||||
value!: string;
|
||||
@@ -43,7 +49,7 @@ class BOption extends AbstractBeccaEntity<BOption> {
|
||||
value: this.value,
|
||||
isSynced: this.isSynced,
|
||||
utcDateModified: this.utcDateModified
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"use strict";
|
||||
|
||||
import { RecentNoteRow } from "./rows.js";
|
||||
import type { RecentNoteRow } from "./rows.js";
|
||||
|
||||
import dateUtils from "../../services/date_utils.js";
|
||||
import AbstractBeccaEntity from "./abstract_becca_entity.js";
|
||||
@@ -9,9 +9,15 @@ import AbstractBeccaEntity from "./abstract_becca_entity.js";
|
||||
* RecentNote represents recently visited note.
|
||||
*/
|
||||
class BRecentNote extends AbstractBeccaEntity<BRecentNote> {
|
||||
static get entityName() { return "recent_notes"; }
|
||||
static get primaryKeyName() { return "noteId"; }
|
||||
static get hashedProperties() { return ["noteId", "notePath"]; }
|
||||
static get entityName() {
|
||||
return "recent_notes";
|
||||
}
|
||||
static get primaryKeyName() {
|
||||
return "noteId";
|
||||
}
|
||||
static get hashedProperties() {
|
||||
return ["noteId", "notePath"];
|
||||
}
|
||||
|
||||
noteId!: string;
|
||||
notePath!: string;
|
||||
@@ -33,7 +39,7 @@ class BRecentNote extends AbstractBeccaEntity<BRecentNote> {
|
||||
noteId: this.noteId,
|
||||
notePath: this.notePath,
|
||||
utcDateCreated: this.utcDateCreated
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ import becca from "../becca.js";
import AbstractBeccaEntity from "./abstract_becca_entity.js";
import sql from "../../services/sql.js";
import BAttachment from "./battachment.js";
import { AttachmentRow, RevisionRow } from './rows.js';
import type { AttachmentRow, RevisionRow } from "./rows.js";
import eraseService from "../../services/erase.js";

interface ContentOpts {
@@ -24,10 +24,15 @@ interface GetByIdOpts {
* It's used for seamless note versioning.
*/
class BRevision extends AbstractBeccaEntity<BRevision> {
static get entityName() { return "revisions"; }
static get primaryKeyName() { return "revisionId"; }
static get hashedProperties() { return ["revisionId", "noteId", "title", "isProtected", "dateLastEdited", "dateCreated",
"utcDateLastEdited", "utcDateCreated", "utcDateModified", "blobId"]; }
static get entityName() {
return "revisions";
}
static get primaryKeyName() {
return "revisionId";
}
static get hashedProperties() {
return ["revisionId", "noteId", "title", "isProtected", "dateLastEdited", "dateCreated", "utcDateLastEdited", "utcDateCreated", "utcDateModified", "blobId"];
}

revisionId?: string;
noteId!: string;
@@ -75,9 +80,11 @@ class BRevision extends AbstractBeccaEntity<BRevision> {
}

isContentAvailable() {
return !this.revisionId // new note which was not encrypted yet
|| !this.isProtected
|| protectedSessionService.isProtectedSessionAvailable()
return (
!this.revisionId || // new note which was not encrypted yet
!this.isProtected ||
protectedSessionService.isProtectedSessionAvailable()
);
}

/*
@@ -108,8 +115,7 @@ class BRevision extends AbstractBeccaEntity<BRevision> {
getJsonContentSafely(): {} | null {
try {
return this.getJsonContent();
}
catch (e) {
} catch (e) {
return null;
}
}
@@ -119,12 +125,16 @@ class BRevision extends AbstractBeccaEntity<BRevision> {
}

getAttachments(): BAttachment[] {
return sql.getRows<AttachmentRow>(`
return sql
.getRows<AttachmentRow>(
`
SELECT attachments.*
FROM attachments
WHERE ownerId = ?
AND isDeleted = 0`, [this.revisionId])
.map(row => new BAttachment(row));
AND isDeleted = 0`,
[this.revisionId]
)
.map((row) => new BAttachment(row));
}

getAttachmentById(attachmentId: String, opts: GetByIdOpts = {}): BAttachment | null {
@@ -137,24 +147,27 @@ class BRevision extends AbstractBeccaEntity<BRevision> {
WHERE ownerId = ? AND attachmentId = ? AND isDeleted = 0`
: `SELECT * FROM attachments WHERE ownerId = ? AND attachmentId = ? AND isDeleted = 0`;

return sql.getRows<AttachmentRow>(query, [this.revisionId, attachmentId])
.map(row => new BAttachment(row))[0];
return sql.getRows<AttachmentRow>(query, [this.revisionId, attachmentId]).map((row) => new BAttachment(row))[0];
}

getAttachmentsByRole(role: string): BAttachment[] {
return sql.getRows<AttachmentRow>(`
return sql
.getRows<AttachmentRow>(
`
SELECT attachments.*
FROM attachments
WHERE ownerId = ?
AND role = ?
AND isDeleted = 0
ORDER BY position`, [this.revisionId, role])
.map(row => new BAttachment(row));
ORDER BY position`,
[this.revisionId, role]
)
.map((row) => new BAttachment(row));
}

getAttachmentByTitle(title: string): BAttachment {
// cannot use SQL to filter by title since it can be encrypted
return this.getAttachments().filter(attachment => attachment.title === title)[0];
return this.getAttachments().filter((attachment) => attachment.title === title)[0];
}

/**
@@ -199,8 +212,7 @@ class BRevision extends AbstractBeccaEntity<BRevision> {
if (pojo.isProtected) {
if (protectedSessionService.isProtectedSessionAvailable()) {
pojo.title = protectedSessionService.encrypt(this.title) || undefined;
}
else {
} else {
// updating protected note outside of protected session means we will keep original ciphertexts
delete pojo.title;
}

@@ -100,8 +100,25 @@ export interface BranchRow {
* end user. Those types should be used only for checking against, they are
* not for direct use.
*/
export const ALLOWED_NOTE_TYPES = [ "file", "image", "search", "noteMap", "launcher", "doc", "contentWidget", "text", "relationMap", "render", "canvas", "mermaid", "book", "webView", "code", "mindMap" ] as const;
export type NoteType = typeof ALLOWED_NOTE_TYPES[number];
export const ALLOWED_NOTE_TYPES = [
"file",
"image",
"search",
"noteMap",
"launcher",
"doc",
"contentWidget",
"text",
"relationMap",
"render",
"canvas",
"mermaid",
"book",
"webView",
"code",
"mindMap"
] as const;
export type NoteType = (typeof ALLOWED_NOTE_TYPES)[number];

export interface NoteRow {
noteId: string;

@@ -1,4 +1,4 @@
import { ConstructorData } from './becca-interface.js';
import type { ConstructorData } from "./becca-interface.js";
import AbstractBeccaEntity from "./entities/abstract_becca_entity.js";
import BAttachment from "./entities/battachment.js";
import BAttribute from "./entities/battribute.js";
@@ -13,15 +13,15 @@ import BRevision from "./entities/brevision.js";
type EntityClass = new (row?: any) => AbstractBeccaEntity<any>;

const ENTITY_NAME_TO_ENTITY: Record<string, ConstructorData<any> & EntityClass> = {
"attachments": BAttachment,
"attributes": BAttribute,
"blobs": BBlob,
"branches": BBranch,
"etapi_tokens": BEtapiToken,
"notes": BNote,
"options": BOption,
"recent_notes": BRecentNote,
"revisions": BRevision
attachments: BAttachment,
attributes: BAttribute,
blobs: BBlob,
branches: BBranch,
etapi_tokens: BEtapiToken,
notes: BNote,
options: BOption,
recent_notes: BRecentNote,
revisions: BRevision
};

function getEntityFromEntityName(entityName: keyof typeof ENTITY_NAME_TO_ENTITY) {

@@ -7,11 +7,7 @@ import BNote from "./entities/bnote.js";

const DEBUG = false;

const IGNORED_ATTRS = [
"datenote",
"monthnote",
"yearnote"
];
const IGNORED_ATTRS = ["datenote", "monthnote", "yearnote"];

const IGNORED_ATTR_NAMES = [
"includenotelink",
@@ -30,7 +26,7 @@ const IGNORED_ATTR_NAMES = [
"similarnoteswidgetdisabled",
"disableinclusion",
"rendernote",
"pageurl",
"pageurl"
];

interface DateLimits {
@@ -42,9 +38,9 @@ interface DateLimits {

function filterUrlValue(value: string) {
return value
.replace(/https?:\/\//ig, "")
.replace(/www.js\./ig, "")
.replace(/(\.net|\.com|\.org|\.info|\.edu)/ig, "");
.replace(/https?:\/\//gi, "")
.replace(/www.js\./gi, "")
.replace(/(\.net|\.com|\.org|\.info|\.edu)/gi, "");
}

function buildRewardMap(note: BNote) {
@@ -61,8 +57,7 @@ function buildRewardMap(note: BNote) {
const currentReward = map.get(word) || 0;

// reward grows with the length of matched string
const length = word.length
- 0.9; // to penalize specifically very short words - 1 and 2 characters
const length = word.length - 0.9; // to penalize specifically very short words - 1 and 2 characters

map.set(word, currentReward + rewardFactor * Math.pow(length, 0.7));
}
@@ -70,7 +65,7 @@ function buildRewardMap(note: BNote) {
}

for (const ancestorNote of note.getAncestors()) {
if (ancestorNote.noteId === 'root') {
if (ancestorNote.noteId === "root") {
continue;
}

@@ -94,9 +89,7 @@ function buildRewardMap(note: BNote) {
}

for (const attr of note.getAttributes()) {
if (attr.name.startsWith('child:')
|| attr.name.startsWith('relation:')
|| attr.name.startsWith('label:')) {
if (attr.name.startsWith("child:") || attr.name.startsWith("relation:") || attr.name.startsWith("label:")) {
continue;
}

@@ -111,13 +104,13 @@ function buildRewardMap(note: BNote) {
addToRewardMap(attr.name, reward);
}

if (attr.name === 'cliptype') {
if (attr.name === "cliptype") {
reward /= 2;
}

let value = attr.value;

if (value.startsWith('http')) {
if (value.startsWith("http")) {
value = filterUrlValue(value);

// words in URLs are not that valuable
@@ -127,7 +120,7 @@ function buildRewardMap(note: BNote) {
addToRewardMap(value, reward);
}

if (note.type === 'text' && note.isDecrypted) {
if (note.type === "text" && note.isDecrypted) {
const content = note.getContent();
const dom = new JSDOM(content);

@@ -135,7 +128,7 @@ function buildRewardMap(note: BNote) {
for (const el of dom.window.document.querySelectorAll(elName)) {
addToRewardMap(el.textContent, rewardFactor);
}
}
};

// the title is the top with weight 1 so smaller headings will have lower weight

@@ -154,12 +147,12 @@ function buildRewardMap(note: BNote) {
const mimeCache: Record<string, string> = {};

function trimMime(mime: string) {
if (!mime || mime === 'text/html') {
if (!mime || mime === "text/html") {
return;
}

if (!(mime in mimeCache)) {
const chunks = mime.split('/');
const chunks = mime.split("/");

let str = "";

@@ -167,7 +160,7 @@ function trimMime(mime: string) {
// we're not interested in 'text/' or 'application/' prefix
str = chunks[1];

if (str.startsWith('-x')) {
if (str.startsWith("-x")) {
str = str.substr(2);
}
}
@@ -185,7 +178,7 @@ function buildDateLimits(baseNote: BNote): DateLimits {
minDate: dateUtils.utcDateTimeStr(new Date(dateCreatedTs - 3600 * 1000)),
minExcludedDate: dateUtils.utcDateTimeStr(new Date(dateCreatedTs - 5 * 1000)),
maxExcludedDate: dateUtils.utcDateTimeStr(new Date(dateCreatedTs + 5 * 1000)),
maxDate: dateUtils.utcDateTimeStr(new Date(dateCreatedTs + 3600 * 1000)),
maxDate: dateUtils.utcDateTimeStr(new Date(dateCreatedTs + 3600 * 1000))
};
}

@@ -193,9 +186,34 @@ function buildDateLimits(baseNote: BNote): DateLimits {
const wordCache = new Map();

const WORD_BLACKLIST = [
"a", "the", "in", "for", "from", "but", "s", "so", "if", "while", "until",
"whether", "after", "before", "because", "since", "when", "where", "how",
"than", "then", "and", "either", "or", "neither", "nor", "both", "also"
"a",
"the",
"in",
"for",
"from",
"but",
"s",
"so",
"if",
"while",
"until",
"whether",
"after",
"before",
"because",
"since",
"when",
"where",
"how",
"than",
"then",
"and",
"either",
"or",
"neither",
"nor",
"both",
"also"
];

function splitToWords(text: string) {
@@ -212,8 +230,7 @@ function splitToWords(text: string) {
// special case for english plurals
else if (words[idx].length > 2 && words[idx].endsWith("es")) {
words[idx] = words[idx].substr(0, words[idx] - 2);
}
else if (words[idx].length > 1 && words[idx].endsWith("s")) {
} else if (words[idx].length > 1 && words[idx].endsWith("s")) {
words[idx] = words[idx].substr(0, words[idx] - 1);
}
}
@@ -227,9 +244,7 @@ function splitToWords(text: string) {
* that it doesn't actually need to be shown to the user.
*/
function hasConnectingRelation(sourceNote: BNote, targetNote: BNote) {
return sourceNote.getAttributes().find(attr => attr.type === 'relation'
&& ['includenotelink', 'imagelink'].includes(attr.name)
&& attr.value === targetNote.noteId);
return sourceNote.getAttributes().find((attr) => attr.type === "relation" && ["includenotelink", "imagelink"].includes(attr.name) && attr.value === targetNote.noteId);
}

async function findSimilarNotes(noteId: string) {
@@ -246,14 +261,13 @@ async function findSimilarNotes(noteId: string) {

try {
dateLimits = buildDateLimits(baseNote);
}
catch (e: any) {
} catch (e: any) {
throw new Error(`Date limits failed with ${e.message}, entity: ${JSON.stringify(baseNote.getPojo())}`);
}

const rewardMap = buildRewardMap(baseNote);
let ancestorRewardCache: Record<string, number> = {};
const ancestorNoteIds = new Set(baseNote.getAncestors().map(note => note.noteId));
const ancestorNoteIds = new Set(baseNote.getAncestors().map((note) => note.noteId));
ancestorNoteIds.add(baseNote.noteId);

let displayRewards = false;
@@ -270,7 +284,7 @@ async function findSimilarNotes(noteId: string) {
const lengthPenalization = 1 / Math.pow(text.length, 0.3);

for (const word of splitToWords(text)) {
const reward = (rewardMap.get(word) * factor * lengthPenalization) || 0;
const reward = rewardMap.get(word) * factor * lengthPenalization || 0;

if (displayRewards && reward > 0) {
console.log(`Reward ${Math.round(reward * 10) / 10} for word: ${word}`);
@@ -294,7 +308,6 @@ async function findSimilarNotes(noteId: string) {

for (const parentNote of note.parents) {
if (!ancestorNoteIds.has(parentNote.noteId)) {

if (displayRewards) {
console.log("Considering", parentNote.title);
}
@@ -304,8 +317,7 @@ async function findSimilarNotes(noteId: string) {
}

for (const branch of parentNote.getParentBranches()) {
score += gatherRewards(branch.prefix, 0.3)
+ gatherAncestorRewards(branch.parentNote);
score += gatherRewards(branch.prefix, 0.3) + gatherAncestorRewards(branch.parentNote);
}
}
}
@@ -317,8 +329,7 @@ async function findSimilarNotes(noteId: string) {
}

function computeScore(candidateNote: BNote) {
let score = gatherRewards(trimMime(candidateNote.mime))
+ gatherAncestorRewards(candidateNote);
let score = gatherRewards(trimMime(candidateNote.mime)) + gatherAncestorRewards(candidateNote);

if (candidateNote.isDecrypted) {
score += gatherRewards(candidateNote.title);
@@ -329,9 +340,7 @@ async function findSimilarNotes(noteId: string) {
}

for (const attr of candidateNote.getAttributes()) {
if (attr.name.startsWith('child:')
|| attr.name.startsWith('relation:')
|| attr.name.startsWith('label:')) {
if (attr.name.startsWith("child:") || attr.name.startsWith("relation:") || attr.name.startsWith("label:")) {
continue;
}

@@ -349,8 +358,7 @@ async function findSimilarNotes(noteId: string) {
if (!value.startsWith) {
log.info(`Unexpected falsy value for attribute ${JSON.stringify(attr.getPojo())}`);
continue;
}
else if (value.startsWith('http')) {
} else if (value.startsWith("http")) {
value = filterUrlValue(value);

// words in URLs are not that valuable
@@ -384,9 +392,7 @@ async function findSimilarNotes(noteId: string) {
}

score += 1;
}
else if (utcDateCreated.substr(0, 10) === dateLimits.minDate.substr(0, 10)
|| utcDateCreated.substr(0, 10) === dateLimits.maxDate.substr(0, 10)) {
} else if (utcDateCreated.substr(0, 10) === dateLimits.minDate.substr(0, 10) || utcDateCreated.substr(0, 10) === dateLimits.maxDate.substr(0, 10)) {
if (displayRewards) {
console.log("Adding reward for same day of creation");
}
@@ -400,9 +406,7 @@ async function findSimilarNotes(noteId: string) {
}

for (const candidateNote of Object.values(becca.notes)) {
if (candidateNote.noteId === baseNote.noteId
|| hasConnectingRelation(candidateNote, baseNote)
|| hasConnectingRelation(baseNote, candidateNote)) {
if (candidateNote.noteId === baseNote.noteId || hasConnectingRelation(candidateNote, baseNote) || hasConnectingRelation(baseNote, candidateNote)) {
continue;
}

@@ -430,7 +434,7 @@ async function findSimilarNotes(noteId: string) {
}
}

results.sort((a, b) => a.score > b.score ? -1 : 1);
results.sort((a, b) => (a.score > b.score ? -1 : 1));

if (DEBUG) {
console.log("REWARD MAP", rewardMap);

@@ -1,9 +1,9 @@
import { Router } from 'express';
import { Router } from "express";
import appInfo from "../services/app_info.js";
import eu from "./etapi_utils.js";

function register(router: Router) {
eu.route(router, 'get', '/etapi/app-info', (req, res, next) => {
eu.route(router, "get", "/etapi/app-info", (req, res, next) => {
res.status(200).json(appInfo);
});
}

@@ -3,21 +3,21 @@ import eu from "./etapi_utils.js";
import mappers from "./mappers.js";
import v from "./validators.js";
import utils from "../services/utils.js";
import { Router } from 'express';
import { AttachmentRow } from '../becca/entities/rows.js';
import { ValidatorMap } from './etapi-interface.js';
import { Router } from "express";
import type { AttachmentRow } from "../becca/entities/rows.js";
import type { ValidatorMap } from "./etapi-interface.js";

function register(router: Router) {
const ALLOWED_PROPERTIES_FOR_CREATE_ATTACHMENT: ValidatorMap = {
'ownerId': [v.notNull, v.isNoteId],
'role': [v.notNull, v.isString],
'mime': [v.notNull, v.isString],
'title': [v.notNull, v.isString],
'position': [v.notNull, v.isInteger],
'content': [v.isString],
ownerId: [v.notNull, v.isNoteId],
role: [v.notNull, v.isString],
mime: [v.notNull, v.isString],
title: [v.notNull, v.isString],
position: [v.notNull, v.isInteger],
content: [v.isString]
};

eu.route(router, 'post', '/etapi/attachments', (req, res, next) => {
eu.route(router, "post", "/etapi/attachments", (req, res, next) => {
const _params: Partial<AttachmentRow> = {};
eu.validateAndPatch(_params, req.body, ALLOWED_PROPERTIES_FOR_CREATE_ATTACHMENT);
const params = _params as AttachmentRow;
@@ -30,26 +30,25 @@ function register(router: Router) {
const attachment = note.saveAttachment(params);

res.status(201).json(mappers.mapAttachmentToPojo(attachment));
}
catch (e: any) {
} catch (e: any) {
throw new eu.EtapiError(500, eu.GENERIC_CODE, e.message);
}
});

eu.route(router, 'get', '/etapi/attachments/:attachmentId', (req, res, next) => {
eu.route(router, "get", "/etapi/attachments/:attachmentId", (req, res, next) => {
const attachment = eu.getAndCheckAttachment(req.params.attachmentId);

res.json(mappers.mapAttachmentToPojo(attachment));
});

const ALLOWED_PROPERTIES_FOR_PATCH = {
'role': [v.notNull, v.isString],
'mime': [v.notNull, v.isString],
'title': [v.notNull, v.isString],
'position': [v.notNull, v.isInteger],
role: [v.notNull, v.isString],
mime: [v.notNull, v.isString],
title: [v.notNull, v.isString],
position: [v.notNull, v.isInteger]
};

eu.route(router, 'patch', '/etapi/attachments/:attachmentId', (req, res, next) => {
eu.route(router, "patch", "/etapi/attachments/:attachmentId", (req, res, next) => {
const attachment = eu.getAndCheckAttachment(req.params.attachmentId);

if (attachment.isProtected) {
@@ -62,7 +61,7 @@ function register(router: Router) {
res.json(mappers.mapAttachmentToPojo(attachment));
});

eu.route(router, 'get', '/etapi/attachments/:attachmentId/content', (req, res, next) => {
eu.route(router, "get", "/etapi/attachments/:attachmentId/content", (req, res, next) => {
const attachment = eu.getAndCheckAttachment(req.params.attachmentId);

if (attachment.isProtected) {
@@ -71,15 +70,15 @@ function register(router: Router) {

const filename = utils.formatDownloadTitle(attachment.title, attachment.role, attachment.mime);

res.setHeader('Content-Disposition', utils.getContentDisposition(filename));
res.setHeader("Content-Disposition", utils.getContentDisposition(filename));

res.setHeader("Cache-Control", "no-cache, no-store, must-revalidate");
res.setHeader('Content-Type', attachment.mime);
res.setHeader("Content-Type", attachment.mime);

res.send(attachment.getContent());
});

eu.route(router, 'put', '/etapi/attachments/:attachmentId/content', (req, res, next) => {
eu.route(router, "put", "/etapi/attachments/:attachmentId/content", (req, res, next) => {
const attachment = eu.getAndCheckAttachment(req.params.attachmentId);

if (attachment.isProtected) {
@@ -91,7 +90,7 @@ function register(router: Router) {
return res.sendStatus(204);
});

eu.route(router, 'delete', '/etapi/attachments/:attachmentId', (req, res, next) => {
eu.route(router, "delete", "/etapi/attachments/:attachmentId", (req, res, next) => {
const attachment = becca.getAttachment(req.params.attachmentId);

if (!attachment) {

@@ -3,29 +3,29 @@ import eu from "./etapi_utils.js";
import mappers from "./mappers.js";
import attributeService from "../services/attributes.js";
import v from "./validators.js";
import { Router } from 'express';
import { AttributeRow } from '../becca/entities/rows.js';
import { ValidatorMap } from './etapi-interface.js';
import { Router } from "express";
import type { AttributeRow } from "../becca/entities/rows.js";
import type { ValidatorMap } from "./etapi-interface.js";

function register(router: Router) {
eu.route(router, 'get', '/etapi/attributes/:attributeId', (req, res, next) => {
eu.route(router, "get", "/etapi/attributes/:attributeId", (req, res, next) => {
const attribute = eu.getAndCheckAttribute(req.params.attributeId);

res.json(mappers.mapAttributeToPojo(attribute));
});

const ALLOWED_PROPERTIES_FOR_CREATE_ATTRIBUTE: ValidatorMap = {
'attributeId': [v.mandatory, v.notNull, v.isValidEntityId],
'noteId': [v.mandatory, v.notNull, v.isNoteId],
'type': [v.mandatory, v.notNull, v.isAttributeType],
'name': [v.mandatory, v.notNull, v.isString],
'value': [v.notNull, v.isString],
'isInheritable': [v.notNull, v.isBoolean],
'position': [v.notNull, v.isInteger]
attributeId: [v.mandatory, v.notNull, v.isValidEntityId],
noteId: [v.mandatory, v.notNull, v.isNoteId],
type: [v.mandatory, v.notNull, v.isAttributeType],
name: [v.mandatory, v.notNull, v.isString],
value: [v.notNull, v.isString],
isInheritable: [v.notNull, v.isBoolean],
position: [v.notNull, v.isInteger]
};

eu.route(router, 'post', '/etapi/attributes', (req, res, next) => {
if (req.body.type === 'relation') {
eu.route(router, "post", "/etapi/attributes", (req, res, next) => {
if (req.body.type === "relation") {
eu.getAndCheckNote(req.body.value);
}

@@ -37,27 +37,26 @@ function register(router: Router) {
const attr = attributeService.createAttribute(params);

res.status(201).json(mappers.mapAttributeToPojo(attr));
}
catch (e: any) {
} catch (e: any) {
throw new eu.EtapiError(500, eu.GENERIC_CODE, e.message);
}
});

const ALLOWED_PROPERTIES_FOR_PATCH_LABEL = {
'value': [v.notNull, v.isString],
'position': [v.notNull, v.isInteger]
value: [v.notNull, v.isString],
position: [v.notNull, v.isInteger]
};

const ALLOWED_PROPERTIES_FOR_PATCH_RELATION = {
'position': [v.notNull, v.isInteger]
position: [v.notNull, v.isInteger]
};

eu.route(router, 'patch', '/etapi/attributes/:attributeId', (req, res, next) => {
eu.route(router, "patch", "/etapi/attributes/:attributeId", (req, res, next) => {
const attribute = eu.getAndCheckAttribute(req.params.attributeId);

if (attribute.type === 'label') {
if (attribute.type === "label") {
eu.validateAndPatch(attribute, req.body, ALLOWED_PROPERTIES_FOR_PATCH_LABEL);
} else if (attribute.type === 'relation') {
} else if (attribute.type === "relation") {
eu.getAndCheckNote(req.body.value);

eu.validateAndPatch(attribute, req.body, ALLOWED_PROPERTIES_FOR_PATCH_RELATION);
@@ -68,7 +67,7 @@ function register(router: Router) {
res.json(mappers.mapAttributeToPojo(attribute));
});

eu.route(router, 'delete', '/etapi/attributes/:attributeId', (req, res, next) => {
eu.route(router, "delete", "/etapi/attributes/:attributeId", (req, res, next) => {
const attribute = becca.getAttribute(req.params.attributeId);

if (!attribute) {

@@ -2,10 +2,10 @@ import becca from "../becca/becca.js";
import eu from "./etapi_utils.js";
import passwordEncryptionService from "../services/encryption/password_encryption.js";
import etapiTokenService from "../services/etapi_tokens.js";
import { RequestHandler, Router } from 'express';
import type { RequestHandler, Router } from "express";

function register(router: Router, loginMiddleware: RequestHandler[]) {
eu.NOT_AUTHENTICATED_ROUTE(router, 'post', '/etapi/auth/login', loginMiddleware, (req, res, next) => {
eu.NOT_AUTHENTICATED_ROUTE(router, "post", "/etapi/auth/login", loginMiddleware, (req, res, next) => {
const { password, tokenName } = req.body;

if (!passwordEncryptionService.verifyPassword(password)) {
@@ -19,7 +19,7 @@ function register(router: Router, loginMiddleware: RequestHandler[]) {
});
});

eu.route(router, 'post', '/etapi/auth/logout', (req, res, next) => {
eu.route(router, "post", "/etapi/auth/logout", (req, res, next) => {
const parsed = etapiTokenService.parseAuthToken(req.headers.authorization);

if (!parsed || !parsed.etapiTokenId) {
@@ -41,4 +41,4 @@ function register(router: Router, loginMiddleware: RequestHandler[]) {

export default {
register
}
};

@@ -4,7 +4,7 @@ import eu from "./etapi_utils.js";
import backupService from "../services/backup.js";

function register(router: Router) {
eu.route(router, 'put', '/etapi/backup/:backupName', async (req, res, next) => {
eu.route(router, "put", "/etapi/backup/:backupName", async (req, res, next) => {
await backupService.backupNow(req.params.backupName);

res.sendStatus(204);

@@ -6,24 +6,24 @@ import mappers from "./mappers.js";
import BBranch from "../becca/entities/bbranch.js";
import entityChangesService from "../services/entity_changes.js";
import v from "./validators.js";
import { BranchRow } from "../becca/entities/rows.js";
import type { BranchRow } from "../becca/entities/rows.js";

function register(router: Router) {
eu.route(router, 'get', '/etapi/branches/:branchId', (req, res, next) => {
eu.route(router, "get", "/etapi/branches/:branchId", (req, res, next) => {
const branch = eu.getAndCheckBranch(req.params.branchId);

res.json(mappers.mapBranchToPojo(branch));
});

const ALLOWED_PROPERTIES_FOR_CREATE_BRANCH = {
'noteId': [v.mandatory, v.notNull, v.isNoteId],
'parentNoteId': [v.mandatory, v.notNull, v.isNoteId],
'notePosition': [v.notNull, v.isInteger],
'prefix': [v.isString],
'isExpanded': [v.notNull, v.isBoolean]
noteId: [v.mandatory, v.notNull, v.isNoteId],
parentNoteId: [v.mandatory, v.notNull, v.isNoteId],
notePosition: [v.notNull, v.isInteger],
prefix: [v.isString],
isExpanded: [v.notNull, v.isBoolean]
};

eu.route(router, 'post', '/etapi/branches', (req, res, next) => {
eu.route(router, "post", "/etapi/branches", (req, res, next) => {
const _params = {};
eu.validateAndPatch(_params, req.body, ALLOWED_PROPERTIES_FOR_CREATE_BRANCH);
const params: BranchRow = _params as BranchRow;
@@ -49,12 +49,12 @@ function register(router: Router) {
});

const ALLOWED_PROPERTIES_FOR_PATCH = {
'notePosition': [v.notNull, v.isInteger],
'prefix': [v.isString],
'isExpanded': [v.notNull, v.isBoolean]
notePosition: [v.notNull, v.isInteger],
prefix: [v.isString],
isExpanded: [v.notNull, v.isBoolean]
};

eu.route(router, 'patch', '/etapi/branches/:branchId', (req, res, next) => {
eu.route(router, "patch", "/etapi/branches/:branchId", (req, res, next) => {
const branch = eu.getAndCheckBranch(req.params.branchId);

eu.validateAndPatch(branch, req.body, ALLOWED_PROPERTIES_FOR_PATCH);
@@ -63,7 +63,7 @@ function register(router: Router) {
res.json(mappers.mapBranchToPojo(branch));
});

eu.route(router, 'delete', '/etapi/branches/:branchId', (req, res, next) => {
eu.route(router, "delete", "/etapi/branches/:branchId", (req, res, next) => {
const branch = becca.getBranch(req.params.branchId);

if (!branch) {
@@ -75,7 +75,7 @@ function register(router: Router) {
res.sendStatus(204);
});

eu.route(router, 'post', '/etapi/refresh-note-ordering/:parentNoteId', (req, res, next) => {
eu.route(router, "post", "/etapi/refresh-note-ordering/:parentNoteId", (req, res, next) => {
eu.getAndCheckNote(req.params.parentNoteId);

entityChangesService.putNoteReorderingEntityChange(req.params.parentNoteId, "etapi");

@@ -1,3 +1,3 @@
export type ValidatorFunc = (obj: unknown) => (string | undefined);
export type ValidatorFunc = (obj: unknown) => string | undefined;

export type ValidatorMap = Record<string, ValidatorFunc[]>;

@@ -26,20 +26,20 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CreateNoteDef'
|
||||
$ref: "#/components/schemas/CreateNoteDef"
|
||||
responses:
|
||||
'201':
|
||||
"201":
|
||||
description: note created
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/NoteWithBranch'
|
||||
$ref: "#/components/schemas/NoteWithBranch"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/notes:
|
||||
get:
|
||||
description: Search notes
|
||||
@@ -54,13 +54,13 @@ paths:
|
||||
examples:
|
||||
fulltext:
|
||||
summary: Fulltext search for keywords (not exact match)
|
||||
value: 'towers tolkien'
|
||||
value: "towers tolkien"
|
||||
fulltextExactMatch:
|
||||
summary: Fulltext search for exact match (notice the double quotes)
|
||||
value: '"Two Towers"'
|
||||
fulltextWithLabel:
|
||||
summary: Fulltext search for keyword AND matching label
|
||||
value: 'towers #book'
|
||||
value: "towers #book"
|
||||
- name: fastSearch
|
||||
in: query
|
||||
required: false
|
||||
@@ -80,7 +80,7 @@ paths:
|
||||
required: false
|
||||
description: search only in a subtree identified by the subtree noteId. By default whole tree is searched.
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
- name: ancestorDepth
|
||||
in: query
|
||||
required: false
|
||||
@@ -108,7 +108,7 @@ paths:
|
||||
type: string
|
||||
example:
|
||||
- title
|
||||
- '#publicationDate'
|
||||
- "#publicationDate"
|
||||
- isProtected
|
||||
- isArchived
|
||||
- dateCreated
|
||||
@@ -155,41 +155,41 @@ paths:
|
||||
type: boolean
|
||||
default: false
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: search response
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SearchResponse'
|
||||
$ref: "#/components/schemas/SearchResponse"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/notes/{noteId}:
|
||||
parameters:
|
||||
- name: noteId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
get:
|
||||
description: Returns a note identified by its ID
|
||||
operationId: getNoteById
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: note response
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Note'
|
||||
$ref: "#/components/schemas/Note"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
patch:
|
||||
description: patch a note identified by the noteId with changes in the body
|
||||
operationId: patchNoteById
|
||||
@@ -198,44 +198,44 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Note'
|
||||
$ref: "#/components/schemas/Note"
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: note updated
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Note'
|
||||
$ref: "#/components/schemas/Note"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
delete:
|
||||
description: deletes a single note based on the noteId supplied
|
||||
operationId: deleteNoteById
|
||||
responses:
|
||||
'204':
|
||||
"204":
|
||||
description: note deleted
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/notes/{noteId}/content:
|
||||
parameters:
|
||||
- name: noteId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
get:
|
||||
description: Returns note content identified by its ID
|
||||
operationId: getNoteContent
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: note content response
|
||||
content:
|
||||
text/html:
|
||||
@@ -252,7 +252,7 @@ paths:
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'204':
|
||||
"204":
|
||||
description: note content updated
|
||||
/notes/{noteId}/export:
|
||||
parameters:
|
||||
@@ -260,7 +260,7 @@ paths:
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
- name: format
|
||||
in: query
|
||||
required: false
|
||||
@@ -273,7 +273,7 @@ paths:
|
||||
description: Exports ZIP file export of a given note subtree. To export whole document, use "root" for noteId
|
||||
operationId: exportNoteSubtree
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: export ZIP file
|
||||
content:
|
||||
application/zip:
|
||||
@@ -285,37 +285,37 @@ paths:
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/notes/{noteId}/import:
|
||||
parameters:
|
||||
- name: noteId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
post:
|
||||
description: Imports ZIP file into a given note.
|
||||
operationId: importZip
|
||||
responses:
|
||||
'201':
|
||||
"201":
|
||||
description: note created
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/NoteWithBranch'
|
||||
$ref: "#/components/schemas/NoteWithBranch"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/notes/{noteId}/revision:
|
||||
parameters:
|
||||
- name: noteId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
- name: format
|
||||
in: query
|
||||
required: false
|
||||
@@ -328,14 +328,14 @@ paths:
|
||||
description: Create a note revision for the given note
|
||||
operationId: createRevision
|
||||
responses:
|
||||
'204':
|
||||
"204":
|
||||
description: revision has been created
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/branches:
|
||||
post:
|
||||
description: >
|
||||
@@ -348,49 +348,49 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Branch'
|
||||
$ref: "#/components/schemas/Branch"
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: branch updated (branch between parent note and child note already existed)
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Branch'
|
||||
'201':
|
||||
$ref: "#/components/schemas/Branch"
|
||||
"201":
|
||||
description: branch created
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Branch'
|
||||
$ref: "#/components/schemas/Branch"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/branches/{branchId}:
|
||||
parameters:
|
||||
- name: branchId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
get:
|
||||
description: Returns a branch identified by its ID
|
||||
operationId: getBranchById
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: branch response
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Branch'
|
||||
$ref: "#/components/schemas/Branch"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
patch:
|
||||
description: patch a branch identified by the branchId with changes in the body. Only prefix and notePosition can be updated. If you want to update other properties, you need to delete the old branch and create a new one.
|
||||
operationId: patchBranchById
|
||||
@@ -399,34 +399,34 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Branch'
|
||||
$ref: "#/components/schemas/Branch"
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: branch updated
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Branch'
|
||||
$ref: "#/components/schemas/Branch"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
delete:
|
||||
description: >
|
||||
deletes a branch based on the branchId supplied. If this is the last branch of the (child) note,
|
||||
then the note is deleted as well.
|
||||
operationId: deleteBranchById
|
||||
responses:
|
||||
'204':
|
||||
"204":
|
||||
description: branch deleted
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/attachments:
|
||||
post:
|
||||
description: create an attachment
|
||||
@@ -436,43 +436,43 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CreateAttachment'
|
||||
$ref: "#/components/schemas/CreateAttachment"
|
||||
responses:
|
||||
'201':
|
||||
"201":
|
||||
description: attachment created
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Attachment'
|
||||
$ref: "#/components/schemas/Attachment"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/attachments/{attachmentId}:
|
||||
parameters:
|
||||
- name: attachmentId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
get:
|
||||
description: Returns an attachment identified by its ID
|
||||
operationId: getAttachmentById
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: attachment response
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Attachment'
|
||||
$ref: "#/components/schemas/Attachment"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
patch:
|
||||
description: patch an attachment identified by the attachmentId with changes in the body. Only role, mime, title, and position are patchable.
|
||||
operationId: patchAttachmentById
|
||||
@@ -481,44 +481,44 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Attachment'
|
||||
$ref: "#/components/schemas/Attachment"
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: attribute updated
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Attachment'
|
||||
$ref: "#/components/schemas/Attachment"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
delete:
|
||||
description: deletes an attachment based on the attachmentId supplied.
|
||||
operationId: deleteAttachmentById
|
||||
responses:
|
||||
'204':
|
||||
"204":
|
||||
description: attachment deleted
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/attachments/{attachmentId}/content:
|
||||
parameters:
|
||||
- name: attachmentId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
get:
|
||||
description: Returns attachment content identified by its ID
|
||||
operationId: getAttachmentContent
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: attachment content response
|
||||
content:
|
||||
text/html:
|
||||
@@ -535,7 +535,7 @@ paths:
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'204':
|
||||
"204":
|
||||
description: attachment content updated
|
||||
/attributes:
|
||||
post:
|
||||
@@ -546,43 +546,43 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Attribute'
|
||||
$ref: "#/components/schemas/Attribute"
|
||||
responses:
|
||||
'201':
|
||||
"201":
|
||||
description: attribute created
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Attribute'
|
||||
$ref: "#/components/schemas/Attribute"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/attributes/{attributeId}:
|
||||
parameters:
|
||||
- name: attributeId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
get:
|
||||
description: Returns an attribute identified by its ID
|
||||
operationId: getAttributeById
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: attribute response
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Attribute'
|
||||
$ref: "#/components/schemas/Attribute"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
patch:
|
||||
description: patch an attribute identified by the attributeId with changes in the body. For labels, only value and position can be updated. For relations, only position can be updated. If you want to modify other properties, you need to delete the old attribute and create a new one.
|
||||
operationId: patchAttributeById
|
||||
@@ -591,39 +591,39 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Attribute'
|
||||
$ref: "#/components/schemas/Attribute"
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: attribute updated
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Attribute'
|
||||
$ref: "#/components/schemas/Attribute"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
delete:
|
||||
description: deletes an attribute based on the attributeId supplied.
|
||||
operationId: deleteAttributeById
|
||||
responses:
|
||||
'204':
|
||||
"204":
|
||||
description: attribute deleted
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/refresh-note-ordering/{parentNoteId}:
|
||||
parameters:
|
||||
- name: parentNoteId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
post:
|
||||
description: >
|
||||
notePositions in branches are not automatically pushed to connected clients and need a specific instruction.
|
||||
@@ -631,14 +631,14 @@ paths:
|
||||
Note that you need to supply "parentNoteId" of branch(es) with changed positions.
|
||||
operationId: postRefreshNoteOrdering
|
||||
responses:
|
||||
'204':
|
||||
"204":
|
||||
description: note ordering will be asynchronously updated in all connected clients
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/inbox/{date}:
|
||||
get:
|
||||
description: >
|
||||
@@ -654,18 +654,18 @@ paths:
|
||||
format: date
|
||||
example: 2022-02-22
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: inbox note
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Note'
|
||||
$ref: "#/components/schemas/Note"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/calendar/days/{date}:
|
||||
get:
|
||||
description: returns a day note for a given date. Gets created if doesn't exist.
|
||||
@@ -679,18 +679,18 @@ paths:
|
||||
format: date
|
||||
example: 2022-02-22
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: day note
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Note'
|
||||
$ref: "#/components/schemas/Note"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/calendar/weeks/{date}:
|
||||
get:
|
||||
description: returns a week note for a given date. Gets created if doesn't exist.
|
||||
@@ -704,18 +704,18 @@ paths:
|
||||
format: date
|
||||
example: 2022-02-22
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: week note
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Note'
|
||||
$ref: "#/components/schemas/Note"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/calendar/months/{month}:
|
||||
get:
|
||||
description: returns a week note for a given date. Gets created if doesn't exist.
|
||||
@@ -726,21 +726,21 @@ paths:
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
pattern: '[0-9]{4}-[0-9]{2}'
|
||||
pattern: "[0-9]{4}-[0-9]{2}"
|
||||
example: 2022-02
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: month note
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Note'
|
||||
$ref: "#/components/schemas/Note"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/calendar/years/{year}:
|
||||
get:
|
||||
description: returns a week note for a given date. Gets created if doesn't exist.
|
||||
@@ -751,21 +751,21 @@ paths:
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
pattern: '[0-9]{4}-[0-9]{2}'
|
||||
pattern: "[0-9]{4}-[0-9]{2}"
|
||||
example: 2022-02
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: year note
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Note'
|
||||
$ref: "#/components/schemas/Note"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/auth/login:
|
||||
post:
|
||||
description: get an ETAPI token based on password for further use with ETAPI
|
||||
@@ -781,7 +781,7 @@ paths:
|
||||
type: string
|
||||
description: user's password used to e.g. login to Trilium server and/or protect notes
|
||||
responses:
|
||||
'201':
|
||||
"201":
|
||||
description: auth token
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
@@ -790,44 +790,44 @@ paths:
|
||||
authToken:
|
||||
type: string
|
||||
example: Bc4bFn0Ffiok_4NpbVCDnFz7B2WU+pdhW8B5Ne3DiR5wXrEyqdjgRIsk=
|
||||
'429':
|
||||
"429":
|
||||
description: Client IP has been blacklisted because too many requests (possibly failed authentications) were made within a short time frame, try again later
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/auth/logout:
|
||||
post:
|
||||
description: logout (delete/deactivate) an ETAPI token
|
||||
operationId: logout
|
||||
responses:
|
||||
'204':
|
||||
"204":
|
||||
description: logout successful
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/app-info:
|
||||
get:
|
||||
description: returns information about the running Trilium instance
|
||||
operationId: getAppInfo
|
||||
responses:
|
||||
'200':
|
||||
"200":
|
||||
description: app info
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/AppInfo'
|
||||
$ref: "#/components/schemas/AppInfo"
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
/backup/{backupName}:
|
||||
parameters:
|
||||
- name: backupName
|
||||
@@ -835,19 +835,19 @@ paths:
|
||||
required: true
|
||||
description: If the backupName is e.g. "now", then the backup will be written to "backup-now.db" file
|
||||
schema:
|
||||
$ref: '#/components/schemas/StringId'
|
||||
$ref: "#/components/schemas/StringId"
|
||||
put:
|
||||
description: Create a database backup under a given name
|
||||
operationId: createBackup
|
||||
responses:
|
||||
'204':
|
||||
"204":
|
||||
description: backup has been created
|
||||
default:
|
||||
description: unexpected error
|
||||
content:
|
||||
application/json; charset=utf-8:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
$ref: "#/components/schemas/Error"
|
||||
components:
|
||||
securitySchemes:
|
||||
EtapiTokenAuth:
|
||||
@@ -872,7 +872,7 @@ components:
|
||||
- content
|
||||
properties:
|
||||
parentNoteId:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
description: Note ID of the parent note in the tree
|
||||
title:
|
||||
type: string
|
||||
@@ -908,22 +908,22 @@ components:
|
||||
type: boolean
|
||||
description: true if this note (as a folder) should appear expanded
|
||||
noteId:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
description: DON'T specify unless you want to force a specific noteId
|
||||
branchId:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
description: DON'T specify unless you want to force a specific branchId
|
||||
dateCreated:
|
||||
$ref: '#/components/schemas/LocalDateTime'
|
||||
$ref: "#/components/schemas/LocalDateTime"
|
||||
description: Local timestap of the note creation. Specify only if you want to override the default (current datetime in the current timezone/offset).
|
||||
utcDateCreated:
|
||||
$ref: '#/components/schemas/UtcDateTime'
|
||||
$ref: "#/components/schemas/UtcDateTime"
|
||||
description: UTC timestap of the note creation. Specify only if you want to override the default (current datetime).
|
||||
Note:
|
||||
type: object
|
||||
properties:
|
||||
noteId:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
readOnly: true
|
||||
title:
|
||||
type: string
|
||||
@@ -939,42 +939,42 @@ components:
|
||||
type: string
|
||||
description: ID of the blob object which effectively serves as a content hash
|
||||
attributes:
|
||||
$ref: '#/components/schemas/AttributeList'
|
||||
$ref: "#/components/schemas/AttributeList"
|
||||
readOnly: true
|
||||
parentNoteIds:
|
||||
$ref: '#/components/schemas/EntityIdList'
|
||||
$ref: "#/components/schemas/EntityIdList"
|
||||
readOnly: true
|
||||
childNoteIds:
|
||||
$ref: '#/components/schemas/EntityIdList'
|
||||
$ref: "#/components/schemas/EntityIdList"
|
||||
readOnly: true
|
||||
parentBranchIds:
|
||||
$ref: '#/components/schemas/EntityIdList'
|
||||
$ref: "#/components/schemas/EntityIdList"
|
||||
readOnly: true
|
||||
childBranchIds:
|
||||
$ref: '#/components/schemas/EntityIdList'
|
||||
$ref: "#/components/schemas/EntityIdList"
|
||||
readOnly: true
|
||||
dateCreated:
|
||||
$ref: '#/components/schemas/LocalDateTime'
|
||||
$ref: "#/components/schemas/LocalDateTime"
|
||||
dateModified:
|
||||
$ref: '#/components/schemas/LocalDateTime'
|
||||
$ref: "#/components/schemas/LocalDateTime"
|
||||
readOnly: true
|
||||
utcDateCreated:
|
||||
$ref: '#/components/schemas/UtcDateTime'
|
||||
$ref: "#/components/schemas/UtcDateTime"
|
||||
utcDateModified:
|
||||
$ref: '#/components/schemas/UtcDateTime'
|
||||
$ref: "#/components/schemas/UtcDateTime"
|
||||
readOnly: true
|
||||
Branch:
|
||||
type: object
|
||||
description: Branch places the note into the tree, it represents the relationship between a parent note and child note
|
||||
properties:
|
||||
branchId:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
noteId:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
readOnly: true
|
||||
description: identifies the child note
|
||||
parentNoteId:
|
||||
$ref: '#/components/schemas/EntityId'
|
||||
$ref: "#/components/schemas/EntityId"
|
||||
readOnly: true
|
||||
description: identifies the parent note
|
||||
prefix:
|
||||
@@ -985,24 +985,24 @@ components:
|
||||
isExpanded:
|
||||
type: boolean
|
||||
utcDateModified:
|
||||
$ref: '#/components/schemas/UtcDateTime'
|
||||
$ref: "#/components/schemas/UtcDateTime"
|
||||
readOnly: true
|
||||
NoteWithBranch:
type: object
properties:
note:
$ref: '#/components/schemas/Note'
$ref: "#/components/schemas/Note"
branch:
$ref: '#/components/schemas/Branch'
$ref: "#/components/schemas/Branch"
Attachment:
type: object
description: Attachment is owned by a note and has a title and content
properties:
attachmentId:
$ref: '#/components/schemas/EntityId'
$ref: "#/components/schemas/EntityId"
readOnly: true
ownerId:
$ref: '#/components/schemas/EntityId'
$ref: "#/components/schemas/EntityId"
description: identifies the owner of the attachment; it is either a noteId or a revisionId
role:
type: string
@@ -1017,13 +1017,13 @@ components:
type: string
description: ID of the blob object which effectively serves as a content hash
dateModified:
$ref: '#/components/schemas/LocalDateTime'
$ref: "#/components/schemas/LocalDateTime"
readOnly: true
utcDateModified:
$ref: '#/components/schemas/UtcDateTime'
$ref: "#/components/schemas/UtcDateTime"
readOnly: true
utcDateScheduledForErasureSince:
$ref: '#/components/schemas/UtcDateTime'
$ref: "#/components/schemas/UtcDateTime"
readOnly: true
contentLength:
type: integer
@@ -1032,7 +1032,7 @@ components:
type: object
properties:
ownerId:
$ref: '#/components/schemas/EntityId'
$ref: "#/components/schemas/EntityId"
description: identifies the owner of the attachment; it is either a noteId or a revisionId
role:
type: string
@@ -1050,9 +1050,9 @@ components:
description: Attribute (Label, Relation) is a key-value record attached to a note.
properties:
attributeId:
$ref: '#/components/schemas/EntityId'
$ref: "#/components/schemas/EntityId"
noteId:
$ref: '#/components/schemas/EntityId'
$ref: "#/components/schemas/EntityId"
readOnly: true
description: identifies the note which owns the attribute
type:
@@ -1070,12 +1070,12 @@ components:
isInheritable:
type: boolean
utcDateModified:
$ref: '#/components/schemas/UtcDateTime'
$ref: "#/components/schemas/UtcDateTime"
readOnly: true
AttributeList:
type: array
items:
$ref: '#/components/schemas/Attribute'
$ref: "#/components/schemas/Attribute"
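The same Attribute shape covers both labels and relations; only type changes, and for a relation the value is the target note's ID. A sketch of the two variants as they would appear in a request body; the name and value fields sit in the part of the schema elided by the hunk above, and the attribute-creation endpoint itself is not shown here:

// Both objects are illustrative; attributeId and utcDateModified are read-only and therefore omitted.
const labelAttribute = {
    noteId: "evnnmvHTCgIn", // note the label is attached to
    type: "label",
    name: "todo",
    value: "high-priority",
    isInheritable: false
};

const relationAttribute = {
    noteId: "evnnmvHTCgIn",
    type: "relation",
    name: "template",
    value: "abcd1234efgh", // for relations, value holds the target note's EntityId
    isInheritable: false
};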
SearchResponse:
type: object
required:
@@ -1084,22 +1084,22 @@ components:
results:
type: array
items:
$ref: '#/components/schemas/Note'
$ref: "#/components/schemas/Note"
debugInfo:
type: object
description: debugging info on parsing the search query, enabled with the &debug=true parameter
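SearchResponse is what the search endpoint returns: the matched notes plus optional parser diagnostics. A hedged sketch of a call, reusing ETAPI_BASE and ETAPI_TOKEN from earlier and assuming a GET /etapi/notes?search=... endpoint; only the response shape (results, debugInfo) comes from the schema above:

async function searchNotes(query: string) {
    // &debug=true asks the server to include debugInfo in the response
    const url = `${ETAPI_BASE}/notes?search=${encodeURIComponent(query)}&debug=true`;
    const response = await fetch(url, { headers: { Authorization: ETAPI_TOKEN } });
    const body = await response.json();
    console.log(`${body.results.length} notes matched`);
    console.log(body.debugInfo); // only populated because &debug=true was passed
    return body.results; // array of Note objects
}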
EntityId:
type: string
pattern: '[a-zA-Z0-9_]{4,32}'
pattern: "[a-zA-Z0-9_]{4,32}"
example: evnnmvHTCgIn
StringId:
type: string
pattern: '[a-zA-Z0-9_]{1,32}'
pattern: "[a-zA-Z0-9_]{1,32}"
example: my_ID
EntityIdList:
type: array
items:
$ref: '#/components/schemas/EntityId'
$ref: "#/components/schemas/EntityId"
LocalDateTime:
type: string
pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}[\+\-][0-9]{4}'

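The LocalDateTime pattern expects local time with millisecond precision and a numeric offset without a colon (for example 2024-05-17 14:03:09.123+0200), which is not what Date.prototype.toISOString() produces. A small formatter that satisfies the regex above:

// Formats a Date as YYYY-MM-DD HH:mm:ss.SSS±HHMM in local time, matching the LocalDateTime pattern.
function toLocalDateTime(date: Date): string {
    const pad = (n: number, width = 2) => String(n).padStart(width, "0");
    const offsetMinutes = -date.getTimezoneOffset(); // e.g. +120 for UTC+2
    const sign = offsetMinutes >= 0 ? "+" : "-";
    const abs = Math.abs(offsetMinutes);
    const offset = `${sign}${pad(Math.floor(abs / 60))}${pad(abs % 60)}`;
    return (
        `${date.getFullYear()}-${pad(date.getMonth() + 1)}-${pad(date.getDate())} ` +
        `${pad(date.getHours())}:${pad(date.getMinutes())}:${pad(date.getSeconds())}` +
        `.${pad(date.getMilliseconds(), 3)}${offset}`
    );
}
// toLocalDateTime(new Date()) -> e.g. "2024-05-17 14:03:09.123+0200" (actual value depends on the local clock)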
@@ -4,9 +4,9 @@ import log from "../services/log.js";
import becca from "../becca/becca.js";
import etapiTokenService from "../services/etapi_tokens.js";
import config from "../services/config.js";
import { NextFunction, Request, RequestHandler, Response, Router } from 'express';
import { ValidatorMap } from './etapi-interface.js';
import { ApiRequestHandler } from "../routes/routes.js";
import type { NextFunction, Request, RequestHandler, Response, Router } from "express";
import type { ValidatorMap } from "./etapi-interface.js";
import type { ApiRequestHandler } from "../routes/routes.js";
const GENERIC_CODE = "GENERIC";

type HttpMethod = "all" | "get" | "post" | "put" | "delete" | "patch" | "options" | "head";
@@ -30,20 +30,21 @@ class EtapiError extends Error {

function sendError(res: Response, statusCode: number, code: string, message: string) {
return res
.set('Content-Type', 'application/json')
.set("Content-Type", "application/json")
.status(statusCode)
.send(JSON.stringify({
"status": statusCode,
"code": code,
"message": message
}));
.send(
JSON.stringify({
status: statusCode,
code: code,
message: message
})
);
}

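The reformatted sendError keeps the wire format unchanged: a JSON body with status, code and message, sent with Content-Type: application/json. For example, the 404 raised by getAndCheckNote further down would serialize roughly as follows (values illustrative, not taken from a real run):

const exampleErrorBody = {
    status: 404,
    code: "NOTE_NOT_FOUND",
    message: "Note 'evnnmvHTCgIn' not found."
};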
function checkEtapiAuth(req: Request, res: Response, next: NextFunction) {
if (noAuthentication || etapiTokenService.isValidAuthHeader(req.headers.authorization)) {
next();
}
else {
} else {
sendError(res, 401, "NOT_AUTHENTICATED", "Not authenticated");
}
}
@@ -54,8 +55,8 @@ function processRequest(req: Request, res: Response, routeHandler: ApiRequestHan
cls.namespace.bindEmitter(res);

cls.init(() => {
cls.set('componentId', "etapi");
cls.set('localNowDateTime', req.headers['trilium-local-now-datetime']);
cls.set("componentId", "etapi");
cls.set("localNowDateTime", req.headers["trilium-local-now-datetime"]);

const cb = () => routeHandler(req, res, next);

@@ -85,8 +86,7 @@ function getAndCheckNote(noteId: string) {

if (note) {
return note;
}
else {
} else {
throw new EtapiError(404, "NOTE_NOT_FOUND", `Note '${noteId}' not found.`);
}
}
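getAndCheckNote and its attachment/branch/attribute siblings let route handlers assume the entity exists; the EtapiError they throw carries the HTTP status and code that sendError eventually writes out. A hedged sketch of a handler built on top of it, assuming the express Request/Response types imported at the top of this file and that thrown EtapiErrors are caught by the surrounding route wrapper (not shown in this excerpt):

// Hypothetical handler: the route registration and the exact note fields used are illustrative.
function getNoteTitleHandler(req: Request, res: Response) {
    const note = getAndCheckNote(req.params.noteId); // throws EtapiError(404, "NOTE_NOT_FOUND", ...) if missing
    res.json({ noteId: note.noteId, title: note.title });
}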
@@ -96,8 +96,7 @@ function getAndCheckAttachment(attachmentId: string) {

if (attachment) {
return attachment;
}
else {
} else {
throw new EtapiError(404, "ATTACHMENT_NOT_FOUND", `Attachment '${attachmentId}' not found.`);
}
}
@@ -107,8 +106,7 @@ function getAndCheckBranch(branchId: string) {

if (branch) {
return branch;
}
else {
} else {
throw new EtapiError(404, "BRANCH_NOT_FOUND", `Branch '${branchId}' not found.`);
}
}
@@ -118,8 +116,7 @@ function getAndCheckAttribute(attributeId: string) {

if (attribute) {
return attribute;
}
else {
} else {
throw new EtapiError(404, "ATTRIBUTE_NOT_FOUND", `Attribute '${attributeId}' not found.`);
}
}
@@ -128,8 +125,7 @@ function validateAndPatch(target: any, source: any, allowedProperties: Validator
for (const key of Object.keys(source)) {
if (!(key in allowedProperties)) {
throw new EtapiError(400, "PROPERTY_NOT_ALLOWED", `Property '${key}' is not allowed for this method.`);
}
else {
} else {
for (const validator of allowedProperties[key]) {
const validationResult = validator(source[key]);

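validateAndPatch rejects unknown properties outright and runs every allowed property through its validators before the patch is applied. A minimal sketch of what a ValidatorMap entry might look like, assuming (from the validationResult line above) that a validator returns an error message for bad input and undefined for acceptable input; the property names are illustrative, not taken from this excerpt:

// Hypothetical validator map for PATCHing a branch.
type Validator = (value: unknown) => string | undefined;

const ALLOWED_BRANCH_PATCH_PROPERTIES: Record<string, Validator[]> = {
    prefix: [
        (value) => (typeof value === "string" || value === null ? undefined : "'prefix' must be a string or null")
    ],
    notePosition: [
        (value) => (typeof value === "number" && Number.isInteger(value) ? undefined : "'notePosition' must be an integer")
    ],
    isExpanded: [
        (value) => (typeof value === "boolean" ? undefined : "'isExpanded' must be a boolean")
    ]
};
// validateAndPatch(branch, req.body, ALLOWED_BRANCH_PATCH_PROPERTIES) would then reject any other property with a 400.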
@@ -157,4 +153,4 @@ export default {
getAndCheckBranch,
getAndCheckAttribute,
getAndCheckAttachment
}
};

Some files were not shown because too many files have changed in this diff.