Mirror of https://github.com/TriliumNext/Notes.git, synced 2025-07-27 18:12:29 +08:00

chore(server): fix some type errors

commit ba7c93967e (parent 05c4721bd5)
@@ -251,7 +251,7 @@ export default class Becca {
     getAllNoteSet() {
         // caching this since it takes 10s of milliseconds to fill this initial NoteSet for many notes
         if (!this.allNoteSetCache) {
-            const allNotes = [];
+            const allNotes: BNote[] = [];

             for (const noteId in this.notes) {
                 const note = this.notes[noteId];
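The change above is the pattern repeated throughout this commit: an empty array literal gets an explicit element type instead of leaving the compiler to infer one. A minimal sketch of why that matters, assuming strict/noImplicitAny compiler settings (the identifiers below are illustrative, not project code):

    // Untyped, the literal becomes an "evolving" any[] and strict settings may
    // report it as an implicit any where its element type is never determined:
    // const ids = [];

    // Annotated, every push is checked against the declared element type:
    const ids: string[] = [];
    ids.push("root");   // OK
    // ids.push(42);    // compile-time error: number is not assignable to string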
@@ -76,7 +76,7 @@ function getNoteTitleArrayForPath(notePathArray: string[]) {
         return [getNoteTitle(notePathArray[0])];
     }

-    const titles = [];
+    const titles: string[] = [];

     let parentNoteId = "root";
     let hoistedNotePassed = false;
@@ -388,7 +388,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
             }
         }

-        const templateAttributes = [];
+        const templateAttributes: BAttribute[] = [];

         for (const ownedAttr of parentAttributes) {
             // parentAttributes so we process also inherited templates
@@ -254,7 +254,7 @@ function hasConnectingRelation(sourceNote: BNote, targetNote: BNote) {
 }

 async function findSimilarNotes(noteId: string): Promise<SimilarNote[] | undefined> {
-    const results = [];
+    const results: SimilarNote[] = [];
     let i = 0;

     const baseNote = becca.notes[noteId];
@@ -12,6 +12,11 @@ interface Backlink {
     excerpts?: string[];
 }

+interface TreeLink {
+    sourceNoteId: string;
+    targetNoteId: string;
+}
+
 function buildDescendantCountMap(noteIdsToCount: string[]) {
     if (!Array.isArray(noteIdsToCount)) {
         throw new Error("noteIdsToCount: type error");
@@ -50,7 +55,7 @@ function getNeighbors(note: BNote, depth: number): string[] {
         return [];
     }

-    const retNoteIds = [];
+    const retNoteIds: string[] = [];

     function isIgnoredRelation(relation: BAttribute) {
         return ["relationMapLink", "template", "inherit", "image", "ancestor"].includes(relation.name);
@@ -196,7 +201,7 @@ function getTreeMap(req: Request) {
     const noteIds = new Set<string>();
     notes.forEach(([noteId]) => noteId && noteIds.add(noteId));

-    const links = [];
+    const links: TreeLink[] = [];

     for (const { parentNoteId, childNoteId } of subtree.relationships) {
         if (!noteIds.has(parentNoteId) || !noteIds.has(childNoteId)) {
@@ -246,7 +251,7 @@ function findExcerpts(sourceNote: BNote, referencedNoteId: string) {
     const html = sourceNote.getContent();
     const document = new JSDOM(html).window.document;

-    const excerpts = [];
+    const excerpts: string[] = [];

     removeImages(document);

@@ -9,6 +9,12 @@ import { changeLanguage, getLocales } from "../../services/i18n.js";
 import type { OptionNames } from "@triliumnext/commons";
 import config from "../../services/config.js";

+interface UserTheme {
+    val: string; // value of the theme, used in the URL
+    title: string; // title of the theme, displayed in the UI
+    noteId: string; // ID of the note containing the theme
+}
+
 // options allowed to be updated directly in the Options dialog
 const ALLOWED_OPTIONS = new Set<OptionNames>([
     "eraseEntitiesAfterTimeInSeconds",
@@ -177,7 +183,7 @@ function update(name: string, value: string) {

 function getUserThemes() {
     const notes = searchService.searchNotes("#appTheme", { ignoreHoistedNote: true });
-    const ret = [];
+    const ret: UserTheme[] = [];

     for (const note of notes) {
         let value = note.getOwnedLabelValue("appTheme");
@@ -21,7 +21,7 @@ interface RecentChangeRow {
 function getRecentChanges(req: Request) {
     const { ancestorNoteId } = req.params;

-    let recentChanges = [];
+    let recentChanges: RecentChangeRow[] = [];

     const revisionRows = sql.getRows<RecentChangeRow>(`
         SELECT
@@ -1,6 +1,6 @@
 "use strict";

-import scriptService from "../../services/script.js";
+import scriptService, { type Bundle } from "../../services/script.js";
 import attributeService from "../../services/attributes.js";
 import becca from "../../becca/becca.js";
 import syncService from "../../services/sync.js";
@@ -54,7 +54,7 @@ function run(req: Request) {
 function getBundlesWithLabel(label: string, value?: string) {
     const notes = attributeService.getNotesWithLabel(label, value);

-    const bundles = [];
+    const bundles: Bundle[] = [];

     for (const note of notes) {
         const bundle = scriptService.getScriptBundleForFrontend(note);
@@ -97,7 +97,7 @@ function getRelationBundles(req: Request) {
     const targetNoteIds = filtered.map((relation) => relation.value);
     const uniqueNoteIds = Array.from(new Set(targetNoteIds));

-    const bundles = [];
+    const bundles: Bundle[] = [];

     for (const noteId of uniqueNoteIds) {
         const note = becca.getNoteOrThrow(noteId);
@@ -6,9 +6,14 @@ import type { Request } from "express";
 import ValidationError from "../../errors/validation_error.js";
 import { safeExtractMessageAndStackFromError } from "../../services/utils.js";

+interface Table {
+    name: string;
+    columns: unknown[];
+}
+
 function getSchema() {
-    const tableNames = sql.getColumn(/*sql*/`SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name`);
-    const tables = [];
+    const tableNames = sql.getColumn<string>(/*sql*/`SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name`);
+    const tables: Table[] = [];

     for (const tableName of tableNames) {
         tables.push({
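Besides typing the `tables` array, the hunk above passes an explicit type argument to `sql.getColumn`, so `tableNames` becomes a `string[]` rather than an untyped result. A simplified sketch of how a generic column getter propagates that type (hypothetical helper, not the project's actual sql service):

    type Row = Record<string, unknown>;

    // The caller declares what the selected column holds; the cast lives in one place.
    function getColumn<T>(rows: Row[], column: string): T[] {
        return rows.map((row) => row[column] as T);
    }

    const rows: Row[] = [{ name: "notes" }, { name: "branches" }];
    const tableNames = getColumn<string>(rows, "name");
    for (const name of tableNames) {
        console.log(name.toUpperCase());   // string methods type-check without casts
    }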
@@ -31,7 +36,7 @@ function execute(req: Request) {
     const queries = content.split("\n---");

     try {
-        const results = [];
+        const results: unknown[] = [];

         for (let query of queries) {
             query = query.trim();
@@ -5,6 +5,7 @@ import log from "../../services/log.js";
 import NotFoundError from "../../errors/not_found_error.js";
 import type { Request } from "express";
 import type BNote from "../../becca/entities/bnote.js";
+import type { AttributeRow, BranchRow, NoteRow } from "@triliumnext/commons";

 function getNotesAndBranchesAndAttributes(_noteIds: string[] | Set<string>) {
     const noteIds = new Set(_noteIds);
@@ -53,7 +54,7 @@ function getNotesAndBranchesAndAttributes(_noteIds: string[] | Set<string>) {
         collectEntityIds(note);
     }

-    const notes = [];
+    const notes: NoteRow[] = [];

     for (const noteId of collectedNoteIds) {
         const note = becca.notes[noteId];
@@ -68,7 +69,7 @@ function getNotesAndBranchesAndAttributes(_noteIds: string[] | Set<string>) {
         });
     }

-    const branches = [];
+    const branches: BranchRow[] = [];

     if (noteIds.has("root")) {
         branches.push({
@@ -99,7 +100,7 @@ function getNotesAndBranchesAndAttributes(_noteIds: string[] | Set<string>) {
         });
     }

-    const attributes = [];
+    const attributes: AttributeRow[] = [];

     for (const attributeId of collectedAttributeIds) {
         const attribute = becca.attributes[attributeId];
@@ -7,6 +7,11 @@ import eraseService from "./erase.js";

 type SectorHash = Record<string, string>;

+interface FailedCheck {
+    entityName: string;
+    sector: string[1];
+}
+
 function getEntityHashes() {
     // blob erasure is not synced, we should check before each sync if there's some blob to erase
     eraseService.eraseUnusedBlobs();
@@ -56,7 +61,7 @@

 function checkContentHashes(otherHashes: Record<string, SectorHash>) {
     const entityHashes = getEntityHashes();
-    const failedChecks = [];
+    const failedChecks: FailedCheck[] = [];

     for (const entityName in entityHashes) {
         const thisSectorHashes: SectorHash = entityHashes[entityName] || {};
@@ -160,7 +160,7 @@ function extractJsStructure(content: string): string {
     }

     // Look for class declarations
-    const classes = [];
+    const classes: string[] = [];
     for (let i = 0; i < lines.length; i++) {
         const line = lines[i].trim();
         if (line.startsWith('class ') || line.includes(' class ')) {
@@ -173,7 +173,7 @@ function extractJsStructure(content: string): string {
     }

     // Look for function declarations
-    const functions = [];
+    const functions: string[] = [];
     for (let i = 0; i < lines.length; i++) {
         const line = lines[i].trim();
         if (line.startsWith('function ') ||
@@ -212,7 +212,7 @@ function extractPythonStructure(content: string): string {
     }

     // Look for class declarations
-    const classes = [];
+    const classes: string[] = [];
     for (let i = 0; i < lines.length; i++) {
         const line = lines[i].trim();
         if (line.startsWith('class ')) {
@@ -225,7 +225,7 @@ function extractPythonStructure(content: string): string {
     }

     // Look for function declarations
-    const functions = [];
+    const functions: string[] = [];
     for (let i = 0; i < lines.length; i++) {
         const line = lines[i].trim();
         if (line.startsWith('def ')) {
@@ -263,7 +263,7 @@ function extractClassBasedStructure(content: string): string {
     }

     // Look for class declarations
-    const classes = [];
+    const classes: string[] = [];
     for (let i = 0; i < lines.length; i++) {
         const line = lines[i].trim();
         if (line.match(/^(public|private|protected)?\s*(class|interface|enum)\s+\w+/)) {
@@ -276,7 +276,7 @@ function extractClassBasedStructure(content: string): string {
     }

     // Look for method declarations
-    const methods = [];
+    const methods: string[] = [];
     for (let i = 0; i < lines.length; i++) {
         const line = lines[i].trim();
         if (line.match(/^(public|private|protected)?\s*(static)?\s*[\w<>[\]]+\s+\w+\s*\(/)) {
@@ -319,7 +319,7 @@ function extractGoStructure(content: string): string {
     }

     // Look for type declarations (structs, interfaces)
-    const types = [];
+    const types: string[] = [];
     for (let i = 0; i < lines.length; i++) {
         const line = lines[i].trim();
         if (line.startsWith('type ') && (line.includes(' struct ') || line.includes(' interface '))) {
@@ -332,7 +332,7 @@ function extractGoStructure(content: string): string {
     }

     // Look for function declarations
-    const functions = [];
+    const functions: string[] = [];
     for (let i = 0; i < lines.length; i++) {
         const line = lines[i].trim();
         if (line.startsWith('func ')) {
@@ -366,7 +366,7 @@ function extractRustStructure(content: string): string {
     }

     // Look for struct/enum/trait declarations
-    const types = [];
+    const types: string[] = [];
     for (let i = 0; i < lines.length; i++) {
         const line = lines[i].trim();
         if (line.startsWith('struct ') || line.startsWith('enum ') || line.startsWith('trait ')) {
@@ -379,8 +379,8 @@ function extractRustStructure(content: string): string {
     }

     // Look for function/impl declarations
-    const functions = [];
-    const impls = [];
+    const functions: string[] = [];
+    const impls: string[] = [];

     for (let i = 0; i < lines.length; i++) {
         const line = lines[i].trim();
@@ -198,7 +198,7 @@ export async function semanticChunking(

     // Try to split on headers first
     const headerPattern = /#{1,6}\s+.+|<h[1-6][^>]*>.*?<\/h[1-6]>/g;
-    const sections = [];
+    const sections: string[] = [];

     let lastIndex = 0;
     let match;
@@ -439,7 +439,7 @@ export class QueryDecompositionTool {

         // If no pattern match, try to extract noun phrases
         const words = query.split(/\s+/);
-        const potentialEntities = [];
+        const potentialEntities: string[] = [];
         let currentPhrase = '';

         for (const word of words) {
@@ -246,7 +246,7 @@ export abstract class BaseEmbeddingProvider {
      */
     protected generateNoteContextText(context: NoteEmbeddingContext): string {
         // Build a relationship-focused summary first
-        const relationshipSummary = [];
+        const relationshipSummary: string[] = [];

         // Summarize the note's place in the hierarchy
         if (context.parentTitles.length > 0) {
@@ -13,6 +13,21 @@ import indexService from '../index_service.js';
 // Track which notes are currently being processed
 const notesInProcess = new Set<string>();

+interface FailedItemRow {
+    noteId: string;
+    operation: string;
+    attempts: number;
+    lastAttempt: string;
+    error: string | null;
+    failed: number;
+}
+
+interface FailedItemWithTitle extends FailedItemRow {
+    title?: string;
+    failureType: 'chunks' | 'full';
+    isPermanent: boolean;
+}
+
 /**
  * Queues a note for embedding update
  */
@@ -77,17 +92,17 @@ export async function queueNoteForEmbedding(noteId: string, operation = 'UPDATE'
  */
 export async function getFailedEmbeddingNotes(limit: number = 100): Promise<any[]> {
     // Get notes with failed embedding attempts or permanently failed flag
-    const failedQueueItems = await sql.getRows(`
+    const failedQueueItems = sql.getRows<FailedItemRow>(`
         SELECT noteId, operation, attempts, lastAttempt, error, failed
         FROM embedding_queue
         WHERE attempts > 0 OR failed = 1
         ORDER BY failed DESC, attempts DESC, lastAttempt DESC
         LIMIT ?`,
         [limit]
-    ) as {noteId: string, operation: string, attempts: number, lastAttempt: string, error: string, failed: number}[];
+    );

     // Add titles to the failed notes
-    const failedNotesWithTitles = [];
+    const failedNotesWithTitles: FailedItemWithTitle[] = [];
     for (const item of failedQueueItems) {
         const note = becca.getNote(item.noteId);
         if (note) {
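Two things change in the hunk above: the inline `as { ... }[]` assertion after the query is replaced by a type argument (`sql.getRows<FailedItemRow>`), and the stray `await` is dropped since the call is treated as synchronous here. A hedged sketch of the general pattern, with hypothetical names rather than the project's sql service:

    interface QueueRow {
        noteId: string;
        attempts: number;
    }

    // The single assertion is centralized in the helper; call sites just name the row shape.
    function getRows<T>(rows: Record<string, unknown>[]): T[] {
        return rows as unknown as T[];
    }

    const items = getRows<QueueRow>([{ noteId: "abc123", attempts: 2 }]);
    for (const item of items) {
        console.log(item.noteId, item.attempts);   // fields are checked against QueueRow
    }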
@@ -95,7 +95,7 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput,
             const providerPrecedence = await options.getOption('aiProviderPrecedence');
             if (providerPrecedence) {
                 // Parse provider precedence list
-                let providers = [];
+                let providers: string[] = [];
                 if (providerPrecedence.includes(',')) {
                     providers = providerPrecedence.split(',').map(p => p.trim());
                 } else if (providerPrecedence.startsWith('[') && providerPrecedence.endsWith(']')) {
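The hunk above pre-types `providers` as `string[]` so both parsing branches (a comma-separated list and a bracketed, JSON-style array) can assign to the same variable without widening it to `any[]`. The hunk ends before the bracketed branch's body, so the sketch below fills it in with a plausible `JSON.parse` purely as an illustration; the helper name and the fallback branch are assumptions, not the project's code:

    function parseProviderPrecedence(raw: string): string[] {
        let providers: string[] = [];   // typed up front so every branch assigns cleanly

        if (raw.includes(",")) {
            providers = raw.split(",").map((p) => p.trim());
        } else if (raw.startsWith("[") && raw.endsWith("]")) {
            providers = (JSON.parse(raw) as string[]).map((p) => p.trim());
        } else if (raw.length > 0) {
            providers = [raw.trim()];
        }

        return providers;
    }

    parseProviderPrecedence("openai, anthropic");   // ["openai", "anthropic"]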
@@ -52,7 +52,7 @@ class NoteFlatTextExp extends Expression {
                 return;
             }

-            const foundAttrTokens = [];
+            const foundAttrTokens: string[] = [];

             for (const token of remainingTokens) {
                 if (note.type.includes(token) || note.mime.includes(token)) {
@@ -73,7 +73,7 @@ class NoteFlatTextExp extends Expression {

             for (const parentNote of note.parents) {
                 const title = normalize(beccaService.getNoteTitle(note.noteId, parentNote.noteId));
-                const foundTokens = foundAttrTokens.slice();
+                const foundTokens: string[] = foundAttrTokens.slice();

                 for (const token of remainingTokens) {
                     if (title.includes(token)) {
@@ -100,7 +100,7 @@ class NoteFlatTextExp extends Expression {
                 continue;
             }

-            const foundAttrTokens = [];
+            const foundAttrTokens: string[] = [];

             for (const token of this.tokens) {
                 if (note.type.includes(token) || note.mime.includes(token)) {
|
||||
* Returns noteIds which have at least one matching tokens
|
||||
*/
|
||||
getCandidateNotes(noteSet: NoteSet): BNote[] {
|
||||
const candidateNotes = [];
|
||||
const candidateNotes: BNote[] = [];
|
||||
|
||||
for (const note of noteSet.notes) {
|
||||
for (const token of this.tokens) {
|
||||
|
@@ -126,17 +126,17 @@ export type NoteType = (typeof ALLOWED_NOTE_TYPES)[number];

 export interface NoteRow {
     noteId: string;
-    deleteId: string;
+    deleteId?: string;
     title: string;
     type: NoteType;
     mime: string;
-    isProtected: boolean;
-    isDeleted: boolean;
-    blobId: string;
-    dateCreated: string;
-    dateModified: string;
-    utcDateCreated: string;
-    utcDateModified: string;
+    isProtected?: boolean;
+    isDeleted?: boolean;
+    blobId?: string;
+    dateCreated?: string;
+    dateModified?: string;
+    utcDateCreated?: string;
+    utcDateModified?: string;
     content?: string | Buffer;
 }

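With most `NoteRow` fields now optional, a row literal only needs the remaining required fields, and consumers have to guard the optional ones before use. An illustrative sketch that treats the interface above as given (and assumes "text" is a valid `NoteType` value):

    const row: NoteRow = {
        noteId: "abc123",
        title: "Demo note",
        type: "text",
        mime: "text/html",
    };

    // Optional fields must be checked (or defaulted) before use:
    const modified = row.utcDateModified ?? "(not yet persisted)";
    console.log(`${row.title} last modified: ${modified}`);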