Merge branch 'develop' into beta

ThatOneCalculator 2023-04-01 12:28:11 -07:00
commit 118907551a
127 changed files with 5120 additions and 3385 deletions

.npmrc Normal file

@ -0,0 +1 @@
use-lockfile-v6=true


@ -3,7 +3,7 @@ FROM node:19-alpine as build
WORKDIR /calckey
# Install compilation dependencies
RUN apk add --no-cache --no-progress git alpine-sdk python3
RUN apk add --no-cache --no-progress git alpine-sdk python3 rust
# Copy only the dependency-related files first, to cache efficiently
COPY package.json pnpm*.yaml ./


@ -91,6 +91,7 @@ If you have access to a server that supports one of the sources below, I recomme
### 🏗️ Build dependencies
- 🦀 [Rust toolchain](https://www.rust-lang.org/)
- 🦬 C/C++ compiler & build tools
- `build-essential` on Debian/Ubuntu Linux
- `base-devel` on Arch Linux

custom/assets/sounds/None.mp3 (Stored with Git LFS; binary file not shown)


@ -24,6 +24,26 @@ NODE_ENV=production pnpm run migrate
# build using preferred method
```
Depending on the version you're migrating from, you may have to open Postgres with `psql -d your_database` and run the following commands:
```sql
ALTER TABLE "meta" ADD COLUMN "disableLocalTimeline" boolean DEFAULT false;
ALTER TABLE "meta" ADD COLUMN "disableGlobalTimeline" boolean DEFAULT false;
ALTER TABLE "meta" ADD COLUMN "localDriveCapacityMb" integer DEFAULT 512;
ALTER TABLE "meta" ADD COLUMN "remoteDriveCapacityMb" integer DEFAULT 128;
ALTER TABLE "user" ADD COLUMN "isSilenced" boolean DEFAULT false;
ALTER TABLE "user" ADD COLUMN "isAdmin" boolean DEFAULT false;
ALTER TABLE "user" ADD COLUMN "isModerator" boolean DEFAULT false;
ALTER TABLE "user" ADD COLUMN "remoteDriveCapacityMb" integer DEFAULT 128;
ALTER TABLE "user" ADD COLUMN "driveCapacityOverrideMb" integer DEFAULT 128;
ALTER TABLE "instance" ADD COLUMN "caughtAt" date;
ALTER TABLE "instance" ADD COLUMN "latestRequestSentAt" date;
ALTER TABLE "instance" ADD COLUMN "latestStatus" character varying(512);
ALTER TABLE "instance" ADD COLUMN "lastCommunicatedAt" date;
```
Then quit with `\q` and restart Calckey.
## Misskey v12.119 and before
```sh
@ -56,3 +76,7 @@ git pull --ff
NODE_ENV=production pnpm run migrate
# build using preferred method
```
## Reverse
You ***cannot*** migrate back to Misskey from Calckey, because passwords are re-hashed with argon2 on sign-in. You can, however, migrate from Calckey to Foundkey.
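For reference, the sign-in re-hash looks roughly like the sketch below, using the `comparePassword`, `hashPassword`, and `isOldAlgorithm` helpers this commit adds in `packages/backend/src/misc/password.ts`. The `UserProfiles` lookup and the `verifyAndUpgrade` name are illustrative only, not the actual sign-in endpoint:
```ts
import { UserProfiles } from "@/models/index.js";
import {
	comparePassword,
	hashPassword,
	isOldAlgorithm,
} from "@/misc/password.js";

// Illustrative sketch: verify against whichever algorithm produced the stored
// hash, then transparently upgrade old bcrypt ("$2...") hashes to argon2.
export async function verifyAndUpgrade(
	userId: string,
	password: string,
): Promise<boolean> {
	const profile = await UserProfiles.findOneByOrFail({ userId });
	if (profile.password == null) return false;
	if (!(await comparePassword(password, profile.password))) return false;
	if (isOldAlgorithm(profile.password)) {
		// From here on the hash can only be verified with argon2, which is why
		// a Calckey -> Misskey migration cannot be reversed.
		await UserProfiles.update(
			{ userId },
			{ password: await hashPassword(password) },
		);
	}
	return true;
}
```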


@ -96,6 +96,9 @@ unfollow: "Unfollow"
followRequestPending: "Follow request pending"
enterEmoji: "Enter an emoji"
renote: "Boost"
renoteAsUnlisted: "Boost (Unlisted)"
renoteToFollowers: "Boost (Followers)"
renoteToRecipients: "Boost (Recipients)"
unrenote: "Take back boost"
renoted: "Boosted."
cantRenote: "This post can't be boosted."
@ -323,7 +326,7 @@ dayX: "{day}"
monthX: "{month}"
yearX: "{year}"
pages: "Pages"
integration: "Integration"
integration: "Integrations"
connectService: "Connect"
disconnectService: "Disconnect"
enableLocalTimeline: "Enable local timeline"
@ -939,6 +942,12 @@ moveFromDescription: "This will set an alias of your old account so that you can
migrationConfirm: "Are you absolutely sure you want to migrate your account to {account}? Once you do this, you won't be able to reverse it, and you won't be able to use your account normally again.\nAlso, please ensure that you've set this current account as the account you're moving from."
defaultReaction: "Default emoji reaction for outgoing and incoming posts"
license: "License"
indexPosts: "Index posts"
indexFrom: "Index from Post ID onwards (leave blank to index every post)"
indexNotice: "Now indexing. This will probably take a while, please don't restart your server for at least an hour."
customKaTeXMacro: "Custom KaTeX Macro"
customKaTeXMacroDescription: "Set up macros to write mathematical expressions easily! The notation conforms to the LaTeX command definitions and is written as \\newcommand{\\name}{content} or \\newcommand{\\name}[number of arguments]{content}. For example, \\newcommand{\\add}[2]{#1 + #2} will expand \\add{3}{foo} to 3 + foo. The curly brackets surrounding the macro name can be changed to round or square brackets. This affects the brackets used for arguments. One (and only one) macro can be defined per line, and you can't break the line in the middle of the definition. Invalid lines are simply ignored. Only simple string substitution functions are supported; advanced syntax, such as conditional branching, cannot be used here."
enableCustomKaTeXMacro: "Enable custom KaTeX macro"
_sensitiveMediaDetection:
description: "Reduces the effort of server moderation through automatically recognizing NSFW media via Machine Learning. This will slightly increase the load on the server."
@ -1373,7 +1382,7 @@ _poll:
_visibility:
public: "Public"
publicDescription: "Your post will be visible for all users"
home: "Home"
home: "Unlisted"
homeDescription: "Post to home timeline only"
followers: "Followers"
followersDescription: "Make visible to your followers only"
@ -1782,9 +1791,9 @@ _deck:
swapDown: "Swap with the below column"
stackLeft: "Stack with the left column"
popRight: "Pop column to the right"
profile: "Profile"
newProfile: "New profile"
deleteProfile: "Delete profile"
profile: "Workspace"
newProfile: "New workspace"
deleteProfile: "Delete workspace"
introduction: "Create the perfect interface for you by arranging columns freely!"
introduction2: "Click on the + on the right of the screen to add new colums whenever you want."
widgetsIntroduction: "Please select \"Edit widgets\" in the column menu and add a widget."


@ -96,6 +96,9 @@ unfollow: "フォロー解除"
followRequestPending: "フォロー許可待ち"
enterEmoji: "絵文字を入力"
renote: "ブースト"
renoteAsUnlisted: "ホームにブースト"
renoteToFollowers: "フォロワー限定でブースト"
renoteToRecipients: "宛先のユーザーにブースト"
unrenote: "ブースト解除"
renoted: "ブーストしました。"
cantRenote: "この投稿はブーストできません。"
@ -849,6 +852,9 @@ overridedDeviceKind: "デバイスタイプ"
smartphone: "スマートフォン"
tablet: "タブレット"
auto: "自動"
showLocalPosts: "ローカルの投稿を表示する場所"
homeTimeline: "ホームタイムライン"
socialTimeline: "ソーシャルタイムライン"
themeColor: "テーマカラー"
size: "サイズ"
numberOfColumn: "列の数"
@ -936,6 +942,9 @@ moveFromDescription: "別のアカウントからこのアカウントにフォ
migrationConfirm: "本当にこのアカウントを {account} に引っ越しますか?一度引っ越しを行うと取り消せず、二度とこのアカウントを元の状態で使用することはできません。\nまた、引っ越し先のアカウントでエイリアスを作成したことを確認してください。"
defaultReaction: "リモートとローカルの投稿に対するデフォルトの絵文字リアクション"
license: "ライセンス"
customKaTeXMacro: "カスタムKaTeXマクロ"
customKaTeXMacroDescription: "数式入力を楽にするためのマクロを設定しましょう記法はLaTeXにおけるコマンドの定義と同様に \\newcommand{\\name}{content} または \\newcommand{\\add}[2]{#1 + #2} のように記述します。後者の例では \\add{3}{foo} が 3 + foo に展開されます。また、マクロの名前を囲む波括弧を丸括弧 () および角括弧 [] に変更した場合、マクロの引数に使用する括弧が変更されます。マクロの定義は一行に一つのみで、途中で改行はできません。マクロの定義が無効な行は無視されます。文字列を単純に置換する機能のみに対応していて、条件分岐などの高度な構文は使用できません。"
enableCustomKaTeXMacro: "カスタムKaTeXマクロを有効にする"
_sensitiveMediaDetection:
description: "機械学習を使って自動でセンシティブなメディアを検出し、モデレーションに役立てることができます。サーバーの負荷が少し増えます。"
@ -1240,10 +1249,10 @@ _tutorial:
step4_2: "最初は{introduction}に投稿したり、シンプルに「こんにちは、アカウント作ってみました!」などの投稿をする人もいます。"
step5_1: "タイムライン、タイムラインだらけ!"
step5_2: "あなたのインスタンスでは{timelines}種類のタイムラインが有効になっています。"
step5_3: "ホーム{icon}タイムラインでは、あなたがフォローしているアカウントの投稿を見ることができます。"
step5_3: "ホーム{icon}タイムラインでは、あなたがフォローしているアカウントとこのインスタンスのみんなの投稿を見ることができます。もしフォローしているアカウントの投稿だけ見たい場合は、設定から変更できます。"
step5_4: "ローカル{icon}タイムラインでは、このインスタンスのみんなの投稿を見ることができます。"
step5_5: "おすすめ{icon}タイムラインでは、管理人がおすすめするインスタンスの投稿を見ることができます。"
step5_6: "ソーシャル{icon}タイムラインでは、ホームタイムラインとローカルタイムラインの投稿を同時に見ることができます。"
step5_5: "ソーシャル{icon}タイムラインでは、あなたがフォローしているアカウントの投稿を見ることができます。"
step5_6: "おすすめ{icon}タイムラインでは、管理人がおすすめするインスタンスの投稿を見ることができます。"
step5_7: "グローバル{icon}タイムラインでは、接続している他のすべてのインスタンスからの投稿を見ることができます。"
step6_1: "じゃあ、ここはどんな場所なの?"
step6_2: "実は、あなたはただCalckeyに参加しただけではありません。ここは、何千もの相互接続されたサーバーが構成する Fediverse への入口です。各サーバーは「インスタンス」と呼ばれます。"


@ -892,6 +892,9 @@ navbar: "导航栏"
shuffle: "随机"
account: "账户"
move: "移动"
customKaTeXMacro: "自定义 KaTeX 宏"
customKaTeXMacroDescription: "使用宏来轻松的输入数学表达式吧!宏的用法与 LaTeX 中的命令定义相同。你可以使用 \\newcommand{\\name}{content} 或 \\newcommand{\\name}[number of arguments]{content} 来输入数学表达式。举个例子,\\newcommand{\\add}[2]{#1 + #2} 会将 \\add{3}{foo} 展开为 3 + foo。此外宏名称外的花括号 {} 可以被替换为圆括号 () 和方括号 [],这会影响用于参数的括号。每行只能够定义一个宏,无法在中间换行,且无效的行将被忽略。只支持简单字符串替换功能,不支持高级语法,如条件分支等。"
enableCustomKaTeXMacro: "启用自定义 KaTeX 宏"
_sensitiveMediaDetection:
description: "可以使用机器学习技术自动检测敏感媒体,以便进行审核。服务器负载将略微增加。"
sensitivity: "检测敏感度"


@ -892,6 +892,9 @@ navbar: "導覽列"
shuffle: "隨機"
account: "帳戶"
move: "移動 "
customKaTeXMacro: "自定義 KaTeX 宏"
customKaTeXMacroDescription: "使用宏來輕鬆的輸入數學表達式吧!宏的用法與 LaTeX 中的命令定義相同。你可以使用 \\newcommand{\\name}{content} 或 \\newcommand{\\name}[number of arguments]{content} 來輸入數學表達式。舉個例子,\\newcommand{\\add}[2]{#1 + #2} 會將 \\add{3}{foo} 展開為 3 + foo。此外宏名稱外的花括號 {} 可以被替換為圓括號 () 和方括號 [],這會影響用於參數的括號。每行只能夠定義一個宏,無法在中間換行,且無效的行將被忽略。只支持簡單字符串替換功能,不支持高級語法,如條件分支等。"
enableCustomKaTeXMacro: "啟用自定義 KaTeX 宏"
_sensitiveMediaDetection:
description: "您可以使用機器學習自動檢測敏感媒體並將其用於審核。 伺服器的負荷會稍微增加。"
sensitivity: "檢測敏感度"


@ -1,12 +1,12 @@
{
"name": "calckey",
"version": "13.2.0-beta31",
"version": "13.2.0-beta4",
"codename": "aqua",
"repository": {
"type": "git",
"url": "https://codeberg.org/calckey/calckey.git"
},
"packageManager": "pnpm@7.30.1",
"packageManager": "pnpm@8.1.0",
"private": true,
"scripts": {
"rebuild": "pnpm run clean && pnpm -r run build && pnpm run gulp",
@ -38,6 +38,7 @@
"dependencies": {
"@bull-board/api": "^4.10.2",
"@bull-board/ui": "^4.10.2",
"@napi-rs/cli": "^2.15.0",
"@tensorflow/tfjs": "^3.21.0",
"calckey-js": "^0.0.22",
"js-yaml": "4.1.0",


@ -0,0 +1,17 @@
export class CleanCharts1680375641101 {
constructor() {
this.name = 'CleanCharts1680375641101';
}
async up(queryRunner) {
await queryRunner.query(`delete from __chart__hashtag where ___local_users = 0 and ___remote_users = 0;`);
await queryRunner.query(`delete from __chart_day__hashtag where ___local_users = 0 and ___remote_users = 0;`);
await queryRunner.query(`COMMIT;`);
await queryRunner.query(`vacuum __chart__hashtag;`);
await queryRunner.query(`vacuum __chart_day__hashtag;`);
await queryRunner.query(`COMMIT;`);
}
async down(queryRunner) {
await queryRunner.query(`delete from __chart__hashtag where ___local_users = 0 and ___remote_users = 0;`);
await queryRunner.query(`delete from __chart_day__hashtag where ___local_users = 0 and ___remote_users = 0;`);
}
}


@ -0,0 +1,3 @@
[target.aarch64-unknown-linux-musl]
linker = "aarch64-linux-musl-gcc"
rustflags = ["-C", "target-feature=-crt-static"]

packages/backend/native-utils/.gitignore vendored Normal file

@ -0,0 +1,200 @@
# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
# Next.js build output
.next
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# End of https://www.toptal.com/developers/gitignore/api/node
# Created by https://www.toptal.com/developers/gitignore/api/macos
# Edit at https://www.toptal.com/developers/gitignore?templates=macos
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### macOS Patch ###
# iCloud generated files
*.icloud
# End of https://www.toptal.com/developers/gitignore/api/macos
# Created by https://www.toptal.com/developers/gitignore/api/windows
# Edit at https://www.toptal.com/developers/gitignore?templates=windows
### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# End of https://www.toptal.com/developers/gitignore/api/windows
# napi-rs generated files
built/
#Added by cargo
/target
Cargo.lock
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
*.node


@ -0,0 +1,13 @@
target
Cargo.lock
.cargo
.github
npm
.eslintrc
.prettierignore
rustfmt.toml
yarn.lock
*.node
.yarn
__test__
renovate.json


@ -0,0 +1,18 @@
[package]
edition = "2021"
name = "native-utils"
version = "0.0.0"
[lib]
crate-type = ["cdylib"]
[dependencies]
# Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
napi = { version = "2.12.0", default-features = false, features = ["napi4"] }
napi-derive = "2.12.0"
[build-dependencies]
napi-build = "2.0.1"
[profile.release]
lto = true


@ -0,0 +1,7 @@
import test from 'ava'
import { sum } from '../index.js'
test('sum from native', (t) => {
t.is(sum(1, 2), 3)
})


@ -0,0 +1,5 @@
extern crate napi_build;
fn main() {
napi_build::setup();
}


@ -0,0 +1,3 @@
# `native-utils-android-arm-eabi`
This is the **armv7-linux-androideabi** binary for `native-utils`


@ -0,0 +1,18 @@
{
"name": "native-utils-android-arm-eabi",
"version": "0.0.0",
"os": [
"android"
],
"cpu": [
"arm"
],
"main": "native-utils.android-arm-eabi.node",
"files": [
"native-utils.android-arm-eabi.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@ -0,0 +1,3 @@
# `native-utils-android-arm64`
This is the **aarch64-linux-android** binary for `native-utils`


@ -0,0 +1,18 @@
{
"name": "native-utils-android-arm64",
"version": "0.0.0",
"os": [
"android"
],
"cpu": [
"arm64"
],
"main": "native-utils.android-arm64.node",
"files": [
"native-utils.android-arm64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@ -0,0 +1,3 @@
# `native-utils-darwin-arm64`
This is the **aarch64-apple-darwin** binary for `native-utils`


@ -0,0 +1,18 @@
{
"name": "native-utils-darwin-arm64",
"version": "0.0.0",
"os": [
"darwin"
],
"cpu": [
"arm64"
],
"main": "native-utils.darwin-arm64.node",
"files": [
"native-utils.darwin-arm64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@ -0,0 +1,3 @@
# `native-utils-darwin-universal`
This is the **universal-apple-darwin** binary for `native-utils`


@ -0,0 +1,15 @@
{
"name": "native-utils-darwin-universal",
"version": "0.0.0",
"os": [
"darwin"
],
"main": "native-utils.darwin-universal.node",
"files": [
"native-utils.darwin-universal.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@ -0,0 +1,3 @@
# `native-utils-darwin-x64`
This is the **x86_64-apple-darwin** binary for `native-utils`


@ -0,0 +1,18 @@
{
"name": "native-utils-darwin-x64",
"version": "0.0.0",
"os": [
"darwin"
],
"cpu": [
"x64"
],
"main": "native-utils.darwin-x64.node",
"files": [
"native-utils.darwin-x64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@ -0,0 +1,3 @@
# `native-utils-freebsd-x64`
This is the **x86_64-unknown-freebsd** binary for `native-utils`


@ -0,0 +1,18 @@
{
"name": "native-utils-freebsd-x64",
"version": "0.0.0",
"os": [
"freebsd"
],
"cpu": [
"x64"
],
"main": "native-utils.freebsd-x64.node",
"files": [
"native-utils.freebsd-x64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@ -0,0 +1,3 @@
# `native-utils-linux-arm-gnueabihf`
This is the **armv7-unknown-linux-gnueabihf** binary for `native-utils`


@ -0,0 +1,18 @@
{
"name": "native-utils-linux-arm-gnueabihf",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm"
],
"main": "native-utils.linux-arm-gnueabihf.node",
"files": [
"native-utils.linux-arm-gnueabihf.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@ -0,0 +1,3 @@
# `native-utils-linux-arm64-gnu`
This is the **aarch64-unknown-linux-gnu** binary for `native-utils`


@ -0,0 +1,21 @@
{
"name": "native-utils-linux-arm64-gnu",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm64"
],
"main": "native-utils.linux-arm64-gnu.node",
"files": [
"native-utils.linux-arm64-gnu.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"glibc"
]
}


@ -0,0 +1,3 @@
# `native-utils-linux-arm64-musl`
This is the **aarch64-unknown-linux-musl** binary for `native-utils`


@ -0,0 +1,21 @@
{
"name": "native-utils-linux-arm64-musl",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm64"
],
"main": "native-utils.linux-arm64-musl.node",
"files": [
"native-utils.linux-arm64-musl.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"musl"
]
}


@ -0,0 +1,3 @@
# `native-utils-linux-x64-gnu`
This is the **x86_64-unknown-linux-gnu** binary for `native-utils`


@ -0,0 +1,21 @@
{
"name": "native-utils-linux-x64-gnu",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"x64"
],
"main": "native-utils.linux-x64-gnu.node",
"files": [
"native-utils.linux-x64-gnu.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"glibc"
]
}


@ -0,0 +1,3 @@
# `native-utils-linux-x64-musl`
This is the **x86_64-unknown-linux-musl** binary for `native-utils`


@ -0,0 +1,21 @@
{
"name": "native-utils-linux-x64-musl",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"x64"
],
"main": "native-utils.linux-x64-musl.node",
"files": [
"native-utils.linux-x64-musl.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"musl"
]
}


@ -0,0 +1,3 @@
# `native-utils-win32-arm64-msvc`
This is the **aarch64-pc-windows-msvc** binary for `native-utils`


@ -0,0 +1,18 @@
{
"name": "native-utils-win32-arm64-msvc",
"version": "0.0.0",
"os": [
"win32"
],
"cpu": [
"arm64"
],
"main": "native-utils.win32-arm64-msvc.node",
"files": [
"native-utils.win32-arm64-msvc.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@ -0,0 +1,3 @@
# `native-utils-win32-ia32-msvc`
This is the **i686-pc-windows-msvc** binary for `native-utils`


@ -0,0 +1,18 @@
{
"name": "native-utils-win32-ia32-msvc",
"version": "0.0.0",
"os": [
"win32"
],
"cpu": [
"ia32"
],
"main": "native-utils.win32-ia32-msvc.node",
"files": [
"native-utils.win32-ia32-msvc.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@ -0,0 +1,3 @@
# `native-utils-win32-x64-msvc`
This is the **x86_64-pc-windows-msvc** binary for `native-utils`


@ -0,0 +1,18 @@
{
"name": "native-utils-win32-x64-msvc",
"version": "0.0.0",
"os": [
"win32"
],
"cpu": [
"x64"
],
"main": "native-utils.win32-x64-msvc.node",
"files": [
"native-utils.win32-x64-msvc.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}


@ -0,0 +1,44 @@
{
"name": "native-utils",
"version": "0.0.0",
"main": "built/index.js",
"types": "built/index.d.ts",
"napi": {
"name": "native-utils",
"triples": {
"additional": [
"aarch64-apple-darwin",
"aarch64-linux-android",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"aarch64-pc-windows-msvc",
"armv7-unknown-linux-gnueabihf",
"x86_64-unknown-linux-musl",
"x86_64-unknown-freebsd",
"i686-pc-windows-msvc",
"armv7-linux-androideabi",
"universal-apple-darwin"
]
}
},
"license": "MIT",
"devDependencies": {
"@napi-rs/cli": "^2.15.0",
"ava": "^5.1.1"
},
"ava": {
"timeout": "3m"
},
"engines": {
"node": ">= 10"
},
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --platform --release ./built/",
"build:debug": "napi build --platform",
"prepublishOnly": "napi prepublish -t npm",
"test": "ava",
"universal": "napi universal",
"version": "napi version"
}
}


@ -0,0 +1,2 @@
tab_spaces = 2
edition = "2021"


@ -0,0 +1,2 @@
pub mod mastodon_api;


@ -0,0 +1,70 @@
use napi::{bindgen_prelude::*, Error, Status};
use napi_derive::napi;
static CHAR_COLLECTION: &str = "0123456789abcdefghijklmnopqrstuvwxyz";
// -- NAPI exports --
#[napi]
pub enum IdConvertType {
MastodonId,
CalckeyId,
}
#[napi]
pub fn convert_id(in_id: String, id_convert_type: IdConvertType) -> napi::Result<String> {
use IdConvertType::*;
match id_convert_type {
MastodonId => {
let mut out: i64 = 0;
for (i, c) in in_id.to_lowercase().chars().rev().enumerate() {
out += num_from_char(c)? as i64 * 36_i64.pow(i as u32);
}
Ok(out.to_string())
}
CalckeyId => {
let mut input: i64 = match in_id.parse() {
Ok(s) => s,
Err(_) => {
return Err(Error::new(
Status::InvalidArg,
"Unable to parse ID as MasstodonId",
))
}
};
let mut out = String::new();
while input != 0 {
out.insert(0, char_from_num((input % 36) as u8)?);
input /= 36;
}
Ok(out)
}
}
}
// -- end --
#[inline(always)]
fn num_from_char(character: char) -> napi::Result<u8> {
for (i, c) in CHAR_COLLECTION.chars().enumerate() {
if c == character {
return Ok(i as u8);
}
}
Err(Error::new(
Status::InvalidArg,
"Invalid character in parsed base36 id",
))
}
#[inline(always)]
fn char_from_num(number: u8) -> napi::Result<char> {
CHAR_COLLECTION
.chars()
.nth(number as usize)
.ok_or(Error::from_status(Status::Unknown))
}
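From the Node side, the generated binding is consumed roughly as sketched below, mirroring the `native-utils` import that replaces the old JavaScript shim in `ApiMastodonCompatibleService.ts` further down; the exact typings come from the napi-generated `index.d.ts`, and the example ID is arbitrary:
```ts
import { convertId, IdConvertType } from "native-utils";

// Calckey IDs are lowercase base36 strings; the Mastodon API layer exposes
// numeric IDs. MastodonId: base36 -> decimal string, CalckeyId: the reverse.
const mastodonId = convertId("9abc1def", IdConvertType.MastodonId);
const calckeyId = convertId(mastodonId, IdConvertType.CalckeyId); // back to "9abc1def"
```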


@ -9,7 +9,7 @@
"migrate": "typeorm migration:run -d ormconfig.js",
"revertmigration": "typeorm migration:revert -d ormconfig.js",
"check:connect": "node ./check_connect.js",
"build": "pnpm swc src -d built -D",
"build": "napi build --platform --release --cargo-cwd native-utils ./native-utils/built/ && pnpm swc src -d built -D",
"watch": "pnpm swc src -d built -D -w",
"lint": "pnpm rome check \"src/**/*.ts\"",
"mocha": "cross-env NODE_ENV=test TS_NODE_FILES=true TS_NODE_TRANSPILE_ONLY=true TS_NODE_PROJECT=\"./test/tsconfig.json\" mocha",
@ -26,6 +26,7 @@
"@bull-board/api": "^4.6.4",
"@bull-board/koa": "^4.6.4",
"@bull-board/ui": "^4.6.4",
"@calckey/megalodon": "5.1.23",
"@discordapp/twemoji": "14.0.2",
"@elastic/elasticsearch": "7.17.0",
"@koa/cors": "3.4.3",
@ -38,12 +39,12 @@
"@tensorflow/tfjs": "^4.2.0",
"ajv": "8.11.2",
"archiver": "5.3.1",
"koa-body": "^6.0.1",
"argon2": "^0.30.3",
"autobind-decorator": "2.4.0",
"autolinker": "4.0.0",
"axios": "^1.3.2",
"autwh": "0.1.0",
"aws-sdk": "2.1277.0",
"axios": "^1.3.2",
"bcryptjs": "2.4.3",
"blurhash": "1.1.5",
"bull": "4.10.2",
@ -72,19 +73,20 @@
"jsonld": "6.0.0",
"jsrsasign": "10.6.1",
"koa": "2.13.4",
"koa-remove-trailing-slashes": "2.0.3",
"koa-body": "^6.0.1",
"koa-bodyparser": "4.3.0",
"koa-favicon": "2.1.0",
"koa-json-body": "5.3.0",
"koa-logger": "3.2.1",
"koa-mount": "4.0.0",
"koa-remove-trailing-slashes": "2.0.3",
"koa-send": "5.0.1",
"koa-slow": "2.1.0",
"koa-views": "7.0.2",
"@calckey/megalodon": "5.1.22",
"mfm-js": "0.23.2",
"mime-types": "2.1.35",
"multer": "1.4.4-lts.1",
"native-utils": "link:native-utils",
"nested-property": "4.0.0",
"node-fetch": "3.3.0",
"nodemailer": "6.8.0",


@ -1 +1 @@
declare module 'koa-remove-trailing-slashes';
declare module "koa-remove-trailing-slashes";


@ -9,6 +9,7 @@ import { envOption } from "../env.js";
import "reflect-metadata";
import { masterMain } from "./master.js";
import { workerMain } from "./worker.js";
import os from "node:os";
const logger = new Logger("core", "cyan");
const clusterLogger = logger.createSubLogger("cluster", "orange", false);
@ -31,6 +32,16 @@ export default async function () {
await workerMain();
}
if (cluster.isPrimary) {
// Leave the master process with a marginally lower priority but not too low.
os.setPriority(2);
}
if (cluster.isWorker) {
// Set workers to a much lower priority so that the master process will be
// able to respond to api calls even if the workers gank everything.
os.setPriority(10);
}
// For when Calckey is started in a child process during unit testing.
// Otherwise, process.send cannot be used, so start it.
if (process.send) {


@ -7,32 +7,26 @@ const logger = dbLogger.createSubLogger("sonic", "gray", false);
logger.info("Connecting to Sonic");
const handlers = (type: string): SonicChannel.Handlers => (
{
connected: () => {
logger.succ(`Connected to Sonic ${type}`);
},
disconnected: (error) => {
logger.warn(`Disconnected from Sonic ${type}, error: ${error}`);
},
error: (error) => {
logger.warn(`Sonic ${type} error: ${error}`);
},
retrying: () => {
logger.info(`Sonic ${type} retrying`);
},
timeout: () => {
logger.warn(`Sonic ${type} timeout`);
},
}
)
const handlers = (type: string): SonicChannel.Handlers => ({
connected: () => {
logger.succ(`Connected to Sonic ${type}`);
},
disconnected: (error) => {
logger.warn(`Disconnected from Sonic ${type}, error: ${error}`);
},
error: (error) => {
logger.warn(`Sonic ${type} error: ${error}`);
},
retrying: () => {
logger.info(`Sonic ${type} retrying`);
},
timeout: () => {
logger.warn(`Sonic ${type} timeout`);
},
});
const hasConfig =
config.sonic
&& ( config.sonic.host
|| config.sonic.port
|| config.sonic.auth
)
config.sonic && (config.sonic.host || config.sonic.port || config.sonic.auth);
const host = hasConfig ? config.sonic.host ?? "localhost" : "";
const port = hasConfig ? config.sonic.port ?? 1491 : 0;
@ -42,10 +36,14 @@ const bucket = hasConfig ? config.sonic.bucket ?? "default" : "";
export default hasConfig
? {
search: new SonicChannel.Search({host, port, auth}).connect(handlers("search")),
ingest: new SonicChannel.Ingest({host, port, auth}).connect(handlers("ingest")),
search: new SonicChannel.Search({ host, port, auth }).connect(
handlers("search"),
),
ingest: new SonicChannel.Ingest({ host, port, auth }).connect(
handlers("ingest"),
),
collection,
bucket,
}
}
: null;


@ -0,0 +1,20 @@
import bcrypt from "bcryptjs";
import * as argon2 from "argon2";
export async function hashPassword(password: string): Promise<string> {
return argon2.hash(password);
}
export async function comparePassword(
password: string,
hash: string,
): Promise<boolean> {
if (isOldAlgorithm(hash)) return bcrypt.compare(password, hash);
return argon2.verify(hash, password);
}
export function isOldAlgorithm(hash: string): boolean {
// bcrypt hashes start with $2[ab]$
return hash.startsWith("$2");
}


@ -0,0 +1,19 @@
export type Post = {
text: string | null;
cw: string | null;
localOnly: boolean;
createdAt: Date;
};
export function parse(acct: any): Post {
return {
text: acct.text,
cw: acct.cw,
localOnly: acct.localOnly,
createdAt: new Date(acct.createdAt),
};
}
export function toJson(acct: Post): string {
return JSON.stringify({ text: acct.text, cw: acct.cw, localOnly: acct.localOnly });
}


@ -265,12 +265,22 @@ export const NoteRepository = db.getRepository(Note).extend({
if (packed.user.isCat && packed.text) {
const tokens = packed.text ? mfm.parse(packed.text) : [];
mfm.inspect(tokens, (node) => {
if (node.type === "text") {
// TODO: skip text inside quotes
function nyaizeNode(node: mfm.MfmNode) {
if (node.type === "quote")
return;
if (node.type === "text")
node.props.text = nyaize(node.props.text);
if (node.children) {
for (const child of node.children) {
nyaizeNode(child);
}
}
});
}
for (const node of tokens)
nyaizeNode(node);
packed.text = mfm.toString(tokens);
}


@ -314,6 +314,25 @@ export function createImportFollowingJob(
);
}
export function createImportPostsJob(
user: ThinUser,
fileId: DriveFile["id"],
signatureCheck: boolean,
) {
return dbQueue.add(
"importPosts",
{
user: user,
fileId: fileId,
signatureCheck: signatureCheck,
},
{
removeOnComplete: true,
removeOnFail: true,
},
);
}
export function createImportMutingJob(user: ThinUser, fileId: DriveFile["id"]) {
return dbQueue.add(
"importMuting",
@ -421,14 +440,10 @@ export function createCleanRemoteFilesJob() {
}
export function createIndexAllNotesJob(data = {}) {
return backgroundQueue.add(
"indexAllNotes",
data,
{
removeOnComplete: true,
removeOnFail: true,
},
);
return backgroundQueue.add("indexAllNotes", data, {
removeOnComplete: true,
removeOnFail: true,
});
}
export function webhookDeliver(


@ -3,26 +3,30 @@ import type Bull from "bull";
import { queueLogger } from "../../logger.js";
import { Notes } from "@/models/index.js";
import { MoreThan } from "typeorm";
import { index } from "@/services/note/create.js"
import { index } from "@/services/note/create.js";
import { Note } from "@/models/entities/note.js";
const logger = queueLogger.createSubLogger("index-all-notes");
export default async function indexAllNotes(
job: Bull.Job<Record<string, unknown>>,
done: ()=>void,
done: () => void,
): Promise<void> {
logger.info("Indexing all notes...");
let cursor: string|null = job.data.cursor as string ?? null;
let indexedCount: number = job.data.indexedCount as number ?? 0;
let total: number = job.data.total as number ?? 0;
let cursor: string | null = (job.data.cursor as string) ?? null;
let indexedCount: number = (job.data.indexedCount as number) ?? 0;
let total: number = (job.data.total as number) ?? 0;
let running = true;
const take = 50000;
const batch = 100;
while (running) {
logger.info(`Querying for ${take} notes ${indexedCount}/${total ? total : '?'} at ${cursor}`);
logger.info(
`Querying for ${take} notes ${indexedCount}/${
total ? total : "?"
} at ${cursor}`,
);
let notes: Note[] = [];
try {
@ -49,22 +53,21 @@ export default async function indexAllNotes(
try {
const count = await Notes.count();
total = count;
job.update({ indexedCount, cursor, total })
} catch (e) {
}
job.update({ indexedCount, cursor, total });
} catch (e) {}
for (let i = 0; i < notes.length; i += batch) {
const chunk = notes.slice(i, i + batch);
await Promise.all(chunk.map(note => index(note)));
await Promise.all(chunk.map((note) => index(note)));
indexedCount += chunk.length;
const pct = (indexedCount / total)*100;
job.update({ indexedCount, cursor, total })
job.progress(+(pct.toFixed(1)));
logger.info(`Indexed notes ${indexedCount}/${total ? total : '?'}`);
const pct = (indexedCount / total) * 100;
job.update({ indexedCount, cursor, total });
job.progress(+pct.toFixed(1));
logger.info(`Indexed notes ${indexedCount}/${total ? total : "?"}`);
}
cursor = notes[notes.length - 1].id;
job.update({ indexedCount, cursor, total })
job.update({ indexedCount, cursor, total });
if (notes.length < take) {
running = false;


@ -3,10 +3,7 @@ import indexAllNotes from "./index-all-notes.js";
const jobs = {
indexAllNotes,
} as Record<
string,
Bull.ProcessCallbackFunction<Record<string, unknown>>
>;
} as Record<string, Bull.ProcessCallbackFunction<Record<string, unknown>>>;
export default function (q: Bull.Queue) {
for (const [k, v] of Object.entries(jobs)) {


@ -0,0 +1,131 @@
import { IsNull } from "typeorm";
import follow from "@/services/following/create.js";
import * as Post from "@/misc/post.js";
import create from "@/services/note/create.js";
import { downloadTextFile } from "@/misc/download-text-file.js";
import { Users, DriveFiles } from "@/models/index.js";
import type { DbUserImportPostsJobData } from "@/queue/types.js";
import { queueLogger } from "../../logger.js";
import type Bull from "bull";
import { htmlToMfm } from "@/remote/activitypub/misc/html-to-mfm.js";
const logger = queueLogger.createSubLogger("import-posts");
export async function importPosts(
job: Bull.Job<DbUserImportPostsJobData>,
done: any,
): Promise<void> {
logger.info(`Importing posts of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
const file = await DriveFiles.findOneBy({
id: job.data.fileId,
});
if (file == null) {
done();
return;
}
const json = await downloadTextFile(file.url);
let linenum = 0;
try {
const parsed = JSON.parse(json);
if (parsed instanceof Array) {
logger.info("Parsing key style posts");
for (const post of JSON.parse(json)) {
try {
linenum++;
if (post.replyId != null) {
continue;
}
if (post.renoteId != null) {
continue;
}
if (post.visibility !== "public") {
continue;
}
const { text, cw, localOnly, createdAt } = Post.parse(post);
logger.info(`Posting[${linenum}] ...`);
const note = await create(user, {
createdAt: createdAt,
files: undefined,
poll: undefined,
text: text || undefined,
reply: null,
renote: null,
cw: cw,
localOnly,
visibility: "public",
visibleUsers: [],
channel: null,
apMentions: new Array(0),
apHashtags: undefined,
apEmojis: undefined,
});
} catch (e) {
logger.warn(`Error in line:${linenum} ${e}`);
}
}
} else if (parsed instanceof Object) {
logger.info("Parsing animal style posts");
for (const post of parsed.orderedItems) {
try {
linenum++;
if (post.object.inReplyTo != null) {
continue;
}
if (post.directMessage) {
continue;
}
if (job.data.signatureCheck) {
if (!post.signature) {
continue;
}
}
let text;
try {
text = htmlToMfm(post.object.content, post.object.tag);
} catch (e) {
continue;
}
logger.info(`Posting[${linenum}] ...`);
const note = await create(user, {
createdAt: new Date(post.object.published),
files: undefined,
poll: undefined,
text: text || undefined,
reply: null,
renote: null,
cw: post.sensitive,
localOnly: false,
visibility: "public",
visibleUsers: [],
channel: null,
apMentions: new Array(0),
apHashtags: undefined,
apEmojis: undefined,
});
} catch (e) {
logger.warn(`Error in line:${linenum} ${e}`);
}
}
}
} catch (e) {
// handle error
logger.warn(`Error reading: ${e}`);
}
logger.succ("Imported");
done();
}


@ -11,6 +11,7 @@ import { importFollowing } from "./import-following.js";
import { importUserLists } from "./import-user-lists.js";
import { deleteAccount } from "./delete-account.js";
import { importMuting } from "./import-muting.js";
import { importPosts } from "./import-posts.js";
import { importBlocking } from "./import-blocking.js";
import { importCustomEmojis } from "./import-custom-emojis.js";
@ -26,6 +27,7 @@ const jobs = {
importMuting,
importBlocking,
importUserLists,
importPosts,
importCustomEmojis,
deleteAccount,
} as Record<


@ -20,7 +20,7 @@ export default async (job: Bull.Job<WebhookDeliverJobData>) => {
"X-Calckey-Host": config.host,
"X-Calckey-Hook-Id": job.data.webhookId,
"X-Calckey-Hook-Secret": job.data.secret,
'Content-Type': 'application/json'
"Content-Type": "application/json",
},
body: JSON.stringify({
hookId: job.data.webhookId,


@ -21,6 +21,7 @@ export type InboxJobData = {
export type DbJobData =
| DbUserJobData
| DbUserImportPostsJobData
| DbUserImportJobData
| DbUserDeleteJobData;
@ -40,6 +41,12 @@ export type DbUserImportJobData = {
fileId: DriveFile["id"];
};
export type DbUserImportPostsJobData = {
user: ThinUser;
fileId: DriveFile["id"];
signatureCheck: boolean;
};
export type ObjectStorageJobData =
| ObjectStorageFileJobData
| Record<string, unknown>;


@ -112,13 +112,13 @@ export async function createNote(
const note: IPost = object;
if (note.id && !note.id.startsWith("https://")) {
throw new Error(`unexpected shcema of note.id: ${note.id}`);
throw new Error(`unexpected schema of note.id: ${note.id}`);
}
const url = getOneApHrefNullable(note.url);
if (url && !url.startsWith("https://")) {
throw new Error(`unexpected shcema of note url: ${url}`);
throw new Error(`unexpected schema of note url: ${url}`);
}
logger.debug(`Note fetched: ${JSON.stringify(note, null, 2)}`);


@ -205,7 +205,9 @@ export async function createPerson(
if (typeof person.followers === "string") {
try {
let data = await fetch(person.followers, { headers: { "Accept": "application/json" } });
let data = await fetch(person.followers, {
headers: { Accept: "application/json" },
});
let json_data = JSON.parse(await data.text());
followersCount = json_data.totalItems;
@ -218,7 +220,9 @@ export async function createPerson(
if (typeof person.following === "string") {
try {
let data = await fetch(person.following, { headers: { "Accept": "application/json" } });
let data = await fetch(person.following, {
headers: { Accept: "application/json" },
});
let json_data = JSON.parse(await data.text());
followingCount = json_data.totalItems;
@ -227,7 +231,6 @@ export async function createPerson(
}
}
// Create user
let user: IRemoteUser;
try {
@ -255,14 +258,20 @@ export async function createPerson(
followersUri: person.followers
? getApId(person.followers)
: undefined,
followersCount: followersCount !== undefined
? followersCount
: person.followers && typeof person.followers !== "string" && isCollectionOrOrderedCollection(person.followers)
followersCount:
followersCount !== undefined
? followersCount
: person.followers &&
typeof person.followers !== "string" &&
isCollectionOrOrderedCollection(person.followers)
? person.followers.totalItems
: undefined,
followingCount: followingCount !== undefined
? followingCount
: person.following && typeof person.following !== "string" && isCollectionOrOrderedCollection(person.following)
followingCount:
followingCount !== undefined
? followingCount
: person.following &&
typeof person.following !== "string" &&
isCollectionOrOrderedCollection(person.following)
? person.following.totalItems
: undefined,
featured: person.featured ? getApId(person.featured) : undefined,
@ -440,7 +449,9 @@ export async function updatePerson(
if (typeof person.followers === "string") {
try {
let data = await fetch(person.followers, { headers: { "Accept": "application/json" } } );
let data = await fetch(person.followers, {
headers: { Accept: "application/json" },
});
let json_data = JSON.parse(await data.text());
followersCount = json_data.totalItems;
@ -449,12 +460,13 @@ export async function updatePerson(
}
}
let followingCount: number | undefined;
if (typeof person.following === "string") {
try {
let data = await fetch(person.following, { headers: { "Accept": "application/json" } } );
let data = await fetch(person.following, {
headers: { Accept: "application/json" },
});
let json_data = JSON.parse(await data.text());
followingCount = json_data.totalItems;
@ -470,14 +482,20 @@ export async function updatePerson(
person.sharedInbox ||
(person.endpoints ? person.endpoints.sharedInbox : undefined),
followersUri: person.followers ? getApId(person.followers) : undefined,
followersCount: followersCount !== undefined
? followersCount
: person.followers && typeof person.followers !== "string" && isCollectionOrOrderedCollection(person.followers)
followersCount:
followersCount !== undefined
? followersCount
: person.followers &&
typeof person.followers !== "string" &&
isCollectionOrOrderedCollection(person.followers)
? person.followers.totalItems
: undefined,
followingCount: followingCount !== undefined
? followingCount
: person.following && typeof person.following !== "string" && isCollectionOrOrderedCollection(person.following)
followingCount:
followingCount !== undefined
? followingCount
: person.following &&
typeof person.following !== "string" &&
isCollectionOrOrderedCollection(person.following)
? person.following.totalItems
: undefined,
featured: person.featured,


@ -4,6 +4,10 @@ import type { Note } from "@/models/entities/note.js";
export default (object: any, note: Note) => {
const attributedTo = `${config.url}/users/${note.userId}`;
const mentions = (
JSON.parse(note.mentionedRemoteUsers) as IMentionedRemoteUsers
).map((x) => x.uri);
let to: string[] = [];
let cc: string[] = [];
@ -13,6 +17,10 @@ export default (object: any, note: Note) => {
} else if (note.visibility === "home") {
to = [`${attributedTo}/followers`];
cc = ["https://www.w3.org/ns/activitystreams#Public"];
} else if (note.visibility === "followers") {
to = [`${attributedTo}/followers`];
} else if (note.visibility === "specified") {
to = mentions;
} else {
return null;
}


@ -182,6 +182,7 @@ import * as ep___i_exportBlocking from "./endpoints/i/export-blocking.js";
import * as ep___i_exportFollowing from "./endpoints/i/export-following.js";
import * as ep___i_exportMute from "./endpoints/i/export-mute.js";
import * as ep___i_exportNotes from "./endpoints/i/export-notes.js";
import * as ep___i_importPosts from "./endpoints/i/import-posts.js";
import * as ep___i_exportUserLists from "./endpoints/i/export-user-lists.js";
import * as ep___i_favorites from "./endpoints/i/favorites.js";
import * as ep___i_gallery_likes from "./endpoints/i/gallery/likes.js";
@ -527,6 +528,7 @@ const eps = [
["i/export-following", ep___i_exportFollowing],
["i/export-mute", ep___i_exportMute],
["i/export-notes", ep___i_exportNotes],
["i/import-posts", ep___i_importPosts],
["i/export-user-lists", ep___i_exportUserLists],
["i/favorites", ep___i_favorites],
["i/gallery/likes", ep___i_gallery_likes],


@ -54,7 +54,11 @@ export const paramDef = {
folderId: { type: "string", format: "misskey:id", nullable: true },
name: { type: "string" },
isSensitive: { type: "boolean" },
comment: { type: "string", nullable: true, maxLength: DB_MAX_IMAGE_COMMENT_LENGTH },
comment: {
type: "string",
nullable: true,
maxLength: DB_MAX_IMAGE_COMMENT_LENGTH,
},
},
required: ["fileId"],
} as const;


@ -11,7 +11,8 @@ export const meta = {
res: {
type: "object",
optional: false, nullable: false,
optional: false,
nullable: false,
ref: "Emoji",
},
} as const;


@ -0,0 +1,44 @@
import define from "../../define.js";
import { createImportPostsJob } from "@/queue/index.js";
import { ApiError } from "../../error.js";
import { DriveFiles } from "@/models/index.js";
import { DAY } from "@/const.js";
export const meta = {
secure: true,
requireCredential: true,
limit: {
duration: DAY,
max: 1,
},
errors: {
noSuchFile: {
message: "No such file.",
code: "NO_SUCH_FILE",
id: "e674141e-bd2a-ba85-e616-aefb187c9c2a",
},
emptyFile: {
message: "That file is empty.",
code: "EMPTY_FILE",
id: "d2f12af1-e7b4-feac-86a3-519548f2728e",
},
},
} as const;
export const paramDef = {
type: "object",
properties: {
fileId: { type: "string", format: "misskey:id" },
signatureCheck: { type: "boolean" },
},
required: ["fileId"],
} as const;
export default define(meta, paramDef, async (ps, user) => {
const file = await DriveFiles.findOneBy({ id: ps.fileId });
if (file == null) throw new ApiError(meta.errors.noSuchFile);
if (file.size === 0) throw new ApiError(meta.errors.emptyFile);
createImportPostsJob(user, file.id, ps.signatureCheck);
});


@ -151,7 +151,7 @@ export default define(meta, paramDef, async (ps, user) => {
}
// Throw an error if there is neither text nor an attached file
if ((ps.text == null || ps.text.trim() === '') && file == null) {
if ((ps.text == null || ps.text.trim() === "") && file == null) {
throw new ApiError(meta.errors.contentRequired);
}


@ -139,7 +139,7 @@ export default define(meta, paramDef, async (ps, me) => {
})
.map((key) => key.id);
ids.push(...res);
ids.push(...res);
}
// Sort all the results by note id DESC (newest first)
@ -160,7 +160,7 @@ export default define(meta, paramDef, async (ps, me) => {
});
// The notes are checked for visibility and muted/blocked users when packed
found.push(...await Notes.packMany(notes, me));
found.push(...(await Notes.packMany(notes, me)));
start += chunkSize;
}


@ -7,7 +7,10 @@ import Router from "@koa/router";
import multer from "@koa/multer";
import bodyParser from "koa-bodyparser";
import cors from "@koa/cors";
import { apiMastodonCompatible, getClient } from "./mastodon/ApiMastodonCompatibleService.js";
import {
apiMastodonCompatible,
getClient,
} from "./mastodon/ApiMastodonCompatibleService.js";
import { Instances, AccessTokens, Users } from "@/models/index.js";
import config from "@/config/index.js";
import fs from "fs";
@ -21,35 +24,10 @@ import discord from "./service/discord.js";
import github from "./service/github.js";
import twitter from "./service/twitter.js";
import { koaBody } from "koa-body";
import { convertId, IdConvertType as IdType } from "native-utils";
export enum IdType {
CalckeyId,
MastodonId
};
export function convertId(idIn: string, idConvertTo: IdType ) {
let idArray = []
switch (idConvertTo) {
case IdType.MastodonId:
idArray = [...idIn].map(item => item.charCodeAt(0));
idArray = idArray.map(item => {
if (item.toString().length < 3) {
return `0${item.toString()}`
}
else return item.toString()
});
return idArray.join('');
case IdType.CalckeyId:
for (let i = 0; i < idIn.length; i += 3) {
if ((idIn.length % 3) !== 0) {
idIn = `0${idIn}`
}
idArray.push(idIn.slice(i, i+3));
}
idArray = idArray.map(item => String.fromCharCode(item));
return idArray.join('');
}
};
// re-export native rust id conversion (function and enum)
export { IdType, convertId };
// Init app
const app = new Koa();
@ -99,7 +77,6 @@ mastoRouter.use(
}),
);
mastoFileRouter.post("/v1/media", upload.single("file"), async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;


@ -77,7 +77,10 @@ export function apiAccountMastodon(router: Router): void {
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.search((ctx.request.query as any).acct, 'accounts');
const data = await client.search(
(ctx.request.query as any).acct,
"accounts",
);
let resp = data.data.accounts[0];
resp.id = convertId(resp.id, IdType.MastodonId);
ctx.body = resp;
@ -88,26 +91,23 @@ export function apiAccountMastodon(router: Router): void {
ctx.body = e.response.data;
}
});
router.get<{ Params: { id: string } }>(
"/v1/accounts/:id(^.*\\d.*$)",
async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const calcId = convertId(ctx.params.id, IdType.CalckeyId);
const data = await client.getAccount(calcId);
let resp = data.data;
resp.id = convertId(resp.id, IdType.MastodonId);
ctx.body = resp;
} catch (e: any) {
console.error(e);
console.error(e.response.data);
ctx.status = 401;
ctx.body = e.response.data;
}
},
);
router.get<{ Params: { id: string } }>("/v1/accounts/:id", async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const calcId = convertId(ctx.params.id, IdType.CalckeyId);
const data = await client.getAccount(calcId);
let resp = data.data;
resp.id = convertId(resp.id, IdType.MastodonId);
ctx.body = resp;
} catch (e: any) {
console.error(e);
console.error(e.response.data);
ctx.status = 401;
ctx.body = e.response.data;
}
});
router.get<{ Params: { id: string } }>(
"/v1/accounts/:id/statuses",
async (ctx) => {
@ -122,11 +122,19 @@ export function apiAccountMastodon(router: Router): void {
let resp = data.data;
for (let statIdx = 0; statIdx < resp.length; statIdx++) {
resp[statIdx].id = convertId(resp[statIdx].id, IdType.MastodonId);
resp[statIdx].in_reply_to_account_id = resp[statIdx].in_reply_to_account_id ? convertId(resp[statIdx].in_reply_to_account_id, IdType.MastodonId) : null;
resp[statIdx].in_reply_to_id = resp[statIdx].in_reply_to_id ? convertId(resp[statIdx].in_reply_to_id, IdType.MastodonId) : null;
let mentions = resp[statIdx].mentions
resp[statIdx].in_reply_to_account_id = resp[statIdx]
.in_reply_to_account_id
? convertId(resp[statIdx].in_reply_to_account_id, IdType.MastodonId)
: null;
resp[statIdx].in_reply_to_id = resp[statIdx].in_reply_to_id
? convertId(resp[statIdx].in_reply_to_id, IdType.MastodonId)
: null;
let mentions = resp[statIdx].mentions;
for (let mtnIdx = 0; mtnIdx < mentions.length; mtnIdx++) {
resp[statIdx].mentions[mtnIdx].id = convertId(mentions[mtnIdx].id, IdType.MastodonId);
resp[statIdx].mentions[mtnIdx].id = convertId(
mentions[mtnIdx].id,
IdType.MastodonId,
);
}
}
ctx.body = resp;
@ -210,7 +218,9 @@ export function apiAccountMastodon(router: Router): void {
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.followAccount(convertId(ctx.params.id, IdType.CalckeyId));
const data = await client.followAccount(
convertId(ctx.params.id, IdType.CalckeyId),
);
let acct = data.data;
acct.following = true;
acct.id = convertId(acct.id, IdType.MastodonId);
@ -230,7 +240,9 @@ export function apiAccountMastodon(router: Router): void {
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.unfollowAccount(convertId(ctx.params.id, IdType.CalckeyId));
const data = await client.unfollowAccount(
convertId(ctx.params.id, IdType.CalckeyId),
);
let acct = data.data;
acct.id = convertId(acct.id, IdType.MastodonId);
acct.following = false;
@ -250,7 +262,9 @@ export function apiAccountMastodon(router: Router): void {
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.blockAccount(convertId(ctx.params.id, IdType.CalckeyId));
const data = await client.blockAccount(
convertId(ctx.params.id, IdType.CalckeyId),
);
let resp = data.data;
resp.id = convertId(resp.id, IdType.MastodonId);
ctx.body = resp;
@ -269,7 +283,9 @@ export function apiAccountMastodon(router: Router): void {
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.unblockAccount(convertId(ctx.params.id, IdType.MastodonId));
const data = await client.unblockAccount(
convertId(ctx.params.id, IdType.MastodonId),
);
let resp = data.data;
resp.id = convertId(resp.id, IdType.MastodonId);
ctx.body = resp;
@ -310,7 +326,9 @@ export function apiAccountMastodon(router: Router): void {
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.unmuteAccount(convertId(ctx.params.id, IdType.CalckeyId));
const data = await client.unmuteAccount(
convertId(ctx.params.id, IdType.CalckeyId),
);
let resp = data.data;
resp.id = convertId(resp.id, IdType.MastodonId);
ctx.body = resp;
@ -344,7 +362,7 @@ export function apiAccountMastodon(router: Router): void {
for (let i = 0; i < ids.length; i++) {
reqIds.push(convertId(ids[i], IdType.CalckeyId));
}
const data = await client.getRelationships(reqIds);
let resp = data.data;
for (let acctIdx = 0; acctIdx < resp.length; acctIdx++) {
@ -365,15 +383,25 @@ export function apiAccountMastodon(router: Router): void {
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = (await client.getBookmarks(limitToInt(ctx.query as any))) as any;
const data = (await client.getBookmarks(
limitToInt(ctx.query as any),
)) as any;
let resp = data.data;
for (let statIdx = 0; statIdx < resp.length; statIdx++) {
resp[statIdx].id = convertId(resp[statIdx].id, IdType.MastodonId);
resp[statIdx].in_reply_to_account_id = resp[statIdx].in_reply_to_account_id ? convertId(resp[statIdx].in_reply_to_account_id, IdType.MastodonId) : null;
resp[statIdx].in_reply_to_id = resp[statIdx].in_reply_to_id ? convertId(resp[statIdx].in_reply_to_id, IdType.MastodonId) : null;
let mentions = resp[statIdx].mentions
resp[statIdx].in_reply_to_account_id = resp[statIdx]
.in_reply_to_account_id
? convertId(resp[statIdx].in_reply_to_account_id, IdType.MastodonId)
: null;
resp[statIdx].in_reply_to_id = resp[statIdx].in_reply_to_id
? convertId(resp[statIdx].in_reply_to_id, IdType.MastodonId)
: null;
let mentions = resp[statIdx].mentions;
for (let mtnIdx = 0; mtnIdx < mentions.length; mtnIdx++) {
resp[statIdx].mentions[mtnIdx].id = convertId(mentions[mtnIdx].id, IdType.MastodonId);
resp[statIdx].mentions[mtnIdx].id = convertId(
mentions[mtnIdx].id,
IdType.MastodonId,
);
}
}
ctx.body = resp;
@ -393,11 +421,19 @@ export function apiAccountMastodon(router: Router): void {
let resp = data.data;
for (let statIdx = 0; statIdx < resp.length; statIdx++) {
resp[statIdx].id = convertId(resp[statIdx].id, IdType.MastodonId);
resp[statIdx].in_reply_to_account_id = resp[statIdx].in_reply_to_account_id ? convertId(resp[statIdx].in_reply_to_account_id, IdType.MastodonId) : null;
resp[statIdx].in_reply_to_id = resp[statIdx].in_reply_to_id ? convertId(resp[statIdx].in_reply_to_id, IdType.MastodonId) : null;
let mentions = resp[statIdx].mentions
resp[statIdx].in_reply_to_account_id = resp[statIdx]
.in_reply_to_account_id
? convertId(resp[statIdx].in_reply_to_account_id, IdType.MastodonId)
: null;
resp[statIdx].in_reply_to_id = resp[statIdx].in_reply_to_id
? convertId(resp[statIdx].in_reply_to_id, IdType.MastodonId)
: null;
let mentions = resp[statIdx].mentions;
for (let mtnIdx = 0; mtnIdx < mentions.length; mtnIdx++) {
resp[statIdx].mentions[mtnIdx].id = convertId(mentions[mtnIdx].id, IdType.MastodonId);
resp[statIdx].mentions[mtnIdx].id = convertId(
mentions[mtnIdx].id,
IdType.MastodonId,
);
}
}
ctx.body = resp;
@ -471,7 +507,9 @@ export function apiAccountMastodon(router: Router): void {
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.acceptFollowRequest(convertId(ctx.params.id, IdType.CalckeyId));
const data = await client.acceptFollowRequest(
convertId(ctx.params.id, IdType.CalckeyId),
);
let resp = data.data;
resp.id = convertId(resp.id, IdType.MastodonId);
ctx.body = resp;
@ -490,7 +528,9 @@ export function apiAccountMastodon(router: Router): void {
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.rejectFollowRequest(convertId(ctx.params.id, IdType.CalckeyId));
const data = await client.rejectFollowRequest(
convertId(ctx.params.id, IdType.CalckeyId),
);
let resp = data.data;
resp.id = convertId(resp.id, IdType.MastodonId);
ctx.body = resp;


@ -44,7 +44,7 @@ const writeScope = [
export function apiAuthMastodon(router: Router): void {
router.post("/v1/apps", async (ctx) => {
const BASE_URL = `${ctx.request.protocol}://${ctx.request.hostname}`;
const client = getClient(BASE_URL, '');
const client = getClient(BASE_URL, "");
const body: any = ctx.request.body || ctx.request.query;
try {
let scope = body.scopes;
@ -68,9 +68,9 @@ export function apiAuthMastodon(router: Router): void {
website: body.website,
redirect_uri: red,
client_id: Buffer.from(appData.url || "").toString("base64"),
client_secret: appData.clientSecret
client_secret: appData.clientSecret,
};
console.log(returns)
console.log(returns);
ctx.body = returns;
} catch (e: any) {
console.error(e);


@ -11,17 +11,20 @@ export async function getInstance(response: Entity.Instance) {
return {
uri: response.uri,
title: response.title || "Calckey",
short_description: response.description.substring(0, 50) || "See real server website",
description: response.description || "This is a vanilla Calckey Instance. It doesnt seem to have a description. BTW you are using the Mastodon api to access this server :)",
short_description:
response.description.substring(0, 50) || "See real server website",
description:
response.description ||
"This is a vanilla Calckey Instance. It doesnt seem to have a description. BTW you are using the Mastodon api to access this server :)",
email: response.email || "",
version: "3.0.0 compatible (3.5+ Calckey)", //I hope this version string is correct, we will need to test it.
urls: response.urls,
stats: {
user_count: (await totalUsers),
status_count: (await totalStatuses),
domain_count: response.stats.domain_count
user_count: await totalUsers,
status_count: await totalStatuses,
domain_count: response.stats.domain_count,
},
thumbnail: response.thumbnail || 'https://http.cat/404',
thumbnail: response.thumbnail || "https://http.cat/404",
languages: meta.langs,
registrations: !meta.disableRegistration || response.registrations,
approval_required: !response.registrations,


@ -44,7 +44,7 @@ export function apiSearchMastodon(router: Router): void {
}
} catch (e: any) {
console.error(e);
ctx.status = (401);
ctx.status = 401;
ctx.body = e.response.data;
}
});
@ -52,11 +52,15 @@ export function apiSearchMastodon(router: Router): void {
const BASE_URL = `${ctx.request.protocol}://${ctx.request.hostname}`;
const accessTokens = ctx.headers.authorization;
try {
const data = await getHighlight(BASE_URL, ctx.request.hostname, accessTokens);
const data = await getHighlight(
BASE_URL,
ctx.request.hostname,
accessTokens,
);
ctx.body = data;
} catch (e: any) {
console.error(e);
ctx.status = (401);
ctx.status = 401;
ctx.body = e.response.data;
}
});
@ -75,7 +79,7 @@ export function apiSearchMastodon(router: Router): void {
ctx.body = data;
} catch (e: any) {
console.error(e);
ctx.status = (401);
ctx.status = 401;
ctx.body = e.response.data;
}
});


@ -2,13 +2,13 @@ import Router from "@koa/router";
import { getClient } from "../ApiMastodonCompatibleService.js";
import { emojiRegexAtStartToEnd } from "@/misc/emoji-regex.js";
import axios from "axios";
import querystring from 'node:querystring'
import qs from 'qs'
import querystring from "node:querystring";
import qs from "qs";
import { limitToInt } from "./timeline.js";
function normalizeQuery(data: any) {
const str = querystring.stringify(data);
return qs.parse(str);
const str = querystring.stringify(data);
return qs.parse(str);
}
export function apiStatusMastodon(router: Router): void {
@ -18,11 +18,14 @@ export function apiStatusMastodon(router: Router): void {
const client = getClient(BASE_URL, accessTokens);
try {
let body: any = ctx.request.body;
if ((!body.poll && body['poll[options][]']) || (!body.media_ids && body['media_ids[]'])) {
body = normalizeQuery(body)
if (
(!body.poll && body["poll[options][]"]) ||
(!body.media_ids && body["media_ids[]"])
) {
body = normalizeQuery(body);
}
const text = body.status;
const removed = text.replace(/@\S+/g, "").replace(/\s|/g, '')
const removed = text.replace(/@\S+/g, "").replace(/\s|/g, "");
const isDefaultEmoji = emojiRegexAtStartToEnd.test(removed);
const isCustomEmoji = /^:[a-zA-Z0-9@_]+:$/.test(removed);
if ((body.in_reply_to_id && isDefaultEmoji) || isCustomEmoji) {
@ -46,9 +49,10 @@ export function apiStatusMastodon(router: Router): void {
}
}
if (!body.media_ids) body.media_ids = undefined;
if (body.media_ids && !body.media_ids.length) body.media_ids = undefined;
const { sensitive } = body
body.sensitive = typeof sensitive === 'string' ? sensitive === 'true' : sensitive
if (body.media_ids && !body.media_ids.length) body.media_ids = undefined;
const { sensitive } = body;
body.sensitive =
typeof sensitive === "string" ? sensitive === "true" : sensitive;
const data = await client.postStatus(text, body);
ctx.body = data.data;
} catch (e: any) {
@ -57,38 +61,32 @@ export function apiStatusMastodon(router: Router): void {
ctx.body = e.response.data;
}
});
router.get<{ Params: { id: string } }>(
"/v1/statuses/:id",
async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.getStatus(ctx.params.id);
ctx.body = data.data;
} catch (e: any) {
console.error(e);
ctx.status = 401;
ctx.body = e.response.data;
}
},
);
router.delete<{ Params: { id: string } }>(
"/v1/statuses/:id",
async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.deleteStatus(ctx.params.id);
ctx.body = data.data;
} catch (e: any) {
console.error(e.response.data, request.params.id);
ctx.status = 401;
ctx.body = e.response.data;
}
},
);
router.get<{ Params: { id: string } }>("/v1/statuses/:id", async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.getStatus(ctx.params.id);
ctx.body = data.data;
} catch (e: any) {
console.error(e);
ctx.status = 401;
ctx.body = e.response.data;
}
});
router.delete<{ Params: { id: string } }>("/v1/statuses/:id", async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.deleteStatus(ctx.params.id);
ctx.body = data.data;
} catch (e: any) {
console.error(e.response.data, ctx.params.id);
ctx.status = 401;
ctx.body = e.response.data;
}
});
interface IReaction {
id: string;
createdAt: string;
@ -103,12 +101,15 @@ export function apiStatusMastodon(router: Router): void {
const client = getClient(BASE_URL, accessTokens);
try {
const id = ctx.params.id;
const data = await client.getStatusContext(id, limitToInt(ctx.query as any));
const data = await client.getStatusContext(
id,
limitToInt(ctx.query as any),
);
const status = await client.getStatus(id);
let reqInstance = axios.create({
headers: {
Authorization : ctx.headers.authorization
}
Authorization: ctx.headers.authorization,
},
});
const reactionsAxios = await reqInstance.get(
`${BASE_URL}/api/notes/reactions?noteId=${id}`,
@ -296,57 +297,48 @@ export function apiStatusMastodon(router: Router): void {
}
},
);
router.get<{ Params: { id: string } }>(
"/v1/media/:id",
async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.getMedia(ctx.params.id);
ctx.body = data.data;
} catch (e: any) {
console.error(e);
ctx.status = 401;
ctx.body = e.response.data;
}
},
);
router.put<{ Params: { id: string } }>(
"/v1/media/:id",
async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.updateMedia(
ctx.params.id,
ctx.request.body as any,
);
ctx.body = data.data;
} catch (e: any) {
console.error(e);
ctx.status = 401;
ctx.body = e.response.data;
}
},
);
router.get<{ Params: { id: string } }>(
"/v1/polls/:id",
async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.getPoll(ctx.params.id);
ctx.body = data.data;
} catch (e: any) {
console.error(e);
ctx.status = 401;
ctx.body = e.response.data;
}
},
);
router.get<{ Params: { id: string } }>("/v1/media/:id", async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.getMedia(ctx.params.id);
ctx.body = data.data;
} catch (e: any) {
console.error(e);
ctx.status = 401;
ctx.body = e.response.data;
}
});
router.put<{ Params: { id: string } }>("/v1/media/:id", async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.updateMedia(
ctx.params.id,
ctx.request.body as any,
);
ctx.body = data.data;
} catch (e: any) {
console.error(e);
ctx.status = 401;
ctx.body = e.response.data;
}
});
router.get<{ Params: { id: string } }>("/v1/polls/:id", async (ctx) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.getPoll(ctx.params.id);
ctx.body = data.data;
} catch (e: any) {
console.error(e);
ctx.status = 401;
ctx.body = e.response.data;
}
});
router.post<{ Params: { id: string } }>(
"/v1/polls/:id/votes",
async (ctx) => {


@ -16,7 +16,8 @@ export function limitToInt(q: ParsedUrlQuery) {
export function argsToBools(q: ParsedUrlQuery) {
// Values taken from https://docs.joinmastodon.org/client/intro/#boolean
const toBoolean = (value: string) => !['0', 'f', 'F', 'false', 'FALSE', 'off', 'OFF'].includes(value);
const toBoolean = (value: string) =>
!["0", "f", "F", "false", "FALSE", "off", "OFF"].includes(value);
let object: any = q;
if (q.only_media)
@ -35,25 +36,26 @@ export function toTextWithReaction(status: Entity.Status[], host: string) {
if (!t.emoji_reactions) return t;
if (t.reblog) t.reblog = toTextWithReaction([t.reblog], host)[0];
const reactions = t.emoji_reactions.map((r) => {
const emojiNotation = r.url ? `:${r.name.replace('@.', '')}:` : r.name
return `${emojiNotation} (${r.count}${r.me ? `* ` : ''})`
const emojiNotation = r.url ? `:${r.name.replace("@.", "")}:` : r.name;
return `${emojiNotation} (${r.count}${r.me ? `* ` : ""})`;
});
const reaction = t.emoji_reactions as Entity.Reaction[];
const emoji = t.emojis || []
const emoji = t.emojis || [];
for (const r of reaction) {
if (!r.url) continue
emoji.push({
'shortcode': r.name,
'url': r.url,
'static_url': r.url,
'visible_in_picker': true,
},)
if (!r.url) continue;
emoji.push({
shortcode: r.name,
url: r.url,
static_url: r.url,
visible_in_picker: true,
category: "",
});
}
const isMe = reaction.findIndex((r) => r.me) > -1;
const total = reaction.reduce((sum, reaction) => sum + reaction.count, 0);
t.favourited = isMe;
t.favourites_count = total;
t.emojis = emoji
t.emojis = emoji;
t.content = `<p>${autoLinker(t.content, host)}</p><p>${reactions.join(
", ",
)}</p>`;
@ -125,23 +127,20 @@ export function apiTimelineMastodon(router: Router): void {
}
},
);
router.get(
"/v1/timelines/home",
async (ctx, reply) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.getHomeTimeline(limitToInt(ctx.query));
ctx.body = toTextWithReaction(data.data, ctx.hostname);
} catch (e: any) {
console.error(e);
console.error(e.response.data);
ctx.status = 401;
ctx.body = e.response.data;
}
},
);
router.get("/v1/timelines/home", async (ctx, reply) => {
const BASE_URL = `${ctx.protocol}://${ctx.hostname}`;
const accessTokens = ctx.headers.authorization;
const client = getClient(BASE_URL, accessTokens);
try {
const data = await client.getHomeTimeline(limitToInt(ctx.query));
ctx.body = toTextWithReaction(data.data, ctx.hostname);
} catch (e: any) {
console.error(e);
console.error(e.response.data);
ctx.status = 401;
ctx.body = e.response.data;
}
});
router.get<{ Params: { listId: string } }>(
"/v1/timelines/list/:listId",
async (ctx, reply) => {


@ -12,6 +12,11 @@ import {
} from "@/models/index.js";
import type { ILocalUser } from "@/models/entities/user.js";
import { genId } from "@/misc/gen-id.js";
import {
comparePassword,
hashPassword,
isOldAlgorithm,
} from "@/misc/password.js";
import { verifyLogin, hash } from "../2fa.js";
import { randomBytes } from "node:crypto";
import { IsNull } from "typeorm";
@ -88,7 +93,12 @@ export default async (ctx: Koa.Context) => {
const profile = await UserProfiles.findOneByOrFail({ userId: user.id });
// Compare password
const same = await bcrypt.compare(password, profile.password!);
const same = await comparePassword(password, profile.password!);
if (same && isOldAlgorithm(profile.password!)) {
profile.password = await hashPassword(password);
await UserProfiles.save(profile);
}
async function fail(status?: number, failure?: { id: string }) {
// Append signin history


@ -10,7 +10,7 @@ export default class extends Channel {
public static shouldShare = false;
public static requireCredential = false;
private channelId: string;
private typers: Record<User["id"], Date> = {};
private typers: Map<User["id"], Date> = new Map();
private emitTypersIntervalId: ReturnType<typeof setInterval>;
constructor(id: string, connection: Channel["connection"]) {
@ -44,8 +44,8 @@ export default class extends Channel {
private onEvent(data: StreamMessages["channel"]["payload"]) {
if (data.type === "typing") {
const id = data.body;
const begin = this.typers[id] == null;
this.typers[id] = new Date();
const begin = !this.typers.has(id);
this.typers.set(id, new Date());
if (begin) {
this.emitTypers();
}
@ -58,10 +58,11 @@ export default class extends Channel {
// Remove not typing users
for (const [userId, date] of Object.entries(this.typers)) {
if (now.getTime() - date.getTime() > 5000)
this.typers[userId] = undefined;
this.typers.delete(userId);
}
const users = await Users.packMany(Object.keys(this.typers), null, {
const userIds = Array.from(this.typers.keys());
const users = await Users.packMany(userIds, null, {
detail: false,
});


@ -20,7 +20,7 @@ export default class extends Channel {
private subCh:
| `messagingStream:${User["id"]}-${User["id"]}`
| `messagingStream:${UserGroup["id"]}`;
private typers: Record<User["id"], Date> = {};
private typers: Map<User["id"], Date> = new Map();
private emitTypersIntervalId: ReturnType<typeof setInterval>;
constructor(id: string, connection: Channel["connection"]) {
@ -66,8 +66,8 @@ export default class extends Channel {
) {
if (data.type === "typing") {
const id = data.body;
const begin = this.typers[id] == null;
this.typers[id] = new Date();
const begin = !this.typers.has(id);
this.typers.set(id, new Date());
if (begin) {
this.emitTypers();
}
@ -107,12 +107,13 @@ export default class extends Channel {
const now = new Date();
// Remove not typing users
for (const [userId, date] of Object.entries(this.typers)) {
for (const [userId, date] of this.typers.entries()) {
if (now.getTime() - date.getTime() > 5000)
this.typers[userId] = undefined;
this.typers.delete(userId);
}
const users = await Users.packMany(Object.keys(this.typers), null, {
const userIds = Array.from(this.typers.keys());
const users = await Users.packMany(userIds, null, {
detail: false,
});


@ -42,7 +42,7 @@ export default class Connection {
private wsConnection: websocket.connection;
public subscriber: StreamEventEmitter;
private channels: Channel[] = [];
private subscribingNotes: any = {};
private subscribingNotes: Map<string, number> = new Map();
private cachedNotes: Packed<"Note">[] = [];
private isMastodonCompatible: boolean = false;
private host: string;
@ -339,13 +339,10 @@ export default class Connection {
private onSubscribeNote(payload: any) {
if (!payload.id) return;
if (this.subscribingNotes[payload.id] == null) {
this.subscribingNotes[payload.id] = 0;
}
const current = this.subscribingNotes.get(payload.id) || 0;
this.subscribingNotes.set(payload.id, current + 1);
this.subscribingNotes[payload.id]++;
if (this.subscribingNotes[payload.id] === 1) {
if (!current) {
this.subscriber.on(`noteStream:${payload.id}`, this.onNoteStreamMessage);
}
}
@ -356,11 +353,13 @@ export default class Connection {
private onUnsubscribeNote(payload: any) {
if (!payload.id) return;
this.subscribingNotes[payload.id]--;
if (this.subscribingNotes[payload.id] <= 0) {
this.subscribingNotes[payload.id] = undefined;
const current = this.subscribingNotes.get(payload.id) || 0;
if (current <= 1) {
this.subscribingNotes.delete(payload.id);
this.subscriber.off(`noteStream:${payload.id}`, this.onNoteStreamMessage);
return;
}
this.subscribingNotes.set(payload.id, current - 1);
}
private async onNoteStreamMessage(data: StreamMessages["note"]["payload"]) {
@ -414,7 +413,7 @@ export default class Connection {
const client = getClient(this.host, this.accessToken);
client.getStatus(payload.id).then((data) => {
const newPost = toTextWithReaction([data.data], this.host);
const targetPost = newPost[0]
const targetPost = newPost[0];
for (const stream of this.currentSubscribe) {
this.wsConnection.send(
JSON.stringify({


@ -31,7 +31,7 @@ import webServer from "./web/index.js";
import { initializeStreamingServer } from "./api/streaming.js";
import { koaBody } from "koa-body";
import removeTrailingSlash from "koa-remove-trailing-slashes";
import {v4 as uuid} from "uuid";
import { v4 as uuid } from "uuid";
export const serverLogger = new Logger("server", "gray", false);
@ -162,19 +162,19 @@ mastoRouter.get("/oauth/authorize", async (ctx) => {
const { client_id, state, redirect_uri } = ctx.request.query;
console.log(ctx.request.req);
let param = "mastodon=true";
if (state)
param += `&state=${state}`;
if (redirect_uri)
param += `&redirect_uri=${redirect_uri}`;
const client = client_id? client_id : "";
ctx.redirect(`${Buffer.from(client.toString(), 'base64').toString()}?${param}`);
if (state) param += `&state=${state}`;
if (redirect_uri) param += `&redirect_uri=${redirect_uri}`;
const client = client_id ? client_id : "";
ctx.redirect(
`${Buffer.from(client.toString(), "base64").toString()}?${param}`,
);
});
mastoRouter.post("/oauth/token", async (ctx) => {
const body: any = ctx.request.body || ctx.request.query;
console.log('token-request', body);
console.log('token-query', ctx.request.query);
if (body.grant_type === 'client_credentials') {
console.log("token-request", body);
console.log("token-query", ctx.request.query);
if (body.grant_type === "client_credentials") {
const ret = {
access_token: uuid(),
token_type: "Bearer",
@ -197,8 +197,8 @@ mastoRouter.post("/oauth/token", async (ctx) => {
// return;
//}
//token = `${m[1]}-${m[2]}-${m[3]}-${m[4]}-${m[5]}`
console.log(body.code, token)
token = body.code
console.log(body.code, token);
token = body.code;
}
if (client_id instanceof Array) {
client_id = client_id.toString();
@ -214,10 +214,10 @@ mastoRouter.post("/oauth/token", async (ctx) => {
const ret = {
access_token: atData.accessToken,
token_type: "Bearer",
scope: body.scope || 'read write follow push',
scope: body.scope || "read write follow push",
created_at: Math.floor(new Date().getTime() / 1000),
};
console.log('token-response', ret)
console.log("token-response", ret);
ctx.body = ret;
} catch (err: any) {
console.error(err);


@ -2,7 +2,7 @@ extends ./base
block vars
- const user = note.user;
- const title = privateMode ? instanceName : (user.name ? `${user.name} (@${user.username})` : `@${user.username}`);
- const title = privateMode ? instanceName : (user.name ? `${user.name} (@${user.username}${user.host ? `@${user.host}` : ''})` : `@${user.username}`);
- const url = `${config.url}/notes/${note.id}`;
- const isRenote = note.renote && note.text == null && note.fileIds.length == 0 && note.poll == null;
- const isImage = note.files.length !== 0 && note.files[0].type.startsWith('image');


@ -17,6 +17,7 @@ import {
} from "@/prelude/time.js";
import { getChartInsertLock } from "@/misc/app-lock.js";
import { db } from "@/db/postgre.js";
import promiseLimit from "promise-limit";
const logger = new Logger("chart", "white", process.env.NODE_ENV !== "test");
@ -472,7 +473,8 @@ export default abstract class Chart<T extends Schema> {
protected commit(diff: Commit<T>, group: string | null = null): void {
for (const [k, v] of Object.entries(diff)) {
if (v == null || v === 0 || (Array.isArray(v) && v.length === 0))
diff[k] = undefined;
// rome-ignore lint/performance/noDelete: needs to be deleted not just set to undefined
delete diff[k];
}
this.buffer.push({
diff,
@ -554,7 +556,7 @@ export default abstract class Chart<T extends Schema> {
// bake unique count
for (const [k, v] of Object.entries(finalDiffs)) {
if (this.schema[k].uniqueIncrement) {
if (this.schema[k].uniqueIncrement && Array.isArray(v) && v.length > 0) {
const name = (columnPrefix +
k.replaceAll(".", columnDot)) as keyof Columns<T>;
const tempColumnName = (uniqueTempColumnPrefix +
@ -646,16 +648,32 @@ export default abstract class Chart<T extends Schema> {
);
};
const groups = removeDuplicates(this.buffer.map((log) => log.group));
const startCount = this.buffer.length;
const groups = removeDuplicates(this.buffer.map((log) => log.group));
const groupCount = groups.length;
// Limit the number of concurrent chart update queries executed on the database
// to 25 at a time, so as to avoid excessive IO spinlocks like when 8k queries are
// sent out at once.
const limit = promiseLimit(25);
const startTime = Date.now();
await Promise.all(
groups.map((group) =>
Promise.all([
this.claimCurrentLog(group, "hour"),
this.claimCurrentLog(group, "day"),
]).then(([logHour, logDay]) => update(logHour, logDay)),
groups.map((group) =>
limit(() =>
Promise.all([
this.claimCurrentLog(group, "hour"),
this.claimCurrentLog(group, "day"),
]).then(([logHour, logDay]) => update(logHour, logDay)),
),
),
);
const duration = Date.now() - startTime;
logger.info(
`Saved ${startCount} (${groupCount} unique) ${this.name} items in ${duration}ms (${this.buffer.length} remaining)`,
);
}
public async tick(


@ -21,7 +21,7 @@
"experimentalDecorators": true,
"emitDecoratorMetadata": true,
"resolveJsonModule": true,
"isolatedModules": true,
"isolatedModules": false,
"rootDir": "./src",
"baseUrl": "./",
"paths": {


@ -178,6 +178,13 @@ export default defineComponent({
border-radius: 0;
box-shadow: none;
&:first-child {
border-radius: var(--radius) var(--radius) 0 0;
}
&:last-child {
border-radius: 0 0 var(--radius) var(--radius);
}
&:not(:last-child) {
border-bottom: solid 0.5px var(--divider);
}


@ -47,7 +47,6 @@ const bgCss = bg.toRgbString();
border-radius: 999px;
max-width: 100%;
white-space: nowrap;
overflow: clip;
text-overflow: ellipsis;
color: var(--mention);


@ -10,20 +10,26 @@
<template v-for="(item, i) in items2">
<div v-if="item === null" class="divider"></div>
<span v-else-if="item.type === 'label'" class="label item">
<span>{{ item.text }}</span>
<span :style="item.textStyle || ''">{{ item.text }}</span>
</span>
<span v-else-if="item.type === 'pending'" :tabindex="i" class="pending item">
<span><MkEllipsis/></span>
</span>
<MkA v-else-if="item.type === 'link'" :to="item.to" :tabindex="i" class="_button item" @click.passive="close(true)" @mouseenter.passive="onItemMouseEnter(item)" @mouseleave.passive="onItemMouseLeave(item)">
<i v-if="item.icon" class="ph-fw ph-lg" :class="item.icon"></i>
<span v-else-if="item.icons">
<i v-for="icon in item.icons" class="ph-fw ph-lg" :class="icon"></i>
</span>
<MkAvatar v-if="item.avatar" :user="item.avatar" class="avatar"/>
<span>{{ item.text }}</span>
<span :style="item.textStyle || ''">{{ item.text }}</span>
<span v-if="item.indicate" class="indicator"><i class="ph-circle ph-fill"></i></span>
</MkA>
<a v-else-if="item.type === 'a'" :href="item.href" :target="item.target" :download="item.download" :tabindex="i" class="_button item" @click="close(true)" @mouseenter.passive="onItemMouseEnter(item)" @mouseleave.passive="onItemMouseLeave(item)">
<i v-if="item.icon" class="ph-fw ph-lg" :class="item.icon"></i>
<span>{{ item.text }}</span>
<span v-else-if="item.icons">
<i v-for="icon in item.icons" class="ph-fw ph-lg" :class="icon"></i>
</span>
<span :style="item.textStyle || ''">{{ item.text }}</span>
<span v-if="item.indicate" class="indicator"><i class="ph-circle ph-fill"></i></span>
</a>
<button v-else-if="item.type === 'user' && !items.hidden" :tabindex="i" class="_button item" :class="{ active: item.active }" :disabled="item.active" @click="clicked(item.action, $event)" @mouseenter.passive="onItemMouseEnter(item)" @mouseleave.passive="onItemMouseLeave(item)">
@ -31,17 +37,23 @@
<span v-if="item.indicate" class="indicator"><i class="ph-circle ph-fill"></i></span>
</button>
<span v-else-if="item.type === 'switch'" :tabindex="i" class="item" @mouseenter.passive="onItemMouseEnter(item)" @mouseleave.passive="onItemMouseLeave(item)">
<FormSwitch v-model="item.ref" :disabled="item.disabled" class="form-switch">{{ item.text }}</FormSwitch>
<FormSwitch v-model="item.ref" :disabled="item.disabled" class="form-switch" :style="item.textStyle || ''">{{ item.text }}</FormSwitch>
</span>
<button v-else-if="item.type === 'parent'" :tabindex="i" class="_button item parent" :class="{ childShowing: childShowingItem === item }" @mouseenter="showChildren(item, $event)">
<i v-if="item.icon" class="ph-fw ph-lg" :class="item.icon"></i>
<span>{{ item.text }}</span>
<span v-else-if="item.icons">
<i v-for="icon in item.icons" class="ph-fw ph-lg" :class="icon"></i>
</span>
<span :style="item.textStyle || ''">{{ item.text }}</span>
<span class="caret"><i class="ph-caret-right ph-bold ph-lg ph-fw ph-lg"></i></span>
</button>
<button v-else-if="!item.hidden" :tabindex="i" class="_button item" :class="{ danger: item.danger, active: item.active }" :disabled="item.active" @click="clicked(item.action, $event)" @mouseenter.passive="onItemMouseEnter(item)" @mouseleave.passive="onItemMouseLeave(item)">
<i v-if="item.icon" class="ph-fw ph-lg" :class="item.icon"></i>
<span v-else-if="item.icons">
<i v-for="icon in item.icons" class="ph-fw ph-lg" :class="icon"></i>
</span>
<MkAvatar v-if="item.avatar" :user="item.avatar" class="avatar"/>
<span>{{ item.text }}</span>
<span :style="item.textStyle || ''">{{ item.text }}</span>
<span v-if="item.indicate" class="indicator"><i class="ph-circle ph-fill"></i></span>
</button>
</template>


@ -382,7 +382,6 @@ function readPromo() {
width: 0;
flex-grow: 1;
position: relative;
margin-bottom: -10px;
line-height: 28px;
}
> .line {
@ -452,17 +451,13 @@ function readPromo() {
}
}
}
& + .article {
padding-top: 10px !important;
}
}
> .article {
padding: 28px 32px 16px;
padding: 4px 32px 10px;
cursor: pointer;
@media (pointer: coarse) {
cursor: default;
}
@ -662,17 +657,16 @@ function readPromo() {
padding-top: 6px;
> .note-context {
padding-inline: 16px;
margin-top: 0;
margin-top: 8px;
> :not(.line) {
margin-top: 5px;
margin-top: 0px;
}
> .line {
margin-right: 10px;
}
}
> .article {
padding: 16px 16px 9px;
padding: 4px 16px 8px;
> .main > .header-container > .avatar {
margin-right: 10px;
// top: calc(14px + var(--stickyTop, 0px));


@ -56,7 +56,6 @@ const showTicker = (defaultStore.state.instanceTicker === 'always') || (defaultS
border-radius: 100px;
font-size: .8em;
text-shadow: 0 2px 2px var(--shadow);
> .avatar {
width: 3.7em;
height: 3.7em;
@ -74,10 +73,11 @@ const showTicker = (defaultStore.state.instanceTicker === 'always') || (defaultS
width: 0;
overflow: hidden;
text-overflow: ellipsis;
gap: .1em 0;
}
&:last-child {
max-width: 50%;
gap: .2em .5em;
gap: .3em .5em;
}
.article > .main & {
display: flex;


@ -7,7 +7,7 @@
</div>
<div class="body">
<div class="content">
<Mfm :text="text.trim()" :author="$i" :i="$i"/>
<Mfm :text="preprocess(text).trim()" :author="$i" :i="$i"/>
</div>
</div>
</div>
@ -16,6 +16,7 @@
<script lang="ts" setup>
import { } from 'vue';
import { preprocess } from '@/scripts/preprocess';
const props = defineProps<{
text: string;


@ -17,7 +17,7 @@
<MkA v-if="note.replyId" :to="`/notes/${note.replyId}`" class="reply-icon" @click.stop>
<i class="ph-arrow-bend-left-up ph-bold ph-lg"></i>
</MkA>
<MkA v-if="conversation && note.renoteId && note.renoteId != parentId" :to="`/notes/${note.renoteId}`" class="reply-icon" @click.stop>
<MkA v-if="conversation && note.renoteId && note.renoteId != parentId && !note.replyId" :to="`/notes/${note.renoteId}`" class="reply-icon" @click.stop>
<i class="ph-quotes ph-bold ph-lg"></i>
</MkA>
<Mfm v-if="note.cw != ''" class="text" :text="note.cw" :author="note.user" :i="$i" :custom-emojis="note.emojis"/>
@ -373,7 +373,7 @@ function noteClick(e) {
&.reply-to, &.reply-to-more {
padding-bottom: 0;
&:first-child {
padding-top: 30px;
padding-top: 24px;
}
.line::before {
margin-bottom: -16px;


@ -42,6 +42,7 @@ defineExpose({
&.noGap {
> .notes {
background: var(--panel);
border-radius: var(--radius);
}
}
&:not(.noGap) {


@ -100,5 +100,6 @@ onUnmounted(() => {
<style lang="scss" scoped>
.elsfgstc {
background: var(--panel);
border-radius: var(--radius);
}
</style>


@ -92,6 +92,7 @@ import { $i, getAccounts, openAccountMenu as openAccountMenu_ } from '@/account'
import { uploadFile } from '@/scripts/upload';
import { deepClone } from '@/scripts/clone';
import XCheatSheet from '@/components/MkCheatSheetDialog.vue';
import { preprocess } from '@/scripts/preprocess';
const modal = inject('modal');
@ -200,7 +201,7 @@ const submitText = $computed((): string => {
});
const textLength = $computed((): number => {
return length((text + imeText).trim());
return length((preprocess(text) + imeText).trim());
});
const maxTextLength = $computed((): number => {
@ -557,8 +558,10 @@ function deleteDraft() {
}
async function post() {
const processedText = preprocess(text);
let postData = {
text: text === '' ? undefined : text,
text: processedText === '' ? undefined : processedText,
fileIds: files.length > 0 ? files.map(f => f.id) : undefined,
replyId: props.reply ? props.reply.id : undefined,
renoteId: props.renote ? props.renote.id : quoteId ? quoteId : undefined,


@ -64,24 +64,91 @@ const renote = async (viaKeyboard = false, ev?: MouseEvent) => {
const users = renotes.map(x => x.user.id);
const hasRenotedBefore = users.includes($i.id);
let buttonActions = [{
text: i18n.ts.renote,
icon: 'ph-repeat ph-bold ph-lg',
danger: false,
action: () => {
os.api('notes/create', {
renoteId: props.note.id,
visibility: props.note.visibility,
});
const el = ev && (ev.currentTarget ?? ev.target) as HTMLElement | null | undefined;
if (el) {
const rect = el.getBoundingClientRect();
const x = rect.left + (el.offsetWidth / 2);
const y = rect.top + (el.offsetHeight / 2);
os.popup(Ripple, { x, y }, {}, 'end');
}
},
}];
let buttonActions = [];
if (props.note.visibility === 'public') {
buttonActions.push({
text: i18n.ts.renote,
textStyle: 'font-weight: bold',
icon: 'ph-repeat ph-bold ph-lg',
danger: false,
action: () => {
os.api('notes/create', {
renoteId: props.note.id,
visibility: 'public',
});
const el = ev && (ev.currentTarget ?? ev.target) as HTMLElement | null | undefined;
if (el) {
const rect = el.getBoundingClientRect();
const x = rect.left + (el.offsetWidth / 2);
const y = rect.top + (el.offsetHeight / 2);
os.popup(Ripple, { x, y }, {}, 'end');
}
},
});
}
if (['public', 'home'].includes(props.note.visibility)) {
buttonActions.push({
text: i18n.ts.renoteAsUnlisted,
icons: ['ph-repeat ph-bold ph-lg', 'ph-house ph-bold ph-lg'],
danger: false,
action: () => {
os.api('notes/create', {
renoteId: props.note.id,
visibility: 'home',
});
const el = ev && (ev.currentTarget ?? ev.target) as HTMLElement | null | undefined;
if (el) {
const rect = el.getBoundingClientRect();
const x = rect.left + (el.offsetWidth / 2);
const y = rect.top + (el.offsetHeight / 2);
os.popup(Ripple, { x, y }, {}, 'end');
}
},
});
}
if (props.note.visibility === 'specified') {
buttonActions.push({
text: i18n.ts.renoteToRecipients,
icons: ['ph-repeat ph-bold ph-lg', 'ph-envelope-simple-open ph-bold ph-lg'],
danger: false,
action: () => {
os.api('notes/create', {
renoteId: props.note.id,
visibility: 'specified',
visibleUserIds: props.note.visibleUserIds,
});
const el = ev && (ev.currentTarget ?? ev.target) as HTMLElement | null | undefined;
if (el) {
const rect = el.getBoundingClientRect();
const x = rect.left + (el.offsetWidth / 2);
const y = rect.top + (el.offsetHeight / 2);
os.popup(Ripple, { x, y }, {}, 'end');
}
},
});
} else {
buttonActions.push({
text: i18n.ts.renoteToFollowers,
icons: ['ph-repeat ph-bold ph-lg', 'ph-lock-simple-open ph-bold ph-lg'],
danger: false,
action: () => {
os.api('notes/create', {
renoteId: props.note.id,
visibility: 'followers',
});
const el = ev && (ev.currentTarget ?? ev.target) as HTMLElement | null | undefined;
if (el) {
const rect = el.getBoundingClientRect();
const x = rect.left + (el.offsetWidth / 2);
const y = rect.top + (el.offsetHeight / 2);
os.popup(Ripple, { x, y }, {}, 'end');
}
},
});
}
if (!defaultStore.state.seperateRenoteQuote) {
buttonActions.push({


@ -6,7 +6,7 @@
<MkA v-if="note.replyId" :to="`/notes/${note.replyId}`" class="reply-icon" @click.stop>
<i class="ph-arrow-bend-left-up ph-bold ph-lg"></i>
</MkA>
<MkA v-if="conversation && note.renoteId && note.renoteId != parentId" :to="`/notes/${note.renoteId}`" class="reply-icon" @click.stop>
<MkA v-if="conversation && note.renoteId && note.renoteId != parentId && !note.replyId" :to="`/notes/${note.renoteId}`" class="reply-icon" @click.stop>
<i class="ph-quotes ph-bold ph-lg"></i>
</MkA>
</template>


@ -148,73 +148,4 @@ export const navbarItemDef = reactive({
location.reload();
},
},
help: {
title: "help",
icon: "ph-question ph-bold ph-lg",
action: (ev) => {
os.popupMenu(
[
{
text: instance.name ?? host,
type: "label",
},
{
type: "link",
text: i18n.ts.instanceInfo,
icon: "ph-info ph-bold ph-lg",
to: "/about",
},
{
type: "link",
text: i18n.ts.aboutMisskey,
icon: "ph-lightbulb ph-bold ph-lg",
to: "/about-calckey",
},
{
type: "link",
text: i18n.ts._apps.apps,
icon: "ph-device-mobile ph-bold ph-lg",
to: "/apps",
},
{
type: "button",
action: async () => {
defaultStore.set("tutorial", 0);
os.popup(XTutorial, {}, {}, "closed");
},
text: i18n.ts.replayTutorial,
icon: "ph-circle-wavy-question ph-bold ph-lg",
},
null,
{
type: "parent",
text: i18n.ts.developer,
icon: "ph-code ph-bold ph-lg",
children: [
{
type: "link",
to: "/api-console",
text: "API Console",
icon: "ph-terminal-window ph-bold ph-lg",
},
{
text: i18n.ts.document,
icon: "ph-file-doc ph-bold ph-lg",
action: () => {
window.open("/api-doc", "_blank");
},
},
{
type: "link",
to: "/scratchpad",
text: "AiScript Scratchpad",
icon: "ph-scribble-loop ph-bold ph-lg",
},
],
},
],
ev.currentTarget ?? ev.target,
);
},
},
});

Some files were not shown because too many files have changed in this diff