mirror of
https://iceshrimp.dev/crimekillz/trashposs
synced 2024-11-22 00:43:49 +01:00
Port SeaORM migrations to TypeORM
This commit is contained in:
parent
7cb576a535
commit
30de454b69
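The SeaORM (Rust) migrations under packages/backend/native-utils/migration are removed in this commit and re-created as plain JavaScript TypeORM migrations under packages/backend/migration. As orientation only (not part of the diff itself): each ported file follows the shape TypeORM expects from a JavaScript migration, a class with a `name` property and `up`/`down` methods that receive a query runner. A minimal sketch, assuming the same conventions as the files added below; the class and file name here are hypothetical:

```js
// Minimal sketch of the migration shape used by the files added in this commit.
// The name is hypothetical; the real files pair a millisecond timestamp with the
// class name, e.g. 1695747439252-drop-reversi.js.
export class ExampleMigration1700000000000 {
	name = "ExampleMigration1700000000000";

	async up(queryRunner) {
		// Forward migration: raw SQL executed through TypeORM's QueryRunner.
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_example" ON "note" ("id")`);
	}

	async down(queryRunner) {
		// Reverse migration: undo whatever up() did.
		await queryRunner.query(`DROP INDEX IF EXISTS "IDX_example"`);
	}
}
```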
27  .pnp.cjs  generated
@@ -19,10 +19,6 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
     "name": "backend",\
     "reference": "workspace:packages/backend"\
   },\
-  {\
-    "name": "native-utils",\
-    "reference": "workspace:packages/backend/native-utils"\
-  },\
   {\
     "name": "client",\
     "reference": "workspace:packages/client"\
@@ -48,7 +44,6 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
   ["iceshrimp", ["workspace:."]],\
   ["iceshrimp-js", ["workspace:packages/iceshrimp-js"]],\
   ["megalodon", ["workspace:packages/megalodon"]],\
-  ["native-utils", ["workspace:packages/backend/native-utils"]],\
   ["sw", ["workspace:packages/sw"]]\
 ],\
 "fallbackPool": [\
@@ -4952,10 +4947,6 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
     "napi-build-utils",\
     "npm:1.0.2"\
   ],\
-  [\
-    "native-utils",\
-    "link:native-utils::locator=backend%40workspace%3Apackages%2Fbackend"\
-  ],\
   [\
     "natural-compare",\
     "npm:1.4.0"\
@@ -15072,7 +15063,6 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
   ["mocha", "npm:10.2.0"],\
   ["msgpackr", "npm:1.9.5"],\
   ["multer", "npm:1.4.4-lts.1"],\
-  ["native-utils", "link:native-utils::locator=backend%40workspace%3Apackages%2Fbackend"],\
   ["nested-property", "npm:4.0.0"],\
   ["node-fetch", "npm:3.3.1"],\
   ["nodemailer", "npm:6.9.3"],\
@@ -26950,23 +26940,6 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
       "linkType": "HARD"\
     }]\
   ]],\
-  ["native-utils", [\
-    ["link:native-utils::locator=backend%40workspace%3Apackages%2Fbackend", {\
-      "packageLocation": "./packages/backend/native-utils/",\
-      "packageDependencies": [\
-        ["native-utils", "link:native-utils::locator=backend%40workspace%3Apackages%2Fbackend"]\
-      ],\
-      "linkType": "SOFT",\
-      "discardFromLookup": true\
-    }],\
-    ["workspace:packages/backend/native-utils", {\
-      "packageLocation": "./packages/backend/native-utils/",\
-      "packageDependencies": [\
-        ["native-utils", "workspace:packages/backend/native-utils"]\
-      ],\
-      "linkType": "SOFT"\
-    }]\
-  ]],\
   ["natural-compare", [\
     ["npm:1.4.0", {\
       "packageLocation": "./.yarn/cache/natural-compare-npm-1.4.0-97b75b362d-23ad088b08.zip/node_modules/natural-compare/",\
@@ -34,7 +34,6 @@
   },
   "workspaces": [
     "packages/backend",
-    "packages/backend/native-utils",
     "packages/client",
     "packages/sw",
     "packages/iceshrimp-js",
52  packages/backend/migration/1695747439252-drop-reversi.js  Normal file
@@ -0,0 +1,52 @@
export class DropReversi1695747439252 {
	name = "DropReversi1695747439252";

	async up(queryRunner) {
		await queryRunner.query(`DROP TABLE IF EXISTS "reversi_game"`);
		await queryRunner.query(`DROP TABLE IF EXISTS "reversi_matching"`);
	}

	async down(queryRunner) {
		await queryRunner.query(
			`CREATE TABLE "reversi_game" ("id" character varying(32) NOT NULL, "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL, "startedAt" TIMESTAMP WITH TIME ZONE, "user1Id" character varying(32) NOT NULL, "user2Id" character varying(32) NOT NULL, "user1Accepted" boolean NOT NULL DEFAULT false, "user2Accepted" boolean NOT NULL DEFAULT false, "black" integer, "isStarted" boolean NOT NULL DEFAULT false, "isEnded" boolean NOT NULL DEFAULT false, "winnerId" character varying(32), "surrendered" character varying(32), "logs" jsonb NOT NULL DEFAULT '[]', "map" character varying(64) array NOT NULL, "bw" character varying(32) NOT NULL, "isLlotheo" boolean NOT NULL DEFAULT false, "canPutEverywhere" boolean NOT NULL DEFAULT false, "loopedBoard" boolean NOT NULL DEFAULT false, "form1" jsonb DEFAULT null, "form2" jsonb DEFAULT null, "crc32" character varying(32), CONSTRAINT "PK_76b30eeba71b1193ad7c5311c3f" PRIMARY KEY ("id"))`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_b46ec40746efceac604142be1c" ON "reversi_game" ("createdAt") `,
		);
		await queryRunner.query(
			`CREATE TABLE "reversi_matching" ("id" character varying(32) NOT NULL, "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL, "parentId" character varying(32) NOT NULL, "childId" character varying(32) NOT NULL, CONSTRAINT "PK_880bd0afbab232f21c8b9d146cf" PRIMARY KEY ("id"))`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_b604d92d6c7aec38627f6eaf16" ON "reversi_matching" ("createdAt") `,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_3b25402709dd9882048c2bbade" ON "reversi_matching" ("parentId") `,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_e247b23a3c9b45f89ec1299d06" ON "reversi_matching" ("childId") `,
		);
		await queryRunner.query(
			`ALTER TABLE "reversi_game" ADD CONSTRAINT "FK_f7467510c60a45ce5aca6292743" FOREIGN KEY ("user1Id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
		);
		await queryRunner.query(
			`ALTER TABLE "reversi_game" ADD CONSTRAINT "FK_6649a4e8c5d5cf32fb03b5da9f6" FOREIGN KEY ("user2Id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
		);
		await queryRunner.query(
			`ALTER TABLE "reversi_matching" ADD CONSTRAINT "FK_3b25402709dd9882048c2bbade0" FOREIGN KEY ("parentId") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
		);
		await queryRunner.query(
			`ALTER TABLE "reversi_matching" ADD CONSTRAINT "FK_e247b23a3c9b45f89ec1299d066" FOREIGN KEY ("childId") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
		);
		await queryRunner.query(
			`COMMENT ON COLUMN "reversi_game"."createdAt" IS 'The created date of the ReversiGame.'`,
		);
		await queryRunner.query(
			`COMMENT ON COLUMN "reversi_game"."startedAt" IS 'The started date of the ReversiGame.'`,
		);
		await queryRunner.query(`COMMENT ON COLUMN "reversi_game"."form1" IS NULL`);
		await queryRunner.query(`COMMENT ON COLUMN "reversi_game"."form2" IS NULL`);
		await queryRunner.query(
			`COMMENT ON COLUMN "reversi_matching"."createdAt" IS 'The created date of the ReversiMatching.'`,
		);
	}
}
10  packages/backend/migration/1695748502971-index-note-url.js  Normal file
@@ -0,0 +1,10 @@
export class IndexNoteUrl1695748502971 {
	name = "IndexNoteUrl1695748502971";
	async up(queryRunner) {
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_note_url" ON "note" ("url") `);
	}

	async down(queryRunner) {
		await queryRunner.query(`DROP INDEX IF EXISTS "IDX_note_url"`);
	}
}
23  packages/backend/migration/1695748874491-drop-ads.js  Normal file
@@ -0,0 +1,23 @@
export class DropAds1695748874491 {
	name = "DropAds1695748874491";
	async up(queryRunner) {
		await queryRunner.query(`DROP INDEX IF EXISTS "IDX_2da24ce20ad209f1d9dc032457"`);
		await queryRunner.query(`DROP INDEX IF EXISTS "IDX_1129c2ef687fc272df040bafaa"`);
		await queryRunner.query(`DROP TABLE IF EXISTS "ad"`);
	}

	async down(queryRunner) {
		await queryRunner.query(
			`CREATE TABLE "ad" ("id" character varying(32) NOT NULL, "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL, "expiresAt" TIMESTAMP WITH TIME ZONE NOT NULL, "place" character varying(32) NOT NULL, "priority" character varying(32) NOT NULL, "url" character varying(1024) NOT NULL, "imageUrl" character varying(1024) NOT NULL, "memo" character varying(8192) NOT NULL, CONSTRAINT "PK_0193d5ef09746e88e9ea92c634d" PRIMARY KEY ("id")); COMMENT ON COLUMN "ad"."createdAt" IS 'The created date of the Ad.'; COMMENT ON COLUMN "ad"."expiresAt" IS 'The expired date of the Ad.'`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_1129c2ef687fc272df040bafaa" ON "ad" ("createdAt") `,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_2da24ce20ad209f1d9dc032457" ON "ad" ("expiresAt") `,
		);
		await queryRunner.query(
			`ALTER TABLE "ad" ADD "ratio" integer NOT NULL DEFAULT '1'`,
		);
	}
}
@@ -0,0 +1,12 @@
export class InstanceAccountDomainCleanup1695749386779 {
	name = "InstanceAccountDomainCleanup1695749386779";
	async up(queryRunner) {
		await queryRunner.query(`ALTER TABLE "instance" DROP COLUMN IF EXISTS "accountDomain"`);
		await queryRunner.query(`ALTER TABLE "instance" DROP COLUMN IF EXISTS "account_domain"`);
	}

	async down(queryRunner) {
		// This migration is only here to ensure consistent state if upgrading from certain dev branch commits, skipping the final TypeORM migration.
		// As such, there is no need to revert it.
	}
}
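The four migration files above are plain ESM classes; nothing in this commit shows how they are wired up, but TypeORM discovers them through the data source passed to `typeorm migration:run -d ormconfig.js` (see the `migrate:typeorm` script later in this diff). A minimal sketch of what such a data source could look like; the connection values and the `migrations` glob here are assumptions, not taken from this repository:

```js
// Hypothetical ormconfig.js sketch; the option names follow TypeORM's DataSource
// API, but the concrete values are illustrative assumptions.
import { DataSource } from "typeorm";

export default new DataSource({
	type: "postgres",
	host: "localhost",      // assumed; the real config is read elsewhere
	port: 5432,             // assumed
	username: "iceshrimp",  // assumed
	database: "iceshrimp",  // assumed
	// Picks up files such as migration/1695747439252-drop-reversi.js:
	migrations: ["migration/*.js"],
	migrationsTableName: "migrations",
});
```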
@@ -1,3 +0,0 @@
[target.aarch64-unknown-linux-musl]
linker = "aarch64-linux-musl-gcc"
rustflags = ["-C", "target-feature=-crt-static"]
@@ -1,3 +0,0 @@
[*.rs]
indent_style = space
indent_size = 4
199  packages/backend/native-utils/.gitignore  vendored
@@ -1,199 +0,0 @@
# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node

### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
.env.test

# parcel-bundler cache (https://parceljs.org/)
.cache

# Next.js build output
.next

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# End of https://www.toptal.com/developers/gitignore/api/node

# Created by https://www.toptal.com/developers/gitignore/api/macos
# Edit at https://www.toptal.com/developers/gitignore?templates=macos

### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride

# Icon must end with two
Icon


# Thumbnails
._*

# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent

# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

### macOS Patch ###
# iCloud generated files
*.icloud

# End of https://www.toptal.com/developers/gitignore/api/macos

# Created by https://www.toptal.com/developers/gitignore/api/windows
# Edit at https://www.toptal.com/developers/gitignore?templates=windows

### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db

# Dump file
*.stackdump

# Folder config file
[Dd]esktop.ini

# Recycle Bin used on file shares
$RECYCLE.BIN/

# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp

# Windows shortcuts
*.lnk

# End of https://www.toptal.com/developers/gitignore/api/windows

# napi-rs generated files
built/

#Added by cargo

/target

.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions

*.node
@@ -1,13 +0,0 @@
target
Cargo.lock
.cargo
.github
npm
.eslintrc
.prettierignore
rustfmt.toml
yarn.lock
*.node
.yarn
__test__
renovate.json
@@ -1,5 +0,0 @@
extern crate napi_build;

fn main() {
    napi_build::setup();
}
199  packages/backend/native-utils/migration/.gitignore  vendored
@@ -1,199 +0,0 @@
(199 lines deleted; the content is identical to packages/backend/native-utils/.gitignore above)
2494  packages/backend/native-utils/migration/Cargo.lock  generated
File diff suppressed because it is too large
@@ -1,33 +0,0 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
name = "migration"
path = "src/lib.rs"

[features]
default = []

[dependencies]
serde_json = "1.0.96"
tokio = { version = "1.28.2", features = ["full"] }
serde_yaml = "0.9.21"
serde = { version = "1.0.163", features = ["derive"] }
urlencoding = "2.1.2"
redis = { version = "0.23.0", features = ["tokio-rustls-comp"] }
sea-orm = "0.11.3"
url = { version = "2.4.0", features = ["serde"] }

[dependencies.sea-orm-migration]
version = "0.11.0"
features = [
    # Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
    # View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
    # e.g.
    "runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
    "sqlx-postgres",        # `DATABASE_DRIVER` feature
    "sqlx-sqlite",
]
@@ -1,55 +0,0 @@
# Making migrations

For more information, please read https://www.sea-ql.org/SeaORM/docs/migration/setting-up-migration/

- Install `sea-orm-cli`
```sh
cargo install sea-orm-cli
```

- Generate
```sh
sea-orm-cli migrate generate ****
```

# Running Migrator CLI

- Generate a new migration file
```sh
cargo run -- migrate generate MIGRATION_NAME
```
- Apply all pending migrations
```sh
cargo run
```
```sh
cargo run -- up
```
- Apply first 10 pending migrations
```sh
cargo run -- up -n 10
```
- Rollback last applied migrations
```sh
cargo run -- down
```
- Rollback last 10 applied migrations
```sh
cargo run -- down -n 10
```
- Drop all tables from the database, then reapply all migrations
```sh
cargo run -- fresh
```
- Rollback all applied migrations, then reapply all migrations
```sh
cargo run -- refresh
```
- Rollback all applied migrations
```sh
cargo run -- reset
```
- Check the status of all migrations
```sh
cargo run -- status
```
@@ -1,26 +0,0 @@
pub use sea_orm_migration::prelude::*;

mod m20230531_180824_drop_reversi;
mod m20230627_185451_index_note_url;
mod m20230709_000510_move_antenna_to_cache;
mod m20230726_213530_drop_ads;
mod m20230801_160334_add_instance_account_domain;
mod m20230802_190415_fix_instance_account_domain;
mod m20230905_210205_drop_instance_account_domain;

pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![
            Box::new(m20230531_180824_drop_reversi::Migration),
            Box::new(m20230627_185451_index_note_url::Migration),
            Box::new(m20230709_000510_move_antenna_to_cache::Migration),
            Box::new(m20230726_213530_drop_ads::Migration),
            Box::new(m20230801_160334_add_instance_account_domain::Migration),
            Box::new(m20230802_190415_fix_instance_account_domain::Migration),
            Box::new(m20230905_210205_drop_instance_account_domain::Migration),
        ]
    }
}
@@ -1,51 +0,0 @@
use sea_orm_migration::{
    prelude::*,
    sea_orm::{DbBackend, Statement},
};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        if manager.get_database_backend() == DbBackend::Sqlite {
            return Ok(());
        }

        let db = manager.get_connection();
        db.query_one(Statement::from_string(
            DbBackend::Postgres,
            Table::drop()
                .table(ReversiGame::Table)
                .if_exists()
                .to_string(PostgresQueryBuilder),
        ))
        .await?;
        db.query_one(Statement::from_string(
            DbBackend::Postgres,
            Table::drop()
                .table(ReversiMatching::Table)
                .if_exists()
                .to_string(PostgresQueryBuilder),
        ))
        .await?;

        Ok(())
    }

    async fn down(&self, _manager: &SchemaManager) -> Result<(), DbErr> {
        // Replace the sample below with your own migration scripts
        Ok(())
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum ReversiGame {
    Table,
}
#[derive(Iden)]
enum ReversiMatching {
    Table,
}
@@ -1,38 +0,0 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_index(
                Index::create()
                    .name("IDX_note_url")
                    .table(Note::Table)
                    .col(Note::Url)
                    .if_not_exists()
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_index(
                Index::drop()
                    .name("IDX_note_url")
                    .table(Note::Table)
                    .to_owned(),
            )
            .await
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum Note {
    Table,
    Url,
}
@@ -1,248 +0,0 @@
use redis::streams::StreamMaxlen;
use sea_orm::Statement;
use sea_orm_migration::prelude::*;
use std::env;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let cache_url = env::var("CACHE_URL").unwrap();
        let skip_copy = env::var("ANTENNA_MIGRATION_SKIP").unwrap_or_default();
        let copy_limit = env::var("ANTENNA_MIGRATION_COPY_LIMIT").unwrap_or_default();
        let read_limit: u64 = env::var("ANTENNA_MIGRATION_READ_LIMIT")
            .unwrap_or("10000".to_string())
            .parse()
            .unwrap();
        let copy_limit: i64 = match copy_limit.parse() {
            Ok(limit) => limit,
            Err(_) => 0,
        };

        if skip_copy == "true" {
            println!("Skipped antenna migration");
        } else {
            let prefix = env::var("CACHE_PREFIX").unwrap();

            let db = manager.get_connection();
            let bk = manager.get_database_backend();

            let count_stmt =
                Statement::from_string(bk, "SELECT COUNT(1) FROM antenna_note".to_owned());
            let total_num = db
                .query_one(count_stmt)
                .await?
                .unwrap()
                .try_get_by_index::<i64>(0)?;
            let copy_limit = if copy_limit > 0 {
                copy_limit
            } else {
                total_num
            };
            println!(
                "Copying {} out of {} entries in antenna_note.",
                copy_limit, total_num
            );

            let stmt_base = Query::select()
                .column((AntennaNote::Table, AntennaNote::Id))
                .column(AntennaNote::AntennaId)
                .column(AntennaNote::NoteId)
                .from(AntennaNote::Table)
                .order_by((AntennaNote::Table, AntennaNote::Id), Order::Asc)
                .limit(read_limit)
                .to_owned();

            let mut stmt = stmt_base.clone();

            let client = redis::Client::open(cache_url).unwrap();
            let mut redis_conn = client.get_connection().unwrap();

            let mut remaining = total_num;
            let mut pagination: i64 = 0;

            loop {
                let res = db.query_all(bk.build(&stmt)).await?;
                if res.len() == 0 {
                    break;
                }
                let val: Vec<(String, String, String)> = res
                    .iter()
                    .filter_map(|q| q.try_get_many_by_index().ok())
                    .collect();

                remaining -= val.len() as i64;
                if remaining <= copy_limit {
                    let mut pipe = redis::pipe();
                    for v in &val {
                        pipe.xadd_maxlen(
                            format!("{}:antennaTimeline:{}", prefix, v.1),
                            StreamMaxlen::Approx(200),
                            "*",
                            &[("note", v.2.to_owned())],
                        )
                        .ignore();
                    }
                    pipe.query::<()>(&mut redis_conn).unwrap();
                }

                let copied = total_num - remaining;
                let copied = std::cmp::min(copied, total_num);
                pagination += 1;
                if pagination % 10 == 0 {
                    println!(
                        "Migrating antenna [{:.2}%]",
                        (copied as f64 / total_num as f64) * 100_f64,
                    );
                }

                if let Some((last_id, _, _)) = val.last() {
                    stmt = stmt_base
                        .clone()
                        .and_where(
                            Expr::col((AntennaNote::Table, AntennaNote::Id)).gt(last_id.to_owned()),
                        )
                        .to_owned();
                } else {
                    break;
                }
            }

            println!("Migrating antenna [100.00%]");
        }

        manager
            .drop_table(
                Table::drop()
                    .table(AntennaNote::Table)
                    .if_exists()
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(AntennaNote::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(AntennaNote::Id)
                            .string_len(32)
                            .not_null()
                            .primary_key(),
                    )
                    .col(
                        ColumnDef::new(AntennaNote::NoteId)
                            .string_len(32)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(AntennaNote::AntennaId)
                            .string_len(32)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(AntennaNote::Read)
                            .boolean()
                            .default(false)
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_0d775946662d2575dfd2068a5f")
                    .table(AntennaNote::Table)
                    .col(AntennaNote::AntennaId)
                    .if_not_exists()
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_bd0397be22147e17210940e125")
                    .table(AntennaNote::Table)
                    .col(AntennaNote::NoteId)
                    .if_not_exists()
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_335a0bf3f904406f9ef3dd51c2")
                    .table(AntennaNote::Table)
                    .col(AntennaNote::NoteId)
                    .col(AntennaNote::AntennaId)
                    .unique()
                    .if_not_exists()
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_9937ea48d7ae97ffb4f3f063a4")
                    .table(AntennaNote::Table)
                    .col(AntennaNote::Read)
                    .if_not_exists()
                    .to_owned(),
            )
            .await?;
        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("FK_0d775946662d2575dfd2068a5f5")
                    .from(AntennaNote::Table, AntennaNote::AntennaId)
                    .to(Antenna::Table, Antenna::Id)
                    .on_delete(ForeignKeyAction::Cascade)
                    .to_owned(),
            )
            .await?;
        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("FK_bd0397be22147e17210940e125b")
                    .from(AntennaNote::Table, AntennaNote::NoteId)
                    .to(Note::Table, Note::Id)
                    .on_delete(ForeignKeyAction::Cascade)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum AntennaNote {
    Table,
    Id,
    #[iden = "noteId"]
    NoteId,
    #[iden = "antennaId"]
    AntennaId,
    Read,
}

#[derive(Iden)]
enum Antenna {
    Table,
    Id,
}

#[derive(Iden)]
enum Note {
    Table,
    Id,
}
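The deleted migration above (moving `antenna_note` rows into Redis streams) is the one SeaORM migration that is not given a TypeORM counterpart in this commit; it is removed along with the rest of the crate. For readers comparing the two stacks, the core of its copy loop, paginated reads from `antenna_note` followed by `XADD ... MAXLEN ~ 200` into per-antenna streams, could be expressed in Node roughly as below. This is an illustrative, simplified sketch only (the `ANTENNA_MIGRATION_*` limits are omitted, and it assumes `ioredis` and `pg` clients, neither of which this commit introduces):

```js
// Illustrative sketch, not part of this commit: the same copy loop in Node.
import Redis from "ioredis";
import pg from "pg";

async function copyAntennaNotes(prefix, readLimit = 10000) {
	const db = new pg.Client();   // assumed: connection taken from PG* env vars
	const redis = new Redis();    // assumed: default local Redis connection
	await db.connect();

	let lastId = "";
	for (;;) {
		// Keyset pagination over antenna_note, ordered by id, like the Rust loop.
		const { rows } = await db.query(
			`SELECT "id", "antennaId", "noteId" FROM "antenna_note"
			 WHERE "id" > $1 ORDER BY "id" ASC LIMIT $2`,
			[lastId, readLimit],
		);
		if (rows.length === 0) break;

		const pipeline = redis.pipeline();
		for (const row of rows) {
			// Mirrors pipe.xadd_maxlen(StreamMaxlen::Approx(200), "*", [("note", noteId)])
			pipeline.xadd(
				`${prefix}:antennaTimeline:${row.antennaId}`,
				"MAXLEN", "~", 200, "*",
				"note", row.noteId,
			);
		}
		await pipeline.exec();
		lastId = rows[rows.length - 1].id;
	}

	await db.end();
	redis.disconnect();
}
```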
@@ -1,69 +0,0 @@
use sea_orm::entity::prelude::*;
use sea_orm::Schema;

use sea_orm_migration::{
    prelude::*,
    sea_orm::{DbBackend, Statement},
};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        if manager.get_database_backend() == DbBackend::Sqlite {
            return Ok(());
        }

        let db = manager.get_connection();
        db.query_one(Statement::from_string(
            DbBackend::Postgres,
            Table::drop()
                .table(Entity)
                .if_exists()
                .to_string(PostgresQueryBuilder),
        ))
        .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        if manager.get_database_backend() == DbBackend::Sqlite {
            return Ok(());
        }

        let db = manager.get_connection();
        let builder = db.get_database_backend();
        let schema = Schema::new(builder);

        db.execute(builder.build(&schema.create_table_from_entity(Entity)))
            .await?;

        Ok(())
    }
}

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Default)]
#[sea_orm(table_name = "ad")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: String,
    #[sea_orm(column_name = "createdAt")]
    pub created_at: DateTimeWithTimeZone,
    #[sea_orm(column_name = "expiresAt")]
    pub expires_at: DateTimeWithTimeZone,
    pub place: String,
    pub priority: String,
    pub url: String,
    #[sea_orm(column_name = "imageUrl")]
    pub image_url: String,
    pub memo: String,
    pub ratio: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
@@ -1,36 +0,0 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Instance::Table)
                    .add_column(ColumnDef::new(Instance::AccountDomain).string())
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Instance::Table)
                    .drop_column(Instance::AccountDomain)
                    .to_owned(),
            )
            .await
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum Instance {
    Table,
    AccountDomain,
}
@@ -1,37 +0,0 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Instance::Table)
                    .rename_column(Alias::new("account_domain"), Instance::AccountDomain)
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Instance::Table)
                    .rename_column(Instance::AccountDomain, Alias::new("account_domain"))
                    .to_owned(),
            )
            .await
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum Instance {
    Table,
    #[iden = "accountDomain"]
    AccountDomain,
}
@@ -1,37 +0,0 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Instance::Table)
                    .drop_column(Instance::AccountDomain)
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Instance::Table)
                    .add_column(ColumnDef::new(Instance::AccountDomain).string())
                    .to_owned(),
            )
            .await
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum Instance {
    Table,
    #[iden = "accountDomain"]
    AccountDomain,
}
@@ -1,103 +0,0 @@
use serde::Deserialize;
use std::env;
use std::fs;
use urlencoding::encode;

use sea_orm_migration::prelude::*;

const DB_URL_ENV: &str = "DATABASE_URL";
const CACHE_URL_ENV: &str = "CACHE_URL";
const CACHE_PREFIX_ENV: &str = "CACHE_PREFIX";

#[tokio::main]
async fn main() {
    let cwd = env::current_dir().unwrap();
    let yml = fs::File::open(cwd.join("../../.config/default.yml"))
        .expect("Failed to open '.config/default.yml'");
    let config: Config = serde_yaml::from_reader(yml).expect("Failed to parse yaml");

    if env::var_os(DB_URL_ENV).is_none() {
        env::set_var(
            DB_URL_ENV,
            format!(
                "postgres://{}:{}@{}:{}/{}",
                config.db.user,
                encode(&config.db.pass),
                config.db.host,
                config.db.port,
                config.db.db,
            ),
        );
    };

    if env::var_os(CACHE_URL_ENV).is_none() {
        let redis_conf = match config.cache_server {
            None => config.redis,
            Some(conf) => conf,
        };
        let redis_proto = match redis_conf.tls {
            None => "redis",
            Some(_) => "rediss",
        };
        let redis_user = redis_conf.user.unwrap_or("default".to_string());
        let redis_uri_userpass = format!(
            "{}:{}",
            redis_user,
            encode(&redis_conf.pass.unwrap_or_default())
        );
        let redis_uri_hostport = format!("{}:{}", redis_conf.host, redis_conf.port);
        let redis_uri = format!(
            "{}://{}@{}/{}",
            redis_proto, redis_uri_userpass, redis_uri_hostport, redis_conf.db
        );
        env::set_var(CACHE_URL_ENV, redis_uri);
        env::set_var(
            CACHE_PREFIX_ENV,
            if redis_conf.prefix.is_empty() {
                config.url.host_str().unwrap()
            } else {
                &redis_conf.prefix
            },
        );
    }

    cli::run_cli(migration::Migrator).await;
}

#[derive(Debug, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Config {
    pub url: url::Url,
    pub db: DbConfig,
    pub redis: RedisConfig,
    pub cache_server: Option<RedisConfig>,
}

#[derive(Debug, PartialEq, Deserialize)]
pub struct DbConfig {
    pub host: String,
    pub port: u32,
    pub db: String,
    pub user: String,
    pub pass: String,
}

#[derive(Debug, PartialEq, Deserialize)]
pub struct RedisConfig {
    pub host: String,
    pub port: u32,
    pub user: Option<String>,
    pub pass: Option<String>,
    pub tls: Option<TlsConfig>,
    #[serde(default)]
    pub db: u32,
    #[serde(default)]
    pub prefix: String,
}

#[derive(Debug, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TlsConfig {
    pub host: String,
    pub reject_unauthorized: bool,
}
@@ -1,14 +0,0 @@
{
	"name": "native-utils",
	"version": "0.0.0",
	"license": "MIT",
	"engines": {
		"node": ">= 10"
	},
	"scripts": {
		"build": "yarn build:migration",
		"build:migration": "cargo build --locked --manifest-path migration/Cargo.toml && mkdir -p ./built && cp ./migration/target/debug/migration ./built/migration",
		"format": "cargo fmt --all",
		"lint": "cargo clippy --fix"
	}
}
@@ -6,12 +6,10 @@
 	"scripts": {
 		"start": "node ./built/index.js",
 		"start:test": "NODE_ENV=test node ./built/index.js",
-		"migrate": "yarn migrate:typeorm && yarn migrate:cargo",
+		"migrate": "yarn migrate:typeorm",
 		"migrate:typeorm": "typeorm migration:run -d ormconfig.js",
-		"migrate:cargo": "./native-utils/built/migration up",
-		"revertmigration": "yarn revertmigration:cargo && yarn revertmigration:typeorm",
+		"revertmigration": "yarn revertmigration:typeorm",
 		"revertmigration:typeorm": "typeorm migration:revert -d ormconfig.js",
-		"revertmigration:cargo": "./native-utils/built/migration down",
 		"check:connect": "node ./check_connect.js",
 		"build": "swc src -d built -D",
 		"build:debug": "swc src -d built -s -D",
@@ -98,7 +96,6 @@
 		"mime-types": "2.1.35",
 		"msgpackr": "1.9.5",
 		"multer": "1.4.4-lts.1",
-		"native-utils": "link:native-utils",
 		"nested-property": "4.0.0",
 		"node-fetch": "3.3.1",
 		"nodemailer": "6.9.3",
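For reference, the work done by the `migrate:typeorm` and `revertmigration:typeorm` scripts above can also be driven programmatically through TypeORM's DataSource API. This is not part of the commit; it is a sketch that assumes a data source exported from `ormconfig.js` as in the earlier sketch:

```js
// Hypothetical helper; dataSource is assumed to be the DataSource from ormconfig.js.
import dataSource from "./ormconfig.js";

async function migrate(revert = false) {
	await dataSource.initialize();
	try {
		if (revert) {
			// Equivalent of `typeorm migration:revert -d ormconfig.js`
			await dataSource.undoLastMigration();
		} else {
			// Equivalent of `typeorm migration:run -d ormconfig.js`
			await dataSource.runMigrations();
		}
	} finally {
		await dataSource.destroy();
	}
}

migrate(process.argv.includes("--revert")).catch((e) => {
	console.error(e);
	process.exit(1);
});
```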
@@ -12,15 +12,6 @@ const { join } = require("node:path");
 	force: true,
 });
 
-fs.rmSync(join(__dirname, "/../packages/backend/native-utils/built"), {
-	recursive: true,
-	force: true,
-});
-fs.rmSync(join(__dirname, "/../packages/backend/native-utils/node_modules"), {
-	recursive: true,
-	force: true,
-});
-
 fs.rmSync(join(__dirname, "/../packages/client/built"), {
 	recursive: true,
 	force: true,
@@ -65,9 +56,4 @@ const { join } = require("node:path");
 		cwd: join(__dirname, "/../"),
 		stdio: "inherit",
 	});
-
-	execa("cargo", ["clean"], {
-		cwd: join(__dirname, "/../packages/backend/native-utils"),
-		stdio: "inherit",
-	});
 })();
@@ -7,10 +7,6 @@ const { join } = require("node:path");
 	recursive: true,
 	force: true,
 });
-fs.rmSync(join(__dirname, "/../packages/backend/native-utils/built"), {
-	recursive: true,
-	force: true,
-});
 fs.rmSync(join(__dirname, "/../packages/client/built"), {
 	recursive: true,
 	force: true,
13  yarn.lock
@@ -5860,7 +5860,6 @@ __metadata:
     mocha: 10.2.0
     msgpackr: 1.9.5
     multer: 1.4.4-lts.1
-    native-utils: "link:native-utils"
     nested-property: 4.0.0
     node-fetch: 3.3.1
     nodemailer: 6.9.3
@@ -16039,18 +16038,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"native-utils@link:native-utils::locator=backend%40workspace%3Apackages%2Fbackend":
-  version: 0.0.0-use.local
-  resolution: "native-utils@link:native-utils::locator=backend%40workspace%3Apackages%2Fbackend"
-  languageName: node
-  linkType: soft
-
-"native-utils@workspace:packages/backend/native-utils":
-  version: 0.0.0-use.local
-  resolution: "native-utils@workspace:packages/backend/native-utils"
-  languageName: unknown
-  linkType: soft
-
 "natural-compare-lite@npm:^1.4.0":
   version: 1.4.0
   resolution: "natural-compare-lite@npm:1.4.0"