ci-cd: removed useless things like the eslint and tsconfig packages; we just use oxlint and next lint now. Removed all of the Hono context stuff that we didn't need, ran knip a bunch, removed the last of Cloudflare, and updated docs and dependencies. Also finalized the db, auth, cache, and other services to run in the Node environment

This commit is contained in:
Logan Reeder
2025-08-25 17:10:11 -06:00
parent 2dc74e0258
commit c88eda5230
70 changed files with 1459 additions and 11643 deletions

View File

@@ -70,10 +70,8 @@ jspm_packages/
.husky/_
# Development and CI files
.eslintcache
.prettierignore
.prettierrc
eslint.config.ts
knip.config.ts
lint-staged.config.js

View File

@@ -51,14 +51,10 @@ POSTGRES_DB=nimbus
# Valkey
VALKEY_PORT=6379
VALKEY_HOST=localhost
VALKEY_HOST=cache
VALKEY_USERNAME=valkey
VALKEY_PASSWORD=valkey
# Upstash Redis
UPSTASH_REDIS_REST_URL=
UPSTASH_REDIS_REST_TOKEN=
# Resend. Get your api key from here https://resend.com
EMAIL_FROM=support@yourdomain.com
RESEND_API_KEY=your_resend_api_key
@@ -82,11 +78,5 @@ AWS_SECRET_ACCESS_KEY=your_s3_secret_key_here
# Node environment
NODE_ENV=development
# Edge environment
IS_EDGE_RUNTIME=false
# Wrangler dev
WRANGLER_DEV=false
# Integration tests
DROPBOX_TEST_ACCESS_TOKEN=

View File

@@ -1,15 +0,0 @@
# for oxlint
# https://oxc.rs/docs/guide/usage/linter/generated-cli.html#ignore-files
# I know it is deprecated...just ignore the warning
# https://eslint.org/docs/latest/use/configure/ignore-deprecated
**/server/lib/google-drive/**
**/server/lib/one-drive/**
**/node_modules
**/.next
**/.wrangler
**/dist
**/build
**/coverage
**/out

View File

@@ -46,7 +46,7 @@ jobs:
run: bun install --frozen-lockfile
- name: Run formatter
run: bun format:check
run: bun format
- name: Run linter
run: bun lint

View File

@@ -15,9 +15,6 @@
.env.*
.env*disabled
# Cloudflare
cloudflare-env.d.ts
# Generated files
drizzle/

View File

@@ -3,8 +3,6 @@
"prettier.ignorePath": ".prettierignore",
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
// https://stackoverflow.com/a/76549788/3571431
"eslint.workingDirectories": [{ "mode": "auto" }],
"oxc.enable": true,
"[toml]": {
"editor.defaultFormatter": "tamasfe.even-better-toml"

View File

@@ -1,9 +1,12 @@
{
"prettier.configPath": ".prettierrc",
"prettier.ignorePath": ".prettierignore",
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
// https://stackoverflow.com/a/76549788/3571431
"eslint.workingDirectories": [{ "mode": "auto" }],
"oxc.enable": true
"auto_install_extensions": {
"oxlint": true
},
"format_on_save": "on",
"formatter": {
"external": {
"command": "prettier",
"arguments": ["--stdin-filepath", "{buffer_path}"]
}
}
}

View File

@@ -1,12 +1,5 @@
# 🚀 Deployment Guide
## Table of Contents
- [Docker Compose Deployment](#docker-compose-deployment)
- [Cloudflare Workers Deployment](#cloudflare-workers-deployment)
---
## 🐳 Docker Compose Deployment
This is the simplest way to deploy Nimbus locally or on a single server.
@@ -64,6 +57,8 @@ Nimbus requires the following environment variables to work, some will be pre-fi
- `EMAIL_FROM`
- `RESEND_API_KEY`
- `NEXT_PUBLIC_POSTHOG_KEY`
- `NEXT_PUBLIC_POSTHOG_HOST`
### 3. Start Services
@@ -88,70 +83,8 @@ docker compose down
- Database: `pg_isready -h localhost -p 5432`
- Cache: `redis-cli -h localhost -p 6379 ping`
- API: `curl http://localhost:1284/kamehame`
- API: `curl http://localhost:1284/health`
### 7. Enjoy!
Your container should now be available at [http://localhost:3000](http://localhost:3000) or at the domain you set up!
---
## ☁️ Cloudflare Workers Deployment
### Prerequisites
- [Bun](https://bun.sh/)
- [Wrangler CLI](https://developers.cloudflare.com/workers/wrangler/install-and-update/)
- Cloudflare account with Workers enabled
> For any issues, please refer to the official Cloudflare Workers [documentation](https://developers.cloudflare.com/workers/).
### 1. Install Dependencies and Sign In to Wrangler
```bash
bun install
bun wrangler --version
bun wrangler login
```
### 2. Configure Worker
The `wrangler.toml` is already configured to instantly deploy, however you will need to add certain environment variables for the worker to function fully.
If testing, make sure to add the workers.dev URL Cloudflare provisions for you to the `NEXT_PUBLIC_BACKEND_URL` and `NEXT_PUBLIC_FRONTEND_URL` environment variables in both the wrangler.toml and .env files.
If deploying to production, make sure to use your production URL in the `NEXT_PUBLIC_BACKEND_URL` and `NEXT_PUBLIC_FRONTEND_URL` environment variables in both the wrangler.toml and .env files.
1. Add the values outlined in the .env.example file to the wrangler.toml file or to your Worker settings on the Cloudflare dashboard.
2. Configure your .env file with your production values.
3. Run `bun env:sync` to sync your .env file to the web workspace or the frontend build will fail.
4. Run `cp .env .dev.vars` to copy your .env file to the .dev.vars file for testing it locally before deployment.
> **Note:** The worker front end is built with [opennext](https://opennext.js.org/) and therefore acts exactly like a Next.js build, so env variables are extracted from the .env file at the root of the workspace (`apps/web` in this case). Technically, the only variables required for the frontend build are `NEXT_PUBLIC_BACKEND_URL` and `NEXT_PUBLIC_FRONTEND_URL`, but it is recommended to sync all variables.
### 3. Deploy Worker
You will need to run this command in the `apps/web` directory and the `apps/server` directory respectively.
```bash
bun run deploy
```
### 4. Set Up Custom Domain (Optional)
Refer to the official [documentation](https://developers.cloudflare.com/workers/configuration/routing/custom-domains/) for adding custom domains to your workers
### 5. Environment Variables
Set environment variables in the [Cloudflare Workers dashboard](https://developers.cloudflare.com/workers/configuration/environment-variables/) or using Wrangler:
```bash
wrangler secret put API_KEY
wrangler secret put DATABASE_URL
```
> **Note:** `wrangler.toml` is not ignored in the `.gitignore`, so if you add environment variables for your deployment, they will be committed to version control. Make sure to remove them before pushing to a public repository.
### 6. Enjoy!
Your worker should now be available at the domain you set up or at the workers.dev URL Cloudflare provisions for you!

View File

@@ -7,16 +7,13 @@
".": "./src/index.ts"
},
"scripts": {
"lint": "bun run oxlint --fix",
"format": "bun prettier . --write --list-different",
"dev": "bun --watch src/index.ts",
"dev:hot": "bun --hot src/index.ts",
"check": "tsc --noEmit",
"build": "bun run check && bun build src/index.ts --target bun --minify --outdir dist",
"start": "bun dist/index.js",
"cf:dev": "sudo wrangler dev",
"cf:deploy:preview": "wrangler deploy --env preview",
"cf:deploy:staging": "wrangler deploy --env staging",
"cf:deploy:production": "wrangler deploy --env production",
"sync-wrangler-secrets": "bun run scripts/sync-wrangler-secrets.ts",
"docker:build": "bun run build && docker build -t nimbus-server-manual .",
"docker:run": "source .env && docker run --name nimbus-server-manual --env-file .env -p $SERVER_PORT:$SERVER_PORT nimbus-server-manual:latest",
"docker:up": "bun run build && docker compose up -d",
@@ -44,12 +41,11 @@
"hono": "^4.8.5",
"nanoid": "^5.1.5",
"pg": "^8.16.3",
"resend": "^4.7.0",
"rate-limiter-flexible": "^7.2.0",
"zod": "^4.0.5"
},
"devDependencies": {
"@microsoft/microsoft-graph-types": "^2.40.0",
"@nimbus/tsconfig": "workspace:*",
"@nimbus/vitest": "workspace:*",
"@types/pg": "^8.15.4"
}

View File

@@ -1,188 +0,0 @@
import { execSync } from "node:child_process";
import { readFile } from "node:fs/promises";
import { parseArgs } from "node:util";
import { join } from "node:path";
// Supported environments
type Environment = "preview" | "staging" | "production";
interface Secret {
name: string;
value: string;
}
// Parse command line arguments
const { values: args } = parseArgs({
options: {
env: {
type: "string",
short: "e",
default: "preview",
},
help: {
type: "boolean",
short: "h",
default: false,
},
},
});
// Help message
// Prints CLI usage for this script; the template literal below is emitted
// verbatim to stdout, so its text must not be reflowed.
const printHelp = () => {
	console.log(`
Usage: bun run sync-wrangler-secrets.ts [options]
Options:
-e, --env <environment> Environment to sync secrets to (preview, staging, or production, default: preview)
-h, --help Show help
`);
};
// Validate environment
// Type guard narrowing an arbitrary string to a supported deployment environment.
// Logs a descriptive error and returns false for anything unrecognized.
const validateEnvironment = (env: string): env is Environment => {
	const supported: Environment[] = ["preview", "staging", "production"];
	const recognized = supported.some(candidate => candidate === env);
	if (recognized) {
		return true;
	}
	console.error(`Error: Invalid environment '${env}'. Must be one of: ${supported.join(", ")}`);
	return false;
};
// Get environment file
// Maps each environment to the .dev.vars file that holds its secrets.
const getEnvFile = (env: Environment): string => {
	const fileByEnv: Record<Environment, string> = {
		preview: ".dev.vars.preview",
		staging: ".dev.vars.staging",
		production: ".dev.vars.production",
	};
	return fileByEnv[env];
};
// Get Wrangler secrets
// Asks the Wrangler CLI for the names of all secrets currently stored in the
// given environment. Returns an empty set if the CLI call or JSON parse fails,
// so callers can proceed without special error handling.
const getWranglerSecrets = async (env: Environment): Promise<Set<string>> => {
	try {
		const raw = execSync(`bun run wrangler secret list --env ${env}`, { stdio: "pipe" }).toString();
		const parsed = JSON.parse(raw) as Secret[];
		const names = new Set<string>();
		for (const { name } of parsed) {
			names.add(name);
		}
		return names;
	} catch (error) {
		console.error("Error fetching Wrangler secrets:", error);
		return new Set();
	}
};
// Delete Wrangler secret
// Deletes one secret from the given Wrangler environment.
// Wrangler's `secret delete` has no --yes/-y flag, so the confirmation prompt
// is passed through to the user via stdio: "inherit"; this script only
// automates discovering which secrets are stale.
const deleteWranglerSecret = async (name: string, env: Environment): Promise<boolean> => {
	try {
		console.log(`Deleting secret: ${name}`);
		execSync(`bun run wrangler secret delete ${name} --env ${env}`, { stdio: "inherit" });
		return true;
	} catch (error) {
		console.error(`Error deleting secret ${name}:`, error);
		return false;
	}
};
// Parse .dev.vars file
// Loads KEY=VALUE pairs from the environment's .dev.vars file.
// Blank lines and #-comments are skipped; values keep any embedded "=" and a
// single pair of surrounding quotes is stripped. Exits the process if the
// file cannot be read, since nothing useful can happen without it.
const parseDevVars = async (env: Environment): Promise<Record<string, string>> => {
	const envFile = getEnvFile(env);
	try {
		const content = await readFile(join(process.cwd(), envFile), "utf-8");
		const secrets: Record<string, string> = {};
		for (const line of content.split("\n")) {
			if (!line || line.startsWith("#")) continue;
			const [key, ...rest] = line.split("=");
			if (!key || rest.length === 0) continue;
			secrets[key.trim()] = rest
				.join("=")
				.trim()
				.replace(/(^['"]|['"]$)/g, "");
		}
		return secrets;
	} catch (error) {
		console.error(`Error reading ${envFile}`, error);
		process.exit(1);
	}
};
// Main function
// Entry point: reconciles the Wrangler secret store for the chosen environment
// with the local .dev.vars file — deletes stale secrets, bulk-upserts current
// ones, then verifies every local key now exists remotely.
const main = async () => {
	if (args.help) {
		printHelp();
		return;
	}
	// Normalize and validate --env before touching anything remote.
	const env = args.env?.toLowerCase() as Environment;
	if (!validateEnvironment(env)) {
		process.exit(1);
	}
	console.log(`Syncing secrets for environment: ${env}\n`);
	// Get current Wrangler secrets
	console.log("Fetching existing Wrangler secrets...");
	const wranglerSecrets = await getWranglerSecrets(env);
	console.log(`Found ${wranglerSecrets.size} existing secrets\n`);
	// Parse local .dev.vars
	console.log("Reading local .dev.vars file...");
	const localSecrets = await parseDevVars(env);
	const localSecretKeys = new Set(Object.keys(localSecrets));
	console.log(`Found ${localSecretKeys.size} secrets in .dev.vars\n`);
	// Delete secrets that exist in Wrangler but not in local .dev.vars
	const secretsToDelete = [...wranglerSecrets].filter(secret => !localSecretKeys.has(secret));
	if (secretsToDelete.length > 0) {
		console.log(`Found ${secretsToDelete.length} secrets to delete:`);
		for (const secret of secretsToDelete) {
			// Sequential on purpose: each delete may prompt the user for confirmation.
			await deleteWranglerSecret(secret, env);
		}
		console.log("");
	} else {
		console.log("No secrets to delete.\n");
	}
	// Create/update secrets from .dev.vars
	if (localSecretKeys.size > 0) {
		console.log("Updating secrets from .dev.vars...");
		try {
			// Use wrangler secret bulk with stdin to create/update all secrets
			execSync(`bun run wrangler secret bulk --env ${env}`, {
				input: JSON.stringify(localSecrets, null, 2),
				stdio: ["pipe", "inherit", "inherit"],
			});
			console.log("Secrets updated successfully!\n");
		} catch (error) {
			console.error("Error updating secrets:", error);
		}
	}
	// Verify the final state: re-list remote secrets and fail loudly (exit 1)
	// if any local key is still missing.
	console.log("Verifying secrets...");
	const finalSecrets = await getWranglerSecrets(env);
	const missingSecrets = [...localSecretKeys].filter(key => !finalSecrets.has(key));
	if (missingSecrets.length > 0) {
		console.error("Error: The following secrets were not synced successfully:");
		missingSecrets.forEach(secret => console.error(`- ${secret}`));
		process.exit(1);
	} else {
		console.log("All secrets are in sync!");
		console.log(`\nTotal secrets in ${env}: ${finalSecrets.size}`);
	}
};
// Run the script
main().catch(console.error);

View File

@@ -1,18 +1,8 @@
import type { auth, Auth, SessionUser } from "@nimbus/auth/auth";
import type { Provider } from "./providers/interface/provider";
import { getContext } from "hono/context-storage";
import type { auth, Auth } from "@nimbus/auth/auth";
import type { CacheClient } from "@nimbus/cache";
import { Hono, type Env as HonoEnv } from "hono";
import type { DB } from "@nimbus/db";
export interface BaseRouterVars {
user: typeof auth.$Infer.Session.user | null;
session: typeof auth.$Infer.Session.session | null;
db: DB;
cache: CacheClient;
auth: Auth;
}
export interface HonoContext {
user: typeof auth.$Infer.Session.user | null;
session: typeof auth.$Infer.Session.session | null;
@@ -21,43 +11,3 @@ export interface HonoContext {
auth: Auth;
provider: Provider;
}
export interface ProtectedRouterVars extends BaseRouterVars {
user: SessionUser;
}
export interface DriveProviderRouterVars extends ProtectedRouterVars {
provider: Provider;
}
export interface PublicRouterEnv {
Variables: BaseRouterVars;
}
export interface ProtectedRouterEnv {
Variables: ProtectedRouterVars;
}
export interface DriveProviderRouterEnv {
Variables: DriveProviderRouterVars;
}
// Single construction point so every typed router is built the same way.
function createHono<T extends HonoEnv>() {
	return new Hono<T>();
}
// Router typed with the base variables only — no authenticated user guaranteed.
export function createPublicRouter() {
	return createHono<PublicRouterEnv>();
}
// Router whose handlers may rely on a non-null session user in context vars.
export function createProtectedRouter() {
	return createHono<ProtectedRouterEnv>();
}
// Router that additionally carries a drive `provider` in its context vars.
export function createDriveProviderRouter() {
	return createHono<DriveProviderRouterEnv>();
}
// Retrieves the drive-provider request context via hono/context-storage;
// presumably only valid during a request on an app using contextStorage() — verify at call sites.
export function getDriveProviderContext() {
	return getContext<DriveProviderRouterEnv>();
}

View File

@@ -1,14 +1,15 @@
import { contextStorage } from "hono/context-storage";
import { cacheClient } from "@nimbus/cache";
import { createPublicRouter } from "./hono";
import { serve } from "@hono/node-server";
import { type HonoContext } from "./hono";
import { env } from "@nimbus/env/server";
import { auth } from "@nimbus/auth/auth";
import { cors } from "hono/cors";
import { db } from "@nimbus/db";
import routes from "./routes";
import { Hono } from "hono";
const app = createPublicRouter()
const app = new Hono<{ Variables: HonoContext }>()
.use(contextStorage())
.use(
cors({

View File

@@ -22,7 +22,7 @@ const MOCK_FOLDER_RESPONSE = {
path_display: "/test-folder",
};
export function createMockDropboxClient() {
function createMockDropboxClient() {
return {
filesCreateFolderV2: vi.fn(),
filesUpload: vi.fn(),

View File

@@ -1,19 +0,0 @@
// May use in the one-drive provider at some point.
// Field names mirror a Microsoft Graph DriveItem (note the
// "@microsoft.graph.downloadUrl" instance annotation) — assumed from the
// shape; verify against the Graph API reference before relying on it.
export interface File {
	id: string;
	name: string;
	// Size in bytes — presumably; confirm against the DriveItem schema.
	size?: number;
	// Set when the item is a file; carries the MIME type.
	file?: {
		mimeType?: string;
	};
	// Set when the item is a folder — NOTE(review): type it properly if this interface is ever used.
	folder?: any;
	// Reference to the containing folder.
	parentReference?: {
		id: string;
		path: string;
	};
	createdDateTime?: string;
	lastModifiedDateTime?: string;
	webUrl?: string;
	// Pre-authenticated download URL — assumed short-lived; TODO confirm.
	"@microsoft.graph.downloadUrl"?: string;
}

View File

@@ -1,7 +1,7 @@
import { CreateBucketCommand, S3Client } from "@aws-sdk/client-s3";
import { S3Provider } from "../s3-provider";
export const config = {
const config = {
endpoint: "http://localhost:9000",
region: "us-east-1",
accessKeyId: "minioadmin",
@@ -10,7 +10,7 @@ export const config = {
};
// Create S3 client
export const createLocalS3Client = () => {
const createLocalS3Client = () => {
return new S3Client({
endpoint: config.endpoint,
region: config.region,
@@ -93,31 +93,6 @@ export function createTestS3Provider(bucketName: string): S3Provider {
});
}
/**
 * Creates a test file payload for provider tests.
 *
 * Bug fix: `size` is now derived from the actual content buffer. Previously
 * `content?.length || 11` reported 11 for the default content "test content"
 * (which is 12 bytes) and also for an explicitly empty string, so `size`
 * could disagree with `content`.
 *
 * @param name File name to use.
 * @param content Optional file body; defaults to "test content".
 * @returns Object with name, mimeType, byte-accurate size, parentId, and content buffer.
 */
export function createTestFile(name: string, content?: string) {
	// Build the buffer first so size always matches the bytes actually stored.
	const data = Buffer.from(content ?? "test content");
	return {
		name,
		mimeType: "text/plain",
		size: data.byteLength,
		parentId: "",
		content: data,
	};
}
/**
 * Builds the metadata object used when creating a folder in tests.
 * Folders carry the x-directory MIME type and a zero size.
 */
export function createTestFolder(name: string) {
	const folder = {
		name,
		mimeType: "application/x-directory",
		size: 0,
		parentId: "",
	};
	return folder;
}
/**
* Generates a unique test file name
*/

View File

@@ -7,8 +7,11 @@ import type {
MoveFileSchema,
UpdateFileSchema,
} from "@nimbus/shared";
import { getDriveProviderContext } from "../../hono";
import type { Provider } from "../../providers/interface/provider";
import { getContext } from "hono/context-storage";
import { TagService } from "../tags/tag-service";
import type { auth } from "@nimbus/auth/auth";
import type { HonoContext } from "../../hono";
import type { Readable } from "node:stream";
interface CreateFileOptions {
@@ -19,22 +22,17 @@ interface CreateFileOptions {
export class FileService {
private tagService: TagService;
private get c() {
const context = getDriveProviderContext();
if (!context) {
throw new Error("Context is not available in TagService. It must be used within a request cycle.");
}
return context;
}
private user: typeof auth.$Infer.Session.user | null;
private provider: Provider;
constructor() {
this.tagService = new TagService();
this.user = getContext<{ Variables: HonoContext }>().var.user;
this.provider = getContext<{ Variables: HonoContext }>().var.provider;
}
async listFiles(options: GetFilesSchema) {
const user = this.c.var.user;
const drive = this.c.var.provider;
const res = await drive.listChildren(options.parentId, {
const res = await this.provider.listChildren(options.parentId, {
pageSize: options.pageSize,
pageToken: options.pageToken,
fields: options.returnedValues,
@@ -49,7 +47,7 @@ export class FileService {
res.items.map(async item => {
if (!item.id) return { ...item, tags: [] };
try {
const tags = await this.tagService.getFileTags(item.id, user.id);
const tags = await this.tagService.getFileTags(item.id, this.user!.id);
return { ...item, tags };
} catch (error) {
console.error(`Failed to get tags for file ${item.id}:`, error);
@@ -62,40 +60,33 @@ export class FileService {
}
async getById(options: GetFileByIdSchema) {
const user = this.c.var.user;
const drive = this.c.var.provider;
const file = await drive.getById(options.fileId, options.returnedValues);
const file = await this.provider.getById(options.fileId, options.returnedValues);
if (!file) {
return null;
}
const tags = await this.tagService.getFileTags(options.fileId, user.id);
const tags = await this.tagService.getFileTags(options.fileId, this.user!.id);
return { ...file, tags } as File;
}
async updateFile(options: UpdateFileSchema) {
const drive = this.c.var.provider;
return drive.update(options.fileId, { name: options.name });
return this.provider.update(options.fileId, { name: options.name });
}
async deleteFile(options: DeleteFileSchema) {
const drive = this.c.var.provider;
return drive.delete(options.fileId);
return this.provider.delete(options.fileId);
}
async createFile(options: CreateFileOptions, fileStream?: Readable) {
const drive = this.c.var.provider;
return drive.create(options, fileStream);
return this.provider.create(options, fileStream);
}
async downloadFile(options: DownloadFileSchema) {
const drive = this.c.var.provider;
return drive.download(options.fileId, options);
return this.provider.download(options.fileId, options);
}
async moveFile(options: MoveFileSchema) {
const drive = this.c.var.provider;
return drive.move(options.sourceId, options.targetParentId, options.newName);
return this.provider.move(options.sourceId, options.targetParentId, options.newName);
}
}

View File

@@ -1,22 +1,14 @@
import { and, count, eq, inArray, isNull } from "drizzle-orm";
import { getDriveProviderContext } from "../../hono";
import type { FileTag, Tag } from "@nimbus/shared";
import { fileTag, tag } from "@nimbus/db/schema";
import { db } from "@nimbus/db";
import { nanoid } from "nanoid";
export class TagService {
private get c() {
const context = getDriveProviderContext();
if (!context) {
throw new Error("Context is not available in TagService. It must be used within a request cycle.");
}
return context;
}
// Get all tags for a user with file counts
async getUserTags(userId: string): Promise<Tag[]> {
// Get all tags for the user
const userTags = await this.c.var.db.query.tag.findMany({
const userTags = await db.query.tag.findMany({
where: (table, { eq }) => eq(table.userId, userId),
orderBy: (table, { asc }) => asc(table.name),
});
@@ -24,7 +16,7 @@ export class TagService {
// Get file counts for each tag
const tagsWithCounts = await Promise.all(
userTags.map(async tagRecord => {
const fileCount = await this.c.var.db
const fileCount = await db
.select({ count: count() })
.from(fileTag)
.where(and(eq(fileTag.tagId, tagRecord.id), eq(fileTag.userId, userId)));
@@ -45,13 +37,13 @@ export class TagService {
// Get a specific tag by ID
async getTagById(tagId: string, userId: string): Promise<Tag | null> {
const record = await this.c.var.db.query.tag.findFirst({
const record = await db.query.tag.findFirst({
where: (table, { and, eq }) => and(eq(table.id, tagId), eq(table.userId, userId)),
});
if (!record) return null;
const fileCount = await this.c.var.db
const fileCount = await db
.select({ count: count() })
.from(fileTag)
.where(and(eq(fileTag.tagId, tagId), eq(fileTag.userId, userId)));
@@ -83,7 +75,7 @@ export class TagService {
? and(eq(tag.name, name), eq(tag.userId, userId), eq(tag.parentId, parentId))
: and(eq(tag.name, name), eq(tag.userId, userId), isNull(tag.parentId));
const existingTag = await this.c.var.db.query.tag.findFirst({
const existingTag = await db.query.tag.findFirst({
where: existingTagQuery,
});
@@ -99,7 +91,7 @@ export class TagService {
userId,
};
await this.c.var.db.insert(tag).values(newTag);
await db.insert(tag).values(newTag);
return {
...newTag,
@@ -140,7 +132,7 @@ export class TagService {
? and(eq(tag.name, updates.name), eq(tag.userId, userId), eq(tag.parentId, newParentId))
: and(eq(tag.name, updates.name), eq(tag.userId, userId), isNull(tag.parentId));
const nameConflict = await this.c.var.db.query.tag.findFirst({
const nameConflict = await db.query.tag.findFirst({
where: nameConflictQuery,
});
@@ -156,7 +148,7 @@ export class TagService {
if (updates.parentId !== undefined) updateData.parentId = updates.parentId || null;
updateData.updatedAt = new Date();
await this.c.var.db
await db
.update(tag)
.set(updateData)
.where(and(eq(tag.id, tagId), eq(tag.userId, userId)));
@@ -176,10 +168,10 @@ export class TagService {
const childTagIds = await this.getAllChildTagIds(tagId, userId);
const allTagIds = [tagId, ...childTagIds];
await this.c.var.db.delete(fileTag).where(and(inArray(fileTag.tagId, allTagIds), eq(fileTag.userId, userId)));
await db.delete(fileTag).where(and(inArray(fileTag.tagId, allTagIds), eq(fileTag.userId, userId)));
// Delete the tag and all its children
await this.c.var.db.delete(tag).where(and(inArray(tag.id, allTagIds), eq(tag.userId, userId)));
await db.delete(tag).where(and(inArray(tag.id, allTagIds), eq(tag.userId, userId)));
}
// Add tags to a file
@@ -193,7 +185,7 @@ export class TagService {
}
// Check for existing associations
const existingAssociations = await this.c.var.db.query.fileTag.findMany({
const existingAssociations = await db.query.fileTag.findMany({
where: (table, { and, eq, inArray }) =>
and(eq(table.fileId, fileId), inArray(table.tagId, tagIds), eq(table.userId, userId)),
});
@@ -216,7 +208,7 @@ export class TagService {
userId,
}));
await this.c.var.db.insert(fileTag).values(newAssociations);
await db.insert(fileTag).values(newAssociations);
const newAssociationsWithDates = newAssociations.map(assoc => ({
...assoc,
@@ -234,14 +226,14 @@ export class TagService {
// Remove tags from a file
async removeTagsFromFile(fileId: string, tagIds: string[], userId: string): Promise<void> {
await this.c.var.db
await db
.delete(fileTag)
.where(and(eq(fileTag.fileId, fileId), inArray(fileTag.tagId, tagIds), eq(fileTag.userId, userId)));
}
// Get all tags for a specific file
async getFileTags(fileId: string, userId: string): Promise<Tag[]> {
const fileTagAssociations = await this.c.var.db.query.fileTag.findMany({
const fileTagAssociations = await db.query.fileTag.findMany({
where: (table, { and, eq }) => and(eq(table.fileId, fileId), eq(table.userId, userId)),
});
@@ -249,7 +241,7 @@ export class TagService {
if (tagIds.length === 0) return [];
const tags = await this.c.var.db.query.tag.findMany({
const tags = await db.query.tag.findMany({
where: (table, { and, inArray, eq }) => and(inArray(table.id, tagIds), eq(table.userId, userId)),
});
@@ -263,7 +255,7 @@ export class TagService {
// Get all child tag IDs recursively
private async getAllChildTagIds(parentId: string, userId: string): Promise<string[]> {
const childTags = await this.c.var.db.query.tag.findMany({
const childTags = await db.query.tag.findMany({
where: (table, { and, eq }) => and(eq(table.parentId, parentId), eq(table.userId, userId)),
});
@@ -304,6 +296,6 @@ export class TagService {
// Delete all fileTag associations for a file
async deleteFileTagsByFileId(fileId: string, userId: string): Promise<void> {
await this.c.var.db.delete(fileTag).where(and(eq(fileTag.fileId, fileId), eq(fileTag.userId, userId)));
await db.delete(fileTag).where(and(eq(fileTag.fileId, fileId), eq(fileTag.userId, userId)));
}
}

View File

@@ -1,12 +1,13 @@
import { emailObjectSchema, type WaitlistCount } from "@nimbus/shared";
import { sendError, sendSuccess } from "../utils";
import { zValidator } from "@hono/zod-validator";
import { createPublicRouter } from "../../hono";
import { type HonoContext } from "../../hono";
import { waitlist } from "@nimbus/db/schema";
import { count } from "drizzle-orm";
import { nanoid } from "nanoid";
import { Hono } from "hono";
const waitlistRouter = createPublicRouter()
const waitlistRouter = new Hono<{ Variables: HonoContext }>()
.get("/count", async c => {
try {
const result = await c.var.db.select({ count: count() }).from(waitlist);

View File

@@ -59,6 +59,7 @@ export function decrypt(encryptedText: string): string {
return encryptedText;
}
// ! THIS IS NEVER USED, FIGURE OUT WHAT TO DO WITH IT OR REMOVE IT
/**
* Determines if text is in encrypted format
*/

View File

@@ -1,5 +1,5 @@
{
"extends": "@nimbus/tsconfig/base",
"extends": "../../tsconfig.json",
"compilerOptions": {
"composite": true,
"noEmit": false,

View File

@@ -1,20 +0,0 @@
name = "nimbus-server"
main = "src/index.ts"
compatibility_date = "2025-05-18"
compatibility_flags = ["nodejs_compat"]
observability = { enabled = true }
[dev]
# wrangler dev must be run on port 80 and with sudo because wrangler sets
# Access-Control-Allow-Origin to http://localhost:1284:1284:3000
port = 80
host = "localhost"
local_protocol = "http"
[env.preview]
[env.staging]
routes = [{ pattern = "staging.api.nimbus.storage", custom_domain = true }]
[env.production]
routes = [{ pattern = "api.nimbus.storage", custom_domain = true }]

3
apps/web/.eslintrc.json Normal file
View File

@@ -0,0 +1,3 @@
{
"extends": ["next/core-web-vitals", "next/typescript"]
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,18 +0,0 @@
import { buildEslintConfig } from "@nimbus/eslint";
import { FlatCompat } from "@eslint/eslintrc";
import { fileURLToPath } from "node:url";
import { dirname } from "node:path";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const compat = new FlatCompat({
baseDirectory: __dirname,
});
const baseConfig = buildEslintConfig();
const nextConfig = compat.extends("next/core-web-vitals", "next/typescript");
const eslintConfig = [...baseConfig, ...nextConfig];
export default eslintConfig;

View File

@@ -8,12 +8,6 @@
"dev": "next dev --turbopack",
"build": "next build && bash scripts/handle-nextjs-standalone-build.sh",
"start": "bun run .next/build-start-folder/apps/web/server.js",
"cf:build": "opennextjs-cloudflare build",
"cf:preview": "opennextjs-cloudflare preview",
"cf:deploy:preview": "opennextjs-cloudflare deploy --env preview",
"cf:deploy:staging": "opennextjs-cloudflare deploy --env staging",
"cf:deploy:production": "opennextjs-cloudflare deploy --env production",
"cf:typegen": "wrangler types --env-interface CloudflareEnv cloudflare-env.d.ts",
"docker:build": "bun run build && docker build -t nimbus-web-manual .",
"docker:run": "source .env && docker run --name nimbus-web-manual --env-file .env -p $WEB_PORT:$WEB_PORT nimbus-web-manual:latest",
"docker:up": "bun run build && docker compose up -d",
@@ -70,14 +64,10 @@
"zod": "^4.0.14"
},
"devDependencies": {
"@eslint/eslintrc": "^3.3.1",
"@nimbus/eslint": "workspace:*",
"@nimbus/tsconfig": "workspace:*",
"@nimbus/vitest": "workspace:*",
"@types/react": "^19.1.9",
"@types/react-dom": "^19.1.7",
"eslint": "^9.32.0",
"eslint-config-next": "15.4",
"tailwind-scrollbar": "^4.0.2"
"eslint-config-next": "15.4"
}
}

View File

@@ -1,9 +0,0 @@
import { parseError } from "@/utils/error";

/**
 * Renders a centered, human-readable message for an arbitrary thrown value.
 * parseError normalizes strings, Error instances, and unknowns into a string.
 */
export function ErrorMessage({ error }: { error: unknown }) {
	return (
		<div className="flex flex-1 flex-col items-center justify-center space-y-2">
			<p>{parseError(error)}</p>
		</div>
	);
}

View File

@@ -1,11 +0,0 @@
import { Button } from "@/components/ui/button";
import { parseError } from "@/utils/error";

/**
 * Like ErrorMessage, but with a "Try again" button.
 *
 * @param error Arbitrary thrown value, normalized to text via parseError.
 * @param retryFn Callback invoked when the user clicks "Try again".
 */
export function ErrorMessageWithRetry({ error, retryFn }: { error: unknown; retryFn: () => void }) {
	return (
		<div className="flex flex-1 flex-col items-center justify-center space-y-2">
			<p>{parseError(error)}</p>
			<Button onClick={retryFn}>Try again</Button>
		</div>
	);
}

View File

@@ -1,66 +0,0 @@
"use client";
import { useCallback, useState } from "react";
import { useRouter } from "next/navigation";
import { toast } from "sonner";
interface UseApiAuthOptions {
onUnauthorized?: () => void;
redirectTo?: string;
}
/**
 * Client-side auth guard for raw fetch calls.
 *
 * Tracks whether a 401 has been observed and funnels every unauthorized
 * response through one handler: either the caller-supplied onUnauthorized,
 * or the default behavior (toast + redirect to the sign-in page, preserving
 * the current path so the user can be sent back after signing in).
 *
 * @param options.onUnauthorized Optional override invoked instead of the default redirect.
 * @param options.redirectTo Sign-in route used by the default handler (default "/signin").
 * @returns { isUnauthorized, handleUnauthorized, checkAuthStatus, fetchWithAuth }
 */
export function useApiAuth(options: UseApiAuthOptions = {}) {
	const router = useRouter();
	const [isUnauthorized, setIsUnauthorized] = useState(false);
	const { onUnauthorized, redirectTo = "/signin" } = options;
	// Central 401 handler: flag state, then delegate or redirect.
	const handleUnauthorized = useCallback(() => {
		setIsUnauthorized(true);
		if (onUnauthorized) {
			onUnauthorized();
		} else {
			toast.error("Session expired. Please sign in again.");
			// Round-trip the current path so sign-in can navigate back here.
			const currentPath = window.location.pathname;
			const redirectUrl = `${redirectTo}?redirect=${encodeURIComponent(currentPath)}`;
			router.push(redirectUrl);
		}
	}, [onUnauthorized, redirectTo, router]);
	// Returns false (after triggering the handler) when the response is a 401.
	const checkAuthStatus = useCallback(
		async (response: Response) => {
			if (response.status === 401) {
				handleUnauthorized();
				return false;
			}
			return true;
		},
		[handleUnauthorized]
	);
	// fetch wrapper: always sends credentials and runs the 401 check before
	// returning the (possibly unauthorized) response to the caller.
	const fetchWithAuth = useCallback(
		async (url: string, options?: RequestInit) => {
			try {
				const response = await fetch(url, {
					...options,
					credentials: "include",
				});
				await checkAuthStatus(response);
				return response;
			} catch (error) {
				console.error("API request failed:", error);
				throw error;
			}
		},
		[checkAuthStatus]
	);
	return {
		isUnauthorized,
		handleUnauthorized,
		checkAuthStatus,
		fetchWithAuth,
	};
}

View File

@@ -34,6 +34,7 @@ export function useGetFiles({ parentId, pageSize, pageToken, returnedValues }: G
});
}
// DO NOT REMOVE: this will be needed for future features
export function useGetFile({ fileId, returnedValues }: GetFileByIdSchema) {
const { clientPromise, providerId, accountId } = useAccountProvider();
return useQuery({
@@ -209,8 +210,7 @@ export function useUploadFile() {
});
}
// TODO(feat): add upload folder
// DO NOT REMOVE: this will be needed for future features
export function useUploadFolder() {}
export function useDownloadFile() {

View File

@@ -1,27 +0,0 @@
/**
 * Format file size to human-readable string
 * @param bytes File size in bytes
 * @returns Formatted size string (e.g., "1.5 MB"); "—" for zero, negative, or non-finite input
 */
export function formatFileSize(bytes: number): string {
	// Guard non-positive and non-finite values: previously a negative or
	// fractional-below-1 input produced "NaN undefined" / "512 undefined".
	if (!Number.isFinite(bytes) || bytes <= 0) return "—";
	const k = 1024;
	const sizes = ["Bytes", "KB", "MB", "GB", "TB"];
	// Clamp the unit index: without this, sizes >= 1 PB indexed past the end
	// of `sizes` and rendered as "… undefined".
	const rawIndex = Math.floor(Math.log(bytes) / Math.log(k));
	const i = Math.min(Math.max(rawIndex, 0), sizes.length - 1);
	// parseFloat strips a trailing ".0" so whole values print as "1 KB", not "1.0 KB".
	return `${parseFloat((bytes / Math.pow(k, i)).toFixed(1))} ${sizes[i]}`;
}
/**
 * Extract file extension from filename
 * @param filename Name of the file
 * @returns Lower-cased file extension without the dot (e.g., "pdf");
 * empty string when there is no dot or the name ends with a dot.
 */
export function getFileExtension(filename: string): string {
  // Capture the run of non-dot characters after the final dot; a trailing
  // dot or a dot-less name yields no match.
  const match = /\.([^.]+)$/.exec(filename);
  return match?.[1]?.toLowerCase() ?? "";
}

View File

@@ -3,7 +3,7 @@ import type { AppType } from "@nimbus/server";
import env from "@nimbus/env/client";
import { hc } from "hono/client";
export const createClient = (options?: ClientRequestOptions) => {
const createClient = (options?: ClientRequestOptions) => {
if (!env.NEXT_PUBLIC_BACKEND_URL) {
throw new Error("NEXT_PUBLIC_BACKEND_URL is not configured");
}

View File

@@ -1,16 +0,0 @@
/**
 * Parses an error into a string.
 * @param error - The unknown value caught at a throw site.
 * @returns The error's message, the string itself, or a generic fallback.
 */
export function parseError(error: unknown): string {
  // A string is never an Error instance, so checking Error first is safe.
  if (error instanceof Error) {
    return error.message;
  }
  return typeof error === "string" ? error : "An unknown error occurred";
}

View File

@@ -1,5 +1,5 @@
{
"extends": "@nimbus/tsconfig/base",
"extends": "../../tsconfig.json",
"compilerOptions": {
"jsx": "preserve",
"lib": ["esnext", "dom", "dom.iterable"],

View File

@@ -1,22 +0,0 @@
name = "nimbus-web"
main = ".open-next/worker.js"
compatibility_date = "2025-05-18"
compatibility_flags = ["nodejs_compat", "global_fetch_strictly_public"]
observability = { enabled = true }
[assets]
binding = "ASSETS"
directory = ".open-next/assets"
[dev]
port = 3000
local_protocol = "http"
host = "localhost"
[env.preview]
[env.staging]
routes = [{ pattern = "staging.nimbus.storage", custom_domain = true }]
[env.production]
routes = [{ pattern = "nimbus.storage", custom_domain = true }]

1109
bun.lock

File diff suppressed because it is too large Load Diff

View File

@@ -1,25 +0,0 @@
# This docker compose file is used by the scripts/fly:deploy.sh script,
# which must provide the following environment variables:
# APP_DIR_PATH
# PLATFORM
# REGISTRY
# APP_NAME
# TAG
# APP_PORT
services:
app:
build:
# This is CRUCIAL as it sets the working directory for docker compose and dockerfile
# https://docs.docker.com/reference/compose-file/build/#context
context: ${APP_DIR_PATH}
platforms:
- ${PLATFORM}
dockerfile: Dockerfile
image: ${REGISTRY}/${APP_NAME}:${TAG}
container_name: ${APP_NAME}
restart: unless-stopped
env_file: .env
environment:
NODE_ENV: production
ports:
- "${APP_PORT}:${APP_PORT}"

View File

@@ -1,5 +0,0 @@
import { buildEslintConfig } from "@nimbus/eslint";
const eslintConfig = [...buildEslintConfig(), { ignores: ["apps/web/**", "apps/server/.wrangler/**"] }];
export default eslintConfig;

View File

@@ -8,7 +8,7 @@ const config = {
"apps/web/**/*.{ts,tsx,js,jsx}": () => "bun run --cwd=apps/web lint --fix",
// Lint and fix TypeScript and JavaScript files
"**/*.{ts,tsx,js,jsx}": ["oxlint --fix", "eslint --fix --no-warn-ignored", "prettier --write --list-different"],
"**/*.{ts,tsx,js,jsx}": ["oxlint --fix", "prettier --write --list-different"],
// Format JSON and YAML files
"**/*.{json,md,yml,yaml}": ["prettier --write --list-different"],

View File

@@ -4,14 +4,10 @@
"devDependencies": {
"@changesets/cli": "^2.29.5",
"@cloudflare/workers-types": "^4.20250803.0",
"@nimbus/eslint": "workspace:*",
"@nimbus/tsconfig": "workspace:*",
"@nimbus/vitest": "workspace:*",
"@vitest/coverage-v8": "^3.2.4",
"@vitest/ui": "^3.2.4",
"concurrently": "^9.2.0",
"dotenv": "^17.2.1",
"eslint": "^9.32.0",
"glob": "^11.0.3",
"husky": "^9.1.7",
"knip": "^5.62.0",
@@ -24,8 +20,7 @@
"typescript": "^5.9.2",
"vite-tsconfig-paths": "^5.1.4",
"vitest": "^3.2.4",
"vitest-mock-extended": "^3.1.0",
"wrangler": "^4.27.0"
"vitest-mock-extended": "^3.1.0"
},
"packageManager": "bun@1.2.17",
"private": true,
@@ -51,10 +46,8 @@
"cache:reset": "bun run --cwd=packages/cache docker:reset",
"build:web": "bun run --cwd=apps/web build",
"build:server": "bun run --cwd=apps/server build",
"build": "concurrently --names \"WEB,SERVER\" --prefix-colors \"cyan,magenta\" --prefix \"[{name}]\" --kill-others-on-fail \"bun run --cwd=apps/web build\" \"bun run --cwd=apps/server build\"",
"start:web": "bun run --cwd=apps/web start",
"start:server": "bun run --cwd=apps/server start",
"start": "concurrently --names \"WEB,SERVER\" --prefix-colors \"cyan,magenta\" --prefix \"[{name}]\" --kill-others-on-fail \"bun run --cwd=apps/web start\" \"bun run --cwd=apps/server start\"",
"build": "turbo build",
"start": "turbo start",
"env:sync": "bun run scripts/copy-env-files.ts",
"env:sync:dry-run": "bun run scripts/copy-env-files.ts --dry-run",
"env:clean": "bun run scripts/delete-child-env-files.ts",
@@ -66,9 +59,7 @@
"docker:remove": "docker compose down --rmi local -v",
"docker:reset": "bun run docker:remove && bun run docker:up",
"format": "bun prettier . --write --list-different",
"format:check": "bun prettier . --check",
"lint:fix": "bun run oxlint --fix && bunx eslint . --fix && cd apps/web && bun run lint --fix",
"lint": "bun run oxlint && bunx eslint . && cd apps/web && bun run lint",
"lint": "bun run oxlint --fix && cd apps/web && bun run lint --fix",
"test": "vitest run",
"test:watch": "vitest",
"test:ui": "vitest --ui",

View File

@@ -9,7 +9,6 @@
"./auth": "./src/auth.ts"
},
"devDependencies": {
"@nimbus/tsconfig": "workspace:*",
"@nimbus/vitest": "workspace:*"
},
"dependencies": {
@@ -18,7 +17,6 @@
"@nimbus/env": "workspace:*",
"better-auth": "^1.3.4",
"drizzle-orm": "^0.44.4",
"iovalkey": "^0.3.3",
"resend": "^4.7.0"
}
}

View File

@@ -130,23 +130,23 @@ export const auth = betterAuth({
}),
],
// secondaryStorage: {
// // better-auth expects a JSON string
// get: async (key: string) => {
// const value = await (cacheClient as CacheClient).get(key);
// return value;
// },
// set: async (key: string, value: string, ttl?: number) => {
// if (ttl) {
// await (cacheClient as CacheClient).set(key, value, "EX", ttl);
// } else {
// await cacheClient.set(key, value);
// }
// },
// delete: async (key: string) => {
// await cacheClient.del(key);
// },
// },
secondaryStorage: {
// better-auth expects a JSON string
get: async (key: string) => {
const value = await (cacheClient as CacheClient).get(key);
return value;
},
set: async (key: string, value: string, ttl?: number) => {
if (ttl) {
await (cacheClient as CacheClient).set(key, value, "EX", ttl);
} else {
await cacheClient.set(key, value);
}
},
delete: async (key: string) => {
await cacheClient.del(key);
},
},
// https://www.better-auth.com/docs/reference/options#user
user: {

View File

@@ -1,24 +0,0 @@
/**
 * Extracts a token from a URL by trying multiple strategies:
 * 1. Query parameters (token, resetToken, t)
 * 2. Last path segment (if not 'reset-password')
 *
 * @param url - The URL to extract the token from
 * @returns The extracted token or null if not found
 */
export function extractTokenFromUrl(url: string): string | null {
  const { searchParams, pathname } = new URL(url);
  // First non-empty value among the recognized query parameter names.
  const fromQuery = ["token", "resetToken", "t"].map(name => searchParams.get(name)).find(Boolean);
  if (fromQuery) {
    return fromQuery;
  }
  // Fall back to the last path segment unless it is the route name itself.
  const segments = pathname.split("/");
  const lastSegment = segments[segments.length - 1];
  return lastSegment && lastSegment !== "reset-password" ? lastSegment : null;
}

View File

@@ -1,10 +1,7 @@
import { dbMock, mockFindFirst, mockSet, mockUpdate, mockWhere } from "@nimbus/db/mock";
import { afterAccountCreation, auth, type Auth } from "../src/auth";
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { RedisClient } from "@nimbus/cache";
import { mock } from "vitest-mock-extended";
import { afterAccountCreation, auth } from "../src/auth";
import { betterAuth } from "better-auth";
import type { Resend } from "resend";
// Mock better-auth
vi.mock("better-auth", () => ({

View File

@@ -1,3 +1,3 @@
{
"extends": "@nimbus/tsconfig/base"
"extends": "../../tsconfig.json"
}

View File

@@ -8,7 +8,8 @@
"./rate-limiters": "./src/rate-limiters.ts"
},
"scripts": {
"build": "bun build src/rate-limiters.ts --outdir dist --target bun",
"lint": "bun run oxlint . --fix",
"format": "bun prettier . --write --list-different",
"docker:up": "docker compose up -d",
"docker:down": "docker compose down",
"docker:remove": "docker compose down --rmi local -v",
@@ -16,10 +17,10 @@
},
"dependencies": {
"iovalkey": "^0.3.3",
"rate-limiter-flexible": "^7.2.0"
"rate-limiter-flexible": "^7.2.0",
"@nimbus/env": "workspace:*"
},
"devDependencies": {
"@nimbus/tsconfig": "workspace:*",
"@nimbus/vitest": "workspace:*"
}
}

View File

@@ -1,41 +1 @@
import { ValkeyRedis, createRedisClient, missingEnvErrorMessage, valkeyEnvVars } from "../src";
import { describe, expect, it } from "vitest";
describe("createRedisClient", () => {
it("should return ValkeyRedis instance", () => {
const env = {
VALKEY_HOST: "localhost",
VALKEY_PORT: "6379",
VALKEY_USERNAME: "test",
VALKEY_PASSWORD: "test",
};
const { redisClient } = createRedisClient(env);
expect(redisClient).toBeInstanceOf(ValkeyRedis);
});
it("should throw error if Valkey env vars are missing", () => {
expect(() => createRedisClient({})).toThrow(missingEnvErrorMessage(valkeyEnvVars));
});
it("should throw error if only some Valkey env vars are provided", () => {
expect(() =>
createRedisClient({
VALKEY_HOST: "localhost",
VALKEY_PORT: "6379",
})
).toThrow(missingEnvErrorMessage(valkeyEnvVars));
});
it("should configure Redis client with correct options", () => {
const env = {
VALKEY_HOST: "redis.example.com",
VALKEY_PORT: "6380",
VALKEY_USERNAME: "myuser",
VALKEY_PASSWORD: "mypass",
};
const { redisClient, closeRedisClient } = createRedisClient(env);
expect(redisClient).toBeInstanceOf(ValkeyRedis);
expect(typeof closeRedisClient).toBe("function");
});
});
// TODO: the KV model now requires a different set of tests; the old ones were removed to avoid confusing contributors

View File

@@ -1,3 +1,3 @@
{
"extends": "@nimbus/tsconfig/base"
"extends": "../../tsconfig.json"
}

View File

@@ -46,12 +46,8 @@
"tableFrom": "file_tag",
"tableTo": "tag",
"schemaTo": "public",
"columnsFrom": [
"tag_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["tag_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
},
@@ -60,12 +56,8 @@
"tableFrom": "file_tag",
"tableTo": "user",
"schemaTo": "public",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}
@@ -189,12 +181,8 @@
"tableFrom": "account",
"tableTo": "user",
"schemaTo": "public",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}
@@ -265,12 +253,8 @@
"tableFrom": "session",
"tableTo": "user",
"schemaTo": "public",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}
@@ -278,9 +262,7 @@
"compositePrimaryKeys": {},
"uniqueConstraints": {
"session_token_unique": {
"columns": [
"token"
],
"columns": ["token"],
"nullsNotDistinct": false,
"name": "session_token_unique"
}
@@ -366,9 +348,7 @@
"compositePrimaryKeys": {},
"uniqueConstraints": {
"waitlist_email_unique": {
"columns": [
"email"
],
"columns": ["email"],
"nullsNotDistinct": false,
"name": "waitlist_email_unique"
}
@@ -441,9 +421,7 @@
"compositePrimaryKeys": {},
"uniqueConstraints": {
"user_email_unique": {
"columns": [
"email"
],
"columns": ["email"],
"nullsNotDistinct": false,
"name": "user_email_unique"
}
@@ -507,12 +485,8 @@
"tableFrom": "tag",
"tableTo": "tag",
"schemaTo": "public",
"columnsFrom": [
"parent_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["parent_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
},
@@ -521,12 +495,8 @@
"tableFrom": "tag",
"tableTo": "user",
"schemaTo": "public",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}
@@ -610,12 +580,8 @@
"tableFrom": "pinned_file",
"tableTo": "user",
"schemaTo": "public",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}

View File

@@ -146,12 +146,8 @@
"name": "account_user_id_user_id_fk",
"tableFrom": "account",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}
@@ -203,12 +199,8 @@
"name": "file_tag_tag_id_tag_id_fk",
"tableFrom": "file_tag",
"tableTo": "tag",
"columnsFrom": [
"tag_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["tag_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
},
@@ -216,12 +208,8 @@
"name": "file_tag_user_id_user_id_fk",
"tableFrom": "file_tag",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}
@@ -304,12 +292,8 @@
"name": "pinned_file_user_id_user_id_fk",
"tableFrom": "pinned_file",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}
@@ -379,12 +363,8 @@
"name": "session_user_id_user_id_fk",
"tableFrom": "session",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}
@@ -394,9 +374,7 @@
"session_token_unique": {
"name": "session_token_unique",
"nullsNotDistinct": false,
"columns": [
"token"
]
"columns": ["token"]
}
},
"policies": {},
@@ -457,12 +435,8 @@
"name": "tag_user_id_user_id_fk",
"tableFrom": "tag",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
},
@@ -470,12 +444,8 @@
"name": "tag_parent_id_tag_id_fk",
"tableFrom": "tag",
"tableTo": "tag",
"columnsFrom": [
"parent_id"
],
"columnsTo": [
"id"
],
"columnsFrom": ["parent_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}
@@ -552,9 +522,7 @@
"user_email_unique": {
"name": "user_email_unique",
"nullsNotDistinct": false,
"columns": [
"email"
]
"columns": ["email"]
}
},
"policies": {},
@@ -640,9 +608,7 @@
"waitlist_email_unique": {
"name": "waitlist_email_unique",
"nullsNotDistinct": false,
"columns": [
"email"
]
"columns": ["email"]
}
},
"policies": {},

View File

@@ -4,7 +4,8 @@
"type": "module",
"private": true,
"scripts": {
"build": "bun build src/index.ts --outdir dist --target bun",
"lint": "bun run oxlint . --fix",
"format": "bun prettier . --write --list-different",
"generate": "bun --bun run drizzle-kit generate --config=drizzle.config.ts",
"migrate": "bun --bun run drizzle-kit migrate --config=drizzle.config.ts",
"push": "bun --bun run drizzle-kit push --config=drizzle.config.ts",
@@ -23,11 +24,11 @@
"./mock": "./src/mock.ts"
},
"devDependencies": {
"@nimbus/tsconfig": "workspace:*",
"@nimbus/vitest": "workspace:*",
"drizzle-kit": "^0.31.4"
},
"dependencies": {
"@nimbus/env": "workspace:*",
"drizzle-orm": "^0.44.4",
"pg": "^8.16.3",
"postgres": "^3.4.7"

View File

@@ -1,60 +0,0 @@
import { account, fileTag, pinnedFile, session, tag, user } from "./schema";
import { relations } from "drizzle-orm/relations";

// https://orm.drizzle.team/docs/relations

// file_tag join rows: each links one tag and one user to a file.
export const fileTagRelations = relations(fileTag, ({ one }) => ({
  tag: one(tag, {
    fields: [fileTag.tagId],
    references: [tag.id],
  }),
  user: one(user, {
    fields: [fileTag.userId],
    references: [user.id],
  }),
}));

// Tags form a self-referential tree via parentId; the relationName
// disambiguates the two sides of the tag -> tag relationship.
export const tagRelations = relations(tag, ({ one, many }) => ({
  fileTags: many(fileTag),
  tag: one(tag, {
    fields: [tag.parentId],
    references: [tag.id],
    relationName: "tag_parentId_tag_id",
  }),
  tags: many(tag, {
    relationName: "tag_parentId_tag_id",
  }),
  user: one(user, {
    fields: [tag.userId],
    references: [user.id],
  }),
}));

// A user owns many file tags, accounts, sessions, tags, and pinned files.
export const userRelations = relations(user, ({ many }) => ({
  fileTags: many(fileTag),
  accounts: many(account),
  sessions: many(session),
  tags: many(tag),
  pinnedFiles: many(pinnedFile),
}));

// Each auth account belongs to exactly one user.
export const accountRelations = relations(account, ({ one }) => ({
  user: one(user, {
    fields: [account.userId],
    references: [user.id],
  }),
}));

// Each session belongs to exactly one user.
export const sessionRelations = relations(session, ({ one }) => ({
  user: one(user, {
    fields: [session.userId],
    references: [user.id],
  }),
}));

// Each pinned file belongs to exactly one user.
export const pinnedFileRelations = relations(pinnedFile, ({ one }) => ({
  user: one(user, {
    fields: [pinnedFile.userId],
    references: [user.id],
  }),
}));

View File

@@ -1,153 +1 @@
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { createDb, type DatabaseEnv, type DB } from "../src";
import postgres from "postgres";
import * as pg from "pg";
vi.mock("pg", () => ({
Pool: vi.fn(() => ({
connect: vi.fn(),
end: vi.fn(),
query: vi.fn(),
})),
}));
vi.mock("drizzle-orm/postgres-js", () => ({
drizzle: vi.fn(() => ({})),
}));
vi.mock("postgres", () => ({
default: vi.fn(),
}));
const mockPostgres = postgres;
const { Pool: MockPool } = pg;
describe("Database Connection Tests", () => {
const testDatabaseUrl = "postgresql://testuser:testpass@localhost:5432/testdb";
const baseEnv: DatabaseEnv = {
NODE_ENV: "test",
DATABASE_URL: testDatabaseUrl,
};
beforeEach(() => {
vi.clearAllMocks();
delete process.env.TEST_EDGE_MODE;
});
afterEach(() => {
vi.resetModules();
});
describe("createDb function", () => {
it("should create a database connection successfully", () => {
const { db, closeDb } = createDb(baseEnv);
expect(db).toBeDefined();
expect(typeof db).toBe("object");
expect(closeDb).toBeInstanceOf(Function);
});
it("should handle empty database URL", () => {
expect(() => createDb({ ...baseEnv, DATABASE_URL: "" }));
});
it("should handle invalid database URL format", () => {
expect(() => createDb({ ...baseEnv, DATABASE_URL: "invalid-url" }));
});
it("should create connection with valid postgresql URL", () => {
const validUrls = [
"postgresql://user:pass@localhost:5432/db",
"postgres://user:pass@localhost:5432/db",
"postgresql://user@localhost/db",
];
validUrls.forEach(url => {
expect(() => createDb({ ...baseEnv, DATABASE_URL: url })).not.toThrow();
});
});
});
describe("Environment-specific database creation", () => {
it("should use postgres-js in development environment", () => {
const devEnv = { ...baseEnv, NODE_ENV: "development" };
createDb(devEnv);
expect(mockPostgres).toHaveBeenCalledWith(devEnv.DATABASE_URL, { prepare: false });
});
it("should use pg.Pool in production environment", () => {
const nodeEnv = { ...baseEnv, NODE_ENV: "production" };
createDb(nodeEnv);
expect(MockPool).toHaveBeenCalledWith({ connectionString: nodeEnv.DATABASE_URL });
});
});
describe("Database operations", () => {
let db: DB;
beforeEach(() => {
db = createDb(baseEnv).db;
});
it("should support basic query operations", async () => {
expect(db).toBeDefined();
if (typeof db === "object" && db !== null) {
expect(db).toBeInstanceOf(Object);
}
});
it("should handle connection errors gracefully", () => {
const invalidUrl = "postgresql://invalid";
expect(() => {
createDb({ ...baseEnv, DATABASE_URL: invalidUrl });
});
});
});
describe("Connection string parsing", () => {
const validConnectionStrings = [
"postgresql://localhost/test",
"postgresql://user@localhost/test",
"postgresql://user:password@localhost/test",
"postgresql://user:password@localhost:5432/test",
"postgres://user:password@localhost:5432/test",
];
it("should accept various valid connection string formats", () => {
validConnectionStrings.forEach(connectionString => {
expect(() => createDb({ ...baseEnv, DATABASE_URL: connectionString })).not.toThrow();
});
});
const invalidConnectionStrings = ["", "not-a-url", "http://localhost/test", "mysql://localhost/test"];
it("should reject invalid connection string formats", () => {
invalidConnectionStrings.forEach(connectionString => {
expect(() => createDb({ ...baseEnv, DATABASE_URL: connectionString }));
});
});
});
describe("Environment variable handling", () => {
it("should work without environment variables when URL is provided", () => {
// Clear any environment variables
const originalEnv = process.env;
process.env = {};
expect(() => createDb(baseEnv)).not.toThrow();
process.env = originalEnv;
});
it("should handle missing required environment variables gracefully", () => {
const originalEnv = process.env;
process.env = {};
expect(() => createDb(baseEnv)).not.toThrow();
process.env = originalEnv;
});
});
});
// TODO: the DB model now requires a different set of tests; the old ones were removed to avoid confusing contributors

View File

@@ -1,3 +1,3 @@
{
"extends": "@nimbus/tsconfig/base"
"extends": "../../tsconfig.json"
}

View File

@@ -1,7 +1,7 @@
import { createEnv } from "@t3-oss/env-core";
import { z } from "zod";
export const env = createEnv({
const env = createEnv({
runtimeEnv: {
NEXT_PUBLIC_BACKEND_URL: process.env.NEXT_PUBLIC_BACKEND_URL,
NEXT_PUBLIC_FRONTEND_URL: process.env.NEXT_PUBLIC_FRONTEND_URL,
@@ -14,8 +14,8 @@ export const env = createEnv({
// Client-side environment variables
NEXT_PUBLIC_BACKEND_URL: z.url(),
NEXT_PUBLIC_FRONTEND_URL: z.url(),
NEXT_PUBLIC_POSTHOG_KEY: z.string(),
NEXT_PUBLIC_POSTHOG_HOST: z.string(),
NEXT_PUBLIC_POSTHOG_KEY: z.string().optional(),
NEXT_PUBLIC_POSTHOG_HOST: z.string().optional(),
},
});

View File

@@ -54,8 +54,8 @@ export const env = createEnv({
}),
// Email
EMAIL_FROM: z.email(),
RESEND_API_KEY: z.string(),
EMAIL_FROM: z.email().optional(),
RESEND_API_KEY: z.string().optional(),
// For docker
SERVER_PORT: z.coerce.number(),

View File

@@ -1,3 +1,3 @@
{
"extends": "@nimbus/tsconfig/base"
"extends": "../../tsconfig.json"
}

View File

@@ -1,7 +0,0 @@
# @nimbus/eslint
## 0.0.1
### Patch Changes
- 7e2271f: init changeset

View File

@@ -1,18 +0,0 @@
{
"name": "@nimbus/eslint",
"version": "0.0.1",
"type": "module",
"private": true,
"exports": {
".": "./src/index.ts"
},
"dependencies": {
"@eslint/js": "^9.32.0",
"eslint-plugin-oxlint": "^1.9.0",
"eslint-plugin-sonarjs": "^3.0.4",
"eslint-plugin-unicorn": "^60.0.0",
"eslint-plugin-unused-imports": "^4.1.4",
"globals": "^16.3.0",
"typescript-eslint": "^8.38.0"
}
}

View File

@@ -1,77 +0,0 @@
import eslintUnusedImports from "eslint-plugin-unused-imports";
import eslintPluginUnicorn from "eslint-plugin-unicorn";
import eslintPluginSonarjs from "eslint-plugin-sonarjs";
import oxlintPlugin from "eslint-plugin-oxlint";
import tseslint from "typescript-eslint";
import eslint from "@eslint/js";
import globals from "globals";
const tsconfigRootDir = process.cwd();
export function buildEslintConfig() {
  // Globally ignored paths (dependencies and build artifacts).
  const ignores = {
    ignores: ["**/node_modules", "**/.next", "**/dist", "**/build", "**/coverage", "**/out"],
  };

  // Plugins made available to every configuration entry.
  const sharedPlugins = {
    plugins: {
      sonarjs: eslintPluginSonarjs,
      unicorn: eslintPluginUnicorn,
      unusedImports: eslintUnusedImports,
    },
  };

  // Type-aware linting for TypeScript sources.
  const typescript = tseslint.config(
    {
      files: ["**/*.ts", "**/*.tsx"],
      languageOptions: {
        parserOptions: {
          project: true, // Automatically find the nearest tsconfig.json
          tsconfigRootDir,
        },
      },
      rules: {
        "@typescript-eslint/no-floating-promises": "error",
      },
    },
    tseslint.configs.strict,
    tseslint.configs.stylistic
  );

  // Plain JavaScript sources get the recommended baseline.
  const javascript = {
    files: ["**/*.js", "**/*.jsx"],
    extends: [eslint.configs.recommended],
  };

  // Test files run under Node globals and may leave promises floating.
  const tests = {
    files: ["**/__tests__/**/*.js", "**/*.test.js", "**/*.test.ts"],
    languageOptions: {
      globals: {
        ...globals.node,
      },
    },
    rules: {
      "@typescript-eslint/no-floating-promises": "off" as const,
    },
  };

  // Flat-config order matters: later entries override earlier ones, and the
  // oxlint preset goes last so it can switch off rules oxlint already covers.
  return tseslint.config(ignores, sharedPlugins, typescript, javascript, tests, oxlintPlugin.configs["flat/all"]);
}

View File

@@ -1,3 +0,0 @@
{
"extends": "@nimbus/tsconfig/base"
}

View File

@@ -11,7 +11,6 @@
"@nimbus/db": "workspace:*"
},
"devDependencies": {
"@nimbus/tsconfig": "workspace:*",
"@nimbus/vitest": "workspace:*"
}
}

View File

@@ -32,7 +32,6 @@ export const updateFileSchema = z.object({
fileId: fileIdSchema,
name: z.string().min(1, "Name cannot be empty").max(100, "Name cannot be longer than 100 characters"),
// TODO: implement updating more than just name
// name: z.string().min(1, "Name cannot be empty").max(100, "Name cannot be longer than 100 characters").optional(),
});
export const createFileSchema = z.object({

View File

@@ -19,7 +19,7 @@ export const createTagSchema = z.object({
parentId: z.string().nullable().optional(),
});
export const tagIdObjectSchema = z.object({
const tagIdObjectSchema = z.object({
id: tagIdSchema,
});
export const getTagByIdSchema = tagIdObjectSchema;

View File

@@ -1,5 +1,5 @@
{
"extends": "@nimbus/tsconfig/base",
"extends": "../../tsconfig.json",
"compilerOptions": {
"composite": true,
"noEmit": false,

View File

@@ -1,7 +0,0 @@
# @nimbus/tsconfig
## 0.0.1
### Patch Changes
- 7e2271f: init changeset

View File

@@ -1,29 +0,0 @@
{
"$schema": "https://json.schemastore.org/tsconfig",
"compilerOptions": {
"target": "ESNext",
"lib": ["esnext"],
"allowJs": true,
"checkJs": true,
"skipLibCheck": true,
"strict": true,
"noEmit": true,
"esModuleInterop": true,
"module": "ESNext",
"moduleResolution": "bundler",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve",
"incremental": true,
"verbatimModuleSyntax": true,
"allowImportingTsExtensions": true,
"allowArbitraryExtensions": true,
"moduleDetection": "force",
"noFallthroughCasesInSwitch": true,
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false,
"noUncheckedIndexedAccess": true,
"types": ["node", "@cloudflare/workers-types"]
}
}

View File

@@ -1,9 +0,0 @@
{
"name": "@nimbus/tsconfig",
"version": "0.0.1",
"type": "module",
"private": true,
"exports": {
"./base": "./base.json"
}
}

View File

@@ -1,3 +1,29 @@
{
"extends": "@nimbus/tsconfig/base"
"$schema": "https://json.schemastore.org/tsconfig",
"compilerOptions": {
"target": "ESNext",
"lib": ["esnext"],
"allowJs": true,
"checkJs": true,
"skipLibCheck": true,
"strict": true,
"noEmit": true,
"esModuleInterop": true,
"module": "ESNext",
"moduleResolution": "bundler",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve",
"incremental": true,
"verbatimModuleSyntax": true,
"allowImportingTsExtensions": true,
"allowArbitraryExtensions": true,
"moduleDetection": "force",
"noFallthroughCasesInSwitch": true,
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false,
"noUncheckedIndexedAccess": true,
"types": ["node", "@cloudflare/workers-types"]
}
}

View File

@@ -4,10 +4,7 @@
"tasks": {
"build": {
"dependsOn": ["^build"],
"outputs": [".next/**", "!.next/cache/**"]
},
"check-types": {
"dependsOn": ["^check-types"]
"outputs": [".next/**", "!.next/cache/**", "dist/**"]
},
"dev": {
"persistent": true,
@@ -20,6 +17,13 @@
"format": {
"persistent": true,
"cache": false
},
"start": {
"persistent": true,
"cache": false
},
"check-types": {
"dependsOn": ["^check-types"]
}
}
}