添加上传和下载文件夹

This commit is contained in:
yoyuzh
2026-03-19 13:39:48 +08:00
parent 64e146dfee
commit 41a83d2805
17 changed files with 729 additions and 33 deletions

View File

@@ -0,0 +1,5 @@
name = "explorer"
description = "Read-only codebase investigator. It traces current behavior, finds entry points, and collects evidence from files, configs, and docs without making edits."
nickname_candidates = ["explorer", "trace", "inspect"]
sandbox_mode = "read-only"
include_apply_patch_tool = false

View File

@@ -0,0 +1,5 @@
name = "implementer"
description = "Code-writing agent. It makes focused changes in frontend, backend, scripts, or docs after planning/exploration are complete, and leaves broad verification to tester."
nickname_candidates = ["implementer", "impl", "builder"]
sandbox_mode = "workspace-write"
include_apply_patch_tool = true

View File

@@ -0,0 +1,5 @@
name = "orchestrator"
description = "Default top-level agent for this repo. It coordinates specialist agents, keeps scope aligned with the user request, and owns the final synthesis."
nickname_candidates = ["orchestrator", "lead", "coord"]
sandbox_mode = "read-only"
include_apply_patch_tool = false

View File

@@ -0,0 +1,5 @@
name = "planner"
description = "Planning-only agent. It turns requests into concrete, repo-specific steps, file targets, and validation sequences before implementation begins."
nickname_candidates = ["planner", "plan"]
sandbox_mode = "read-only"
include_apply_patch_tool = false

View File

@@ -0,0 +1,5 @@
name = "reviewer"
description = "Read-only review agent. It inspects diffs for bugs, regressions, missing tests, and command coverage gaps before work is considered done."
nickname_candidates = ["reviewer", "review", "audit"]
sandbox_mode = "read-only"
include_apply_patch_tool = false

View File

@@ -0,0 +1,5 @@
name = "tester"
description = "Verification-only agent. It runs lint, test, build, package, and type-check commands that already exist in this repo, reports failures, and does not edit source files."
nickname_candidates = ["tester", "qa", "verify"]
sandbox_mode = "workspace-write"
include_apply_patch_tool = false

32
.codex/config.toml Normal file
View File

@@ -0,0 +1,32 @@
[features]
multi_agent = true
[agents.orchestrator]
description = "Primary coordinator for this monorepo. Routes work across planner, explorer, implementer, tester, and reviewer."
config_file = ".codex/agents/orchestrator.toml"
nickname_candidates = ["orchestrator", "lead", "coord"]
[agents.planner]
description = "Builds repo-specific execution plans before code changes."
config_file = ".codex/agents/planner.toml"
nickname_candidates = ["planner", "plan"]
[agents.explorer]
description = "Maps existing behavior and code paths without editing files."
config_file = ".codex/agents/explorer.toml"
nickname_candidates = ["explorer", "trace", "inspect"]
[agents.implementer]
description = "Owns code changes and nearby test updates."
config_file = ".codex/agents/implementer.toml"
nickname_candidates = ["implementer", "impl", "builder"]
[agents.tester]
description = "Runs repository-backed verification commands only."
config_file = ".codex/agents/tester.toml"
nickname_candidates = ["tester", "qa", "verify"]
[agents.reviewer]
description = "Performs read-only review for bugs, regressions, and test gaps."
config_file = ".codex/agents/reviewer.toml"
nickname_candidates = ["reviewer", "review", "audit"]

6
.gitignore vendored
View File

@@ -21,7 +21,11 @@ front/.env.production
.vscode/
.idea/
.gemini/
.codex/
.codex/*
!.codex/config.toml
!.codex/agents/
.codex/agents/*
!.codex/agents/*.toml
开发测试账号.md
.DS_Store

74
AGENTS.md Normal file
View File

@@ -0,0 +1,74 @@
# Repository AGENTS
This repository is split across a Java backend, a Vite/React frontend, a small `docs/` area, and utility scripts. Use the project-level agents defined in `.codex/agents/` instead of improvising overlapping roles.
## Real project structure
- `backend/`: Spring Boot 3.3.8, Java 17, Maven, domain packages under `com.yoyuzh.{auth,cqu,files,config,common}`.
- `front/`: Vite 6, React 19, TypeScript, Tailwind CSS v4, route/page code under `src/pages`, reusable UI under `src/components`, shared logic under `src/lib`.
- `docs/`: currently contains implementation plans under `docs/superpowers/plans/`.
- `scripts/`: deployment, migration, smoke, and local startup helpers.
## Command source of truth
Use only commands that already exist in `front/package.json`, `backend/pom.xml`, `backend/README.md`, `front/README.md`, or the checked-in script files.
### Frontend commands (`cd front`)
- `npm run dev`
- `npm run build`
- `npm run preview`
- `npm run clean`
- `npm run lint`
- `npm run test`
Important: in this repo, `npm run lint` runs `tsc --noEmit`. There is no separate ESLint command, and there is no separate `typecheck` script beyond `npm run lint`.
### Backend commands (`cd backend`)
- `mvn spring-boot:run`
- `mvn spring-boot:run -Dspring-boot.run.profiles=dev`
- `mvn test`
- `mvn package`
Important: there is no dedicated backend lint command and no dedicated backend typecheck command declared in `backend/pom.xml` or `backend/README.md`. Do not invent one.
### Script files
- `scripts/deploy-front-oss.mjs`
- `scripts/migrate-file-storage-to-oss.mjs`
- `scripts/oss-deploy-lib.mjs`
- `scripts/oss-deploy-lib.test.mjs`
- `scripts/local-smoke.ps1`
- `scripts/start-backend-dev.ps1`
- `scripts/start-frontend-dev.ps1`
If you need one of these, run it explicitly from the file that already exists instead of inventing a new wrapper command.
## Role routing
- `orchestrator`: default coordinator. It decides which specialist agent should work next, keeps cross-directory work aligned, and writes the final handoff. It should stay read-only.
- `planner`: planning only. It produces file-level plans, command plans, and sequencing. It should stay read-only.
- `explorer`: investigation only. It maps code paths, current behavior, and relevant configs/tests. It should stay read-only.
- `implementer`: code changes only. It owns edits in `backend/`, `front/`, `scripts/`, or docs, and may update nearby tests when the implementation requires it.
- `tester`: verification only. It runs existing repo-backed commands and reports exact failures or missing commands. It should not rewrite source files.
- `reviewer`: review only. It inspects diffs for correctness, regressions, missing tests, and command coverage gaps. It should stay read-only.
## Default workflow
1. Start in `orchestrator`.
2. Use `planner` when the task spans multiple files, multiple layers, or both frontend and backend.
3. Use `explorer` before implementation if the existing behavior or owning module is not obvious.
4. Use `implementer` for the actual code changes.
5. Use `tester` after implementation. Prefer the narrowest real command set that still proves the change.
6. Use `reviewer` before final delivery, especially for cross-layer changes or auth/files/storage flows.
## Repo-specific guardrails
- Do not run `npm` commands at the repository root. This repo has a root `package-lock.json` but no root `package.json`.
- Frontend API proxying is defined in `front/vite.config.ts`, with `VITE_BACKEND_URL` defaulting to `http://localhost:8080`.
- Backend local development behavior is split between `backend/src/main/resources/application.yml` and `application-dev.yml`; the `dev` profile uses H2 and mock CQU data.
- Backend tests already exist under `backend/src/test/java/com/yoyuzh/...`; prefer adding or updating tests in the matching package.
- Frontend tests already exist under `front/src/**/*.test.ts`; keep new tests next to the state or library module they verify.
Directory-level `AGENTS.md` files in `backend/`, `front/`, and `docs/` add more specific rules and override this file where they are more specific.

32
backend/AGENTS.md Normal file
View File

@@ -0,0 +1,32 @@
# Backend AGENTS
This directory is the Spring Boot backend for `yoyuzh.xyz`. Keep changes aligned with the current package layout instead of introducing a new architecture.
## Backend layout
- `src/main/java/com/yoyuzh/auth`: authentication, JWT, login/register/profile DTOs and services.
- `src/main/java/com/yoyuzh/files`: file APIs and storage flows, including `files/storage`.
- `src/main/java/com/yoyuzh/cqu`: CQU schedule/grade aggregation.
- `src/main/java/com/yoyuzh/config`: Spring and security configuration.
- `src/main/java/com/yoyuzh/common`: shared exceptions and common utilities.
- `src/main/resources`: runtime config and logging.
- `src/test/java/com/yoyuzh/...`: matching package-level tests.
## Real backend commands
Run these from `backend/`:
- `mvn spring-boot:run`
- `mvn spring-boot:run -Dspring-boot.run.profiles=dev`
- `mvn test`
- `mvn package`
There is no dedicated backend lint command and no dedicated backend typecheck command in the checked-in Maven config or README. If a task asks for lint/typecheck, say that the backend currently does not define those commands.
## Backend rules
- Keep controller, service, DTO, config, and storage responsibilities separated along the current package boundaries.
- When changing `auth`, `files`, or `cqu`, check whether an existing test package already covers that area before adding new files elsewhere.
- Respect the existing `dev` profile in `application-dev.yml`; do not hardcode assumptions that bypass H2 or mock CQU behavior.
- If a change affects file storage behavior, note that the repo currently supports local storage and OSS-related migration/deploy scripts.
- Prefer Maven-based verification from this directory instead of ad hoc shell pipelines.

View File

@@ -17,10 +17,17 @@ import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
@Service
public class FileService {
@@ -43,6 +50,7 @@ public class FileService {
String normalizedPath = normalizeDirectoryPath(path);
String filename = normalizeUploadFilename(multipartFile.getOriginalFilename());
validateUpload(user.getId(), normalizedPath, filename, multipartFile.getSize());
ensureDirectoryHierarchy(user, normalizedPath);
fileContentStorage.upload(user.getId(), normalizedPath, filename, multipartFile);
return saveFileMetadata(user, normalizedPath, filename, filename, multipartFile.getContentType(), multipartFile.getSize());
@@ -76,6 +84,7 @@ public class FileService {
String filename = normalizeLeafName(request.filename());
String storageName = normalizeLeafName(request.storageName());
validateUpload(user.getId(), normalizedPath, filename, request.size());
ensureDirectoryHierarchy(user, normalizedPath);
fileContentStorage.completeUpload(user.getId(), normalizedPath, storageName, request.contentType(), request.size());
return saveFileMetadata(user, normalizedPath, filename, storageName, request.contentType(), request.size());
@@ -201,7 +210,7 @@ public class FileService {
public ResponseEntity<?> download(User user, Long fileId) {
StoredFile storedFile = getOwnedFile(user, fileId, "下载");
if (storedFile.isDirectory()) {
throw new BusinessException(ErrorCode.UNKNOWN, "目录不支持下载");
return downloadDirectory(user, storedFile);
}
if (fileContentStorage.supportsDirectDownload()) {
@@ -240,6 +249,44 @@ public class FileService {
return new DownloadUrlResponse("/api/files/download/" + storedFile.getId());
}
/**
 * Downloads a directory (and every descendant row) as an in-memory ZIP
 * archive named "<directory>.zip", with UTF-8 entry names.
 *
 * NOTE(review): the whole archive is materialized via ByteArrayOutputStream,
 * and every file's bytes are loaded eagerly — very large directories could
 * exhaust heap; confirm expected directory sizes / add a size cap.
 *
 * @param user      owner of the directory (already authorized by the caller)
 * @param directory the StoredFile row marked as a directory
 * @return 200 response carrying the ZIP bytes with an attachment disposition
 * @throws BusinessException if zipping fails with an IOException
 */
private ResponseEntity<byte[]> downloadDirectory(User user, StoredFile directory) {
    String logicalPath = buildLogicalPath(directory);
    String archiveName = directory.getFilename() + ".zip";
    // Sort by path, then filename, so parent directories are emitted before their children.
    List<StoredFile> descendants = storedFileRepository.findByUserIdAndPathEqualsOrDescendant(user.getId(), logicalPath)
            .stream()
            .sorted(Comparator.comparing(StoredFile::getPath).thenComparing(StoredFile::getFilename))
            .toList();
    byte[] archiveBytes;
    try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
         ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream, StandardCharsets.UTF_8)) {
        Set<String> createdEntries = new LinkedHashSet<>();
        // Always emit the root directory entry, even when the directory is empty.
        writeDirectoryEntry(zipOutputStream, createdEntries, directory.getFilename() + "/");
        for (StoredFile descendant : descendants) {
            String entryName = buildZipEntryName(directory.getFilename(), logicalPath, descendant);
            if (descendant.isDirectory()) {
                writeDirectoryEntry(zipOutputStream, createdEntries, entryName + "/");
                continue;
            }
            // Guard against file rows whose parent directory row is missing from the listing.
            ensureParentDirectoryEntries(zipOutputStream, createdEntries, entryName);
            writeFileEntry(zipOutputStream, createdEntries, entryName,
                    fileContentStorage.readFile(user.getId(), descendant.getPath(), descendant.getStorageName()));
        }
        zipOutputStream.finish();
        archiveBytes = outputStream.toByteArray();
    } catch (IOException ex) {
        throw new BusinessException(ErrorCode.UNKNOWN, "目录压缩失败");
    }
    // Fix: URLEncoder.encode produces application/x-www-form-urlencoded output, where a
    // space becomes '+'. RFC 5987 filename* requires percent-encoding, so a directory
    // named "my docs" would otherwise download as "my+docs.zip". Remap '+' to "%20".
    String encodedArchiveName = URLEncoder.encode(archiveName, StandardCharsets.UTF_8).replace("+", "%20");
    return ResponseEntity.ok()
            .header(HttpHeaders.CONTENT_DISPOSITION,
                    "attachment; filename*=UTF-8''" + encodedArchiveName)
            .contentType(MediaType.parseMediaType("application/zip"))
            .body(archiveBytes);
}
private FileMetadataResponse saveFileMetadata(User user,
String normalizedPath,
String filename,
@@ -275,6 +322,37 @@ public class FileService {
}
}
// Walks the normalized upload path segment by segment and creates any missing
// directory rows (plus their storage-side directories) so that a nested upload
// like "/projects/site/logo.png" never targets a non-existent folder.
// Storage is asked to ensure the directory BEFORE the metadata row is saved, so
// a storage failure does not leave an orphaned metadata row.
// NOTE(review): existsByUserIdAndPathAndFilename matches by name only — if a
// regular FILE already exists with a segment's name, it is silently treated as
// the directory; confirm whether that collision should raise instead.
private void ensureDirectoryHierarchy(User user, String normalizedPath) {
// Root always exists; nothing to create.
if ("/".equals(normalizedPath)) {
return;
}
// Drop the leading '/' and split into path segments, e.g. "/a/b" -> ["a", "b"].
String[] segments = normalizedPath.substring(1).split("/");
String currentPath = "/";
for (String segment : segments) {
// Segment already present at this level: just descend into it.
if (storedFileRepository.existsByUserIdAndPathAndFilename(user.getId(), currentPath, segment)) {
currentPath = "/".equals(currentPath) ? "/" + segment : currentPath + "/" + segment;
continue;
}
// Logical absolute path of the directory being created (avoid "//" under root).
String logicalPath = "/".equals(currentPath) ? "/" + segment : currentPath + "/" + segment;
fileContentStorage.ensureDirectory(user.getId(), logicalPath);
// Persist the directory marker row: size 0, sentinel contentType "directory".
StoredFile storedFile = new StoredFile();
storedFile.setUser(user);
storedFile.setFilename(segment);
storedFile.setPath(currentPath);
storedFile.setStorageName(segment);
storedFile.setContentType("directory");
storedFile.setSize(0L);
storedFile.setDirectory(true);
storedFileRepository.save(storedFile);
currentPath = logicalPath;
}
}
private String normalizeUploadFilename(String originalFilename) {
String filename = StringUtils.cleanPath(originalFilename);
if (!StringUtils.hasText(filename)) {
@@ -328,6 +406,43 @@ public class FileService {
: storedFile.getPath() + "/" + storedFile.getFilename();
}
/**
 * Builds the archive-relative entry name for a stored row, rooted at the
 * downloaded directory's own name, e.g. "archive/nested/todo.txt".
 *
 * @param rootDirectoryName filename of the directory being zipped
 * @param rootLogicalPath   absolute logical path of that directory
 * @param storedFile        descendant row (file or directory) to name
 */
private String buildZipEntryName(String rootDirectoryName, String rootLogicalPath, StoredFile storedFile) {
    // Path of the row relative to the zipped root; empty for direct children.
    String relativeDirectory = storedFile.getPath().equals(rootLogicalPath)
            ? ""
            : storedFile.getPath().substring(rootLogicalPath.length() + 1) + "/";
    return rootDirectoryName + "/" + relativeDirectory + storedFile.getFilename();
}
/**
 * Emits a directory entry for every ancestor prefix of {@code entryName}
 * (each prefix ending in '/'), so extracted archives never contain a file
 * whose parent folder entry is missing.
 */
private void ensureParentDirectoryEntries(ZipOutputStream zipOutputStream, Set<String> createdEntries, String entryName) throws IOException {
    // Visit every '/' from left to right; each prefix up to and including it is a parent dir.
    for (int slash = entryName.indexOf('/'); slash >= 0; slash = entryName.indexOf('/', slash + 1)) {
        writeDirectoryEntry(zipOutputStream, createdEntries, entryName.substring(0, slash + 1));
    }
}
/**
 * Writes a zero-length directory entry (name is expected to end in '/'),
 * at most once per distinct name across the whole archive.
 */
private void writeDirectoryEntry(ZipOutputStream zipOutputStream, Set<String> createdEntries, String entryName) throws IOException {
    boolean firstOccurrence = createdEntries.add(entryName);
    if (firstOccurrence) {
        zipOutputStream.putNextEntry(new ZipEntry(entryName));
        zipOutputStream.closeEntry();
    }
}
/**
 * Writes a single file entry with the given bytes; a name that was already
 * emitted (file or directory) is skipped so duplicate rows cannot corrupt
 * the archive.
 */
private void writeFileEntry(ZipOutputStream zipOutputStream, Set<String> createdEntries, String entryName, byte[] content)
        throws IOException {
    boolean firstOccurrence = createdEntries.add(entryName);
    if (firstOccurrence) {
        zipOutputStream.putNextEntry(new ZipEntry(entryName));
        zipOutputStream.write(content);
        zipOutputStream.closeEntry();
    }
}
private String normalizeLeafName(String filename) {
String cleaned = StringUtils.cleanPath(filename == null ? "" : filename).trim();
if (!StringUtils.hasText(cleaned)) {

View File

@@ -12,12 +12,18 @@ import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.mock.web.MockMultipartFile;
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.zip.ZipInputStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
@@ -112,6 +118,23 @@ class FileServiceTest {
verify(fileContentStorage).completeUpload(7L, "/docs", "notes.txt", "text/plain", 12L);
}
// Verifies that completing an upload into a nested path first creates every
// missing directory level ("/projects", then "/projects/site") in both the
// content storage and the metadata repository, before the upload completes.
@Test
void shouldCreateMissingDirectoriesBeforeCompletingNestedUpload() {
User user = createUser(7L);
// No conflicting file at the target, and neither path segment exists yet.
when(storedFileRepository.existsByUserIdAndPathAndFilename(7L, "/projects/site", "logo.png")).thenReturn(false);
when(storedFileRepository.existsByUserIdAndPathAndFilename(7L, "/", "projects")).thenReturn(false);
when(storedFileRepository.existsByUserIdAndPathAndFilename(7L, "/projects", "site")).thenReturn(false);
// Echo back saved entities so the service can keep building on them.
when(storedFileRepository.save(any(StoredFile.class))).thenAnswer(invocation -> invocation.getArgument(0));
fileService.completeUpload(user,
new CompleteUploadRequest("/projects/site", "logo.png", "logo.png", "image/png", 12L));
// Storage must be told about each directory level before the upload finishes.
verify(fileContentStorage).ensureDirectory(7L, "/projects");
verify(fileContentStorage).ensureDirectory(7L, "/projects/site");
verify(fileContentStorage).completeUpload(7L, "/projects/site", "logo.png", "image/png", 12L);
// Three saves: two directory rows plus the uploaded file's metadata row.
verify(storedFileRepository, times(3)).save(any(StoredFile.class));
}
@Test
void shouldRenameFileThroughConfiguredStorage() {
User user = createUser(7L);
@@ -229,6 +252,47 @@ class FileServiceTest {
verify(fileContentStorage, never()).createDownloadUrl(any(), any(), any(), any());
}
// Verifies that downloading a directory produces a ZIP response whose entries
// mirror the stored hierarchy: directory markers for every folder and the
// correct bytes for each file, with the archive named after the directory.
@Test
void shouldDownloadDirectoryAsZipArchive() throws Exception {
User user = createUser(7L);
// Hierarchy under test: /docs/archive with a nested dir and two files.
StoredFile directory = createDirectory(10L, user, "/docs", "archive");
StoredFile childDirectory = createDirectory(11L, user, "/docs/archive", "nested");
StoredFile childFile = createFile(12L, user, "/docs/archive", "notes.txt");
StoredFile nestedFile = createFile(13L, user, "/docs/archive/nested", "todo.txt");
when(storedFileRepository.findById(10L)).thenReturn(Optional.of(directory));
when(storedFileRepository.findByUserIdAndPathEqualsOrDescendant(7L, "/docs/archive"))
.thenReturn(List.of(childDirectory, childFile, nestedFile));
when(fileContentStorage.readFile(7L, "/docs/archive", "notes.txt"))
.thenReturn("hello".getBytes(StandardCharsets.UTF_8));
when(fileContentStorage.readFile(7L, "/docs/archive/nested", "todo.txt"))
.thenReturn("world".getBytes(StandardCharsets.UTF_8));
var response = fileService.download(user, 10L);
// Response headers: attachment named "archive.zip", content type application/zip.
assertThat(response.getHeaders().getFirst(HttpHeaders.CONTENT_DISPOSITION))
.contains("archive.zip");
assertThat(response.getHeaders().getContentType())
.isEqualTo(MediaType.parseMediaType("application/zip"));
// Re-read the archive, collecting entry name -> text content ("" for directories).
Map<String, String> entries = new LinkedHashMap<>();
try (ZipInputStream zipInputStream = new ZipInputStream(
new ByteArrayInputStream((byte[]) response.getBody()), StandardCharsets.UTF_8)) {
var entry = zipInputStream.getNextEntry();
while (entry != null) {
entries.put(entry.getName(), entry.isDirectory() ? "" : new String(zipInputStream.readAllBytes(), StandardCharsets.UTF_8));
entry = zipInputStream.getNextEntry();
}
}
// Both directory markers and both file payloads must be present.
assertThat(entries).containsEntry("archive/", "");
assertThat(entries).containsEntry("archive/nested/", "");
assertThat(entries).containsEntry("archive/notes.txt", "hello");
assertThat(entries).containsEntry("archive/nested/todo.txt", "world");
verify(fileContentStorage).readFile(7L, "/docs/archive", "notes.txt");
verify(fileContentStorage).readFile(7L, "/docs/archive/nested", "todo.txt");
}
private User createUser(Long id) {
User user = new User();
user.setId(id);

10
docs/AGENTS.md Normal file
View File

@@ -0,0 +1,10 @@
# Docs AGENTS
This directory currently stores implementation plans under `docs/superpowers/plans/`. Keep docs here concrete and repository-specific.
## Docs rules
- Prefer documenting commands that already exist in `front/package.json`, `backend/pom.xml`, `backend/README.md`, `front/README.md`, or checked-in script files.
- Do not introduce placeholder commands such as an imaginary root `npm test`, backend lint script, or standalone frontend typecheck script.
- When documenting validation, state gaps explicitly. In this repo, backend lint/typecheck commands are not defined, and frontend type checking currently happens through `npm run lint`.
- Keep plan or handoff documents tied to actual repo paths like `backend/...`, `front/...`, `scripts/...`, and `docs/...`.

37
front/AGENTS.md Normal file
View File

@@ -0,0 +1,37 @@
# Frontend AGENTS
This directory is a Vite + React + TypeScript frontend. Follow the current split between pages, shared state/helpers, auth context, and reusable UI.
## Frontend layout
- `src/pages`: route-level screens and page-scoped state modules.
- `src/lib`: API helpers, cache helpers, schedule utilities, shared types, and test files.
- `src/auth`: authentication context/provider.
- `src/components/layout`: page shell/layout components.
- `src/components/ui`: reusable UI primitives.
- `src/index.css`: global styles.
## Real frontend commands
Run these from `front/`:
- `npm run dev`
- `npm run build`
- `npm run preview`
- `npm run clean`
- `npm run lint`
- `npm run test`
Important:
- `npm run lint` is the current TypeScript check because it runs `tsc --noEmit`.
- There is no separate ESLint script.
- There is no separate `typecheck` script beyond `npm run lint`.
## Frontend rules
- Keep route behavior in `src/pages` and shared non-UI logic in `src/lib`.
- Add or update tests next to the state/helper module they exercise, following the existing `*.test.ts` pattern.
- Preserve the current Vite alias usage: `@/*` resolves from the `front/` directory root.
- If a change depends on backend API behavior, verify the proxy expectations in `vite.config.ts` before hardcoding URLs.
- Use the existing `npm run build`, `npm run test`, and `npm run lint` commands for validation; do not invent a separate frontend verification command.

View File

@@ -11,6 +11,7 @@ import {
ChevronRight,
ChevronUp,
FileUp,
FolderUp,
Upload,
UploadCloud,
Plus,
@@ -34,10 +35,15 @@ import { cn } from '@/src/lib/utils';
import {
buildUploadProgressSnapshot,
createUploadMeasurement,
createUploadTasks,
completeUploadTask,
createUploadTask,
failUploadTask,
prepareUploadTaskForCompletion,
prepareFolderUploadEntries,
prepareUploadFile,
shouldUploadEntriesSequentially,
type PendingUploadEntry,
type UploadMeasurement,
type UploadTask,
} from './files-upload';
@@ -63,6 +69,12 @@ const DIRECTORIES = [
{ name: '图片', icon: Folder },
];
// Resolves after `ms` milliseconds. Used to hold a finished upload task in its
// "completing" state briefly so the progress UI can animate before it flips to done.
function sleep(ms: number) {
  return new Promise<void>((resolve) => setTimeout(resolve, ms));
}
// Converts UI breadcrumb segments into the backend's absolute path notation:
// [] -> "/", ["a", "b"] -> "/a/b".
function toBackendPath(pathParts: string[]) {
  if (pathParts.length === 0) {
    return '/';
  }
  return '/' + pathParts.join('/');
}
@@ -115,6 +127,7 @@ export default function Files() {
const initialPath = readCachedValue<string[]>(getFilesLastPathCacheKey()) ?? [];
const initialCachedFiles = readCachedValue<FileMetadata[]>(getFilesListCacheKey(toBackendPath(initialPath))) ?? [];
const fileInputRef = useRef<HTMLInputElement | null>(null);
const directoryInputRef = useRef<HTMLInputElement | null>(null);
const uploadMeasurementsRef = useRef(new Map<string, UploadMeasurement>());
const [currentPath, setCurrentPath] = useState<string[]>(initialPath);
const currentPathRef = useRef(currentPath);
@@ -128,7 +141,7 @@ export default function Files() {
const [fileToDelete, setFileToDelete] = useState<UiFile | null>(null);
const [newFileName, setNewFileName] = useState('');
const [activeDropdown, setActiveDropdown] = useState<number | null>(null);
const [viewMode, setViewMode] = useState<'list' | 'grid'>('grid');
const [viewMode, setViewMode] = useState<'list' | 'grid'>('list');
const [renameError, setRenameError] = useState('');
const [isRenaming, setIsRenaming] = useState(false);
@@ -157,6 +170,15 @@ export default function Files() {
});
}, [currentPath]);
useEffect(() => {
if (!directoryInputRef.current) {
return;
}
directoryInputRef.current.setAttribute('webkitdirectory', '');
directoryInputRef.current.setAttribute('directory', '');
}, []);
const handleSidebarClick = (pathParts: string[]) => {
setCurrentPath(pathParts);
setSelectedFile(null);
@@ -192,25 +214,28 @@ export default function Files() {
fileInputRef.current?.click();
};
const handleFileChange = async (event: React.ChangeEvent<HTMLInputElement>) => {
const files = event.target.files ? (Array.from(event.target.files) as File[]) : [];
event.target.value = '';
// Opens the hidden directory-picker input so the user can upload a whole folder.
const handleUploadFolderClick = () => {
  const input = directoryInputRef.current;
  if (input) {
    input.click();
  }
};
if (files.length === 0) {
const runUploadEntries = async (entries: PendingUploadEntry[]) => {
if (entries.length === 0) {
return;
}
const uploadPathParts = [...currentPath];
const uploadPath = toBackendPath(uploadPathParts);
const reservedNames = new Set<string>(currentFiles.map((file) => file.name));
setIsUploadPanelOpen(true);
uploadMeasurementsRef.current.clear();
const uploadJobs = files.map(async (file) => {
const preparedUpload = prepareUploadFile(file, reservedNames);
reservedNames.add(preparedUpload.file.name);
const uploadFile = preparedUpload.file;
const uploadTask = createUploadTask(uploadFile, uploadPathParts, undefined, preparedUpload.noticeMessage);
setUploads((previous) => [...previous, uploadTask]);
const batchTasks = createUploadTasks(entries);
setUploads(batchTasks);
const runSingleUpload = async (
{file: uploadFile, pathParts: uploadPathParts}: PendingUploadEntry,
uploadTask: UploadTask,
) => {
const uploadPath = toBackendPath(uploadPathParts);
const startedAt = Date.now();
uploadMeasurementsRef.current.set(uploadTask.id, createUploadMeasurement(startedAt));
try {
const updateProgress = ({loaded, total}: {loaded: number; total: number}) => {
@@ -303,6 +328,10 @@ export default function Files() {
}
uploadMeasurementsRef.current.delete(uploadTask.id);
setUploads((previous) =>
previous.map((task) => (task.id === uploadTask.id ? prepareUploadTaskForCompletion(task) : task)),
);
await sleep(120);
setUploads((previous) =>
previous.map((task) => (task.id === uploadTask.id ? completeUploadTask(task) : task)),
);
@@ -315,12 +344,62 @@ export default function Files() {
);
return null;
}
};
const results = shouldUploadEntriesSequentially(entries)
? await entries.reduce<Promise<Array<FileMetadata | null>>>(
async (previousPromise, entry, index) => {
const previous = await previousPromise;
const current = await runSingleUpload(entry, batchTasks[index]);
return [...previous, current];
},
Promise.resolve([]),
)
: await Promise.all(entries.map((entry, index) => runSingleUpload(entry, batchTasks[index])));
if (results.some(Boolean)) {
await loadCurrentPath(currentPathRef.current).catch(() => undefined);
}
};
const handleFileChange = async (event: React.ChangeEvent<HTMLInputElement>) => {
const files = event.target.files ? (Array.from(event.target.files) as File[]) : [];
event.target.value = '';
if (files.length === 0) {
return;
}
const reservedNames = new Set<string>(currentFiles.map((file) => file.name));
const entries: PendingUploadEntry[] = files.map((file) => {
const preparedUpload = prepareUploadFile(file, reservedNames);
reservedNames.add(preparedUpload.file.name);
return {
file: preparedUpload.file,
pathParts: [...currentPath],
source: 'file' as const,
noticeMessage: preparedUpload.noticeMessage,
};
});
const results = await Promise.all(uploadJobs);
if (results.some(Boolean) && toBackendPath(currentPathRef.current) === uploadPath) {
await loadCurrentPath(uploadPathParts).catch(() => undefined);
await runUploadEntries(entries);
};
// Change handler for the hidden directory-picker input: converts the selected
// folder's files (with their webkitRelativePath-derived subpaths) into pending
// upload entries and hands them to the shared batch uploader.
const handleFolderChange = async (event: React.ChangeEvent<HTMLInputElement>) => {
const files = event.target.files ? (Array.from(event.target.files) as File[]) : [];
// Reset the input so re-selecting the same folder fires this handler again.
event.target.value = '';
if (files.length === 0) {
return;
}
// Resolve target subpaths relative to the current folder and rename the root
// folder if it collides with an existing name in the current listing.
const entries = prepareFolderUploadEntries(
files,
[...currentPath],
currentFiles.map((file) => file.name),
);
await runUploadEntries(entries);
};
const handleCreateFolder = async () => {
@@ -399,17 +478,29 @@ export default function Files() {
await loadCurrentPath(currentPath).catch(() => undefined);
};
const handleDownload = async () => {
if (!selectedFile || selectedFile.type === 'folder') {
const handleDownload = async (targetFile: UiFile | null = selectedFile) => {
if (!targetFile) {
return;
}
if (targetFile.type === 'folder') {
const response = await apiDownload(`/files/download/${targetFile.id}`);
const blob = await response.blob();
const url = window.URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = url;
link.download = `${targetFile.name}.zip`;
link.click();
window.URL.revokeObjectURL(url);
return;
}
try {
const response = await apiRequest<DownloadUrlResponse>(`/files/download/${selectedFile.id}/url`);
const response = await apiRequest<DownloadUrlResponse>(`/files/download/${targetFile.id}/url`);
const url = response.url;
const link = document.createElement('a');
link.href = url;
link.download = selectedFile.name;
link.download = targetFile.name;
link.rel = 'noreferrer';
link.target = '_blank';
link.click();
@@ -420,12 +511,12 @@ export default function Files() {
}
}
const response = await apiDownload(`/files/download/${selectedFile.id}`);
const response = await apiDownload(`/files/download/${targetFile.id}`);
const blob = await response.blob();
const url = window.URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = url;
link.download = selectedFile.name;
link.download = targetFile.name;
link.click();
window.URL.revokeObjectURL(url);
};
@@ -573,6 +664,7 @@ export default function Files() {
file={file}
activeDropdown={activeDropdown}
onToggle={(fileId) => setActiveDropdown((previous) => (previous === fileId ? null : fileId))}
onDownload={handleDownload}
onRename={openRenameModal}
onDelete={openDeleteModal}
onClose={() => setActiveDropdown(null)}
@@ -601,6 +693,7 @@ export default function Files() {
file={file}
activeDropdown={activeDropdown}
onToggle={(fileId) => setActiveDropdown((previous) => (previous === fileId ? null : fileId))}
onDownload={handleDownload}
onRename={openRenameModal}
onDelete={openDeleteModal}
onClose={() => setActiveDropdown(null)}
@@ -634,10 +727,14 @@ export default function Files() {
<Button variant="default" className="gap-2" onClick={handleUploadClick}>
<Upload className="w-4 h-4" />
</Button>
<Button variant="outline" className="gap-2" onClick={handleUploadFolderClick}>
<FolderUp className="w-4 h-4" />
</Button>
<Button variant="outline" className="gap-2" onClick={handleCreateFolder}>
<Plus className="w-4 h-4" />
</Button>
<input ref={fileInputRef} type="file" multiple className="hidden" onChange={handleFileChange} />
<input ref={directoryInputRef} type="file" multiple className="hidden" onChange={handleFolderChange} />
</div>
</Card>
@@ -686,14 +783,19 @@ export default function Files() {
<Trash2 className="w-4 h-4" />
</Button>
</div>
{selectedFile.type !== 'folder' && (
<Button variant="default" className="w-full gap-2" onClick={handleDownload}>
<Download className="w-4 h-4" />
</Button>
)}
{selectedFile.type === 'folder' && (
<Button variant="default" className="w-full gap-2" onClick={() => handleFolderDoubleClick(selectedFile)}>
<div className="space-y-3">
<Button variant="default" className="w-full gap-2" onClick={() => handleFolderDoubleClick(selectedFile)}>
</Button>
<Button variant="default" className="w-full gap-2" onClick={() => void handleDownload(selectedFile)}>
<Download className="w-4 h-4" />
</Button>
</div>
)}
{selectedFile.type !== 'folder' && (
<Button variant="default" className="w-full gap-2" onClick={() => void handleDownload(selectedFile)}>
<Download className="w-4 h-4" />
</Button>
)}
</div>
@@ -939,6 +1041,7 @@ function FileActionMenu({
file,
activeDropdown,
onToggle,
onDownload,
onRename,
onDelete,
onClose,
@@ -946,6 +1049,7 @@ function FileActionMenu({
file: UiFile;
activeDropdown: number | null;
onToggle: (fileId: number) => void;
onDownload: (file: UiFile) => Promise<void>;
onRename: (file: UiFile) => void;
onDelete: (file: UiFile) => void;
onClose: () => void;
@@ -979,6 +1083,16 @@ function FileActionMenu({
transition={{ duration: 0.15 }}
className="absolute right-0 top-full z-50 mt-1 w-32 overflow-hidden rounded-lg border border-white/10 bg-[#1e293b] py-1 shadow-xl"
>
<button
onClick={(event) => {
event.stopPropagation();
void onDownload(file);
onClose();
}}
className="flex w-full items-center gap-2 px-3 py-2 text-left text-sm text-slate-300 transition-colors hover:bg-white/10 hover:text-white"
>
<Download className="w-4 h-4" /> {file.type === 'folder' ? '下载文件夹' : '下载文件'}
</button>
<button
onClick={(event) => {
event.stopPropagation();

View File

@@ -4,9 +4,13 @@ import test from 'node:test';
import {
buildUploadProgressSnapshot,
completeUploadTask,
createUploadTasks,
createUploadTask,
prepareUploadTaskForCompletion,
formatTransferSpeed,
prepareFolderUploadEntries,
prepareUploadFile,
shouldUploadEntriesSequentially,
} from './files-upload';
test('createUploadTask uses current path as upload destination', () => {
@@ -95,3 +99,79 @@ test('prepareUploadFile keeps files without conflicts unchanged', () => {
assert.equal(prepared.file.name, 'syllabus');
assert.equal(prepared.noticeMessage, undefined);
});
test('prepareFolderUploadEntries keeps relative directories and renames conflicting root folders', () => {
  // Builds a File carrying the webkitRelativePath a directory picker would set.
  const makeFolderFile = (content: string, name: string, relativePath: string) => {
    const pickedFile = new File([content], name, {type: 'text/plain'});
    Object.defineProperty(pickedFile, 'webkitRelativePath', {
      configurable: true,
      value: relativePath,
    });
    return pickedFile;
  };

  const rootLevelFile = makeFolderFile('alpha', 'a.txt', '设计稿/a.txt');
  const nestedFile = makeFolderFile('beta', 'b.txt', '设计稿/子目录/b.txt');

  const entries = prepareFolderUploadEntries([rootLevelFile, nestedFile], ['文档'], ['设计稿']);

  // Both entries land under the renamed root, subdirectory preserved.
  assert.equal(entries[0].pathParts.join('/'), '文档/设计稿 (1)');
  assert.equal(entries[1].pathParts.join('/'), '文档/设计稿 (1)/子目录');
  assert.equal(entries[0].noticeMessage, '检测到同名文件夹,已自动重命名为 设计稿 (1)');
  assert.equal(entries[1].noticeMessage, '检测到同名文件夹,已自动重命名为 设计稿 (1)');
  assert.equal(shouldUploadEntriesSequentially(entries), true);
});
test('shouldUploadEntriesSequentially keeps plain file uploads in parallel mode', () => {
  // Two file-picker entries targeting the same folder; no folder source present.
  const buildFileEntry = (content: string, name: string) => ({
    file: new File([content], name, {type: 'text/plain'}),
    pathParts: ['文档'],
    source: 'file' as const,
  });

  const entries = [buildFileEntry('alpha', 'a.txt'), buildFileEntry('beta', 'b.txt')];

  assert.equal(shouldUploadEntriesSequentially(entries), false);
});
test('createUploadTasks creates a stable task list for the whole batch', () => {
  // Table of inputs; File objects are materialized right before the call.
  const batch = [
    {content: 'alpha', name: 'a.txt', pathParts: ['文档'], source: 'file' as const, noticeMessage: 'alpha'},
    {content: 'beta', name: 'b.txt', pathParts: ['文档', '资料'], source: 'folder' as const, noticeMessage: 'beta'},
  ];

  const tasks = createUploadTasks(
    batch.map(({content, name, pathParts, source, noticeMessage}) => ({
      file: new File([content], name, {type: 'text/plain'}),
      pathParts,
      source,
      noticeMessage,
    })),
  );

  assert.equal(tasks.length, 2);
  const [firstTask, secondTask] = tasks;
  assert.equal(firstTask.fileName, 'a.txt');
  assert.equal(firstTask.destination, '/文档');
  assert.equal(firstTask.noticeMessage, 'alpha');
  assert.equal(secondTask.fileName, 'b.txt');
  assert.equal(secondTask.destination, '/文档/资料');
  assert.equal(secondTask.noticeMessage, 'beta');
});
test('prepareUploadTaskForCompletion keeps a visible progress state before marking complete', () => {
  const pendingTask = createUploadTask(
    new File(['alpha'], 'a.txt', {type: 'text/plain'}),
    ['文档'],
    'task-3',
  );

  const finishingTask = prepareUploadTaskForCompletion(pendingTask);

  // Task stays in the uploading state, pinned just below 100%.
  assert.equal(finishingTask.status, 'uploading');
  assert.equal(finishingTask.progress, 99);
  assert.equal(finishingTask.speed, '即将完成...');
});

View File

@@ -1,3 +1,5 @@
import { getNextAvailableName } from './files-state';
// Lifecycle states a transfer-list task can be in.
export type UploadTaskStatus = 'uploading' | 'completed' | 'error';
export interface UploadTask {
@@ -18,6 +20,13 @@ export interface UploadMeasurement {
lastUpdatedAt: number;
}
// One file queued for upload together with the folder it should land in.
export interface PendingUploadEntry {
  // The browser File whose bytes will be uploaded.
  file: File;
  // Destination folder path segments; empty means the root ('/').
  pathParts: string[];
  // Whether the entry came from a plain file picker or a folder picker.
  source: 'file' | 'folder';
  // Optional user-facing notice, e.g. an automatic rename on a name conflict.
  noticeMessage?: string;
}
function getUploadType(file: File) {
const extension = file.name.includes('.') ? file.name.split('.').pop()?.toLowerCase() : '';
@@ -56,6 +65,18 @@ function splitFileName(fileName: string) {
};
}
// Normalizes a file's picker-relative path into clean path segments.
// Falls back to the bare file name when no webkitRelativePath is available.
function getRelativePathSegments(file: File) {
  let rawRelativePath = file.name;
  if ('webkitRelativePath' in file && typeof file.webkitRelativePath === 'string' && file.webkitRelativePath) {
    rawRelativePath = file.webkitRelativePath;
  }
  const segments: string[] = [];
  for (const rawSegment of rawRelativePath.replace(/\\/g, '/').split('/')) {
    const trimmedSegment = rawSegment.trim();
    if (trimmedSegment) {
      segments.push(trimmedSegment);
    }
  }
  return segments;
}
// Builds the server-side destination path; '/' when no segments are given.
export function getUploadDestination(pathParts: string[]) {
  if (pathParts.length === 0) {
    return '/';
  }
  return '/' + pathParts.join('/');
}
@@ -86,6 +107,67 @@ export function prepareUploadFile(file: File, usedNames: Set<string>) {
};
}
/**
 * Expands a directory-picker selection into one upload entry per file.
 *
 * Each File's relative path (via getRelativePathSegments) rebuilds the folder
 * structure under `currentPathParts`. If the picked folder's root name
 * collides with a name in `existingRootNames`, the whole root is renamed once
 * (e.g. "设计稿" -> "设计稿 (1)") and every file under it gets a notice message.
 *
 * @param files - files delivered by the directory input, in picker order.
 * @param currentPathParts - path segments of the folder currently open.
 * @param existingRootNames - names already present at the current path, used
 *   to detect root-folder conflicts.
 * @returns one PendingUploadEntry per input file, all tagged source 'folder'.
 */
export function prepareFolderUploadEntries(
  files: File[],
  currentPathParts: string[],
  existingRootNames: string[],
): PendingUploadEntry[] {
  // Names reserved at the current path; grows as renamed roots claim names.
  const rootReservedNames = new Set(existingRootNames);
  // original root name -> chosen (possibly identical) name, so every file
  // from the same picked folder lands under the same root.
  const renamedRootFolders = new Map<string, string>();
  // Per-destination set of file names already used by this batch, so
  // duplicate names inside one target folder get deconflicted.
  const usedNamesByDestination = new Map<string, Set<string>>();
  return files.map((file) => {
    const relativeSegments = getRelativePathSegments(file);
    // No usable path at all: upload straight into the current folder.
    if (relativeSegments.length === 0) {
      return {
        file,
        pathParts: [...currentPathParts],
        source: 'folder',
      };
    }
    let noticeMessage: string | undefined;
    // More than one segment means the file sits inside a picked folder whose
    // root name may clash with an existing item at the current path.
    if (relativeSegments.length > 1) {
      const originalRootFolder = relativeSegments[0];
      let renamedRootFolder = renamedRootFolders.get(originalRootFolder);
      if (!renamedRootFolder) {
        // First file seen from this root: pick a free name and reserve it.
        renamedRootFolder = getNextAvailableName(originalRootFolder, rootReservedNames);
        rootReservedNames.add(renamedRootFolder);
        renamedRootFolders.set(originalRootFolder, renamedRootFolder);
      }
      if (renamedRootFolder !== originalRootFolder) {
        relativeSegments[0] = renamedRootFolder;
        noticeMessage = `检测到同名文件夹,已自动重命名为 ${renamedRootFolder}`;
      }
    }
    // Destination = current path + every segment except the file name itself.
    const pathParts = [...currentPathParts, ...relativeSegments.slice(0, -1)];
    const destinationKey = getUploadDestination(pathParts);
    const usedNames = usedNamesByDestination.get(destinationKey) ?? new Set<string>();
    // Re-wrap the file so its name is just the final path segment, then let
    // prepareUploadFile resolve in-batch name conflicts at this destination.
    const preparedUpload = prepareUploadFile(
      new File([file], relativeSegments.at(-1) ?? file.name, {
        type: file.type,
        lastModified: file.lastModified,
      }),
      usedNames,
    );
    usedNames.add(preparedUpload.file.name);
    usedNamesByDestination.set(destinationKey, usedNames);
    return {
      file: preparedUpload.file,
      pathParts,
      source: 'folder',
      // A root-folder rename notice takes priority over a per-file one.
      noticeMessage: noticeMessage ?? preparedUpload.noticeMessage,
    };
  });
}
// True when the batch contains at least one folder-sourced entry; callers
// use this to switch the batch into sequential upload mode.
export function shouldUploadEntriesSequentially(entries: PendingUploadEntry[]) {
  for (const entry of entries) {
    if (entry.source === 'folder') {
      return true;
    }
  }
  return false;
}
export function createUploadTask(
file: File,
pathParts: string[],
@@ -104,6 +186,28 @@ export function createUploadTask(
};
}
export function createUploadTasks(entries: PendingUploadEntry[]) {
return entries.map((entry) =>
createUploadTask(entry.file, entry.pathParts, undefined, entry.noticeMessage),
);
}
// Seeds a throughput measurement: zero bytes transferred so far, with both
// timestamps anchored at the moment the upload started.
export function createUploadMeasurement(startedAt: number): UploadMeasurement {
  const measurement: UploadMeasurement = {
    lastLoaded: 0,
    startedAt,
    lastUpdatedAt: startedAt,
  };
  return measurement;
}
// Pins a task into a "nearly done" visual state: progress at least 99 and a
// placeholder speed label unless a meaningful speed is already displayed.
export function prepareUploadTaskForCompletion(task: UploadTask): UploadTask {
  const hasMeaningfulSpeed = Boolean(task.speed) && task.speed !== '等待上传...';
  return {
    ...task,
    progress: Math.max(99, task.progress),
    speed: hasMeaningfulSpeed ? task.speed : '即将完成...',
  };
}
export function formatTransferSpeed(bytesPerSecond: number) {
if (bytesPerSecond < 1024) {
return `${Math.round(bytesPerSecond)} B/s`;