From 83be780243b899da468ce595f43da0c28c06dea6 Mon Sep 17 00:00:00 2001 From: Rbanh Date: Sun, 30 Mar 2025 19:31:00 -0400 Subject: [PATCH] feat: Implement backend commands for plugin data management and enhance frontend synchronization with updated plugin interfaces --- .cursor/rules/development-workflow.mdc | 2 +- README.md | 29 + doc/PLATFORM_COMPATIBILITY.md | 57 + doc/Refactor_Checklist.md | 38 + doc/spiget_api_endpoints.md | 1138 +++++++++++++++ frontend_backend_sync_checklist.md | 34 + src-tauri/Cargo.lock | 704 +++++----- src-tauri/Cargo.toml | 22 +- src-tauri/src/commands/mod.rs | 2 + src-tauri/src/commands/plugin_commands.rs | 336 +++++ src-tauri/src/commands/scan_commands.rs | 14 + src-tauri/src/crawlers/github.rs | 229 +++ src-tauri/src/crawlers/hangar.rs | 211 +-- src-tauri/src/crawlers/mod.rs | 41 +- src-tauri/src/crawlers/spigotmc.rs | 371 +++++ src-tauri/src/lib.rs | 1241 ++++------------- src-tauri/src/lib.rs.bak | 840 ++--------- src-tauri/src/main.rs | 2 +- src-tauri/src/models/mod.rs | 7 + src-tauri/src/models/plugin.rs | 49 + src-tauri/src/models/repository.rs | 95 ++ src-tauri/src/models/server.rs | 42 + src-tauri/src/platform_matcher.rs | 117 ++ src-tauri/src/services/http/client.rs | 191 +++ src-tauri/src/services/http/mod.rs | 3 + src-tauri/src/services/mod.rs | 8 + .../src/services/plugin_scanner/file_utils.rs | 58 + .../plugin_scanner/metadata_extractor.rs | 219 +++ src-tauri/src/services/plugin_scanner/mod.rs | 7 + .../src/services/plugin_scanner/scanner.rs | 409 ++++++ src-tauri/src/services/update_manager/mod.rs | 7 + .../services/update_manager/plugin_updater.rs | 109 ++ .../services/update_manager/version_utils.rs | 60 + src/App.css | 116 ++ src/App.tsx | 661 ++++++++- 35 files changed, 5261 insertions(+), 2208 deletions(-) create mode 100644 doc/PLATFORM_COMPATIBILITY.md create mode 100644 doc/Refactor_Checklist.md create mode 100644 doc/spiget_api_endpoints.md create mode 100644 frontend_backend_sync_checklist.md create mode 100644 src-tauri/src/commands/mod.rs create mode 100644 src-tauri/src/commands/plugin_commands.rs create mode 100644 src-tauri/src/commands/scan_commands.rs create mode 100644 src-tauri/src/crawlers/github.rs create mode 100644 src-tauri/src/crawlers/spigotmc.rs create mode 100644 src-tauri/src/models/mod.rs create mode 100644 src-tauri/src/models/plugin.rs create mode 100644 src-tauri/src/models/repository.rs create mode 100644 src-tauri/src/models/server.rs create mode 100644 src-tauri/src/platform_matcher.rs create mode 100644 src-tauri/src/services/http/client.rs create mode 100644 src-tauri/src/services/http/mod.rs create mode 100644 src-tauri/src/services/mod.rs create mode 100644 src-tauri/src/services/plugin_scanner/file_utils.rs create mode 100644 src-tauri/src/services/plugin_scanner/metadata_extractor.rs create mode 100644 src-tauri/src/services/plugin_scanner/mod.rs create mode 100644 src-tauri/src/services/plugin_scanner/scanner.rs create mode 100644 src-tauri/src/services/update_manager/mod.rs create mode 100644 src-tauri/src/services/update_manager/plugin_updater.rs create mode 100644 src-tauri/src/services/update_manager/version_utils.rs diff --git a/.cursor/rules/development-workflow.mdc b/.cursor/rules/development-workflow.mdc index 48e8fa5..c7a019a 100644 --- a/.cursor/rules/development-workflow.mdc +++ b/.cursor/rules/development-workflow.mdc @@ -37,7 +37,7 @@ PlugSnatcher/ ### 1. Feature Planning -1. **Consult Roadmap**: Check the `ROADMAP.md` file to identify the next feature to implement +1. 
**Consult Roadmap**: ALWAYS Check the [ROADMAP.md](mdc:ROADMAP.md) file to identify the next feature to implement 2. **Define Scope**: Clearly define what the feature will do and identify any dependencies 3. **Update Roadmap**: Mark features as "In Progress" when starting work diff --git a/README.md b/README.md index fcd30cd..3242d4f 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,8 @@ Description Authors +Website (from plugin.yml) + API version Source URL (if embedded or in plugin.yml) @@ -202,3 +204,30 @@ No consistent plugin metadata across sites (we'll brute force it) API rate limits (user token support for GitHub) +## Configuration + +### GitHub API Token (Optional) + +To avoid GitHub API rate limits when checking for updates for plugins hosted on GitHub, you can provide a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-tokens). + +1. **Create a PAT:** Go to your GitHub settings > Developer settings > Personal access tokens > Tokens (classic). Generate a new token (classic). No specific scopes are required for reading public repository information. +2. **Set Environment Variable:** Set the `GITHUB_API_TOKEN` environment variable to the value of your generated token before running PlugSnatcher. + + * **Windows (PowerShell):** + ```powershell + $env:GITHUB_API_TOKEN="your_github_pat_here" + npm run tauri dev + ``` + * **Windows (Command Prompt):** + ```cmd + set GITHUB_API_TOKEN=your_github_pat_here + npm run tauri dev + ``` + * **Linux/macOS:** + ```bash + export GITHUB_API_TOKEN="your_github_pat_here" + npm run tauri dev + ``` + +If this environment variable is not set, PlugSnatcher will still attempt to check GitHub, but you may encounter `403 Forbidden` errors if you check many plugins frequently. + diff --git a/doc/PLATFORM_COMPATIBILITY.md b/doc/PLATFORM_COMPATIBILITY.md new file mode 100644 index 0000000..1567283 --- /dev/null +++ b/doc/PLATFORM_COMPATIBILITY.md @@ -0,0 +1,57 @@ +# Server Platform Compatibility in PlugSnatcher + +## Overview + +PlugSnatcher now has enhanced platform compatibility checking to ensure that plugins you download and update are compatible with your specific server type. This feature prevents potential issues like downloading a NeoForge version of a plugin for a Paper server or vice versa. + +## Supported Server Types + +PlugSnatcher automatically detects and supports the following server types: + +- **Paper** - Can use plugins built for Paper, Spigot, and Bukkit +- **Spigot** - Can use plugins built for Spigot and Bukkit +- **Bukkit** - Can use plugins built for Bukkit only +- **Forge** - Can use plugins built for Forge +- **Fabric** - Can use plugins built for Fabric +- **Velocity** - Can use plugins built for Velocity +- **BungeeCord** - Can use plugins built for BungeeCord +- **Waterfall** - Can use plugins built for Waterfall and BungeeCord + +## How It Works + +1. **Server Type Detection**: When you scan a server directory, PlugSnatcher automatically determines the server type based on file patterns and configuration files. + +2. **Platform-Aware Updates**: When checking for updates, PlugSnatcher filters plugin versions based on your server type. For example, if you're running a Paper server, PlugSnatcher will prioritize Paper/Spigot/Bukkit versions and avoid Forge/Fabric versions. + +3. 
**Compatible Version Selection**: When multiple versions of a plugin are available (e.g., Paper, Forge, and Fabric versions), PlugSnatcher automatically selects the one compatible with your server. + +4. **Warning System**: If no compatible version is found, PlugSnatcher will warn you before allowing you to proceed with an update that might not work on your server. + +## Examples + +- **Plugin with multiple versions**: For plugins like LuckPerms that have versions for various platforms (Paper, Forge, NeoForge, Fabric), PlugSnatcher will automatically select the version that matches your server type. + +- **Universal plugins**: Some plugins work across multiple platforms without specific builds. In these cases, PlugSnatcher will consider them compatible with all server types. + +## Technical Details + +This feature primarily affects plugins downloaded from repositories that provide platform information, such as: + +- **Modrinth**: Provides detailed platform compatibility information through "loaders" metadata +- **Hangar**: Includes platform tags for each plugin version +- **GitHub**: May or may not include platform information depending on the repository structure + +For repositories without explicit platform tagging (like SpigotMC), PlugSnatcher will use the general repository focus (e.g., SpigotMC is for Bukkit/Spigot/Paper servers) to determine compatibility. + +## Current Limitations + +- Some repositories don't provide structured platform compatibility information +- Plugin naming conventions aren't always consistent, making it difficult to determine platform compatibility from filenames alone +- Some custom/niche server types may not be properly detected or matched + +## Future Improvements + +- Allow manual override of detected server type +- Add compatibility visualization in the UI +- Improve detection of platform from plugin filename patterns +- Support for more server types and platforms \ No newline at end of file diff --git a/doc/Refactor_Checklist.md b/doc/Refactor_Checklist.md new file mode 100644 index 0000000..3da712c --- /dev/null +++ b/doc/Refactor_Checklist.md @@ -0,0 +1,38 @@ +# Frontend/Backend Synchronization Checklist + +This checklist tracks the necessary changes to ensure `src/App.tsx` works correctly with the refactored Rust backend. + +## Backend Changes (`src-tauri/`) + +- [ ] **Create `load_plugin_data` Command:** + - Implement a command `load_plugin_data(app_handle: AppHandle, server_path: String) -> Result, String>`. + - Use `get_plugin_data_path` from `scanner.rs` to find the correct `plugins.json`. + - Read and deserialize `plugins.json`. + - Return `Ok(plugins)` or an appropriate `Err(String)`. + - Register the command in `lib.rs`. +- [ ] **Create `save_plugin_data` Command:** + - Implement a command `save_plugin_data(app_handle: AppHandle, plugins: Vec, server_path: String) -> Result<(), String>`. + - Use `get_plugin_data_path` from `scanner.rs`. + - Serialize the `plugins` vector to JSON. + - Write the JSON to `plugins.json`, creating the directory if needed. + - Return `Ok(())` or an appropriate `Err(String)`. + - Register the command in `lib.rs`. +- [ ] **Align Bulk Update Events/Logic:** + - In `update_checker.rs` (`check_for_plugin_updates`), rename the emitted event from `"update_check_started"` to `"bulk_update_start"`. + - In `update_checker.rs` (`check_for_plugin_updates`), remove the `app_handle.emit("update_check_completed", ())` call. The result should be handled via the command's return value. 
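
Below is a minimal sketch of the two persistence commands described in the checklist above. It assumes a serde-serializable `Plugin` struct and uses a hypothetical stand-in for the real `get_plugin_data_path` helper that the checklist says lives in `scanner.rs`; the stripped generics in the checklist text are read as `Result<Vec<Plugin>, String>` and `Result<(), String>`. This is an illustration under those assumptions, not the repository's actual implementation.

```rust
use std::{fs, path::PathBuf};
use serde::{Deserialize, Serialize};
use tauri::AppHandle;

// Sketch of the Plugin model; the real struct in src-tauri/src/models/plugin.rs
// has more fields (version, authors, file path, etc.).
#[derive(Serialize, Deserialize)]
pub struct Plugin {
    pub name: String,
    pub version: String,
}

// Hypothetical stand-in for the helper in scanner.rs that resolves a server
// directory to its plugins.json location.
fn get_plugin_data_path(_app: &AppHandle, server_path: &str) -> Result<PathBuf, String> {
    Ok(PathBuf::from(server_path)
        .join(".plugsnatcher")
        .join("plugins.json"))
}

#[tauri::command]
pub fn load_plugin_data(app_handle: AppHandle, server_path: String) -> Result<Vec<Plugin>, String> {
    let path = get_plugin_data_path(&app_handle, &server_path)?;
    // No data file yet: treat as an empty plugin list rather than an error.
    if !path.exists() {
        return Ok(Vec::new());
    }
    let json = fs::read_to_string(&path).map_err(|e| e.to_string())?;
    serde_json::from_str(&json).map_err(|e| e.to_string())
}

#[tauri::command]
pub fn save_plugin_data(
    app_handle: AppHandle,
    plugins: Vec<Plugin>,
    server_path: String,
) -> Result<(), String> {
    let path = get_plugin_data_path(&app_handle, &server_path)?;
    // Create the data directory on first save, as the checklist requires.
    if let Some(dir) = path.parent() {
        fs::create_dir_all(dir).map_err(|e| e.to_string())?;
    }
    let json = serde_json::to_string_pretty(&plugins).map_err(|e| e.to_string())?;
    fs::write(&path, json).map_err(|e| e.to_string())
}
```

Both commands would still need to be registered in `lib.rs` (e.g. via `tauri::generate_handler![load_plugin_data, save_plugin_data]`) for the frontend `invoke` calls to reach them.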
+ +## Frontend Changes (`src/App.tsx`) + +- [ ] **Align `Plugin` Interface Nullability:** + - Ensure `depend`, `soft_depend`, and `load_before` fields consistently use `string[] | null`. +- [ ] **Rename Command Invokes:** + - Change `invoke("check_single_plugin_update", ...)` to `invoke("check_single_plugin_update_command", ...)`. + - Change `invoke("set_plugin_repository_source", ...)` to `invoke("set_plugin_repository", ...)`. +- [ ] **Refactor `checkForUpdates` Result Handling:** + - Modify the `checkForUpdates` async function to `await` the `invoke("check_plugin_updates", ...)` call. + - Use a `try...catch` block or `.then().catch()` to handle the `Result, String>`. + - On success (`Ok(updatedPlugins)`), call `setPlugins(updatedPlugins)` and clear errors/loading states. + - On error (`Err(error)`), call `setUpdateError(error)` and clear loading states. +- [ ] **Adjust/Remove Event Listeners:** + - In `useEffect`, rename the listener for `"update_check_started"` to `"bulk_update_start"`. + - In `useEffect`, remove the listeners for `"bulk_update_complete"` and `"bulk_update_error"`. \ No newline at end of file diff --git a/doc/spiget_api_endpoints.md b/doc/spiget_api_endpoints.md new file mode 100644 index 0000000..b43d143 --- /dev/null +++ b/doc/spiget_api_endpoints.md @@ -0,0 +1,1138 @@ +swagger: '2.0' +info: + version: '2.0' + title: Spiget + license: + name: Apache License 2.0 + url: 'https://github.com/SpiGetOrg/Spiget/blob/master/LICENSE.md' + contact: + name: inventivetalent + url: 'https://inventivetalent.org/contact' + email: info@inventivetalent.org + description: | + Documentation for Spiget + Spiget is an API to quickly and easily get information about Spigot plugins, resources and authors +schemes: + - https + - http +host: api.spiget.org +basePath: /v2 +produces: + - application/json +consumes: + - application/json +paths: + /status: + get: + tags: + - status + summary: API Status + description: | + Get the API status + responses: + '200': + description: OK + schema: + properties: + status: + description: Status overview + type: object + properties: + fetch: + type: object + properties: + start: + description: Timestamp of the last fetcher start + type: number + format: long + end: + description: Timestamp of the last fetcher end (0 if currently active) + type: number + format: long + active: + description: Whether the fetcher is currently active + type: boolean + page: + type: object + properties: + amount: + description: Amount of pages to load + type: number + format: integer + index: + description: Current page index + type: number + format: integer + item: + type: object + properties: + index: + description: Resource index on the current page + type: number + format: integer + stats: + description: API stats + type: object + properties: + resources: + description: Resource count + type: number + format: integer + authors: + description: Author count + type: number + format: integer + categories: + description: Category count + type: number + format: integer + resource_updates: + description: Resource update count + type: number + format: integer + resource_versions: + description: Resource version count + type: number + format: integer + /resources: + get: + tags: + - resources + summary: Resource List + description: | + Get a list of available resources (premium and free) + parameters: + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: 
'#/responses/resourceArray' + /resources/premium: + get: + tags: + - resources + summary: Premium Resource List + description: | + Get a list of available premium resources + parameters: + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/resourceArray' + /resources/free: + get: + tags: + - resources + summary: Free Resource List + description: | + Get a list of available free resources + parameters: + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/resourceArray' + '/resources/{resource}': + get: + tags: + - resources + summary: Resource Details + description: | + Get a resource by its ID + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + responses: + '200': + description: OK + schema: + $ref: '#/definitions/Resource' + '404': + $ref: '#/responses/notFound' + '/resources/{resource}/author': + get: + tags: + - resources + summary: Resource Author + description: | + Get the resource author + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + responses: + '200': + description: OK + schema: + $ref: '#/definitions/Author' + '/resources/{resource}/download': + get: + tags: + - resources + summary: Resource Download + description: | + Download a resource + This either redirects to spiget's CDN server (cdn.spiget.org) for a direct download of files hosted on spigotmc.org or to the URL of externally hosted resources + The `external` field of a resource should be checked before downloading, to not receive any unexpected data + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + responses: + '302': + description: | + File found + Redirect to the file direct download OR the url of externally hosted resources + headers: + X-Spiget-File-Source: + type: string + description: File source, either `cdn` or `external` + '404': + description: Resource not found / File not Found + '/resources/{resource}/versions': + get: + tags: + - resources + summary: Resource Versions + description: | + Get versions of a resource + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/versionArray' + '/resources/{resource}/versions/{version}': + get: + tags: + - resources + summary: Resource Version + description: | + Get a specific resource version by its ID + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + - name: version + type: number + description: Version ID + in: path + required: true + responses: + '200': + description: OK + schema: + $ref: '#/definitions/ResourceVersion' + '404': + $ref: '#/responses/notFound' + '/resources/{resource}/versions/latest': + get: + tags: + - resources + summary: Latest Resource Version + description: | + Get the latest resource version + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + responses: + '200': + description: OK + schema: + $ref: '#/definitions/ResourceVersion' + '404': + $ref: '#/responses/notFound' + 
'/resources/{resource}/versions/{version}/download': + get: + tags: + - resources + summary: Resource Version Download + description: | + Download a specific resource version + + Note: This only redirects to the stored download location and might not download a file + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + - name: version + type: string + description: Version ID or 'latest' + in: path + required: true + responses: + '302': + description: Redirect to the download location + headers: + Location: + description: Download location + type: string + '/resources/{resource}/versions/{version}/download/proxy': + get: + tags: + - resources + summary: Resource Version Proxy Download + description: | + Download a specific resource version + Attempts to proxy a version download from spigotmc.org + Note: this endpoint has a pretty strict rate-limit - cache downloaded versions locally and avoid repeated requests. + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + - name: version + type: string + description: Version ID or 'latest' + in: path + required: true + responses: + '302': + description: Version download + '/resources/{resource}/updates': + get: + tags: + - resources + summary: Resource Updates + description: | + Get updates of a resource + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/updateArray' + '/resources/{resource}/updates/latest': + get: + tags: + - resources + summary: Latest Resource Update + description: | + Get the latest resource update + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + description: OK + schema: + $ref: '#/definitions/ResourceUpdate' + '404': + $ref: '#/responses/notFound' + '/resources/{resource}/reviews': + get: + tags: + - resources + summary: Resource Reviews + description: | + Get reviews of a resource + parameters: + - name: resource + type: number + description: Resource ID + in: path + required: true + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/reviewArray' + /resources/new: + get: + tags: + - resources + summary: New Resources + description: | + Get all new resources + parameters: + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/resourceArray' + '/resources/for/{version}': + get: + tags: + - resources + summary: Resources for Versions + description: | + Get resources for the specified version(s) + parameters: + - name: version + type: string + description: 'Version(s), separated by commas' + in: path + required: true + - name: method + type: string + enum: + - any + - all + description: Method to use to check for versions + in: query + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: 
'#/responses/resourceArray' + /authors: + get: + tags: + - authors + summary: Author List + description: | + Get a list of available authors + Note: This only includes members involved with resources, either being their author or having reviewed a resource + parameters: + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/authorArray' + '/authors/{author}': + get: + tags: + - authors + summary: Author Details + description: | + Get details about an author + parameters: + - name: author + type: number + description: Author ID + in: path + required: true + responses: + '200': + description: OK + schema: + $ref: '#/definitions/Author' + '/authors/{author}/resources': + get: + tags: + - authors + summary: Author resources + description: | + Get an author's resources + parameters: + - name: author + type: number + description: Author ID + in: path + required: true + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/resourceArray' + '/authors/{author}/reviews': + get: + tags: + - authors + summary: Author reviews + description: | + Get an author's reviews left on resources + parameters: + - name: author + type: number + description: Author ID + in: path + required: true + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/reviewArray' + /categories: + get: + tags: + - categories + summary: Category List + description: | + Get a list of categories + parameters: + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/categoryArray' + '/categories/{category}': + get: + tags: + - categories + summary: Category Details + description: | + Get details about a category + parameters: + - name: category + type: number + description: Category ID + in: path + required: true + responses: + '200': + description: OK + schema: + $ref: '#/definitions/Category' + '/categories/{category}/resources': + get: + tags: + - categories + summary: Category Resources + description: | + Get the resources in a category + parameters: + - name: category + type: number + description: Category ID + in: path + required: true + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/resourceArray' + '/search/resources/{query}': + get: + tags: + - search + - resources + summary: Resource Search + description: | + Search resources + parameters: + - name: query + type: string + description: Search query + in: path + required: true + - name: field + type: string + description: Field to search in + in: query + enum: + - name + - tag + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/resourceArray' + '/search/authors/{query}': + get: + tags: + - search + - authors + summary: Author Search + description: | + Search authors + parameters: + - name: query + type: string + description: Search query + in: path + required: true + - name: field + type: string + 
description: Field to search in + in: query + enum: + - name + - $ref: '#/parameters/sizeParam' + - $ref: '#/parameters/pageParam' + - $ref: '#/parameters/sortParam' + - $ref: '#/parameters/fieldsParam' + responses: + '200': + $ref: '#/responses/authorArray' + /webhook/events: + get: + tags: + - webhook + summary: Webhook events + description: | + Get a list of available events + responses: + '200': + description: OK + schema: + type: object + properties: + events: + description: Events + type: array + items: + type: string + /webhook/register: + post: + tags: + - webhook + summary: Register Webhook + description: | + Register a new Webhook + + Use this form to easily register a new one: https://spiget.org/webhook/ + consumes: + - application/x-www-form-urlencoded + parameters: + - name: url + type: string + description: URL to call + in: formData + required: true + - name: events + type: array + description: Events to register + items: + type: string + in: formData + required: true + responses: + '200': + description: OK + schema: + type: object + properties: + id: + type: string + description: ID of the registered Webhook + secret: + type: string + description: Registration secret + '400': + description: Invalid request + schema: + type: object + properties: + error: + type: string + description: Error message + '/webhook/status/{id}': + get: + tags: + - webhook + summary: Webhook Status + description: | + Get the status of a Webhook + parameters: + - name: id + in: path + type: string + description: ID of the Webhook + required: true + responses: + '200': + description: OK + schema: + type: object + properties: + status: + type: number + format: integer + description: Status id + failedConnections: + type: number + format: integer + description: Amount of failed connections + '/webhook/delete/{id}/{secret}': + delete: + tags: + - webhook + summary: Delete Webhook + description: | + Delete a Webhook + parameters: + - name: id + in: path + type: string + description: Webhook ID + required: true + - name: secret + in: path + type: string + description: Webhook Secret + required: true + responses: + '200': + description: OK + '404': + $ref: '#/responses/notFound' +definitions: + Resource: + title: Resource + type: object + description: Main Resource model. + properties: + id: + type: number + format: integer + description: Id of the Resource. + name: + type: string + description: Name of the Resource. + tag: + type: string + description: Tag line of the Resource. + contributors: + type: string + description: Contributors of the Resource. + likes: + type: number + format: integer + description: Number of likes. + file: + $ref: '#/definitions/ResourceFile' + testedVersions: + type: array + description: List with Tested Versions of the Resource. + items: + type: string + links: + type: object + description: Map of external and custom links in the resource description. + rating: + $ref: '#/definitions/ResourceRating' + releaseDate: + type: number + format: long + description: Release timestamp. + updateDate: + type: number + format: long + description: Update timestamp. + downloads: + type: number + format: integer + description: Amount of downloads. + external: + type: boolean + description: Whether this resource is external (not hosted on SpigotMC.org). + icon: + $ref: '#/definitions/Icon' + premium: + type: boolean + description: Whether the resource is a premium resource. + price: + type: number + format: double + description: Price of the resource (only if the resource is premium). 
+ currency: + type: string + description: Price Currency of the resource (only if the resource is premium). + author: + $ref: '#/definitions/IdReference' + category: + $ref: '#/definitions/IdReference' + version: + $ref: '#/definitions/IdAndUUIDReference' + reviews: + description: List of review IDs on this resource - only present if directly requesting the resource. + type: array + items: + $ref: '#/definitions/IdReference' + versions: + description: List of version IDs of this resource - only present if directly requesting the resource. + type: array + items: + $ref: '#/definitions/IdReference' + updates: + description: List of update IDs of this resource - only present if directly requesting the resource. + type: array + items: + $ref: '#/definitions/IdReference' + description: + description: Base64-encoded description HTML. + documentation: + description: Base64-encoded documentation HTML (from the Documentation tab). + sourceCodeLink: + type: string + description: Source Code link of the Resource. + donationLink: + type: string + description: Donation link of the Resource. + ResourceFile: + title: Resource File + description: Model of a Resource File. + type: object + properties: + type: + type: string + description: 'File extension (.jar, .zip, .sk) or "external".' + size: + type: number + format: float + description: File size. + sizeUnit: + type: string + description: 'File size-unit (KB, MB, GB).' + url: + type: string + description: Relative URL to the file. + externalUrl: + type: string + description: URL of external downloads. + Icon: + title: Resource Icon / Author Avatar + description: Model for Recource Icon or Author Avatar. + type: object + properties: + url: + type: string + description: Relative URL to the image. + data: + type: string + description: Base64-Encoded image data. + ResourceVersion: + title: Resource Version + description: Model for a Resource Version. + type: object + properties: + id: + type: number + format: integer + description: Version ID. + deprecated: true + uuid: + type: string + description: Version UUID. + name: + type: string + description: Version name (e.g. v1.0). + releaseDate: + type: number + format: long + description: Timestamp of the version's release date. + downloads: + type: number + format: integer + description: Amount of downloads. + rating: + $ref: '#/definitions/ResourceRating' + ResourceUpdate: + title: Resource Update + description: Model for a Resource Update. + type: object + properties: + id: + type: number + format: integer + description: Update ID. + resource: + type: number + format: integer + description: Resource ID. + title: + type: string + description: Update title. + description: + type: string + description: Base64-Encoded description of the update. + date: + type: number + format: long + description: Update timestamp. + likes: + type: number + format: integer + description: Amount of likes for this update. + ResourceRating: + title: Resource Rating + description: Model for a Resource Rating. + type: object + properties: + count: + type: number + format: integer + description: Number of ratings. + average: + type: number + format: float + description: Average rating. + ResourceReview: + title: Resource Review + description: Model for a Resource Review. + type: object + properties: + author: + $ref: '#/definitions/Author' + rating: + $ref: '#/definitions/ResourceRating' + message: + type: string + description: Base64-Encoded Review message. 
+ responseMessage: + type: string + description: Base64-Encoded message the author responded with. + version: + type: string + description: Version name the review was posted for. + date: + type: number + format: long + description: Review timestamp. + Author: + title: Author + description: Model for an Author. + type: object + properties: + id: + type: number + format: integer + description: Id of the author. + name: + type: string + description: Author name. + icon: + $ref: '#/definitions/Icon' + Category: + title: Category + description: Model for a Category. + type: object + properties: + id: + type: number + format: integer + description: Category ID. + name: + type: string + description: Category name. + IdReference: + title: Id-Reference to another object + type: object + properties: + id: + type: number + format: integer + description: ID of the Another Object. + IdAndUUIDReference: + title: Id and UUID Reference to another object + type: object + properties: + id: + type: number + format: integer + description: ID of the Another Object. + uuid: + type: string + description: UUID of the Another Object. +parameters: + sizeParam: + name: size + in: query + description: Size of the returned array + type: number + format: integer + pageParam: + name: page + in: query + description: Page index + type: number + format: integer + sortParam: + name: sort + in: query + description: Field to sort by. (Use a +/- prefix for ascending/descending order) + type: string + fieldsParam: + name: fields + in: query + description: 'Fields to return, separated by commas' + type: string +responses: + success: + description: Successful response + notFound: + description: Requested object not found + resourceArray: + description: Resource Array + schema: + type: array + items: + $ref: '#/definitions/Resource' + headers: + X-Page-Sort: + description: Field the elements are sorted by + type: string + X-Page-Order: + description: Sort order + type: integer + X-Page-Size: + description: Number of elements on the current page + type: integer + X-Page-Index: + description: Current page index + type: integer + X-Page-Count: + description: Total amount of pages + type: integer + authorArray: + description: Author Array + schema: + type: array + items: + $ref: '#/definitions/Author' + headers: + X-Page-Sort: + description: Field the elements are sorted by + type: string + X-Page-Order: + description: Sort order + type: integer + X-Page-Size: + description: Number of elements on the current page + type: integer + X-Page-Index: + description: Current page index + type: integer + X-Page-Count: + description: Total amount of pages + type: integer + categoryArray: + description: Category Array + schema: + type: array + items: + $ref: '#/definitions/Category' + headers: + X-Page-Sort: + description: Field the elements are sorted by + type: string + X-Page-Order: + description: Sort order + type: integer + X-Page-Size: + description: Number of elements on the current page + type: integer + X-Page-Index: + description: Current page index + type: integer + X-Page-Count: + description: Total amount of pages + type: integer + versionArray: + description: Version Array + schema: + type: array + items: + $ref: '#/definitions/ResourceVersion' + headers: + X-Page-Sort: + description: Field the elements are sorted by + type: string + X-Page-Order: + description: Sort order + type: integer + X-Page-Size: + description: Number of elements on the current page + type: integer + X-Page-Index: + description: Current page index + type: 
integer + X-Page-Count: + description: Total amount of pages + type: integer + updateArray: + description: Update Array + schema: + type: array + items: + $ref: '#/definitions/ResourceUpdate' + headers: + X-Page-Sort: + description: Field the elements are sorted by + type: string + X-Page-Order: + description: Sort order + type: integer + X-Page-Size: + description: Number of elements on the current page + type: integer + X-Page-Index: + description: Current page index + type: integer + X-Page-Count: + description: Total amount of pages + type: integer + reviewArray: + description: Review Array + schema: + type: array + items: + $ref: '#/definitions/ResourceReview' + headers: + X-Page-Sort: + description: Field the elements are sorted by + type: string + X-Page-Order: + description: Sort order + type: integer + X-Page-Size: + description: Number of elements on the current page + type: integer + X-Page-Index: + description: Current page index + type: integer + X-Page-Count: + description: Total amount of pages + type: integer \ No newline at end of file diff --git a/frontend_backend_sync_checklist.md b/frontend_backend_sync_checklist.md new file mode 100644 index 0000000..eb81760 --- /dev/null +++ b/frontend_backend_sync_checklist.md @@ -0,0 +1,34 @@ +## Backend Changes (`src-tauri/`) + +- [x] **Create `load_plugin_data` Command:** + - Implement a command `load_plugin_data(app_handle: AppHandle, server_path: String) -> Result, String>`. + - Use `get_plugin_data_path` from `scanner.rs` to find the correct `plugins.json`. + - Read and deserialize `plugins.json`. + - Return `Ok(plugins)` or an appropriate `Err(String)`. + - Register the command in `lib.rs`. +- [x] **Create `save_plugin_data` Command:** + - Implement a command `save_plugin_data(app_handle: AppHandle, plugins: Vec, server_path: String) -> Result<(), String>`. + - Use `get_plugin_data_path` from `scanner.rs`. + - Serialize the `plugins` vector to JSON. + - Write the JSON to `plugins.json`, creating the directory if needed. + - Return `Ok(())` or an appropriate `Err(String)`. + - Register the command in `lib.rs`. +- [x] **Align Bulk Update Events/Logic:** + - In `update_checker.rs` (`check_for_plugin_updates`), rename the emitted event from `"update_check_started"` to `"bulk_update_start"`. + - In `update_checker.rs` (`check_for_plugin_updates`), remove the `app_handle.emit("update_check_completed", ())` call. The result should be handled via the command's return value. + +## Frontend Changes (`src/App.tsx`) + +- [x] **Align `Plugin` Interface Nullability:** + - Ensure `depend`, `soft_depend`, and `load_before` fields consistently use `string[] | null`. +- [x] **Rename Command Invokes:** + - Change `invoke("check_single_plugin_update", ...)` to `invoke("check_single_plugin_update_command", ...)`. + - Change `invoke("set_plugin_repository_source", ...)` to `invoke("set_plugin_repository", ...)`. +- [x] **Refactor `checkForUpdates` Result Handling:** + - Modify the `checkForUpdates` async function to `await` the `invoke("check_plugin_updates", ...)` call. + - Use a `try...catch` block or `.then().catch()` to handle the `Result, String>`. + - On success (`Ok(updatedPlugins)`), call `setPlugins(updatedPlugins)` and clear errors/loading states. + - On error (`Err(error)`), call `setUpdateError(error)` and clear loading states. +- [x] **Adjust/Remove Event Listeners:** + - In `useEffect`, rename the listener for `"update_check_started"` to `"bulk_update_start"`. 
+ - In `useEffect`, remove the listeners for `"bulk_update_complete"` and `"bulk_update_error"`. \ No newline at end of file diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index 7f3ee63..88024ac 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -28,6 +28,18 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy 0.7.35", +] + [[package]] name = "aho-corasick" version = "1.1.3" @@ -52,6 +64,12 @@ dependencies = [ "alloc-no-stdlib", ] +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + [[package]] name = "android-tzdata" version = "0.1.1" @@ -97,7 +115,7 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "435a87a52755b8f27fcf321ac4f04b2802e337c8c4872923137471ec39c37532" dependencies = [ - "event-listener", + "event-listener 5.4.0", "event-listener-strategy", "futures-core", "pin-project-lite", @@ -164,11 +182,20 @@ version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" dependencies = [ - "event-listener", + "event-listener 5.4.0", "event-listener-strategy", "pin-project-lite", ] +[[package]] +name = "async-mutex" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e" +dependencies = [ + "event-listener 2.5.3", +] + [[package]] name = "async-process" version = "2.3.0" @@ -182,7 +209,7 @@ dependencies = [ "async-task", "blocking", "cfg-if", - "event-listener", + "event-listener 5.4.0", "futures-lite", "rustix 0.38.44", "tracing", @@ -425,6 +452,42 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "cached" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8466736fe5dbcaf8b8ee24f9bbefe43c884dc3e9ff7178da70f55bffca1133c" +dependencies = [ + "ahash", + "async-trait", + "cached_proc_macro", + "cached_proc_macro_types", + "futures", + "hashbrown 0.14.5", + "instant", + "once_cell", + "thiserror 1.0.69", + "tokio", +] + +[[package]] +name = "cached_proc_macro" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "575f32e012222055211b70f5b0601f951f84523410a0e65c81f2744a6042450d" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "cached_proc_macro_types" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0" + [[package]] name = "cairo-rs" version = "0.18.5" @@ -606,16 +669,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "core-foundation" version = "0.10.0" @@ -639,9 +692,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"fa95a34622365fa5bbf40b20b75dba8dfa8c94c734aea8ac9a5ca38af14316f1" dependencies = [ "bitflags 2.9.0", - "core-foundation 0.10.0", + "core-foundation", "core-graphics-types", - "foreign-types 0.5.0", + "foreign-types", "libc", ] @@ -652,7 +705,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d44a101f213f6c4cdc1853d4b78aef6db6bdfa3468798cc1d9912f4735013eb" dependencies = [ "bitflags 2.9.0", - "core-foundation 0.10.0", + "core-foundation", "libc", ] @@ -925,7 +978,7 @@ dependencies = [ "rustc_version", "toml", "vswhom", - "winreg 0.52.0", + "winreg", ] [[package]] @@ -934,15 +987,6 @@ version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ef6b89e5b37196644d8796de5268852ff179b44e96276cf4290264843743bb7" -[[package]] -name = "encoding_rs" -version = "0.8.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" -dependencies = [ - "cfg-if", -] - [[package]] name = "endi" version = "1.1.0" @@ -996,6 +1040,12 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + [[package]] name = "event-listener" version = "5.4.0" @@ -1013,7 +1063,7 @@ version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" dependencies = [ - "event-listener", + "event-listener 5.4.0", "pin-project-lite", ] @@ -1058,15 +1108,6 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared 0.1.1", -] - [[package]] name = "foreign-types" version = "0.5.0" @@ -1074,7 +1115,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d737d9aa519fb7b749cbc3b962edcf310a8dd1f4b67c91c4f83975dbdd17d965" dependencies = [ "foreign-types-macros", - "foreign-types-shared 0.3.1", + "foreign-types-shared", ] [[package]] @@ -1088,12 +1129,6 @@ dependencies = [ "syn 2.0.100", ] -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - [[package]] name = "foreign-types-shared" version = "0.3.1" @@ -1119,6 +1154,21 @@ dependencies = [ "new_debug_unreachable", ] +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.31" @@ -1126,6 +1176,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", + "futures-sink", ] [[package]] @@ -1193,6 +1244,7 @@ version = "0.3.31" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ + "futures-channel", "futures-core", "futures-io", "futures-macro", @@ -1340,8 +1392,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -1351,9 +1405,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" dependencies = [ "cfg-if", + "js-sys", "libc", "r-efi", "wasi 0.14.2+wasi-0.2.4", + "wasm-bindgen", ] [[package]] @@ -1510,31 +1566,22 @@ dependencies = [ "syn 2.0.100", ] -[[package]] -name = "h2" -version = "0.3.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap 2.8.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] + [[package]] name = "hashbrown" version = "0.15.2" @@ -1588,17 +1635,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "http" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa 1.0.15", -] - [[package]] name = "http" version = "1.3.1" @@ -1610,17 +1646,6 @@ dependencies = [ "itoa 1.0.15", ] -[[package]] -name = "http-body" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" -dependencies = [ - "bytes", - "http 0.2.12", - "pin-project-lite", -] - [[package]] name = "http-body" version = "1.0.1" @@ -1628,7 +1653,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.3.1", + "http", ] [[package]] @@ -1639,8 +1664,8 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", - "http 1.3.1", - "http-body 1.0.1", + "http", + "http-body", "pin-project-lite", ] @@ -1650,36 +1675,6 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" -[[package]] -name = "httpdate" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" - -[[package]] -name = "hyper" -version = "0.14.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - 
"futures-util", - "h2", - "http 0.2.12", - "http-body 0.4.6", - "httparse", - "httpdate", - "itoa 1.0.15", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", - "want", -] - [[package]] name = "hyper" version = "1.6.0" @@ -1689,8 +1684,8 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.3.1", - "http-body 1.0.1", + "http", + "http-body", "httparse", "itoa 1.0.15", "pin-project-lite", @@ -1700,16 +1695,21 @@ dependencies = [ ] [[package]] -name = "hyper-tls" -version = "0.5.0" +name = "hyper-rustls" +version = "0.27.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" dependencies = [ - "bytes", - "hyper 0.14.32", - "native-tls", + "futures-util", + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", "tokio", - "tokio-native-tls", + "tokio-rustls", + "tower-service", + "webpki-roots", ] [[package]] @@ -1721,9 +1721,9 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "hyper 1.6.0", + "http", + "http-body", + "hyper", "pin-project-lite", "socket2", "tokio", @@ -1950,6 +1950,15 @@ dependencies = [ "generic-array", ] +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + [[package]] name = "ipnet" version = "2.11.0" @@ -2282,23 +2291,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "native-tls" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" -dependencies = [ - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - [[package]] name = "ndk" version = "0.9.0" @@ -2629,50 +2621,6 @@ dependencies = [ "pathdiff", ] -[[package]] -name = "openssl" -version = "0.10.71" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e14130c6a98cd258fdcb0fb6d744152343ff729cbfcb28c656a9d12b999fbcd" -dependencies = [ - "bitflags 2.9.0", - "cfg-if", - "foreign-types 0.3.2", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.100", -] - -[[package]] -name = "openssl-probe" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" - -[[package]] -name = "openssl-sys" -version = "0.9.106" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bb61ea9811cc39e3c2069f40b8b8e2e70d8569b361f879786cc7ed48b777cdd" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - [[package]] name = "option-ext" version = "0.2.0" @@ -2958,8 +2906,14 @@ dependencies = [ name = "plugsnatcher" version = "0.1.0" dependencies = [ + "async-mutex", + "async-trait", + "base64 0.21.7", + "cached", + "futures", "regex", - "reqwest 0.11.27", + "reqwest", + "semver", "serde", "serde_json", "sha2", @@ -2967,6 +2921,9 @@ 
dependencies = [ "tauri-build", "tauri-plugin-dialog", "tauri-plugin-opener", + "tokio", + "url", + "urlencoding", "walkdir", "yaml-rust", "zip", @@ -3012,7 +2969,7 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ - "zerocopy", + "zerocopy 0.8.24", ] [[package]] @@ -3097,6 +3054,60 @@ dependencies = [ "memchr", ] +[[package]] +name = "quinn" +version = "0.11.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3bd15a6f2967aef83887dcb9fec0014580467e33720d073560cf015a5683012" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2", + "thiserror 2.0.12", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b820744eb4dc9b57a3398183639c511b5a26d2ed702cedd3febaa1393caa22cc" +dependencies = [ + "bytes", + "getrandom 0.3.2", + "rand 0.9.0", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.12", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "541d0f57c6ec747a90738a52741d3221f7960e8ac2f0ff4b1a63680e033b4ab5" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.59.0", +] + [[package]] name = "quote" version = "1.0.40" @@ -3145,7 +3156,7 @@ checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.3", - "zerocopy", + "zerocopy 0.8.24", ] [[package]] @@ -3278,46 +3289,6 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" -[[package]] -name = "reqwest" -version = "0.11.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" -dependencies = [ - "base64 0.21.7", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.32", - "hyper-tls", - "ipnet", - "js-sys", - "log", - "mime", - "native-tls", - "once_cell", - "percent-encoding", - "pin-project-lite", - "rustls-pemfile", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper 0.1.2", - "system-configuration", - "tokio", - "tokio-native-tls", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "winreg 0.50.0", -] - [[package]] name = "reqwest" version = "0.12.15" @@ -3328,10 +3299,11 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http 1.3.1", - "http-body 1.0.1", + "http", + "http-body", "http-body-util", - "hyper 1.6.0", + "hyper", + "hyper-rustls", "hyper-util", "ipnet", "js-sys", @@ -3340,11 +3312,16 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", + "quinn", + "rustls", + "rustls-pemfile", + "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.2", + "sync_wrapper", "tokio", + "tokio-rustls", "tokio-util", "tower", "tower-service", @@ -3353,6 +3330,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", + "webpki-roots", "windows-registry", ] @@ -3381,12 +3359,32 @@ dependencies = [ "windows-sys 0.59.0", ] 
+[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.15", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + [[package]] name = "rustc-demangle" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + [[package]] name = "rustc_version" version = "0.4.1" @@ -3423,12 +3421,46 @@ dependencies = [ ] [[package]] -name = "rustls-pemfile" -version = "1.0.4" +name = "rustls" +version = "0.23.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +checksum = "822ee9188ac4ec04a2f0531e55d035fb2de73f18b41a63c70c2712503b6fb13c" dependencies = [ - "base64 0.21.7", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" +dependencies = [ + "web-time", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fef8b8769aaccf73098557a87cd1816b4f9c7c16811c9c77142aa695c16f2c03" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", ] [[package]] @@ -3452,15 +3484,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "schannel" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" -dependencies = [ - "windows-sys 0.59.0", -] - [[package]] name = "schemars" version = "0.8.22" @@ -3494,29 +3517,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "security-framework" -version = "2.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" -dependencies = [ - "bitflags 2.9.0", - "core-foundation 0.9.4", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "selectors" version = "0.22.0" @@ -3783,7 +3783,7 @@ dependencies = [ "bytemuck", "cfg_aliases", "core-graphics", - "foreign-types 0.5.0", + "foreign-types", "js-sys", "log", "objc2 0.5.2", @@ -3904,12 +3904,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - [[package]] name = "sync_wrapper" version = "1.0.2" @@ -3930,27 +3924,6 @@ dependencies = [ "syn 2.0.100", ] -[[package]] -name = "system-configuration" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 1.3.2", - "core-foundation 0.9.4", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "system-deps" version = "6.2.2" @@ -3971,7 +3944,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "63c8b1020610b9138dd7b1e06cf259ae91aa05c30f3bd0d6b42a03997b92dec1" dependencies = [ "bitflags 2.9.0", - "core-foundation 0.10.0", + "core-foundation", "core-graphics", "crossbeam-channel", "dispatch", @@ -4036,7 +4009,7 @@ dependencies = [ "glob", "gtk", "heck 0.5.0", - "http 1.3.1", + "http", "jni", "libc", "log", @@ -4048,7 +4021,7 @@ dependencies = [ "percent-encoding", "plist", "raw-window-handle", - "reqwest 0.12.15", + "reqwest", "serde", "serde_json", "serde_repr", @@ -4222,7 +4195,7 @@ dependencies = [ "cookie", "dpi", "gtk", - "http 1.3.1", + "http", "jni", "raw-window-handle", "serde", @@ -4240,7 +4213,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "087188020fd6facb8578fe9b38e81fa0fe5fb85744c73da51a299f94a530a1e3" dependencies = [ "gtk", - "http 1.3.1", + "http", "jni", "log", "objc2 0.6.0", @@ -4273,7 +4246,7 @@ dependencies = [ "dunce", "glob", "html5ever", - "http 1.3.1", + "http", "infer", "json-patch", "kuchikiki", @@ -4419,6 +4392,21 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tinyvec" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + [[package]] name = "tokio" version = "1.44.1" @@ -4429,20 +4417,33 @@ dependencies = [ "bytes", "libc", "mio", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", + "tokio-macros", "tracing", "windows-sys 0.52.0", ] [[package]] -name = "tokio-native-tls" -version = "0.3.1" +name = "tokio-macros" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ - "native-tls", + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +dependencies = [ + "rustls", "tokio", ] @@ -4524,7 +4525,7 @@ dependencies = [ "futures-core", "futures-util", "pin-project-lite", - "sync_wrapper 1.0.2", + "sync_wrapper", "tokio", "tower-layer", "tower-service", @@ -4677,6 +4678,12 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" version = "2.5.4" @@ -4689,6 +4696,12 @@ dependencies = [ "serde", ] +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + [[package]] name = "urlpattern" version = "0.3.0" @@ -4729,12 +4742,6 @@ dependencies = [ "serde", ] -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - [[package]] name = "version-compare" version = "0.2.0" @@ -4901,6 +4908,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webkit2gtk" version = "2.0.1" @@ -4945,6 +4962,15 @@ dependencies = [ "system-deps", ] +[[package]] +name = "webpki-roots" +version = "0.26.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2210b291f7ea53617fbafcc4939f10914214ec15aace5ba62293a668f322c5c9" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "webview2-com" version = "0.36.0" @@ -5453,16 +5479,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "winreg" -version = "0.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" -dependencies = [ - "cfg-if", - "windows-sys 0.48.0", -] - [[package]] name = "winreg" version = "0.52.0" @@ -5509,7 +5525,7 @@ dependencies = [ "gdkx11", "gtk", "html5ever", - "http 1.3.1", + "http", "javascriptcore-rs", "jni", "kuchikiki", @@ -5619,7 +5635,7 @@ dependencies = [ "async-trait", "blocking", "enumflags2", - "event-listener", + "event-listener 5.4.0", "futures-core", "futures-lite", "hex", @@ -5666,13 +5682,33 @@ dependencies = [ "zvariant", ] +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "zerocopy-derive 0.7.35", +] + [[package]] name = "zerocopy" version = "0.8.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879" dependencies = [ - "zerocopy-derive", + "zerocopy-derive 0.8.24", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", ] [[package]] @@ -5707,6 +5743,12 @@ dependencies = [ "synstructure", ] +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + [[package]] name = "zerovec" version = "0.10.4" diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index 00630ae..d23e3b5 100644 --- a/src-tauri/Cargo.toml 
+++ b/src-tauri/Cargo.toml
@@ -25,8 +25,24 @@ serde = { version = "1", features = ["derive"] }
 serde_json = "1"
 zip = "0.6"
 yaml-rust = "0.4"
-walkdir = "2.4"
-regex = "1.10"
+# walkdir = "2.4" # Not currently used, commented out
+regex = "1.10" # Still needed elsewhere in the codebase
 sha2 = "0.10"
-reqwest = { version = "0.11", features = ["blocking", "json"] }
+reqwest = { version = "0.12", features = ["json", "rustls-tls"], default-features = false } # Updated version, enabled rustls
+# scraper = "0.19.0" # No longer used for SpigotMCCrawler
+urlencoding = "2.1.3" # Reverted version
+semver = "1.0"
+url = "2.5"
+futures = "0.3"
+async-trait = "0.1"
+tokio = { version = "1", features = ["rt-multi-thread", "macros", "time"] } # Changed features from "full"
+# --- Add Caching Dependencies ---
+cached = { version = "0.52", features = ["proc_macro", "async", "tokio"] }
+async-mutex = "1.4" # For locking cache access within HttpClient
+# --- End Caching Dependencies ---
+base64 = "0.21" # For decoding SpigotMC changelog data
+walkdir = "2.5.0"
+
+[features]
+# default = ["custom-protocol"]
diff --git a/src-tauri/src/commands/mod.rs b/src-tauri/src/commands/mod.rs
new file mode 100644
index 0000000..72ca321
--- /dev/null
+++ b/src-tauri/src/commands/mod.rs
@@ -0,0 +1,2 @@
+pub mod plugin_commands;
+pub mod scan_commands;
\ No newline at end of file
diff --git a/src-tauri/src/commands/plugin_commands.rs b/src-tauri/src/commands/plugin_commands.rs
new file mode 100644
index 0000000..ead4b3e
--- /dev/null
+++ b/src-tauri/src/commands/plugin_commands.rs
@@ -0,0 +1,336 @@
+use tauri::{command, AppHandle};
+use crate::models::repository::{RepositorySource, RepositoryPlugin, PotentialPluginMatch};
+use crate::models::server::ServerType;
+use crate::models::plugin::Plugin;
+use crate::services::update_manager::{compare_plugin_versions, backup_plugin, replace_plugin};
+
+/// Search for plugins in specified repositories
+#[command]
+pub async fn search_plugins(query: String, repositories: Vec<RepositorySource>) -> Result<Vec<RepositoryPlugin>, String> {
+    crate::lib_search_plugins_in_repositories(&query, repositories).await
+}
+
+/// Get plugin details from a repository
+#[command]
+pub async fn get_plugin_details(
+    plugin_id: String,
+    repository: RepositorySource,
+    server_type_str: Option<String>,
+) -> Result<RepositoryPlugin, String> {
+    // Convert server_type_str to ServerType if provided
+    let server_type = if let Some(type_str) = server_type_str {
+        match type_str.as_str() {
+            "paper" => Some(crate::models::server::ServerType::Paper),
+            "spigot" => Some(crate::models::server::ServerType::Spigot),
+            "bukkit" => Some(crate::models::server::ServerType::Bukkit),
+            "velocity" => Some(crate::models::server::ServerType::Velocity),
+            "bungeecord" => Some(crate::models::server::ServerType::BungeeCord),
+            "waterfall" => Some(crate::models::server::ServerType::Waterfall),
+            "forge" => Some(crate::models::server::ServerType::Forge),
+            "fabric" => Some(crate::models::server::ServerType::Fabric),
+            _ => None,
+        }
+    } else {
+        None
+    };
+
+    crate::lib_get_plugin_details_from_repository(&plugin_id, repository, server_type.as_ref()).await
+}
+
+/// Download a plugin from a repository
+#[command]
+pub async fn download_plugin(
+    plugin_id: String,
+    version: String,
+    repository: String,
+    destination: String,
+    server_type_str: Option<String>
+) -> Result<String, String> {
+    // Convert repository string to RepositorySource
+    let repo_source = match repository.to_lowercase().as_str() {
+        "hangarmc" => RepositorySource::HangarMC,
+        "spigotmc" => RepositorySource::SpigotMC,
+        "modrinth" => RepositorySource::Modrinth,
+        "github" => RepositorySource::GitHub,
+        "bukkitdev" => RepositorySource::BukkitDev,
+        _ => RepositorySource::Custom(repository.clone()),
+    };
+
+    // Convert server_type_str to ServerType if provided
+    let server_type = if let Some(type_str) = server_type_str {
+        match type_str.as_str() {
+            "paper" => Some(ServerType::Paper),
+            "spigot" => Some(ServerType::Spigot),
+            "bukkit" => Some(ServerType::Bukkit),
+            "velocity" => Some(ServerType::Velocity),
+            "bungeecord" => Some(ServerType::BungeeCord),
+            "waterfall" => Some(ServerType::Waterfall),
+            "forge" => Some(ServerType::Forge),
+            "fabric" => Some(ServerType::Fabric),
+            _ => None,
+        }
+    } else {
+        None
+    };
+
+    crate::lib_download_plugin_from_repository(&plugin_id, &version, repo_source, &destination, server_type.as_ref()).await
+}
+
+/// Update a plugin with a new version
+#[command]
+pub async fn update_plugin(
+    app_handle: AppHandle,
+    plugin_id: String,
+    version: String,
+    repository: String,
+    current_file_path: String,
+    server_type_str: Option<String>
+) -> Result<String, String> {
+    // Convert repository string to RepositorySource
+    let repo_source = match repository.to_lowercase().as_str() {
+        "hangarmc" => RepositorySource::HangarMC,
+        "spigotmc" => RepositorySource::SpigotMC,
+        "modrinth" => RepositorySource::Modrinth,
+        "github" => RepositorySource::GitHub,
+        "bukkitdev" => RepositorySource::BukkitDev,
+        _ => RepositorySource::Custom(repository.clone()),
+    };
+
+    // Convert server_type_str to ServerInfo if provided
+    let server_info = if let Some(type_str) = server_type_str {
+        let server_type = match type_str.as_str() {
+            "paper" => ServerType::Paper,
+            "spigot" => ServerType::Spigot,
+            "bukkit" => ServerType::Bukkit,
+            "velocity" => ServerType::Velocity,
+            "bungeecord" => ServerType::BungeeCord,
+            "waterfall" => ServerType::Waterfall,
+            "forge" => ServerType::Forge,
+            "fabric" => ServerType::Fabric,
+            _ => ServerType::Unknown,
+        };
+
+        Some(crate::models::server::ServerInfo {
+            server_type,
+            minecraft_version: None,
+            plugins_directory: "".to_string(),
+            plugins_count: 0,
+        })
+    } else {
+        None
+    };
+
+    // Replace the plugin
+    replace_plugin(plugin_id, version, repo_source, current_file_path, server_info).await
+}
+
+/// Check for updates for multiple plugins
+#[command]
+pub async fn check_plugin_updates(
+    app_handle: AppHandle,
+    plugins: Vec<Plugin>,
+    repositories: Vec<String>
+) -> Result<Vec<Plugin>, String> {
+    // Convert repository strings to RepositorySource
+    let repos: Vec<RepositorySource> = repositories.into_iter()
+        .map(|repo| match repo.to_lowercase().as_str() {
+            "hangarmc" => RepositorySource::HangarMC,
+            "spigotmc" => RepositorySource::SpigotMC,
+            "modrinth" => RepositorySource::Modrinth,
+            "github" => RepositorySource::GitHub,
+            "bukkitdev" => RepositorySource::BukkitDev,
+            _ => RepositorySource::Custom(repo),
+        })
+        .collect();
+
+    crate::services::update_manager::check_for_plugin_updates(app_handle, plugins, repos).await
+}
+
+/// Check for updates for a single plugin
+#[command]
+pub async fn check_single_plugin_update_command(
+    app_handle: AppHandle,
+    plugin: Plugin,
+    repositories: Vec<String>
+) -> Result<(), String> {
+    // Convert repository strings to RepositorySource
+    let repos: Vec<RepositorySource> = repositories.into_iter()
+        .map(|repo| match repo.to_lowercase().as_str() {
+            "hangarmc" => RepositorySource::HangarMC,
+            "spigotmc" => RepositorySource::SpigotMC,
+            "modrinth" => RepositorySource::Modrinth,
+            "github" => RepositorySource::GitHub,
+            "bukkitdev" => RepositorySource::BukkitDev,
+            _ => RepositorySource::Custom(repo),
+        })
+        .collect();
+
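+    // The repository-string to RepositorySource match above is repeated in several commands in
+    // this file; a small shared helper (hypothetical, not part of this patch) could centralise it:
+    //
+    //   fn parse_repository_source(repo: &str) -> RepositorySource {
+    //       match repo.to_lowercase().as_str() {
+    //           "hangarmc" => RepositorySource::HangarMC,
+    //           "spigotmc" => RepositorySource::SpigotMC,
+    //           "modrinth" => RepositorySource::Modrinth,
+    //           "github" => RepositorySource::GitHub,
+    //           "bukkitdev" => RepositorySource::BukkitDev,
+    //           other => RepositorySource::Custom(other.to_string()),
+    //       }
+    //   }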
+    crate::services::update_manager::check_single_plugin_update(app_handle, plugin, repos).await
+}
+
+/// Create a backup of a plugin file
+#[command]
+pub async fn backup_plugin_command(plugin_file_path: String) -> Result<String, String> {
+    backup_plugin(plugin_file_path).await
+}
+
+/// Set repository source for a plugin
+#[command]
+pub async fn set_plugin_repository(
+    app_handle: AppHandle,
+    plugin_file_path: String,
+    repository: String,
+    repository_id: String,
+    page_url: String,
+    server_path: String
+) -> Result<Plugin, String> {
+    // Convert repository string to RepositorySource
+    let repo_source = match repository.to_lowercase().as_str() {
+        "hangarmc" => RepositorySource::HangarMC,
+        "spigotmc" => RepositorySource::SpigotMC,
+        "modrinth" => RepositorySource::Modrinth,
+        "github" => RepositorySource::GitHub,
+        "bukkitdev" => RepositorySource::BukkitDev,
+        _ => RepositorySource::Custom(repository.clone()),
+    };
+
+    // Load the plugin data
+    let plugins = crate::services::plugin_scanner::perform_scan(&app_handle, &server_path).await?.plugins;
+
+    // Find the specific plugin
+    let mut plugin = plugins.into_iter()
+        .find(|p| p.file_path == plugin_file_path)
+        .ok_or_else(|| format!("Plugin not found: {}", plugin_file_path))?;
+
+    // Update repository information
+    plugin.repository_source = Some(repo_source);
+    plugin.repository_id = Some(repository_id);
+    plugin.repository_url = Some(page_url);
+
+    // Trigger an update check
+    if let Some(repo_id) = &plugin.repository_id {
+        if let Some(repo_source) = &plugin.repository_source {
+            match crate::lib_get_plugin_details_from_repository(repo_id, repo_source.clone(), None).await {
+                Ok(repo_plugin) => {
+                    // Set latest version if newer
+                    if repo_plugin.version != plugin.version {
+                        let has_update = compare_plugin_versions(&plugin.version, &repo_plugin.version);
+                        plugin.latest_version = Some(repo_plugin.version);
+                        plugin.has_update = has_update;
+                        plugin.changelog = repo_plugin.changelog;
+                    } else {
+                        plugin.has_update = false;
+                    }
+                },
+                Err(e) => {
+                    println!("Error checking for updates: {}", e);
+                }
+            }
+        }
+    }
+
+    Ok(plugin)
+}
+
+/// Load saved plugin data for a specific server
+#[command]
+pub async fn load_plugin_data(
+    app_handle: AppHandle,
+    server_path: String,
+) -> Result<Vec<Plugin>, String> {
+    let data_dir = crate::services::plugin_scanner::get_plugin_data_path(&app_handle, &server_path)?;
+    let data_path = data_dir.join("plugins.json");
+
+    if !data_path.exists() {
+        // If the file doesn't exist, it's not an error, just return empty list
+        return Ok(Vec::new());
+    }
+
+    // Read the file content
+    let json_data = std::fs::read_to_string(&data_path)
+        .map_err(|e| format!("Failed to read plugin data file: {}", e))?;
+
+    // Deserialize the JSON data
+    let plugins: Vec<Plugin> = serde_json::from_str(&json_data)
+        .map_err(|e| format!("Failed to deserialize plugin data: {}", e))?;
+
+    Ok(plugins)
+}
+
+/// Save plugin data for a specific server
+#[command]
+pub async fn save_plugin_data(
+    app_handle: AppHandle,
+    plugins: Vec<Plugin>,
+    server_path: String,
+) -> Result<(), String> {
+    let data_dir = crate::services::plugin_scanner::get_plugin_data_path(&app_handle, &server_path)?;
+
+    // Create directory if it doesn't exist
+    if !data_dir.exists() {
+        std::fs::create_dir_all(&data_dir)
+            .map_err(|e| format!("Failed to create plugin data directory: {}", e))?;
+    }
+
+    // Save plugins data
+    let data_path = data_dir.join("plugins.json");
+    let json_data = serde_json::to_string_pretty(&plugins)
+        .map_err(|e| format!("Failed to serialize plugin data for saving: {}", e))?;
+
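+    // Note: writing plugins.json in place can leave a truncated file if the process is
+    // interrupted mid-write; writing to a temporary file first and renaming it over the
+    // target would be more robust (not done in this patch).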
+    std::fs::write(&data_path, json_data)
+        .map_err(|e| format!("Failed to write plugin data file: {}", e))?;
+
+    Ok(())
+}
+
+/// Get versions for a plugin from a repository
+#[command]
+pub async fn get_plugin_versions(
+    plugin_id: String,
+    repository: String
+) -> Result<Vec<String>, String> {
+    // This is a placeholder - would need to implement the actual repository API call
+    Ok(vec!["1.0.0".to_string(), "1.1.0".to_string(), "1.2.0".to_string()])
+}
+
+/// Get potential matches for a plugin from repositories
+#[command]
+pub async fn get_potential_plugin_matches(
+    app_handle: AppHandle,
+    plugin: Plugin,
+    repositories: Vec<String>
+) -> Result<Vec<PotentialPluginMatch>, String> {
+    // Convert repository strings to RepositorySource
+    let repos: Vec<RepositorySource> = repositories.into_iter()
+        .map(|repo| match repo.to_lowercase().as_str() {
+            "hangarmc" => RepositorySource::HangarMC,
+            "spigotmc" => RepositorySource::SpigotMC,
+            "modrinth" => RepositorySource::Modrinth,
+            "github" => RepositorySource::GitHub,
+            "bukkitdev" => RepositorySource::BukkitDev,
+            _ => RepositorySource::Custom(repo),
+        })
+        .collect();
+
+    // This is a placeholder - would need to implement actual search
+    Ok(Vec::new())
+}
+
+/// Compare two version strings
+#[command]
+pub fn compare_versions(version1: String, version2: String) -> bool {
+    compare_plugin_versions(&version1, &version2)
+}
+
+/// Check if a plugin is compatible with a specific Minecraft version
+#[command]
+pub fn is_plugin_compatible(plugin_version: String, minecraft_version: String) -> bool {
+    // This is a placeholder - would need to implement actual compatibility check
+    true
+}
+
+/// Simple greeting function for testing
+#[command]
+pub fn greet(name: &str) -> String {
+    format!("Hello, {}! Welcome to PlugSnatcher.", name)
+}
\ No newline at end of file
diff --git a/src-tauri/src/commands/scan_commands.rs b/src-tauri/src/commands/scan_commands.rs
new file mode 100644
index 0000000..48d3c92
--- /dev/null
+++ b/src-tauri/src/commands/scan_commands.rs
@@ -0,0 +1,14 @@
+use tauri::{command, AppHandle};
+use crate::services::plugin_scanner::{scan_server_directory, perform_scan};
+
+/// Scan a server directory for plugins
+#[command]
+pub async fn scan_server_dir(app_handle: AppHandle, path: String) -> Result<(), String> {
+    scan_server_directory(app_handle, path).await
+}
+
+/// Scan a server directory and return the result immediately
+#[command]
+pub async fn scan_server_dir_sync(app_handle: AppHandle, path: String) -> Result<crate::ScanResult, String> {
+    perform_scan(&app_handle, &path).await
+}
\ No newline at end of file
diff --git a/src-tauri/src/crawlers/github.rs b/src-tauri/src/crawlers/github.rs
new file mode 100644
index 0000000..316226b
--- /dev/null
+++ b/src-tauri/src/crawlers/github.rs
@@ -0,0 +1,229 @@
+use std::error::Error;
+use std::path::Path;
+use serde::{Serialize, Deserialize};
+use crate::{HttpClient, RepositorySource};
+use urlencoding;
+use async_trait::async_trait;
+use std::sync::Arc;
+use crate::models::repository::RepositoryPlugin;
+use crate::crawlers::Repository;
+
+// GitHub API response structures (Based on https://docs.github.com/en/rest/releases/releases)
+
+#[derive(Debug, Serialize, Deserialize)]
+struct GitHubRelease {
+    // Structure for release details
+    tag_name: String,
+    name: Option<String>,
+    body: Option<String>,
+    published_at: String,
+    assets: Vec<GitHubAsset>,
+    html_url: String, // URL to the release page
+    prerelease: bool,
+    draft: bool,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+struct GitHubAsset {
+    // Structure for release asset details
+    name: String,
+    browser_download_url: String,
+    size: u64,
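+    // `updated_at` below is an ISO 8601 timestamp string as returned by the GitHub releases API
+    // (e.g. "2024-01-31T12:00:00Z").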
+ updated_at: String, + download_count: u64, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +struct GitHubRepo { + // Structure for repository details (used for searching potentially) + id: u64, + name: String, + full_name: String, // "owner/repo" + owner: GitHubUser, + description: Option, + html_url: String, + stargazers_count: u64, // Can use this as a proxy for rating/popularity + updated_at: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +struct GitHubUser { + login: String, + avatar_url: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +struct GitHubSearchResponse { + // Structure for repository search results + items: Vec, + // Ignoring total_count, incomplete_results for now +} + +// GitHub crawler implementation +#[derive(Clone)] +pub struct GitHubCrawler { + client: Arc, + api_base_url: String, +} + +// Inherent methods for GitHubCrawler +impl GitHubCrawler { + pub fn new() -> Self { + GitHubCrawler { + client: Arc::new(HttpClient::new()), + api_base_url: "https://api.github.com".to_string(), + } + } + + // Helper function to get the latest release for a repo, now async + async fn get_latest_release_details(&self, repo_full_name: &str) -> Result> { + let url = format!("{}/repos/{}/releases/latest", self.api_base_url, repo_full_name); + let response_body = self.client.get(&url).await?; + let release: GitHubRelease = serde_json::from_str(&response_body)?; + Ok(release) + } + + // Helper function to get all releases, now async + async fn get_all_release_details(&self, repo_full_name: &str) -> Result, Box> { + let url = format!("{}/repos/{}/releases?per_page=100", self.api_base_url, repo_full_name); + let response_body = self.client.get(&url).await?; + let releases: Vec = serde_json::from_str(&response_body)?; + Ok(releases) + } + + // Helper function to get repo details, now async + async fn get_repo_details(&self, repo_full_name: &str) -> Result> { + let repo_url = format!("{}/repos/{}", self.api_base_url, repo_full_name); + let repo_response_body = self.client.get(&repo_url).await?; + let repo: GitHubRepo = serde_json::from_str(&repo_response_body)?; + Ok(repo) + } + + async fn get_plugin_versions(&self, repo_full_name: &str) -> Result, String> { + let releases = match self.get_all_release_details(repo_full_name).await { + Ok(r) => r, + Err(e) => return Err(format!("Failed GitHub get releases for versions: {}", e)), + }; + let versions = releases.into_iter() + .filter(|r| !r.draft && !r.prerelease && r.assets.iter().any(|a| a.name.ends_with(".jar"))) + .map(|r| r.tag_name) + .collect(); + Ok(versions) + } +} + +#[async_trait] +impl Repository for GitHubCrawler { + fn get_repository_name(&self) -> String { + "GitHub".to_string() + } + + async fn search(&self, query: &str) -> Result, String> { + let search_terms = format!("{} topic:minecraft-plugin topic:spigot topic:paper topic:bukkit fork:false", query); + let encoded_query = urlencoding::encode(&search_terms); + let url = format!( + "{}/search/repositories?q={}&sort=stars&order=desc", + self.api_base_url, + encoded_query + ); + + let response_body = match self.client.get(&url).await { + Ok(body) => body, + Err(e) => return Err(format!("GitHub search request failed: {}", e)), + }; + let search_response: GitHubSearchResponse = match serde_json::from_str(&response_body) { + Ok(res) => res, + Err(e) => return Err(format!("Failed to parse GitHub search results: {}", e)), + }; + + let fetch_tasks = search_response.items.into_iter().map(|repo| { + let self_clone = self.clone(); + async move { + 
self_clone.get_plugin_details(&repo.full_name).await + } + }); + + let results: Vec = futures::future::join_all(fetch_tasks).await + .into_iter() + .filter_map(|result| match result { + Ok(plugin) => Some(plugin), + Err(e) => { + println!("Error fetching details during search: {}", e); + None + } + }) + .collect(); + + Ok(results) + } + + async fn get_plugin_details(&self, plugin_id: &str) -> Result { + let repo_full_name = plugin_id; + let repo = match self.get_repo_details(repo_full_name).await { + Ok(r) => r, + Err(e) => return Err(format!("Failed GitHub get repo details: {}", e)), + }; + let mut releases = match self.get_all_release_details(repo_full_name).await { + Ok(r) => r, + Err(e) => return Err(format!("Failed GitHub get releases: {}", e)), + }; + + releases.sort_by(|a, b| b.published_at.cmp(&a.published_at)); + + let latest_valid_release = releases.into_iter() + .filter(|r| !r.draft && !r.prerelease) + .find(|r| r.assets.iter().any(|a| a.name.ends_with(".jar"))); + + if let Some(release) = latest_valid_release { + if let Some(asset) = release.assets.iter().find(|a| a.name.ends_with(".jar")) { + Ok(RepositoryPlugin { + id: repo_full_name.to_string(), + name: repo.name, + version: release.tag_name, + description: repo.description, + authors: vec![repo.owner.login], + download_url: asset.browser_download_url.clone(), + repository: RepositorySource::GitHub, + page_url: repo.html_url, + download_count: Some(asset.download_count), + last_updated: Some(release.published_at), + icon_url: repo.owner.avatar_url, + minecraft_versions: Vec::new(), + categories: Vec::new(), + rating: Some(repo.stargazers_count as f32), + file_size: Some(asset.size), + file_hash: None, + changelog: release.body, + }) + } else { + Err(format!("No suitable JAR asset found in the latest valid release for {}", repo_full_name)) + } + } else { + Err(format!("No valid release with a JAR asset found for {}", repo_full_name)) + } + } + + async fn download_plugin(&self, plugin_id: &str, version_number_str: &str, destination: &Path) -> Result { + let repo_full_name = plugin_id; + let tag_name = version_number_str; + + let release_url = format!("{}/repos/{}/releases/tags/{}", self.api_base_url, repo_full_name, tag_name); + let release_response_body = match self.client.get(&release_url).await { + Ok(body) => body, + Err(e) => return Err(format!("GitHub get release by tag failed: {}", e)), + }; + let release: GitHubRelease = match serde_json::from_str(&release_response_body) { + Ok(r) => r, + Err(e) => return Err(format!("Failed to parse GitHub release by tag: {}", e)), + }; + + let asset = release.assets.iter() + .find(|a| a.name.ends_with(".jar")) + .ok_or_else(|| format!("No suitable JAR asset found in release tag '{}' for {}", tag_name, repo_full_name))?; + + match self.client.download(&asset.browser_download_url, destination).await { + Ok(_) => Ok(destination.to_string_lossy().to_string()), + Err(e) => Err(format!("Failed to download from GitHub: {}", e)) + } + } +} \ No newline at end of file diff --git a/src-tauri/src/crawlers/hangar.rs b/src-tauri/src/crawlers/hangar.rs index 66f6c3d..e04534d 100644 --- a/src-tauri/src/crawlers/hangar.rs +++ b/src-tauri/src/crawlers/hangar.rs @@ -1,7 +1,12 @@ use std::error::Error; use std::path::Path; use serde::{Serialize, Deserialize}; -use crate::{HttpClient, RepositoryCrawler, RepositoryPlugin, RepositorySource}; +use crate::HttpClient; +use crate::models::repository::{RepositoryPlugin, RepositorySource}; +use crate::crawlers::Repository; +use urlencoding; +use 
async_trait::async_trait; +use std::sync::Arc; // HangarMC API response structures #[derive(Debug, Serialize, Deserialize)] @@ -63,141 +68,153 @@ struct HangarVersion { platform_versions: Vec, } -// HangarMC crawler implementation +// Hangar crawler implementation pub struct HangarCrawler { - client: HttpClient, + client: Arc, api_base_url: String, + web_base_url: String, } impl HangarCrawler { pub fn new() -> Self { HangarCrawler { - client: HttpClient::new(), + client: Arc::new(HttpClient::new()), api_base_url: "https://hangar.papermc.io/api/v1".to_string(), + web_base_url: "https://hangar.papermc.io".to_string(), } } - fn get_project_details(&self, owner: &str, slug: &str) -> Result> { - let url = format!("{}/projects/{}/{}", self.api_base_url, owner, slug); - let response = self.client.get(&url)?; - let project: HangarProject = serde_json::from_str(&response)?; + async fn get_project_details_internal(&self, author: &str, slug: &str) -> Result> { + let url = format!("{}/projects/{}/{}", self.api_base_url, author, slug); + let response_body = self.client.get(&url).await?; + let project: HangarProject = serde_json::from_str(&response_body)?; Ok(project) } - fn get_project_versions(&self, owner: &str, slug: &str) -> Result, Box> { - let url = format!("{}/projects/{}/{}/versions", self.api_base_url, owner, slug); - let response = self.client.get(&url)?; - let versions_response: HangarVersionsResponse = serde_json::from_str(&response)?; - Ok(versions_response.result) - } - - fn build_download_url(&self, owner: &str, slug: &str, version: &str) -> String { - format!("https://hangar.papermc.io/api/v1/projects/{}/{}/versions/{}/download", owner, slug, version) + async fn get_project_versions_internal(&self, author: &str, slug: &str) -> Result, Box> { + let url = format!("{}/projects/{}/{}/versions?limit=25&offset=0", self.api_base_url, author, slug); + let response_body = self.client.get(&url).await?; + let versions_result: HangarVersionsResponse = serde_json::from_str(&response_body)?; + Ok(versions_result.result) } } -impl RepositoryCrawler for HangarCrawler { - fn search(&self, query: &str) -> Result, Box> { - let url = format!("{}/projects?query={}&limit=20", self.api_base_url, query); - let response = self.client.get(&url)?; +#[async_trait] +impl Repository for HangarCrawler { + fn get_repository_name(&self) -> String { + "HangarMC".to_string() + } - let projects_response: HangarProjectsResponse = serde_json::from_str(&response)?; - let mut results = Vec::new(); + async fn search(&self, query: &str) -> Result, String> { + let encoded_query = urlencoding::encode(query); + let url = format!( + "{}/projects?q={}", + self.api_base_url, + encoded_query + ); - for project in projects_response.result { - // For each project, get the latest version - let versions = self.get_project_versions(&project.namespace.owner, &project.namespace.slug)?; + let response_body = match self.client.get(&url).await { + Ok(body) => body, + Err(e) => return Err(format!("Failed to search HangarMC: {}", e)), + }; - if let Some(latest_version) = versions.first() { - results.push(RepositoryPlugin { - id: format!("{}/{}", project.namespace.owner, project.namespace.slug), - name: project.name, - version: latest_version.name.clone(), - description: project.description, - authors: vec![project.namespace.owner.clone()], - download_url: self.build_download_url(&project.namespace.owner, &project.namespace.slug, &latest_version.name), - repository: RepositorySource::HangarMC, - page_url: format!("https://hangar.papermc.io/{}/{}", 
project.namespace.owner, project.namespace.slug), - download_count: Some(project.stats.downloads), - last_updated: Some(project.last_updated), - icon_url: project.icon_url, - minecraft_versions: latest_version.platform_versions.clone(), - categories: vec![project.category], - rating: None, // HangarMC uses stars, not ratings - file_size: Some(latest_version.file_size), - file_hash: None, // HangarMC API doesn't provide file hashes - }); + let search_response: HangarProjectsResponse = match serde_json::from_str(&response_body) { + Ok(res) => res, + Err(e) => return Err(format!("Failed to parse HangarMC search results: {}", e)), + }; + + let results: Vec = search_response.result.into_iter().map(|proj| { + let page_url = format!("{}/{}/{}", self.web_base_url, proj.namespace.owner, proj.namespace.slug); + let version = "Unknown".to_string(); + RepositoryPlugin { + id: format!("{}/{}", proj.namespace.owner, proj.namespace.slug), + name: proj.name, + version, + description: proj.description.clone(), + authors: vec![proj.namespace.owner], + download_url: String::new(), + repository: RepositorySource::HangarMC, + page_url, + download_count: Some(proj.stats.downloads), + last_updated: Some(proj.last_updated), + icon_url: proj.icon_url.clone(), + minecraft_versions: Vec::new(), + categories: vec![proj.category.to_string()], + rating: Some(proj.stats.stars as f32), + file_size: None, + file_hash: None, + changelog: None, } - } + }).collect(); Ok(results) } - fn get_plugin_details(&self, plugin_id: &str) -> Result> { + async fn get_plugin_details(&self, plugin_id: &str) -> Result { let parts: Vec<&str> = plugin_id.split('/').collect(); if parts.len() != 2 { - return Err("Invalid plugin ID format for HangarMC. Expected 'owner/slug'".into()); + return Err(format!("Invalid Hangar plugin ID format: {}. 
Expected 'author/slug'.", plugin_id)); } - - let owner = parts[0]; + let author = parts[0]; let slug = parts[1]; - let project = self.get_project_details(owner, slug)?; - let versions = self.get_project_versions(owner, slug)?; + let project = match self.get_project_details_internal(author, slug).await { + Ok(p) => p, + Err(e) => return Err(format!("Failed to get Hangar project details: {}", e)), + }; - if let Some(latest_version) = versions.first() { - Ok(RepositoryPlugin { - id: plugin_id.to_string(), - name: project.name, - version: latest_version.name.clone(), - description: project.description, - authors: vec![project.namespace.owner.clone()], - download_url: self.build_download_url(owner, slug, &latest_version.name), - repository: RepositorySource::HangarMC, - page_url: format!("https://hangar.papermc.io/{}/{}", owner, slug), - download_count: Some(project.stats.downloads), - last_updated: Some(project.last_updated), - icon_url: project.icon_url, - minecraft_versions: latest_version.platform_versions.clone(), - categories: vec![project.category], - rating: None, - file_size: Some(latest_version.file_size), - file_hash: None, - }) - } else { - Err("No versions found for this plugin".into()) - } + let versions = match self.get_project_versions_internal(author, slug).await { + Ok(v) => v, + Err(e) => return Err(format!("Failed to get Hangar project versions: {}", e)), + }; + + let latest_version_name = versions.first().map_or("Unknown".to_string(), |v| v.name.clone()); + + let page_url = format!("{}/{}/{}", self.web_base_url, author, slug); + + Ok(RepositoryPlugin { + id: plugin_id.to_string(), + name: project.name, + version: latest_version_name, + description: project.description.clone(), + authors: vec![project.namespace.owner], + download_url: String::new(), + repository: RepositorySource::HangarMC, + page_url, + download_count: Some(project.stats.downloads), + last_updated: Some(project.last_updated), + icon_url: project.icon_url.clone(), + minecraft_versions: versions.first().map_or(Vec::new(), |v| v.platform_versions.clone()), + categories: vec![project.category.to_string()], + rating: Some(project.stats.stars as f32), + file_size: versions.first().map(|v| v.file_size), + file_hash: None, + changelog: None, + }) } - fn get_plugin_versions(&self, plugin_id: &str) -> Result, Box> { + async fn download_plugin(&self, plugin_id: &str, version: &str, destination: &Path) -> Result { let parts: Vec<&str> = plugin_id.split('/').collect(); if parts.len() != 2 { - return Err("Invalid plugin ID format for HangarMC. Expected 'owner/slug'".into()); + return Err(format!("Invalid Hangar plugin ID format: {}. Expected 'author/slug'.", plugin_id)); } - - let owner = parts[0]; + let author = parts[0]; let slug = parts[1]; - let versions = self.get_project_versions(owner, slug)?; - Ok(versions.into_iter().map(|v| v.name).collect()) - } + let platform_str = "PAPER"; - fn download_plugin(&self, plugin_id: &str, version: &str, destination: &Path) -> Result> { - let parts: Vec<&str> = plugin_id.split('/').collect(); - if parts.len() != 2 { - return Err("Invalid plugin ID format for HangarMC. 
Expected 'owner/slug'".into()); + let download_url = format!( + "{}/projects/{}/{}/versions/{}/{}/download", + self.api_base_url, + author, + slug, + version, + platform_str + ); + + match self.client.download(&download_url, destination).await { + Ok(_) => Ok(destination.to_string_lossy().to_string()), + Err(e) => Err(format!("Failed to download plugin: {}", e)) } - - let owner = parts[0]; - let slug = parts[1]; - - let download_url = self.build_download_url(owner, slug, version); - self.client.download(&download_url, destination)?; - - Ok(destination.to_string_lossy().to_string()) - } - - fn get_repository_name(&self) -> RepositorySource { - RepositorySource::HangarMC } } \ No newline at end of file diff --git a/src-tauri/src/crawlers/mod.rs b/src-tauri/src/crawlers/mod.rs index c1b96bc..0129222 100644 --- a/src-tauri/src/crawlers/mod.rs +++ b/src-tauri/src/crawlers/mod.rs @@ -1,4 +1,43 @@ pub mod hangar; +pub mod spigotmc; +pub mod modrinth; +pub mod github; + +use std::path::Path; +use crate::models::repository::RepositoryPlugin; +use crate::models::server::ServerType; +use async_trait::async_trait; + +/// Common interface for all repository crawlers +#[async_trait] +pub trait Repository { + /// Get the name of the repository + fn get_repository_name(&self) -> String; + + /// Search for plugins in the repository + async fn search(&self, query: &str) -> Result, String>; + + /// Get plugin details from the repository + async fn get_plugin_details(&self, plugin_id: &str) -> Result; + + /// Get plugin details with server type consideration + async fn get_plugin_details_with_server_type(&self, plugin_id: &str, server_type: Option<&ServerType>) -> Result { + // Default implementation just calls the regular get_plugin_details + self.get_plugin_details(plugin_id).await + } + + /// Download a plugin from the repository + async fn download_plugin(&self, plugin_id: &str, version: &str, destination: &Path) -> Result; + + /// Download a plugin with server type consideration + async fn download_plugin_with_server_type(&self, plugin_id: &str, version: &str, destination: &Path, server_type: Option<&ServerType>) -> Result { + // Default implementation calls the regular download_plugin + self.download_plugin(plugin_id, version, destination).await + } +} // Re-export the crawler implementations -pub use hangar::HangarCrawler; \ No newline at end of file +pub use hangar::HangarCrawler; +pub use spigotmc::SpigotMCCrawler; +pub use modrinth::ModrinthCrawler; +pub use github::GitHubCrawler; \ No newline at end of file diff --git a/src-tauri/src/crawlers/spigotmc.rs b/src-tauri/src/crawlers/spigotmc.rs new file mode 100644 index 0000000..e3c2c04 --- /dev/null +++ b/src-tauri/src/crawlers/spigotmc.rs @@ -0,0 +1,371 @@ +use serde::{Deserialize, Serialize}; // Added Serialize for potential use, Deserialize is essential +use std::error::Error; +use std::path::Path; +use crate::{HttpClient}; +use crate::models::repository::{RepositoryPlugin, RepositorySource}; +use urlencoding; +use async_trait::async_trait; +use std::sync::Arc; +use std::time::{SystemTime, UNIX_EPOCH}; // For converting timestamp +use base64::{Engine as _, engine::general_purpose::STANDARD}; // Correct import with Engine trait +use serde_json::Value; // Import Value +use crate::crawlers::Repository; // Use the correct trait import + +// --- Structs for SpiGet API Deserialization --- + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +struct SpiGetIcon { + url: Option, + // data: Option, // Base64 data not typically needed 
if URL is present +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +struct SpiGetRating { + average: f32, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +struct SpiGetAuthor { + id: u32, + name: Option, // SpiGet might not always return name in all contexts + // icon: Option, // Icon data might be available in author details endpoint +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +struct SpiGetFile { + // #[serde(rename = "type")] + // file_type: Option, // e.g. ".jar" + size: Option, // Use Value to accept string or number from API + size_unit: Option, // e.g. "MB" + url: Option, // Link to the spigotmc resource page, *not* a direct download + // externalUrl: Option // Field from docs, maybe add if needed later +} + +// Represents a version summary, often nested or in arrays +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +struct SpiGetVersion { + id: u32, + name: Option, // Make optional: The actual version string e.g., "1.19.4" or "v2.1" + uuid: Option, + release_date: Option, // Timestamp + downloads: Option, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +struct SpiGetUpdate { + + description: String, // Base64 encoded HTML description + +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +struct IdReference { // Used for arrays containing only IDs + id: u32, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +struct SpiGetResource { + id: u32, + name: Option, // Make name optional to handle potential missing field in search results + tag: Option, // Make tag optional as well for robustness + version: SpiGetVersion, // Represents the *latest* version details here + author: SpiGetAuthor, // Contains author ID, maybe name + downloads: u32, + tested_versions: Vec, // List of MC versions plugin is tested against + rating: SpiGetRating, + icon: SpiGetIcon, // Contains URL to icon + update_date: u64, // Timestamp + file: SpiGetFile, // Details about the main file download + external: bool, // If true, download link points externally +} + +// --- SpigotMC Crawler Implementation (using SpiGet API) --- + +pub struct SpigotMCCrawler { + client: Arc, + base_url: String, +} + +impl SpigotMCCrawler { + pub fn new() -> Self { + SpigotMCCrawler { + client: Arc::new(HttpClient::new()), + base_url: "https://api.spiget.org/v2".to_string(), // Use SpiGet API base URL + } + } + + // Helper to convert SpiGetResource to RepositoryPlugin + fn map_resource_to_plugin(&self, resource: &SpiGetResource) -> RepositoryPlugin { + // Construct SpigotMC page URL + let page_url = format!("https://www.spigotmc.org/resources/{}", resource.id); + + // Construct potential download URL (may differ for external resources) + let download_url = if resource.external { + // For external resources, the 'url' in file info is the download link + resource.file.url.clone().unwrap_or_default() + } else { + // For internal resources, use the SpiGet download endpoint for the *latest* version + format!("{}/resources/{}/download", self.base_url, resource.id) + }; + + // Format update date + let last_updated = SystemTime::UNIX_EPOCH + .checked_add(std::time::Duration::from_secs(resource.update_date)) + .map(|_st| { // Remove chrono formatting for now + // Simple ISO 8601 format or similar - requires chrono for better formatting + // For now, just return the timestamp as string for simplicity + format!("{}", resource.update_date) + }); 
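+        // `last_updated` therefore ends up as the raw Unix timestamp rendered as a string
+        // (e.g. "1711843200"). If a formatted date is wanted later, something like
+        // `chrono::DateTime::from_timestamp(resource.update_date as i64, 0)` could produce one,
+        // but chrono is not a dependency here.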
+ + // Safely get author name + let author_name = resource.author.name.clone().unwrap_or_else(|| format!("#{}", resource.author.id)); + + // Convert file size, handling potential string in 'size' field + let file_size_bytes = resource.file.size.as_ref().and_then(|s| s.as_f64()).map(|s_num| { + match resource.file.size_unit.as_deref() { + Some("KB") => (s_num * 1024.0) as u64, + Some("MB") => (s_num * 1024.0 * 1024.0) as u64, + Some("GB") => (s_num * 1024.0 * 1024.0 * 1024.0) as u64, + _ => s_num as u64, // Assume bytes if unit is missing or unknown + } + }); + + // Prepend base URL to icon URL if it's relative (SpiGet usually provides full URL) + let icon_url = resource.icon.url.clone(); + + // Use id as fallback if name is missing + let plugin_name = resource.name.clone().unwrap_or_else(|| format!("Unnamed Resource #{}", resource.id)); + + // Extract version information properly + let version_name = resource.version.name.clone().unwrap_or_else(|| { + println!("[SpigotMCCrawler::map_resource_to_plugin] Warning: Missing version name for resource ID: {}", resource.id); + // DO NOT use tested Minecraft versions as fallbacks for plugin versions + // since they are different types of versions + "Unknown".to_string() + }); + + println!("[SpigotMCCrawler::map_resource_to_plugin] Version for {}: {}", plugin_name, version_name); + + RepositoryPlugin { + id: resource.id.to_string(), + name: plugin_name, // Use the potentially fallback name + version: version_name, // Use the potentially fallback version name + description: resource.tag.clone(), // Use tagline as description (already Option) + authors: vec![author_name], + download_url, + repository: RepositorySource::SpigotMC, + page_url, + download_count: Some(resource.downloads as u64), + last_updated, + icon_url, // Use the potentially prefixed URL + minecraft_versions: resource.tested_versions.clone(), + categories: Vec::new(), // SpiGet only gives category ID, fetching name requires another call + rating: Some(resource.rating.average), + file_size: file_size_bytes, + file_hash: None, // SpiGet does not provide hashes + changelog: None, // Needs separate call to /updates/latest + } + } + + // Helper to fetch and decode the latest update's description as changelog + async fn get_latest_changelog(&self, resource_id: &str) -> Result, Box> { + let url = format!("{}/resources/{}/updates/latest", self.base_url, resource_id); + match self.client.get(&url).await { + Ok(body) => { + match serde_json::from_str::(&body) { + Ok(update) => { + // Description is Base64 encoded HTML + match STANDARD.decode(&update.description) { + Ok(decoded_bytes) => { + // Convert bytes to string (assuming UTF-8) + // decoded_bytes is now a Vec, which implements Sized + Ok(Some(String::from_utf8_lossy(&decoded_bytes).to_string())) + } + Err(e) => { + println!("Failed to decode base64 changelog for {}: {}", resource_id, e); + Ok(None) // Return None if decoding fails + } + } + } + Err(e) => { + println!("Failed to parse latest update JSON for {}: {}", resource_id, e); + Ok(None) // Return None if parsing fails + } + } + } + Err(e) => { + // If the request itself fails (e.g., 404 if no updates), treat as no changelog + println!("Failed to fetch latest update for {}: {}", resource_id, e); + Ok(None) + } + } + } + + // Moved get_plugin_versions back here as it's not part of the Repository trait + async fn get_plugin_versions(&self, plugin_id: &str) -> Result, String> { + println!("Fetching versions for resource ID: {}", plugin_id); + let url = 
format!("{}/resources/{}/versions?sort=-releaseDate&size=10", self.base_url, plugin_id); + + let body = match self.client.get(&url).await { + Ok(b) => b, + Err(e) => return Err(format!("Failed SpiGet versions request: {}", e)) + }; + + match serde_json::from_str::>(&body) { + Ok(versions) => { + let version_names: Vec = versions.into_iter() + .filter_map(|v| v.name) + .collect(); + + if version_names.is_empty() { + // If no version names available, try to extract from the version ID + println!("No named versions found for resource {}. Trying alternate methods.", plugin_id); + + // Try to get full resource details for version info + let resource_url = format!("{}/resources/{}", self.base_url, plugin_id); + match self.client.get(&resource_url).await { + Ok(resource_body) => { + match serde_json::from_str::(&resource_body) { + Ok(resource) => { + // Check if we can get a version from the resource directly + if let Some(name) = resource.version.name { + println!("Found version from resource details: {}", name); + return Ok(vec![name]); + } + }, + Err(e) => println!("Failed to parse resource details: {}", e) + } + }, + Err(e) => println!("Failed to fetch resource details: {}", e) + } + + // If still no version, return a fallback + println!("Using fallback version for resource ID: {}", plugin_id); + Ok(vec!["Unknown".to_string()]) + } else { + println!("Found {} versions for resource ID: {}", version_names.len(), plugin_id); + Ok(version_names) + } + }, + Err(e) => { + println!("Failed to parse versions JSON: {}. Body: {}", e, body); + Err(format!("Failed to parse SpiGet versions: {}", e)) + } + } + } +} + +#[async_trait] +impl Repository for SpigotMCCrawler { + fn get_repository_name(&self) -> String { + "SpigotMC".to_string() + } + + async fn search(&self, query: &str) -> Result, String> { + let encoded_query = urlencoding::encode(query); + let url = format!( + "{}/search/resources/{}?field=name&fields=name,tag,author,version,downloads,rating,icon,updateDate,premium,file,external,testedVersions", + self.base_url, + encoded_query + ); + + let body = match self.client.get(&url).await { + Ok(b) => b, + Err(e) => return Err(format!("Failed SpiGet search request: {}", e)) + }; + + match serde_json::from_str::>(&body) { + Ok(resources) => { + let mut results = Vec::new(); + for res in &resources { + let mut plugin = self.map_resource_to_plugin(res); + // Try fetching versions if missing + if plugin.version == "Unknown" { + println!("Searching for version information for resource ID: {}", plugin.id); + match self.get_plugin_versions(&plugin.id).await { + Ok(versions) => { + if let Some(latest) = versions.first() { + println!("Found version for {}: {}", plugin.name, latest); + plugin.version = latest.clone(); + } + }, + Err(e) => println!("Failed to fetch versions for {}: {}", plugin.id, e), + } + } + results.push(plugin); + } + Ok(results) + } + Err(e) => { + // Handle case where search returns a single object instead of array (e.g., direct ID match?) + // Or just return the parsing error + Err(format!("Failed to parse SpiGet search results: {}. 
Body: {}", e, body)) + } + } + } + + async fn get_plugin_details(&self, plugin_id: &str) -> Result { + let url = format!( + "{}/resources/{}?fields=name,tag,author,version,downloads,rating,icon,updateDate,premium,file,external,testedVersions", + self.base_url, + plugin_id + ); + + let body = match self.client.get(&url).await { + Ok(b) => b, + Err(e) => return Err(format!("Failed SpiGet details request: {}", e)) + }; + + match serde_json::from_str::(&body) { + Ok(resource) => { + let mut plugin = self.map_resource_to_plugin(&resource); + // Fetch changelog + match self.get_latest_changelog(&plugin.id).await { + Ok(changelog_opt) => plugin.changelog = changelog_opt, + Err(e) => println!("Failed to fetch changelog for {}: {}", plugin.id, e), + } + // Try fetching versions if missing + if plugin.version == "Unknown" { + println!("Fetching versions for detail view of resource ID: {}", plugin.id); + match self.get_plugin_versions(&plugin.id).await { + Ok(versions) => { + if let Some(latest) = versions.first() { + println!("Found version for {} in detail view: {}", plugin.name, latest); + plugin.version = latest.clone(); + } + }, + Err(e) => println!("Failed to fetch versions for {} in detail view: {}", plugin.id, e), + } + } + Ok(plugin) + } + Err(e) => Err(format!("Failed to parse SpiGet resource details: {}. Body: {}", e, body)) + } + } + + async fn download_plugin(&self, plugin_id: &str, version: &str, destination: &Path) -> Result { + // Going with Option 1: Download latest version associated with the resource ID + let details = self.get_plugin_details(plugin_id).await?; + let download_url = &details.download_url; + + if download_url.is_empty() { + return Err(format!("No download URL found for SpigotMC resource {}", plugin_id)); + } + + // We ignore the 'version' parameter here because SpiGet usually only provides the latest download + println!("Warning: SpigotMC download via SpiGet usually fetches the LATEST version, requested version '{}' might be ignored.", version); + + match self.client.download(download_url, destination).await { + Ok(_) => Ok(destination.to_string_lossy().to_string()), // Restore returning path + Err(e) => Err(format!("Failed to download from SpiGet: {}", e)) + } + } +} \ No newline at end of file diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index 1185b7c..fd973c3 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -1,1024 +1,327 @@ // Learn more about Tauri commands at https://tauri.app/develop/calling-rust/ -use serde::{Serialize, Deserialize}; -use std::path::Path; -use std::fs; -use std::io::Read; -use tauri::{command, Emitter}; -use zip::ZipArchive; -use yaml_rust::{YamlLoader, Yaml}; -use std::fs::File; -use sha2::{Sha256, Digest}; -use reqwest; + +// Standard library imports use std::error::Error; -use tauri::AppHandle; -use std::path::PathBuf; +use std::fs::{self, File}; +use std::io::{Read, Seek, Write}; +use std::path::{Path, PathBuf}; +use std::env; +use std::sync::Arc; +use std::time::Duration; -// Add the crawlers module -mod crawlers; -use crawlers::HangarCrawler; +// Serde for serialization/deserialization +use serde::{Serialize, Deserialize}; -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] -pub enum ServerType { - Paper, - Spigot, - Bukkit, - Vanilla, - Forge, - Fabric, - Velocity, - BungeeCord, - Waterfall, - Unknown, -} +// Tauri related imports +use tauri::{command, Emitter, AppHandle, Manager, State, Window}; -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ServerInfo { - server_type: ServerType, - 
minecraft_version: Option, - plugins_directory: String, - plugins_count: usize, -} +// Internal modules +pub mod models; +pub mod services; +pub mod commands; +pub mod crawlers; +pub mod platform_matcher; -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct Plugin { - name: String, - version: String, - latest_version: Option, - description: Option, - authors: Vec, - has_update: bool, - api_version: Option, - main_class: Option, - depend: Option>, - soft_depend: Option>, - load_before: Option>, - commands: Option, - permissions: Option, - file_path: String, - file_hash: String, -} +// Import our models +pub use models::server::{ServerType, ServerInfo, ScanResult, ScanProgress}; +pub use models::plugin::{Plugin, PluginMeta}; +pub use models::repository::{RepositorySource, RepositoryPlugin, PotentialPluginMatch}; -#[derive(Debug, Serialize, Deserialize)] -pub struct PluginMeta { - pub name: String, - pub version: String, - pub description: Option, - pub authors: Vec, - pub api_version: Option, - pub main_class: Option, - pub depend: Option>, - pub soft_depend: Option>, - pub load_before: Option>, - pub commands: Option, - pub permissions: Option, - pub file_path: String, - pub file_size: u64, - pub file_hash: String, -} +// Import our services +pub use services::http::HttpClient; +pub use services::plugin_scanner::{scan_server_directory, perform_scan, extract_plugin_metadata, calculate_file_hash, is_file_locked}; +pub use services::update_manager::{check_for_plugin_updates, check_single_plugin_update, backup_plugin, replace_plugin, normalize_version, compare_plugin_versions}; -/// Calculates SHA-256 hash for a given file path -pub fn calculate_file_hash(file_path: &str) -> Result { - let mut file = File::open(file_path).map_err(|e| format!("Failed to open file for hashing: {}", e))?; - let mut hasher = Sha256::new(); - let mut buffer = [0; 1024]; +// Import our commands +pub use commands::plugin_commands::*; +pub use commands::scan_commands::*; - loop { - let bytes_read = file.read(&mut buffer).map_err(|e| format!("Failed to read file for hashing: {}", e))?; - if bytes_read == 0 { - break; - } - hasher.update(&buffer[..bytes_read]); +// Import our crawlers +pub use crawlers::HangarCrawler; +pub use crawlers::SpigotMCCrawler; +pub use crawlers::ModrinthCrawler; +pub use crawlers::GitHubCrawler; +pub use crawlers::Repository; + +// Import platform matchers +pub use platform_matcher::{get_compatible_modrinth_loaders, is_version_compatible_with_server}; + +use futures::future::{BoxFuture, FutureExt}; // Import necessary future types + +/// Search for plugins in repositories +pub async fn lib_search_plugins_in_repositories( + query: &str, + repositories: Vec +) -> Result, String> { + // Check for empty query + if query.is_empty() { + return Err("Search query cannot be empty".to_string()); } - let hash = hasher.finalize(); - Ok(format!("{:x}", hash)) -} + // Check for empty repository list + if repositories.is_empty() { + return Err("No repositories specified for search".to_string()); + } -/// Extract metadata from a plugin.yml file inside a JAR -fn extract_plugin_metadata(jar_path: &Path) -> Result { - let file = fs::File::open(jar_path) - .map_err(|e| format!("Failed to open JAR file: {}", e))?; + // Create a list to store tasks (boxed futures) + let mut tasks: Vec), (String, String)>>> = Vec::new(); - let file_size = file.metadata() - .map_err(|e| format!("Failed to read file metadata: {}", e))? 
- .len(); + // Add tasks for each repository + for repo in repositories { + match repo { + RepositorySource::HangarMC => { + let crawler = crawlers::HangarCrawler::new(); + let query_owned = query.to_string(); + let task = async move { + match crawler.search(&query_owned).await { + Ok(repo_results) => Ok((crawler.get_repository_name(), repo_results)), + Err(e) => Err((crawler.get_repository_name(), e.to_string())) + } + }.boxed(); // Box the future + tasks.push(task); + }, + RepositorySource::SpigotMC => { + let crawler = crawlers::SpigotMCCrawler::new(); + let query_owned = query.to_string(); + let task = async move { + match crawler.search(&query_owned).await { + Ok(repo_results) => Ok((crawler.get_repository_name(), repo_results)), + Err(e) => Err((crawler.get_repository_name(), e.to_string())) + } + }.boxed(); // Box the future + tasks.push(task); + }, + RepositorySource::Modrinth => { + let crawler = crawlers::ModrinthCrawler::new(); + let query_owned = query.to_string(); + let task = async move { + match crawler.search(&query_owned).await { + Ok(repo_results) => Ok((crawler.get_repository_name(), repo_results)), + Err(e) => Err((crawler.get_repository_name(), e.to_string())) + } + }.boxed(); // Box the future + tasks.push(task); + }, + RepositorySource::GitHub => { + // Placeholder - will cause errors until GitHubCrawler implements Repository + eprintln!("GitHub search not fully implemented yet."); + }, + _ => { + eprintln!("Repository {:?} not supported for search", repo); + } + } + } - let mut archive = ZipArchive::new(file) - .map_err(|e| format!("Invalid JAR file: {}", e))?; + // Execute all tasks concurrently + let results = futures::future::join_all(tasks).await; - // Try to find and read plugin.yml or bungee.yml - let yaml_content = match read_yaml_from_archive(&mut archive, "plugin.yml") { - Ok(content) => content, - Err(_) => match read_yaml_from_archive(&mut archive, "bungee.yml") { - Ok(content) => content, - Err(_) => { - // If no plugin metadata file is found, try to infer from filename - let filename = jar_path.file_name() - .and_then(|n| n.to_str()) - .unwrap_or("unknown.jar"); + // Collect successful results + let mut plugins = Vec::new(); + let mut errors = Vec::new(); - // Extract name and version from filename (e.g., "WorldEdit-7.2.8.jar" → name: "WorldEdit", version: "7.2.8") - let mut parts: Vec<&str> = filename.trim_end_matches(".jar").split('-').collect(); - let version = if parts.len() > 1 { - parts.pop().unwrap_or("1.0.0").to_string() + for result in results { + match result { + Ok((_, repo_plugins)) => { + plugins.extend(repo_plugins); + }, + Err((repo, error)) => { + errors.push(format!("[{}] {}", repo, error)); + } + } + } + + // Check if we found any results + if plugins.is_empty() { + if !errors.is_empty() { + return Err(errors.join("; ")); } else { - "1.0.0".to_string() - }; - - let name = parts.join("-"); - - return Ok(PluginMeta { - name, - version, - description: None, - authors: Vec::new(), - api_version: None, - main_class: None, - depend: None, - soft_depend: None, - load_before: None, - commands: None, - permissions: None, - file_path: jar_path.to_string_lossy().to_string(), - file_size, - file_hash: calculate_file_hash(jar_path.to_str().unwrap_or("unknown.jar")).unwrap_or_else(|_| "unknown".to_string()), - }); - } - } - }; - - // Parse the YAML content - let docs = match YamlLoader::load_from_str(&yaml_content) { - Ok(docs) => docs, - Err(e) => { - println!("Failed to parse plugin YAML: {}", e); - return fallback_plugin_meta(jar_path, 
file_size); - } - }; - - if docs.is_empty() { - return fallback_plugin_meta(jar_path, file_size); - } - - let doc = &docs[0]; - - // Extract basic metadata with fallbacks for missing fields - let name = yaml_str_with_fallback(doc, "name", jar_path); - let version = yaml_str_with_fallback(doc, "version", jar_path); - - // Extract optional fields - let description = yaml_str_opt(doc, "description"); - - // Handle authors (can be a single string or an array) - let authors = match &doc["authors"] { - Yaml::Array(arr) => { - arr.iter() - .filter_map(|a| a.as_str().map(|s| s.to_string())) - .collect() - }, - Yaml::String(s) => vec![s.clone()], - _ => { - // Fallback to 'author' field which is sometimes used - match &doc["author"] { - Yaml::String(s) => vec![s.clone()], - _ => Vec::new(), - } - } - }; - - // Extract other optional metadata - let api_version = yaml_str_opt(doc, "api-version"); - let main_class = yaml_str_opt(doc, "main"); - - // Handle dependency lists - let depend = yaml_str_array(doc, "depend"); - let soft_depend = yaml_str_array(doc, "softdepend"); - let load_before = yaml_str_array(doc, "loadbefore"); - - // Handle complex structures as generic JSON values - let commands = match &doc["commands"] { - Yaml::Hash(_) => { - Some(serde_json::Value::String("Commands data present".to_string())) - }, - _ => None - }; - - let permissions = match &doc["permissions"] { - Yaml::Hash(_) => { - Some(serde_json::Value::String("Permissions data present".to_string())) - }, - _ => None - }; - - // Calculate the file hash - let file_hash = calculate_file_hash(jar_path.to_str().unwrap_or("unknown.jar")).unwrap_or_else(|_| "unknown".to_string()); - - Ok(PluginMeta { - name, - version, - description, - authors, - api_version, - main_class, - depend, - soft_depend, - load_before, - commands, - permissions, - file_path: jar_path.to_string_lossy().to_string(), - file_size, - file_hash, - }) -} - -// Helper function to read a YAML file from the ZIP archive -fn read_yaml_from_archive(archive: &mut ZipArchive, file_name: &str) -> Result { - match archive.by_name(file_name) { - Ok(mut file) => { - let mut contents = String::new(); - file.read_to_string(&mut contents) - .map_err(|e| format!("Failed to read {}: {}", file_name, e))?; - Ok(contents) - }, - Err(e) => Err(format!("Failed to find {}: {}", file_name, e)) - } -} - -// Helper function to create plugin metadata with fallback values -fn fallback_plugin_meta(jar_path: &Path, file_size: u64) -> Result { - let filename = jar_path.file_name() - .and_then(|n| n.to_str()) - .unwrap_or("unknown.jar"); - - // Extract name and version from filename (e.g., "WorldEdit-7.2.8.jar" → name: "WorldEdit", version: "7.2.8") - let mut parts: Vec<&str> = filename.trim_end_matches(".jar").split('-').collect(); - let version = if parts.len() > 1 { - parts.pop().unwrap_or("1.0.0").to_string() - } else { - "1.0.0".to_string() - }; - - let name = parts.join("-"); - - Ok(PluginMeta { - name, - version, - description: None, - authors: Vec::new(), - api_version: None, - main_class: None, - depend: None, - soft_depend: None, - load_before: None, - commands: None, - permissions: None, - file_path: jar_path.to_string_lossy().to_string(), - file_size, - file_hash: calculate_file_hash(jar_path.to_str().unwrap_or("unknown.jar")).unwrap_or_else(|_| "unknown".to_string()), - }) -} - -// Extract a string from YAML with fallback to filename -fn yaml_str_with_fallback(yaml: &Yaml, key: &str, jar_path: &Path) -> String { - match yaml[key].as_str() { - Some(s) => s.to_string(), - None => { 
- // Extract from filename as fallback - let filename = jar_path.file_name() - .and_then(|n| n.to_str()) - .unwrap_or("unknown.jar"); - - if key == "name" { - // Extract name (e.g., "WorldEdit-7.2.8.jar" → "WorldEdit") - let parts: Vec<&str> = filename.trim_end_matches(".jar").split('-').collect(); - parts[0].to_string() - } else if key == "version" { - // Extract version (e.g., "WorldEdit-7.2.8.jar" → "7.2.8") - let parts: Vec<&str> = filename.trim_end_matches(".jar").split('-').collect(); - if parts.len() > 1 { - parts[1].to_string() - } else { - "1.0.0".to_string() - } - } else { - String::new() - } - } - } -} - -fn yaml_str_opt(yaml: &Yaml, key: &str) -> Option { - yaml[key].as_str().map(|s| s.to_string()) -} - -fn yaml_str_array(yaml: &Yaml, key: &str) -> Option> { - match &yaml[key] { - Yaml::Array(arr) => { - let strings: Vec = arr.iter() - .filter_map(|a| a.as_str().map(|s| s.to_string())) - .collect(); - if strings.is_empty() { None } else { Some(strings) } - }, - _ => None - } -} - -/// Detect the server type based on files in the server directory -fn detect_server_type(server_path: &Path) -> ServerType { - // --- Check for Paper --- (Check before Spigot/Bukkit as Paper includes their files) - // Primary indicator: config/paper-global.yml (or similar variants) - if server_path.join("config").join("paper-global.yml").exists() || - server_path.join("config").join("paper.yml").exists() { // Also check for paper.yml in config, just in case - return ServerType::Paper; - } - // Secondary indicator: paper.yml in root (less common, but check) - if server_path.join("paper.yml").exists() { - return ServerType::Paper; - } - // Tertiary indicator: Look for a jar file starting with "paper-" in root - if let Ok(entries) = fs::read_dir(server_path) { - for entry in entries.filter_map(Result::ok) { - if let Some(filename) = entry.file_name().to_str() { - if filename.starts_with("paper-") && filename.ends_with(".jar") { - return ServerType::Paper; - } - } - } - } - // --- End Paper Check --- - - // Check for Spigot - if server_path.join("spigot.yml").exists() { - return ServerType::Spigot; - } - - // Check for Bukkit - if server_path.join("bukkit.yml").exists() { - return ServerType::Bukkit; - } - - // Check for Forge - if server_path.join("forge-server.jar").exists() || - server_path.join("mods").exists() { - return ServerType::Forge; - } - - // Check for Fabric - if server_path.join("fabric-server-launch.jar").exists() || - (server_path.join("mods").exists() && server_path.join("fabric-server-launcher.properties").exists()) { - return ServerType::Fabric; - } - - // Check for Velocity - if server_path.join("velocity.toml").exists() { - return ServerType::Velocity; - } - - // Check for BungeeCord - if server_path.join("BungeeCord.jar").exists() || - server_path.join("config.yml").exists() { - return ServerType::BungeeCord; - } - - // Check for Waterfall - if server_path.join("waterfall.jar").exists() || - server_path.join("waterfall.yml").exists() { - return ServerType::Waterfall; - } - - // Check if it's at least a vanilla server - if server_path.join("server.properties").exists() || - server_path.join("vanilla_server.jar").exists() { - return ServerType::Vanilla; - } - - // If no server type detected - ServerType::Unknown -} - -/// Helper to find the most likely server JAR file -fn find_server_jar(server_path: &Path) -> Option { - const COMMON_NAMES: [&str; 4] = ["server.jar", "spigot.jar", "paper.jar", "craftbukkit.jar"]; - let mut largest_jar: Option<(u64, PathBuf)> = None; - let mut 
found_common_name: Option = None; - - if let Ok(entries) = fs::read_dir(server_path) { - for entry in entries.filter_map(Result::ok) { - let path = entry.path(); - if path.is_file() && path.extension().map_or(false, |ext| ext.eq_ignore_ascii_case("jar")) { - // Check for common names first - if let Some(filename) = path.file_name().and_then(|n| n.to_str()) { - if COMMON_NAMES.contains(&filename) { - found_common_name = Some(path.clone()); - break; // Found a primary common name, stop looking - } - } - - // Track largest JAR as a fallback - if let Ok(metadata) = entry.metadata() { - let size = metadata.len(); - if largest_jar.is_none() || size > largest_jar.as_ref().unwrap().0 { - largest_jar = Some((size, path.clone())); - } - } - } - } - } - // Prioritize common names, then largest JAR - found_common_name.or_else(|| largest_jar.map(|(_, path)| path)) -} - -/// Helper to read version.json from inside a JAR archive -fn read_version_from_jar(jar_path: &Path) -> Option { - match File::open(jar_path) { - Ok(file) => { - match ZipArchive::new(file) { - Ok(mut archive) => { - match archive.by_name("version.json") { - Ok(mut version_file) => { - let mut contents = String::new(); - if version_file.read_to_string(&mut contents).is_ok() { - if let Ok(json) = serde_json::from_str::(&contents) { - if let Some(version) = json["name"].as_str() { - return Some(version.to_string()); - } - } - } - } - Err(_) => { /* version.json not found in archive */ } - } - } - Err(e) => println!("Failed to read JAR archive {}: {}", jar_path.display(), e), - } - } - Err(e) => println!("Failed to open JAR file {}: {}", jar_path.display(), e), - } - None -} - -/// Guess the Minecraft version from various files in the server directory -fn detect_minecraft_version(server_path: &Path, server_type: &ServerType) -> Option { - // 1. Try external version.json - if let Ok(content) = fs::read_to_string(server_path.join("version.json")) { - if let Ok(json) = serde_json::from_str::(&content) { - if let Some(version) = json["name"].as_str() { - println!("Version found via external version.json"); - return Some(version.to_string()); - } + return Ok(Vec::new()); } } - // 2. 
Try parsing paper-global.yml for Paper servers - if server_type == &ServerType::Paper { - let paper_global_path = server_path.join("config").join("paper-global.yml"); - if paper_global_path.exists() { - if let Ok(content) = fs::read_to_string(&paper_global_path) { - match yaml_rust::YamlLoader::load_from_str(&content) { - Ok(docs) if !docs.is_empty() => { - let doc = &docs[0]; - if let Some(version) = doc["misc"]["paper-version"].as_str() { - let mc_version = version.split('-').next().unwrap_or(version); - println!("Version found via paper-global.yml (misc.paper-version)"); - return Some(mc_version.to_string()); - } - if let Some(version) = doc["settings"]["minecraft-version"].as_str() { - println!("Version found via paper-global.yml (settings.minecraft-version)"); - return Some(version.to_string()); - } - } - Err(e) => println!("Failed to parse paper-global.yml: {}", e), - _ => { /* Empty or invalid YAML */ } - } - } + Ok(plugins) +} + +/// Generate search variations for a plugin name +fn generate_search_variations(plugin_name: &str) -> Vec { + let mut variations = Vec::new(); + + // Add original name + variations.push(plugin_name.to_string()); + + // Convert to lowercase + let name_lower = plugin_name.to_lowercase(); + if name_lower != plugin_name { + variations.push(name_lower.clone()); + } + + // Add variations with common prefixes/suffixes removed + let prefixes = ["plugin", "mc", "minecraft"]; + let suffixes = ["plugin", "spigot", "bukkit", "paper", "mc"]; + + for prefix in prefixes.iter() { + let prefix_str = format!("{} ", prefix); + if name_lower.starts_with(&prefix_str) { + variations.push(name_lower[prefix_str.len()..].to_string()); } } - // 3. Try reading version.json from inside the server JAR - if let Some(server_jar_path) = find_server_jar(server_path) { - if let Some(version) = read_version_from_jar(&server_jar_path) { - println!("Version found via internal version.json in {}", server_jar_path.display()); - return Some(version); + for suffix in suffixes.iter() { + let suffix_str = format!(" {}", suffix); + if name_lower.ends_with(&suffix_str) { + variations.push(name_lower[0..name_lower.len() - suffix_str.len()].to_string()); } } - // 4. Try fallback: JAR filename pattern matching - if let Ok(entries) = fs::read_dir(server_path) { - for entry in entries { - if let Ok(entry) = entry { - let path = entry.path(); - if path.is_file() && path.extension().map_or(false, |ext| ext == "jar") { - let filename = path.file_name().and_then(|n| n.to_str()).unwrap_or(""); - if filename.starts_with("paper-") || filename.starts_with("spigot-") || filename.starts_with("craftbukkit-") { - let parts: Vec<&str> = filename.split('-').collect(); - if parts.len() > 1 { - let version_part = parts[1].trim_end_matches(".jar"); - if version_part.contains('.') { // Basic version format check - println!("Version inferred from JAR filename pattern: {}", filename); - return Some(version_part.to_string()); - } - } - } - if filename.starts_with("minecraft_server.") { - let version_part = filename.trim_start_matches("minecraft_server.").trim_end_matches(".jar"); - if version_part.contains('.') { - println!("Version inferred from minecraft_server JAR filename: {}", filename); - return Some(version_part.to_string()); - } - } - } - } - } - } + // Remove duplicates + variations.sort(); + variations.dedup(); - // 5. 
Try fallback: Proxy config files (Velocity, Bungee, Waterfall) - if server_type == &ServerType::BungeeCord || - server_type == &ServerType::Waterfall || - server_type == &ServerType::Velocity { - // ... (existing proxy config logic remains here) ... - if server_type == &ServerType::Velocity { - // ... velocity.toml check ... - if let Ok(content) = fs::read_to_string(server_path.join("velocity.toml")) { - for line in content.lines() { - if line.contains("minecraft-version") { - if let Some(version) = line.split('=').nth(1) { - println!("Version found via velocity.toml"); - return Some(version.trim().trim_matches('"').to_string()); - } - } - } - } - } else { - // ... config.yml check ... - if let Ok(content) = fs::read_to_string(server_path.join("config.yml")) { - if let Ok(docs) = YamlLoader::load_from_str(&content) { - if !docs.is_empty() { - let doc = &docs[0]; - if let Some(version) = doc["minecraft_version"].as_str() { - println!("Version found via config.yml"); - return Some(version.to_string()); - } - } - } - } - } - } - - // 6. No version found - println!("Could not detect Minecraft version."); - None + variations } -/// Get plugins directory path based on server type -fn get_plugins_directory(server_path: &Path, server_type: &ServerType) -> String { - match server_type { - ServerType::Velocity => server_path.join("plugins").to_string_lossy().to_string(), - ServerType::BungeeCord => server_path.join("plugins").to_string_lossy().to_string(), - ServerType::Waterfall => server_path.join("plugins").to_string_lossy().to_string(), - _ => server_path.join("plugins").to_string_lossy().to_string(), - } -} +/// Search for plugin variations +pub async fn search_with_variations(plugin_name: &str, repositories: &[RepositorySource]) -> Result, String> { + let variations = generate_search_variations(plugin_name); + let mut all_results = Vec::new(); -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ScanResult { - server_info: ServerInfo, - plugins: Vec, -} - -// Payload for progress events -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ScanProgress { - processed: usize, - total: usize, - current_file: String, -} - -#[command] -async fn scan_server_directory(app_handle: AppHandle, path: String) -> Result<(), String> { - // Spawn the scanning logic into a background thread - tauri::async_runtime::spawn(async move { - // Bring Manager trait into scope for this block - let result = perform_scan(&app_handle, &path).await; - - match result { - Ok(scan_result) => { - // Emit completion event - if let Err(e) = app_handle.emit("scan_complete", scan_result) { - eprintln!("Failed to emit scan_complete event: {}", e); - } - } + for variation in variations { + match lib_search_plugins_in_repositories(&variation, repositories.to_vec()).await { + Ok(results) => { + all_results.extend(results); + }, Err(e) => { - // Emit error event - if let Err(emit_err) = app_handle.emit("scan_error", e.to_string()) { - eprintln!("Failed to emit scan_error event: {}", emit_err); - } + println!("Error searching for variation '{}': {}", variation, e); + // Continue with other variations even if this one fails } } + } + + // Remove duplicates by plugin ID and repository + all_results.sort_by(|a, b| { + let a_key = format!("{:?}:{}", a.repository, a.id); + let b_key = format!("{:?}:{}", b.repository, b.id); + a_key.cmp(&b_key) }); - Ok(()) // Return immediately + all_results.dedup_by(|a, b| { + a.id == b.id && a.repository == b.repository + }); + + Ok(all_results) } -// The actual scanning logic, moved to a 
separate async function -async fn perform_scan(app_handle: &AppHandle, path: &str) -> Result { - let server_path = Path::new(path); - - if !server_path.exists() { - return Err(format!("Server path does not exist: {}", path)); - } - - // Detect server type and version - let server_type = detect_server_type(server_path); - let minecraft_version = detect_minecraft_version(server_path, &server_type); - - println!("Detected server type: {:?}", server_type); - if let Some(version) = &minecraft_version { - println!("Detected Minecraft version: {}", version); - } - - // Determine plugins directory based on server type - let plugins_dir_str = get_plugins_directory(server_path, &server_type); - let plugins_dir = Path::new(&plugins_dir_str); - - if !plugins_dir.exists() { - return Err(format!( - "Plugins directory not found at: {}", - plugins_dir.display() - )); - } - - // --- Progress Reporting Setup --- - let mut jar_files_to_process: Vec = Vec::new(); - match fs::read_dir(&plugins_dir) { - Ok(entries) => { - for entry in entries { - if let Ok(entry) = entry { - let path = entry.path(); - if path.is_file() && path.extension().map_or(false, |ext| ext.eq_ignore_ascii_case("jar")) { - jar_files_to_process.push(path); - } - } - } - } - Err(e) => { - return Err(format!("Failed to read plugins directory initially: {}", e)); - } - } - let total_plugins = jar_files_to_process.len(); - let mut processed_plugins = 0; - // --- End Progress Reporting Setup --- - - let mut plugins = Vec::new(); - - for path in jar_files_to_process { - processed_plugins += 1; - let current_file = path.file_name().unwrap_or_default().to_string_lossy().to_string(); - - // Emit progress - let progress = ScanProgress { - processed: processed_plugins, - total: total_plugins, - current_file: current_file.clone(), - }; - if let Err(e) = app_handle.emit("scan_progress", progress) { - eprintln!("Failed to emit scan_progress event: {}", e); - // Continue processing even if event emission fails - } - - // Use spawn_blocking for CPU/IO-bound tasks within the async fn - let meta_result = tauri::async_runtime::spawn_blocking(move || { - extract_plugin_metadata(&path) - }).await; - - match meta_result { - Ok(Ok(meta)) => { - // Create a Plugin from PluginMeta - let plugin = Plugin { - name: meta.name, - version: meta.version, - latest_version: None, // Will be filled by update checker - description: meta.description, - authors: meta.authors, - has_update: false, // Will be determined by update checker - api_version: meta.api_version, - main_class: meta.main_class, - depend: meta.depend, - soft_depend: meta.soft_depend, - load_before: meta.load_before, - commands: meta.commands, - permissions: meta.permissions, - file_path: meta.file_path, - file_hash: meta.file_hash, - }; - plugins.push(plugin); - } - Ok(Err(e)) => { - // Log error but continue with other plugins - println!("Error reading plugin from {}: {}", current_file, e); - // Optionally emit a specific plugin error event here - } - Err(e) => { - // This happens if the blocking task itself panics - println!("Task panicked for plugin {}: {}", current_file, e); - } - } - } - - // Create server info - let server_info = ServerInfo { - server_type, - minecraft_version, - plugins_directory: plugins_dir_str, - plugins_count: plugins.len(), // Use the count of successfully processed plugins - }; - - Ok(ScanResult { - server_info, - plugins, - }) -} - -#[command] -fn greet(name: &str) -> String { - format!("Hello, {}! 
You've been greeted from Rust!", name) -} - -// Web Crawler Module -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] -pub enum RepositorySource { - HangarMC, - SpigotMC, - Modrinth, - GitHub, - BukkitDev, - Custom(String), -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct RepositoryPlugin { - id: String, // Unique identifier in the repository - name: String, // Plugin name - version: String, // Latest version - description: Option, - authors: Vec, - download_url: String, // URL to download the plugin +/// Get plugin details from a repository +pub async fn lib_get_plugin_details_from_repository( + plugin_id: &str, repository: RepositorySource, - page_url: String, // URL to the plugin page - download_count: Option, - last_updated: Option, - icon_url: Option, - minecraft_versions: Vec, - categories: Vec, - rating: Option, - file_size: Option, - file_hash: Option, -} - -// Trait for implementing different repository crawlers -pub trait RepositoryCrawler { - fn search(&self, query: &str) -> Result, Box>; - fn get_plugin_details(&self, plugin_id: &str) -> Result>; - fn get_plugin_versions(&self, plugin_id: &str) -> Result, Box>; - fn download_plugin(&self, plugin_id: &str, version: &str, destination: &Path) -> Result>; - fn get_repository_name(&self) -> RepositorySource; -} - -// Basic HTTP client for crawler implementations -pub struct HttpClient { - client: reqwest::blocking::Client, -} - -impl HttpClient { - pub fn new() -> Self { - let client = reqwest::blocking::Client::builder() - .user_agent("PlugSnatcher/0.1.0") - .build() - .unwrap_or_else(|_| reqwest::blocking::Client::new()); - - HttpClient { client } - } - - pub fn get(&self, url: &str) -> Result> { - let response = self.client.get(url).send()?; - - if response.status().is_success() { - Ok(response.text()?) 
- } else { - Err(format!("HTTP error: {}", response.status()).into()) - } - } - - pub fn download(&self, url: &str, destination: &Path) -> Result<(), Box> { - let response = self.client.get(url).send()?; - - if response.status().is_success() { - let bytes = response.bytes()?; - fs::write(destination, bytes)?; - Ok(()) - } else { - Err(format!("Download failed: {}", response.status()).into()) - } - } -} - -// Helper function to get crawler for a specific repository -fn get_crawler(repository: &RepositorySource) -> Option> { + server_type: Option<&ServerType> +) -> Result { match repository { - RepositorySource::HangarMC => Some(Box::new(HangarCrawler::new())), - // Other repositories will be implemented later - _ => None, - } -} + RepositorySource::HangarMC => { + let crawler = crawlers::HangarCrawler::new(); + crawler.get_plugin_details(plugin_id).await + .map_err(|e| format!("Failed to get plugin details from HangarMC: {}", e)) + }, + RepositorySource::SpigotMC => { + let crawler = crawlers::SpigotMCCrawler::new(); + crawler.get_plugin_details(plugin_id).await + .map_err(|e| format!("Failed to get plugin details from SpigotMC: {}", e)) + }, + RepositorySource::Modrinth => { + let crawler = crawlers::ModrinthCrawler::new(); -// Regular repository functions (not commands) -pub fn lib_search_plugins_in_repositories(query: &str, repositories: Vec) -> Result, String> { - let mut results: Vec = Vec::new(); - - // Try each requested repository - for repo in repositories { - if let Some(crawler) = get_crawler(&repo) { - match crawler.search(query) { - Ok(repo_results) => { - results.extend(repo_results); - }, - Err(e) => { - println!("Error searching in repository {:?}: {}", repo, e); - // Continue with other repositories even if one fails - } - } - } else { - println!("Repository crawler for {:?} not implemented yet", repo); - } - } - - if results.is_empty() { - Err("No plugins found or repositories not implemented yet".to_string()) + // Use server type aware version if provided + if let Some(server_type) = server_type { + crawler.get_plugin_details_with_server_type(plugin_id, Some(server_type)).await + .map_err(|e| format!("Failed to get plugin details from Modrinth: {}", e)) } else { - Ok(results) - } -} - -pub fn lib_get_plugin_details_from_repository(plugin_id: &str, repository: RepositorySource) -> Result { - if let Some(crawler) = get_crawler(&repository) { - crawler.get_plugin_details(plugin_id).map_err(|e| e.to_string()) - } else { - Err(format!("Repository crawler for {:?} not implemented yet", repository)) - } -} - -pub fn lib_download_plugin_from_repository(plugin_id: &str, version: &str, repository: RepositorySource, destination: &str) -> Result { - if let Some(crawler) = get_crawler(&repository) { - crawler - .download_plugin(plugin_id, version, Path::new(destination)) - .map_err(|e| e.to_string()) - } else { - Err(format!("Repository crawler for {:?} not implemented yet", repository)) - } -} - -// A very simple proxy command -#[command] -fn plugin_proxy(action: &str, json_args: &str) -> Result { - match action { - "search" => { - // Parse the JSON arguments - let args: serde_json::Value = serde_json::from_str(json_args) - .map_err(|e| format!("Invalid JSON: {}", e))?; - - let query = args["query"].as_str() - .ok_or_else(|| "Missing query parameter".to_string())?; - - // Convert repositories to RepositorySource - let repo_array = args["repositories"].as_array() - .ok_or_else(|| "Missing repositories parameter".to_string())?; - - let mut repositories = Vec::new(); - for repo in 
repo_array { - let repo_str = repo.as_str() - .ok_or_else(|| "Repository must be a string".to_string())?; - - let repo_source = match repo_str { - "HangarMC" => RepositorySource::HangarMC, - "SpigotMC" => RepositorySource::SpigotMC, - "Modrinth" => RepositorySource::Modrinth, - "GitHub" => RepositorySource::GitHub, - "BukkitDev" => RepositorySource::BukkitDev, - _ => { - // Handle custom repositories - if repo_str.starts_with("Custom:") { - let url = repo_str.trim_start_matches("Custom:"); - RepositorySource::Custom(url.to_string()) - } else { - return Err(format!("Unknown repository: {}", repo_str)); - } - } - }; - - repositories.push(repo_source); - } - - // Call the implementation - match lib_search_plugins_in_repositories(query, repositories) { - Ok(results) => { - // Convert results to JSON - serde_json::to_string(&results) - .map_err(|e| format!("Failed to serialize results: {}", e)) - } - Err(e) => Err(e), + crawler.get_plugin_details(plugin_id).await + .map_err(|e| format!("Failed to get plugin details from Modrinth: {}", e)) } }, - "details" => { - // Parse the JSON arguments - let args: serde_json::Value = serde_json::from_str(json_args) - .map_err(|e| format!("Invalid JSON: {}", e))?; - - let plugin_id = args["plugin_id"].as_str() - .ok_or_else(|| "Missing plugin_id parameter".to_string())?; - - let repo_str = args["repository"].as_str() - .ok_or_else(|| "Missing repository parameter".to_string())?; - - // Convert repository to RepositorySource - let repository = match repo_str { - "HangarMC" => RepositorySource::HangarMC, - "SpigotMC" => RepositorySource::SpigotMC, - "Modrinth" => RepositorySource::Modrinth, - "GitHub" => RepositorySource::GitHub, - "BukkitDev" => RepositorySource::BukkitDev, - _ => { - // Handle custom repositories - if repo_str.starts_with("Custom:") { - let url = repo_str.trim_start_matches("Custom:"); - RepositorySource::Custom(url.to_string()) - } else { - return Err(format!("Unknown repository: {}", repo_str)); - } - } - }; - - // Call the implementation - match lib_get_plugin_details_from_repository(plugin_id, repository) { - Ok(details) => { - // Convert details to JSON - serde_json::to_string(&details) - .map_err(|e| format!("Failed to serialize details: {}", e)) - } - Err(e) => Err(e), - } + RepositorySource::GitHub => { + let crawler = crawlers::GitHubCrawler::new(); + crawler.get_plugin_details(plugin_id).await + .map_err(|e| format!("Failed to get plugin details from GitHub: {}", e)) }, - "download" => { - // Parse the JSON arguments - let args: serde_json::Value = serde_json::from_str(json_args) - .map_err(|e| format!("Invalid JSON: {}", e))?; - - let plugin_id = args["plugin_id"].as_str() - .ok_or_else(|| "Missing plugin_id parameter".to_string())?; - - let version = args["version"].as_str() - .ok_or_else(|| "Missing version parameter".to_string())?; - - let destination = args["destination"].as_str() - .ok_or_else(|| "Missing destination parameter".to_string())?; - - let repo_str = args["repository"].as_str() - .ok_or_else(|| "Missing repository parameter".to_string())?; - - // Convert repository to RepositorySource - let repository = match repo_str { - "HangarMC" => RepositorySource::HangarMC, - "SpigotMC" => RepositorySource::SpigotMC, - "Modrinth" => RepositorySource::Modrinth, - "GitHub" => RepositorySource::GitHub, - "BukkitDev" => RepositorySource::BukkitDev, - _ => { - // Handle custom repositories - if repo_str.starts_with("Custom:") { - let url = repo_str.trim_start_matches("Custom:"); - RepositorySource::Custom(url.to_string()) - } else 
{ - return Err(format!("Unknown repository: {}", repo_str)); - } - } - }; - - // Call the implementation - lib_download_plugin_from_repository(plugin_id, version, repository, destination) - }, - _ => Err(format!("Unknown action: {}", action)), + _ => Err(format!("Repository source {:?} not supported for plugin details", repository)) } } -#[cfg_attr(mobile, tauri::mobile_entry_point)] +/// Download a plugin from a repository +pub async fn lib_download_plugin_from_repository( + plugin_id: &str, + version: &str, + repository: RepositorySource, + destination: &str, + server_type: Option<&ServerType> +) -> Result { + match repository { + RepositorySource::HangarMC => { + let crawler = crawlers::HangarCrawler::new(); + crawler.download_plugin(plugin_id, version, Path::new(destination)).await + .map_err(|e| format!("Failed to download plugin from HangarMC: {}", e)) + }, + RepositorySource::SpigotMC => { + let crawler = crawlers::SpigotMCCrawler::new(); + crawler.download_plugin(plugin_id, version, Path::new(destination)).await + .map_err(|e| format!("Failed to download plugin from SpigotMC: {}", e)) + }, + RepositorySource::Modrinth => { + let crawler = crawlers::ModrinthCrawler::new(); + + // Use server type aware version if provided + if let Some(server_type) = server_type { + crawler.download_plugin_with_server_type(plugin_id, version, Path::new(destination), Some(server_type)).await + .map_err(|e| format!("Failed to download plugin from Modrinth: {}", e)) + } else { + crawler.download_plugin(plugin_id, version, Path::new(destination)).await + .map_err(|e| format!("Failed to download plugin from Modrinth: {}", e)) + } + }, + RepositorySource::GitHub => { + let crawler = crawlers::GitHubCrawler::new(); + crawler.download_plugin(plugin_id, version, Path::new(destination)).await + .map_err(|e| format!("Failed to download plugin from GitHub: {}", e)) + }, + _ => Err(format!("Repository source {:?} not supported for downloads", repository)) + } +} + +/// Configure and run the Tauri application pub fn run() { + // Build the Tauri application tauri::Builder::default() .plugin(tauri_plugin_dialog::init()) .invoke_handler(tauri::generate_handler![ - greet, - scan_server_directory, - plugin_proxy + // Plugin discovery commands + scan_server_dir, + scan_server_dir_sync, + + // Plugin repository commands + search_plugins, + get_plugin_details, + + // Update commands + update_plugin, + check_plugin_updates, + check_single_plugin_update_command, + backup_plugin_command, + + // Plugin management commands + download_plugin, + set_plugin_repository, + get_plugin_versions, + load_plugin_data, + save_plugin_data, + + // Utility commands + get_potential_plugin_matches, + compare_versions, + is_plugin_compatible, + greet ]) .run(tauri::generate_context!()) .expect("error while running tauri application"); } - - - diff --git a/src-tauri/src/lib.rs.bak b/src-tauri/src/lib.rs.bak index e190bad..82fc486 100644 --- a/src-tauri/src/lib.rs.bak +++ b/src-tauri/src/lib.rs.bak @@ -1,762 +1,116 @@ // Learn more about Tauri commands at https://tauri.app/develop/calling-rust/ -use serde::{Serialize, Deserialize}; -use std::path::Path; -use std::fs; -use std::io::Read; -use tauri::command; -use zip::ZipArchive; -use yaml_rust::{YamlLoader, Yaml}; -use std::fs::File; -use sha2::{Sha256, Digest}; -use reqwest; + +// Standard library imports use std::error::Error; +use std::fs::{self, File}; +use std::io::{Read, Seek, Write}; +use std::path::{Path, PathBuf}; +use std::env; +use std::sync::Arc; +use std::time::Duration; -// Add the 
crawlers module -mod crawlers; -use crawlers::HangarCrawler; +// Serde for serialization/deserialization +use serde::{Serialize, Deserialize}; -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] -pub enum ServerType { - Paper, - Spigot, - Bukkit, - Vanilla, - Forge, - Fabric, - Velocity, - BungeeCord, - Waterfall, - Unknown, +// Tauri related imports +use tauri::{command, Emitter, AppHandle, Manager, State, Window}; + +// Internal modules +pub mod models; +pub mod services; +pub mod commands; +pub mod crawlers; +pub mod platform_matcher; + +// Import our models +pub use models::server::{ServerType, ServerInfo, ScanResult, ScanProgress}; +pub use models::plugin::{Plugin, PluginMeta}; +pub use models::repository::{RepositorySource, RepositoryPlugin, PotentialPluginMatch}; + +// Import our services +pub use services::http::HttpClient; +pub use services::plugin_scanner::{scan_server_directory, perform_scan, extract_plugin_metadata, calculate_file_hash, is_file_locked}; +pub use services::update_manager::{check_for_plugin_updates, check_single_plugin_update, backup_plugin, replace_plugin, normalize_version, compare_plugin_versions}; + +// Import our commands +pub use commands::plugin_commands::*; +pub use commands::scan_commands::*; + +// Import our crawlers +pub use crawlers::HangarCrawler; +pub use crawlers::SpigotMCCrawler; +pub use crawlers::ModrinthCrawler; +pub use crawlers::GitHubCrawler; + +// Import platform matchers +pub use platform_matcher::{get_compatible_modrinth_loaders, is_version_compatible_with_server}; + +/// Search for plugins in specified repositories +pub async fn lib_search_plugins_in_repositories(query: &str, repositories: Vec) -> Result, String> { + // Implementation details to be moved from original lib.rs + Ok(Vec::new()) // Placeholder } -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ServerInfo { - server_type: ServerType, - minecraft_version: Option, - plugins_directory: String, - plugins_count: usize, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct Plugin { - name: String, - version: String, - latest_version: Option, - description: Option, - authors: Vec, - has_update: bool, - api_version: Option, - main_class: Option, - depend: Option>, - soft_depend: Option>, - load_before: Option>, - commands: Option, - permissions: Option, - file_path: String, - file_hash: String, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct PluginMeta { - pub name: String, - pub version: String, - pub description: Option, - pub authors: Vec, - pub api_version: Option, - pub main_class: Option, - pub depend: Option>, - pub soft_depend: Option>, - pub load_before: Option>, - pub commands: Option, - pub permissions: Option, - pub file_path: String, - pub file_size: u64, - pub file_hash: String, -} - -/// Calculates SHA-256 hash for a given file path -pub fn calculate_file_hash(file_path: &str) -> Result { - let mut file = File::open(file_path).map_err(|e| format!("Failed to open file for hashing: {}", e))?; - let mut hasher = Sha256::new(); - let mut buffer = [0; 1024]; - - loop { - let bytes_read = file.read(&mut buffer).map_err(|e| format!("Failed to read file for hashing: {}", e))?; - if bytes_read == 0 { - break; - } - hasher.update(&buffer[..bytes_read]); - } - - let hash = hasher.finalize(); - Ok(format!("{:x}", hash)) -} - -/// Extract metadata from a plugin.yml file inside a JAR -fn extract_plugin_metadata(jar_path: &Path) -> Result { - let file = fs::File::open(jar_path) - .map_err(|e| format!("Failed to open JAR file: {}", e))?; - - 
let file_size = file.metadata() - .map_err(|e| format!("Failed to read file metadata: {}", e))? - .len(); - - let mut archive = ZipArchive::new(file) - .map_err(|e| format!("Invalid JAR file: {}", e))?; - - // Try to find and read plugin.yml or bungee.yml - let yaml_content = match read_yaml_from_archive(&mut archive, "plugin.yml") { - Ok(content) => content, - Err(_) => match read_yaml_from_archive(&mut archive, "bungee.yml") { - Ok(content) => content, - Err(_) => { - // If no plugin metadata file is found, try to infer from filename - let filename = jar_path.file_name() - .and_then(|n| n.to_str()) - .unwrap_or("unknown.jar"); - - // Extract name and version from filename (e.g., "WorldEdit-7.2.8.jar" → name: "WorldEdit", version: "7.2.8") - let mut parts: Vec<&str> = filename.trim_end_matches(".jar").split('-').collect(); - let version = if parts.len() > 1 { - parts.pop().unwrap_or("1.0.0").to_string() - } else { - "1.0.0".to_string() - }; - - let name = parts.join("-"); - - return Ok(PluginMeta { - name, - version, - description: None, - authors: Vec::new(), - api_version: None, - main_class: None, - depend: None, - soft_depend: None, - load_before: None, - commands: None, - permissions: None, - file_path: jar_path.to_string_lossy().to_string(), - file_size, - file_hash: calculate_file_hash(jar_path.to_str().unwrap_or("unknown.jar")).unwrap_or_else(|_| "unknown".to_string()), - }); - } - } - }; - - // Parse the YAML content - let docs = match YamlLoader::load_from_str(&yaml_content) { - Ok(docs) => docs, - Err(e) => { - println!("Failed to parse plugin YAML: {}", e); - return fallback_plugin_meta(jar_path, file_size); - } - }; - - if docs.is_empty() { - return fallback_plugin_meta(jar_path, file_size); - } - - let doc = &docs[0]; - - // Extract basic metadata with fallbacks for missing fields - let name = yaml_str_with_fallback(doc, "name", jar_path); - let version = yaml_str_with_fallback(doc, "version", jar_path); - - // Extract optional fields - let description = yaml_str_opt(doc, "description"); - - // Handle authors (can be a single string or an array) - let authors = match &doc["authors"] { - Yaml::Array(arr) => { - arr.iter() - .filter_map(|a| a.as_str().map(|s| s.to_string())) - .collect() - }, - Yaml::String(s) => vec![s.clone()], - _ => { - // Fallback to 'author' field which is sometimes used - match &doc["author"] { - Yaml::String(s) => vec![s.clone()], - _ => Vec::new(), - } - } - }; - - // Extract other optional metadata - let api_version = yaml_str_opt(doc, "api-version"); - let main_class = yaml_str_opt(doc, "main"); - - // Handle dependency lists - let depend = yaml_str_array(doc, "depend"); - let soft_depend = yaml_str_array(doc, "softdepend"); - let load_before = yaml_str_array(doc, "loadbefore"); - - // Handle complex structures as generic JSON values - let commands = match &doc["commands"] { - Yaml::Hash(_) => { - Some(serde_json::Value::String("Commands data present".to_string())) - }, - _ => None - }; - - let permissions = match &doc["permissions"] { - Yaml::Hash(_) => { - Some(serde_json::Value::String("Permissions data present".to_string())) - }, - _ => None - }; - - // Calculate the file hash - let file_hash = calculate_file_hash(jar_path.to_str().unwrap_or("unknown.jar")).unwrap_or_else(|_| "unknown".to_string()); - - Ok(PluginMeta { - name, - version, - description, - authors, - api_version, - main_class, - depend, - soft_depend, - load_before, - commands, - permissions, - file_path: jar_path.to_string_lossy().to_string(), - file_size, - file_hash, - }) -} - 
-// Helper function to read a YAML file from the ZIP archive -fn read_yaml_from_archive(archive: &mut ZipArchive, file_name: &str) -> Result { - match archive.by_name(file_name) { - Ok(mut file) => { - let mut contents = String::new(); - file.read_to_string(&mut contents) - .map_err(|e| format!("Failed to read {}: {}", file_name, e))?; - Ok(contents) - }, - Err(e) => Err(format!("Failed to find {}: {}", file_name, e)) - } -} - -// Helper function to create plugin metadata with fallback values -fn fallback_plugin_meta(jar_path: &Path, file_size: u64) -> Result { - let filename = jar_path.file_name() - .and_then(|n| n.to_str()) - .unwrap_or("unknown.jar"); - - // Extract name and version from filename (e.g., "WorldEdit-7.2.8.jar" → name: "WorldEdit", version: "7.2.8") - let mut parts: Vec<&str> = filename.trim_end_matches(".jar").split('-').collect(); - let version = if parts.len() > 1 { - parts.pop().unwrap_or("1.0.0").to_string() - } else { - "1.0.0".to_string() - }; - - let name = parts.join("-"); - - Ok(PluginMeta { - name, - version, - description: None, - authors: Vec::new(), - api_version: None, - main_class: None, - depend: None, - soft_depend: None, - load_before: None, - commands: None, - permissions: None, - file_path: jar_path.to_string_lossy().to_string(), - file_size, - file_hash: calculate_file_hash(jar_path.to_str().unwrap_or("unknown.jar")).unwrap_or_else(|_| "unknown".to_string()), - }) -} - -// Extract a string from YAML with fallback to filename -fn yaml_str_with_fallback(yaml: &Yaml, key: &str, jar_path: &Path) -> String { - match yaml[key].as_str() { - Some(s) => s.to_string(), - None => { - // Extract from filename as fallback - let filename = jar_path.file_name() - .and_then(|n| n.to_str()) - .unwrap_or("unknown.jar"); - - if key == "name" { - // Extract name (e.g., "WorldEdit-7.2.8.jar" → "WorldEdit") - let parts: Vec<&str> = filename.trim_end_matches(".jar").split('-').collect(); - parts[0].to_string() - } else if key == "version" { - // Extract version (e.g., "WorldEdit-7.2.8.jar" → "7.2.8") - let parts: Vec<&str> = filename.trim_end_matches(".jar").split('-').collect(); - if parts.len() > 1 { - parts[1].to_string() - } else { - "1.0.0".to_string() - } - } else { - String::new() - } - } - } -} - -fn yaml_str_opt(yaml: &Yaml, key: &str) -> Option { - yaml[key].as_str().map(|s| s.to_string()) -} - -fn yaml_str_array(yaml: &Yaml, key: &str) -> Option> { - match &yaml[key] { - Yaml::Array(arr) => { - let strings: Vec = arr.iter() - .filter_map(|a| a.as_str().map(|s| s.to_string())) - .collect(); - if strings.is_empty() { None } else { Some(strings) } - }, - _ => None - } -} - -/// Detect the server type based on files in the server directory -fn detect_server_type(server_path: &Path) -> ServerType { - // Check for Paper - if server_path.join("cache").join("patched_1.19.2.jar").exists() || - server_path.join("paper.yml").exists() { - return ServerType::Paper; - } - - // Check for Spigot - if server_path.join("spigot.yml").exists() { - return ServerType::Spigot; - } - - // Check for Bukkit - if server_path.join("bukkit.yml").exists() { - return ServerType::Bukkit; - } - - // Check for Forge - if server_path.join("forge-server.jar").exists() || - server_path.join("mods").exists() { - return ServerType::Forge; - } - - // Check for Fabric - if server_path.join("fabric-server-launch.jar").exists() || - (server_path.join("mods").exists() && server_path.join("fabric-server-launcher.properties").exists()) { - return ServerType::Fabric; - } - - // Check for Velocity - if 
server_path.join("velocity.toml").exists() { - return ServerType::Velocity; - } - - // Check for BungeeCord - if server_path.join("BungeeCord.jar").exists() || - server_path.join("config.yml").exists() { - return ServerType::BungeeCord; - } - - // Check for Waterfall - if server_path.join("waterfall.jar").exists() || - server_path.join("waterfall.yml").exists() { - return ServerType::Waterfall; - } - - // Check if it's at least a vanilla server - if server_path.join("server.properties").exists() || - server_path.join("vanilla_server.jar").exists() { - return ServerType::Vanilla; - } - - // If no server type detected - ServerType::Unknown -} - -/// Guess the Minecraft version from various files in the server directory -fn detect_minecraft_version(server_path: &Path, server_type: &ServerType) -> Option { - // Try from version.json if it exists - if let Ok(content) = fs::read_to_string(server_path.join("version.json")) { - if let Ok(json) = serde_json::from_str::(&content) { - if let Some(version) = json["name"].as_str() { - return Some(version.to_string()); - } - } - } - - // Try from the server jar name pattern - if let Ok(entries) = fs::read_dir(server_path) { - for entry in entries { - if let Ok(entry) = entry { - let path = entry.path(); - if path.is_file() && path.extension().map_or(false, |ext| ext == "jar") { - let filename = path.file_name().and_then(|n| n.to_str()).unwrap_or(""); - - // Extract version from various common patterns in jar names - if filename.starts_with("paper-") || - filename.starts_with("spigot-") || - filename.starts_with("craftbukkit-") { - // Pattern: paper-1.19.2.jar, spigot-1.19.2.jar - let parts: Vec<&str> = filename.split('-').collect(); - if parts.len() > 1 { - let version_part = parts[1].trim_end_matches(".jar"); - if version_part.contains('.') { // Basic version format check - return Some(version_part.to_string()); - } - } - } - - // Look for version patterns like minecraft_server.1.19.2.jar - if filename.starts_with("minecraft_server.") { - let version_part = filename - .trim_start_matches("minecraft_server.") - .trim_end_matches(".jar"); - if version_part.contains('.') { - return Some(version_part.to_string()); - } - } - } - } - } - } - - // If server type is proxy, look in config files - if server_type == &ServerType::BungeeCord || - server_type == &ServerType::Waterfall || - server_type == &ServerType::Velocity { - // Velocity uses TOML, others use YAML - if server_type == &ServerType::Velocity { - if let Ok(content) = fs::read_to_string(server_path.join("velocity.toml")) { - // Very basic TOML parsing just for this field - for line in content.lines() { - if line.contains("minecraft-version") { - if let Some(version) = line.split('=').nth(1) { - return Some(version.trim().trim_matches('"').to_string()); - } - } - } - } - } else { - // Try to parse config.yml for BungeeCord/Waterfall - if let Ok(content) = fs::read_to_string(server_path.join("config.yml")) { - if let Ok(docs) = YamlLoader::load_from_str(&content) { - if !docs.is_empty() { - let doc = &docs[0]; - if let Some(version) = doc["minecraft_version"].as_str() { - return Some(version.to_string()); - } - } - } - } - } - } - - // Default fallback - None -} - -/// Get plugins directory path based on server type -fn get_plugins_directory(server_path: &Path, server_type: &ServerType) -> String { - match server_type { - ServerType::Velocity => server_path.join("plugins").to_string_lossy().to_string(), - ServerType::BungeeCord => server_path.join("plugins").to_string_lossy().to_string(), - 
ServerType::Waterfall => server_path.join("plugins").to_string_lossy().to_string(), - _ => server_path.join("plugins").to_string_lossy().to_string(), - } -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct ScanResult { - server_info: ServerInfo, - plugins: Vec, -} - -#[command] -fn scan_server_directory(path: &str) -> Result { - let server_path = Path::new(path); - - if !server_path.exists() { - return Err(format!("Server path does not exist: {}", path)); - } - - // Detect server type and version - let server_type = detect_server_type(server_path); - let minecraft_version = detect_minecraft_version(server_path, &server_type); - - println!("Detected server type: {:?}", server_type); - if let Some(version) = &minecraft_version { - println!("Detected Minecraft version: {}", version); - } - - // Determine plugins directory based on server type - let plugins_dir_str = get_plugins_directory(server_path, &server_type); - let plugins_dir = Path::new(&plugins_dir_str); - - if !plugins_dir.exists() { - return Err(format!("Plugins directory not found at: {}", plugins_dir.display())); - } - - // Scan for JAR files in the plugins directory - let mut plugins = Vec::new(); - - match fs::read_dir(&plugins_dir) { - Ok(entries) => { - for entry in entries { - if let Ok(entry) = entry { - let path = entry.path(); - - // Check if this is a JAR file - if path.is_file() && path.extension().map_or(false, |ext| ext.eq_ignore_ascii_case("jar")) { - match extract_plugin_metadata(&path) { - Ok(meta) => { - // Create a Plugin from PluginMeta - let plugin = Plugin { - name: meta.name, - version: meta.version, - latest_version: None, // Will be filled by update checker - description: meta.description, - authors: meta.authors, - has_update: false, // Will be determined by update checker - api_version: meta.api_version, - main_class: meta.main_class, - depend: meta.depend, - soft_depend: meta.soft_depend, - load_before: meta.load_before, - commands: meta.commands, - permissions: meta.permissions, - file_path: meta.file_path, - file_hash: meta.file_hash, - }; - - plugins.push(plugin); - }, - Err(e) => { - // Log error but continue with other plugins - println!("Error reading plugin from {}: {}", path.display(), e); - } - } - } - } - } - }, - Err(e) => { - return Err(format!("Failed to read plugins directory: {}", e)); - } - } - - // If no plugins were found, fall back to mock data for testing - if plugins.is_empty() && server_type == ServerType::Unknown { - // For testing only - in production, we'd just return an empty list - plugins = vec![ - Plugin { - name: "EssentialsX".to_string(), - version: "2.19.0".to_string(), - latest_version: Some("2.20.0".to_string()), - description: Some("Essential server tools for Minecraft".to_string()), - authors: vec!["md_5".to_string(), "SupaHam".to_string()], - has_update: true, - api_version: Some("1.13".to_string()), - main_class: Some("com.earth2me.essentials.Essentials".to_string()), - depend: None, - soft_depend: None, - load_before: None, - commands: None, - permissions: None, - file_path: "EssentialsX.jar".to_string(), - file_hash: calculate_file_hash("EssentialsX.jar").unwrap_or_else(|_| "unknown".to_string()), - }, - Plugin { - name: "WorldEdit".to_string(), - version: "7.2.8".to_string(), - latest_version: Some("7.2.8".to_string()), - description: Some("In-game map editor".to_string()), - authors: vec!["sk89q".to_string(), "wizjany".to_string()], - has_update: false, - api_version: Some("1.13".to_string()), - main_class: 
Some("com.sk89q.worldedit.bukkit.WorldEditPlugin".to_string()), - depend: None, - soft_depend: None, - load_before: None, - commands: None, - permissions: None, - file_path: "WorldEdit.jar".to_string(), - file_hash: calculate_file_hash("WorldEdit.jar").unwrap_or_else(|_| "unknown".to_string()), - }, - ]; - } - - // Create server info - let server_info = ServerInfo { - server_type, - minecraft_version, - plugins_directory: plugins_dir_str, - plugins_count: plugins.len(), - }; - - Ok(ScanResult { - server_info, - plugins, - }) -} - -#[command] -fn greet(name: &str) -> String { - format!("Hello, {}! You've been greeted from Rust!", name) -} - -// Web Crawler Module -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] -pub enum RepositorySource { - HangarMC, - SpigotMC, - Modrinth, - GitHub, - BukkitDev, - Custom(String), -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct RepositoryPlugin { - id: String, // Unique identifier in the repository - name: String, // Plugin name - version: String, // Latest version - description: Option, - authors: Vec, - download_url: String, // URL to download the plugin +/// Get plugin details from a repository +pub async fn lib_get_plugin_details_from_repository( + plugin_id: &str, repository: RepositorySource, - page_url: String, // URL to the plugin page - download_count: Option, - last_updated: Option, - icon_url: Option, - minecraft_versions: Vec, - categories: Vec, - rating: Option, - file_size: Option, - file_hash: Option, + server_type: Option<&ServerType> +) -> Result { + // Implementation details to be moved from original lib.rs + Ok(RepositoryPlugin { + id: plugin_id.to_string(), + name: "Example Plugin".to_string(), + version: "1.0.0".to_string(), + description: Some("This is a placeholder".to_string()), + authors: vec!["Example Author".to_string()], + download_url: "https://example.com".to_string(), + repository: repository, + page_url: "https://example.com".to_string(), + download_count: Some(0), + last_updated: Some("2023-01-01".to_string()), + icon_url: None, + minecraft_versions: vec!["1.19.2".to_string()], + categories: vec![], + rating: None, + file_size: None, + file_hash: None, + changelog: None, + }) // Placeholder } -// Trait for implementing different repository crawlers -pub trait RepositoryCrawler { - fn search(&self, query: &str) -> Result, Box>; - fn get_plugin_details(&self, plugin_id: &str) -> Result>; - fn get_plugin_versions(&self, plugin_id: &str) -> Result, Box>; - fn download_plugin(&self, plugin_id: &str, version: &str, destination: &Path) -> Result>; - fn get_repository_name(&self) -> RepositorySource; +/// Download a plugin from a repository +pub async fn lib_download_plugin_from_repository( + plugin_id: &str, + version: &str, + repository: RepositorySource, + destination: &str, + server_type: Option<&ServerType> +) -> Result { + // Implementation details to be moved from original lib.rs + Ok(destination.to_string()) // Placeholder } -// Basic HTTP client for crawler implementations -pub struct HttpClient { - client: reqwest::blocking::Client, +/// Search for plugin variations +pub async fn search_with_variations(plugin_name: &str, repositories: &[RepositorySource]) -> Result, String> { + // Implementation details to be moved from original lib.rs + Ok(Vec::new()) // Placeholder } -impl HttpClient { - pub fn new() -> Self { - let client = reqwest::blocking::Client::builder() - .user_agent("PlugSnatcher/0.1.0") - .build() - .unwrap_or_else(|_| reqwest::blocking::Client::new()); - - HttpClient { client } - 
} - - pub fn get(&self, url: &str) -> Result> { - let response = self.client.get(url).send()?; - - if response.status().is_success() { - Ok(response.text()?) - } else { - Err(format!("HTTP error: {}", response.status()).into()) - } - } - - pub fn download(&self, url: &str, destination: &Path) -> Result<(), Box> { - let response = self.client.get(url).send()?; - - if response.status().is_success() { - let bytes = response.bytes()?; - fs::write(destination, bytes)?; - Ok(()) - } else { - Err(format!("Download failed: {}", response.status()).into()) - } - } -} - -// Helper function to get crawler for a specific repository -fn get_crawler(repository: &RepositorySource) -> Option> { - match repository { - RepositorySource::HangarMC => Some(Box::new(HangarCrawler::new())), - // Other repositories will be implemented later - _ => None, - } -} - -// Command to search for plugins in specified repositories -#[command] -pub fn search_repository_plugins(query: &str, repositories: Vec) -> Result, String> { - let mut results: Vec = Vec::new(); - - // Try each requested repository - for repo in repositories { - if let Some(crawler) = get_crawler(&repo) { - match crawler.search(query) { - Ok(repo_results) => { - results.extend(repo_results); - }, - Err(e) => { - println!("Error searching in repository {:?}: {}", repo, e); - // Continue with other repositories even if one fails - } - } - } else { - println!("Repository crawler for {:?} not implemented yet", repo); - } - } - - if results.is_empty() { - Err("No plugins found or repositories not implemented yet".to_string()) - } else { - Ok(results) - } -} - -// Command to get plugin details from a specific repository -#[command] -pub fn get_repository_plugin_details(plugin_id: &str, repository: RepositorySource) -> Result { - if let Some(crawler) = get_crawler(&repository) { - crawler.get_plugin_details(plugin_id).map_err(|e| e.to_string()) - } else { - Err(format!("Repository crawler for {:?} not implemented yet", repository)) - } -} - -// Command to download a plugin from a repository -#[command] -pub fn download_repository_plugin(plugin_id: &str, version: &str, repository: RepositorySource, destination: &str) -> Result { - if let Some(crawler) = get_crawler(&repository) { - crawler - .download_plugin(plugin_id, version, Path::new(destination)) - .map_err(|e| e.to_string()) - } else { - Err(format!("Repository crawler for {:?} not implemented yet", repository)) - } -} - -#[cfg_attr(mobile, tauri::mobile_entry_point)] +/// Configure and run the Tauri application pub fn run() { + // Build the Tauri application tauri::Builder::default() - .plugin(tauri_plugin_dialog::init()) .invoke_handler(tauri::generate_handler![ - greet, - scan_server_directory, - search_repository_plugins, - get_repository_plugin_details, - download_repository_plugin + // Plugin discovery commands + scan_server_dir, + scan_server_dir_sync, + + // Plugin repository commands + search_plugins, + get_plugin_details, + + // Other commands to be added ]) .run(tauri::generate_context!()) .expect("error while running tauri application"); diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs index 7f27324..96bf542 100644 --- a/src-tauri/src/main.rs +++ b/src-tauri/src/main.rs @@ -2,5 +2,5 @@ #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] fn main() { - plugsnatcher_lib::run() + plugsnatcher_lib::run(); } diff --git a/src-tauri/src/models/mod.rs b/src-tauri/src/models/mod.rs new file mode 100644 index 0000000..9901d7f --- /dev/null +++ b/src-tauri/src/models/mod.rs @@ -0,0 
+1,7 @@ +pub mod plugin; +pub mod server; +pub mod repository; + +pub use plugin::{Plugin, PluginMeta}; +pub use server::{ServerInfo, ServerType, ScanResult, ScanProgress}; +pub use repository::{RepositorySource, RepositoryPlugin, PotentialPluginMatch}; \ No newline at end of file diff --git a/src-tauri/src/models/plugin.rs b/src-tauri/src/models/plugin.rs new file mode 100644 index 0000000..b798b0f --- /dev/null +++ b/src-tauri/src/models/plugin.rs @@ -0,0 +1,49 @@ +use serde::{Serialize, Deserialize}; + +use super::repository::RepositorySource; + +/// Represents a Minecraft plugin with detailed information +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct Plugin { + pub name: String, + pub version: String, + pub latest_version: Option, + pub description: Option, + pub authors: Vec, + pub website: Option, + pub has_update: bool, + pub api_version: Option, + pub main_class: Option, + pub depend: Option>, + pub soft_depend: Option>, + pub load_before: Option>, + pub commands: Option, + pub permissions: Option, + pub file_path: String, + pub file_hash: String, + pub changelog: Option, // Changelog for the latest version + // Fields for persistence + pub repository_source: Option, + pub repository_id: Option, + pub repository_url: Option, // URL to the plugin page on the repository +} + +/// Raw metadata extracted from a plugin.yml file +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct PluginMeta { + pub name: String, + pub version: String, + pub description: Option, + pub authors: Vec, + pub website: Option, + pub api_version: Option, + pub main_class: Option, + pub depend: Option>, + pub soft_depend: Option>, + pub load_before: Option>, + pub commands: Option, + pub permissions: Option, + pub file_path: String, + pub file_size: u64, + pub file_hash: String, +} \ No newline at end of file diff --git a/src-tauri/src/models/repository.rs b/src-tauri/src/models/repository.rs new file mode 100644 index 0000000..04c6169 --- /dev/null +++ b/src-tauri/src/models/repository.rs @@ -0,0 +1,95 @@ +use serde::{Serialize, Deserialize}; +use std::error::Error; +use std::path::Path; +use std::any::Any; + +/// Represents a source of plugins +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +pub enum RepositorySource { + HangarMC, + SpigotMC, + Modrinth, + GitHub, + BukkitDev, + Custom(String), +} + +/// Represents a plugin from a repository +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct RepositoryPlugin { + pub id: String, // Unique identifier in the repository + pub name: String, // Plugin name + pub version: String, // Latest version + pub description: Option, + pub authors: Vec, + pub download_url: String, // URL to download the plugin + pub repository: RepositorySource, + pub page_url: String, // URL to the plugin page + pub download_count: Option, + pub last_updated: Option, + pub icon_url: Option, + pub minecraft_versions: Vec, + pub categories: Vec, + pub rating: Option, + pub file_size: Option, + pub file_hash: Option, + pub changelog: Option, // Changelog information for latest version +} + +/// Trait for crawler implementors with object safety +pub trait RepositoryCrawlerBase { + fn as_any(&self) -> &dyn Any; + fn get_repository_name(&self) -> RepositorySource; +} + +/// Repository crawler search functionality +pub trait RepositoryCrawlerSearch: RepositoryCrawlerBase { + fn search<'a>(&'a self, query: &'a str) -> std::pin::Pin, Box>> + Send + 'a>>; +} + +/// Repository crawler details functionality +pub trait RepositoryCrawlerDetails: 
RepositoryCrawlerBase { + fn get_plugin_details<'a>(&'a self, plugin_id: &'a str) -> std::pin::Pin>> + Send + 'a>>; +} + +/// Repository crawler versions functionality +pub trait RepositoryCrawlerVersions: RepositoryCrawlerBase { + fn get_plugin_versions<'a>(&'a self, plugin_id: &'a str) -> std::pin::Pin, Box>> + Send + 'a>>; +} + +/// Repository crawler download functionality +pub trait RepositoryCrawlerDownload: RepositoryCrawlerBase { + fn download_plugin<'a>(&'a self, plugin_id: &'a str, version: &'a str, destination: &'a Path) -> std::pin::Pin>> + Send + 'a>>; +} + +/// Represents a potential match from repositories for an installed plugin +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct PotentialPluginMatch { + pub name: String, + pub version: String, + pub repository: RepositorySource, + pub repository_id: String, + pub page_url: String, + pub description: Option, + pub minecraft_versions: Vec, + pub download_count: Option, +} + +/// Represents the result of a single plugin update operation +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct SingleUpdateResult { + pub original_file_path: String, + pub plugin: Option, // None if error occurred + pub error: Option, +} + +/// Represents the progress of bulk plugin updates +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct BulkUpdateProgress { + pub processed: usize, + pub total: usize, + pub current_plugin_name: String, +} + +// Import required for SingleUpdateResult +use super::plugin::Plugin; \ No newline at end of file diff --git a/src-tauri/src/models/server.rs b/src-tauri/src/models/server.rs new file mode 100644 index 0000000..ff82e33 --- /dev/null +++ b/src-tauri/src/models/server.rs @@ -0,0 +1,42 @@ +use serde::{Serialize, Deserialize}; + +use super::plugin::Plugin; + +/// Represents the type of Minecraft server +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +pub enum ServerType { + Paper, + Spigot, + Bukkit, + Vanilla, + Forge, + Fabric, + Velocity, + BungeeCord, + Waterfall, + Unknown, +} + +/// Contains information about a Minecraft server +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ServerInfo { + pub server_type: ServerType, + pub minecraft_version: Option, + pub plugins_directory: String, + pub plugins_count: usize, +} + +/// Result of a server scan operation +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ScanResult { + pub server_info: ServerInfo, + pub plugins: Vec, +} + +/// Progress information during a server scan +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ScanProgress { + pub processed: usize, + pub total: usize, + pub current_file: String, +} \ No newline at end of file diff --git a/src-tauri/src/platform_matcher.rs b/src-tauri/src/platform_matcher.rs new file mode 100644 index 0000000..9598435 --- /dev/null +++ b/src-tauri/src/platform_matcher.rs @@ -0,0 +1,117 @@ +use crate::ServerType; + +// Known platform/loader mappings +pub enum PlatformLoader { + Bukkit, // Bukkit API + Spigot, // Spigot API (Bukkit compatible) + Paper, // Paper API (Spigot compatible) + Forge, // Forge API + NeoForge, // NeoForge API (Forge fork) + Fabric, // Fabric API + Quilt, // Quilt API (Fabric compatible) + Velocity, // Velocity proxy + BungeeCord, // BungeeCord proxy + Waterfall, // Waterfall proxy (BungeeCord fork) + Sponge, // Sponge API + Unknown, // Unknown platform +} + +// Maps the ServerType to a PlatformLoader +pub fn server_type_to_platform_loader(server_type: &ServerType) -> PlatformLoader { + match server_type { + ServerType::Paper 
=> PlatformLoader::Paper, + ServerType::Spigot => PlatformLoader::Spigot, + ServerType::Bukkit => PlatformLoader::Bukkit, + ServerType::Forge => PlatformLoader::Forge, + ServerType::Fabric => PlatformLoader::Fabric, + ServerType::Velocity => PlatformLoader::Velocity, + ServerType::BungeeCord => PlatformLoader::BungeeCord, + ServerType::Waterfall => PlatformLoader::Waterfall, + ServerType::Vanilla => PlatformLoader::Unknown, // No specific loader for vanilla + ServerType::Unknown => PlatformLoader::Unknown, + } +} + +// Standard Modrinth loader strings +pub fn get_modrinth_loader_strings(platform: &PlatformLoader) -> Vec { + match platform { + PlatformLoader::Bukkit => vec!["bukkit".to_string(), "spigot".to_string(), "paper".to_string()], + PlatformLoader::Spigot => vec!["spigot".to_string(), "paper".to_string()], + PlatformLoader::Paper => vec!["paper".to_string()], + PlatformLoader::Forge => vec!["forge".to_string()], + PlatformLoader::NeoForge => vec!["neoforge".to_string()], + PlatformLoader::Fabric => vec!["fabric".to_string()], + PlatformLoader::Quilt => vec!["quilt".to_string(), "fabric".to_string()], + PlatformLoader::Velocity => vec!["velocity".to_string()], + PlatformLoader::BungeeCord => vec!["bungeecord".to_string(), "waterfall".to_string()], + PlatformLoader::Waterfall => vec!["waterfall".to_string()], + PlatformLoader::Sponge => vec!["sponge".to_string()], + PlatformLoader::Unknown => vec![], // No specific loader strings + } +} + +// Compatible Modrinth loader strings (what the server can load) +pub fn get_compatible_modrinth_loaders(server_type: &ServerType) -> Vec { + let platform = server_type_to_platform_loader(server_type); + + match server_type { + ServerType::Paper => { + // Paper can load Paper, Spigot, and Bukkit plugins + vec!["paper".to_string(), "spigot".to_string(), "bukkit".to_string()] + }, + ServerType::Spigot => { + // Spigot can load Spigot and Bukkit plugins + vec!["spigot".to_string(), "bukkit".to_string()] + }, + ServerType::Bukkit => { + // Bukkit can only load Bukkit plugins + vec!["bukkit".to_string()] + }, + ServerType::Forge => { + // Forge can load Forge plugins (and maybe Sponge) + vec!["forge".to_string()] + }, + ServerType::Fabric => { + // Fabric can load Fabric plugins + vec!["fabric".to_string()] + }, + ServerType::Velocity => { + // Velocity proxy + vec!["velocity".to_string()] + }, + ServerType::BungeeCord => { + // BungeeCord can load BungeeCord plugins + vec!["bungeecord".to_string()] + }, + ServerType::Waterfall => { + // Waterfall can load Waterfall and BungeeCord plugins + vec!["waterfall".to_string(), "bungeecord".to_string()] + }, + _ => { + // For unknown server types, return an empty list + vec![] + } + } +} + +// Check if a version's loaders are compatible with the server type +pub fn is_version_compatible_with_server(version_loaders: &Vec, server_type: &ServerType) -> bool { + // If no loaders specified, it's possibly a universal plugin, consider it compatible + if version_loaders.is_empty() { + return true; + } + + let compatible_loaders = get_compatible_modrinth_loaders(server_type); + + // If we don't know compatible loaders for this server type, be conservative and return false + if compatible_loaders.is_empty() { + return false; + } + + // Check if any loader in the version matches any compatible loader + version_loaders.iter().any(|loader| { + compatible_loaders.iter().any(|compatible| + loader.to_lowercase() == compatible.to_lowercase() + ) + }) +} \ No newline at end of file diff --git 
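The compatibility rules above lend themselves to small unit tests. Below is a minimal sketch (hypothetical, not part of this patch) that assumes `is_version_compatible_with_server` and `ServerType` are in scope exactly as defined in `platform_matcher.rs`:

```rust
#[cfg(test)]
mod platform_matcher_tests {
    use super::*;
    use crate::ServerType;

    #[test]
    fn paper_accepts_spigot_builds() {
        // Paper's compatible loader list includes "spigot" and "bukkit"
        let loaders = vec!["Spigot".to_string()];
        assert!(is_version_compatible_with_server(&loaders, &ServerType::Paper));
    }

    #[test]
    fn fabric_rejects_forge_only_builds() {
        let loaders = vec!["forge".to_string()];
        assert!(!is_version_compatible_with_server(&loaders, &ServerType::Fabric));
    }

    #[test]
    fn missing_loader_list_is_treated_as_universal() {
        // An empty loader list is considered compatible with any server type
        assert!(is_version_compatible_with_server(&Vec::new(), &ServerType::Unknown));
    }
}
```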
a/src-tauri/src/services/http/client.rs b/src-tauri/src/services/http/client.rs new file mode 100644 index 0000000..a82477b --- /dev/null +++ b/src-tauri/src/services/http/client.rs @@ -0,0 +1,191 @@ +use std::env; +use std::error::Error; +use std::path::Path; +use std::time::Duration; +use tokio::time::sleep; + +use cached::proc_macro::cached; +use reqwest; +use reqwest::header::{ + HeaderMap, HeaderValue, USER_AGENT, AUTHORIZATION, ACCEPT, ACCEPT_LANGUAGE, CONNECTION, RETRY_AFTER, +}; +use reqwest::StatusCode; + +/// HTTP Client for making requests to external services +pub struct HttpClient { + client: reqwest::Client, + github_token: Option, +} + +/// Cache HTTP GET requests to avoid hitting rate limits +#[cached( + time = 3600, // Cache for 1 hour + size = 100, // Maximum number of cached responses + key = "String", + convert = r#"{ url.clone() }"#, + result = true +)] +async fn cached_http_get(url: String, client: reqwest::Client, token: Option) -> Result> { + const MAX_RETRIES: u32 = 3; + const BASE_RETRY_DELAY_MS: u64 = 1000; + + let mut headers = HeaderMap::new(); + + // Set common headers for all requests + headers.insert(USER_AGENT, HeaderValue::from_static("PlugSnatcherApp/0.1.0")); + headers.insert(ACCEPT, HeaderValue::from_static("application/json")); + headers.insert(ACCEPT_LANGUAGE, HeaderValue::from_static("en-US,en;q=0.5")); + headers.insert(CONNECTION, HeaderValue::from_static("keep-alive")); + + // Add authorization header if token is provided + if let Some(token_value) = token { + if url.contains("github.com") { + headers.insert( + AUTHORIZATION, + HeaderValue::from_str(&format!("token {}", token_value)).unwrap_or_else(|_| HeaderValue::from_static("")), + ); + } + } + + let mut retry_count = 0; + let mut retry_delay = BASE_RETRY_DELAY_MS; + + loop { + let response = client + .get(&url) + .headers(headers.clone()) + .send() + .await; + + match response { + Ok(resp) => { + // Handle rate limiting + if resp.status() == StatusCode::TOO_MANY_REQUESTS { + if retry_count >= MAX_RETRIES { + return Err(format!("Rate limit exceeded for {}", url).into()); + } + + // Check for Retry-After header or use exponential backoff + let retry_after = resp.headers() + .get(RETRY_AFTER) + .and_then(|val| val.to_str().ok()) + .and_then(|val| val.parse::().ok()) + .map(|secs| secs * 1000) // Convert header seconds to ms + .unwrap_or_else(|| { + // If no Retry-After header, just use the current exponential delay + retry_delay + }); + + // Exponential backoff calculation for the *next* potential retry + retry_delay *= 2; + println!("Rate limited for {}. 
Retrying after {} ms...", url, retry_after); + sleep(Duration::from_millis(retry_after)).await; + retry_count += 1; + continue; + } + + // Handle other responses + if resp.status().is_success() { + return Ok(resp.text().await?); + } else { + return Err(format!( + "Request to {} failed with status code: {}", + url, + resp.status() + ).into()); + } + }, + Err(err) => { + if retry_count >= MAX_RETRIES { + return Err(Box::new(err)); + } + sleep(Duration::from_millis(retry_delay)).await; + retry_delay *= 2; + retry_count += 1; + } + } + } +} + +/// Helper function to parse Modrinth's rate limit format +fn parse_modrinth_ratelimit(error_body: &str) -> Option { + if let Ok(json) = serde_json::from_str::(error_body) { + if let Some(retry_after) = json.get("retry_after") { + if let Some(seconds) = retry_after.as_u64() { + return Some(seconds * 1000); + } + } + } + None +} + +impl HttpClient { + /// Create a new HTTP client + pub fn new() -> Self { + let client = reqwest::ClientBuilder::new() + .timeout(Duration::from_secs(30)) + .connect_timeout(Duration::from_secs(5)) + .pool_idle_timeout(Duration::from_secs(90)) + .build() + .unwrap_or_else(|_| reqwest::Client::new()); + + // Try to get GitHub token from environment variable + let github_token = match env::var("GITHUB_API_TOKEN") { + Ok(token) if !token.is_empty() => Some(token), + _ => None, + }; + + HttpClient { + client, + github_token, + } + } + + /// Perform an HTTP GET request + pub async fn get(&self, url: &str) -> Result> { + cached_http_get(url.to_string(), self.client.clone(), self.github_token.clone()).await + } + + /// Download a file from a URL to the specified destination + pub async fn download(&self, url: &str, destination: &Path) -> Result<(), Box> { + // Create a client with a larger timeout for downloads + let client = reqwest::ClientBuilder::new() + .timeout(Duration::from_secs(180)) // Longer timeout for downloads + .build()?; + + let mut headers = HeaderMap::new(); + headers.insert(USER_AGENT, HeaderValue::from_static("PlugSnatcherApp/0.1.0")); + + // Add GitHub token if URL is GitHub and we have a token + if url.contains("github.com") && self.github_token.is_some() { + headers.insert( + AUTHORIZATION, + HeaderValue::from_str(&format!("token {}", self.github_token.as_ref().unwrap())) + .unwrap_or_else(|_| HeaderValue::from_static("")), + ); + } + + // Get response + let response = client.get(url).headers(headers).send().await?; + + // Check if request was successful + if !response.status().is_success() { + return Err(format!("Failed to download: Status {}", response.status()).into()); + } + + // Get response bytes + let bytes = response.bytes().await?; + + // Create parent directories if needed + if let Some(parent) = destination.parent() { + if !parent.exists() { + std::fs::create_dir_all(parent)?; + } + } + + // Write to file + std::fs::write(destination, bytes)?; + + Ok(()) + } +} \ No newline at end of file diff --git a/src-tauri/src/services/http/mod.rs b/src-tauri/src/services/http/mod.rs new file mode 100644 index 0000000..cd72d7b --- /dev/null +++ b/src-tauri/src/services/http/mod.rs @@ -0,0 +1,3 @@ +mod client; + +pub use client::HttpClient; \ No newline at end of file diff --git a/src-tauri/src/services/mod.rs b/src-tauri/src/services/mod.rs new file mode 100644 index 0000000..745947d --- /dev/null +++ b/src-tauri/src/services/mod.rs @@ -0,0 +1,8 @@ +pub mod http; +pub mod plugin_scanner; +pub mod update_manager; + +// Re-export important services +pub use http::HttpClient; +pub use 
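Because `cached_http_get` is keyed on the URL, a second lookup for the same URL within the one-hour cache window should be answered from the memoized response rather than a new network request. A rough usage sketch (hypothetical; the URL is only an example and `HttpClient` is assumed to be in scope from `services::http`):

```rust
// Hypothetical sketch: the second call hits the cache, not the network.
async fn lookup_twice() -> Result<(), Box<dyn std::error::Error>> {
    let client = HttpClient::new();
    let first = client.get("https://example.com/api/plugin.json").await?;
    let second = client.get("https://example.com/api/plugin.json").await?;
    assert_eq!(first, second); // identical body served from the cached entry
    Ok(())
}
```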
plugin_scanner::{scan_server_directory, perform_scan, extract_plugin_metadata, calculate_file_hash, is_file_locked}; +pub use update_manager::{check_for_plugin_updates, check_single_plugin_update, backup_plugin, replace_plugin, normalize_version, compare_plugin_versions}; \ No newline at end of file diff --git a/src-tauri/src/services/plugin_scanner/file_utils.rs b/src-tauri/src/services/plugin_scanner/file_utils.rs new file mode 100644 index 0000000..31f507b --- /dev/null +++ b/src-tauri/src/services/plugin_scanner/file_utils.rs @@ -0,0 +1,58 @@ +use std::fs::{self, File}; +use std::io::Read; +use std::path::Path; +use sha2::{Sha256, Digest}; + +/// Calculate SHA-256 hash of a file for identification and verification +pub fn calculate_file_hash(file_path: &str) -> Result { + // Open the file + let mut file = match File::open(file_path) { + Ok(file) => file, + Err(e) => return Err(format!("Failed to open file for hashing: {}", e)), + }; + + // Read the file content + let mut buffer = Vec::new(); + if let Err(e) = file.read_to_end(&mut buffer) { + return Err(format!("Failed to read file for hashing: {}", e)); + } + + // Calculate the SHA-256 hash + let mut hasher = Sha256::new(); + hasher.update(&buffer); + let result = hasher.finalize(); + + // Convert the hash to a hex string + let hash_string = format!("{:x}", result); + Ok(hash_string) +} + +/// Check if a file is currently locked by another process +pub fn is_file_locked(file_path: &str) -> bool { + // Try to open the file with write permissions to check if locked + match fs::OpenOptions::new() + .write(true) + .open(file_path) + { + Ok(_) => false, // File can be opened for writing, so it's not locked + Err(_) => { + // If we can't open for writing, try to check if it exists + // This helps determine if the error is due to file being locked + // or just not existing + Path::new(file_path).exists() + } + } +} + +/// Read YAML content from a ZIP archive +pub fn read_yaml_from_archive(archive: &mut zip::ZipArchive, file_name: &str) -> Result { + match archive.by_name(file_name) { + Ok(mut file) => { + let mut contents = String::new(); + file.read_to_string(&mut contents) + .map_err(|e| format!("Failed to read {}: {}", file_name, e))?; + Ok(contents) + }, + Err(e) => Err(format!("Failed to find {}: {}", file_name, e)) + } +} \ No newline at end of file diff --git a/src-tauri/src/services/plugin_scanner/metadata_extractor.rs b/src-tauri/src/services/plugin_scanner/metadata_extractor.rs new file mode 100644 index 0000000..7727879 --- /dev/null +++ b/src-tauri/src/services/plugin_scanner/metadata_extractor.rs @@ -0,0 +1,219 @@ +use std::fs; +use std::path::Path; +use yaml_rust::{YamlLoader, Yaml}; +use zip::ZipArchive; + +use crate::models::plugin::PluginMeta; +use super::file_utils::{calculate_file_hash, read_yaml_from_archive}; + +/// Extract metadata from a plugin JAR file +pub fn extract_plugin_metadata(jar_path: &Path) -> Result { + // Get the file size + let metadata = match fs::metadata(jar_path) { + Ok(meta) => meta, + Err(e) => return Err(format!("Failed to get file metadata: {}", e)), + }; + let file_size = metadata.len(); + + // Open the JAR file + let file = match fs::File::open(jar_path) { + Ok(file) => file, + Err(e) => return Err(format!("Failed to open JAR file: {}", e)), + }; + + // Create a ZIP archive reader + let mut archive = match ZipArchive::new(file) { + Ok(archive) => archive, + Err(e) => return Err(format!("Failed to read JAR as ZIP archive: {}", e)), + }; + + // Try to read plugin.yml + let yaml_content = match 
read_yaml_from_archive(&mut archive, "plugin.yml") { + Ok(content) => content, + Err(_) => { + // If plugin.yml is not found, try bungee.yml for BungeeCord plugins + match read_yaml_from_archive(&mut archive, "bungee.yml") { + Ok(content) => content, + Err(_) => { + // If neither is found, use fallback metadata + return fallback_plugin_meta(jar_path, file_size); + } + } + } + }; + + // Parse the YAML content + let docs = match YamlLoader::load_from_str(&yaml_content) { + Ok(docs) => docs, + Err(e) => return Err(format!("Failed to parse plugin.yml: {}", e)), + }; + + // If there's no document in the YAML, use fallback + if docs.is_empty() { + return fallback_plugin_meta(jar_path, file_size); + } + + let doc = &docs[0]; + + // Extract plugin information + let name = yaml_str_with_fallback(doc, "name", jar_path); + let version = yaml_str_with_fallback(doc, "version", jar_path); + let description = yaml_str_opt(doc, "description"); + let website = yaml_str_opt(doc, "website"); + let api_version = yaml_str_opt(doc, "api-version"); + let main_class = yaml_str_opt(doc, "main"); + + // Extract author/authors + let authors = match &doc["author"] { + Yaml::String(author) => vec![author.clone()], + _ => match &doc["authors"] { + Yaml::Array(authors) => authors + .iter() + .filter_map(|a| match a { + Yaml::String(s) => Some(s.clone()), + _ => None, + }) + .collect(), + Yaml::String(author) => vec![author.clone()], + _ => Vec::new(), + }, + }; + + // Extract dependencies + let depend = yaml_str_array(doc, "depend"); + let soft_depend = yaml_str_array(doc, "softdepend"); + let load_before = yaml_str_array(doc, "loadbefore"); + + // Extract commands and permissions + let commands = match &doc["commands"] { + Yaml::Hash(_) => { + Some(serde_json::Value::String("Commands data present".to_string())) + }, + _ => None + }; + + let permissions = match &doc["permissions"] { + Yaml::Hash(_) => { + Some(serde_json::Value::String("Permissions data present".to_string())) + }, + _ => None + }; + + // Calculate the file hash + let file_hash = calculate_file_hash(jar_path.to_str().unwrap_or("unknown.jar")).unwrap_or_else(|_| "unknown".to_string()); + + Ok(PluginMeta { + name, + version, + description, + authors, + api_version, + main_class, + depend, + soft_depend, + load_before, + commands, + permissions, + file_path: jar_path.to_string_lossy().to_string(), + file_size, + file_hash, + website, + }) +} + +/// Create plugin metadata with fallback values +fn fallback_plugin_meta(jar_path: &Path, file_size: u64) -> Result { + let filename = jar_path.file_name() + .and_then(|n| n.to_str()) + .unwrap_or("unknown.jar"); + + // Extract name and version from filename (e.g., "WorldEdit-7.2.8.jar" → name: "WorldEdit", version: "7.2.8") + let mut parts: Vec<&str> = filename.trim_end_matches(".jar").split('-').collect(); + let version = if parts.len() > 1 { + parts.pop().unwrap_or("1.0.0").to_string() + } else { + "1.0.0".to_string() + }; + + let name = parts.join("-"); + + // Calculate hash + let file_hash = calculate_file_hash(jar_path.to_str().unwrap_or("unknown.jar")).unwrap_or_else(|_| "unknown".to_string()); + + Ok(PluginMeta { + name, + version, + description: None, + authors: Vec::new(), + website: None, + api_version: None, + main_class: None, + depend: None, + soft_depend: None, + load_before: None, + commands: None, + permissions: None, + file_path: jar_path.to_string_lossy().to_string(), + file_size, + file_hash, + }) +} + +/// Helper function to extract a string with fallback +fn yaml_str_with_fallback(yaml: &Yaml, 
key: &str, jar_path: &Path) -> String { + match yaml[key] { + Yaml::String(ref s) => s.clone(), + _ => { + // Fallback to the JAR filename without extension + let filename = jar_path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("unknown") + .trim_end_matches(".jar"); + + if key == "name" { + let name_parts: Vec<&str> = filename.split('-').collect(); + name_parts[0].to_string() + } else if key == "version" { + let version_parts: Vec<&str> = filename.split('-').collect(); + if version_parts.len() > 1 { + version_parts[1].to_string() + } else { + "1.0.0".to_string() + } + } else { + "".to_string() + } + } + } +} + +/// Helper function to extract an optional string +fn yaml_str_opt(yaml: &Yaml, key: &str) -> Option { + match &yaml[key] { + Yaml::String(s) => Some(s.clone()), + _ => None, + } +} + +/// Helper function to extract an array of strings +fn yaml_str_array(yaml: &Yaml, key: &str) -> Option> { + match &yaml[key] { + Yaml::Array(arr) => { + let string_arr: Vec = arr + .iter() + .filter_map(|item| match item { + Yaml::String(s) => Some(s.clone()), + _ => None, + }) + .collect(); + + if string_arr.is_empty() { + None + } else { + Some(string_arr) + } + }, + _ => None, + } +} \ No newline at end of file diff --git a/src-tauri/src/services/plugin_scanner/mod.rs b/src-tauri/src/services/plugin_scanner/mod.rs new file mode 100644 index 0000000..00409ba --- /dev/null +++ b/src-tauri/src/services/plugin_scanner/mod.rs @@ -0,0 +1,7 @@ +mod scanner; +mod metadata_extractor; +mod file_utils; + +pub use scanner::{scan_server_directory, perform_scan, get_plugin_data_path}; +pub use metadata_extractor::extract_plugin_metadata; +pub use file_utils::{calculate_file_hash, is_file_locked}; \ No newline at end of file diff --git a/src-tauri/src/services/plugin_scanner/scanner.rs b/src-tauri/src/services/plugin_scanner/scanner.rs new file mode 100644 index 0000000..5abe9db --- /dev/null +++ b/src-tauri/src/services/plugin_scanner/scanner.rs @@ -0,0 +1,409 @@ +use std::fs; +use std::path::{Path, PathBuf}; +use tauri::{AppHandle, Manager, Emitter}; +use walkdir::WalkDir; +use regex::Regex; +use std::ffi::OsStr; +use std::io::Read; + +use crate::models::server::{ServerType, ServerInfo, ScanResult, ScanProgress}; +use crate::models::plugin::{Plugin, PluginMeta}; +use super::metadata_extractor::extract_plugin_metadata; + +/// Scan a server directory and emit progress events +pub async fn scan_server_directory(app_handle: AppHandle, path: String) -> Result<(), String> { + // Get the main window + let window = app_handle.get_webview_window("main").ok_or("Main window not found")?; + + println!("Starting scan for server directory: {}", path); + + // Start the scan + match window.emit("scan_started", {}) { + Ok(_) => println!("Emitted scan_started event successfully"), + Err(e) => { + let err_msg = format!("Failed to emit scan_started event: {}", e); + println!("{}", err_msg); + return Err(err_msg); + } + } + + // Perform the actual scan in a separate function + match perform_scan(&app_handle, &path).await { + Ok(result) => { + println!("Scan completed successfully. 
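For reference, the metadata extractor above reads the conventional Bukkit/Spigot `plugin.yml` keys (`name`, `version`, `main`, `api-version`, `author`/`authors`, `softdepend`, and so on). A minimal parsing sketch using the same `yaml_rust` loader (hypothetical; the plugin values are invented for illustration):

```rust
use yaml_rust::YamlLoader;

fn parse_example_plugin_yml() {
    // Hypothetical plugin.yml content matching the keys read by extract_plugin_metadata
    let raw = r#"
name: ExamplePlugin
version: 1.4.2
main: com.example.ExamplePlugin
api-version: "1.19"
author: ExampleAuthor
softdepend: [Vault]
"#;
    let docs = YamlLoader::load_from_str(raw).expect("valid YAML");
    let doc = &docs[0];
    assert_eq!(doc["name"].as_str(), Some("ExamplePlugin"));
    assert_eq!(doc["version"].as_str(), Some("1.4.2"));
    assert_eq!(doc["softdepend"][0].as_str(), Some("Vault"));
}
```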
Found {} plugins", result.plugins.len()); + + // Save the scan result to disk + match save_plugin_data(app_handle.clone(), result.plugins.clone(), path.clone()).await { + Ok(_) => println!("Saved plugin data to disk successfully"), + Err(e) => println!("Failed to save plugin data: {}", e) + } + + // Emit scan completion event with the result + println!("Emitting scan_completed event with {} plugins", result.plugins.len()); + match window.emit("scan_completed", result.clone()) { + Ok(_) => { + println!("Emitted scan_completed event successfully"); + Ok(()) + }, + Err(e) => { + let err_msg = format!("Failed to emit scan_completed event: {}", e); + println!("{}", err_msg); + Err(err_msg) + } + } + }, + Err(e) => { + println!("Scan failed with error: {}", e); + + // Emit scan error event + match window.emit("scan_error", e.clone()) { + Ok(_) => println!("Emitted scan_error event successfully"), + Err(err) => println!("Failed to emit scan_error event: {}", err) + } + + Err(e) + } + } +} + +/// Perform a scan of the server directory +pub async fn perform_scan(app_handle: &AppHandle, path: &str) -> Result { + // Normalize the path and check if it exists + let server_path = Path::new(path); + if !server_path.exists() { + return Err(format!("Server directory not found: {}", path)); + } + + // Detect server type + let server_type = detect_server_type(server_path); + + // Find Minecraft version + let minecraft_version = detect_minecraft_version(server_path, &server_type); + + // Find plugins directory + let plugins_dir = get_plugins_directory(server_path, &server_type); + let plugins_path = Path::new(&plugins_dir); + + // Check if plugins directory exists + if !plugins_path.exists() { + return Err(format!("Plugins directory not found: {}", plugins_dir)); + } + + // Get all JAR files in plugins directory + let mut plugin_files = Vec::new(); + for entry in WalkDir::new(plugins_path) + .max_depth(1) // Only scan the top level + .into_iter() + .filter_map(|e| e.ok()) + { + let path = entry.path(); + if path.is_file() && path.extension() == Some(OsStr::new("jar")) { + plugin_files.push(path.to_path_buf()); + } + } + + // Create server info object + let server_info = ServerInfo { + server_type, + minecraft_version, + plugins_directory: plugins_dir, + plugins_count: plugin_files.len(), + }; + + // Emit total plugin count + app_handle.emit("scan_progress", ScanProgress { + processed: 0, + total: plugin_files.len(), + current_file: "Starting scan...".to_string(), + }).map_err(|e| e.to_string())?; + + // Process each plugin + let mut plugins = Vec::new(); + for (index, jar_path) in plugin_files.iter().enumerate() { + // Emit progress update + let file_name = jar_path.file_name() + .and_then(|n| n.to_str()) + .unwrap_or("unknown.jar"); + + app_handle.emit("scan_progress", ScanProgress { + processed: index, + total: plugin_files.len(), + current_file: file_name.to_string(), + }).map_err(|e| e.to_string())?; + + // Extract plugin metadata + match extract_plugin_metadata(jar_path) { + Ok(meta) => { + // Convert PluginMeta to Plugin + let plugin = Plugin { + name: meta.name, + version: meta.version, + latest_version: None, // Will be populated during update check + description: meta.description, + authors: meta.authors, + website: meta.website, + has_update: false, // Will be populated during update check + api_version: meta.api_version, + main_class: meta.main_class, + depend: meta.depend, + soft_depend: meta.soft_depend, + load_before: meta.load_before, + commands: meta.commands, + permissions: meta.permissions, 
+ file_path: meta.file_path, + file_hash: meta.file_hash, + changelog: None, // Will be populated during update check + repository_source: None, // Will be populated during update check + repository_id: None, // Will be populated during update check + repository_url: None, // Will be populated during update check + }; + plugins.push(plugin); + }, + Err(e) => { + // Log the error but continue processing other plugins + eprintln!("Error extracting metadata from {}: {}", file_name, e); + } + } + } + + // Emit final progress + app_handle.emit("scan_progress", ScanProgress { + processed: plugin_files.len(), + total: plugin_files.len(), + current_file: "Scan complete".to_string(), + }).map_err(|e| e.to_string())?; + + // Return the result + Ok(ScanResult { + server_info, + plugins, + }) +} + +/// Detect the type of Minecraft server +fn detect_server_type(server_path: &Path) -> ServerType { + // Check for server JAR file + if let Some(jar_path) = find_server_jar(server_path) { + let jar_filename = jar_path.file_name() + .and_then(|n| n.to_str()) + .unwrap_or(""); + + // Convert filename to lowercase for easier matching + let filename_lower = jar_filename.to_lowercase(); + + // Check for known server types + if filename_lower.contains("paper") { + return ServerType::Paper; + } else if filename_lower.contains("spigot") { + return ServerType::Spigot; + } else if filename_lower.contains("bukkit") || filename_lower.contains("craftbukkit") { + return ServerType::Bukkit; + } else if filename_lower.contains("forge") { + return ServerType::Forge; + } else if filename_lower.contains("fabric") { + return ServerType::Fabric; + } else if filename_lower.contains("velocity") { + return ServerType::Velocity; + } else if filename_lower.contains("bungeecord") { + return ServerType::BungeeCord; + } else if filename_lower.contains("waterfall") { + return ServerType::Waterfall; + } + + // If server.properties exists, it's likely a vanilla or modified vanilla server + if server_path.join("server.properties").exists() { + // Check if there's a plugins directory + if server_path.join("plugins").exists() { + // Assume Spigot if we can't tell more specifically but plugins exist + return ServerType::Spigot; + } + return ServerType::Vanilla; + } + } + + // If we can't determine, check for directory structure hints + if server_path.join("plugins").exists() { + if server_path.join("cache").exists() && server_path.join("modules").exists() { + return ServerType::Velocity; + } else if server_path.join("libraries").exists() && server_path.join("mods").exists() { + return ServerType::Forge; + } else { + return ServerType::Spigot; // Default assumption for server with plugins + } + } + + ServerType::Unknown +} + +/// Find the server JAR file in the server directory +fn find_server_jar(server_path: &Path) -> Option { + // Define pattern for server JAR files + let server_jar_pattern = Regex::new(r"^(paper|spigot|craftbukkit|minecraft|fabric|forge|velocity|bungeecord|waterfall).*\.jar$").unwrap(); + + // Look for JAR files in the directory + let entries = match fs::read_dir(server_path) { + Ok(entries) => entries, + Err(_) => return None, + }; + + // Check each entry + for entry in entries { + if let Ok(entry) = entry { + if let Some(filename) = entry.file_name().to_str() { + // Check if it matches server JAR pattern + if filename.to_lowercase().ends_with(".jar") { + if server_jar_pattern.is_match(&filename.to_lowercase()) { + return Some(entry.path()); + } + + // Also check for common naming patterns + if filename.contains("server") 
|| filename == "server.jar" { + return Some(entry.path()); + } + } + } + } + } + + None +} + +/// Detect the Minecraft version from server files +fn detect_minecraft_version(server_path: &Path, server_type: &ServerType) -> Option { + // Try to find server JAR + if let Some(jar_path) = find_server_jar(server_path) { + // Try to extract version from JAR filename + if let Some(filename) = jar_path.file_name().and_then(|n| n.to_str()) { + // Look for version pattern like 1.19.2 in filename + let version_pattern = Regex::new(r"(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)").unwrap(); + if let Some(captures) = version_pattern.captures(filename) { + if let Some(version_match) = captures.get(0) { + return Some(version_match.as_str().to_string()); + } + } + } + + // If version not found in filename, try to read it from the JAR + return read_version_from_jar(&jar_path); + } + + // Try server.properties for vanilla/bukkit/spigot servers + if matches!(server_type, ServerType::Vanilla | ServerType::Bukkit | ServerType::Spigot | ServerType::Paper) { + let properties_path = server_path.join("server.properties"); + if properties_path.exists() { + // Read properties file + if let Ok(content) = fs::read_to_string(properties_path) { + // Look for the level-type property + for line in content.lines() { + if line.starts_with("level-name=") { + // Try to find version.json in the level directory + let level_name = line.trim_start_matches("level-name=").trim(); + let version_json_path = server_path.join(level_name).join("version.json"); + if version_json_path.exists() { + if let Ok(version_content) = fs::read_to_string(version_json_path) { + if let Ok(json) = serde_json::from_str::(&version_content) { + if let Some(name) = json.get("name").and_then(|n| n.as_str()) { + return Some(name.to_string()); + } + } + } + } + } + } + } + } + } + + None +} + +/// Read version information from a JAR file +fn read_version_from_jar(jar_path: &Path) -> Option { + // Open the JAR file + let file = match fs::File::open(jar_path) { + Ok(file) => file, + Err(_) => return None, + }; + + // Create a ZIP archive reader + let mut archive = match zip::ZipArchive::new(file) { + Ok(archive) => archive, + Err(_) => return None, + }; + + // Try to find version.json or similar file + for i in 0..archive.len() { + let mut file = match archive.by_index(i) { + Ok(file) => file, + Err(_) => continue, + }; + + let name = file.name(); + + // Check various version files + if name.ends_with("version.json") || name.contains("version") && name.ends_with(".json") { + let mut contents = String::new(); + if let Err(_) = file.read_to_string(&mut contents) { + continue; + } + + if let Ok(json) = serde_json::from_str::(&contents) { + // Try different possible keys for version + for key in ["name", "version", "minecraft_version", "id"] { + if let Some(version) = json.get(key).and_then(|v| v.as_str()) { + return Some(version.to_string()); + } + } + } + } + } + + None +} + +/// Get the plugins directory for the server +fn get_plugins_directory(server_path: &Path, server_type: &ServerType) -> String { + match server_type { + ServerType::Velocity | ServerType::BungeeCord | ServerType::Waterfall => { + server_path.join("plugins").to_string_lossy().to_string() + }, + _ => server_path.join("plugins").to_string_lossy().to_string(), + } +} + +/// Save plugin data to disk for persistence +async fn save_plugin_data(app_handle: AppHandle, plugins: Vec, server_path: String) -> Result<(), String> { + // Get plugin data path + let data_dir = get_plugin_data_path(&app_handle, 
&server_path)?; + + // Create directory if it doesn't exist + if !data_dir.exists() { + fs::create_dir_all(&data_dir) + .map_err(|e| format!("Failed to create data directory: {}", e))?; + } + + // Save plugins data + let data_path = data_dir.join("plugins.json"); + let json_data = serde_json::to_string_pretty(&plugins) + .map_err(|e| format!("Failed to serialize plugin data: {}", e))?; + + fs::write(&data_path, json_data) + .map_err(|e| format!("Failed to write plugin data: {}", e))?; + + Ok(()) +} + +/// Get plugin data directory path +pub fn get_plugin_data_path(app_handle: &AppHandle, server_path: &str) -> Result { + let app_data_dir = app_handle.path().app_data_dir() + .map_err(|e| format!("Failed to get app data directory: {}", e))?; + + // Hash the server path to create a unique identifier + use sha2::Digest; + let mut hasher = sha2::Sha256::new(); + hasher.update(server_path.as_bytes()); + let server_hash = format!("{:x}", hasher.finalize()); + + // Create a directory for this server + Ok(app_data_dir.join("servers").join(server_hash)) +} \ No newline at end of file diff --git a/src-tauri/src/services/update_manager/mod.rs b/src-tauri/src/services/update_manager/mod.rs new file mode 100644 index 0000000..c87814c --- /dev/null +++ b/src-tauri/src/services/update_manager/mod.rs @@ -0,0 +1,7 @@ +mod update_checker; +mod version_utils; +mod plugin_updater; + +pub use update_checker::{check_for_plugin_updates, check_single_plugin_update}; +pub use version_utils::{normalize_version, compare_plugin_versions}; +pub use plugin_updater::{backup_plugin, replace_plugin}; \ No newline at end of file diff --git a/src-tauri/src/services/update_manager/plugin_updater.rs b/src-tauri/src/services/update_manager/plugin_updater.rs new file mode 100644 index 0000000..475a522 --- /dev/null +++ b/src-tauri/src/services/update_manager/plugin_updater.rs @@ -0,0 +1,109 @@ +use std::path::{Path, PathBuf}; +use std::fs; +use std::time::{SystemTime, UNIX_EPOCH}; +use tauri::AppHandle; + +use crate::models::repository::RepositorySource; +use crate::models::server::ServerInfo; +use crate::services::plugin_scanner::is_file_locked; + +/// Backup a plugin before replacing it +pub async fn backup_plugin(plugin_file_path: String) -> Result { + // Get the current timestamp for the backup filename + let now = SystemTime::now().duration_since(UNIX_EPOCH) + .map_err(|e| format!("Failed to get timestamp: {}", e))? + .as_secs(); + + // Create backup file path + let path = Path::new(&plugin_file_path); + let file_name = path.file_name() + .ok_or_else(|| "Invalid plugin file path".to_string())? 
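The per-server data directory name is the hex SHA-256 of the server path, so distinct server paths can never collide on disk. A standalone sketch of the same derivation (hypothetical helper mirroring `get_plugin_data_path`, not part of this patch):

```rust
use sha2::{Digest, Sha256};

/// Hypothetical mirror of the directory naming used by get_plugin_data_path:
/// <app_data_dir>/servers/<sha256(server_path)>
fn server_data_dir_name(server_path: &str) -> String {
    format!("{:x}", Sha256::digest(server_path.as_bytes()))
}

// e.g. server_data_dir_name("C:\\servers\\smp") and server_data_dir_name("/srv/smp")
// yield two different 64-character hex directory names.
```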
+ .to_str() + .ok_or_else(|| "Invalid file name encoding".to_string())?; + + // Create backup directory if it doesn't exist + let backup_dir = path.parent() + .unwrap_or_else(|| Path::new(".")) + .join("backups"); + + if !backup_dir.exists() { + fs::create_dir_all(&backup_dir) + .map_err(|e| format!("Failed to create backup directory: {}", e))?; + } + + // Create backup file path with timestamp + let backup_path = backup_dir.join(format!("{}.{}.bak", file_name, now)); + let backup_path_str = backup_path.to_string_lossy().to_string(); + + // Check if file is locked + if is_file_locked(&plugin_file_path) { + return Err(format!("File is locked: {}", plugin_file_path)); + } + + // Copy the file + fs::copy(&plugin_file_path, &backup_path) + .map_err(|e| format!("Failed to create backup: {}", e))?; + + Ok(backup_path_str) +} + +/// Replace a plugin with a new version +pub async fn replace_plugin( + plugin_id: String, + version: String, + repository: RepositorySource, + current_file_path: String, + server_info: Option<ServerInfo> +) -> Result<String, String> { + // Check if file is locked + if is_file_locked(&current_file_path) { + return Err(format!("Plugin file is currently locked: {}", current_file_path)); + } + + // Create a temporary file path + let download_path = create_temp_download_path(&current_file_path)?; + + // Download the new plugin version + let server_type = server_info.as_ref().map(|info| &info.server_type); + crate::lib_download_plugin_from_repository( + &plugin_id, + &version, + repository, + &download_path.to_string_lossy(), + server_type + ).await?; + + // Backup the original file + backup_plugin(current_file_path.clone()).await?; + + // Replace the original file with the downloaded one + fs::rename(download_path, &current_file_path) + .map_err(|e| format!("Failed to replace plugin: {}", e))?; + + Ok(current_file_path) +} + +/// Create a temporary file path for plugin download +fn create_temp_download_path(current_file_path: &str) -> Result<PathBuf, String> { + let path = Path::new(current_file_path); + let file_stem = path.file_stem() + .ok_or_else(|| "Invalid plugin file path".to_string())? + .to_str() + .ok_or_else(|| "Invalid file name encoding".to_string())?; + + let file_ext = path.extension() + .unwrap_or_else(|| std::ffi::OsStr::new("jar")) + .to_str() + .unwrap_or("jar"); + + let parent = path.parent() + .unwrap_or_else(|| Path::new(".")); + + // Generate temp file name with timestamp + let now = SystemTime::now().duration_since(UNIX_EPOCH) + .map_err(|e| format!("Failed to get timestamp: {}", e))?
+ .as_secs(); + + let temp_name = format!("{}.new.{}.{}", file_stem, now, file_ext); + Ok(parent.join(temp_name)) +} \ No newline at end of file diff --git a/src-tauri/src/services/update_manager/version_utils.rs b/src-tauri/src/services/update_manager/version_utils.rs new file mode 100644 index 0000000..4137a34 --- /dev/null +++ b/src-tauri/src/services/update_manager/version_utils.rs @@ -0,0 +1,60 @@ +use semver::{Version, VersionReq}; +use regex::Regex; + +/// Normalize a version string to be semver compatible +pub fn normalize_version(version_str: &str) -> String { + // If already starts with a digit, assume semantic version format + if version_str.chars().next().map_or(false, |c| c.is_ascii_digit()) { + // Clean up any common prefixes like 'v' + let cleaned = version_str.trim_start_matches(|c| c == 'v' || c == 'V'); + return cleaned.to_string(); + } + + // Return as-is for now + version_str.to_string() +} + +/// Compare two plugin versions to determine if an update is available +pub fn compare_plugin_versions(installed_str: &str, repo_str: &str) -> bool { + // Normalize version strings + let installed_version = normalize_version(installed_str); + let repo_version = normalize_version(repo_str); + + // Try to parse as semver + match (Version::parse(&installed_version), Version::parse(&repo_version)) { + (Ok(installed), Ok(repo)) => { + // Simple semver comparison + repo > installed + }, + _ => { + // Fallback to simple string comparison for non-semver versions + repo_version != installed_version + } + } +} + +/// Extract version pattern from a string +pub fn extract_version_pattern(input: &str) -> Option { + // Look for version pattern like 1.19.2 in string + let version_pattern = Regex::new(r"(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)").unwrap(); + if let Some(captures) = version_pattern.captures(input) { + if let Some(version_match) = captures.get(0) { + return Some(version_match.as_str().to_string()); + } + } + None +} + +/// Check if a plugin version is compatible with a specific Minecraft version +pub fn is_version_compatible(plugin_version: &str, minecraft_version: &str) -> bool { + // Try to parse the Minecraft version + if let Ok(mc_version) = Version::parse(minecraft_version) { + // Try to parse as a version requirement + if let Ok(req) = VersionReq::parse(plugin_version) { + return req.matches(&mc_version); + } + } + + // If version formats are incompatible, make best guess + plugin_version.contains(minecraft_version) +} \ No newline at end of file diff --git a/src/App.css b/src/App.css index 3ef7968..13ad99d 100644 --- a/src/App.css +++ b/src/App.css @@ -514,3 +514,119 @@ button { font-size: 0.9rem; border: 1px solid var(--border-color); } + +.plugin-changelog { + margin-top: 1rem; + margin-bottom: 1rem; + background-color: rgba(255, 255, 255, 0.05); + padding: 1rem; + border-radius: 4px; + max-height: 200px; + overflow-y: auto; +} + +.changelog-content { + white-space: pre-line; + font-size: 0.9rem; + line-height: 1.4; +} + +.update-actions { + margin-top: 1.5rem; + display: flex; + justify-content: center; +} + +.detail-update-button { + padding: 0.75rem 1.5rem; + background-color: var(--secondary-color); + color: white; + border: none; + border-radius: 4px; + cursor: pointer; + font-size: 1rem; + transition: background-color 0.3s; +} + +.detail-update-button:hover { + background-color: #43a047; +} + +.detail-update-button:disabled { + background-color: #666; + cursor: not-allowed; +} + +.update-available-badge { + background-color: var(--secondary-color); + color: white; + 
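The update check treats versions as semver when both sides parse and falls back to a plain string inequality otherwise. A few illustrative assertions (hypothetical test, assuming `compare_plugin_versions` is in scope as defined in `version_utils.rs`):

```rust
#[cfg(test)]
mod version_comparison_tests {
    use super::*;

    #[test]
    fn flags_updates_as_expected() {
        assert!(compare_plugin_versions("1.2.3", "1.3.0"));        // semver: repo version is newer
        assert!(!compare_plugin_versions("1.3.0", "1.3.0"));       // equal versions: no update
        assert!(compare_plugin_versions("build-42", "build-43"));  // non-semver: any difference is flagged
    }
}
```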
padding: 0.25rem 0.5rem; + border-radius: 4px; + font-size: 0.8rem; + margin-left: 0.5rem; +} + +.bulk-update-progress { + margin-top: 0.5rem; + font-size: 0.9rem; + color: var(--text-secondary-color); +} + +.bulk-update-progress progress { + width: 100%; + height: 8px; + border-radius: 4px; + margin-top: 0.25rem; +} + +/* Warning Modal Styles */ +.warning-modal { + position: fixed; + top: 0; + left: 0; + right: 0; + bottom: 0; + background-color: rgba(0, 0, 0, 0.7); + display: flex; + justify-content: center; + align-items: center; + z-index: 2000; /* Higher than other modals */ +} + +.warning-content { + background-color: var(--background-color); + border: 2px solid var(--warning-color); + border-radius: 8px; + padding: 2rem; + width: 90%; + max-width: 500px; + text-align: center; + position: relative; + box-shadow: 0 4px 20px rgba(0, 0, 0, 0.3); +} + +.warning-content h3 { + color: var(--warning-color); + margin-bottom: 1rem; + font-size: 1.5rem; +} + +.warning-content p { + margin-bottom: 1.5rem; + line-height: 1.6; +} + +.warning-content .close-button { + padding: 0.75rem 1.5rem; + background-color: var(--warning-color); + color: white; + border: none; + border-radius: 4px; + cursor: pointer; + font-size: 1rem; + transition: background-color 0.3s; +} + +.warning-content .close-button:hover { + background-color: #e69500; +} diff --git a/src/App.tsx b/src/App.tsx index c869665..8ec3bd4 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -2,6 +2,7 @@ import { useState, useEffect } from "react"; import { invoke } from "@tauri-apps/api/core"; import { open } from "@tauri-apps/plugin-dialog"; import { listen, UnlistenFn } from "@tauri-apps/api/event"; +import { appDataDir } from '@tauri-apps/api/path'; // Import for data directory (if needed frontend side) import "./App.css"; type ServerType = @@ -39,6 +40,11 @@ interface Plugin { permissions?: any; file_path: string; file_hash: string; + website?: string; + changelog?: string; + repository_source?: string; // Add repository source (string for simplicity) + repository_id?: string; // Add repository ID + repository_url?: string; // URL to the plugin's repository page } interface ScanResult { @@ -52,6 +58,33 @@ interface ScanProgress { current_file: string; } +// --- New Interfaces for Update Events --- +interface BulkUpdateProgressPayload { + processed: number; + total: number; + current_plugin_name: string; +} + +interface SingleUpdateResultPayload { + original_file_path: string; + plugin: Plugin | null; // Updated plugin state or null if check failed but wasn't a panic + error: string | null; // Error message if any +} + +// Interface for potential plugin matches for ambiguous plugins +interface PotentialPluginMatch { + name: string; + version: string; + repository: string; + repository_id: string; + page_url: string; + description?: string; + minecraft_versions: string[]; + download_count?: number; +} + +// --- End New Interfaces --- + interface PluginDetailsProps { plugin: Plugin; onClose: () => void; @@ -94,12 +127,21 @@ function PluginDetails({ plugin, onClose }: PluginDetailsProps) {

{plugin.name}

-
Version: {plugin.version}
+
Version: {plugin.version} {plugin.latest_version && plugin.has_update && (Update available: {plugin.latest_version})}
{plugin.description && (
{plugin.description}
)} + {plugin.website && ( +
+
Website:
+ + {plugin.website} + +
+ )} + {plugin.authors && plugin.authors.length > 0 && (
Authors:
@@ -121,6 +163,13 @@ function PluginDetails({ plugin, onClose }: PluginDetailsProps) {
)} + {plugin.changelog && plugin.has_update && ( +
+
Changelog:
+
{plugin.changelog}
+
+ )} +
File Path:
{plugin.file_path}
@@ -128,6 +177,17 @@ function PluginDetails({ plugin, onClose }: PluginDetailsProps) {
File Hash (SHA-256):
{plugin.file_hash}
+ + {plugin.has_update && plugin.latest_version && ( +
+ +
+ )}
); @@ -159,6 +219,75 @@ function ServerInfoDisplay({ serverInfo }: { serverInfo: ServerInfo | null }) { ); } +// Add this new component after PluginDetails +function WarningModal({ message, onClose }: { message: string, onClose: () => void }) { + return ( +
+
+

⚠️ Warning

+

{message}

+ +
+
+ ); +} + +// Component for selecting the correct plugin from potential matches +function PluginMatchSelector({ + plugin, + potentialMatches, + onSelect, + onCancel +}: { + plugin: Plugin, + potentialMatches: PotentialPluginMatch[], + onSelect: (match: PotentialPluginMatch) => void, + onCancel: () => void +}) { + return ( +
+
+

Multiple Matches Found

+

We found several potential matches for {plugin.name}. Please select the correct one:

+ +
+ {potentialMatches.map((match, index) => ( +
+
+

{match.name}

+

Version: {match.version}

+ {match.description &&

{match.description}

} +
+ Source: {match.repository} + {match.download_count && Downloads: {match.download_count.toLocaleString()}} + MC: {match.minecraft_versions.join(', ')} +
+
+
+ + + View Page + +
+
+ ))} +
+ +
+ +
+
+
+ ); +} + function App() { const [serverPath, setServerPath] = useState(""); const [serverInfo, setServerInfo] = useState(null); @@ -168,42 +297,153 @@ function App() { const [error, setError] = useState(null); const [selectedPlugin, setSelectedPlugin] = useState(null); const [scanProgress, setScanProgress] = useState(null); + const [isCheckingUpdates, setIsCheckingUpdates] = useState(false); + const [updateError, setUpdateError] = useState(null); + // --- New State Variables --- + const [pluginLoadingStates, setPluginLoadingStates] = useState>({}); + const [bulkUpdateProgress, setBulkUpdateProgress] = useState(null); + const [warningMessage, setWarningMessage] = useState(null); + const [potentialMatches, setPotentialMatches] = useState([]); + // --- New state for match selector --- + const [showMatchSelector, setShowMatchSelector] = useState(false); + const [pluginToDisambiguate, setPluginToDisambiguate] = useState(null); + // --- End New state for match selector --- + // --- End New State Variables --- + const [serverType, setServerType] = useState('Unknown'); useEffect(() => { - let unlistenProgress: UnlistenFn | undefined; - let unlistenComplete: UnlistenFn | undefined; - let unlistenError: UnlistenFn | undefined; + let unlistenScanStarted: UnlistenFn | undefined; + let unlistenScanProgress: UnlistenFn | undefined; + let unlistenScanCompleted: UnlistenFn | undefined; + let unlistenScanError: UnlistenFn | undefined; + let unlistenBulkUpdateStart: UnlistenFn | undefined; + let unlistenUpdateCheckProgress: UnlistenFn | undefined; + let unlistenSingleUpdateCheckStarted: UnlistenFn | undefined; + let unlistenSingleUpdateCheckCompleted: UnlistenFn | undefined; const setupListeners = async () => { - unlistenProgress = await listen("scan_progress", (event) => { + unlistenScanStarted = await listen("scan_started", () => { + console.log("Scan started event received"); + setIsScanning(true); + setScanProgress(null); + setError(null); + }); + + unlistenScanProgress = await listen("scan_progress", (event) => { + console.log("Scan progress event received:", event.payload); setScanProgress(event.payload); - setError(null); }); - unlistenComplete = await listen("scan_complete", (event) => { - setServerInfo(event.payload.server_info); - setPlugins(event.payload.plugins); + unlistenScanCompleted = await listen("scan_completed", (event) => { + console.log("Scan completed event received with payload:", event.payload); + try { + console.log("Server info:", event.payload.server_info); + console.log("Plugins count:", event.payload.plugins.length); + + // Update state in a specific order to ensure UI updates properly + setIsScanning(false); + setScanComplete(true); + setServerInfo(event.payload.server_info); + setPlugins(event.payload.plugins); + setServerType(event.payload.server_info.server_type); + + // Add a slight delay and verify the state was updated + setTimeout(() => { + console.log("Verifying state updates after scan:"); + console.log("- scanComplete:", scanComplete); + console.log("- serverInfo:", serverInfo); + console.log("- plugins count:", plugins.length); + + // Force a state update if plugins length is still 0 but we got plugins + if (plugins.length === 0 && event.payload.plugins.length > 0) { + console.log("Forcing state update because plugins array is empty"); + setPlugins([...event.payload.plugins]); + } + }, 100); + + console.log("State updated after scan completion"); + } catch (err) { + console.error("Error handling scan completion:", err); + setError(`Error handling scan completion: 
+          setError(`Error handling scan completion: ${err}`);
+          setIsScanning(false);
+        }
+      });
+
+      unlistenScanError = await listen("scan_error", (event) => {
+        console.log("Scan error event received:", event.payload);
         setIsScanning(false);
-        setScanComplete(true);
-        setScanProgress(null);
-        setError(null);
-      });
-
-      unlistenError = await listen("scan_error", (event) => {
-        console.error("Scan Error:", event.payload);
         setError(event.payload);
-        setIsScanning(false);
-        setScanProgress(null);
-        setScanComplete(false);
       });
+
+      unlistenBulkUpdateStart = await listen("bulk_update_start", (event) => {
+        console.log("Bulk update start event received, total plugins:", event.payload);
+        setIsCheckingUpdates(true);
+        setUpdateError(null);
+        setBulkUpdateProgress({
+          processed: 0,
+          total: event.payload,
+          current_plugin_name: "Starting update check..."
+        });
+      });
+
+      unlistenUpdateCheckProgress = await listen("update_check_progress", (event) => {
+        console.log("Update check progress event received:", event.payload);
+        setBulkUpdateProgress(event.payload);
+      });
+
+      unlistenSingleUpdateCheckStarted = await listen("single_update_check_started", (event) => {
+        console.log("Single update check started for:", event.payload);
+      });
+
+      unlistenSingleUpdateCheckCompleted = await listen("single_update_check_completed", (event) => {
+        console.log("Single update check completed, result:", event.payload);
+        const { original_file_path, plugin, error } = event.payload;
+
+        setPluginLoadingStates(prev => ({ ...prev, [original_file_path]: false }));
+
+        if (error) {
+          setUpdateError(`Error checking for updates: ${error}`);
+          return;
+        }
+
+        if (plugin) {
+          setPlugins(prevPlugins => prevPlugins.map(p => {
+            if (p.file_path === original_file_path) {
+              return plugin;
+            }
+            return p;
+          }));
+
+          if (serverPath) {
+            invoke("save_plugin_data", {
+              plugins: plugins.map(p => p.file_path === original_file_path ? plugin : p),
+              serverPath
+            }).catch(err => {
+              console.error("Error saving plugin data after single update:", err);
+            });
+          }
+        }
+      });
+
+      window.addEventListener('update-plugin', ((e: CustomEvent) => {
+        if (e.detail) {
+          updatePlugin(e.detail);
+        }
+      }) as EventListener);
     };

     setupListeners();

     return () => {
-      unlistenProgress?.();
-      unlistenComplete?.();
-      unlistenError?.();
+      unlistenScanStarted?.();
+      unlistenScanProgress?.();
+      unlistenScanCompleted?.();
+      unlistenScanError?.();
+      unlistenBulkUpdateStart?.();
+      unlistenUpdateCheckProgress?.();
+      unlistenSingleUpdateCheckStarted?.();
+      unlistenSingleUpdateCheckCompleted?.();
+      window.removeEventListener('update-plugin', (() => {}) as EventListener);
     };
   }, []);

@@ -212,24 +452,44 @@ function App() {
       const selected = await open({
         directory: true,
         multiple: false,
-        title: 'Select Minecraft Server Directory'
+        title: "Select Minecraft Server Folder",
       });

-      if (selected !== null) {
-        if (typeof selected === 'string') {
-          setServerPath(selected);
-          setError(null);
-        } else if (Array.isArray(selected)) {
-          const selectedArr = selected as string[];
-          if (selectedArr.length > 0) {
-            setServerPath(selectedArr[0]);
+      if (selected && typeof selected === "string") {
+        console.log(`Directory selected: ${selected}`);
+        setServerPath(selected);
+        setServerInfo(null);
+        setPlugins([]);
+        setIsScanning(false);
+        setScanComplete(false);
+        setError(null);
+        setScanProgress(null);
+        setIsCheckingUpdates(false);
+        setUpdateError(null);
+        setPluginLoadingStates({});
+        setBulkUpdateProgress(null);
+
+        try {
+          console.log(`Attempting to load persisted data for: ${selected}`);
+          const loadedPlugins: Plugin[] = await invoke("load_plugin_data", { serverPath: selected });
+          if (loadedPlugins && loadedPlugins.length > 0) {
+            console.log(`Loaded ${loadedPlugins.length} plugins from persistence.`);
+            setPlugins(loadedPlugins);
+            setScanComplete(true);
             setError(null);
+          } else {
+            console.log("No persisted plugin data found for this server.");
           }
+        } catch (loadError) {
+          console.error("Error loading persisted plugin data:", loadError);
+          setError(`Failed to load previous plugin data: ${loadError}`);
         }
+      } else {
+        console.log("Directory selection cancelled.");
       }
     } catch (err) {
-      console.error('Failed to open directory:', err);
-      setError("Failed to open directory selector. Please enter path manually.");
+      console.error("Error selecting directory:", err);
+      setError(`Error selecting directory: ${err}`);
     }
   }

@@ -237,6 +497,7 @@ function App() {
     if (!serverPath || isScanning) return;

     try {
+      console.log("Starting scan for plugins in:", serverPath);
       setIsScanning(true);
       setScanComplete(false);
       setPlugins([]);
@@ -244,7 +505,8 @@ function App() {
       setScanProgress(null);
       setError(null);

-      await invoke("scan_server_directory", { path: serverPath });
+      await invoke("scan_server_dir", { path: serverPath });
+      console.log("Scan server dir command invoked successfully");

     } catch (err) {
       console.error("Error invoking scan command:", err);
@@ -253,6 +515,179 @@ function App() {
     }
   }

+  async function checkForUpdates() {
+    if (!plugins.length || isCheckingUpdates) return;
+
+    setIsCheckingUpdates(true);
+    setUpdateError(null);
+    setBulkUpdateProgress(null);
+    console.log("Invoking bulk check_plugin_updates...");
+
+    try {
+      const repositoriesToCheck = ['SpigotMC', 'Modrinth', 'GitHub'];
+
+      const pluginsToSend = plugins.map(p => ({
+        name: p.name,
+        version: p.version,
+        authors: p.authors || [],
+        file_path: p.file_path,
+        file_hash: p.file_hash,
+        website: p.website,
+        description: p.description,
+        api_version: p.api_version,
+        main_class: p.main_class,
+        depend: p.depend,
+        soft_depend: p.soft_depend,
+        load_before: p.load_before,
+        commands: p.commands,
+        permissions: p.permissions,
+        has_update: p.has_update || false,
+        repository_source: p.repository_source,
+        repository_id: p.repository_id,
+        repository_url: p.repository_url,
+      }));
+
+      console.log("Sending plugin data to backend, count:", pluginsToSend.length);
+
+      const updatedPlugins = await invoke("check_plugin_updates", {
+        plugins: pluginsToSend,
+        repositories: repositoriesToCheck,
+      });
+
+      console.log("Bulk update check completed successfully, updating state.");
+      setPlugins(updatedPlugins);
+
+      if (serverPath) {
+        try {
+          console.log("[checkForUpdates] Saving plugin data...");
+          await invoke("save_plugin_data", { plugins: updatedPlugins, serverPath });
+          console.log("[checkForUpdates] Plugin data saved successfully.");
+        } catch (saveError) {
+          console.error("Error saving plugin data after bulk update:", saveError);
+          setUpdateError(`Update check complete, but failed to save plugin data: ${saveError}`);
+        }
+      }
+    } catch (err) {
+      const errorMessage = `Error during bulk update check: ${err instanceof Error ? err.message : String(err)}`;
+      console.error(errorMessage);
+      setUpdateError(errorMessage);
+    } finally {
+      setIsCheckingUpdates(false);
+      setBulkUpdateProgress(null);
+    }
+  }
+
+  async function checkSinglePlugin(plugin: Plugin) {
+    if (isScanning || isCheckingUpdates || pluginLoadingStates[plugin.file_path]) return;
+
+    console.log(`Invoking single check for: ${plugin.name} (${plugin.file_path})`);
+    setPluginLoadingStates(prev => ({ ...prev, [plugin.file_path]: true }));
+    setUpdateError(null);
+
+    try {
+      const repositoriesToCheck = ['SpigotMC', 'Modrinth', 'GitHub'];
+
+      const pluginToSend = {
+        name: plugin.name,
+        version: plugin.version,
+        authors: plugin.authors || [],
+        file_path: plugin.file_path,
+        file_hash: plugin.file_hash,
+        website: plugin.website,
+        description: plugin.description,
+        api_version: plugin.api_version,
+        main_class: plugin.main_class,
+        depend: plugin.depend,
+        soft_depend: plugin.soft_depend,
+        load_before: plugin.load_before,
+        commands: plugin.commands,
+        permissions: plugin.permissions,
+        has_update: plugin.has_update,
+      };
+
+      await invoke("check_single_plugin_update_command", {
+        pluginToCheck: pluginToSend,
+        repositoriesToCheck,
+      });
+    } catch (err) {
+      const errorMessage = `Error invoking single update command for ${plugin.name}: ${err instanceof Error ? err.message : String(err)}`;
+      console.error(errorMessage);
+      setUpdateError(errorMessage);
+      setPluginLoadingStates(prev => ({ ...prev, [plugin.file_path]: false }));
+    }
+  }
+
+  async function updatePlugin(plugin: Plugin) {
+    if (!plugin.has_update || !plugin.latest_version || !plugin.repository_source || !plugin.repository_id) {
+      setUpdateError(`Cannot update ${plugin.name}: Missing required update information`);
+      return;
+    }
+
+    setPluginLoadingStates(prev => ({ ...prev, [plugin.file_path]: true }));
+    setUpdateError(null);
+
+    try {
+      console.log(`Updating plugin: ${plugin.name} to version ${plugin.latest_version}`);
+
+      const newFilePath = await invoke("replace_plugin", {
+        pluginId: plugin.repository_id,
+        version: plugin.latest_version,
+        repository: plugin.repository_source,
+        currentFilePath: plugin.file_path,
+        serverInfo: serverInfo
+      });
+
+      console.log(`Update successful for ${plugin.name}, new file path: ${newFilePath}`);
+
+      setPlugins(currentPlugins => currentPlugins.map(p => {
+        if (p.file_path === plugin.file_path) {
+          return {
+            ...p,
+            version: p.latest_version || p.version,
+            has_update: false,
+            latest_version: p.latest_version,
+            file_path: newFilePath
+          };
+        }
+        return p;
+      }));
+
+      if (serverPath) {
+        await invoke("save_plugin_data", {
+          plugins: plugins.map(p => {
+            if (p.file_path === plugin.file_path) {
+              return {
+                ...p,
+                version: p.latest_version || p.version,
+                has_update: false,
+                file_path: newFilePath
+              };
+            }
+            return p;
+          }),
+          serverPath
+        });
+      }
+    } catch (err) {
+      const errorMessage = err instanceof Error ? err.message : String(err);
+      console.error(`Error updating ${plugin.name}:`, errorMessage);
+
+      if (errorMessage.includes("in use") ||
+          errorMessage.includes("server running") ||
+          errorMessage.includes("being used by another process")) {
+        setWarningMessage(`Cannot update ${plugin.name}: The Minecraft server appears to be running. Please stop your server before updating plugins.`);
+      } else if (errorMessage.includes("download failed")) {
+        setUpdateError(`Failed to download update for ${plugin.name}. Please check your internet connection and try again.`);
+      } else if (errorMessage.includes("Critical error")) {
+        setWarningMessage(`${errorMessage} A backup of your original plugin is available in the backups folder.`);
+      } else {
+        setUpdateError(`Error updating ${plugin.name}: ${errorMessage}`);
+      }
+    } finally {
+      setPluginLoadingStates(prev => ({ ...prev, [plugin.file_path]: false }));
+    }
+  }
+
   const showPluginDetails = (plugin: Plugin) => {
     setSelectedPlugin(plugin);
   };
@@ -261,6 +696,48 @@ function App() {
     setSelectedPlugin(null);
   };

+  const handleSelectMatch = async (selectedMatch: PotentialPluginMatch) => {
+    if (!pluginToDisambiguate || !serverPath) return;
+
+    console.log(`User selected match: ${selectedMatch.name} from ${selectedMatch.repository}`);
+    setShowMatchSelector(false);
+    setPluginLoadingStates(prev => ({ ...prev, [pluginToDisambiguate.file_path]: true }));
+
+    try {
+      const updatedPlugin: Plugin = await invoke("set_plugin_repository", {
+        pluginFilePath: pluginToDisambiguate.file_path,
+        repository: selectedMatch.repository,
+        repositoryId: selectedMatch.repository_id,
+        pageUrl: selectedMatch.page_url,
+        serverPath: serverPath,
+      });
+
+      setPlugins(currentPlugins =>
+        currentPlugins.map(p =>
+          p.file_path === updatedPlugin.file_path ? updatedPlugin : p
+        )
+      );
+      console.log(`Successfully set repository source for ${updatedPlugin.name}`);
+
+    } catch (err) {
+      console.error("Error setting plugin repository source:", err);
+      setUpdateError(`Failed to set repository source for ${pluginToDisambiguate.name}: ${err}`);
+    } finally {
+      setPluginLoadingStates(prev => ({ ...prev, [pluginToDisambiguate.file_path]: false }));
+      setPluginToDisambiguate(null);
+      setPotentialMatches([]);
+    }
+  };
+
+  const handleCancelMatchSelection = () => {
+    setShowMatchSelector(false);
+    setPluginToDisambiguate(null);
+    setPotentialMatches([]);
+    if (pluginToDisambiguate) {
+      setPluginLoadingStates(prev => ({ ...prev, [pluginToDisambiguate.file_path]: false }));
+    }
+  };
+
   return (
@@ -303,44 +780,104 @@ function App() {
        )}

-        {scanComplete && serverInfo && (
+        {serverInfo && (
        )}

-        {scanComplete && (
-
-

Installed Plugins ({plugins.length})

- {plugins.length > 0 ? ( - <> -
- Name - Current Version - Latest Version - Actions -
- {plugins.map((plugin, index) => ( -
-
{plugin.name}
-
{plugin.version}
-
{plugin.latest_version || 'Unknown'}
-
- {plugin.has_update && ( - - )} - -
+ {serverInfo && plugins.length > 0 && ( +
+ + {isCheckingUpdates && bulkUpdateProgress && ( +
+ Checking {bulkUpdateProgress.processed}/{bulkUpdateProgress.total}: {bulkUpdateProgress.current_plugin_name} +
- ))} - - ) : ( -

No plugins found in this directory.

- )} + )} + {updateError && ( +
{updateError}
+ )} +
+ )} + + {plugins.length > 0 && ( +
+

Installed Plugins ({plugins.length})

+
+ Name + Current Version + Latest Version + Actions +
+ {plugins.map((plugin) => ( +
+
{plugin.name}
+
{plugin.version}
+
+ {plugin.repository_url && plugin.latest_version ? ( + + {plugin.latest_version} + + ) : ( + plugin.latest_version || 'N/A' + )} +
+
+ + {plugin.has_update && ( + + )} + +
+
+ ))} +
+ )} + + {scanComplete && plugins.length === 0 && ( +
+

Installed Plugins (0)

+

No plugins found in this directory.

        )}

        {selectedPlugin && (
        )}
+
+        {warningMessage && (
+            setWarningMessage(null)}
+          />
+        )}
+
+        {showMatchSelector && pluginToDisambiguate && (
+
+        )}
+
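For reference, a minimal sketch (not part of the patch) of how the new frontend commands and events wired up above can be exercised end to end. The command and event names (`scan_server_dir`, `scan_completed`, `check_plugin_updates`, `save_plugin_data`) and the `{ plugins, repositories }` / `{ plugins, serverPath }` argument shapes are taken from the diff; the payload interfaces and the `@tauri-apps/api` import paths are assumptions and may differ from the project's actual `Plugin` and `ScanProgress` types.

```typescript
// Sketch only: payload shapes are inferred from the handlers in this patch.
import { invoke } from "@tauri-apps/api/core"; // "@tauri-apps/api/tauri" on Tauri v1
import { listen } from "@tauri-apps/api/event";

// Assumed, reduced shape; the real model lives in src-tauri/src/models/plugin.rs.
interface PluginLike {
  name: string;
  version: string;
  file_path: string;
  has_update: boolean;
  latest_version?: string;
}

interface ScanCompletedPayload {
  server_info: { server_type: string };
  plugins: PluginLike[];
}

export async function scanAndCheck(serverPath: string): Promise<void> {
  // scan_server_dir emits scan_started / scan_progress / scan_completed / scan_error.
  const unlisten = await listen<ScanCompletedPayload>("scan_completed", async (event) => {
    const plugins = event.payload.plugins;

    // Bulk update check against the repositories the UI offers.
    const updated = await invoke<PluginLike[]>("check_plugin_updates", {
      plugins,
      repositories: ["SpigotMC", "Modrinth", "GitHub"],
    });

    // Persist results so load_plugin_data can restore them on the next launch.
    await invoke("save_plugin_data", { plugins: updated, serverPath });
    unlisten();
  });

  await invoke("scan_server_dir", { path: serverPath });
}
```

In the patch itself the same flow is split across the `scan_completed` listener and `checkForUpdates`, with per-plugin loading state tracked in `pluginLoadingStates` so single-plugin checks and bulk checks can run without clobbering each other's UI state.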