Feature: add remote file delete, picBed management

First version of PicList.
In the album, you can now delete remote files.
Adds the picBed management function.
This commit is contained in:
萌萌哒赫萝 2023-02-15 23:36:47 +08:00
parent 7421322475
commit efeadb8fb8
355 changed files with 12428 additions and 883 deletions

2
.github/FUNDING.yml vendored
View File

@ -1 +1 @@
custom: ["https://paypal.me/Molunerfinn"]
custom: ["https://paypal.me/Kuingsmile"]

View File

@ -3,12 +3,12 @@ description: 提交一个问题 / Report a bug
title: "[Bug]: "
labels: ["bug"]
assignees:
- molunerfinn
- Kuingsmile
body:
- type: markdown
attributes:
value: |+
## PicGo Issue 模板
## PicList Issue 模板
请依照该模板来提交,否则将会被关闭。
**提问之前请注意你看过 FAQ、文档以及那些被关闭的 issues。否则同样的提问也会被关闭**
@ -24,15 +24,15 @@ body:
options:
- label: "[文档/Doc](https://picgo.github.io/PicGo-Doc/)"
required: true
- label: "[Issues](https://github.com/Molunerfinn/PicGo/issues?q=is%3Aissue+sort%3Aupdated-desc+is%3Aclosed)"
- label: "[Issues](https://github.com/Kuingsmile/PicList/issues?q=is%3Aissue+sort%3Aupdated-desc+is%3Aclosed)"
required: true
- label: "[FAQ](https://github.com/Molunerfinn/PicGo/blob/dev/FAQ.md)"
- label: "[FAQ](https://github.com/Kuingsmile/PicList/blob/dev/FAQ.md)"
required: true
- type: input
id: version
attributes:
label: PicGo的版本 | PicGo Version
placeholder: 例如 v2.3.0-beta.1
label: PicList的版本 | PicList Version
placeholder: 例如 v0.0.1
validations:
required: true
- type: dropdown
@ -58,11 +58,11 @@ body:
id: log
attributes:
label: 相关日志 | Logs
description: 请附上 PicGo 的相关报错日志(用文本的形式)。报错日志可以在 PicGo 设置 -> 设置日志文件 -> 点击打开 后找到 | Please attach PicGo's relevant error log (in text form). The error log can be found in PicGo Settings -> Set Log File -> Click to Open
description: 请附上 PicList 的相关报错日志(用文本的形式)。报错日志可以在 PicList 设置 -> 设置日志文件 -> 点击打开 后找到 | Please attach PicList's relevant error log (in text form). The error log can be found in PicList Settings -> Set Log File -> Click to Open
- type: markdown
attributes:
value: |
最后,喜欢 PicGo 的话不妨给它点个 star~
最后,喜欢 PicList 的话不妨给它点个 star~
如果可以的话,请我喝杯咖啡?首页有赞助二维码,谢谢你的支持!
Finally, if you like PicGo, give it a star~
Finally, if you like PicList, give it a star~
Buy me a cup of coffee if you can? There is a sponsorship QR code on the homepage, thank you for your support!

View File

@ -3,12 +3,12 @@ description: 功能请求 / Feature request
title: "[Feature]: "
labels: ["feature request"]
assignees:
- molunerfinn
- Kuingsmile
body:
- type: markdown
attributes:
value: |+
## PicGo Issue 模板
## PicList Issue 模板
请依照该模板来提交,否则将会被关闭。
**提问之前请注意你看过 FAQ、文档以及那些被关闭的 issues。否则同样的提问也会被关闭**
@ -24,15 +24,15 @@ body:
options:
- label: "[文档/Doc](https://picgo.github.io/PicGo-Doc/)"
required: true
- label: "[Issues](https://github.com/Molunerfinn/PicGo/issues?q=is%3Aissue+sort%3Aupdated-desc+is%3Aclosed)"
- label: "[Issues](https://github.com/Kuingsmile/PicList/issues?q=is%3Aissue+sort%3Aupdated-desc+is%3Aclosed)"
required: true
- label: "[FAQ](https://github.com/Molunerfinn/PicGo/blob/dev/FAQ.md)"
- label: "[FAQ](https://github.com/Kuingsmile/PicList/blob/dev/FAQ.md)"
required: true
- type: input
id: version
attributes:
label: PicGo的版本 | PicGo Version
placeholder: 例如 v2.3.0-beta.1
label: PicList的版本 | PicList Version
placeholder: 例如 v0.0.1
validations:
required: true
- type: dropdown
@ -57,7 +57,7 @@ body:
- type: markdown
attributes:
value: |
最后,喜欢 PicGo 的话不妨给它点个 star~
最后,喜欢 PicList 的话不妨给它点个 star~
如果可以的话,请我喝杯咖啡?首页有赞助二维码,谢谢你的支持!
Finally, if you like PicGo, give it a star~
Finally, if you like PicList, give it a star~
Buy me a cup of coffee if you can? There is a sponsorship QR code on the homepage, thank you for your support!

View File

@ -1,13 +1,13 @@
# main.yml
# Workflow's name
name: Build
name: Auto Build
# Workflow's trigger
on:
push:
branches:
- master
- release
# Workflow's jobs
jobs:
@ -54,5 +54,6 @@ jobs:
yarn upload-dist
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }}
PICGO_ENV_COS_SECRET_ID: ${{ secrets.PICGO_ENV_COS_SECRET_ID }}
PICGO_ENV_COS_SECRET_KEY: ${{ secrets.PICGO_ENV_COS_SECRET_KEY }}
R2_SECRET_ID: ${{ secrets.R2_SECRET_ID }}
R2_SECRET_KEY: ${{ secrets.R2_SECRET_KEY }}
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}

View File

@ -1,7 +1,7 @@
# main.yml
# Workflow's name
name: Build
name: Manually Build
# Workflow's trigger
on: workflow_dispatch
@ -51,5 +51,6 @@ jobs:
yarn upload-dist
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }}
PICGO_ENV_COS_SECRET_ID: ${{ secrets.PICGO_ENV_COS_SECRET_ID }}
PICGO_ENV_COS_SECRET_KEY: ${{ secrets.PICGO_ENV_COS_SECRET_KEY }}
R2_SECRET_ID: ${{ secrets.R2_SECRET_ID }}
R2_SECRET_KEY: ${{ secrets.R2_SECRET_KEY }}
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}

5
.gitignore vendored
View File

@ -19,6 +19,9 @@ dist_electron/
test.js
.env
scripts/*.yml
scripts/generateYmlFile.js
#Electron-builder output
/dist_electron
/dist_electron
/docs
cloc.exe

BIN
356u2spwu37 Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 151 KiB

View File

@ -1,6 +1,6 @@
## 贡献指南
# 贡献指南
### 安装与启动
## 安装与启动
1. 使用 [yarn](https://yarnpkg.com/) 安装依赖
@ -22,16 +22,18 @@ yarn dev
4. 所有的全局类型定义请在 `src/universal/types/` 里添加,如果是 `enum`,请在 `src/universal/types/enum.ts` 里添加。
5. 与图床管理功能相关的代码请在`src/main/manage`和`src/renderer/manage`目录下添加。
### i18n
1. 在 `public/i18n/` 下面创建一种语言的 `yml` 文件,例如 `zh-Hans.yml`。然后参考 `zh-CN.yml` 或者 `en.yml` 编写语言文件。并注意PicGo 会通过语言文件中的 `LANG_DISPLAY_LABEL` 向用户展示该语言的名称。
## i18n
1. 在 `public/i18n/` 下面创建一种语言的 `yml` 文件,例如 `zh-Hans.yml`。然后参考 `zh-CN.yml` 或者 `en.yml` 编写语言文件。并注意PicList 会通过语言文件中的 `LANG_DISPLAY_LABEL` 向用户展示该语言的名称。
2. 在 `src/universal/i18n/index.ts` 里添加一种默认语言。其中 `label` 就是语言文件中 `LANG_DISPLAY_LABEL` 的值,`value` 是语言文件名。
3. 如果是对已有语言文件进行更新,请在更新完,务必运行一遍 `yarn gen-i18n`,确保能生成正确的语言定义文件。
### 提交代码
## 提交代码
1. 请检查代码没有多余的注释、`console.log` 等调试代码。
2. 提交代码前,请执行命令 `git add . && yarn cz`,唤起 PicGo 的[代码提交规范工具](https://github.com/PicGo/bump-version)。通过该工具提交代码。

View File

@ -1,6 +1,6 @@
## Contribution Guidelines
# Contribution Guidelines
### Installation and startup
## Installation and startup
1. Use [yarn](https://yarnpkg.com/) to install dependencies
@ -22,16 +22,17 @@ Startup project.
4. Please add all global type definitions in `src/universal/types/`; if it is an `enum`, add it in `src/universal/types/enum.ts`.
5. Code related to the picture-bed management feature should be added under the `src/main/manage` and `src/renderer/manage` directories.
### i18n
## i18n
1. Create a language `yml` file under `public/i18n/`, for example `zh-Hans.yml`. Then refer to `zh-CN.yml` or `en.yml` to write language files. Also note that PicGo will display the name of the language to the user via `LANG_DISPLAY_LABEL` in the language file.
1. Create a language `yml` file under `public/i18n/`, for example `zh-Hans.yml`. Then refer to `zh-CN.yml` or `en.yml` to write language files. Also note that PicList will display the name of the language to the user via `LANG_DISPLAY_LABEL` in the language file.
2. Add a default language to `src/universal/i18n/index.ts`, where `label` is the value of `LANG_DISPLAY_LABEL` in the language file and `value` is the name of the language file (see the sketch after this list).
3. If you are updating an existing language file, be sure to run `yarn gen-i18n` after the update to ensure that the correct language definition file can be generated.
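For illustration only, a minimal sketch of what such an entry might look like (the `languageList` export name and the existing entries are assumptions; only the `label`/`value` convention comes from the steps above):
```typescript
// Sketch of src/universal/i18n/index.ts -- the export name is an assumption.
// `label` must equal LANG_DISPLAY_LABEL inside the yml file,
// `value` must equal the yml file name under public/i18n/ (without the extension).
export const languageList = [
  { label: '简体中文', value: 'zh-CN' },
  { label: 'English', value: 'en' },
  { label: 'My New Language', value: 'zh-Hans' } // new entry for public/i18n/zh-Hans.yml
]
```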
### Submit code
## Submit code
1. Please check that the code has no extra comments, `console.log` and other debugging code.
2. Before submitting the code, please execute the command `git add . && yarn cz` to invoke PicGo's [Code Submission Specification Tool](https://github.com/PicGo/bump-version). Submit code through this tool.
2. Before submitting the code, please execute the command `git add . && yarn cz` to invoke PicGo's [Code Submission Specification Tool](https://github.com/PicGo/bump-version). Submit code through this tool.

101
FAQ.md
View File

@ -1,80 +1,85 @@
# FAQ
该FAQ修改自PicGo的FAQ,感谢PicGo的作者Molunerfinn。
## 常见问题
> 在使用 PicGo 期间你会遇到很多问题,不过很多问题其实之前就有人提问过,也被解决,所以你可以先看看 [使用文档](https://picgo.github.io/PicGo-Doc/zh/guide/getting-started.html#%E5%BF%AB%E9%80%9F%E4%B8%8A%E6%89%8B),这份 FAQ以及那些被关闭的 [issues](https://github.com/Molunerfinn/PicGo/issues?q=is%3Aissue+is%3Aclosed),应该能找到答案。
> 本软件的上传工具部分来自PicGo基本没有改动请参考PicGo的 [使用文档](https://picgo.github.io/PicGo-Doc/zh/guide/getting-started.html#%E5%BF%AB%E9%80%9F%E4%B8%8A%E6%89%8B)
## 1. 七牛图床上传图片成功后,相册里无法显示或图片无`http://`前缀
## 1. PicList和PicGo有什么关系
通常是你的七牛图床配置里的`设定访问网址`没有加上`http://`或者`https//`头。
PicList项目fork自PicGo项目,基于PicGo进行了二次开发,添加了如下功能:
参考:[issue#79](https://github.com/Molunerfinn/PicGo/issues/79)
注意:以下功能已适配的图床包括:阿里云 OSS、腾讯云 COS、七牛云 Kodo、又拍云、SM.MS、Imgur、GitHub。
## 2. 能否支持图床远端同步删除
- 相册中可同步删除云端图片
- 支持管理所有图床,可以在线进行云端目录查看、文件搜索、批量上传、批量下载、删除文件和图片预览等
- 对于私有存储桶等支持复制预签名链接进行分享
- 优化了PicGo的界面,解锁了窗口大小限制,同时美化了部分界面布局
不能。有些图床比如微博图床、SM.MS、Imgur 等)不支持后台管理,为了架构统一不支持远端删除。
PicList所有新功能的添加没有影响到PicGo的原有功能,所以你可以在PicList中使用PicGo的所有插件,同时仍然可以配合typora、obsidian等软件进行使用。
## 3. 能否支持上传视频文件
## 2. 使用图床管理功能时,出现无法获取目录等错误
目前不能。如果有人开发了相应的插件理论可以支持任意文件上传
请查看日志文件`manage.log`。此外,各平台的API调用基本都有每小时次数限制,如果出现错误请稍后再试。
## 4. 微博图床上传之后无法显示预览图
## 3. 支持哪些图床远端同步删除
通常是挂了全局代理导致的。
可以。本软件基于PicGo进行了二次开发,添加了远端同步删除功能。但是需要你的图床支持,目前支持的图床有:
参考:[issue36](https://github.com/Molunerfinn/PicGo/issues/36)
- 阿里云 OSS
- 腾讯云 COS
- 七牛云 Kodo
- 又拍云
- SM.MS
- Imgur
- GitHub
## 4. 能否支持上传视频文件
可以。通过新添加的图床管理功能,你可以上传任意格式的文件,包括视频文件。同时,在管理界面内上传时使用分片上传/流式上传等方式,相对于PicGo内置的转换为base64的方式,上传更快更稳定。
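As a rough illustration of that multipart route (this is not the actual PicList code; the region, bucket and key names are placeholders), uploading a large file with the `ali-oss` dependency added in this commit could look like:
```typescript
// Illustrative sketch only -- not the PicList implementation.
import OSS from 'ali-oss'

const client = new OSS({
  region: 'oss-cn-hangzhou', // placeholder
  accessKeyId: process.env.OSS_KEY_ID!,
  accessKeySecret: process.env.OSS_KEY_SECRET!,
  bucket: 'my-bucket' // placeholder
})

async function uploadBigFile (localPath: string, objectKey: string): Promise<boolean> {
  // multipartUpload splits the file into parts and uploads them in parallel,
  // instead of reading the whole file into memory as base64.
  const result = await client.multipartUpload(objectKey, localPath, {
    partSize: 1024 * 1024, // 1 MB per part
    parallel: 4,
    progress: (p: number) => console.log(`progress: ${Math.round(p * 100)}%`)
  })
  return result.res.status === 200
}
```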
## 5. 能否支持某某某图床
截止 v1.6PicGo 支持了如下图床:
PicGo本体支持了如下图床:
- `微博图床` v1.0
- `七牛图床` v1.0
- `腾讯云 COS v4\v5 版本` v1.1 & v1.5.0
- `又拍云` v1.2.0
- `GitHub` v1.5.0
- `SM.MS` v1.5.1
- `阿里云 OSS` v1.6.0
- `Imgur` v1.6.0
- `七牛图床`
- `腾讯云 COS`
- `又拍云`
- `GitHub`
- `SM.MS`
- `阿里云 OSS`
- `Imgur`
所以本体内将不会再支持其他图床。需要其他图床支持可以参考目前已有的三方 [插件](https://github.com/PicGo/Awesome-PicGo),如果还是没有你所需要的图床欢迎开发一个插件供大家使用
PicList在上述7个图床之外,计划整合和优化现有插件,内置更多的常用图床。
## 6. 一个图床设置多个信息
此外,PicList兼容PicGo的插件系统,需要其他图床支持可以参考目前已有的PicGo三方 [插件](https://github.com/PicGo/Awesome-PicGo),如果还是没有你所需要的图床,欢迎开发一个插件供大家使用。
不能。因为目前的架构只支持一个图床一份信息。
## 7. GitHub 图床有时能上传,有时上传失败
## 6. GitHub 图床有时能上传,有时上传失败
1. GitHub 图床不支持上传同名文件,如果有同名文件上传,会报错。建议开启 `时间戳重命名` 避免同名文件。
2. GitHub 服务器和国内 GFW 的问题会导致有时上传成功,有时上传失败,无解。想要稳定请使用付费云存储,如阿里云、腾讯云等,价格也不会贵。
## 8. Mac 上无法打开 PicGo 的主窗口界面
## 7. Mac 上无法打开 PicList 的主窗口界面
PicGo 在 Mac 上是一个顶部栏应用,在 dock 栏是不会有图标的。要打开主窗口,请右键或者双指点按顶部栏 PicGo 图标,选择「打开详细窗口」即可打开主窗口。
PicList 在 Mac 上是一个顶部栏应用,在 dock 栏是不会有图标的。要打开主窗口,请右键或者双指点按顶部栏 PicList 图标,选择「打开详细窗口」即可打开主窗口。
## 9. 上传失败,或者是服务器出错
## 8. 上传失败,或者是服务器出错
1. PicGo 自带的图床都经过测试,上传出错一般都不是 PicGo 自身的原因。如果你用的是 GitHub 图床请参考上面的第 7 点。
2. 检查 PicGo 的日志(报错日志可以在 PicGo 设置 -> 设置日志文件 -> 点击打开 后找到),看看 `[PicGo Error]` 的报错信息里有什么关键信息
1. PicList 自带的图床都经过测试,上传出错一般都不是 PicList 自身的原因。如果你用的是 GitHub 图床,请参考上面的第 6 点。
2. 检查 PicList 的日志(报错日志可以在 PicList 设置 -> 设置日志文件 -> 点击打开 后找到),看看 `[PicList Error]` 的报错信息里有什么关键信息
1. 先自行搜索 error 里的报错信息,往往你能百度或者谷歌出问题原因,不必开 issue。
2. 如果有带有 `401` 、`403` 等 `40X` 状态码字样的,不用怀疑,就是你配置写错了,仔细检查配置,看看是否多了空格之类的。
3. 如果带有 `HttpError`、`RequestError` 、 `socket hang up` 等字样的说明这是网络问题我无法帮你解决网络问题请检查你自己的网络是否有代理DNS 设置是否正常等。
3. 通常网络问题引起的上传失败都是因为代理设置不当导致的。如果开启了系统代理,建议同时也在 PicGo 的代理设置中设置对应的HTTP代理。参考 [#912](https://github.com/Molunerfinn/PicGo/issues/912)
3. 通常网络问题引起的上传失败都是因为代理设置不当导致的。如果开启了系统代理,建议同时也在 PicList 的代理设置中设置对应的HTTP代理。
## 10. macOS版本安装完之后没有主界面
请找到PicGo在顶部栏的图标,然后右键(触摸板双指点按,或者鼠标右键),即可找到「打开详细窗口」的菜单。
请找到PicList在顶部栏的图标,然后右键(触摸板双指点按,或者鼠标右键),即可找到「打开详细窗口」的菜单。
## 11. 相册突然无法显示图片 或者 上传后相册不更新 或者 使用Typora+PicGo上传图片成功但是没有写回Typora
## 11. macOS系统安装完PicList显示「文件已损坏」或者安装完打开没有反应
这个原因可能是相册存储文件损坏导致的。可以找到 PicGo 配置文件所在路径下的 `picgo.db` ,将其删掉(删掉前建议备份一遍),再重启 PicGo 试试。
注意同时看看日志文件里有没有什么error必要时可以提issue。2.3.0以上的版本已经解决因为 `picgo.db` 损坏导致的上述问题,建议更新版本。
## 12. Gitee相关问题
如果在使用 Gitee 图床的时候遇到上传的问题,由于 PicGo 并没有官方提供 Gitee 上传服务,无法帮你解决,请去你所使用的 Gitee 插件仓库发相关的issue。
## 13. macOS系统安装完PicGo显示「文件已损坏」或者安装完打开没有反应
因为 PicGo 没有签名,所以会被 macOS 的安全检查所拦下。
因为 PicList 没有签名,所以会被 macOS 的安全检查所拦下。
1. 安装后打开遇到「文件已损坏」的情况,请按如下方式操作:
@ -84,10 +89,10 @@ PicGo 在 Mac 上是一个顶部栏应用,在 dock 栏是不会有图标的。
sudo spctl --master-disable
```
然后放行 PicGo :
然后放行 PicList :
```
xattr -cr /Applications/PicGo.app
xattr -cr /Applications/PicList.app
```
然后就能正常打开。
@ -118,9 +123,5 @@ options:
执行命令
```
xattr -c /Applications/PicGo.app/*
xattr -c /Applications/PicList.app/*
```
2. 如果安装打开后没有反应,请按下方顺序排查:
1. macOS安装好之后PicGo 是不会弹出主窗口的,因为 PicGo 在 macOS 系统里设计是个顶部栏应用。注意看你顶部栏的图标,如果有 PicGo 的图标,说明安装成功了,点击图标即可打开顶部栏窗口。参考上述[第八点](#8-mac-上无法打开-picgo-的主窗口界面)。
2. 如果你是 M1 的系统,此前装过 PicGo 的 x64 版本,但是后来更新了 arm64 的版本发现打开后没反应,请重启电脑即可。

View File

@ -1,5 +1,7 @@
The MIT License (MIT)
Copyright (c) 2017-present, Molunerfinn
Copyright (c) 2019 诗人的咸鱼
Copyright (c) 2023-present, KuingSmile
Permission is hereby granted, free of charge, to any person obtaining a copy

View File

@ -2,43 +2,61 @@
<div align="center">
<img src="http://imgx.horosama.com/admin_uploads/2022/10/2022_10_05_633d79e401694.png" alt="">
<h1>PicList</h1>
<a href="https://github.com/Kuingsmile/PicHoro/releases">
<a href="https://github.com/Kuingsmile/PicList/releases">
<img src="https://img.shields.io/github/downloads/Kuingsmile/PicList/total.svg?style=flat-square" alt="">
</a>
<a href="https://github.com/Kuingsmile/PicHoro/releases/latest">
<a href="https://github.com/Kuingsmile/PicList/releases/latest">
<img src="https://img.shields.io/github/release/Kuingsmile/PicList.svg?style=flat-square" alt="">
</a>
<a href="https://github.com/Kuingsmile/PicHoro">
<a href="https://github.com/Kuingsmile/PicList">
<img src="https://img.shields.io/github/stars/Kuingsmile/PicList.svg?style=flat-square" alt="">
</a>
</div>
&emsp;&emsp;一款综合了PicGo和AList的图片上传和图床管理桌面工具基于PicGo处于早期开发中
&emsp;&emsp;一款fork自PicGo的二次开发项目,在保留PicGo所有功能的同时,为相册添加了同步云端删除功能,并增加了完整的云存储管理功能,包括云端目录查看、文件搜索、批量上传下载和删除文件、复制多种格式文件和图片预览等。
## 开发计划
## 特色功能
本项目的开发初衷是为了解决在个人在使用PicGo桌面版时候的几个痛点
- 保留了PicGo的所有功能,兼容已有的PicGo插件系统,包括和typora、obsidian等的搭配
- 相册中可同步删除云端图片
- 支持管理所有图床,可以在线进行云端目录查看、文件搜索、批量上传、批量下载、删除文件和图片预览等
- 管理界面使用内置数据库缓存目录,加速目录加载速度
- 对于私有存储桶等支持复制预签名链接进行分享(可参考下方示例)
- 优化了PicGo的界面,解锁了窗口大小限制,同时美化了部分界面布局
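A minimal sketch of how a pre-signed share link for a private bucket can be produced (illustrative only, using the `ali-oss` dependency; the region, bucket and object names are placeholders):
```typescript
// Illustrative sketch only -- not the exact PicList code path.
import OSS from 'ali-oss'

const client = new OSS({
  region: 'oss-cn-hangzhou', // placeholder
  accessKeyId: process.env.OSS_KEY_ID!,
  accessKeySecret: process.env.OSS_KEY_SECRET!,
  bucket: 'my-private-bucket' // placeholder
})

// signatureUrl returns a time-limited URL that can be shared without exposing bucket credentials.
const shareLink = client.signatureUrl('folder/photo.png', { expires: 3600 }) // valid for one hour
console.log(shareLink)
```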
1. 相册中无法同步删除云端图片,不小心上传错或者想更换图片时不方便;
2. 只能上传图片,无法上传视频或其它格式文件,在需要向文章中插入其它资源的时候需要自己去上传;
3. 不能查看和复制使用PicGo软件之前上传的图片的链接
4. 不能从云端取回文件。
## 下载安装
为了优化以上问题基于PicHoro的开发经验以及使用AList软件时的一些体验决定基于PicGo开发一款增强版的软件PicList期望除了PicGo的核心功能外增加如下功能
### GitHub release
1. 相册可同步删除云端图片,支持加强版的图片预览和元信息查看;
2. 支持所有格式和不大于2G的文件的上传
3. 支持管理所有图床,可以在线进行云端目录查看、文件搜索、上传、下载、删除和文件预览等;
4. 支持不同图床之间的文件复制和移动等;
5. 兼容已有的PicGo插件系统。
https://github.com/Kuingsmile/PicList/releases
## 开发进度
### CloudFlare R2
开发中预计在2023年2月底之前发布第一个发行版。
请参考release页面的说明
## 应用截图
![image](https://user-images.githubusercontent.com/96409857/219062180-ba6de40b-94bb-45be-a510-c4d231920032.png)
![image](https://user-images.githubusercontent.com/96409857/219063188-d7e0b0e7-6e3c-4deb-8bef-0b2b57d2d7ee.png)
![image](https://user-images.githubusercontent.com/96409857/219063398-9a8607df-a1e2-4121-a652-ebd63b38007b.png)
## 开发说明
1. 你需要有 Node、Git 环境,了解 npm 的相关知识。
2. git clone https://github.com/Kuingsmile/PicList.git 并进入项目,执行 yarn 下载依赖。注意:如果你没有 yarn,请去官网下载安装后再使用。用 npm install 将导致未知错误!
3. Mac 需要有 Xcode 环境,Windows 需要有 VS 环境。
4. 如果需要贡献代码,可以参考[贡献指南](https://github.com/Kuingsmile/PicList/blob/dev/CONTRIBUTING.md)。
## 其它相关
- [PicGo](https://github.com/Molunerfinn/PicGo) : 原版PicGo项目
## License
本项目基于MIT协议开源,欢迎大家使用和贡献代码,感谢原作者Molunerfinn的开源精神。
[MIT](https://opensource.org/licenses/MIT)
Copyright (c) 2023 Kuingsmile
Copyright (c) 2017-present, Molunerfinn
Copyright (c) 2023-present Kuingsmile

Binary file not shown.

Before

Width:  |  Height:  |  Size: 16 KiB

After

Width:  |  Height:  |  Size: 34 KiB

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 31 KiB

After

Width:  |  Height:  |  Size: 274 KiB

BIN
build/icons/icon2.icns Normal file

Binary file not shown.

View File

@ -1,13 +1,13 @@
!macro customInstall
SetRegView 64
WriteRegStr HKCR "*\shell\PicGo" "" "Upload pictures w&ith PicGo"
WriteRegStr HKCR "*\shell\PicGo" "Icon" "$INSTDIR\PicGo.exe"
WriteRegStr HKCR "*\shell\PicGo\command" "" '"$INSTDIR\PicGo.exe" "upload" "%1"'
WriteRegStr HKCR "*\shell\PicList" "" "Upload pictures w&ith PicList"
WriteRegStr HKCR "*\shell\PicList" "Icon" "$INSTDIR\PicList.exe"
WriteRegStr HKCR "*\shell\PicList\command" "" '"$INSTDIR\PicList.exe" "upload" "%1"'
SetRegView 32
WriteRegStr HKCR "*\shell\PicGo" "" "Upload pictures w&ith PicGo"
WriteRegStr HKCR "*\shell\PicGo" "Icon" "$INSTDIR\PicGo.exe"
WriteRegStr HKCR "*\shell\PicGo\command" "" '"$INSTDIR\PicGo.exe" "upload" "%1"'
WriteRegStr HKCR "*\shell\PicList" "" "Upload pictures w&ith PicList"
WriteRegStr HKCR "*\shell\PicList" "Icon" "$INSTDIR\PicList.exe"
WriteRegStr HKCR "*\shell\PicList\command" "" '"$INSTDIR\PicList.exe" "upload" "%1"'
!macroend
!macro customUninstall
DeleteRegKey HKCR "*\shell\PicGo"
DeleteRegKey HKCR "*\shell\PicList"
!macroend

View File

@ -1,216 +0,0 @@
<template lang='pug'>
#app(v-cloak)
#header
.mask
img.logo(src="~icons/256x256.png", alt="PicGo")
h1.title PicGo
small(v-if="version") {{ version }}
h2.desc 图片上传+管理新体验
button.download(@click="goLink('https://github.com/Molunerfinn/picgo/releases')") 免费下载
button.download(@click="goLink('https://picgo.github.io/PicGo-Doc/zh/guide/')") 查看文档
h3.desc
| 基于#[a(href="https://github.com/SimulatedGREG/electron-vue" target="_blank") electron-vue]开发
h3.desc
| 支持macOS,Windows,Linux
h3.desc
| 支持#[a(href="https://picgo.github.io/PicGo-Doc/zh/guide/config.html#%E6%8F%92%E4%BB%B6%E8%AE%BE%E7%BD%AE%EF%BC%88v2-0%EF%BC%89" target="_blank") 插件系统]让PicGo更强大
#container.container-fluid
.row.ex-width
img.gallery.col-xs-10.col-xs-offset-1.col-md-offset-2.col-md-8(src="https://cdn.jsdelivr.net/gh/Molunerfinn/test/picgo-site/first.png")
.row.ex-width.display-list
.display-list__item(v-for="(item, index) in itemList" :key="index" :class="{ 'o-item': index % 2 !== 0 }")
.col-xs-10.col-xs-offset-1.col-md-7.col-md-offset-0
img(:src="item.url")
.col-xs-10.col-xs-offset-1.col-md-5.col-md-offset-0.display-list__content
.display-list__title {{ item.title }}
.display-list__desc {{ item.desc }}
.row.ex-width.info
.col-xs-10.col-xs-offset-1
| &copy;2017 - {{ year }} #[a(href="https://github.com/Molunerfinn" target="_blank") Molunerfinn]
</template>
<script>
export default {
name: 'HomePage',
data () {
return {
version: '',
year: new Date().getFullYear(),
itemList: [
{
url: 'https://cdn.jsdelivr.net/gh/Molunerfinn/test/picgo-site/second.png',
title: '精致设计',
desc: 'macOS系统下支持拖拽至menubar图标实现上传。menubar app 窗口显示最新上传的5张图片以及剪贴板里的图片。点击图片自动将上传的链接复制到剪贴板。Windows平台不支持'
},
{
url: 'https://cdn.jsdelivr.net/gh/Molunerfinn/test/picgo-site/third.png',
title: 'Mini小窗',
desc: 'Windows以及Linux系统下提供一个mini悬浮窗用于用户拖拽上传节约你宝贵的桌面空间。'
},
{
url: 'https://cdn.jsdelivr.net/gh/Molunerfinn/test/picgo-site/forth.png',
title: '便捷管理',
desc: '查看你的上传记录,重复使用更方便。支持点击图片大图查看。支持删除图片(仅本地记录),让界面更加干净。'
},
{
url: 'https://cdn.jsdelivr.net/gh/Molunerfinn/test/picgo-site/fifth.png',
title: '可选图床',
desc: '默认支持微博图床、七牛图床、腾讯云COS、又拍云、GitHub、SM.MS、阿里云OSS、Imgur。方便不同图床的上传需求。2.0版本开始更可以自己开发插件实现其他图床的上传需求。'
},
{
url: 'https://cdn.jsdelivr.net/gh/Molunerfinn/test/picgo-site/sixth.png',
title: '多样链接',
desc: '支持5种默认剪贴板链接格式包括一种自定义格式让你的文本编辑游刃有余。'
},
{
url: 'https://cdn.jsdelivr.net/gh/Molunerfinn/test/picgo-site/seventh.png',
title: '插件系统',
desc: '2.0版本开始支持插件系统让PicGo发挥无限潜能成为一个极致的效率工具。'
}
]
}
},
created () {
this.getVersion()
},
methods: {
goLink (link) {
window.open(link, '_blank')
},
async getVersion () {
const release = 'https://api.github.com/repos/Molunerfinn/PicGo/releases/latest'
const res = await this.$http.get(release)
this.version = res.data.name
}
}
}
</script>
<style lang='stylus'>
[v-cloak]
display none
*
box-sizing border-box
body,
html,
h1
margin 0
padding 0
font-family "Source Sans Pro","Helvetica Neue","PingFang SC","Hiragino Sans GB","Microsoft YaHei","微软雅黑",Arial,sans-serif
#app
position relative
.mask
position absolute
width 100%
height 100vh
top 0
left 0
background rgba(0,0,0, 0.7)
z-index -1
#header
height 100vh
width 100%
background-image url("https://cdn.jsdelivr.net/gh/Molunerfinn/test/picgo-site/bg.jpeg")
background-attachment fixed
background-size cover
background-position center
text-align center
padding 15vh
position relative
z-index 2
.logo
width 120px
.title
color #4BA2E2
font-size 36px
font-weight 300
margin 10px auto
text-align center
small
margin-left 10px
font-size 14px
.desc
font-weight 400
margin 20px auto 10px
color #ddd
a
text-decoration none
color #4BA2E2
.download
display inline-block
line-height 1
white-space nowrap
cursor pointer
background transparent
border 1px solid #d8dce5
color #ddd
-webkit-appearance none
text-align center
box-sizing border-box
outline none
margin 20px 12px
transition .1s
font-weight 500
user-select none
padding 12px 20px
font-size 14px
border-radius 20px
padding 12px 23px
transition .2s all ease-in-out
&:hover
background #ddd
color rgba(0,0,0, 0.7)
#container
position relative
text-align center
margin-top -10vh
z-index 3
.gallery
margin-bottom 60px
cursor pointer
transition all .2s ease-in-out
&:hover
transform scale(1.05)
.display-list
&__item
padding 48px
text-align left
background #2E2E2E
overflow hidden
&.o-item
background #fff
.display-list__desc
color #2E2E2E
img
width 100%
cursor pointer
transition all .2s ease-in-out
&:hover
transform scale(1.05)
&__content
padding-top 120px
&__title
color #4BA2E2
font-size 50px
&__desc
color #fff
margin-top 20px
.info
padding 48px 0
background #2E2E2E
color #fff
a
text-decoration none
color #fff
@media (max-width: 768px)
#header
padding 10vh
#container
.display-list
&__item
padding 24px 12px
&__content
padding-top 30px
&__title
font-size 25px
&__desc
margin-top 12px
</style>

View File

@ -1,10 +0,0 @@
import Vue from 'vue'
import App from './APP.vue'
import 'melody.css'
import axios from 'axios'
Vue.prototype.$http = axios
new Vue({
render: h => h(App)
}).$mount('#app')

View File

@ -1,12 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>PicGo</title>
</head>
<body>
<div id="app"></div>
</body>
</html>

View File

@ -15,28 +15,43 @@
"postinstall": "electron-builder install-app-deps",
"postuninstall": "electron-builder install-app-deps",
"release": "vue-cli-service electron:build --publish always",
"upload-dist": "node ./scripts/upload-dist-to-cos.js"
"upload-dist": "node ./scripts/upload-dist-to-r2.js"
},
"dependencies": {
"@element-plus/icons-vue": "^2.0.10",
"@imengyu/vue3-context-menu": "^1.2.2",
"@octokit/rest": "^19.0.7",
"@picgo/i18n": "^1.0.0",
"@picgo/store": "^2.0.4",
"axios": "^0.19.0",
"@types/mime-types": "^2.1.1",
"ali-oss": "^6.17.1",
"aws-sdk": "^2.1304.0",
"axios": "^1.3.2",
"compare-versions": "^4.1.3",
"core-js": "^3.27.1",
"cos-nodejs-sdk-v5": "^2.11.19",
"custom-electron-titlebar": "^4.1.5",
"element-plus": "^2.2.28",
"fs-extra": "^10.0.0",
"js-yaml": "^4.1.0",
"dexie": "^3.2.3",
"element-plus": "^2.2.30",
"fast-xml-parser": "^4.1.1",
"form-data": "^4.0.0",
"fs-extra": "^11.1.0",
"got": "^12.5.3",
"hpagent": "^1.2.0",
"keycode": "^2.2.0",
"lodash-id": "^0.14.0",
"lowdb": "^1.0.0",
"mime-types": "^2.1.35",
"mitt": "^3.0.0",
"picgo": "^1.5.0",
"piclist": "^0.0.8",
"pinia": "^2.0.29",
"pinia-plugin-persistedstate": "^3.0.2",
"qiniu": "^7.8.0",
"qrcode.vue": "^3.3.3",
"shell-path": "2.1.0",
"shell-path": "3.0.0",
"upyun": "^3.4.6",
"uuid": "^9.0.0",
"vue": "^3.2.45",
"vue": "^3.2.47",
"vue-router": "^4.1.6",
"vue3-lazyload": "^0.3.6",
"vue3-photo-preview": "^0.2.9",
@ -45,8 +60,9 @@
"devDependencies": {
"@babel/plugin-proposal-optional-chaining": "^7.16.7",
"@picgo/bump-version": "^1.1.2",
"@types/ali-oss": "^6.16.7",
"@types/electron-devtools-installer": "^2.2.0",
"@types/fs-extra": "^9.0.13",
"@types/fs-extra": "^11.0.1",
"@types/inquirer": "^6.5.0",
"@types/js-yaml": "^4.0.5",
"@types/lowdb": "^1.0.9",
@ -70,16 +86,16 @@
"dotenv": "^16.0.1",
"electron": "^22.0.2",
"electron-devtools-installer": "^3.2.0",
"eslint": "^8.31.0",
"eslint": "^8.34.0",
"eslint-config-standard": ">=16.0.0",
"eslint-plugin-import": "^2.24.2",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^5.1.0",
"eslint-plugin-vue": "^9.8.0",
"eslint-plugin-vue": "^9.9.0",
"husky": "^3.1.0",
"stylus": "^0.54.7",
"stylus-loader": "^3.0.2",
"typescript": "^4.4.3",
"typescript": "^4.9.5",
"vue-cli-plugin-electron-builder": "^3.0.0-alpha.4"
},
"commitlint": {

View File

@ -14,7 +14,7 @@
<key>NSMenuItem</key>
<dict>
<key>default</key>
<string>Upload pictures with PicGo</string>
<string>Upload pictures with PicList</string>
</dict>
<key>NSMessage</key>
<string>runWorkflowAsService</string>

View File

@ -59,7 +59,7 @@
<key>ActionParameters</key>
<dict>
<key>COMMAND_STRING</key>
<string>/Applications/PicGo.app/Contents/MacOS/PicGo upload "$@" &gt; /dev/null 2&gt;&amp;1 &amp;</string>
<string>/Applications/PicList.app/Contents/MacOS/PicList upload "$@" &gt; /dev/null 2&gt;&amp;1 &amp;</string>
<key>CheckedForUserDefaultShell</key>
<true/>
<key>inputMethod</key>

View File

@ -34,7 +34,7 @@ PICBEDS_SETTINGS: Picbeds Settings
PICBEDS_MANAGE: Picbeds Manage
PICLIST_SETTINGS: PicList Settings
PLUGIN_SETTINGS: Plugins Settings
PICGO_SPONSOR_TEXT: PicList is a free software, if you like it, please don't forget to buy me a cup of coffee.
PICLIST_SPONSOR_TEXT: PicList is free software. If you like it, please don't forget to buy me a cup of coffee.
ALIPAY: Alipay
WECHATPAY: Wechat Pay
CHOOSE_PICBED: Choose Picbed
@ -88,7 +88,7 @@ SETTINGS_PLUGIN_INSTALL_MIRROR: Mirror for Plugin Install
SETTINGS_CURRENT_VERSION: Current Version
SETTINGS_NEWEST_VERSION: Newest Version
SETTINGS_GETING: Getting...
SETTINGS_TIPS_HAS_NEW_VERSION: PicGo has a new version, please click confirm to open download page
SETTINGS_TIPS_HAS_NEW_VERSION: PicList has a new version, please click confirm to open download page
SETTINGS_LOG_FILE: Log File
SETTINGS_LOG_LEVEL: Log Level
SETTINGS_LOG_FILE_SIZE: Log File Size
@ -191,12 +191,12 @@ UPDATE_PLUGIN: Update Plugin
TIPS_NOTICE: Tips
TIPS_WARNING: Warning
TIPS_ERROR: Error
TIPS_INSTALL_NODE_AND_RELOAD_PICGO: Please install Node.js and restart PicGo to continue
TIPS_INSTALL_NODE_AND_RELOAD_PICGO: Please install Node.js and restart PicList to continue
TIPS_PLUGIN_REMOVE_GALLERY_ITEM: Plugin is trying to remove some images from the album gallery, continue?
TIPS_PLUGIN_OVERWRITE_GALLERY: Plugin is trying to overwrite the album gallery, continue?
TIPS_UPLOAD_NOT_PICTURES: The latest clipboard item is not a picture
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_DEFAULT: PicGo config file broken, has been restored to default
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_BACKUP: PicGo config file broken, has been restored to backup
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_DEFAULT: PicList config file broken, has been restored to default
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_BACKUP: PicList config file broken, has been restored to backup
TIPS_PICGO_BACKUP_FILE_VERSION: 'Backup file version: ${v}'
TIPS_CUSTOM_CONFIG_FILE_PATH_ERROR: Custom config file parse error, please check the path content
TIPS_SHORTCUT_MODIFIED_SUCCEED: Shortcut modified successfully

View File

@ -34,7 +34,7 @@ PICBEDS_SETTINGS: 图床设置
PICBEDS_MANAGE: 图床管理
PICLIST_SETTINGS: PicList设置
PLUGIN_SETTINGS: 插件设置
PICGO_SPONSOR_TEXT: PicList是免费开源的软件如果你喜欢它对你有帮助可以请我喝杯蜜雪冰城~
PICLIST_SPONSOR_TEXT: PicList是免费开源的软件如果你喜欢它对你有帮助可以请我喝杯蜜雪冰城~
ALIPAY: 支付宝
WECHATPAY: 微信支付
CHOOSE_PICBED: 选择图床
@ -88,7 +88,7 @@ SETTINGS_PLUGIN_INSTALL_MIRROR: 插件安装镜像
SETTINGS_CURRENT_VERSION: 当前版本
SETTINGS_NEWEST_VERSION: 最新版本
SETTINGS_GETING: 正在获取中
SETTINGS_TIPS_HAS_NEW_VERSION: PicGo更新啦,请点击确定打开下载页面
SETTINGS_TIPS_HAS_NEW_VERSION: PicList更新啦,请点击确定打开下载页面
SETTINGS_LOG_FILE: 日志文件
SETTINGS_LOG_LEVEL: 日志记录等级
SETTINGS_LOG_FILE_SIZE: 日志文件大小
@ -191,12 +191,12 @@ UPDATE_PLUGIN: 更新插件
TIPS_NOTICE: 注意
TIPS_WARNING: 警告
TIPS_ERROR: 发生错误
TIPS_INSTALL_NODE_AND_RELOAD_PICGO: 请安装Node.js并重启PicGo再继续操作
TIPS_INSTALL_NODE_AND_RELOAD_PICGO: 请安装Node.js并重启PicList再继续操作
TIPS_PLUGIN_REMOVE_GALLERY_ITEM: 有插件正在试图删除一些相册图片,是否继续
TIPS_PLUGIN_OVERWRITE_GALLERY: 有插件正在试图覆盖相册列表,是否继续
TIPS_UPLOAD_NOT_PICTURES: 剪贴板最新的一条记录不是图片
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_DEFAULT: PicGo 配置文件损坏,已经恢复为默认配置
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_BACKUP: PicGo 配置文件损坏,已经恢复为备份配置
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_DEFAULT: PicList 配置文件损坏,已经恢复为默认配置
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_BACKUP: PicList 配置文件损坏,已经恢复为备份配置
TIPS_PICGO_BACKUP_FILE_VERSION: '备份文件版本: ${v}'
TIPS_CUSTOM_CONFIG_FILE_PATH_ERROR: 自定义文件解析出错,请检查路径内容是否正确
TIPS_SHORTCUT_MODIFIED_SUCCEED: 快捷键已经修改成功

View File

@ -34,7 +34,7 @@ PICBEDS_SETTINGS: 圖床設定
PICBEDS_MANAGE: 圖床管理
PICLIST_SETTINGS: PicList設定
PLUGIN_SETTINGS: 插件設定
PICGO_SPONSOR_TEXT: PicList是開放原始碼的軟體如果你喜歡它對你有幫助不妨請我喝杯咖啡~
PICLIST_SPONSOR_TEXT: PicList是開放原始碼的軟體如果你喜歡它對你有幫助不妨請我喝杯咖啡~
ALIPAY: 支付寶
WECHATPAY: 微信支付
CHOOSE_PICBED: 選擇圖床
@ -88,7 +88,7 @@ SETTINGS_PLUGIN_INSTALL_MIRROR: 插件安裝鏡像
SETTINGS_CURRENT_VERSION: 當前版本
SETTINGS_NEWEST_VERSION: 最新版本
SETTINGS_GETING: 正在取得中
SETTINGS_TIPS_HAS_NEW_VERSION: PicGo更新啦,請點擊確定開啟下載頁面
SETTINGS_TIPS_HAS_NEW_VERSION: PicList更新啦,請點擊確定開啟下載頁面
SETTINGS_LOG_FILE: 記錄檔案
SETTINGS_LOG_LEVEL: 記錄等级
SETTINGS_LOG_FILE_SIZE: 記錄檔案大小
@ -191,12 +191,12 @@ UPDATE_PLUGIN: 更新插件
TIPS_NOTICE: 注意
TIPS_WARNING: 警告
TIPS_ERROR: 發生錯誤
TIPS_INSTALL_NODE_AND_RELOAD_PICGO: 請安裝Node.js並重新啟動PicGo再繼續操作
TIPS_INSTALL_NODE_AND_RELOAD_PICGO: 請安裝Node.js並重新啟動PicList再繼續操作
TIPS_PLUGIN_REMOVE_GALLERY_ITEM: 有插件正在試圖刪除一些相簿圖片,是否繼續?
TIPS_PLUGIN_OVERWRITE_GALLERY: 有插件正在試圖覆蓋相簿列表,是否繼續?
TIPS_UPLOAD_NOT_PICTURES: 剪貼簿最新的一條記錄不是圖片
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_DEFAULT: PicGo 設定檔案已損壞,已經恢復為預設設定
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_BACKUP: PicGo 設定檔案已損壞,已經恢復為備份設定
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_DEFAULT: PicList 設定檔案已損壞,已經恢復為預設設定
TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_BACKUP: PicList 設定檔案已損壞,已經恢復為備份設定
TIPS_PICGO_BACKUP_FILE_VERSION: '備份檔案版本: ${v}'
TIPS_CUSTOM_CONFIG_FILE_PATH_ERROR: 自訂設定檔案解析出錯,請檢查路徑內容是否正確
TIPS_SHORTCUT_MODIFIED_SUCCEED: 快捷鍵已經修改成功

View File

@ -6,11 +6,11 @@
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<link rel="icon" href="<%= BASE_URL %>favicon.ico">
<title>PicGo</title>
<title>PicList</title>
</head>
<body>
<noscript>
<strong>We're sorry but picgo-new doesn't work properly without JavaScript enabled. Please enable it to continue.</strong>
<strong>We're sorry but piclist-new doesn't work properly without JavaScript enabled. Please enable it to continue.</strong>
</noscript>
<div id="app"></div>
<!-- built files will be auto injected -->

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.6 KiB

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.4 KiB

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.2 KiB

After

Width:  |  Height:  |  Size: 2.8 KiB

BIN
public/picbed/aliyun.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.0 KiB

BIN
public/picbed/github.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 84 KiB

BIN
public/picbed/imgur.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 61 KiB

BIN
public/picbed/qiniu.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 49 KiB

BIN
public/picbed/smms.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 36 KiB

BIN
public/picbed/tcyun.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.0 KiB

BIN
public/picbed/upyun.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.5 KiB

View File

@ -2,24 +2,24 @@
// macos
const darwin = [{
appNameWithPrefix: 'PicGo-',
appNameWithPrefix: 'PicList-',
ext: '.dmg',
arch: '-arm64',
'version-file': 'latest-mac.yml'
}, {
appNameWithPrefix: 'PicGo-',
appNameWithPrefix: 'PicList-',
ext: '.dmg',
arch: '-x64',
'version-file': 'latest-mac.yml'
}]
const linux = [{
appNameWithPrefix: 'PicGo-',
appNameWithPrefix: 'PicList-',
ext: '.AppImage',
arch: '',
'version-file': 'latest-linux.yml'
}, {
appNameWithPrefix: 'picgo_',
appNameWithPrefix: 'piclist_',
ext: '.snap',
arch: '_amd64',
'version-file': 'latest-linux.yml'
@ -27,17 +27,17 @@ const linux = [{
// windows
const win32 = [{
appNameWithPrefix: 'PicGo-Setup-',
appNameWithPrefix: 'PicList-Setup-',
ext: '.exe',
arch: '-ia32',
'version-file': 'latest.yml'
}, {
appNameWithPrefix: 'PicGo-Setup-',
appNameWithPrefix: 'PicList-Setup-',
ext: '.exe',
arch: '-x64',
'version-file': 'latest.yml'
}, {
appNameWithPrefix: 'PicGo-Setup-',
appNameWithPrefix: 'PicList-Setup-',
ext: '.exe',
arch: '', // 32 & 64
'version-file': 'latest.yml'

View File

@ -1,18 +1,18 @@
const pkg = require('../package.json')
const version = pkg.version
// TODO: use the same name format
const generateURL = (platform, ext, prefix = 'PicGo-') => {
return `https://picgo-1251750343.cos.ap-chengdu.myqcloud.com/${version}/${prefix}${version}${platform}${ext}`
const generateURL = (platform, ext, prefix = 'PicList-') => {
return `https://release.piclist.cn/${version}/${prefix}${version}${platform}${ext}`
}
const platformExtList = [
['-arm64', '.dmg', 'PicGo-'],
['-x64', '.dmg', 'PicGo-'],
['', '.AppImage', 'PicGo-'],
['-ia32', '.exe', 'PicGo-Setup-'],
['-x64', '.exe', 'PicGo-Setup-'],
['', '.exe', 'PicGo-Setup-'],
['_amd64', '.snap', 'picgo_']
['-arm64', '.dmg', 'PicList-'],
['-x64', '.dmg', 'PicList-'],
['', '.AppImage', 'PicList-'],
['-ia32', '.exe', 'PicList-Setup-'],
['-x64', '.exe', 'PicList-Setup-'],
['', '.exe', 'PicList-Setup-'],
['_amd64', '.snap', 'piclist_']
]
const links = platformExtList.map(([arch, ext, prefix]) => {

View File

@ -1,103 +0,0 @@
// upload dist bundled-app to cos
require('dotenv').config()
const crypto = require('crypto')
const fs = require('fs')
const mime = require('mime-types')
const pkg = require('../package.json')
const configList = require('./config')
const axios = require('axios').default
const path = require('path')
const distPath = path.join(__dirname, '../dist_electron')
const BUCKET = 'picgo-1251750343'
// const AREA = 'ap-chengdu'
const VERSION = pkg.version
const FILE_PATH = `${VERSION}/`
const SECRET_ID = process.env.PICGO_ENV_COS_SECRET_ID
const SECRET_KEY = process.env.PICGO_ENV_COS_SECRET_KEY
// https://cloud.tencent.com/document/product/436/7778#signature
/**
* @param {string} fileName
* @returns
*/
const generateSignature = (fileName, folder = FILE_PATH) => {
const secretKey = SECRET_KEY
// const area = AREA
const bucket = BUCKET
const path = folder
const today = Math.floor(new Date().getTime() / 1000)
const tomorrow = today + 86400
const signTime = `${today};${tomorrow}`
const signKey = crypto.createHmac('sha1', secretKey).update(signTime).digest('hex')
const httpString = `put\n/${path}${fileName}\n\nhost=${bucket}.cos.accelerate.myqcloud.com\n`
const sha1edHttpString = crypto.createHash('sha1').update(httpString).digest('hex')
const stringToSign = `sha1\n${signTime}\n${sha1edHttpString}\n`
const signature = crypto.createHmac('sha1', signKey).update(stringToSign).digest('hex')
return {
signature,
signTime
}
}
/**
*
* @param {string} fileName
* @param {Buffer} fileBuffer
* @param {{ signature: string, signTime: string }} signature
* @returns
*/
const getReqOptions = (fileName, fileBuffer, signature, folder = FILE_PATH) => {
return {
method: 'PUT',
url: `http://${BUCKET}.cos.accelerate.myqcloud.com/${encodeURI(folder)}${encodeURI(fileName)}`,
headers: {
Host: `${BUCKET}.cos.accelerate.myqcloud.com`,
Authorization: `q-sign-algorithm=sha1&q-ak=${SECRET_ID}&q-sign-time=${signature.signTime}&q-key-time=${signature.signTime}&q-header-list=host&q-url-param-list=&q-signature=${signature.signature}`,
contentType: mime.lookup(fileName),
useAgent: `PicGo;${pkg.version};null;null`
},
maxContentLength: Infinity,
maxBodyLength: Infinity,
data: fileBuffer,
resolveWithFullResponse: true
}
}
const uploadFile = async () => {
try {
const platform = process.platform
if (configList[platform]) {
let versionFileHasUploaded = false
for (const [index, config] of configList[platform].entries()) {
const fileName = `${config.appNameWithPrefix}${VERSION}${config.arch}${config.ext}`
const filePath = path.join(distPath, fileName)
const versionFilePath = path.join(distPath, config['version-file'])
let versionFileName = config['version-file']
if (VERSION.toLocaleLowerCase().includes('beta')) {
versionFileName = versionFileName.replace('.yml', '.beta.yml')
}
// upload dist file
const signature = generateSignature(fileName)
const reqOptions = getReqOptions(fileName, fs.readFileSync(filePath), signature)
console.log('[PicGo Dist] Uploading...', fileName, `${index + 1}/${configList[platform].length}`)
await axios.request(reqOptions)
// upload version file
if (!versionFileHasUploaded) {
const signature = generateSignature(versionFileName, '')
const reqOptions = getReqOptions(versionFileName, fs.readFileSync(versionFilePath), signature, '')
console.log('[PicGo Version File] Uploading...', versionFileName)
await axios.request(reqOptions)
versionFileHasUploaded = true
}
}
} else {
console.warn('platform not supported!', platform)
}
} catch (e) {
console.error(e)
}
}
uploadFile()

View File

@ -0,0 +1,67 @@
// upload dist bundled-app to r2
require('dotenv').config()
const S3 = require('aws-sdk/clients/s3')
const pkg = require('../package.json')
const configList = require('./config')
const fs = require('fs')
const path = require('path')
const BUCKET = 'piclist-dl'
const VERSION = pkg.version
const FILE_PATH = `${VERSION}/`
const ACCOUNT_ID = process.env.R2_ACCOUNT_ID
const SECRET_ID = process.env.R2_SECRET_ID
const SECRET_KEY = process.env.R2_SECRET_KEY
console.log(ACCOUNT_ID, SECRET_ID, SECRET_KEY)
const s3 = new S3({
endpoint: `https://${ACCOUNT_ID}.r2.cloudflarestorage.com`,
accessKeyId: SECRET_ID,
secretAccessKey: SECRET_KEY,
signatureVersion: 'v4',
})
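// uploadFile: push each platform artifact listed in scripts/config.js to the R2 bucket under
// `${VERSION}/`, then upload the matching electron-updater version file (latest*.yml) once per run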
const uploadFile = async () => {
try {
const platform = process.platform
if (configList[platform]) {
let versionFileHasUploaded = false
for (const [index, config] of configList[platform].entries()) {
const fileName = `${config.appNameWithPrefix}${VERSION}${config.arch}${config.ext}`
const distPath = path.join(__dirname, '../dist_electron')
let versionFileName = config['version-file']
console.log('[PicList Dist] Uploading...', fileName, `${index + 1}/${configList[platform].length}`)
const fileBuffer = fs.readFileSync(path.join(distPath, fileName))
await s3.upload({
Bucket: BUCKET,
Key: `${FILE_PATH}${fileName}`,
Body: fileBuffer
}).promise()
// upload version file
if (!versionFileHasUploaded) {
console.log('[PicList Version File] Uploading...', versionFileName)
let versionFilePath
if (platform === 'win32') {
versionFilePath = path.join(distPath, 'latest.yml')
} else if (platform === 'darwin') {
versionFilePath = path.join(distPath, 'latest-mac.yml')
} else {
versionFilePath = path.join(distPath, 'latest-linux.yml')
}
const versionFileBuffer = fs.readFileSync(versionFilePath)
await s3.upload({
Bucket: BUCKET,
Key: `${versionFileName}`,
Body: versionFileBuffer
}).promise()
versionFileHasUploaded = true
}
}
} else {
console.warn('platform not supported!', platform)
}
} catch (err) {
console.error(err)
}
}
uploadFile()

View File

@ -1,23 +1,3 @@
import { bootstrap } from '~/main/lifeCycle'
bootstrap.launchApp()
/**
* Auto Updater
*
* Uncomment the following code below and install `electron-updater` to
* support auto updating. Code Signing with a valid certificate is required.
* https://simulatedgreg.gitbooks.io/electron-vue/content/en/using-electron-builder.html#auto-updating
*/
// import { autoUpdater } from 'electron-updater'
// autoUpdater.on('update-downloaded', () => {
// autoUpdater.quitAndInstall()
// })
// app.on('ready', () => {
// if (process.env.NODE_ENV === 'production') {
// autoUpdater.checkForUpdates()
// }
// })

View File

@ -7,6 +7,7 @@ import { webFrame } from 'electron'
import VueLazyLoad from 'vue3-lazyload'
import axios from 'axios'
import { mainMixin } from './renderer/utils/mainMixin'
import ContextMenu from '@imengyu/vue3-context-menu'
import { dragMixin } from '@/utils/mixin'
import { initTalkingData } from './renderer/utils/analytics'
import db from './renderer/utils/db'
@ -15,6 +16,8 @@ import { getConfig, saveConfig, sendToMain, triggerRPC } from '@/utils/dataSende
import { store } from '@/store'
import vue3PhotoPreview from 'vue3-photo-preview'
import 'vue3-photo-preview/dist/index.css'
import { createPinia } from 'pinia'
import piniaPluginPersistedstate from 'pinia-plugin-persistedstate'
webFrame.setVisualZoomLevelLimits(1, 1)
@ -45,6 +48,8 @@ app.config.globalProperties.sendToMain = sendToMain
app.mixin(mainMixin)
app.mixin(dragMixin)
const pinia = createPinia()
pinia.use(piniaPluginPersistedstate)
app.use(VueLazyLoad, {
error: `file://${__static.replace(/\\/g, '/')}/unknown-file-type.svg`
@ -53,7 +58,8 @@ app.use(ElementUI)
app.use(router)
app.use(store)
app.use(vue3PhotoPreview)
app.use(pinia)
app.use(ContextMenu)
app.mount('#app')
initTalkingData()

View File

@ -9,12 +9,11 @@ import path from 'path'
import axios from 'axios'
import windowManager from '../window/windowManager'
import { showNotification } from '~/main/utils/common'
import { isDev } from '~/universal/utils/common'
// for test
const REMOTE_NOTICE_URL = isDev ? 'http://localhost:8181/remote-notice.json' : 'https://picgo-1251750343.cos.accelerate.myqcloud.com/remote-notice.yml'
const REMOTE_NOTICE_URL = 'https://release.piclist.cn/remote-notice.json'
const REMOTE_NOTICE_LOCAL_STORAGE_FILE = 'picgo-remote-notice.json'
const REMOTE_NOTICE_LOCAL_STORAGE_FILE = 'piclist-remote-notice.json'
const STORE_PATH = app.getPath('userData')
@ -106,7 +105,6 @@ class RemoteNoticeHandler {
if (this.checkActionCount(action)) {
switch (action.type) {
case IRemoteNoticeActionType.SHOW_DIALOG: {
// SHOW DIALOG
const currentWindow = windowManager.getAvailableWindow()
dialog.showOpenDialog(currentWindow, action.data?.options)
break

View File

@ -181,7 +181,6 @@ export function createTray () {
}
} else {
const imgUrl = img.toDataURL()
// console.log(imgUrl)
obj.push({
width: img.getSize().width,
height: img.getSize().height,

View File

@ -10,7 +10,6 @@ import db, { GalleryDB } from '~/main/apis/core/datastore'
import { handleCopyUrl } from '~/main/utils/common'
import { handleUrlEncode } from '#/utils/common'
import { T } from '~/main/i18n/index'
// import dayjs from 'dayjs'
const handleClipboardUploading = async (): Promise<false | ImgInfo[]> => {
const useBuiltinClipboard = !!db.get('settings.useBuiltinClipboard')

View File

@ -11,7 +11,7 @@ import db from '~/main/apis/core/datastore'
import windowManager from 'apis/app/window/windowManager'
import { IWindowList } from '#/types/enum'
import util from 'util'
import { IPicGo } from 'picgo'
import { IPicGo } from 'piclist'
import { showNotification, calcDurationRange, getClipboardFilePath } from '~/main/utils/common'
import { RENAME_FILE_NAME, TALKING_DATA_EVENT } from '~/universal/events/constants'
import logger from '@core/picgo/logger'
@ -163,6 +163,9 @@ class Uploader {
duration: Date.now() - startTime
} as IAnalyticsData)
}
output.forEach((item: ImgInfo) => {
item.config = db.get(`picBed.${item.type}`)
})
return output.filter(item => item.imgUrl)
} else {
return false

View File

@ -11,17 +11,10 @@ import db from '~/main/apis/core/datastore'
import { TOGGLE_SHORTKEY_MODIFIED_MODE } from '#/events/constants'
import { app } from 'electron'
import { remoteNoticeHandler } from '../remoteNotice'
// import { i18n } from '~/main/i18n'
// import { URLSearchParams } from 'url'
const windowList = new Map<IWindowList, IWindowListItem>()
const handleWindowParams = (windowURL: string) => {
// const [baseURL, hash = ''] = windowURL.split('#')
// const search = new URLSearchParams()
// const lang = i18n.getLanguage()
// search.append('lang', lang)
// return `${baseURL}?${search.toString()}#${hash}`
return windowURL
}

View File

@ -45,14 +45,6 @@ class WindowManager implements IWindowManager {
return this.windowMap.has(name)
}
// useless
// delete (name: IWindowList) {
// const window = this.windowMap.get(name)
// if (window) {
// this.windowIdMap.delete(window.id)
// this.windowMap.delete(name)
// }
// }
deleteById = (id: number) => {
const name = this.windowIdMap.get(id)
if (name) {

View File

@ -1,11 +1,11 @@
import fs from 'fs-extra'
import writeFile from 'write-file-atomic'
import path from 'path'
import { app as APP } from 'electron'
import { getLogger } from '@core/utils/localLogger'
import { app } from 'electron'
import { getLogger } from '../utils/localLogger'
import dayjs from 'dayjs'
import { T } from '~/main/i18n'
const STORE_PATH = APP.getPath('userData')
const STORE_PATH = app.getPath('userData')
const configFilePath = path.join(STORE_PATH, 'data.json')
const configFileBackupPath = path.join(STORE_PATH, 'data.bak.json')
export const defaultConfigPath = configFilePath
@ -79,7 +79,6 @@ function dbPathChecker (): string {
if (_configFilePath) {
return _configFilePath
}
// defaultConfigPath
_configFilePath = defaultConfigPath
// if defaultConfig path is not exit
// do not parse the content of config
@ -98,8 +97,8 @@ function dbPathChecker (): string {
}
return _configFilePath
} catch (e) {
const picgoLogPath = path.join(STORE_PATH, 'picgo-gui-local.log')
const logger = getLogger(picgoLogPath)
const piclistLogPath = path.join(STORE_PATH, 'piclist-gui-local.log')
const logger = getLogger(piclistLogPath, 'PicList')
if (!hasCheckPath) {
const optionsTpl = {
title: T('TIPS_NOTICE'),
@ -123,8 +122,8 @@ function getGalleryDBPath (): {
dbBackupPath: string
} {
const configPath = dbPathChecker()
const dbPath = path.join(path.dirname(configPath), 'picgo.db')
const dbBackupPath = path.join(path.dirname(dbPath), 'picgo.bak.db')
const dbPath = path.join(path.dirname(configPath), 'piclist.db')
const dbBackupPath = path.join(path.dirname(dbPath), 'piclist.bak.db')
return {
dbPath,
dbBackupPath

View File

@ -1,6 +1,6 @@
import { dbChecker, dbPathChecker } from 'apis/core/datastore/dbChecker'
import pkg from 'root/package.json'
import { PicGo } from 'picgo'
import { PicGo } from 'piclist'
import db from 'apis/core/datastore'
import debounce from 'lodash/debounce'

View File

@ -41,9 +41,9 @@ const recreateLogFile = (logPath: string): void => {
}
/**
* for local log before picgo inited
* for local log before piclist inited
*/
const getLogger = (logPath: string) => {
const getLogger = (logPath: string, logtype: string) => {
let hasUncathcedError = false
try {
if (!fs.existsSync(logPath)) {
@ -64,7 +64,7 @@ const getLogger = (logPath: string) => {
return
}
try {
let log = `${dayjs().format('YYYY-MM-DD HH:mm:ss')} [PicGo ${type.toUpperCase()}] `
let log = `${dayjs().format('YYYY-MM-DD HH:mm:ss')} [${logtype} ${type.toUpperCase()}] `
msg.forEach((item: ILogArgvTypeWithError) => {
if (typeof item === 'object' && type === 'error') {
log += `\n------Error Stack Begin------\n${util.format(item.stack)}\n-------Error Stack End------- `

View File

@ -11,7 +11,7 @@ import { IPasteStyle, IPicGoHelperType, IWindowList } from '#/types/enum'
import shortKeyHandler from 'apis/app/shortKey/shortKeyHandler'
import picgo from '@core/picgo'
import { handleStreamlinePluginName, simpleClone } from '~/universal/utils/common'
import { IGuiMenuItem, PicGo as PicGoCore } from 'picgo'
import { IGuiMenuItem, PicGo as PicGoCore } from 'piclist'
import windowManager from 'apis/app/window/windowManager'
import { showNotification } from '~/main/utils/common'
import { dbPathChecker } from 'apis/core/datastore/dbChecker'

View File

@ -11,7 +11,7 @@ import pkg from 'root/package.json'
import GuiApi from 'apis/gui'
import { PICGO_CONFIG_PLUGIN, PICGO_HANDLE_PLUGIN_DONE, PICGO_HANDLE_PLUGIN_ING, PICGO_TOGGLE_PLUGIN, SHOW_MAIN_PAGE_DONATION, SHOW_MAIN_PAGE_QRCODE } from '~/universal/events/constants'
import picgoCoreIPC from '~/main/events/picgoCoreIPC'
import { PicGo as PicGoCore } from 'picgo'
import { PicGo as PicGoCore } from 'piclist'
import { T } from '~/main/i18n'
import { changeCurrentUploader } from '~/main/utils/handleUploaderConfig'

View File

@ -2,9 +2,9 @@ import path from 'path'
import { app } from 'electron'
import { getLogger } from 'apis/core/utils/localLogger'
const STORE_PATH = app.getPath('userData')
const LOG_PATH = path.join(STORE_PATH, 'picgo-gui-local.log')
const LOG_PATH = path.join(STORE_PATH, 'piclist-gui-local.log')
const logger = getLogger(LOG_PATH)
const logger = getLogger(LOG_PATH, 'PicList')
// since the error may occur in picgo-core
// so we can't use the log from picgo

View File

@ -1,8 +1,8 @@
// TODO: so how to import pure esm module in electron main process????? help wanted
// just copy the fix-path because I can't import pure ESM module in electron main process
const shellPath = require('shell-path')
// @ts-nocheck
import { shellPath } from 'shell-path'
export default function fixPath () {
if (process.platform === 'win32') {

View File

@ -34,9 +34,12 @@ import bus from '@core/bus'
import logger from 'apis/core/picgo/logger'
import picgo from 'apis/core/picgo'
import fixPath from './fixPath'
import { clearTempFolder } from '../manage/utils/common'
import { initI18n } from '~/main/utils/handleI18n'
import { remoteNoticeHandler } from 'apis/app/remoteNotice'
import { manageIpcList } from '../manage/events/ipcList'
import getManageApi from '../manage/Main'
import UpDownTaskQueue from '../manage/datastore/upDownTaskQueue'
const isDevelopment = process.env.NODE_ENV !== 'production'
const handleStartUpFiles = (argv: string[], cwd: string) => {
@ -64,6 +67,9 @@ class LifeCycle {
beforeOpen()
initI18n()
ipcList.listen()
getManageApi()
UpDownTaskQueue.getInstance()
manageIpcList.listen()
busEventList.listen()
updateShortKeyFromVersion212(db, db.get('settings.shortKey'))
await migrateGalleryFromVersion230(db, GalleryDB.getInstance(), picgo)
@ -135,7 +141,7 @@ class LifeCycle {
openAtLogin: db.get('settings.autoStart') || false
})
if (process.platform === 'win32') {
app.setAppUserModelId('com.molunerfinn.picgo')
app.setAppUserModelId('com.kuingsmile.piclist')
}
if (process.env.XDG_CURRENT_DESKTOP && process.env.XDG_CURRENT_DESKTOP.includes('Unity')) {
@ -151,6 +157,8 @@ class LifeCycle {
})
app.on('will-quit', () => {
UpDownTaskQueue.getInstance().persist()
clearTempFolder()
globalShortcut.unregisterAll()
bus.removeAllListeners()
server.shutdown()

10
src/main/manage/Main.ts Normal file
View File

@ -0,0 +1,10 @@
/* eslint-disable */
import { manageDbChecker } from './datastore/dbChecker'
import { ManageApi } from './manageApi'
manageDbChecker()
const getManageApi = (picBedName: string = 'placeholder'): ManageApi => {
return new ManageApi(picBedName)
}
export default getManageApi

View File

@ -0,0 +1,587 @@
import axios from 'axios'
import { hmacSha1Base64, getFileMimeType, gotDownload, formatError } from '../utils/common'
import { ipcMain, IpcMainEvent } from 'electron'
import fs from 'fs-extra'
import { XMLParser } from 'fast-xml-parser'
import OSS from 'ali-oss'
import path from 'path'
import { isImage } from '~/renderer/manage/utils/common'
import windowManager from 'apis/app/window/windowManager'
import { IWindowList } from '#/types/enum'
import UpDownTaskQueue,
{
uploadTaskSpecialStatus,
commonTaskStatus
} from '../datastore/upDownTaskQueue'
import { ManageLogger } from '../utils/logger'
// 坑爹阿里云 返回数据类型标注和实际各种不一致
class AliyunApi {
ctx: OSS
accessKeyId: string
accessKeySecret: string
timeOut = 60000
logger: ManageLogger
constructor (accessKeyId: string, accessKeySecret: string, logger: ManageLogger) {
this.ctx = new OSS({
accessKeyId,
accessKeySecret,
secure: true
})
this.accessKeyId = accessKeyId
this.accessKeySecret = accessKeySecret
this.logger = logger
}
formatFolder (item: string, slicedPrefix: string) {
return {
key: item,
fileSize: 0,
formatedTime: '',
fileName: item.replace(slicedPrefix, '').replace('/', ''),
isDir: true,
checked: false,
isImage: false,
match: false,
Key: item
}
}
formatFile (item: OSS.ObjectMeta, slicedPrefix: string, urlPrefix: string): any {
const result = {
...item,
key: item.name,
rawUrl: `${urlPrefix}/${item.name}`,
fileName: item.name.replace(slicedPrefix, ''),
fileSize: item.size,
formatedTime: new Date(item.lastModified).toLocaleString(),
isDir: false,
checked: false,
match: false,
isImage: isImage(item.name.replace(slicedPrefix, ''))
}
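// swap so that `url` becomes the customUrl/public-prefix link and `rawUrl` keeps the SDK-provided object url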
const temp = result.rawUrl
result.rawUrl = result.url
result.url = temp
return result
}
getCanonicalizedOSSHeaders (headers: IStringKeyMap) {
const lowerCaseHeaders = Object.keys(headers).reduce((acc, key) => {
acc[key.toLowerCase()] = headers[key]
return acc
}, {} as IStringKeyMap)
let canonicalizedOSSHeaders = ''
const headerKeys = Object.keys(lowerCaseHeaders).sort()
headerKeys.forEach((key) => {
key.startsWith('x-oss-') && (canonicalizedOSSHeaders += `${key}:${lowerCaseHeaders[key]}\n`)
})
return canonicalizedOSSHeaders
}
authorization (method: string, canonicalizedResource: string, headers: IStringKeyMap, contentMd5: string, contentType: string) {
const date = new Date().toUTCString()
const stringToSign = `${method.toUpperCase()}\n${contentMd5}\n${contentType}\n${date}\n${this.getCanonicalizedOSSHeaders(headers)}${canonicalizedResource}`
return `OSS ${this.accessKeyId}:${hmacSha1Base64(this.accessKeySecret, stringToSign)}`
}
getNewCtx (region: string, bucket: string) {
return new OSS({
accessKeyId: this.accessKeyId,
accessKeySecret: this.accessKeySecret,
region,
bucket,
secure: true
})
}
/**
* List all buckets under the account, following pagination via nextMarker
*/
async getBucketList (): Promise<any> {
const formatItem = (item: OSS.Bucket) => {
return {
Name: item.name,
Location: item.region,
CreationDate: item.creationDate
}
}
const res = await this.ctx.listBuckets({
'max-keys': 1000
}) as IStringKeyMap
const result = [] as IStringKeyMap[]
let NextMarker = ''
if (res.res.statusCode === 200) {
if (res.buckets) {
result.push(...res.buckets.map((item: OSS.Bucket) => formatItem(item)))
let isTruncated = res.isTruncated
NextMarker = res.nextMarker
while (isTruncated) {
const res = await this.ctx.listBuckets({
marker: NextMarker,
'max-keys': 1000
}) as IStringKeyMap
if (res.res.statusCode === 200) {
if (res.buckets) {
result.push(...res.buckets.map((item: OSS.Bucket) => formatItem(item)))
isTruncated = res.isTruncated
NextMarker = res.nextMarker
} else {
isTruncated = false
}
} else {
isTruncated = false
}
}
return result
} else {
return []
}
} else {
return []
}
}
/**
* List the custom domains (CNAME) bound to a bucket via the ?cname API
*/
async getBucketDomain (param: IStringKeyMap): Promise<any> {
const headers = {
Date: new Date().toUTCString()
}
const authorization = this.authorization('GET', `/${param.bucketName}/?cname`, headers, '', '')
const res = await axios({
url: `https://${param.bucketName}.${param.region}.aliyuncs.com/?cname`,
method: 'GET',
headers: {
...headers,
Authorization: authorization
}
})
if (res.status === 200) {
const parser = new XMLParser()
const result = parser.parse(res.data)
if (result.ListCnameResult && result.ListCnameResult.Cname) {
if (Array.isArray(result.ListCnameResult.Cname)) {
const cnameList = [] as string[]
result.ListCnameResult.Cname.forEach((item: IStringKeyMap) => {
item.Status === 'Enabled' && cnameList.push(item.Domain)
})
return cnameList
} else {
return result.ListCnameResult.Cname.Status === 'Enabled' ? [result.ListCnameResult.Cname.Domain] : []
}
} else {
return []
}
} else {
return []
}
}
/**
*
* @param {Object} configMap
* configMap = {
* BucketName: string,
* region: string,
* acl: string
* }
* @description
* acl: private | publicRead | publicReadWrite
*/
async createBucket (configMap: IStringKeyMap): Promise<boolean> {
const client = new OSS({
accessKeyId: this.accessKeyId,
accessKeySecret: this.accessKeySecret,
region: configMap.region,
secure: true
})
const aclTransMap: IStringKeyMap = {
private: 'private',
publicRead: 'public-read',
publicReadWrite: 'public-read-write'
}
const res = await client.putBucket(configMap.BucketName, {
acl: aclTransMap[configMap.acl],
storageClass: 'Standard',
dataRedundancyType: 'LRS',
timeout: this.timeOut
})
return res && res.res.status === 200
}
async getBucketListBackstage (configMap: IStringKeyMap): Promise<any> {
const window = windowManager.get(IWindowList.SETTING_WINDOW)!
const { bucketName: bucket, bucketConfig: { Location: region }, prefix, cancelToken } = configMap
const slicedPrefix = prefix.slice(1)
const urlPrefix = configMap.customUrl || `https://${bucket}.${region}.aliyuncs.com`
let marker
const cancelTask = [false]
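// allow the renderer to cancel a long-running listing: a matching cancelLoadingFileList token stops further pagination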
ipcMain.on('cancelLoadingFileList', (_evt: IpcMainEvent, token: string) => {
if (token === cancelToken) {
cancelTask[0] = true
ipcMain.removeAllListeners('cancelLoadingFileList')
}
})
let res = {} as any
const result = {
fullList: <any>[],
success: false,
finished: false
}
const client = this.getNewCtx(region, bucket)
do {
res = await client.listV2({
prefix: slicedPrefix === '' ? undefined : slicedPrefix,
delimiter: '/',
'max-keys': '1000',
'continuation-token': marker
}, {
timeout: this.timeOut
})
if (res && res.res.statusCode === 200) {
res.prefixes && res.prefixes.forEach((item: string) => {
result.fullList.push(this.formatFolder(item, slicedPrefix))
})
res.objects && res.objects.forEach((item: OSS.ObjectMeta) => {
item.size !== 0 && result.fullList.push(this.formatFile(item, slicedPrefix, urlPrefix))
})
window.webContents.send('refreshFileTransferList', result)
} else {
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
return
}
marker = res.nextContinuationToken
} while (res.isTruncated === true && !cancelTask[0])
result.success = true
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
}
/**
* Get a paginated file list of a bucket
* @param {Object} configMap
* configMap = {
* bucketName: string,
* bucketConfig: {
* Location: string
* },
* paging: boolean,
* prefix: string,
* marker: string,
* itemsPerPage: number,
* customUrl: string
* }
*/
async getBucketFileList (configMap: IStringKeyMap): Promise<any> {
const { bucketName: bucket, bucketConfig: { Location: region }, prefix, marker, itemsPerPage } = configMap
const slicedPrefix = prefix.slice(1)
const urlPrefix = configMap.customUrl || `https://${bucket}.${region}.aliyuncs.com`
let res = {} as any
const result = {
fullList: <any>[],
isTruncated: false,
nextMarker: '',
success: false
}
const client = this.getNewCtx(region, bucket)
res = await client.listV2({
prefix: slicedPrefix === '' ? undefined : slicedPrefix,
delimiter: '/',
'max-keys': itemsPerPage.toString(),
'continuation-token': marker
}, {
timeout: this.timeOut
}) as any
// prefixes can be null
// objects will be [] when there are no files
if (res && res.res.statusCode === 200) {
res.prefixes && res.prefixes.forEach((item: string) => {
result.fullList.push(this.formatFolder(item, slicedPrefix))
})
res.objects && res.objects.forEach((item: OSS.ObjectMeta) => {
item.size !== 0 && result.fullList.push(this.formatFile(item, slicedPrefix, urlPrefix))
})
result.isTruncated = res.isTruncated
result.nextMarker = res.nextContinuationToken === null ? '' : res.nextContinuationToken
result.success = true
return result
} else {
return result
}
}
/**
* Rename a file (copy to the new key, then delete the old key)
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* oldKey: string,
* newKey: string
* }
*/
async renameBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, region, oldKey, newKey } = configMap
const client = this.getNewCtx(region, bucketName)
const res = await client.copy(
newKey,
oldKey
) as any
if (res && res.res.statusCode === 200) {
const res2 = await client.delete(oldKey) as any
return res2 && res2.res.statusCode === 204
} else {
return false
}
}
/**
* Delete a single file
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* key: string
* }
*/
async deleteBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, region, key } = configMap
const client = this.getNewCtx(region, bucketName)
const res = await client.delete(key) as any
return res && res.res.statusCode === 204
}
/**
* Recursively delete a folder
* @param configMap
*/
async deleteBucketFolder (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, region, key } = configMap
const client = this.getNewCtx(region, bucketName)
let marker
let isTruncated
const allFileList = {
CommonPrefixes: [] as any[],
Contents: [] as any[]
}
let res = await client.listV2({
prefix: key,
delimiter: '/',
'max-keys': '1000'
}, {
timeout: 60000
}) as any
if (res && res.res.statusCode === 200) {
res.prefixes !== null && allFileList.CommonPrefixes.push(...res.prefixes)
res.objects.length > 0 && allFileList.Contents.push(...res.objects)
isTruncated = res.isTruncated
marker = res.nextContinuationToken
while (isTruncated) {
res = await client.listV2({
prefix: key,
delimiter: '/',
'max-keys': '1000',
'continuation-token': marker
}, {
timeout: this.timeOut
}) as any
if (res && res.res.statusCode === 200) {
res.prefixes !== null && allFileList.CommonPrefixes.push(...res.prefixes)
res.objects.length > 0 && allFileList.Contents.push(...res.objects)
isTruncated = res.isTruncated
marker = res.nextContinuationToken
} else {
return false
}
}
} else {
return false
}
if (allFileList.CommonPrefixes.length > 0) {
for (const item of allFileList.CommonPrefixes) {
res = await this.deleteBucketFolder({
bucketName,
region,
key: item
})
if (!res) {
return false
}
}
}
if (allFileList.Contents.length > 0) {
const cycle = Math.ceil(allFileList.Contents.length / 1000)
for (let i = 0; i < cycle; i++) {
res = await client.deleteMulti(
allFileList.Contents.slice(i * 1000, (i + 1) * 1000).map((item: any) => {
return item.name
})
) as any
if (!(res && res.res.statusCode === 200)) {
return false
}
}
}
return true
}
/**
* Get a pre-signed URL for a file
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* key: string,
* expires: number,
* customUrl: string
* }
*/
async getPreSignedUrl (configMap: IStringKeyMap): Promise<string> {
const { bucketName, region, key, expires, customUrl } = configMap
const client = this.getNewCtx(region, bucketName)
const res = client.signatureUrl(key, {
expires: expires || 3600
})
return customUrl ? `${customUrl.replace(/\/$/, '')}/${key}${res.slice(res.indexOf('?'))}` : res
}
/**
* Upload files to a bucket
* @param configMap
*/
async uploadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { fileArray } = configMap
// fileArray = [{
// bucketName: string,
// region: string,
// key: string,
// filePath: string
// fileSize: number
// }]
const instance = UpDownTaskQueue.getInstance()
fileArray.forEach((item: any) => {
item.key.startsWith('/') && (item.key = item.key.slice(1))
})
for (const item of fileArray) {
const { bucketName, region, key, filePath, fileName } = item
const client = this.getNewCtx(region, bucketName)
const id = `${bucketName}-${region}-${key}-${filePath}`
if (instance.getUploadTask(id)) {
continue
}
instance.addUploadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
sourceFilePath: filePath,
targetFilePath: key,
targetFileBucket: bucketName,
targetFileRegion: region
})
client.multipartUpload(
key,
filePath,
{
partSize: 1 * 1024 * 1024,
mime: getFileMimeType(fileName),
progress: (p: number) => {
const id = `${bucketName}-${region}-${key}-${filePath}`
instance.updateUploadTask({
id,
progress: Math.floor(p * 100),
status: uploadTaskSpecialStatus.uploading
})
},
timeout: 60000
}
).then((res: any) => {
const id = `${bucketName}-${region}-${key}-${filePath}`
if (res && res.res.statusCode === 200) {
instance.updateUploadTask({
id,
progress: 100,
status: uploadTaskSpecialStatus.uploaded,
response: JSON.stringify(res),
finishTime: new Date().toLocaleString()
})
} else {
instance.updateUploadTask({
id,
progress: 0,
status: commonTaskStatus.failed,
response: JSON.stringify(res),
finishTime: new Date().toLocaleString()
})
}
}).catch((err: any) => {
this.logger.error(formatError(err, { class: 'AliyunApi', method: 'uploadBucketFile' }))
const id = `${bucketName}-${region}-${key}-${filePath}`
instance.updateUploadTask({
id,
progress: 0,
status: commonTaskStatus.failed,
response: JSON.stringify(err),
finishTime: new Date().toLocaleString()
})
})
}
return true
}
/**
* Create a folder by putting an empty object
* @param configMap
*/
async createBucketFolder (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, region, key } = configMap
const client = this.getNewCtx(region, bucketName)
const res = await client.put(key, Buffer.from('')) as any
return res && res.res.statusCode === 200
}
/**
* Download files from a bucket
* @param configMap
*/
async downloadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { downloadPath, fileArray } = configMap
// fileArray = [{
// bucketName: string,
// region: string,
// key: string,
// fileName: string
// }]
const instance = UpDownTaskQueue.getInstance()
for (const item of fileArray) {
const { bucketName, region, key, fileName } = item
const client = this.getNewCtx(region, bucketName)
const savedFilePath = path.join(downloadPath, fileName)
const fileStream = fs.createWriteStream(savedFilePath)
const id = `${bucketName}-${region}-${key}`
if (instance.getDownloadTask(id)) {
continue
}
instance.addDownloadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
targetFilePath: savedFilePath
})
const preSignedUrl = client.signatureUrl(key, {
expires: 60 * 60 * 48
})
gotDownload(instance, preSignedUrl, fileStream, id, savedFilePath, this.logger)
}
return true
}
}
export default AliyunApi
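/*
 * Usage sketch (illustrative, not part of the class): given an already constructed
 * `api: AliyunApi` instance (its constructor is outside this hunk), deleting a remote
 * file from the album follows the configMap shape documented above. The bucket,
 * region and key values below are placeholders.
 *
 *   const ok = await api.deleteBucketFile({
 *     bucketName: 'my-bucket',
 *     region: 'oss-cn-hangzhou',
 *     key: 'images/photo.png'
 *   })
 */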

View File

@ -0,0 +1,17 @@
import TcyunApi from './tcyun'
import AliyunApi from './aliyun'
import QiniuApi from './qiniu'
import UpyunApi from './upyun'
import SmmsApi from './smms'
import GithubApi from './github'
import ImgurApi from './imgur'
export default {
TcyunApi,
AliyunApi,
QiniuApi,
UpyunApi,
SmmsApi,
GithubApi,
ImgurApi
}
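/*
 * Consumption sketch (hypothetical caller, not the actual wiring in this commit):
 * the default export gives the manage layer a single lookup point for every
 * supported picBed API class. The token and logger values are placeholders.
 *
 *   import ManageApi from './index'
 *   const api = new ManageApi.SmmsApi(token, logger)
 */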

View File

@ -0,0 +1,436 @@
import got from 'got'
import { ManageLogger } from '../utils/logger'
import { isImage } from '~/renderer/manage/utils/common'
import windowManager from 'apis/app/window/windowManager'
import { IWindowList } from '#/types/enum'
import { ipcMain, IpcMainEvent } from 'electron'
import { gotUpload, trimPath, gotDownload, getAgent, getOptions } from '../utils/common'
import UpDownTaskQueue,
{
commonTaskStatus
} from '../datastore/upDownTaskQueue'
import fs from 'fs-extra'
import path from 'path'
class GithubApi {
token: string
username: string
logger: ManageLogger
proxy: any
baseUrl = 'https://api.github.com'
commonHeaders : IStringKeyMap
constructor (token: string, username: string, proxy: string | undefined, logger: ManageLogger) {
this.logger = logger
this.token = token.startsWith('Bearer ') ? token : `Bearer ${token}`.trim()
this.username = username
this.proxy = proxy
this.commonHeaders = {
Authorization: this.token,
Accept: 'application/vnd.github+json'
}
}
formatFolder (item: any, slicedPrefix: string) {
let key = ''
if (slicedPrefix === '') {
key = `${item.path}/`
} else {
key = `${slicedPrefix}/${item.path}/`
}
return {
...item,
Key: key,
key,
fileSize: 0,
formatedTime: '',
fileName: item.path,
isDir: true,
checked: false,
isImage: false,
match: false
}
}
formatFile (item: any, slicedPrefix: string, branch: string, repo: string, cdnUrl: string | undefined) {
let rawUrl = ''
if (cdnUrl) {
const placeholder = ['{username}', '{repo}', '{branch}', '{path}']
if (placeholder.some(item => cdnUrl.includes(item))) {
rawUrl = cdnUrl.replace('{username}', this.username)
.replace('{repo}', repo)
.replace('{branch}', branch)
.replace('{path}', `${slicedPrefix}/${item.path}`)
} else {
rawUrl = `${cdnUrl}/${slicedPrefix}/${item.path}`
}
} else {
rawUrl = `https://raw.githubusercontent.com/${this.username}/${repo}/${branch}/${slicedPrefix}/${item.path}`
}
rawUrl = rawUrl.replace(/(?<!https?:)\/{2,}/g, '/')
let key = ''
if (slicedPrefix === '') {
key = item.path
} else {
key = `${slicedPrefix}/${item.path}`
}
const result = {
...item,
Key: key,
key,
fileSize: item.size,
formatedTime: '',
fileName: item.path,
isDir: false,
checked: false,
match: false,
isImage: isImage(item.path),
rawUrl
}
const temp = result.rawUrl
result.rawUrl = result.url
result.url = temp
return result
}
/**
* get repo list
*/
async getBucketList (): Promise<any> {
let initPage = 1
let res
const result = [] as any[]
do {
res = await got(
`${this.baseUrl}/user/repos`,
getOptions('GET', this.commonHeaders, { page: initPage, per_page: 100 }, 'json', undefined, undefined, this.proxy)
) as any
if (res.statusCode === 200) {
res.body.forEach((item: any) => {
result.push({
...item,
Name: item.name,
Location: item.id,
CreationDate: item.created_at
})
})
} else {
return []
}
initPage++
} while (res.body.length > 0)
return result
}
/**
* Get the branch list of a repository
*/
async getBucketDomain (param: IStringKeyMap): Promise<any> {
const { bucketName: repo } = param
let initPage = 1
let res
const result = [] as string[]
do {
res = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/branches`,
getOptions('GET', this.commonHeaders, { page: initPage, per_page: 100 }, 'json', undefined, undefined, this.proxy)
) as any
if (res.statusCode === 200) {
res.body.forEach((item: any) => result.push(item.name))
} else {
return []
}
initPage++
} while (res.body.length > 0)
return result
}
async getBucketListBackstage (configMap: IStringKeyMap): Promise<any> {
const window = windowManager.get(IWindowList.SETTING_WINDOW)!
const { bucketName: repo, customUrl: branch, prefix, cancelToken, cdnUrl } = configMap
const slicedPrefix = prefix.replace(/^\//, '').replace(/\/$/, '')
const cancelTask = [false]
ipcMain.on('cancelLoadingFileList', (_evt: IpcMainEvent, token: string) => {
if (token === cancelToken) {
cancelTask[0] = true
ipcMain.removeAllListeners('cancelLoadingFileList')
}
})
let res = {} as any
const result = {
fullList: <any>[],
success: false,
finished: false
}
res = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/git/trees/${branch}:${slicedPrefix}`,
getOptions('GET', this.commonHeaders, undefined, 'json', undefined, undefined, this.proxy)
)
if (res && res.statusCode === 200) {
res.body.tree.forEach((item: any) => {
if (item.type === 'tree') {
result.fullList.push(this.formatFolder(item, slicedPrefix))
} else {
result.fullList.push(this.formatFile(item, slicedPrefix, branch, repo, cdnUrl))
}
})
} else {
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
return
}
result.success = true
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
}
/**
* Delete a single file
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* key: string
* }
*/
async deleteBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName: repo, githubBranch: branch, key, DeleteHash: sha } = configMap
const body = {
message: 'deleted by PicList',
sha,
branch
}
const res = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/contents/${key}`,
getOptions('DELETE', this.commonHeaders, undefined, 'json', JSON.stringify(body), undefined, this.proxy)
)
return res.statusCode === 200
}
/**
* create a new tree to delete a folder
* @param configMap
*/
async deleteBucketFolder (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName: repo, githubBranch: branch, key } = configMap
const refRes = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/git/refs/heads/${branch}`,
getOptions('GET', this.commonHeaders, undefined, 'json', undefined, undefined, this.proxy)
) as any
if (refRes.statusCode !== 200) {
return false
}
const refSha = refRes.body.object.sha
const rootRes = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/branches/${branch}`,
getOptions('GET', this.commonHeaders, undefined, 'json', undefined, undefined, this.proxy)
) as any
if (rootRes.statusCode !== 200) {
return false
}
const rootSha = rootRes.body.commit.commit.tree.sha
// TODO: if there are more than 10000 files in the folder, it will be truncated
// Rare cases, not considered for now
const treeRes = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/git/trees/${branch}:${key.replace(/^\//, '').replace(/\/$/, '')}`,
getOptions('GET', this.commonHeaders, {
recursive: true
}, 'json', undefined, undefined, this.proxy)
) as any
if (treeRes.statusCode !== 200) {
return false
}
const oldTree = treeRes.body.tree
const newTree = oldTree.filter((item: any) => item.type === 'blob')
.map((item:any) => ({
path: `${key.replace(/^\//, '').replace(/\/$/, '')}/${item.path}`,
mode: item.mode,
type: item.type,
sha: null
}))
const newTreeShaRes = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/git/trees`,
getOptions('POST', this.commonHeaders, undefined, 'json', JSON.stringify({
base_tree: rootSha,
tree: newTree
}), undefined, this.proxy)
) as any
if (newTreeShaRes.statusCode !== 201) {
return false
}
const newTreeSha = newTreeShaRes.body.sha
const commitRes = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/git/commits`,
getOptions('POST', this.commonHeaders, undefined, 'json', JSON.stringify({
message: 'deleted by PicList',
tree: newTreeSha,
parents: [refSha]
}), undefined, this.proxy)
) as any
if (commitRes.statusCode !== 201) {
return false
}
const commitSha = commitRes.body.sha
const updateRefRes = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/git/refs/heads/${branch}`,
getOptions('PATCH', this.commonHeaders, undefined, 'json', JSON.stringify({
sha: commitSha
}), undefined, this.proxy)
) as any
if (updateRefRes.statusCode !== 200) {
return false
}
return true
}
/**
* Get a download URL (the raw URL for public repos, a temporary download_url for private repos)
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* key: string,
* expires: number,
* customUrl: string
* }
*/
async getPreSignedUrl (configMap: IStringKeyMap): Promise<string> {
const { bucketName: repo, customUrl: branch, key, rawUrl, githubPrivate: isPrivate } = configMap
if (!isPrivate) {
return rawUrl
}
const res = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/contents/${key}`,
getOptions('GET', this.commonHeaders, {
ref: branch
}, 'json', undefined, undefined, this.proxy)
) as any
if (res.statusCode === 200) {
return res.body.download_url
} else {
return ''
}
}
/**
* Create a folder by committing a .gitkeep placeholder
* @param configMap
*/
async createBucketFolder (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName: repo, githubBranch: branch, key } = configMap
const newFileKey = `${trimPath(key)}/.gitkeep`
const base64Content = Buffer.from('created by PicList').toString('base64')
const body = {
message: `created a new folder named ${key} by PicList`,
content: base64Content,
branch
}
const res = await got(
`${this.baseUrl}/repos/${this.username}/${repo}/contents/${newFileKey}`,
getOptions('PUT', this.commonHeaders, undefined, 'json', JSON.stringify(body), undefined, this.proxy)
)
return res.statusCode === 201
}
/**
* Upload files (files of 100 MB or more are skipped)
* @param configMap
*/
async uploadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { fileArray } = configMap
const instance = UpDownTaskQueue.getInstance()
fileArray.forEach((item: any) => {
item.key.startsWith('/') && (item.key = item.key.slice(1))
})
const filteredFileArray = fileArray.filter((item: any) => item.fileSize < 100 * 1024 * 1024)
for (const item of filteredFileArray) {
const { bucketName: repo, region, githubBranch: branch, key, filePath, fileName } = item
const id = `${repo}-${branch}-${key}-${filePath}`
if (instance.getUploadTask(id)) {
continue
}
const trimKey = trimPath(key)
const base64Content = fs.readFileSync(filePath, { encoding: 'base64' })
instance.addUploadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
sourceFilePath: filePath,
targetFilePath: key,
targetFileBucket: repo,
targetFileRegion: region
})
gotUpload(
instance,
`${this.baseUrl}/repos/${this.username}/${repo}/contents/${trimKey}`,
'PUT',
JSON.stringify({
message: 'uploaded by PicList',
branch,
content: base64Content
}),
this.commonHeaders,
id,
this.logger,
30000,
false,
getAgent(this.proxy)
)
}
return true
}
/**
* Download files
* @param configMap
*/
async downloadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { downloadPath, fileArray } = configMap
const instance = UpDownTaskQueue.getInstance()
for (const item of fileArray) {
const { bucketName: repo, customUrl: branch, key, fileName, githubPrivate, githubUrl } = item
const id = `${repo}-${branch}-${key}-${fileName}`
const savedFilePath = path.join(downloadPath, fileName)
const fileStream = fs.createWriteStream(savedFilePath)
if (instance.getDownloadTask(id)) {
continue
}
instance.addDownloadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
targetFilePath: savedFilePath
})
let downloadUrl
if (githubPrivate) {
const preSignedUrl = await this.getPreSignedUrl({
bucketName: repo,
customUrl: branch,
key,
rawUrl: githubUrl,
githubPrivate
})
downloadUrl = preSignedUrl
} else {
downloadUrl = githubUrl
}
gotDownload(
instance,
downloadUrl,
fileStream,
id,
savedFilePath,
this.logger,
undefined,
getAgent(this.proxy)
)
}
return true
}
}
export default GithubApi
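/*
 * Usage sketch (illustrative only; the token, repo, branch, key and blob sha are
 * placeholders, and `logger` is assumed to be a ManageLogger instance):
 *
 *   const api = new GithubApi(token, 'username', undefined, logger)
 *   const ok = await api.deleteBucketFile({
 *     bucketName: 'image-repo',
 *     githubBranch: 'main',
 *     key: 'images/photo.png',
 *     DeleteHash: blobSha   // the file's blob sha returned by the list call
 *   })
 */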

View File

@ -0,0 +1,262 @@
import got from 'got'
import ManageLogger from '../utils/logger'
import { getAgent, getOptions, gotDownload, gotUpload, getFileMimeType } from '../utils/common'
import windowManager from 'apis/app/window/windowManager'
import { IWindowList } from '#/types/enum'
import { ipcMain, IpcMainEvent } from 'electron'
import { isImage } from '~/renderer/manage/utils/common'
import path from 'path'
import UpDownTaskQueue,
{
commonTaskStatus
} from '../datastore/upDownTaskQueue'
import FormData from 'form-data'
import fs from 'fs-extra'
class ImgurApi {
userName: string
accessToken: string
proxy: any
logger: ManageLogger
tokenHeaders: any
idHeaders: any
baseUrl = 'https://api.imgur.com/3'
constructor (userName: string, accessToken: string, proxy: any, logger: ManageLogger) {
this.userName = userName
this.accessToken = accessToken.startsWith('Bearer ') ? accessToken : `Bearer ${accessToken}`
this.proxy = proxy
this.logger = logger
this.tokenHeaders = {
Authorization: this.accessToken
}
}
formatFile (item: any) {
return {
...item,
Key: path.basename(item.link),
key: path.basename(item.link),
fileName: `${item.name}${path.extname(item.link)}`,
formatedTime: new Date(item.datetime * 1000).toLocaleString(),
fileSize: item.size,
isDir: false,
checked: false,
match: false,
isImage: isImage(path.basename(item.link)),
url: item.link,
sha: item.deletehash
}
}
/**
* Get the album list (plus a virtual "unclassified" entry covering all images)
*/
async getBucketList (): Promise<any> {
let initPage = 0
let res
const result = [] as any[]
do {
res = await got(
`${this.baseUrl}/account/${this.userName}/albums/ids/${initPage}`,
getOptions('GET', this.tokenHeaders, undefined, 'json', undefined, undefined, this.proxy)
) as any
if (res.statusCode === 200 && res.body.success) {
res.body.data.forEach((item: any) => {
result.push(item)
})
} else {
return []
}
initPage++
} while (res.body.data.length > 0)
const finalResult = [] as any[]
for (let i = 0; i < result.length; i++) {
const item = result[i]
const res = await got(
`${this.baseUrl}/account/${this.userName}/album/${item}`,
getOptions('GET', this.tokenHeaders, undefined, 'json', undefined, undefined, this.proxy)
) as any
if (res.statusCode === 200 && res.body.success) {
finalResult.push({
...res.body.data,
Name: res.body.data.title,
Location: res.body.data.id,
CreationDate: res.body.data.datetime
})
} else {
return []
}
}
finalResult.push({
Name: '全部',
Location: 'unclassified',
CreationDate: new Date().getTime()
})
return finalResult
}
async getBucketListBackstage (configMap: IStringKeyMap): Promise<any> {
const window = windowManager.get(IWindowList.SETTING_WINDOW)!
const { bucketConfig: { Location: albumHash }, cancelToken } = configMap
const cancelTask = [false]
ipcMain.on('cancelLoadingFileList', (_evt: IpcMainEvent, token: string) => {
if (token === cancelToken) {
cancelTask[0] = true
ipcMain.removeAllListeners('cancelLoadingFileList')
}
})
let res = {} as any
const result = {
fullList: <any>[],
success: false,
finished: false
}
if (albumHash !== 'unclassified') {
res = await got(
`${this.baseUrl}/account/${this.userName}/album/${albumHash}`,
getOptions('GET', this.tokenHeaders, undefined, 'json', undefined, undefined, this.proxy)
) as any
if (res.statusCode === 200 && res.body.success) {
res.body.data.images.forEach((item: any) => {
result.fullList.push(this.formatFile(item))
})
} else {
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
return
}
} else {
let initPage = 0
do {
res = await got(
`${this.baseUrl}/account/${this.userName}/images/${initPage}`,
getOptions('GET', this.tokenHeaders, undefined, 'json', undefined, undefined, this.proxy)
) as any
if (res.statusCode === 200 && res.body.success) {
res.body.data.forEach((item: any) => {
result.fullList.push(this.formatFile(item))
})
} else {
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
return
}
initPage++
} while (res.body.data.length > 0)
}
result.success = true
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
}
async deleteBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { DeleteHash: deleteHash } = configMap
const res = await got(
`${this.baseUrl}/account/${this.userName}/image/${deleteHash}`,
getOptions('DELETE', this.tokenHeaders, undefined, 'json', undefined, undefined, this.proxy)
) as any
return res.statusCode === 200 && res.body.success
}
/**
* Upload files to an album
* @param configMap
*/
async uploadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { fileArray } = configMap
const instance = UpDownTaskQueue.getInstance()
fileArray.forEach((item: any) => {
item.key = item.key.replace(/^\/+/, '')
})
for (const item of fileArray) {
const { bucketName, region: albumHash, key, fileName, filePath, fileSize } = item
const id = `${albumHash}-${key}-${filePath}`
if (instance.getUploadTask(id) || fileSize > 1024 * 1024 * 200) {
continue
}
instance.addUploadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
sourceFilePath: filePath,
targetFilePath: key,
targetFileBucket: bucketName,
targetFileRegion: albumHash
})
const form = new FormData()
form.append('type', 'file')
form.append('description', 'uploaded by PicList')
form.append('name', path.basename(key, path.extname(key)))
if (fileSize > 1024 * 1024 * 10) {
form.append('video', fs.createReadStream(filePath), {
filename: path.basename(key),
contentType: getFileMimeType(fileName)
})
} else {
form.append('image', fs.createReadStream(filePath), {
filename: path.basename(key),
contentType: getFileMimeType(fileName)
})
}
albumHash !== 'unclassified' && form.append('album', albumHash)
const headers = form.getHeaders()
headers.Authorization = this.accessToken
gotUpload(
instance,
`${this.baseUrl}/image`,
'POST',
form,
headers,
id,
this.logger,
30000,
false,
getAgent(this.proxy)
)
}
return true
}
/**
* Download files
* @param configMap
*/
async downloadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { downloadPath, fileArray } = configMap
const instance = UpDownTaskQueue.getInstance()
for (const item of fileArray) {
const { bucketName, region, key, fileName, githubUrl: url } = item
const id = `${bucketName}-${region}-${key}-${fileName}`
const savedFilePath = path.join(downloadPath, fileName)
const fileStream = fs.createWriteStream(savedFilePath)
if (instance.getDownloadTask(id)) {
continue
}
instance.addDownloadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
targetFilePath: savedFilePath
})
gotDownload(
instance,
url,
fileStream,
id,
savedFilePath,
this.logger,
undefined,
getAgent(this.proxy)
)
}
return true
}
}
export default ImgurApi
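/*
 * Usage sketch (illustrative; the account name, access token and deletehash are
 * placeholders, `logger` is assumed to be a ManageLogger instance):
 *
 *   const api = new ImgurApi('userName', accessToken, undefined, logger)
 *   const ok = await api.deleteBucketFile({ DeleteHash: deleteHash })
 */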

View File

@ -0,0 +1,655 @@
import axios from 'axios'
import { hmacSha1Base64, getFileMimeType, gotDownload, formatError } from '../utils/common'
import fs from 'fs-extra'
import qiniu from 'qiniu/index'
import path from 'path'
import { isImage } from '~/renderer/manage/utils/common'
import windowManager from 'apis/app/window/windowManager'
import { IWindowList } from '#/types/enum'
import { ipcMain, IpcMainEvent } from 'electron'
import UpDownTaskQueue,
{
uploadTaskSpecialStatus,
commonTaskStatus
} from '../datastore/upDownTaskQueue'
import { ManageLogger } from '../utils/logger'
class QiniuApi {
mac: qiniu.auth.digest.Mac
accessKey: string
secretKey: string
commonType = 'application/x-www-form-urlencoded'
host = 'uc.qiniuapi.com'
logger: ManageLogger
hostList = {
getBucketList: 'https://uc.qiniuapi.com/buckets',
getBucketDomain: 'https://uc.qiniuapi.com/v2/domains'
}
constructor (accessKey: string, secretKey: string, logger: ManageLogger) {
this.mac = new qiniu.auth.digest.Mac(accessKey, secretKey)
this.accessKey = accessKey
this.secretKey = secretKey
this.logger = logger
}
formatFolder (item: string, slicedPrefix: string) {
return {
Key: item,
key: item,
fileSize: 0,
fileName: item.replace(slicedPrefix, '').replace('/', ''),
isDir: true,
checked: false,
isImage: false,
match: false
}
}
formatFile (item: any, slicedPrefix: string, urlPrefix: string) {
return {
...item,
fileName: item.key.replace(slicedPrefix, ''),
url: `${urlPrefix}/${item.key}`,
fileSize: item.fsize,
formatedTime: new Date(parseInt(item.putTime.toString().slice(0, -7), 10)).toLocaleString(),
isDir: false,
checked: false,
match: false,
isImage: isImage(item.key.replace(slicedPrefix, ''))
}
}
authorization (
method: string,
urlPath: string,
host: string,
body: string,
query: string,
contentType: string,
xQiniuHeaders?: IStringKeyMap
) {
let signStr = `${method.toUpperCase()} ${urlPath}`
query && (signStr += `?${query}`)
signStr += `\nHost: ${host}`
contentType && (signStr += `\nContent-Type: ${contentType}`)
let xQiniuHeaderStr = ''
if (xQiniuHeaders) {
const xQiniuHeaderKeys = Object.keys(xQiniuHeaders).sort()
xQiniuHeaderKeys.forEach((key) => {
xQiniuHeaderStr += `\n${key}:${xQiniuHeaders[key]}`
})
signStr += xQiniuHeaderStr
}
signStr += '\n\n'
if (contentType !== 'application/octet-stream' && body) {
signStr += body
}
return `Qiniu ${this.accessKey}:${hmacSha1Base64(this.secretKey, signStr).replace(/\+/g, '-').replace(/\//g, '_')}`
}
/**
* Get the bucket list
*/
async getBucketList (): Promise<any> {
const host = this.hostList.getBucketList
const authorization = qiniu.util.generateAccessToken(this.mac, host, undefined)
const res = await axios.get(host, {
headers: {
Authorization: authorization,
'Content-Type': this.commonType
},
timeout: 10000
})
if (res && res.status === 200) {
if (res.data && res.data.length) {
const result = [] as any[]
for (let i = 0; i < res.data.length; i++) {
const info = await this.getBucketInfo({ bucketName: res.data[i] })
if (!info.success) {
return []
}
result.push({
Name: res.data[i],
Location: info.zone,
CreationDate: new Date().toISOString(),
Private: info.private
})
}
return result
} else {
return []
}
} else {
return []
}
}
/**
* Get bucket info (zone and whether the bucket is private)
*/
async getBucketInfo (param: IStringKeyMap): Promise<any> {
const { bucketName } = param
const urlPath = `/v2/bucketInfo?bucket=${bucketName}&fs=true`
const authorization = this.authorization('POST', urlPath, this.host, '', '', 'application/json')
const res = await axios({
method: 'post',
url: `https://${this.host}/v2/bucketInfo`,
params: {
bucket: bucketName,
fs: true
},
headers: {
Authorization: authorization,
'Content-Type': 'application/json',
Host: this.host
},
timeout: 10000
})
if (res && res.status === 200) {
return {
success: true,
private: res.data.private,
zone: res.data.zone
}
} else {
return {
success: false
}
}
}
/**
* Get the domains bound to a bucket
*/
async getBucketDomain (param: IStringKeyMap): Promise<any> {
const { bucketName } = param
const host = this.hostList.getBucketDomain
const authorization = qiniu.util.generateAccessToken(this.mac, `${host}?tbl=${bucketName}`, undefined)
const res = await axios.get(host, {
params: {
tbl: bucketName
},
headers: {
Authorization: authorization,
'Content-Type': this.commonType
},
timeout: 10000
})
if (res && res.status === 200) {
return res.data && res.data.length ? res.data : []
} else {
return []
}
}
/**
* Set the bucket access policy
*/
async setBucketAclPolicy (param: IStringKeyMap): Promise<boolean> {
// 0: public access, 1: private access
const { bucketName } = param
let { isPrivate } = param
isPrivate = isPrivate ? 1 : 0
const urlPath = `/private?bucket=${bucketName}&private=${isPrivate}`
const authorization = this.authorization('POST', urlPath, this.host, '', '', this.commonType)
const res = await axios({
method: 'post',
url: `https://${this.host}/private`,
params: {
bucket: bucketName,
private: isPrivate
},
headers: {
Authorization: authorization,
'Content-Type': this.commonType,
Host: this.host
},
timeout: 10000
})
return res && res.status === 200
}
/**
* Create a bucket
* @param {Object} configMap
* configMap = {
* BucketName: string,
* region: string,
* acl: boolean // whether the bucket is publicly readable
* }
*/
async createBucket (configMap: IStringKeyMap): Promise<boolean> {
const { BucketName, region } = configMap
const { acl } = configMap
const urlPath = `/mkbucketv3/${BucketName}/region/${region}`
const authorization = this.authorization('POST', urlPath, this.host, '', '', 'application/json')
const res = await axios({
method: 'post',
url: `https://${this.host}${urlPath}`,
headers: {
Authorization: authorization,
'Content-Type': 'application/json',
Host: this.host
},
timeout: 10000
})
if (res && res.status === 200) {
const changeAclRes = await this.setBucketAclPolicy({
bucketName: BucketName,
isPrivate: !acl
})
return changeAclRes
} else {
return false
}
}
async getBucketListBackstage (configMap: IStringKeyMap): Promise<any> {
const window = windowManager.get(IWindowList.SETTING_WINDOW)!
const { bucketName: bucket, prefix, cancelToken, customUrl: urlPrefix } = configMap
let marker = undefined as any
const slicedPrefix = prefix.slice(1)
const cancelTask = [false]
ipcMain.on('cancelLoadingFileList', (_evt: IpcMainEvent, token: string) => {
if (token === cancelToken) {
cancelTask[0] = true
ipcMain.removeAllListeners('cancelLoadingFileList')
}
})
let res = {} as any
const result = {
fullList: <any>[],
success: false,
finished: false
}
const config = new qiniu.conf.Config()
const bucketManager = new qiniu.rs.BucketManager(this.mac, config)
do {
res = await new Promise((resolve, reject) => {
bucketManager.listPrefix(bucket, {
prefix: slicedPrefix === '' ? undefined : slicedPrefix,
delimiter: '/',
marker,
limit: 1000
}, (err: any, respBody: any, respInfo: any) => {
if (err) {
reject(err)
} else {
resolve({
respBody,
respInfo
})
}
})
})
if (res && res.respInfo.statusCode === 200) {
res.respBody && res.respBody.commonPrefixes && res.respBody.commonPrefixes.forEach((item: any) => {
result.fullList.push(this.formatFolder(item, slicedPrefix))
})
res.respBody && res.respBody.items && res.respBody.items.forEach((item: any) => {
item.fsize !== 0 && result.fullList.push(this.formatFile(item, slicedPrefix, urlPrefix))
})
window.webContents.send('refreshFileTransferList', result)
} else {
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
return
}
marker = res.respBody.marker
} while (res.respBody && res.respBody.marker && !cancelTask[0])
result.success = true
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
}
/**
* Get a paginated file list
* @param {Object} configMap
* configMap = {
* bucketName: string,
* bucketConfig: {
* Location: string
* },
* paging: boolean,
* prefix: string,
* marker: string,
* itemsPerPage: number,
* customUrl: string
* }
*/
async getBucketFileList (configMap: IStringKeyMap): Promise<any> {
const { bucketName: bucket, prefix, marker, itemsPerPage, customUrl: urlPrefix } = configMap
const slicedPrefix = prefix.slice(1)
const config = new qiniu.conf.Config()
const bucketManager = new qiniu.rs.BucketManager(this.mac, config)
let res = {} as any
const result = {
fullList: <any>[],
isTruncated: false,
nextMarker: '',
success: false
}
res = await new Promise((resolve, reject) => {
bucketManager.listPrefix(bucket, {
limit: itemsPerPage,
prefix: slicedPrefix === '' ? undefined : slicedPrefix,
marker,
delimiter: '/'
}, (err, respBody, respInfo) => {
if (err) {
reject(err)
} else {
resolve({
respBody,
respInfo
})
}
})
})
if (res && res.respInfo.statusCode === 200) {
if (res.respBody && res.respBody.commonPrefixes) {
res.respBody.commonPrefixes.forEach((item: string) => {
result.fullList.push(this.formatFolder(item, slicedPrefix))
})
}
if (res.respBody && res.respBody.items) {
res.respBody.items.forEach((item: any) => {
item.fsize !== 0 && result.fullList.push(this.formatFile(item, slicedPrefix, urlPrefix))
})
}
result.isTruncated = !!(res.respBody && res.respBody.marker)
result.nextMarker = res.respBody && res.respBody.marker ? res.respBody.marker : ''
result.success = true
return result
} else {
return result
}
}
/**
* Delete a single file
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* key: string
* }
*/
async deleteBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, key } = configMap
const config = new qiniu.conf.Config()
const bucketManager = new qiniu.rs.BucketManager(this.mac, config)
const res = await new Promise((resolve, reject) => {
bucketManager.delete(bucketName, key, (err, respBody, respInfo) => {
if (err) {
reject(err)
} else {
resolve({
respBody,
respInfo
})
}
})
}) as any
if (res && res.respInfo.statusCode === 200) {
return true
} else {
return false
}
}
/**
* Recursively delete a folder
* @param configMap
*/
async deleteBucketFolder (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, key } = configMap
const config = new qiniu.conf.Config()
const bucketManager = new qiniu.rs.BucketManager(this.mac, config)
let marker = ''
let isTruncated = true
const allFileList = {
Contents: [] as any[]
}
do {
const res = await new Promise((resolve, reject) => {
bucketManager.listPrefix(bucketName, {
prefix: key,
marker,
limit: 1000
}, (err, respBody, respInfo) => {
if (err) {
reject(err)
} else {
resolve({
respBody,
respInfo
})
}
})
}) as any
if (res && res.respInfo.statusCode === 200) {
if (res.respBody && res.respBody.items) {
allFileList.Contents = allFileList.Contents.concat(res.respBody.items)
}
isTruncated = !!(res.respBody && res.respBody.marker)
marker = res.respBody && res.respBody.marker ? res.respBody.marker : ''
} else {
return false
}
} while (isTruncated)
const cycleNum = Math.ceil(allFileList.Contents.length / 1000)
for (let i = 0; i < cycleNum; i++) {
const deleteOps = allFileList.Contents.slice(i * 1000, (i + 1) * 1000).map((item: any) => {
return qiniu.rs.deleteOp(bucketName, item.key)
})
const res = await new Promise((resolve, reject) => {
bucketManager.batch(deleteOps, (err, respBody, respInfo) => {
if (err) {
reject(err)
} else {
resolve({
respBody,
respInfo
})
}
})
}) as any
if (!(res && res.respInfo.statusCode === 200)) {
return false
}
}
return true
}
/**
* Rename (move) a file
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* oldKey: string,
* newKey: string
* }
*/
async renameBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, oldKey, newKey } = configMap
const config = new qiniu.conf.Config()
const bucketManager = new qiniu.rs.BucketManager(this.mac, config)
const res = await new Promise((resolve, reject) => {
bucketManager.move(bucketName, oldKey, bucketName, newKey, {
force: true
}, (err, respBody, respInfo) => {
if (err) {
reject(err)
} else {
resolve({
respBody,
respInfo
})
}
})
}) as any
return res && res.respInfo.statusCode === 200
}
/**
* Get a pre-signed download URL
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* key: string,
* expires: number,
* customUrl: string
* }
*/
async getPreSignedUrl (configMap: IStringKeyMap): Promise<string> {
const { key, expires, customUrl } = configMap
const config = new qiniu.conf.Config()
const bucketManager = new qiniu.rs.BucketManager(this.mac, config)
const urlPrefix = customUrl
const expiration = Math.floor(Date.now() / 1000 + expires)
const res = bucketManager.privateDownloadUrl(urlPrefix, key, expiration)
return res
}
/**
* Upload files
* @param configMap
*/
async uploadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { fileArray } = configMap
const instance = UpDownTaskQueue.getInstance()
fileArray.forEach((item: any) => {
item.key = item.key.replace(/^\/+/, '')
})
for (const item of fileArray) {
const { bucketName, region, key, filePath, fileName } = item
instance.addUploadTask({
id: `${bucketName}-${region}-${key}-${filePath}`,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
sourceFilePath: filePath,
targetFilePath: key,
targetFileBucket: bucketName,
targetFileRegion: region
})
const config = new qiniu.conf.Config()
const resumeUploader = new qiniu.resume_up.ResumeUploader(config)
const putExtra = new qiniu.resume_up.PutExtra()
const uploadToken = new qiniu.rs.PutPolicy({
scope: `${bucketName}:${key}`,
expires: 36000
}).uploadToken(this.mac)
putExtra.fname = key
putExtra.params = {}
putExtra.mimeType = getFileMimeType(fileName)
putExtra.version = 'v2'
putExtra.partSize = 4 * 1024 * 1024
putExtra.progressCallback = (uploadBytes, totalBytes) => {
const progress = Math.floor(uploadBytes / totalBytes * 100)
instance.updateUploadTask({
id: `${bucketName}-${region}-${key}-${filePath}`,
progress,
status: uploadTaskSpecialStatus.uploading
})
}
resumeUploader.putFile(uploadToken, key, filePath, putExtra, (respErr, respBody, respInfo) => {
if (respErr) {
this.logger.error(formatError(respErr, { class: 'Qiniu', method: 'uploadBucketFile' }))
instance.updateUploadTask({
id: `${bucketName}-${region}-${key}-${filePath}`,
progress: 0,
status: commonTaskStatus.failed,
finishTime: new Date().toLocaleString()
})
return
}
if (respInfo.statusCode === 200) {
instance.updateUploadTask({
id: `${bucketName}-${region}-${key}-${filePath}`,
progress: 100,
status: uploadTaskSpecialStatus.uploaded,
response: JSON.stringify(respBody),
finishTime: new Date().toLocaleString()
})
} else {
instance.updateUploadTask({
id: `${bucketName}-${region}-${key}-${filePath}`,
progress: 0,
status: commonTaskStatus.failed,
finishTime: new Date().toLocaleString()
})
}
})
}
return true
}
/**
* Create a folder placeholder object
* @param configMap
*/
async createBucketFolder (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, key } = configMap
const putPolicy = new qiniu.rs.PutPolicy({
scope: `${bucketName}:${key}`
})
const uploadToken = putPolicy.uploadToken(this.mac)
const FormUploader = new qiniu.form_up.FormUploader()
const putExtra = new qiniu.form_up.PutExtra()
const res = await new Promise((resolve, reject) => {
FormUploader.put(uploadToken, key, '', putExtra, (err, respBody, respInfo) => {
if (err) {
reject(err)
} else {
resolve({
respBody,
respInfo
})
}
})
}) as any
if (res && res.respInfo.statusCode === 200) {
return true
} else {
return false
}
}
/**
* Download files
* @param configMap
*/
async downloadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { downloadPath, fileArray } = configMap
const instance = UpDownTaskQueue.getInstance()
for (const item of fileArray) {
const { bucketName, region, key, fileName, customUrl } = item
const savedFilePath = path.join(downloadPath, fileName)
const fileStream = fs.createWriteStream(savedFilePath)
const id = `${bucketName}-${region}-${key}`
if (instance.getDownloadTask(id)) {
continue
}
instance.addDownloadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
targetFilePath: savedFilePath
})
const preSignedUrl = await this.getPreSignedUrl({ key, expires: 36000, customUrl })
gotDownload(instance, preSignedUrl, fileStream, id, savedFilePath, this.logger)
}
return true
}
}
export default QiniuApi
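/*
 * Usage sketch (illustrative; keys, domain and object name are placeholders,
 * `logger` is assumed to be a ManageLogger instance):
 *
 *   const api = new QiniuApi(accessKey, secretKey, logger)
 *   // private-space download URL, valid for one hour, served from the bound domain
 *   const url = await api.getPreSignedUrl({
 *     key: 'images/photo.png',
 *     expires: 3600,
 *     customUrl: 'https://cdn.example.com'
 *   })
 */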

View File

@ -0,0 +1,248 @@
import { isImage } from '@/manage/utils/common'
import axios, { AxiosInstance } from 'axios'
import windowManager from 'apis/app/window/windowManager'
import { IWindowList } from '#/types/enum'
import { ipcMain, IpcMainEvent } from 'electron'
import FormData from 'form-data'
import fs from 'fs-extra'
import { getFileMimeType, gotUpload, gotDownload } from '../utils/common'
import path from 'path'
import UpDownTaskQueue, { commonTaskStatus } from '../datastore/upDownTaskQueue'
import { ManageLogger } from '../utils/logger'
class SmmsApi {
baseUrl = 'https://smms.app/api/v2'
token: string
axiosInstance: AxiosInstance
logger: ManageLogger
constructor (token: string, logger: ManageLogger) {
this.token = token
this.axiosInstance = axios.create({
baseURL: this.baseUrl,
timeout: 30000,
headers: {
Authorization: this.token
}
})
this.logger = logger
}
formatFile (item: any) {
return {
...item,
Key: item.path,
key: item.path,
fileName: item.filename,
fileSize: item.size,
formatedTime: new Date(item.created_at).toLocaleString(),
isDir: false,
checked: false,
match: false,
isImage: isImage(item.storename),
sha: item.hash,
downloadUrl: item.url
}
}
async getBucketListBackstage (configMap: IStringKeyMap): Promise<any> {
const window = windowManager.get(IWindowList.SETTING_WINDOW)!
const { cancelToken } = configMap
let marker = 1
const cancelTask = [false]
ipcMain.on('cancelLoadingFileList', (_evt: IpcMainEvent, token: string) => {
if (token === cancelToken) {
cancelTask[0] = true
ipcMain.removeAllListeners('cancelLoadingFileList')
}
})
let res = {} as any
const result = {
fullList: <any>[],
success: false,
finished: false
}
do {
res = await this.axiosInstance(
'/upload_history',
{
method: 'GET',
headers: {
'Content-Type': 'multipart/form-data'
},
params: {
page: marker
}
})
if (res && res.status === 200 && res.data && res.data.success) {
if (res.data.Count === 0) {
result.success = true
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
return
} else {
res.data.data.forEach((item: any) => {
result.fullList.push(this.formatFile(item))
})
window.webContents.send('refreshFileTransferList', result)
}
} else {
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
return
}
marker++
} while (!cancelTask[0] && res && res.status === 200 && res.data && res.data.success && res.data.CurrentPage < res.data.TotalPages)
result.success = true
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
}
/**
* Get a paginated upload-history list
* @param {Object} configMap
* configMap = {
* bucketName: string,
* bucketConfig: {
* Location: string
* },
* paging: boolean,
* prefix: string,
* marker: string,
* itemsPerPage: number,
* customUrl: string
* }
*/
async getBucketFileList (configMap: IStringKeyMap): Promise<any> {
const { currentPage } = configMap
let res = {} as any
const result = {
fullList: <any>[],
isTruncated: false,
nextMarker: '',
success: false
}
res = await this.axiosInstance(
'/upload_history',
{
method: 'GET',
headers: {
'Content-Type': 'multipart/form-data'
},
params: {
page: currentPage
}
}
)
if (res && res.status === 200 && res.data && res.data.success) {
if (res.data.Count === 0) {
result.success = true
return result
}
res.data.data.forEach((item: any) => {
result.fullList.push(this.formatFile(item))
})
result.isTruncated = res.data.CurrentPage < res.data.TotalPages
result.nextMarker = res.data.CurrentPage + 1
result.success = true
return result
} else {
return result
}
}
/**
* Delete a single file by its hash
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* key: string,
* DeleteHash: string
* }
*/
async deleteBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { DeleteHash } = configMap
const params = {
hash: DeleteHash,
format: 'json'
}
const res = await this.axiosInstance(
`/delete/${DeleteHash}`,
{
method: 'GET',
params
}
)
return res && res.status === 200 && res.data && res.data.success
}
/**
* Upload files
* @param configMap
*/
async uploadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { fileArray } = configMap
const instance = UpDownTaskQueue.getInstance()
for (const item of fileArray) {
const { bucketName, region, key, filePath, fileName } = item
const id = `${bucketName}-${region}-${key}-${filePath}`
if (instance.getUploadTask(id)) {
continue
}
instance.addUploadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
sourceFilePath: filePath,
targetFilePath: key,
targetFileBucket: bucketName,
targetFileRegion: region
})
const form = new FormData()
form.append('format', 'json')
form.append('smfile', fs.createReadStream(filePath), {
filename: path.basename(fileName),
contentType: getFileMimeType(fileName)
})
const headers = form.getHeaders()
headers.Authorization = this.token
const url = `${this.baseUrl}/upload`
gotUpload(instance, url, 'POST', form, headers, id, this.logger)
}
return true
}
/**
* Download files
* @param configMap
*/
async downloadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { downloadPath, fileArray } = configMap
const instance = UpDownTaskQueue.getInstance()
for (const item of fileArray) {
const { bucketName, region, key, fileName, downloadUrl: preSignedUrl } = item
const savedFilePath = path.join(downloadPath, fileName)
const fileStream = fs.createWriteStream(savedFilePath)
const id = `${bucketName}-${region}-${key}`
if (instance.getDownloadTask(id)) {
continue
}
instance.addDownloadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
targetFilePath: savedFilePath
})
gotDownload(instance, preSignedUrl, fileStream, id, savedFilePath, this.logger)
}
return true
}
}
export default SmmsApi
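/*
 * Usage sketch (token and hash values are placeholders, `logger` is assumed to be
 * a ManageLogger instance):
 *
 *   const api = new SmmsApi(token, logger)
 *   const ok = await api.deleteBucketFile({ DeleteHash: 'abc123' })
 */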

View File

@ -0,0 +1,523 @@
import COS from 'cos-nodejs-sdk-v5'
import fs from 'fs-extra'
import path from 'path'
import { isImage } from '~/renderer/manage/utils/common'
import { handleUrlEncode } from '~/universal/utils/common'
import windowManager from 'apis/app/window/windowManager'
import { IWindowList } from '#/types/enum'
import { ipcMain, IpcMainEvent } from 'electron'
import { formatError, getFileMimeType } from '../utils/common'
import UpDownTaskQueue,
{
uploadTaskSpecialStatus,
commonTaskStatus,
downloadTaskSpecialStatus
} from '../datastore/upDownTaskQueue'
import { ManageLogger } from '../utils/logger'
class TcyunApi {
ctx: COS
logger: ManageLogger
constructor (secretId: string, secretKey: string, logger: ManageLogger) {
this.ctx = new COS({
SecretId: secretId,
SecretKey: secretKey
})
this.logger = logger
}
formatFolder (item: {Prefix: string}, slicedPrefix: string): any {
return {
...item,
key: item.Prefix,
fileSize: 0,
formatedTime: '',
fileName: item.Prefix.replace(slicedPrefix, '').replace('/', ''),
isDir: true,
checked: false,
isImage: false,
match: false
}
}
formatFile (item: COS.CosObject, slicedPrefix: string, urlPrefix: string): any {
return {
...item,
key: item.Key,
fileName: item.Key.replace(slicedPrefix, ''),
fileSize: parseInt(item.Size),
formatedTime: new Date(item.LastModified).toLocaleString(),
isDir: false,
checked: false,
isImage: isImage(item.Key),
match: false,
url: `${urlPrefix}/${item.Key}`
}
}
/**
* Get the bucket list
*/
async getBucketList (): Promise<any> {
const res = await this.ctx.getService({})
return res && res.Buckets ? res.Buckets : []
}
/**
* Get the enabled custom domains bound to a bucket
*/
async getBucketDomain (param: IStringKeyMap): Promise<any> {
const { bucketName, region } = param
const res = await this.ctx.getBucketDomain({
Bucket: bucketName,
Region: region
})
const result = [] as string[]
if (res && res.statusCode === 200) {
if (res.DomainRule && res.DomainRule.length > 0) {
res.DomainRule.forEach((item: any) => {
if (item.Status === 'ENABLED') {
result.push(item.Name)
}
})
return result
} else {
return []
}
} else {
return []
}
}
/**
* Create a bucket
* @param {Object} configMap
* configMap = {
* BucketName: string,
* region: string,
* acl: string
* }
* @description
* acl: private | publicRead | publicReadWrite
*/
async createBucket (configMap: IStringKeyMap): Promise<boolean> {
const aclTransMap: IStringKeyMap = {
private: 'private',
publicRead: 'public-read',
publicReadWrite: 'public-read-write'
}
const res = await this.ctx.putBucket({
ACL: aclTransMap[configMap.acl],
Bucket: configMap.BucketName,
Region: configMap.region
})
return res && res.statusCode === 200
}
async getBucketListBackstage (configMap: IStringKeyMap): Promise<any> {
const window = windowManager.get(IWindowList.SETTING_WINDOW)!
const bucket = configMap.bucketName
const region = configMap.bucketConfig.Location
const prefix = configMap.prefix as string
const slicedPrefix = prefix.slice(1, prefix.length)
const urlPrefix = configMap.customUrl || `https://${bucket}.cos.${region}.myqcloud.com`
let marker
const cancelToken = configMap.cancelToken as string
const cancelTask = [false]
ipcMain.on('cancelLoadingFileList', (_evt: IpcMainEvent, token: string) => {
if (token === cancelToken) {
cancelTask[0] = true
ipcMain.removeAllListeners('cancelLoadingFileList')
}
})
let res = {} as COS.GetBucketResult
const result = {
fullList: <any>[],
success: false,
finished: false
}
do {
res = await this.ctx.getBucket({
Bucket: bucket,
Region: region,
Prefix: slicedPrefix === '' ? undefined : slicedPrefix,
Delimiter: '/',
Marker: marker
})
if (res && res.statusCode === 200) {
res.CommonPrefixes.forEach((item: { Prefix: string}) =>
result.fullList.push(this.formatFolder(item, slicedPrefix)))
res.Contents.forEach((item: COS.CosObject) =>
parseInt(item.Size) !== 0 && result.fullList.push(this.formatFile(item, slicedPrefix, urlPrefix)))
window.webContents.send('refreshFileTransferList', result)
} else {
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
return
}
marker = res.NextMarker
} while (res.IsTruncated === 'true' && !cancelTask[0])
result.success = true
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
}
/**
* Get a paginated file list
* @param {Object} configMap
* configMap = {
* bucketName: string,
* bucketConfig: {
* Location: string
* },
* paging: boolean,
* prefix: string,
* marker: string,
* itemsPerPage: number,
* customUrl: string
* }
*/
async getBucketFileList (configMap: IStringKeyMap): Promise<any> {
const bucket = configMap.bucketName
const region = configMap.bucketConfig.Location
const prefix = configMap.prefix as string
const slicedPrefix = prefix.slice(1)
const urlPrefix = configMap.customUrl || `https://${bucket}.cos.${region}.myqcloud.com`
const marker = configMap.marker as string
const itemsPerPage = configMap.itemsPerPage as number
let res = {} as COS.GetBucketResult
const result = {
fullList: <any>[],
isTruncated: false,
nextMarker: '',
success: false
}
res = await this.ctx.getBucket({
Bucket: bucket,
Region: region,
Prefix: slicedPrefix === '' ? undefined : slicedPrefix,
Delimiter: '/',
Marker: marker,
MaxKeys: itemsPerPage
})
if (res && res.statusCode === 200) {
res.CommonPrefixes.forEach((item: { Prefix: string}) =>
result.fullList.push(this.formatFolder(item, slicedPrefix)))
res.Contents.forEach((item: COS.CosObject) =>
parseInt(item.Size) !== 0 && result.fullList.push(this.formatFile(item, slicedPrefix, urlPrefix)))
result.isTruncated = res.IsTruncated === 'true'
result.nextMarker = res.NextMarker || ''
result.success = true
return result
} else {
return result
}
}
/**
* Rename a file (copy to the new key, then delete the old key)
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* oldKey: string,
* newKey: string
* }
*/
async renameBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, region, oldKey, newKey } = configMap
const res = await this.ctx.putObjectCopy({
Bucket: bucketName,
Region: region,
Key: newKey,
CopySource: handleUrlEncode(`${bucketName}.cos.${region}.myqcloud.com/${oldKey}`)
})
if (res && res.statusCode === 200) {
const res2 = await this.ctx.deleteObject({
Bucket: bucketName,
Region: region,
Key: oldKey
})
return res2 && res2.statusCode === 204
} else {
return false
}
}
/**
* Delete a single file
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* key: string
* }
*/
async deleteBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, region, key } = configMap
const res = await this.ctx.deleteObject({
Bucket: bucketName,
Region: region,
Key: key
})
return res && res.statusCode === 204
}
/**
* Recursively delete a folder
* @param configMap
*/
async deleteBucketFolder (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, region, key } = configMap
let marker
let isTruncated
const allFileList = {
CommonPrefixes: [] as any[],
Contents: [] as any[]
}
let res = await this.ctx.getBucket({
Bucket: bucketName,
Region: region,
Prefix: key,
Delimiter: '/',
MaxKeys: 1000
})
if (res && res.statusCode === 200) {
res.CommonPrefixes.length > 0 && allFileList.CommonPrefixes.push(...res.CommonPrefixes)
res.Contents.length > 0 && allFileList.Contents.push(...res.Contents)
isTruncated = res.IsTruncated
marker = res.NextMarker
while (isTruncated === 'true') {
res = await this.ctx.getBucket({
Bucket: bucketName,
Region: region,
Prefix: key,
Delimiter: '/',
Marker: marker,
MaxKeys: 1000
}) as any
if (res && res.statusCode === 200) {
res.CommonPrefixes.length > 0 && allFileList.CommonPrefixes.push(...res.CommonPrefixes)
res.Contents.length > 0 && allFileList.Contents.push(...res.Contents)
isTruncated = res.IsTruncated
marker = res.NextMarker
} else {
return false
}
}
} else {
return false
}
if (allFileList.CommonPrefixes.length > 0) {
for (const item of allFileList.CommonPrefixes) {
res = await this.deleteBucketFolder({
bucketName,
region,
key: item.Prefix
}) as any
if (!res) {
return false
}
}
}
if (allFileList.Contents.length > 0) {
const cycle = Math.ceil(allFileList.Contents.length / 1000)
for (let i = 0; i < cycle; i++) {
res = await this.ctx.deleteMultipleObject({
Bucket: bucketName,
Region: region,
Objects: allFileList.Contents.slice(i * 1000, (i + 1) * 1000).map((item: any) => {
return {
Key: item.Key
}
})
}) as any
if (!(res && res.statusCode === 200)) {
return false
}
}
}
return true
}
/**
* Get a pre-signed URL for a file
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* key: string,
* expires: number,
* customUrl: string
* }
*/
async getPreSignedUrl (configMap: IStringKeyMap): Promise<string> {
const { bucketName, region, key, expires, customUrl } = configMap
const res = this.ctx.getObjectUrl({
Bucket: bucketName,
Region: region,
Key: key,
Expires: expires,
Sign: true
}, () => {
})
return customUrl ? `${customUrl.replace(/\/$/, '')}/${key}${res.slice(res.indexOf('?'))}` : res
}
/**
 * Upload local files to the bucket, tracking progress in the shared task queue.
* @param configMap
*/
async uploadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { fileArray } = configMap
// fileArray = [{
// bucketName: string,
// region: string,
// key: string,
// filePath: string
// fileSize: number
// }]
const instance = UpDownTaskQueue.getInstance()
const files = [] as any[]
for (const item of fileArray) {
const { bucketName, region, key, filePath, fileSize, fileName } = item
const id = `${bucketName}-${region}-${key}-${filePath}`
if (instance.getUploadTask(id)) {
continue
}
instance.addUploadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
sourceFilePath: filePath,
targetFilePath: key,
targetFileBucket: bucketName,
targetFileRegion: region
})
files.push({
Bucket: bucketName,
Region: region,
Key: key,
FilePath: filePath,
ContentType: getFileMimeType(filePath),
Body: fileSize > 1048576 ? fs.createReadStream(filePath) : undefined,
onProgress: (progress: any) => {
const cancelToken = ''
instance.updateUploadTask({
id,
progress: Math.floor(progress.percent * 100),
status: uploadTaskSpecialStatus.uploading,
cancelToken
})
},
onFileFinish: (err: any, data: any) => {
if (data) {
instance.updateUploadTask({
id,
progress: 100,
status: uploadTaskSpecialStatus.uploaded,
response: typeof data === 'object' ? JSON.stringify(data) : String(data),
finishTime: new Date().toLocaleString()
})
} else {
this.logger.error(formatError(err, { method: 'uploadBucketFile', class: 'TcyunApi' }))
instance.updateUploadTask({
id,
progress: 0,
status: commonTaskStatus.failed,
response: typeof err === 'object' ? JSON.stringify(err) : String(err),
finishTime: new Date().toLocaleString()
})
}
}
})
this.ctx.uploadFiles({
files
})
}
return true
}
/**
 * Create a folder by putting an empty object whose key ends with '/'.
* @param configMap
*/
async createBucketFolder (configMap: IStringKeyMap): Promise<boolean> {
const { bucketName, region, key } = configMap
const res = await this.ctx.putObject({
Bucket: bucketName,
Region: region,
Key: key,
Body: ''
})
return res && res.statusCode === 200
}
/**
 * Download files from the bucket to a local folder, tracking progress in the shared task queue.
* @param configMap
*/
async downloadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { downloadPath, fileArray } = configMap
// fileArray = [{
// bucketName: string,
// region: string,
// key: string,
// fileName: string
// }]
const instance = UpDownTaskQueue.getInstance()
for (const item of fileArray) {
const { bucketName, region, key, fileName } = item
const id = `${bucketName}-${region}-${key}`
if (instance.getDownloadTask(id)) {
continue
}
instance.addDownloadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
targetFilePath: path.join(downloadPath, fileName)
})
this.ctx.downloadFile({
Bucket: bucketName,
Region: region,
Key: key,
RetryTimes: 3,
ChunkSize: 1024 * 1024 * 1,
FilePath: path.join(downloadPath, fileName),
onProgress: (progress: any) => {
instance.updateDownloadTask({
id,
progress: Math.floor(progress.percent * 100),
status: downloadTaskSpecialStatus.downloading
})
}
}).then((res: any) => {
instance.updateDownloadTask({
id,
progress: res && res.statusCode === 200 ? 100 : 0,
status: res && res.statusCode === 200 ? downloadTaskSpecialStatus.downloaded : commonTaskStatus.failed,
response: typeof res === 'object' ? JSON.stringify(res) : String(res),
finishTime: new Date().toLocaleString()
})
}).catch((err: any) => {
this.logger.error(formatError(err, { method: 'downloadBucketFile', class: 'TcyunApi' }))
instance.updateDownloadTask({
id,
progress: 0,
status: commonTaskStatus.failed,
response: typeof err === 'object' ? JSON.stringify(err) : String(err),
finishTime: new Date().toLocaleString()
})
})
}
return true
}
}
export default TcyunApi
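// Editor's note: illustrative usage sketch, not part of this commit. It shows how a
// caller could drive the copy-then-delete rename implemented above; the bucket name,
// region and keys are hypothetical placeholders.
//
// async function renameExample (api: TcyunApi): Promise<void> {
//   const ok = await api.renameBucketFile({
//     bucketName: 'examplebucket-1250000000', // hypothetical COS bucket (name-appid)
//     region: 'ap-shanghai',
//     oldKey: 'images/old-name.png',
//     newKey: 'images/new-name.png'
//   })
//   if (!ok) {
//     throw new Error('rename failed: the copy or delete step did not return success')
//   }
// }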

View File

@ -0,0 +1,388 @@
// @ts-ignore
import Upyun from 'upyun'
import { md5, hmacSha1Base64, getFileMimeType, gotDownload, gotUpload } from '../utils/common'
import { isImage } from '~/renderer/manage/utils/common'
import windowManager from 'apis/app/window/windowManager'
import { IWindowList } from '#/types/enum'
import { ipcMain, IpcMainEvent } from 'electron'
import axios from 'axios'
import FormData from 'form-data'
import fs from 'fs-extra'
import path from 'path'
import UpDownTaskQueue, { commonTaskStatus } from '../datastore/upDownTaskQueue'
import { ManageLogger } from '../utils/logger'
class UpyunApi {
ser: Upyun.Service
cli: Upyun.Client
bucket: string
operator: string
password: string
stopMarker = 'g2gCZAAEbmV4dGQAA2VvZg'
logger: ManageLogger
constructor (bucket: string, operator: string, password: string, logger: ManageLogger) {
this.ser = new Upyun.Service(bucket, operator, password)
this.cli = new Upyun.Client(this.ser)
this.bucket = bucket
this.operator = operator
this.password = password
this.logger = logger
}
formatFolder (item: any, slicedPrefix: string) {
return {
...item,
key: `${slicedPrefix}${item.name}/`,
fileSize: 0,
formatedTime: '',
fileName: item.name,
isDir: true,
checked: false,
isImage: false,
match: false,
Key: `${slicedPrefix}${item.name}/`
}
}
formatFile (item: any, slicedPrefix: string, urlPrefix: string) {
return {
...item,
fileName: item.name,
fileSize: item.size,
formatedTime: new Date(parseInt(item.time) * 1000).toLocaleString(),
isDir: false,
checked: false,
match: false,
isImage: isImage(item.name),
url: `${urlPrefix}/${slicedPrefix}${item.name}`,
key: `${slicedPrefix}${item.name}`
}
}
authorization (
method: string,
uri: string,
contentMd5: string,
operator: string,
password: string
) {
const passwordMd5 = md5(password, 'hex')
const date = new Date().toUTCString()
const upperMethod = method.toUpperCase()
let stringToSign = ''
const codedUri = encodeURI(uri)
if (contentMd5 === '') {
stringToSign = `${upperMethod}&${codedUri}&${date}`
} else {
stringToSign = `${upperMethod}&${codedUri}&${date}&${contentMd5}`
}
const signature = hmacSha1Base64(passwordMd5, stringToSign)
return `UPYUN ${operator}:${signature}`
}
/**
 * Upyun has no list-buckets API; return the configured bucket name.
*/
async getBucketList (): Promise<any> {
return this.bucket
}
async getBucketListBackstage (configMap: IStringKeyMap): Promise<any> {
const window = windowManager.get(IWindowList.SETTING_WINDOW)!
const { bucketName: bucket, prefix, cancelToken } = configMap
const slicedPrefix = prefix.slice(1)
const urlPrefix = configMap.customUrl || `http://${bucket}.test.upcdn.net`
let marker = ''
const cancelTask = [false]
ipcMain.on('cancelLoadingFileList', (_evt: IpcMainEvent, token: string) => {
if (token === cancelToken) {
cancelTask[0] = true
ipcMain.removeAllListeners('cancelLoadingFileList')
}
})
let res = {} as any
const result = {
fullList: <any>[],
success: false,
finished: false
}
do {
res = await this.cli.listDir(prefix, {
limit: 10000,
iter: marker
})
if (res) {
res.files && res.files.forEach((item: any) => {
item.type === 'N' && result.fullList.push(this.formatFile(item, slicedPrefix, urlPrefix))
item.type === 'F' && result.fullList.push(this.formatFolder(item, slicedPrefix))
})
window.webContents.send('refreshFileTransferList', result)
} else {
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
return
}
marker = res.next
} while (!cancelTask[0] && res.next !== this.stopMarker)
result.success = true
result.finished = true
window.webContents.send('refreshFileTransferList', result)
ipcMain.removeAllListeners('cancelLoadingFileList')
}
/**
 * Get a single page of the bucket file list.
* @param {Object} configMap
* configMap = {
* bucketName: string,
* bucketConfig: {
* Location: string
* },
* paging: boolean,
* prefix: string,
* marker: string,
* itemsPerPage: number,
* customUrl: string
* }
*/
async getBucketFileList (configMap: IStringKeyMap): Promise<any> {
const { bucketName: bucket, prefix, marker, itemsPerPage } = configMap
const slicedPrefix = prefix.slice(1)
const urlPrefix = configMap.customUrl || `http://${bucket}.test.upcdn.net`
let res = {} as any
const result = {
fullList: <any>[],
isTruncated: false,
nextMarker: '',
success: false
}
res = await this.cli.listDir(prefix, {
limit: itemsPerPage,
iter: marker || ''
})
if (res) {
res.files && res.files.forEach((item: any) => {
item.type === 'N' && result.fullList.push(this.formatFile(item, slicedPrefix, urlPrefix))
item.type === 'F' && result.fullList.push(this.formatFolder(item, slicedPrefix))
})
result.isTruncated = res.next !== this.stopMarker
result.nextMarker = res.next
result.success = true
return result
} else {
return result
}
}
/**
 * Rename (move) a file via the REST API using the X-Upyun-Move-Source header.
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* oldKey: string,
* newKey: string
* }
*/
async renameBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const oldKey = configMap.oldKey
let newKey = configMap.newKey
const method = 'PUT'
if (newKey.endsWith('/')) {
newKey = newKey.slice(0, -1)
}
const xUpyunMoveSource = `/${this.bucket}/${oldKey}`
const uri = `/${this.bucket}/${newKey}`
const authorization = this.authorization(method, uri, '', this.operator, this.password)
const headers = {
Authorization: authorization,
'X-Upyun-Move-Source': xUpyunMoveSource,
'Content-Length': 0,
Date: new Date().toUTCString()
}
const res = await axios({
method,
url: `http://v0.api.upyun.com${uri}`,
headers
})
return res.status === 200
}
/**
 * Delete a single file from the bucket.
* @param configMap
* configMap = {
* bucketName: string,
* region: string,
* key: string
* }
*/
async deleteBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { key } = configMap
const res = await this.cli.deleteFile(key)
return res
}
/**
 * Recursively delete a folder and all of its contents, then the folder itself.
* @param configMap
*/
async deleteBucketFolder (configMap: IStringKeyMap): Promise<boolean> {
const { key } = configMap
let marker = ''
let isTruncated
const allFileList = {
CommonPrefixes: [] as any[],
Contents: [] as any[]
}
do {
const res = await this.cli.listDir(key, {
limit: 10000,
iter: marker
})
if (res) {
res.files.forEach((item: any) => {
item.type === 'N' && allFileList.Contents.push({
...item,
key: `${key}${item.name}`
})
item.type === 'F' && allFileList.CommonPrefixes.push({
...item,
key: `${key}${item.name}/`
})
})
marker = res.next
isTruncated = res.next !== this.stopMarker
} else {
return false
}
} while (isTruncated)
if (allFileList.Contents.length > 0) {
let success = false
for (let i = 0; i < allFileList.Contents.length; i++) {
const item = allFileList.Contents[i]
success = await this.cli.deleteFile(item.key)
if (!success) {
return false
}
}
}
if (allFileList.CommonPrefixes.length > 0) {
for (const item of allFileList.CommonPrefixes) {
const res = await this.deleteBucketFolder({
key: item.key
})
if (!res) {
return false
}
}
}
const deleteSelf = await this.cli.deleteFile(key)
if (!deleteSelf) {
return false
}
return true
}
/**
 * Upload files to the bucket via the UPYUN form API.
 * axios' onUploadProgress does not work in Node.js, so got is used instead.
* @param configMap
*/
async uploadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { fileArray } = configMap
const instance = UpDownTaskQueue.getInstance()
fileArray.forEach((item: any) => {
item.key = item.key.replace(/^\/+/, '')
})
for (const item of fileArray) {
const { bucketName, region, key, filePath, fileName, fileSize } = item
const id = `${bucketName}-${region}-${key}-${filePath}`
if (instance.getUploadTask(id)) {
continue
}
instance.addUploadTask({
id,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
sourceFilePath: filePath,
targetFilePath: key,
targetFileBucket: bucketName,
targetFileRegion: region
})
const date = new Date().toUTCString()
const uri = `/${key}`
const method = 'POST'
const uploadPolicy = {
bucket: bucketName,
'save-key': uri,
expiration: Math.floor(Date.now() / 1000) + 2592000,
date,
'content-length': fileSize
}
const base64Policy = Buffer.from(JSON.stringify(uploadPolicy)).toString('base64')
const stringToSign = `${method}&/${bucketName}&${date}&${base64Policy}`
const signature = hmacSha1Base64(md5(this.password, 'hex'), stringToSign)
const authorization = `UPYUN ${this.operator}:${signature}`
const form = new FormData()
form.append('policy', base64Policy)
form.append('authorization', authorization)
form.append('file', fs.createReadStream(filePath), {
filename: path.basename(key),
contentType: getFileMimeType(fileName)
})
const headers = form.getHeaders()
headers.Host = 'v0.api.upyun.com'
headers.Date = date
headers.Authorization = authorization
gotUpload(instance, `http://v0.api.upyun.com/${bucketName}`, method, form, headers, id, this.logger)
}
return true
}
/**
 * Create a folder via the REST makeDir API.
* @param configMap
*/
async createBucketFolder (configMap: IStringKeyMap): Promise<boolean> {
const { key } = configMap
const res = await this.cli.makeDir(`/${key}`)
return res
}
/**
 * Download files from the bucket to a local folder.
* @param configMap
*/
async downloadBucketFile (configMap: IStringKeyMap): Promise<boolean> {
const { downloadPath, fileArray } = configMap
const instance = UpDownTaskQueue.getInstance()
for (const item of fileArray) {
const { bucketName, region, key, fileName, customUrl } = item
const savedFilePath = path.join(downloadPath, fileName)
const fileStream = fs.createWriteStream(savedFilePath)
const id = `${bucketName}-${region}-${key}`
if (instance.getDownloadTask(id)) {
continue
}
instance.addDownloadTask({
id: `${bucketName}-${region}-${key}`,
progress: 0,
status: commonTaskStatus.queuing,
sourceFileName: fileName,
targetFilePath: savedFilePath
})
const preSignedUrl = `${customUrl}/${key}`
gotDownload(instance, preSignedUrl, fileStream, id, savedFilePath, this.logger)
}
return true
}
}
export default UpyunApi
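// Editor's note: illustrative sketch, not part of this commit. It mirrors how
// renameBucketFile above signs a UPYUN REST request: MD5 the operator password, then
// HMAC-SHA1 over "METHOD&URI&DATE"; the keys below are hypothetical.
//
// function buildMoveHeaders (api: UpyunApi, oldKey: string, newKey: string) {
//   const uri = `/${api.bucket}/${newKey}`
//   return {
//     Authorization: api.authorization('PUT', uri, '', api.operator, api.password),
//     'X-Upyun-Move-Source': `/${api.bucket}/${oldKey}`,
//     'Content-Length': 0,
//     Date: new Date().toUTCString()
//   }
// }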

View File

@ -0,0 +1,66 @@
/* eslint-disable */
import { JSONStore } from '@picgo/store'
import { IJSON } from '@picgo/store/dist/types'
import { ManageApiType, ManageConfigType } from '~/universal/types/manage'
class ManageDB {
private readonly ctx: ManageApiType
private readonly db: JSONStore
constructor (ctx: ManageApiType) {
this.ctx = ctx
this.db = new JSONStore(this.ctx.configPath)
let initParams: IStringKeyMap = {
picBed: {},
settings: {},
currentPicBed: 'placeholder'
}
for (let key in initParams) {
if (!this.db.has(key)) {
try {
this.db.set(key, initParams[key])
} catch (e: any) {
this.ctx.logger.error(e)
throw e
}
}
}
}
read (flush?: boolean): IJSON {
return this.db.read(flush)
}
get (key: string = ''): any {
this.read(true)
return this.db.get(key)
}
set (key: string, value: any): void {
this.read(true)
return this.db.set(key, value)
}
has (key: string): boolean {
this.read(true)
return this.db.has(key)
}
unset (key: string, value: any): boolean {
this.read(true)
return this.db.unset(key, value)
}
saveConfig (config: Partial<ManageConfigType>): void {
Object.keys(config).forEach((name: string) => {
this.set(name, config[name])
})
}
removeConfig (config: ManageConfigType): void {
Object.keys(config).forEach((name: string) => {
this.unset(name, config[name])
})
}
}
export default ManageDB
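// Editor's note: illustrative sketch, not part of this commit. ManageDB is normally
// created by ManageApi (see manageApi.ts in this commit) with the api instance as ctx;
// the config payload below is hypothetical, the seeded keys come from the constructor above.
//
// const db = new ManageDB(manageApiInstance) // manageApiInstance: ManageApiType
// db.set('picBed.tcyun', { picBedName: 'tcyun' }) // hypothetical picbed config
// const current = db.get('currentPicBed')         // 'placeholder' until changed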

View File

@ -0,0 +1,116 @@
import fs from 'fs-extra'
import writeFile from 'write-file-atomic'
import path from 'path'
import { app } from 'electron'
import { getLogger } from '@core/utils/localLogger'
import dayjs from 'dayjs'
import { T } from '~/main/i18n'
const STORE_PATH = app.getPath('userData')
const manageConfigFilePath = path.join(STORE_PATH, 'manage.json')
export const defaultManageConfigPath = manageConfigFilePath
const manageConfigFileBackupPath = path.join(STORE_PATH, 'manage.bak.json')
let _configFilePath = ''
let hasCheckPath = false
const errorMsg = {
broken: T('TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_DEFAULT'),
brokenButBackup: T('TIPS_PICGO_CONFIG_FILE_BROKEN_WITH_BACKUP')
}
/** ensure notification list */
if (!global.notificationList) global.notificationList = []
function manageDbChecker () {
if (process.type !== 'renderer') {
const manageConfigFilePath = managePathChecker()
if (!fs.existsSync(manageConfigFilePath)) {
return
}
let configFile: string = '{}'
const optionsTpl = {
title: T('TIPS_NOTICE'),
body: ''
}
// back up the config file when it parses; if it is broken, try to restore from the backup
try {
configFile = fs.readFileSync(manageConfigFilePath, { encoding: 'utf-8' })
JSON.parse(configFile)
} catch (e) {
fs.unlinkSync(manageConfigFilePath)
if (fs.existsSync(manageConfigFileBackupPath)) {
try {
configFile = fs.readFileSync(manageConfigFileBackupPath, { encoding: 'utf-8' })
JSON.parse(configFile)
writeFile.sync(manageConfigFilePath, configFile, { encoding: 'utf-8' })
const stats = fs.statSync(manageConfigFileBackupPath)
optionsTpl.body = `${errorMsg.brokenButBackup}\n${T('TIPS_PICGO_BACKUP_FILE_VERSION', {
v: dayjs(stats.mtime).format('YYYY-MM-DD HH:mm:ss')
})}`
global.notificationList.push(optionsTpl)
return
} catch (e) {
optionsTpl.body = errorMsg.broken
global.notificationList.push(optionsTpl)
return
}
}
optionsTpl.body = errorMsg.broken
global.notificationList.push(optionsTpl)
return
}
writeFile.sync(manageConfigFileBackupPath, configFile, { encoding: 'utf-8' })
}
}
/**
* Get manage config path
*/
function managePathChecker (): string {
if (_configFilePath) {
return _configFilePath
}
// defaultConfigPath
_configFilePath = defaultManageConfigPath
// if the default config path does not exist,
// do not try to parse the config content
if (!fs.existsSync(defaultManageConfigPath)) {
return _configFilePath
}
try {
const configString = fs.readFileSync(defaultManageConfigPath, { encoding: 'utf-8' })
const config = JSON.parse(configString)
const userConfigPath: string = config.configPath || ''
if (userConfigPath) {
if (fs.existsSync(userConfigPath) && userConfigPath.endsWith('.json')) {
_configFilePath = userConfigPath
return _configFilePath
}
}
return _configFilePath
} catch (e) {
const manageLogPath = path.join(STORE_PATH, 'manage-gui-local.log')
const logger = getLogger(manageLogPath, 'Manage')
if (!hasCheckPath) {
const optionsTpl = {
title: T('TIPS_NOTICE'),
body: T('TIPS_CUSTOM_CONFIG_FILE_PATH_ERROR')
}
global.notificationList?.push(optionsTpl)
hasCheckPath = true
}
logger('error', e)
_configFilePath = defaultManageConfigPath
return _configFilePath
}
}
function managePathDir () {
return path.dirname(managePathChecker())
}
export {
managePathChecker,
managePathDir,
manageDbChecker
}
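// Editor's note: descriptive summary added for clarity, not part of this commit.
// managePathChecker resolves the config path in this order: the default
// <userData>/manage.json; then, if that file contains a "configPath" field pointing
// at an existing .json file, that custom path (hypothetical example:
// 'D:/piclist/manage.json'); on any parse error it falls back to the default path
// and logs to manage-gui-local.log.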

View File

@ -0,0 +1,212 @@
// a singleton class to manage the up/down task queue
// qiniu tcyun aliyun smms imgur github upyun
import path from 'path'
import { app } from 'electron'
import fs from 'fs-extra'
export enum commonTaskStatus {
queuing = 'queuing',
failed = 'failed',
canceled = 'canceled',
paused = 'paused'
}
export enum uploadTaskSpecialStatus {
uploading = 'uploading',
uploaded = 'uploaded'
}
export enum downloadTaskSpecialStatus {
downloading = 'downloading',
downloaded = 'downloaded',
}
export type uploadTaskStatus = commonTaskStatus | uploadTaskSpecialStatus
type downloadTaskStatus = commonTaskStatus | downloadTaskSpecialStatus
export interface IUploadTask {
id: string
progress: number
status: uploadTaskStatus
sourceFilePath: string
sourceFileName: string
targetFilePath: string
targetFileBucket?: string
response?: any
cancelToken?: string
timeConsuming?: number
alias?: string
[other: string]: any
}
export interface IDownloadTask {
id: string
progress: number
status: downloadTaskStatus
sourceFileUrl?: string
sourceFileName?: string
sourceConfig?: IStringKeyMap
targetFilePath?: string
response?: any
cancelToken?: string
timeConsuming?: number
reseumConfig?: IStringKeyMap
alias?: string
[other: string]: any
}
class UpDownTaskQueue {
/* eslint-disable */
private static instance: UpDownTaskQueue
/* eslint-enable */
private uploadTaskQueue = <IUploadTask[]>[]
private downloadTaskQueue = <IDownloadTask[]>[]
private persistPath = path.join(app.getPath('userData'), 'UpDownTaskQueue.json')
private constructor () {
this.restore()
}
public static getInstance () {
if (!UpDownTaskQueue.instance) {
UpDownTaskQueue.instance = new UpDownTaskQueue()
}
return UpDownTaskQueue.instance
}
getUploadTaskQueue () {
return UpDownTaskQueue.getInstance().uploadTaskQueue
}
getDownloadTaskQueue () {
return UpDownTaskQueue.getInstance().downloadTaskQueue
}
getUploadTask (taskId: string) {
return UpDownTaskQueue.getInstance().uploadTaskQueue.find(item => item.id === taskId)
}
getAllUploadTask () {
return UpDownTaskQueue.getInstance().uploadTaskQueue
}
addUploadTask (task: IUploadTask) {
UpDownTaskQueue.getInstance().uploadTaskQueue.push(task)
}
updateUploadTask (task: Partial<IUploadTask>) {
const taskIndex = UpDownTaskQueue.getInstance().uploadTaskQueue.findIndex(item => item.id === task.id)
if (taskIndex !== -1) {
const taskKeys = Object.keys(task)
taskKeys.forEach(key => {
if (key !== 'id') {
UpDownTaskQueue.getInstance().uploadTaskQueue[taskIndex][key] = task[key]
}
})
}
}
removeUploadTask (taskId: string) {
const taskIndex = UpDownTaskQueue.getInstance().uploadTaskQueue.findIndex(item => item.id === taskId)
if (taskIndex !== -1) {
UpDownTaskQueue.getInstance().uploadTaskQueue.splice(taskIndex, 1)
}
}
removeDownloadTask (taskId: string) {
const taskIndex = UpDownTaskQueue.getInstance().downloadTaskQueue.findIndex(item => item.id === taskId)
if (taskIndex !== -1) {
UpDownTaskQueue.getInstance().downloadTaskQueue.splice(taskIndex, 1)
}
}
getDownloadTask (taskId: string) {
return UpDownTaskQueue.getInstance().downloadTaskQueue.find(item => item.id === taskId)
}
getAllDownloadTask () {
return UpDownTaskQueue.getInstance().downloadTaskQueue
}
addDownloadTask (task: IDownloadTask) {
UpDownTaskQueue.getInstance().downloadTaskQueue.push(task)
}
updateDownloadTask (task: Partial<IDownloadTask>) {
const taskIndex = UpDownTaskQueue.getInstance().downloadTaskQueue.findIndex(item => item.id === task.id)
if (taskIndex !== -1) {
const taskKeys = Object.keys(task)
taskKeys.forEach(key => {
if (key !== 'id') {
UpDownTaskQueue.getInstance().downloadTaskQueue[taskIndex][key] = task[key]
}
})
}
}
clearUploadTaskQueue () {
UpDownTaskQueue.getInstance().uploadTaskQueue = []
}
removeUploadedTask () {
UpDownTaskQueue.getInstance().uploadTaskQueue = UpDownTaskQueue.getInstance().uploadTaskQueue.filter(item => item.status !== uploadTaskSpecialStatus.uploaded && item.status !== commonTaskStatus.canceled && item.status !== commonTaskStatus.failed)
}
removeDownloadedTask () {
UpDownTaskQueue.getInstance().downloadTaskQueue = UpDownTaskQueue.getInstance().downloadTaskQueue.filter(item => item.status !== downloadTaskSpecialStatus.downloaded && item.status !== commonTaskStatus.canceled && item.status !== commonTaskStatus.failed)
}
clearDownloadTaskQueue () {
UpDownTaskQueue.getInstance().downloadTaskQueue = []
}
clearAllTaskQueue () {
this.clearUploadTaskQueue()
this.clearDownloadTaskQueue()
}
persist () {
try {
this.checkPersistPath()
fs.writeFileSync(this.persistPath, JSON.stringify({
uploadTaskQueue: this.uploadTaskQueue,
downloadTaskQueue: this.downloadTaskQueue
}))
} catch (e) {
console.log(e)
}
}
private restore () {
try {
this.checkPersistPath()
const persistData = JSON.parse(fs.readFileSync(this.persistPath, { encoding: 'utf-8' }))
this.uploadTaskQueue = persistData.uploadTaskQueue
this.downloadTaskQueue = persistData.downloadTaskQueue
} catch (e) {
this.uploadTaskQueue = []
this.downloadTaskQueue = []
}
}
private checkPersistPath () {
if (!fs.existsSync(this.persistPath)) {
fs.writeFileSync(this.persistPath, JSON.stringify({
uploadTaskQueue: this.uploadTaskQueue,
downloadTaskQueue: this.downloadTaskQueue
}))
}
try {
JSON.parse(fs.readFileSync(this.persistPath, { encoding: 'utf-8' }))
} catch (e) {
fs.writeFileSync(this.persistPath, JSON.stringify({
uploadTaskQueue: this.uploadTaskQueue,
downloadTaskQueue: this.downloadTaskQueue
}))
}
}
}
export default UpDownTaskQueue
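// Editor's note: illustrative usage sketch, not part of this commit. It shows the
// singleton access pattern and the `${bucket}-${region}-${key}-${filePath}` id
// convention used by the uploaders in this commit; the concrete values are hypothetical.
//
// const queue = UpDownTaskQueue.getInstance()
// const id = 'examplebucket-ap-shanghai-images/a.png-/tmp/a.png'
// queue.addUploadTask({
//   id,
//   progress: 0,
//   status: commonTaskStatus.queuing,
//   sourceFileName: 'a.png',
//   sourceFilePath: '/tmp/a.png',
//   targetFilePath: 'images/a.png'
// })
// queue.updateUploadTask({ id, progress: 100, status: uploadTaskSpecialStatus.uploaded })
// queue.persist() // write both queues to <userData>/UpDownTaskQueue.json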

View File

@ -0,0 +1,3 @@
export const PICLIST_MANAGE_GET_CONFIG = 'PICLIST_MANAGE_GET_CONFIG'
export const PICLIST_MANAGE_SAVE_CONFIG = 'PICLIST_MANAGE_SAVE_CONFIG'
export const PICLIST_MANAGE_REMOVE_CONFIG = 'PICLIST_MANAGE_REMOVE_CONFIG'

View File

@ -0,0 +1,142 @@
import manageCoreIPC from './manageCoreIPC'
import { ManageApi } from '../manageApi'
import { ipcMain, IpcMainInvokeEvent, dialog, app, shell } from 'electron'
import UpDownTaskQueue from '../datastore/upDownTaskQueue'
import { downloadFileFromUrl } from '../utils/common'
import path from 'path'
import fs from 'fs-extra'
export const manageIpcList = {
listen () {
manageCoreIPC.listen()
ipcMain.handle('getBucketList', async (_evt: IpcMainInvokeEvent, currentPicBed: string) => {
const manage = new ManageApi(currentPicBed)
return manage.getBucketList()
})
ipcMain.handle('createBucket', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.createBucket(param)
})
ipcMain.handle('getBucketFileList', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.getBucketFileList(param)
})
ipcMain.handle('getBucketDomain', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
const result = await manage.getBucketDomain(param)
return result
})
ipcMain.handle('setBucketAclPolicy', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.setBucketAclPolicy(param)
})
ipcMain.handle('renameBucketFile', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.renameBucketFile(param)
})
ipcMain.handle('deleteBucketFile', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.deleteBucketFile(param)
})
ipcMain.handle('deleteBucketFolder', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.deleteBucketFolder(param)
})
ipcMain.on('getBucketListBackstage', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.getBucketListBackstage(param)
})
ipcMain.handle('openFileSelectDialog', async () => {
const res = await dialog.showOpenDialog({
properties: ['openFile', 'multiSelections']
})
if (res.canceled) {
return []
} else {
return res.filePaths
}
})
ipcMain.handle('getPreSignedUrl', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.getPreSignedUrl(param)
})
ipcMain.handle('getUploadTaskList', async () => {
return UpDownTaskQueue.getInstance().getAllUploadTask()
})
ipcMain.handle('getDownloadTaskList', async () => {
return UpDownTaskQueue.getInstance().getAllDownloadTask()
})
ipcMain.on('uploadBucketFile', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.uploadBucketFile(param)
})
ipcMain.on('downloadBucketFile', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.downloadBucketFile(param)
})
ipcMain.handle('createBucketFolder', async (_evt: IpcMainInvokeEvent, currentPicBed: string, param: IStringKeyMap) => {
const manage = new ManageApi(currentPicBed)
return manage.createBucketFolder(param)
})
ipcMain.on('deleteUploadedTask', async () => {
UpDownTaskQueue.getInstance().removeUploadedTask()
})
ipcMain.on('deleteAllUploadedTask', async () => {
UpDownTaskQueue.getInstance().clearUploadTaskQueue()
})
ipcMain.on('deleteDownloadedTask', async () => {
UpDownTaskQueue.getInstance().removeDownloadedTask()
})
ipcMain.on('deleteAllDownloadedTask', async () => {
UpDownTaskQueue.getInstance().clearDownloadTaskQueue()
})
ipcMain.handle('selectDownloadFolder', async () => {
const res = await dialog.showOpenDialog({
properties: ['openDirectory']
})
return res.filePaths[0]
})
ipcMain.handle('getDefaultDownloadFolder', async () => {
return app.getPath('downloads')
})
ipcMain.on('OpenDownloadedFolder', async (_evt: IpcMainInvokeEvent, path: string | undefined) => {
if (path) {
shell.showItemInFolder(path)
} else {
shell.openPath(app.getPath('downloads'))
}
})
ipcMain.on('OpenLocalFile', async (_evt: IpcMainInvokeEvent, fullPath: string) => {
fs.existsSync(fullPath) ? shell.showItemInFolder(fullPath) : shell.openPath(path.dirname(fullPath))
})
ipcMain.handle('downloadFileFromUrl', async (_evt: IpcMainInvokeEvent, urls: string[]) => {
const res = await downloadFileFromUrl(urls)
return res
})
}
}
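// Editor's note: illustrative sketch, not part of this commit. The renderer side is
// not shown in this diff; calls like the following (channel names taken from the
// handlers above, arguments hypothetical) are assumed:
//
// const buckets = await ipcRenderer.invoke('getBucketList', 'tcyun')
// ipcRenderer.send('uploadBucketFile', 'tcyun', { fileArray: [/* items as documented in the APIs */] })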

View File

@ -0,0 +1,35 @@
import {
IpcMainEvent,
ipcMain
} from 'electron'
import getManageApi from '../Main'
import { PICLIST_MANAGE_GET_CONFIG, PICLIST_MANAGE_SAVE_CONFIG, PICLIST_MANAGE_REMOVE_CONFIG } from './constants'
const manageApi = getManageApi()
const handleManageGetConfig = () => {
ipcMain.on(PICLIST_MANAGE_GET_CONFIG, (event: IpcMainEvent, key: string | undefined, callbackId: string) => {
const result = manageApi.getConfig(key)
event.sender.send(PICLIST_MANAGE_GET_CONFIG, result, callbackId)
})
}
const handleManageSaveConfig = () => {
ipcMain.on(PICLIST_MANAGE_SAVE_CONFIG, (_event: IpcMainEvent, data: any) => {
manageApi.saveConfig(data)
})
}
const handleManageRemoveConfig = () => {
ipcMain.on(PICLIST_MANAGE_REMOVE_CONFIG, (_event: IpcMainEvent, key: string, propName: string) => {
manageApi.removeConfig(key, propName)
})
}
export default {
listen () {
handleManageGetConfig()
handleManageSaveConfig()
handleManageRemoveConfig()
}
}
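// Editor's note: illustrative sketch, not part of this commit. The renderer
// counterpart is not in this diff; the assumed round trip for
// PICLIST_MANAGE_GET_CONFIG looks like this, where callbackId is any unique string
// the renderer picks:
//
// ipcRenderer.send(PICLIST_MANAGE_GET_CONFIG, 'picBed.tcyun', callbackId)
// ipcRenderer.on(PICLIST_MANAGE_GET_CONFIG, (_event, result, returnedId) => {
//   if (returnedId === callbackId) { /* use result */ }
// })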

View File

@ -0,0 +1,529 @@
import fs from 'fs-extra'
import path from 'path'
import { EventEmitter } from 'events'
import { managePathChecker } from './datastore/dbChecker'
import {
ManageApiType,
ManageConfigType,
ManageError,
PicBedMangeConfig
} from '~/universal/types/manage'
import ManageDB from './datastore/db'
import { ManageLogger } from './utils/logger'
import { get, set, unset } from 'lodash'
import { homedir } from 'os'
import { isInputConfigValid, formatError } from './utils/common'
import API from './apis/api'
import windowManager from 'apis/app/window/windowManager'
import { IWindowList } from '#/types/enum'
import { ipcMain } from 'electron'
export class ManageApi extends EventEmitter implements ManageApiType {
private _config!: Partial<ManageConfigType>
private db!: ManageDB
currentPicBed: string
configPath: string
baseDir!: string
logger: ManageLogger
currentPicBedConfig: PicBedMangeConfig
constructor (currentPicBed: string = '') {
super()
this.currentPicBed = currentPicBed || (this.getConfig('currentPicBed') ?? 'placeholder')
this.configPath = managePathChecker()
this.initConfigPath()
this.logger = new ManageLogger(this)
this.initconfig()
this.currentPicBedConfig = this.getPicBedConfig(this.currentPicBed)
}
getMsgParam (method: string) {
return {
class: 'ManageApi',
method,
picbedName: this.currentPicBedConfig.picBedName
}
}
errorMsg (err: any, param: IStringKeyMap) {
this.logger.error(formatError(err, param))
}
createClient () {
const name = this.currentPicBedConfig.picBedName
switch (name) {
case 'tcyun':
return new API.TcyunApi(this.currentPicBedConfig.secretId, this.currentPicBedConfig.secretKey, this.logger)
case 'aliyun':
return new API.AliyunApi(this.currentPicBedConfig.accessKeyId, this.currentPicBedConfig.accessKeySecret, this.logger)
case 'qiniu':
return new API.QiniuApi(this.currentPicBedConfig.accessKey, this.currentPicBedConfig.secretKey, this.logger)
case 'upyun':
return new API.UpyunApi(this.currentPicBedConfig.bucketName, this.currentPicBedConfig.operator, this.currentPicBedConfig.password, this.logger)
case 'smms':
return new API.SmmsApi(this.currentPicBedConfig.token, this.logger)
case 'github':
return new API.GithubApi(this.currentPicBedConfig.token, this.currentPicBedConfig.githubUsername, this.currentPicBedConfig.proxy, this.logger)
case 'imgur':
return new API.ImgurApi(this.currentPicBedConfig.imgurUserName, this.currentPicBedConfig.accessToken, this.currentPicBedConfig.proxy, this.logger)
default:
return {} as any
}
}
private getPicBedConfig (picBedName: string): PicBedMangeConfig {
return this.getConfig<PicBedMangeConfig>(`picBed.${picBedName}`)
}
private initConfigPath (): void {
if (this.configPath === '') {
this.configPath = `${homedir()}/.piclist/manage.json`
}
if (path.extname(this.configPath).toUpperCase() !== '.JSON') {
this.configPath = ''
throw Error('The configuration file only supports JSON format.')
}
this.baseDir = path.dirname(this.configPath)
const exist = fs.pathExistsSync(this.configPath)
if (!exist) {
fs.ensureFileSync(this.configPath)
}
}
private initconfig (): void {
this.db = new ManageDB(this)
this._config = this.db.read(true) as ManageConfigType
}
getConfig<T> (name?: string): T {
if (!name) {
return this._config as unknown as T
} else {
return get(this._config, name)
}
}
saveConfig (config: IStringKeyMap): void {
if (!isInputConfigValid(config)) {
this.logger.warn(
'the format of config is invalid, please provide object'
)
return
}
this.setConfig(config)
this.db.saveConfig(config)
}
removeConfig (key: string, propName: string): void {
if (!key || !propName) {
return
}
this.unsetConfig(key, propName)
this.db.unset(key, propName)
}
setConfig (config: IStringKeyMap): void {
if (!isInputConfigValid(config)) {
this.logger.warn(
'the format of config is invalid, please provide object'
)
return
}
Object.keys(config).forEach((name: string) => {
set(this._config, name, config[name])
})
}
unsetConfig (key: string, propName: string): void {
if (!key || !propName) return
unset(this.getConfig(key), propName)
}
async getBucketList (
param?: IStringKeyMap | undefined
): Promise<any> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'github':
case 'imgur':
try {
client = this.createClient()
return await client.getBucketList()
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('getBucketList'))
return []
}
case 'upyun':
return [{
Name: this.currentPicBedConfig.bucketName,
Location: 'upyun',
CreationDate: new Date().toISOString()
}]
case 'smms':
return [{
Name: 'smms',
Location: 'smms',
CreationDate: new Date().toISOString()
}]
default:
console.log(param)
return []
}
}
async getBucketInfo (
param?: IStringKeyMap | undefined
): Promise<IStringKeyMap | ManageError> {
console.log(param)
return {}
}
async getBucketDomain (
param: IStringKeyMap
): Promise<IStringKeyMap | ManageError> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'github':
try {
client = this.createClient() as any
return await client.getBucketDomain(param)
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('getBucketDomain'))
return []
}
case 'upyun':
return [this.currentPicBedConfig.customUrl]
case 'smms':
return ['https://smms.app']
case 'imgur':
return ['https://imgur.com']
default:
return []
}
}
async createBucket (
param?: IStringKeyMap
): Promise<boolean> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
try {
client = this.createClient() as any
return await client.createBucket(param!)
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('createBucket'))
return false
}
default:
return false
}
}
async deleteBucket (
param?: IStringKeyMap
): Promise<boolean> {
console.log(param)
return false
}
async getOperatorList (
param?: IStringKeyMap
): Promise<string[] | ManageError> {
console.log(param)
return []
}
async addOperator (
param?: IStringKeyMap
): Promise<boolean> {
console.log(param)
return false
}
async deleteOperator (
param?: IStringKeyMap
): Promise<boolean> {
console.log(param)
return false
}
async getBucketAclPolicy (
param?: IStringKeyMap
): Promise<IStringKeyMap | ManageError> {
console.log(param)
return {}
}
async setBucketAclPolicy (
param?: IStringKeyMap
): Promise<boolean> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'qiniu':
try {
client = new API.QiniuApi(this.currentPicBedConfig.accessKey, this.currentPicBedConfig.secretKey, this.logger)
return await client.setBucketAclPolicy(param!)
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('setBucketAclPolicy'))
return false
}
default:
return false
}
}
/**
 * Load the full bucket file list in the background and stream the results to the renderer window.
* @param param
* @returns
*/
async getBucketListBackstage (
param?: IStringKeyMap
): Promise<IStringKeyMap | ManageError> {
let client
let window
const defaultResult = {
fullList: [],
success: false,
finished: true
}
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'upyun':
case 'smms':
case 'github':
case 'imgur':
try {
client = this.createClient() as any
return await client.getBucketListBackstage(param!)
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('getBucketListBackstage'))
window = windowManager.get(IWindowList.SETTING_WINDOW)!
window.webContents.send('refreshFileTransferList', defaultResult)
ipcMain.removeAllListeners('cancelLoadingFileList')
return {}
}
default:
window = windowManager.get(IWindowList.SETTING_WINDOW)!
window.webContents.send('refreshFileTransferList', defaultResult)
ipcMain.removeAllListeners('cancelLoadingFileList')
return {}
}
}
/**
 * Get one page of the bucket file list. Each returned item contains:
 *
 * key: full object path
 * fileName: file name
 * formatedTime: formatted last-modified time
 * isDir: whether the item is a folder
 * fileSize: file size
**/
async getBucketFileList (
param?: IStringKeyMap
): Promise<IStringKeyMap | ManageError> {
const defaultResponse = {
fullList: <any>[],
isTruncated: false,
nextMarker: '',
success: false
}
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'upyun':
case 'smms':
try {
client = this.createClient()
return await client.getBucketFileList(param!)
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('getBucketFileList'))
return defaultResponse
}
default:
return defaultResponse
}
}
async deleteBucketFile (
param?: IStringKeyMap
): Promise<boolean> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'upyun':
case 'smms':
case 'github':
case 'imgur':
try {
client = this.createClient() as any
const res = await client.deleteBucketFile(param!)
return res
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('deleteBucketFile'))
return false
}
default:
return false
}
}
async deleteBucketFolder (
param?: IStringKeyMap
): Promise<boolean> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'upyun':
case 'github':
try {
client = this.createClient() as any
return await client.deleteBucketFolder(param!)
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('deleteBucketFolder'))
return false
}
default:
return false
}
}
async renameBucketFile (
param?: IStringKeyMap
): Promise<boolean> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'upyun':
try {
client = this.createClient() as any
return await client.renameBucketFile(param!)
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('renameBucketFile'))
return false
}
default:
return false
}
}
async downloadBucketFile (
param?: IStringKeyMap
): Promise<boolean> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'upyun':
case 'smms':
case 'github':
case 'imgur':
try {
client = this.createClient() as any
const res = await client.downloadBucketFile(param!)
return res
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('downloadBucketFile'))
return false
}
default:
return false
}
}
async copyMoveBucketFile (
param?: IStringKeyMap
): Promise<boolean> {
console.log(param)
return false
}
async createBucketFolder (
param?: IStringKeyMap
): Promise<boolean> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'upyun':
case 'github':
try {
client = this.createClient() as any
return await client.createBucketFolder(param!)
} catch (error) {
this.errorMsg(error, this.getMsgParam('createBucketFolder'))
return false
}
default:
return false
}
}
async uploadBucketFile (
param?: IStringKeyMap
): Promise<boolean> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'upyun':
case 'smms':
case 'github':
case 'imgur':
try {
client = this.createClient() as any
return await client.uploadBucketFile(param!)
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('uploadBucketFile'))
return false
}
default:
return false
}
}
async getPreSignedUrl (
param?: IStringKeyMap
): Promise<string> {
let client
switch (this.currentPicBedConfig.picBedName) {
case 'tcyun':
case 'aliyun':
case 'qiniu':
case 'github':
try {
client = this.createClient() as any
return await client.getPreSignedUrl(param!)
} catch (error: any) {
this.errorMsg(error, this.getMsgParam('getPreSignedUrl'))
return 'error'
}
default:
return 'error'
}
}
}
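// Editor's note: illustrative usage sketch, not part of this commit. ManageApi
// dispatches to the per-picbed API classes via createClient(), keyed by
// picBed.<name>.picBedName in manage.json; the picbed name matches a case above,
// the bucket parameters are hypothetical.
//
// const manage = new ManageApi('tcyun')
// const buckets = await manage.getBucketList()
// const page = await manage.getBucketFileList({
//   bucketName: 'examplebucket-1250000000',
//   prefix: '/images/',
//   marker: '',
//   itemsPerPage: 50
// })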

View File

@ -0,0 +1,272 @@
import fs from 'fs-extra'
import path from 'path'
import mime from 'mime-types'
import axios from 'axios'
import { app } from 'electron'
import crypto from 'crypto'
import got, { RequestError } from 'got'
import { Stream } from 'stream'
import { promisify } from 'util'
import UpDownTaskQueue, { uploadTaskSpecialStatus, commonTaskStatus, downloadTaskSpecialStatus } from '../datastore/upDownTaskQueue'
import { ManageLogger } from '../utils/logger'
import { formatHttpProxy } from '@/manage/utils/common'
import { HttpsProxyAgent, HttpProxyAgent } from 'hpagent'
export const getFSFile = async (
filePath: string,
stream: boolean = false
): Promise<IStringKeyMap> => {
try {
return {
extension: path.extname(filePath),
fileName: path.basename(filePath),
buffer: stream
? fs.createReadStream(filePath)
: await fs.readFile(filePath),
success: true
}
} catch (e) {
return {
success: false
}
}
}
export const isInputConfigValid = (config: any): boolean => {
if (
typeof config === 'object' &&
!Array.isArray(config) &&
Object.keys(config).length > 0
) {
return true
}
return false
}
export const getFileMimeType = (filePath: string): string => {
return mime.lookup(filePath) || 'application/octet-stream'
}
const checkTempFolderExist = async () => {
const tempPath = path.join(app.getPath('downloads'), 'piclistTemp')
try {
await fs.access(tempPath)
} catch (e) {
await fs.mkdir(tempPath)
}
}
export const downloadFileFromUrl = async (urls: string[]) => {
const tempPath = path.join(app.getPath('downloads'), 'piclistTemp')
await checkTempFolderExist()
const result = [] as string[]
for (let i = 0; i < urls.length; i++) {
const finishDownload = promisify(Stream.finished)
const fileName = path.basename(urls[i]).split('?')[0]
const filePath = path.join(tempPath, fileName)
const writer = fs.createWriteStream(filePath)
const res = await axios({
method: 'get',
url: urls[i],
responseType: 'stream'
})
res.data.pipe(writer)
await finishDownload(writer)
result.push(filePath)
}
return result
}
export const clearTempFolder = () => fs.emptyDirSync(path.join(app.getPath('downloads'), 'piclistTemp'))
export const md5 = (str: string, code: 'hex' | 'base64'): string => crypto.createHash('md5').update(str).digest(code)
export const hmacSha1Base64 = (secretKey: string, stringToSign: string) : string => crypto.createHmac('sha1', secretKey).update(Buffer.from(stringToSign, 'utf8')).digest('base64')
export const gotDownload = async (
instance: UpDownTaskQueue,
preSignedUrl: string,
fileStream: fs.WriteStream,
id : string,
savedFilePath: string,
logger?: ManageLogger,
param?: any,
agent: any = {}
) => {
got(
preSignedUrl,
{
timeout: {
request: 30000
},
isStream: true,
throwHttpErrors: false,
searchParams: param,
agent
}
)
.on('downloadProgress', (progress: any) => {
instance.updateDownloadTask({
id,
progress: Math.floor(progress.percent * 100),
status: downloadTaskSpecialStatus.downloading
})
})
.pipe(fileStream)
.on('finish', () => {
instance.updateDownloadTask({
id,
progress: 100,
status: downloadTaskSpecialStatus.downloaded,
finishTime: new Date().toLocaleString()
})
})
.on('error', (err: any) => {
logger && logger.error(formatError(err, { method: 'gotDownload' }))
fs.remove(savedFilePath)
instance.updateDownloadTask({
id,
progress: 0,
status: commonTaskStatus.failed,
response: formatError(err, { method: 'gotDownload' }),
finishTime: new Date().toLocaleString()
})
})
}
export const gotUpload = async (
instance: UpDownTaskQueue,
url: string,
method: 'PUT' | 'POST',
body: any,
headers: any,
id: string,
logger?: ManageLogger,
timeout: number = 30000,
throwHttpErrors: boolean = false,
agent: any = {}
) => {
got(
url,
{
headers,
method,
body,
timeout: {
request: timeout
},
throwHttpErrors,
agent
}
)
.on('uploadProgress', (progress: any) => {
instance.updateUploadTask({
id,
progress: Math.floor(progress.percent * 100),
status: uploadTaskSpecialStatus.uploading
})
})
.then((res: any) => {
instance.updateUploadTask({
id,
progress: res && (res.statusCode === 200 || res.statusCode === 201) ? 100 : 0,
status: res && (res.statusCode === 200 || res.statusCode === 201) ? uploadTaskSpecialStatus.uploaded : commonTaskStatus.failed,
finishTime: new Date().toLocaleString()
})
})
.catch((err: any) => {
logger && logger.error(formatError(err, { method: 'gotUpload' }))
instance.updateUploadTask({
id,
progress: 0,
response: formatError(err, { method: 'gotUpload' }),
status: commonTaskStatus.failed,
finishTime: new Date().toLocaleString()
})
})
}
export const formatError = (err: any, params:IStringKeyMap) => {
if (err instanceof RequestError) {
return {
...params,
message: err.message ?? '',
name: 'RequestError',
code: err.code,
stack: err.stack ?? '',
timings: err.timings ?? {}
}
} else if (err instanceof Error) {
return {
...params,
name: err.name ?? '',
message: err.message ?? '',
stack: err.stack ?? ''
}
} else {
if (typeof err === 'object') {
return JSON.stringify(err) + JSON.stringify(params)
} else {
return String(err) + JSON.stringify(params)
}
}
}
export const trimPath = (filePath: string) => filePath.replace(/^\/+|\/+$/g, '').replace(/\/+/g, '/')
export const getAgent = (proxy:any, https: boolean = true) => {
const formatProxy = formatHttpProxy(proxy, 'string') as any
const opt = {
keepAlive: true,
keepAliveMsecs: 1000,
maxSockets: 256,
maxFreeSockets: 256,
scheduling: 'lifo' as 'lifo' | 'fifo' | undefined,
proxy: formatProxy ? formatProxy.replace('127.0.0.1', 'localhost') : undefined
}
if (https) {
return formatProxy
? {
https: new HttpsProxyAgent(opt)
}
: {}
} else {
return formatProxy
? {
http: new HttpProxyAgent(opt)
}
: {}
}
}
export function getOptions (
method?: string,
headers?: IStringKeyMap,
searchParams?: IStringKeyMap,
responseType?: string,
body?: any,
timeout?: number,
proxy?: any
) {
const options = {
method: method?.toUpperCase(),
headers,
searchParams,
agent: getAgent(proxy),
timeout: {
request: timeout || 30000
},
body,
throwHttpErrors: false,
responseType
} as IStringKeyMap
Object.keys(options).forEach(key => {
options[key] === undefined && delete options[key]
})
return options
}
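// Editor's note: illustrative sketch, not part of this commit. It shows how getOptions
// feeds a got request; the URL, headers and timeout are hypothetical, and no proxy is
// passed here.
//
// const options = getOptions('GET', { Accept: 'application/json' }, { page: '1' }, 'json', undefined, 10000)
// const res: any = await got('https://api.example.com/v1/buckets', options)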

View File

@ -0,0 +1,68 @@
const AliyunAreaCodeName : IStringKeyMap = {
'oss-cn-hangzhou': '华东1杭州',
'oss-cn-shanghai': '华东2上海',
'oss-cn-nanjing': '华东5南京本地地域',
'oss-cn-fuzhou': '华东6福州本地地域',
'oss-cn-qingdao': '华北1青岛',
'oss-cn-beijing': '华北2北京',
'oss-cn-zhangjiakou': '华北3张家口',
'oss-cn-huhehaote': '华北5呼和浩特',
'oss-cn-wulanchabu': '华北6乌兰察布',
'oss-cn-shenzhen': '华南1深圳',
'oss-cn-heyuan': '华南2河源',
'oss-cn-guangzhou': '华南3广州',
'oss-cn-chengdu': '西南1成都',
'oss-cn-hongkong': '中国(香港)',
'oss-us-west-1': '美国(硅谷)',
'oss-us-east-1': '美国(弗吉尼亚)',
'oss-ap-northeast-1': '日本(东京)',
'oss-ap-northeast-2': '韩国(首尔)',
'oss-ap-southeast-1': '新加坡',
'oss-ap-southeast-2': '澳大利亚(悉尼)',
'oss-ap-southeast-3': '马来西亚(吉隆坡)',
'oss-ap-southeast-5': '印度尼西亚(雅加达)',
'oss-ap-southeast-6': '菲律宾(马尼拉)',
'oss-ap-southeast-7': '泰国(曼谷)',
'oss-ap-south-1': '印度(孟买)',
'oss-eu-central-1': '德国(法兰克福)',
'oss-eu-west-1': '英国(伦敦)',
'oss-me-east-1': '阿联酋(迪拜)'
}
const QiniuAreaCodeName : IStringKeyMap = {
z0: '华东-浙江',
'cn-east-2': '华东 浙江2',
z1: '华北-河北',
z2: '华南-广东',
na0: '北美-洛杉矶',
as0: '亚太-新加坡',
'ap-northeast-1': '亚太-首尔'
}
const TencentAreaCodeName : IStringKeyMap = {
'ap-beijing-1': '北京一区',
'ap-beijing': '北京',
'ap-nanjing': '南京',
'ap-shanghai': '上海',
'ap-guangzhou': '广州',
'ap-chengdu': '成都',
'ap-chongqing': '重庆',
'ap-shenzhen-fsi': '深圳金融',
'ap-shanghai-fsi': '上海金融',
'ap-beijing-fsi': '北京金融',
'ap-hongkong': '香港',
'ap-singapore': '新加坡',
'ap-mumbai': '孟买',
'ap-jakarta': '雅加达',
'ap-seoul': '首尔',
'ap-bangkok': '曼谷',
'ap-tokyo': '东京',
'na-siliconvalley': '硅谷(美西)',
'na-ashburn': '弗吉尼亚(美东)',
'na-toronto': '多伦多',
'sa-saopaulo': '圣保罗',
'eu-frankfurt': '法兰克福',
'eu-moscow': '莫斯科'
}
export { AliyunAreaCodeName, QiniuAreaCodeName, TencentAreaCodeName }

View File

@ -0,0 +1,165 @@
import chalk from 'chalk'
import dayjs from 'dayjs'
import fs from 'fs-extra'
import path from 'path'
import util from 'util'
import { ILogType } from '#/types/enum'
import { ILogColor, ILogger } from 'piclist/dist/types'
import { ManageApiType, Undefinable } from '~/universal/types/manage'
import { enforceNumber, isDev } from '#/utils/common'
export class ManageLogger implements ILogger {
private readonly level = {
[ILogType.success]: 'green',
[ILogType.info]: 'blue',
[ILogType.warn]: 'yellow',
[ILogType.error]: 'red'
}
private readonly ctx: ManageApiType
private logLevel!: string
private logPath!: string
constructor (ctx: ManageApiType) {
this.ctx = ctx
}
private handleLog (type: ILogType, ...msg: ILogArgvTypeWithError[]): void {
const logHeader = chalk[this.level[type] as ILogColor](
`[PicList ${type.toUpperCase()}]`
)
console.log(logHeader, ...msg)
this.logLevel = this.ctx.getConfig('settings.logLevel')
this.logPath =
this.ctx.getConfig<Undefinable<string>>('settings.logPath') ||
path.join(this.ctx.baseDir, './manage.log')
setTimeout(() => {
try {
const result = this.checkLogFileIsLarge(this.logPath)
if (result.isLarge) {
const warningMsg = `Log file is too large (> ${
result.logFileSizeLimit! / 1024 / 1024 || '10'
} MB), recreate log file`
console.log(chalk.yellow('[PicList WARN]:'), warningMsg)
this.recreateLogFile(this.logPath)
msg.unshift(warningMsg)
}
this.handleWriteLog(this.logPath, type, ...msg)
} catch (e) {
console.error('[PicList Error] on checking log file size', e)
}
}, 0)
}
private checkLogFileIsLarge (logPath: string): {
isLarge: boolean
logFileSize?: number
logFileSizeLimit?: number
} {
if (fs.existsSync(logPath)) {
const logFileSize = fs.statSync(logPath).size
const logFileSizeLimit =
enforceNumber(
this.ctx.getConfig<Undefinable<number>>(
'settings.logFileSizeLimit'
) || 10
) *
1024 *
1024
return {
isLarge: logFileSize > logFileSizeLimit,
logFileSize,
logFileSizeLimit
}
}
fs.ensureFileSync(logPath)
return {
isLarge: false
}
}
private recreateLogFile (logPath: string): void {
if (fs.existsSync(logPath)) {
fs.unlinkSync(logPath)
fs.createFileSync(logPath)
}
}
private handleWriteLog (
logPath: string,
type: string,
...msg: ILogArgvTypeWithError[]
): void {
try {
if (this.checkLogLevel(type, this.logLevel)) {
let log = `${dayjs().format(
'YYYY-MM-DD HH:mm:ss'
)} [PicList ${type.toUpperCase()}] `
msg.forEach((item: ILogArgvTypeWithError) => {
if (item instanceof Error && type === 'error') {
log += `\n------Error Stack Begin------\n${util.format(
item?.stack
)}\n-------Error Stack End------- `
} else {
if (typeof item === 'object') {
if (item?.stack) {
log = log + `\n------Error Stack Begin------\n${util.format(
item.stack
)}\n-------Error Stack End------- `
}
item = JSON.stringify(item, (key, value) => {
if (key === 'stack') {
return undefined
}
return value
}, 2)
}
log += `${item as string} `
}
})
log += '\n'
fs.appendFileSync(logPath, log)
}
} catch (e) {
console.error('[PicList Error] on writing log file', e)
}
}
private checkLogLevel (
type: string,
level: undefined | string | string[]
): boolean {
if (level === undefined || level === 'all') {
return true
}
if (Array.isArray(level)) {
return level.some((item: string) => item === type || item === 'all')
} else {
return type === level
}
}
success (...msq: ILogArgvType[]): void {
return this.handleLog(ILogType.success, ...msq)
}
info (...msq: ILogArgvType[]): void {
return this.handleLog(ILogType.info, ...msq)
}
error (...msq: ILogArgvTypeWithError[]): void {
return this.handleLog(ILogType.error, ...msq)
}
warn (...msq: ILogArgvType[]): void {
return this.handleLog(ILogType.warn, ...msq)
}
debug (...msq: ILogArgvType[]): void {
if (isDev) {
this.handleLog(ILogType.info, ...msq)
}
}
}
export default ManageLogger
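// Editor's note: illustrative sketch, not part of this commit. The logger is created
// by ManageApi with itself as ctx; log level and file come from the
// 'settings.logLevel' and 'settings.logPath' config keys read above.
//
// const logger = new ManageLogger(manageApiInstance) // manageApiInstance: ManageApiType
// logger.info('[manage] bucket list refreshed')
// logger.error(new Error('example failure')) // appends an error-stack block to manage.log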

View File

@ -2,7 +2,7 @@ import { DBStore } from '@picgo/store'
import ConfigStore from '~/main/apis/core/datastore'
import path from 'path'
import fse from 'fs-extra'
import { PicGo as PicGoCore } from 'picgo'
import { PicGo as PicGoCore } from 'piclist'
import { T } from '~/main/i18n'
// from v2.1.2
const updateShortKeyFromVersion212 = (db: typeof ConfigStore, shortKeyConfig: IShortKeyConfigs | IOldShortKeyConfigs) => {

View File

@ -48,7 +48,7 @@ class Server {
if (request.method === 'POST') {
if (!routers.getHandler(request.url!)) {
logger.warn(`[PicGo Server] don't support [${request.url}] url`)
logger.warn(`[PicList Server] don't support [${request.url}] url`)
handleResponse({
response,
statusCode: 404,
@ -66,7 +66,7 @@ class Server {
try {
postObj = (body === '') ? {} : JSON.parse(body)
} catch (err: any) {
logger.error('[PicGo Server]', err)
logger.error('[PicList Server]', err)
return handleResponse({
response,
body: {
@ -75,7 +75,7 @@ class Server {
}
})
}
logger.info('[PicGo Server] get the request', body)
logger.info('[PicList Server] get the request', body)
const handler = routers.getHandler(request.url!)
handler!({
...postObj,
@ -84,7 +84,7 @@ class Server {
})
}
} else {
logger.warn(`[PicGo Server] don't support [${request.method}] method`)
logger.warn(`[PicList Server] don't support [${request.method}] method`)
response.statusCode = 404
response.end()
}
@ -92,7 +92,7 @@ class Server {
// port as string is a bug
private listen = (port: number | string) => {
logger.info(`[PicGo Server] is listening at ${port}`)
logger.info(`[PicList Server] is listening at ${port}`)
if (typeof port === 'string') {
port = parseInt(port, 10)
}
@ -103,7 +103,7 @@ class Server {
await axios.post(ensureHTTPLink(`${this.config.host}:${port}/heartbeat`))
this.shutdown(true)
} catch (e) {
logger.warn(`[PicGo Server] ${port} is busy, trying with port ${(port as number) + 1}`)
logger.warn(`[PicList Server] ${port} is busy, trying with port ${(port as number) + 1}`)
// fix a bug: not write an increase number to config file
// to solve the auto number problem
this.listen((port as number) + 1)
@ -122,7 +122,7 @@ class Server {
shutdown (hasStarted?: boolean) {
this.httpServer.close()
if (!hasStarted) {
logger.info('[PicGo Server] shutdown')
logger.info('[PicList Server] shutdown')
}
}

View File

@ -8,7 +8,7 @@ import { uploadChoosedFiles, uploadClipboardFiles } from 'apis/app/uploader/apis
import path from 'path'
import { dbPathDir } from 'apis/core/datastore/dbChecker'
const STORE_PATH = dbPathDir()
const LOG_PATH = path.join(STORE_PATH, 'picgo.log')
const LOG_PATH = path.join(STORE_PATH, 'piclist.log')
const errorMessage = `upload error. see ${LOG_PATH} for more detail.`
@ -22,9 +22,9 @@ router.post('/upload', async ({
try {
if (list.length === 0) {
// upload with clipboard
logger.info('[PicGo Server] upload clipboard file')
logger.info('[PicList Server] upload clipboard file')
const res = await uploadClipboardFiles()
logger.info('[PicGo Server] upload result:', res)
logger.info('[PicList Server] upload result:', res)
if (res) {
handleResponse({
response,
@ -43,7 +43,7 @@ router.post('/upload', async ({
})
}
} else {
logger.info('[PicGo Server] upload files in list')
logger.info('[PicList Server] upload files in list')
// upload with files
const pathList = list.map(item => {
return {
@ -52,7 +52,7 @@ router.post('/upload', async ({
})
const win = windowManager.getAvailableWindow()
const res = await uploadChoosedFiles(win.webContents, pathList)
logger.info('[PicGo Server] upload result', res.join(' ; '))
logger.info('[PicList Server] upload result', res.join(' ; '))
if (res.length) {
handleResponse({
response,

View File

@ -19,7 +19,7 @@ export const handleResponse = ({
body?: any
}) => {
if (body?.success === false) {
logger.warn('[PicGo Server] upload failed, see picgo.log for more detail ↑')
logger.warn('[PicList Server] upload failed, see piclist.log for more detail ↑')
}
response.writeHead(statusCode, header)
response.write(JSON.stringify(body))

View File

@ -4,7 +4,6 @@ import os from 'os'
import { dbPathChecker } from 'apis/core/datastore/dbChecker'
import yaml from 'js-yaml'
import { i18nManager } from '~/main/i18n'
// import { ILocales } from '~/universal/types/i18n'
const configPath = dbPathChecker()
const CONFIG_DIR = path.dirname(configPath)
@ -21,12 +20,12 @@ function beforeOpen () {
* macOS
*/
function resolveMacWorkFlow () {
const dest = `${os.homedir()}/Library/Services/Upload pictures with PicGo.workflow`
const dest = `${os.homedir()}/Library/Services/Upload pictures with PicList.workflow`
if (fs.existsSync(dest)) {
return true
} else {
try {
fs.copySync(path.join(__static, 'Upload pictures with PicGo.workflow'), dest)
fs.copySync(path.join(__static, 'Upload pictures with PicList.workflow'), dest)
} catch (e) {
console.log(e)
}

View File

@ -1,6 +1,6 @@
import path from 'path'
import fs from 'fs-extra'
import { Logger } from 'picgo'
import { Logger } from 'piclist'
import { isUrl } from '~/universal/utils/common'
interface IResultFileObject {
path: string

View File

@ -7,7 +7,7 @@ import { getLatestVersion } from '#/utils/getLatestVersion'
const version = pkg.version
// const releaseUrl = 'https://api.github.com/repos/Molunerfinn/PicGo/releases'
// const releaseUrlBackup = 'https://picgo-1251750343.cos.ap-chengdu.myqcloud.com'
const downloadUrl = 'https://github.com/Molunerfinn/PicGo/releases/latest'
const downloadUrl = 'https://github.com/Kuingsmile/PicList/releases/latest'
const checkVersion = async () => {
let showTip = db.get('settings.showUpdateTip')
@ -16,8 +16,7 @@ const checkVersion = async () => {
showTip = true
}
if (showTip) {
const isCheckBetaUpdate = db.get('settings.checkBetaUpdate') !== false
const res: string = await getLatestVersion(isCheckBetaUpdate)
const res: string = await getLatestVersion()
if (res !== '') {
const latest = res
const result = compareVersion2Update(version, latest)
@ -49,12 +48,6 @@ const checkVersion = async () => {
// if true -> update else return false
const compareVersion2Update = (current: string, latest: string) => {
try {
if (latest.includes('beta')) {
const isCheckBetaUpdate = db.get('settings.checkBetaUpdate') !== false
if (!isCheckBetaUpdate) {
return false
}
}
return lt(current, latest)
} catch (e) {
return false

View File

@ -8,7 +8,7 @@
import { useStore } from '@/hooks/useStore'
import { onBeforeMount, onMounted, onUnmounted } from 'vue'
import { getConfig } from './utils/dataSender'
import type { IConfig } from 'picgo'
import type { IConfig } from 'piclist'
import bus from './utils/bus'
import { FORCE_UPDATE } from '~/universal/events/constants'

View File

@ -0,0 +1,25 @@
import OSS from 'ali-oss'
export default class AliyunApi {
static async delete (configMap: IStringKeyMap): Promise<boolean> {
const { fileName, config: { accessKeyId, accessKeySecret, bucket, area, path } } = configMap
try {
const client = new OSS({
accessKeyId,
accessKeySecret,
bucket,
region: area
})
let key
if (path === '/' || !path) {
key = fileName
} else {
key = `${path.replace(/^\//, '').replace(/\/$/, '')}/${fileName}`
}
const result = await client.delete(key) as any
return result.res.status === 204
} catch (error) {
return false
}
}
}
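
The object key is built from the configured storage path plus the file name: one leading and one trailing slash are stripped from the path before joining, and an empty or '/' path puts the file at the bucket root. A minimal sketch of that rule, using a hypothetical buildKey helper that is not part of this commit:

// Hypothetical helper mirroring the key rule used by the delete methods in this commit
function buildKey (path: string | undefined, fileName: string): string {
  if (!path || path === '/') {
    return fileName
  }
  // strip one leading and one trailing slash, then join with the file name
  return `${path.replace(/^\//, '').replace(/\/$/, '')}/${fileName}`
}

// buildKey('/img/', 'a.png') -> 'img/a.png'
// buildKey('/', 'a.png')     -> 'a.png'

The GitHub, Qiniu and Upyun classes below repeat the same normalization; the Tencent COS variant additionally prefixes the key with a slash.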

View File

@ -0,0 +1,27 @@
import SmmsApi from './smms'
import TcyunApi from './tcyun'
import AliyunApi from './aliyun'
import QiniuApi from './qiniu'
import ImgurApi from './imgur'
import GithubApi from './github'
import UpyunApi from './upyun'
const apiMap: IStringKeyMap = {
smms: SmmsApi,
tcyun: TcyunApi,
aliyun: AliyunApi,
qiniu: QiniuApi,
imgur: ImgurApi,
github: GithubApi,
upyun: UpyunApi
}
export default class ALLApi {
static async delete (configMap: IStringKeyMap): Promise<boolean> {
if (apiMap[configMap.type] !== undefined) {
return await apiMap[configMap.type].delete(configMap)
} else {
return false
}
}
}
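
ALLApi is the single entry point the album view can call: it looks up the delete implementation by the picbed type stored with each image and resolves to false for any type it does not know. A usage sketch; the import path and all credential values are placeholders, not part of this commit:

import ALLApi from './allApi' // hypothetical path for the dispatcher above

async function deleteRemoteImage (): Promise<void> {
  const ok = await ALLApi.delete({
    type: 'smms',                   // one of the keys in apiMap
    hash: 'image-hash-placeholder', // identifier saved at upload time
    fileName: 'example.png',
    config: { token: 'smms-token-placeholder' }
  })
  console.log(ok ? 'remote file deleted' : 'delete failed or picbed not supported')
}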

View File

@ -0,0 +1,31 @@
import { Octokit } from '@octokit/rest'
export default class GithubApi {
static async delete (configMap: IStringKeyMap): Promise<boolean> {
const { fileName, hash, config: { repo, token, branch, path } } = configMap
const owner = repo.split('/')[0]
const repoName = repo.split('/')[1]
const octokit = new Octokit({
auth: token
})
let key
if (path === '/' || !path) {
key = fileName
} else {
key = `${path.replace(/^\//, '').replace(/\/$/, '')}/${fileName}`
}
try {
const result = await octokit.rest.repos.deleteFile({
owner,
repo: repoName,
path: key,
message: `delete ${fileName} by PicList`,
sha: hash,
branch
})
return result.status === 200
} catch (error) {
return false
}
}
}
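
The GitHub contents API deletes by blob SHA, so configMap.hash must hold the SHA recorded when the file was uploaded. If only the path is known, the SHA can be looked up first; a sketch under that assumption (getFileSha is hypothetical, not part of this commit):

import { Octokit } from '@octokit/rest'

// Look up the blob SHA of a file so it can be passed to repos.deleteFile
async function getFileSha (token: string, owner: string, repo: string, filePath: string, branch: string): Promise<string | null> {
  const octokit = new Octokit({ auth: token })
  const { data } = await octokit.rest.repos.getContent({ owner, repo, path: filePath, ref: branch })
  // getContent returns an array for a directory and a single object for a file
  return Array.isArray(data) ? null : (data as { sha: string }).sha
}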

View File

@ -0,0 +1,21 @@
import axios from 'axios'
export default class ImgurApi {
static async delete (configMap: IStringKeyMap): Promise<boolean> {
const clientId = configMap.config.clientId
const { hash } = configMap
const fullUrl = `https://api.imgur.com/3/image/${hash}`
const headers = {
Authorization: `Client-ID ${clientId}`
}
try {
const res = await axios.delete(fullUrl, {
headers,
timeout: 10000
})
return res.status === 200
} catch (error) {
return false
}
}
}

View File

@ -0,0 +1,33 @@
import Qiniu from 'qiniu'
export default class QiniuApi {
static async delete (configMap: IStringKeyMap): Promise<boolean> {
const { fileName, config: { accessKey, secretKey, bucket, path } } = configMap
const mac = new Qiniu.auth.digest.Mac(accessKey, secretKey)
const qiniuConfig = new Qiniu.conf.Config()
try {
const bucketManager = new Qiniu.rs.BucketManager(mac, qiniuConfig)
let key = ''
if (path === '/' || !path) {
key = fileName
} else {
key = `${path.replace(/^\//, '').replace(/\/$/, '')}/${fileName}`
}
const res = await new Promise((resolve, reject) => {
bucketManager.delete(bucket, key, (err, respBody, respInfo) => {
if (err) {
reject(err)
} else {
resolve({
respBody,
respInfo
})
}
})
}) as any
return res && res.respInfo.statusCode === 200
} catch (error) {
return false
}
}
}
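
BucketManager.delete is callback-based and reports two results (respBody, respInfo), which is why it is wrapped in a Promise by hand: util.promisify would resolve with respBody only and drop the status code. The same wrapper as a reusable sketch (deleteQiniuKey is hypothetical, not part of this commit):

import Qiniu from 'qiniu'

// Resolves with the HTTP status code Qiniu reports; 200 means the object was deleted
function deleteQiniuKey (bucketManager: Qiniu.rs.BucketManager, bucket: string, key: string): Promise<number> {
  return new Promise((resolve, reject) => {
    bucketManager.delete(bucket, key, (err, _respBody, respInfo) => {
      if (err) {
        reject(err)
      } else {
        resolve(respInfo.statusCode)
      }
    })
  })
}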

23
src/renderer/apis/smms.ts Normal file
View File

@ -0,0 +1,23 @@
import axios from 'axios'
export default class SmmsApi {
static async delete (configMap: IStringKeyMap): Promise<boolean> {
const { hash, config: { token } } = configMap
if (!hash || !token) {
return false
} else {
const res = await axios.get(
`https://smms.app/api/v2/delete/${hash}`, {
headers: {
Authorization: token
},
params: {
hash,
format: 'json'
},
timeout: 10000
})
return res.status === 200
}
}
}
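
Unlike the other classes in this commit, the request above is not wrapped in try/catch, so a timeout or a non-2xx response rejects instead of resolving to false. A call site can normalize that; a sketch assuming the file above is imported as shown:

import SmmsApi from './smms' // src/renderer/apis/smms.ts, shown above

async function deleteFromSmms (configMap: Record<string, any>): Promise<boolean> {
  try {
    return await SmmsApi.delete(configMap)
  } catch (error) {
    return false
  }
}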

View File

@ -0,0 +1,27 @@
import COS from 'cos-nodejs-sdk-v5'
export default class TcyunApi {
static async delete (configMap: IStringKeyMap): Promise<boolean> {
const { fileName, config: { secretId, secretKey, bucket, area, path } } = configMap
try {
const cos = new COS({
SecretId: secretId,
SecretKey: secretKey
})
let key
if (path === '/' || !path) {
key = `/${fileName}`
} else {
key = `/${path.replace(/^\//, '').replace(/\/$/, '')}/${fileName}`
}
const result = await cos.deleteObject({
Bucket: bucket,
Region: area,
Key: key
})
return result.statusCode === 204
} catch (error) {
return false
}
}
}

View File

@ -0,0 +1,22 @@
// @ts-ignore
import Upyun from 'upyun'
export default class UpyunApi {
static async delete (configMap: IStringKeyMap): Promise<boolean> {
const { fileName, config: { bucket, operator, password, path } } = configMap
try {
const service = new Upyun.Service(bucket, operator, password)
const client = new Upyun.Client(service)
let key
if (path === '/' || !path) {
key = fileName
} else {
key = `${path.replace(/^\//, '').replace(/\/$/, '')}/${fileName}`
}
const result = await client.deleteFile(key)
return result
} catch (error) {
return false
}
}
}

View File

@ -51,11 +51,11 @@
</el-icon>
<span>{{ $T('UPLOAD_AREA') }}</span>
</el-menu-item>
<el-menu-item :index="routerConfig.MANAGE_MAIN_PAGE">
<el-menu-item :index="routerConfig.MANAGE_LOGIN_PAGE">
<el-icon>
<PictureFilled />
<PieChart />
</el-icon>
<span>{{ $T('PICBEDS_MANAGE') }}</span>
<span>管理页面</span>
</el-menu-item>
<el-menu-item :index="routerConfig.GALLERY_PAGE">
<el-icon>
@ -105,8 +105,8 @@
</el-icon>
</el-col>
<el-col
:span="19"
:offset="5"
:span="21"
:offset="3"
style="height: 100%"
class="main-wrapper"
:class="{ 'darwin': os === 'darwin' }"
@ -133,7 +133,7 @@
width="70%"
top="10vh"
>
{{ $T('PICGO_SPONSOR_TEXT') }}
{{ $T('PICLIST_SPONSOR_TEXT') }}
<el-row class="support">
<el-col :span="12">
<img
@ -219,10 +219,11 @@ import {
InfoFilled,
Minus,
CirclePlus,
Close
Close,
PieChart
} from '@element-plus/icons-vue'
import { ElMessage as $message } from 'element-plus'
import { T } from '@/i18n/index'
import { T as $T } from '@/i18n/index'
import { ref, onBeforeUnmount, Ref, onBeforeMount, watch, nextTick, reactive } from 'vue'
import { onBeforeRouteUpdate, useRouter } from 'vue-router'
import QrcodeVue from 'qrcode.vue'
@ -299,18 +300,6 @@ const handleSelect = (index: string) => {
type
}
})
// if (this.$builtInPicBed.includes(picBed)) {
// this.$router.push({
// name: picBed
// })
// } else {
// this.$router.push({
// name: 'others',
// params: {
// type: picBed
// }
// })
// }
}
}
@ -332,7 +321,7 @@ function openMiniWindow () {
function handleCopyPicBedConfig () {
clipboard.writeText(picBedConfigString.value)
$message.success(T('COPY_PICBED_CONFIG_SUCCEED'))
$message.success($T('COPY_PICBED_CONFIG_SUCCEED'))
}
function getPicBeds (event: IpcRendererEvent, picBeds: IPicBedType[]) {

View File

@ -1,13 +0,0 @@
<template>
<div id="appm">
{{ test }}
</div>
</template>
<script lang="ts" setup>
const test = 'test'
</script>
<style lang="stylus">
</style>

Binary file not shown. (added, 5.0 KiB)

Binary file not shown. (added, 84 KiB)

Binary file not shown. (added, 14 KiB)

Binary file not shown. (added, 13 KiB)

Some files were not shown because too many files have changed in this diff.