mirror of
https://github.com/derrod/legendary.git
synced 2024-06-02 10:44:54 +12:00
Compare commits
465 commits
Author | SHA1 | Date | |
---|---|---|---|
7fefdc4973 | |||
96e07ff453 | |||
ac6290627c | |||
691048d481 | |||
837c166187 | |||
1841da51f0 | |||
56d439ed2d | |||
2fdacb75d3 | |||
d2963db5b2 | |||
f1d815797f | |||
591039eaf3 | |||
9131f32c22 | |||
450784283d | |||
c56a81ab64 | |||
488d14c6e0 | |||
4c765325af | |||
c6e622f3ae | |||
013f7d4bde | |||
03b21f49de | |||
bd2e7ca0cd | |||
20b121bdb9 | |||
b759d9dbb1 | |||
51377e8548 | |||
07a16f7b84 | |||
c69301212c | |||
865dd51e2b | |||
6536473063 | |||
6d7909c311 | |||
0e35b70941 | |||
e0428b497e | |||
6500ea73af | |||
96b155800a | |||
4145381b93 | |||
e26b9e60ff | |||
bdd53fb8f8 | |||
bbb19d6cb6 | |||
175168adcb | |||
8b2809779f | |||
4bed49e7e1 | |||
f97d799e87 | |||
09d39b3fe3 | |||
a70ac2d1f9 | |||
362287543b | |||
ae05b4c1e5 | |||
6b8273f983 | |||
00f025dcc9 | |||
87b01b77d8 | |||
f19a1ba69d | |||
c8a6e68bf4 | |||
2ed9557b2c | |||
da23690510 | |||
c3eb6b4fe6 | |||
032b7fc64f | |||
29086276ee | |||
4c99bf8987 | |||
6709e8aa4f | |||
4722e38081 | |||
2ffd183554 | |||
d59e973816 | |||
f80ceb50f3 | |||
cf22de2bcf | |||
ddb7e1c3ca | |||
36e6e5f08a | |||
0e23b8e4f0 | |||
85f6bd3220 | |||
9e5fbaf21a | |||
ecb405172b | |||
c053860f25 | |||
3ab31561bf | |||
66ef0f3d5e | |||
c0d67882bb | |||
338fef2fac | |||
075f446add | |||
0eec8472a4 | |||
abd3a9d496 | |||
53e2accbb0 | |||
e111ae56fc | |||
88d30322b5 | |||
b136748168 | |||
5a20f12461 | |||
f26c8ab0a1 | |||
0d23775337 | |||
d8af06c936 | |||
a73d0694f6 | |||
f9a2dae282 | |||
7a617d35f3 | |||
e5ec8e25b3 | |||
dcfdfbc520 | |||
83072d0b39 | |||
410c840aa4 | |||
9e145278d5 | |||
594e60e850 | |||
496bda3345 | |||
fc73c1d4bf | |||
f902963b1a | |||
791fb5da11 | |||
46bda313d6 | |||
06b18fe94a | |||
40748a91ba | |||
e52223c3ce | |||
a3bc07e15a | |||
b7f4a9f45a | |||
60a504edde | |||
2b71b50d5c | |||
823d672c2c | |||
a12238e4ef | |||
2ef5401dbb | |||
1e97a4d791 | |||
ec91f69adc | |||
3d1042e27e | |||
d7360eef3e | |||
ca005f6274 | |||
cffb10188a | |||
f20ae123a3 | |||
0f0b430a3c | |||
7ac9ec7b5f | |||
b7ad4daeb2 | |||
6ab354b20e | |||
869c749908 | |||
3793601de3 | |||
858d2f98e6 | |||
158b28eaff | |||
778ecacbd3 | |||
180692195f | |||
3bc819e567 | |||
742d3a3b05 | |||
cf95da395c | |||
66a30d6b2a | |||
e6da49d0bf | |||
f21ecf1eda | |||
f0ca8e6a9b | |||
a25de242d9 | |||
4ab0c99a0f | |||
024c03eb55 | |||
49cc8db22f | |||
c86cb40c10 | |||
be4c1b1cda | |||
1c6e83e9f8 | |||
a48bad9999 | |||
710f5d07d7 | |||
8d28945e8b | |||
ed1cbfc87e | |||
f7f13ed749 | |||
ce68ae87bf | |||
58bd76c39e | |||
cf8bccc569 | |||
6c3f409c49 | |||
8f2d42892b | |||
df1c3e6a3c | |||
48baba6adc | |||
557724339d | |||
b30de01cc7 | |||
586aeaf6de | |||
9bcfb15cf8 | |||
ba1e05af53 | |||
976b7cebf0 | |||
cc5c7a90b8 | |||
4bccd460ad | |||
ac5af04980 | |||
de3f3f93af | |||
840210040f | |||
005089ee9b | |||
bec119bc03 | |||
ecb04324d5 | |||
cea5f42425 | |||
9a3652086b | |||
d3ea2c6cfd | |||
cc44149e68 | |||
e44998b786 | |||
8e4bb8d3dd | |||
202f07973a | |||
05aac59836 | |||
edadf1c780 | |||
0a63b8b007 | |||
6a408e8404 | |||
8a9ca14391 | |||
4a4e1397d4 | |||
0298a53315 | |||
ecb230511f | |||
d15f05fc60 | |||
08267025b4 | |||
9469d3cb6f | |||
2e6335bf09 | |||
688910bf91 | |||
e771ccdf19 | |||
a4c6dee7ef | |||
d70f0daa22 | |||
cd74af8832 | |||
0f481e1f31 | |||
72215875ee | |||
3fed7d2614 | |||
013792f7b9 | |||
8512a9a7a1 | |||
af08f5d11b | |||
dfaccba2cb | |||
fc66f9f372 | |||
2474c43b7b | |||
300110e2bc | |||
b8e5dac0d6 | |||
3cba1c8510 | |||
03ef95923d | |||
dd099c0afd | |||
99c97032b4 | |||
2adc0b1a3e | |||
6fb6bb14a4 | |||
0d491aed90 | |||
a0da79bc2c | |||
f0f4b545f5 | |||
3d877185b0 | |||
b5a2fba896 | |||
33b89f5e9a | |||
75f2da576b | |||
d2a6f16060 | |||
0e4ab85b2f | |||
bc1c27b8d2 | |||
e5ba44ecfa | |||
b5120fa99d | |||
4a743dc1ca | |||
c7030c480e | |||
cb69d7c9d7 | |||
8d71df0cc4 | |||
efaf25b9d9 | |||
21d62dcd76 | |||
b6cb31df8b | |||
1fd8acdee4 | |||
599e4766b2 | |||
e60c3f7aa7 | |||
a4c1f0e670 | |||
d941b9d61e | |||
6b91c5779b | |||
fbb4acbc88 | |||
ed0ac1e0b2 | |||
3c831da310 | |||
335619ff79 | |||
363ac15faa | |||
d61946d15d | |||
352d3d2d0d | |||
0e72950382 | |||
11850228a8 | |||
8c087fc90e | |||
508c6a3a58 | |||
71633333b7 | |||
3e4c70ece2 | |||
c3ade2994e | |||
6c35504058 | |||
fa02ed396e | |||
48cab6af57 | |||
01ec2ccd89 | |||
797598322d | |||
803fc46249 | |||
67e651cb01 | |||
57d88b2db4 | |||
6106433153 | |||
e0ad2171bf | |||
db5cd43047 | |||
eb8bc3713b | |||
9d18ef03fa | |||
4dd495e2f5 | |||
8c50f051a7 | |||
31c4c32ec7 | |||
82376e3d57 | |||
694a275dac | |||
e11dd8734f | |||
db1a6706e4 | |||
e1b5245252 | |||
c8189460c2 | |||
1c8349a28e | |||
20f934dc12 | |||
8dadf3c1a1 | |||
d737ca57a7 | |||
5671448264 | |||
e71ab3155e | |||
a8e35e9f3b | |||
c33f9a0084 | |||
90a4efdfbf | |||
32e9ef1cd3 | |||
9511d9d65b | |||
6f7989ab50 | |||
9e21a81c96 | |||
1dfc5aabe7 | |||
4eaa608370 | |||
aeecaa4d3e | |||
7151470197 | |||
8fb4c56730 | |||
4fd50a93a0 | |||
356f0f84f5 | |||
999ff36667 | |||
fe912246a5 | |||
27a3d83c45 | |||
2ff6712932 | |||
260c0e97a2 | |||
42aae4eccf | |||
f00d23c8c4 | |||
0fb3d39a0a | |||
c83bf7218c | |||
24832ea074 | |||
8c56bd93d5 | |||
081cca2c19 | |||
de24ee8157 | |||
0d1592266b | |||
e8207d53cd | |||
f280d53496 | |||
ee3f9a3e07 | |||
d0d37c40e7 | |||
c9f9d38f1b | |||
8d4cf943c7 | |||
0523ecfe28 | |||
95e76b0624 | |||
73b1dc1825 | |||
c43833146a | |||
19ba9a45f7 | |||
926e89c89a | |||
3145fdb534 | |||
eacb8eb006 | |||
b27879f21f | |||
22b9c5c932 | |||
d8cd885542 | |||
8d47b4b2af | |||
42d737a301 | |||
33ad64f0a4 | |||
b89579e3be | |||
1850a8491f | |||
07415d944c | |||
d14ba92c9b | |||
aaf7e0934f | |||
106ed16a49 | |||
658cc94dbe | |||
8181f9faeb | |||
964ee08d05 | |||
355b1107e6 | |||
85a275950d | |||
d5ec14b556 | |||
2146a45104 | |||
856c5ef2ee | |||
0ef916e9c5 | |||
8c002a76e0 | |||
9462aa331f | |||
7f53746ee6 | |||
b57735abf3 | |||
841db6bcb6 | |||
fa9e650ea1 | |||
e6bb64f16f | |||
07ae84c6a0 | |||
054a3ea7eb | |||
27c92e3555 | |||
6d3a8af70b | |||
205f960ed4 | |||
6cef1a1410 | |||
9693a0b8ff | |||
8da4b55987 | |||
0bc543518c | |||
74bc2fecc0 | |||
3aad87e1a9 | |||
23a76e8a56 | |||
594a21c8f1 | |||
115ac27b79 | |||
55ec1707ef | |||
674793b808 | |||
aafba86a94 | |||
5d4beeb27d | |||
ea01cb6302 | |||
ce89965274 | |||
9471ca41e1 | |||
b3b5470947 | |||
2f6f043716 | |||
be23d915b3 | |||
70c0be72fe | |||
6486d02daa | |||
c2d60233fc | |||
75cc194424 | |||
b19a482fdb | |||
2d366a7704 | |||
a7d5e37f82 | |||
d300972b46 | |||
a722e7b8ba | |||
b857967dfa | |||
b78b4f49c1 | |||
96ff42f05a | |||
20c08aa9a4 | |||
917cfc259e | |||
fcf8bee6eb | |||
c89f9d82c7 | |||
8e5f579db7 | |||
77efeee2ca | |||
15591a1e2d | |||
26715695d8 | |||
4e539c4fd5 | |||
a029fc6fdb | |||
7e74a2e5b0 | |||
c4695d2b99 | |||
ead2bdc16c | |||
56fdfba787 | |||
f2639a7e67 | |||
8460fdfd2a | |||
82f38d279d | |||
02c83405fb | |||
638578283b | |||
2f4ad78e27 | |||
a116013f05 | |||
5960cb8699 | |||
09c52b66ff | |||
566dd6904b | |||
0b97cdeeef | |||
3f04de448b | |||
e7ce2e5cb7 | |||
0d8b74a9e0 | |||
d70d5a6521 | |||
d15b882929 | |||
517ef083f9 | |||
8f6cb9c856 | |||
55f9f05206 | |||
f22c8d0ab6 | |||
f9dad549c3 | |||
ff29b949cb | |||
3a608610f3 | |||
4706a42cee | |||
7509550eb1 | |||
dbc4131ec2 | |||
a1993eb568 | |||
156b4716f4 | |||
b5d4224664 | |||
888d62a96d | |||
ee2432c443 | |||
b319cb505c | |||
e881e42d5f | |||
730225c06b | |||
fbd0df1ecc | |||
cffd9040f7 | |||
6b4cf6558b | |||
1ded086969 | |||
a725dd0ad8 | |||
d263ab75cd | |||
3fc5e5bab6 | |||
21eac6ecfa | |||
0bf7110653 | |||
e8a3a3ec8d | |||
b12798e6b0 | |||
82a2706e4c | |||
72aea19853 | |||
791bc77c7c | |||
43bb09a1ad | |||
39bfa3df15 | |||
5a8de3a3bc | |||
ba6ec3ecca | |||
6876371325 | |||
d82870a5d3 | |||
d21e002272 | |||
b33396915e | |||
5fe35c7008 | |||
315bdfb4a4 | |||
0acfc47b33 | |||
ad2912a88e | |||
dd5f77c3d1 | |||
c53ac14c64 | |||
d3026672d1 | |||
d6e9c5ef46 | |||
f3991d3ee2 | |||
65085e5b4a | |||
84a940fcbb | |||
a465966954 | |||
69eeccec21 | |||
4d5539c889 | |||
0416b472d3 | |||
8726843bdb | |||
ea05ea2559 |
4
.github/ISSUE_TEMPLATE/config.yml
vendored
4
.github/ISSUE_TEMPLATE/config.yml
vendored
|
@ -2,10 +2,10 @@ blank_issues_enabled: false
|
|||
contact_links:
|
||||
- name: GitHub Wiki
|
||||
url: https://github.com/derrod/legendary/wiki/Game-workarounds
|
||||
about: The Legendary Wiki contains troubleshooting steps for some games and a guide for setting up Proton
|
||||
about: The Legendary Wiki contains troubleshooting steps for some games as well as a setup guide for Windows
|
||||
- name: GitHub Discussions
|
||||
url: https://github.com/derrod/legendary/discussions
|
||||
about: GitHub Forum for anything that is not a legendary issue (e.g. game or WINE problems)
|
||||
- name: Discord chat
|
||||
url: https://discord.gg/RQHbMVrwRr
|
||||
url: https://legendary.gl/discord
|
||||
about: Discord chat for help with game or WINE issues
|
||||
|
|
62
.github/workflows/python.yml
vendored
62
.github/workflows/python.yml
vendored
|
@ -11,26 +11,29 @@ jobs:
|
|||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: ['ubuntu-20.04', 'windows-latest']
|
||||
|
||||
fail-fast: true
|
||||
os: ['ubuntu-20.04', 'windows-2019', 'macos-11']
|
||||
fail-fast: false
|
||||
max-parallel: 3
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.8'
|
||||
python-version: '3.9'
|
||||
|
||||
- name: Dependencies
|
||||
- name: Legendary dependencies and build tools
|
||||
run: pip3 install --upgrade
|
||||
setuptools
|
||||
pyinstaller
|
||||
requests
|
||||
setuptools
|
||||
wheel
|
||||
filelock
|
||||
|
||||
- name: Strip
|
||||
- name: Optional dependencies (WebView)
|
||||
run: pip3 install --upgrade pywebview
|
||||
if: runner.os != 'macOS'
|
||||
|
||||
- name: Set strip option on non-Windows
|
||||
id: strip
|
||||
run: echo ::set-output name=option::--strip
|
||||
if: runner.os != 'Windows'
|
||||
|
@ -41,36 +44,37 @@ jobs:
|
|||
--onefile
|
||||
--name legendary
|
||||
${{ steps.strip.outputs.option }}
|
||||
-i ../assets/windows_icon.ico
|
||||
cli.py
|
||||
env:
|
||||
PYTHONOPTIMIZE: 1
|
||||
|
||||
- uses: actions/upload-artifact@v2
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ runner.os }}-package
|
||||
path: legendary/dist/*
|
||||
|
||||
deb:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: ['ubuntu-20.04']
|
||||
|
||||
fail-fast: true
|
||||
max-parallel: 3
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Dependencies
|
||||
run: sudo apt install
|
||||
python3-all
|
||||
python3-stdeb
|
||||
dh-python
|
||||
python3-requests
|
||||
python3-setuptools
|
||||
python3-wheel
|
||||
run: |
|
||||
sudo apt install ruby
|
||||
sudo gem install fpm
|
||||
|
||||
- name: Build
|
||||
run: python3 setup.py --command-packages=stdeb.command bdist_deb
|
||||
run: fpm
|
||||
--input-type python
|
||||
--output-type deb
|
||||
--python-package-name-prefix python3
|
||||
--deb-suggests python3-webview
|
||||
--maintainer "Rodney <rodney@rodney.io>"
|
||||
--category python
|
||||
--depends "python3 >= 3.9"
|
||||
setup.py
|
||||
|
||||
- name: Os version
|
||||
id: os_version
|
||||
|
@ -78,7 +82,7 @@ jobs:
|
|||
source /etc/os-release
|
||||
echo ::set-output name=version::$NAME-$VERSION_ID
|
||||
|
||||
- uses: actions/upload-artifact@v2
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ steps.os_version.outputs.version }}-deb-package
|
||||
path: deb_dist/*.deb
|
||||
path: ./*.deb
|
||||
|
|
537
README.md
537
README.md
|
@ -2,42 +2,50 @@
|
|||
## A free and open-source Epic Games Launcher alternative
|
||||
![Logo](https://repository-images.githubusercontent.com/249938026/80b18f80-96c7-11ea-9183-0a8c96e7cada)
|
||||
|
||||
[![Discord](https://discordapp.com/api/guilds/695233346627698689/widget.png?style=shield)](https://discord.gg/UJKBwPw) [![Twitter Follow](https://img.shields.io/twitter/follow/legendary_gl?label=Follow%20us%20for%20updates%21&style=social)](https://twitter.com/legendary_gl)
|
||||
[![Discord](https://discordapp.com/api/guilds/695233346627698689/widget.png?style=shield)](https://legendary.gl/discord) [![Twitter Follow](https://img.shields.io/twitter/follow/legendary_gl?label=Follow%20us%20for%20updates%21&style=social)](https://twitter.com/legendary_gl)
|
||||
|
||||
Legendary is an open-source game launcher that can download and install games from the Epic Games platform on Linux and Windows.
|
||||
Legendary is an open-source game launcher that can download and install games from the Epic Games platform on Linux, macOS, and Windows.
|
||||
Its name as a tongue-in-cheek play on tiers of [item rarity in many MMORPGs](https://wow.gamepedia.com/Quality).
|
||||
|
||||
Right now Legendary is in beta and not feature-complete. You might run into some bugs or issues.
|
||||
If you do please [create an issue on GitHub](https://github.com/derrod/legendary/issues/new/choose) so we can fix it.
|
||||
|
||||
Please read the the [config file](#config-file) and [cli usage](#usage) sections before creating an issue to avoid invalid reports.
|
||||
|
||||
If you run into any issues [ask for help on our Discord](https://legendary.gl/discord) or [create an issue on GitHub](https://github.com/derrod/legendary/issues/new/choose) so we can fix it!
|
||||
|
||||
Finally, if you wish to support the project, please consider [buying me a coffee on Ko-Fi](https://ko-fi.com/derrod).
|
||||
Alternatively, if you've been considering picking up a copy of CrossOver you can use my [affiliate link](https://www.codeweavers.com/?ad=892) and discount code `LEGENDARY15` in their store.
|
||||
|
||||
**Note:** Legendary is currently a CLI (command-line interface) application without a graphical user interface,
|
||||
it has to be run from a terminal (e.g. PowerShell)
|
||||
|
||||
**What works:**
|
||||
**Features:**
|
||||
- Authenticating with Epic's service
|
||||
- Downloading and installing your games and their DLC
|
||||
- Delta patching/updating of installed games
|
||||
- Launching games with online authentication (for multiplayer/DRM)
|
||||
- Syncing cloud saves (compatible with EGL)
|
||||
- Running games with WINE on Linux
|
||||
- Importing/Exporting installed games from/to the Epic Games Launcher
|
||||
- Running games with WINE on Linux/macOS
|
||||
- Importing/Exporting installed games from/to the Epic Games Launcher (unsupported for macOS version of EGL)
|
||||
|
||||
**Planned:**
|
||||
- Simple GUI for managing/launching games
|
||||
- Better interfaces for other developers to use Legendary in their projects
|
||||
- Lots and lots of bug fixes, optimizations, and refactoring...
|
||||
|
||||
## Requirements
|
||||
|
||||
- Linux or Windows (64-bit)
|
||||
- python 3.8+ (64-bit on Windows)
|
||||
- PyPI packages: `requests`, optionally `setuptools` and `wheel` for setup/building
|
||||
- Linux, Windows (8.1+), or macOS (12.0+)
|
||||
+ 32-bit operating systems are not supported
|
||||
- python 3.9+ (64-bit)
|
||||
+ (Windows) `pythonnet` is not yet compatible with 3.10+, use 3.9 if you plan to install `pywebview`
|
||||
- PyPI packages:
|
||||
+ `requests`
|
||||
+ (optional) `pywebview` for webview-based login
|
||||
+ (optional) `setuptools` and `wheel` for setup/building
|
||||
|
||||
**Note:** Running Windows applications on Linux or macOS requires [Wine](https://www.winehq.org/).
|
||||
|
||||
## How to run/install
|
||||
|
||||
### Package Manager
|
||||
### Package Manager (Linux)
|
||||
|
||||
Several distros already have packages available, check out the [Available Linux Packages](https://github.com/derrod/legendary/wiki/Available-Linux-Packages) wiki page for details.
|
||||
|
||||
|
@ -51,21 +59,21 @@ but more will be available in the future.
|
|||
Note that since packages are maintained by third parties it may take a bit for them to be updated to the latest version.
|
||||
If you always want to have the latest features and fixes available then using the PyPI distribution is recommended.
|
||||
|
||||
### Standalone
|
||||
### Prebuilt Standalone Binary (Windows, macOS, and Linux)
|
||||
|
||||
Download the `legendary` or `legendary.exe` binary from [the latest release](https://github.com/derrod/legendary/releases/latest)
|
||||
and move it to somewhere in your `$PATH`/`%PATH%`. Don't forget to `chmod +x` it on Linux.
|
||||
and move it to somewhere in your `$PATH`/`%PATH%`. Don't forget to `chmod +x` it on Linux/macOS.
|
||||
|
||||
The Windows .exe and Linux executable were created with PyInstaller and will run standalone even without python being installed.
|
||||
The Windows .exe and Linux/macOS executable were created with PyInstaller and will run standalone even without python being installed.
|
||||
Note that on Linux glibc >= 2.25 is required, so older distributions such as Ubuntu 16.04 or Debian stretch will not work.
|
||||
|
||||
### Python package
|
||||
### Python Package (any)
|
||||
|
||||
#### Prerequisites
|
||||
|
||||
To prevent problems with permissions during installation, please upgrade your `pip` by running `python -m pip install -U pip --user`.
|
||||
|
||||
> **Tip:** You may need to replace `python` in the above command with `python3.8` on Linux, or `py -3.8` on Windows.
|
||||
> **Tip:** You may need to replace `python` in the above command with `python3` on Linux/macOS, or `py -3` on Windows.
|
||||
|
||||
#### Installation from PyPI (recommended)
|
||||
|
||||
|
@ -75,9 +83,25 @@ Legendary is available on [PyPI](https://pypi.org/project/legendary-gl/), to ins
|
|||
pip install legendary-gl
|
||||
```
|
||||
|
||||
Optionally if logging in via an embedded web view is desired also run
|
||||
```bash
|
||||
pip install legendary-gl[webview]
|
||||
```
|
||||
On Linux this may also require installing a supported web engine and its python bindings.
|
||||
Ubunutu example:
|
||||
```bash
|
||||
sudo apt install python3-gi-cairo
|
||||
pip install legendary-gl[webview]
|
||||
```
|
||||
|
||||
Alternatively `pip install legendary-gl[webview_gtk]` or `pip install pywebview[gtk]` will work
|
||||
but may require manually installing dependencies needed to build `PyGObject`.
|
||||
|
||||
**Note:** Using pywebview's Qt engine may not work correctly. Using pywebview is currently unsupported on macOS.
|
||||
|
||||
#### Manually from the repo
|
||||
|
||||
- Install python3.8, setuptools, wheel, and requests
|
||||
- Install python3.9, setuptools, wheel, and requests
|
||||
- Clone the git repository and cd into it
|
||||
- Run `pip install .`
|
||||
|
||||
|
@ -99,7 +123,7 @@ echo 'export PATH=$PATH:~/.local/bin' >> ~/.profile && source ~/.profile
|
|||
|
||||
### Directly from the repo (for dev/testing)
|
||||
|
||||
- Install python3.8 and requests (optionally in a venv)
|
||||
- Install python3.9 and requests (optionally in a venv)
|
||||
- cd into the repository
|
||||
- Run `pip install -e .`
|
||||
|
||||
|
@ -113,12 +137,17 @@ To log in:
|
|||
````
|
||||
legendary auth
|
||||
````
|
||||
Authentication is a little finicky since we have to go through the Epic website. The login page should open in your browser and after logging in you should be presented with a JSON response that contains a code, just copy and paste the code into your terminal to log in.
|
||||
On Windows you can use the `--import` flag to import the authentication from the Epic Games Launcher. Note that this will log you out of the Epic Launcher.
|
||||
When using the prebuilt Windows executables of version 0.20.14 or higher this should open a new window with the Epic Login.
|
||||
|
||||
Otherwise, authentication is a little finicky since we have to go through the Epic website and manually copy a code.
|
||||
The login page should open in your browser and after logging in you should be presented with a JSON response that contains a code ("authorizationCode"), just copy the code into the terminal and hit enter.
|
||||
|
||||
Alternatively you can use the `--import` flag to import the authentication from the Epic Games Launcher (manually specifying the used WINE prefix may be required on Linux).
|
||||
Note that this will log you out of the Epic Launcher.
|
||||
|
||||
Listing your games
|
||||
````
|
||||
legendary list-games
|
||||
legendary list
|
||||
````
|
||||
This will fetch a list of games available on your account, the first time may take a while depending on how many games you have.
|
||||
|
||||
|
@ -126,7 +155,7 @@ Installing a game
|
|||
````
|
||||
legendary install Anemone
|
||||
````
|
||||
**Important:** the name used for these commands is the app name, *not* the game's name! The app name is in the parentheses after the game title in the games list.
|
||||
**Note:** the name used here is generally the game's "app name" as seen in the games list rather than its title, but as of 0.20.12 legendary will try to match game names or abbreviations thereof as well. In this case `legendary install world of goo` or `legendary install wog` would also work!
|
||||
|
||||
List installed games and check for updates
|
||||
````
|
||||
|
@ -135,15 +164,18 @@ legendary list-installed --check-updates
|
|||
|
||||
Launch (run) a game with online authentication
|
||||
````
|
||||
legendary launch Anemone
|
||||
legendary launch "world of goo"
|
||||
````
|
||||
**Tip:** most games will run fine offline (`--offline`), and thus won't require launching through legendary for online authentication. You can run `legendary launch <App Name> --offline --dry-run` to get a command line that will launch the game with all parameters that would be used by the Epic Launcher. These can then be entered into any other game launcher (e.g. Lutris/Steam) if the game requires them.
|
||||
**Tip:** most games will run fine offline (`--offline`), and thus won't require launching through legendary for online authentication.
|
||||
You can run `legendary launch <App Name> --offline --dry-run` to get a command line that will launch the game with all parameters that would be used by the Epic Launcher.
|
||||
These can then be entered into any other game launcher (e.g. Lutris/Steam) if the game requires them.
|
||||
|
||||
Importing a previously installed game
|
||||
````
|
||||
legendary import-game Anemone /mnt/games/Epic/WorldOfGoo
|
||||
legendary import Anemone /mnt/games/Epic/WorldOfGoo
|
||||
````
|
||||
**Note:** Importing will require a full verification so Legendary can correctly update the game later.
|
||||
**Note:** Importing will require a full verification so Legendary can correctly update the game later.
|
||||
**Note 2:** In order to use an alias here you may have to put it into quotes if if contains more than one word, e.g. `legendary import-game "world of goo" /mnt/games/Epic/WorldOfGoo`.
|
||||
|
||||
Sync savegames with the Epic Cloud
|
||||
````
|
||||
|
@ -159,53 +191,228 @@ legendary -y egl-sync
|
|||
## Usage
|
||||
|
||||
````
|
||||
usage: legendary [-h] [-v] [-y] [-V]
|
||||
{auth,install,download,update,repair,uninstall,launch,list-games,list-installed,list-files,list-saves,download-saves,sync-saves,verify-game,import-game,egl-sync,status,cleanup}
|
||||
...
|
||||
usage: legendary [-h] [-H] [-v] [-y] [-V] [-J] [-A <seconds>] <command> ...
|
||||
|
||||
Legendary v0.X.X - "Codename"
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
-H, --full-help Show full help (including individual command help)
|
||||
-v, --debug Set loglevel to debug
|
||||
-y, --yes Default to yes for all prompts
|
||||
-V, --version Print version and exit
|
||||
-J, --pretty-json Pretty-print JSON
|
||||
-A <seconds>, --api-timeout <seconds>
|
||||
API HTTP request timeout (default: 10 seconds)
|
||||
|
||||
Commands:
|
||||
{auth,install,download,update,repair,uninstall,launch,list-games,list-installed,list-files,list-saves,download-saves,sync-saves,verify-game,import-game,egl-sync,status,cleanup}
|
||||
auth Authenticate with EPIC
|
||||
install (download,update,repair)
|
||||
Download a game
|
||||
uninstall Uninstall (delete) a game
|
||||
launch Launch a game
|
||||
list-games List available (installable) games
|
||||
list-installed List installed games
|
||||
list-files List files in manifest
|
||||
list-saves List available cloud saves
|
||||
download-saves Download all cloud saves
|
||||
sync-saves Sync cloud saves
|
||||
verify-game Verify a game's local files
|
||||
import-game Import an already installed game
|
||||
egl-sync Setup or run Epic Games Launcher sync
|
||||
status Show legendary status information
|
||||
<command>
|
||||
activate Activate games on third party launchers
|
||||
alias Manage aliases
|
||||
auth Authenticate with the Epic Games Store
|
||||
clean-saves Clean cloud saves
|
||||
cleanup Remove old temporary, metadata, and manifest files
|
||||
crossover Setup CrossOver for launching games (macOS only)
|
||||
download-saves Download all cloud saves
|
||||
egl-sync Setup or run Epic Games Launcher sync
|
||||
eos-overlay Manage EOS Overlay install
|
||||
import Import an already installed game
|
||||
info Prints info about specified app name or manifest
|
||||
install (download, update, repair)
|
||||
Install/download/update/repair a game
|
||||
launch Launch a game
|
||||
list List available (installable) games
|
||||
list-files List files in manifest
|
||||
list-installed List installed games
|
||||
list-saves List available cloud saves
|
||||
move Move specified app name to a new location
|
||||
status Show legendary status information
|
||||
sync-saves Sync cloud saves
|
||||
uninstall Uninstall (delete) a game
|
||||
verify Verify a game's local files
|
||||
|
||||
Individual command help:
|
||||
|
||||
Command: activate
|
||||
usage: legendary activate [-h] (-U | -O)
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
-U, --uplay Activate Uplay/Ubisoft Connect titles on your Ubisoft account
|
||||
(Uplay install not required)
|
||||
-O, --origin Activate Origin/EA App managed titles on your EA account
|
||||
(requires Origin to be installed)
|
||||
|
||||
|
||||
Command: alias
|
||||
usage: legendary alias [-h]
|
||||
<add|rename|remove|list> [<App name/Old alias>]
|
||||
[<New alias>]
|
||||
|
||||
positional arguments:
|
||||
<add|rename|remove|list>
|
||||
Action: Add, rename, remove, or list alias(es)
|
||||
<App name/Old alias> App name when using "add" or "list" action, existing
|
||||
alias when using "rename" or "remove" action
|
||||
<New alias> New alias when using "add" action
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
|
||||
|
||||
Command: auth
|
||||
usage: legendary auth [-h] [--import] [--code <exchange code>]
|
||||
[--sid <session id>] [--delete]
|
||||
[--sid <session id>] [--delete] [--disable-webview]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--import Import Epic Games Launcher authentication data (logs
|
||||
out of EGL)
|
||||
--code <exchange code>
|
||||
Use specified exchange code instead of interactive
|
||||
authentication
|
||||
--code <authorization code>
|
||||
Use specified authorization code instead of interactive authentication
|
||||
--token <exchange token>
|
||||
Use specified exchange token instead of interactive authentication
|
||||
--sid <session id> Use specified session id instead of interactive
|
||||
authentication
|
||||
--delete Remove existing authentication (log out)
|
||||
--disable-webview Do not use embedded browser for login
|
||||
|
||||
|
||||
Command: clean-saves
|
||||
usage: legendary clean-saves [-h] [--delete-incomplete] [<App Name>]
|
||||
|
||||
positional arguments:
|
||||
<App Name> Name of the app (optional)
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--delete-incomplete Delete incomplete save files
|
||||
|
||||
|
||||
Command: cleanup
|
||||
usage: legendary cleanup [-h] [--keep-manifests]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--keep-manifests Do not delete old manifests
|
||||
|
||||
|
||||
Command: crossover
|
||||
usage: legendary crossover [-h] [--reset] [--download] [--ignore-version]
|
||||
[--crossover-app <path to .app>]
|
||||
[--crossover-bottle <bottle name>]
|
||||
[<App Name>]
|
||||
|
||||
positional arguments:
|
||||
<App Name> App name to configure, will configure defaults if
|
||||
ommited
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--reset Reset default/app-specific crossover configuration
|
||||
--download Automatically download and set up a preconfigured
|
||||
bottle (experimental)
|
||||
--ignore-version Disable version check for available bottles when using
|
||||
--download
|
||||
--crossover-app <path to .app>
|
||||
Specify app to skip interactive selection
|
||||
--crossover-bottle <bottle name>
|
||||
Specify bottle to skip interactive selection
|
||||
|
||||
|
||||
Command: download-saves
|
||||
usage: legendary download-saves [-h] [<App Name>]
|
||||
|
||||
positional arguments:
|
||||
<App Name> Name of the app (optional)
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
|
||||
|
||||
Command: egl-sync
|
||||
usage: legendary egl-sync [-h] [--egl-manifest-path EGL_MANIFEST_PATH]
|
||||
[--egl-wine-prefix EGL_WINE_PREFIX] [--enable-sync]
|
||||
[--disable-sync] [--one-shot] [--import-only]
|
||||
[--export-only] [--migrate] [--unlink]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--egl-manifest-path EGL_MANIFEST_PATH
|
||||
Path to the Epic Games Launcher's Manifests folder,
|
||||
should point to
|
||||
/ProgramData/Epic/EpicGamesLauncher/Data/Manifests
|
||||
--egl-wine-prefix EGL_WINE_PREFIX
|
||||
Path to the WINE prefix the Epic Games Launcher is
|
||||
installed in
|
||||
--enable-sync Enable automatic EGL <-> Legendary sync
|
||||
--disable-sync Disable automatic sync and exit
|
||||
--one-shot Sync once, do not ask to setup automatic sync
|
||||
--import-only Only import games from EGL (no export)
|
||||
--export-only Only export games to EGL (no import)
|
||||
--migrate Import games into legendary, then remove them from EGL
|
||||
(implies --import-only --one-shot --unlink)
|
||||
--unlink Disable sync and remove EGL metadata from installed
|
||||
games
|
||||
|
||||
|
||||
Command: eos-overlay
|
||||
usage: legendary eos-overlay [-h] [--path PATH] [--prefix PREFIX] [--app APP]
|
||||
[--bottle BOTTLE]
|
||||
<install|update|remove|enable|disable|info>
|
||||
|
||||
positional arguments:
|
||||
<install|update|remove|enable|disable|info>
|
||||
Action: install, remove, enable, disable, or print
|
||||
info about the overlay
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--path PATH Path to the EOS overlay folder to be enabled/installed
|
||||
to.
|
||||
--prefix PREFIX WINE prefix to install the overlay in
|
||||
--app APP Use this app's wine prefix (if configured in config)
|
||||
--bottle BOTTLE WINE prefix to install the overlay in
|
||||
|
||||
|
||||
Command: import
|
||||
usage: legendary import [-h] [--disable-check] [--with-dlcs] [--skip-dlcs]
|
||||
[--platform <Platform>]
|
||||
<App Name> <Installation directory>
|
||||
|
||||
positional arguments:
|
||||
<App Name> Name of the app
|
||||
<Installation directory>
|
||||
Path where the game is installed
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--disable-check Disables completeness check of the to-be-imported game
|
||||
installation (useful if the imported game is a much
|
||||
older version or missing files)
|
||||
--with-dlcs Automatically attempt to import all DLCs with the base
|
||||
game
|
||||
--skip-dlcs Do not ask about importing DLCs.
|
||||
--platform <Platform>
|
||||
Platform for import (default: Mac on macOS, otherwise
|
||||
Windows)
|
||||
|
||||
|
||||
Command: info
|
||||
usage: legendary info [-h] [--offline] [--json] [--platform <Platform>]
|
||||
<App Name/Manifest URI>
|
||||
|
||||
positional arguments:
|
||||
<App Name/Manifest URI>
|
||||
App name or manifest path/URI
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--offline Only print info available offline
|
||||
--json Output information in JSON format
|
||||
--platform <Platform>
|
||||
Platform to fetch info for (default: installed or Mac
|
||||
on macOS, Windows otherwise)
|
||||
|
||||
|
||||
Command: install
|
||||
|
@ -218,7 +425,7 @@ positional arguments:
|
|||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--base-path <path> Path for game installations (defaults to ~/legendary)
|
||||
--base-path <path> Path for game installations (defaults to ~/Games)
|
||||
--game-folder <path> Folder for game installation (defaults to folder
|
||||
specified in metadata)
|
||||
--max-shared-memory <size>
|
||||
|
@ -246,8 +453,7 @@ optional arguments:
|
|||
--dlm-debug Set download manager and worker processes' loglevel to
|
||||
debug
|
||||
--platform <Platform>
|
||||
Platform override for download (also sets --no-
|
||||
install)
|
||||
Platform for install (default: installed or Windows)
|
||||
--prefix <prefix> Only fetch files whose path starts with <prefix> (case
|
||||
insensitive)
|
||||
--exclude <prefix> Exclude files starting with <prefix> (case
|
||||
|
@ -268,22 +474,17 @@ optional arguments:
|
|||
download size)
|
||||
--reset-sdl Reset selective downloading choices (requires repair
|
||||
to download new components)
|
||||
--skip-sdl Skip SDL prompt and continue with defaults (only
|
||||
required game data)
|
||||
--disable-sdl Disable selective downloading for title, reset
|
||||
existing configuration (if any)
|
||||
--preferred-cdn <hostname>
|
||||
Set the hostname of the preferred CDN to use when
|
||||
available
|
||||
--no-https Download games via plaintext HTTP (like EGS), e.g. for
|
||||
use with a lan cache
|
||||
|
||||
|
||||
Command: uninstall
|
||||
usage: legendary uninstall [-h] [--keep-files] <App Name>
|
||||
|
||||
positional arguments:
|
||||
<App Name> Name of the app
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--keep-files Keep files but remove game from Legendary database
|
||||
--with-dlcs Automatically install all DLCs with the base game
|
||||
--skip-dlcs Do not ask about installing DLCs.
|
||||
|
||||
|
||||
Command: launch
|
||||
|
@ -314,42 +515,39 @@ optional arguments:
|
|||
--reset-defaults Reset config settings for app and exit
|
||||
--override-exe <exe path>
|
||||
Override executable to launch (relative path)
|
||||
--origin Launch Origin to activate or run the game.
|
||||
--json Print launch information as JSON and exit
|
||||
--wine <wine binary> Set WINE binary to use to launch the app
|
||||
--wine-prefix <wine pfx path>
|
||||
Set WINE prefix to use
|
||||
--no-wine Do not run game with WINE (e.g. if a wrapper is used)
|
||||
--crossover Interactively configure CrossOver for this
|
||||
application.
|
||||
--crossover-app <path to .app>
|
||||
Specify which App to use for CrossOver (e.g.
|
||||
"/Applications/CrossOver.app")
|
||||
--crossover-bottle <bottle name>
|
||||
Specify which bottle to use for CrossOver
|
||||
|
||||
|
||||
Command: list-games
|
||||
usage: legendary list-games [-h] [--platform <Platform>] [--include-ue] [--csv]
|
||||
[--tsv] [--json]
|
||||
Command: list
|
||||
usage: legendary list [-h] [--platform <Platform>] [--include-ue] [-T] [--csv]
|
||||
[--tsv] [--json] [--force-refresh]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--platform <Platform>
|
||||
Override platform that games are shown for (e.g.
|
||||
Win32/Mac)
|
||||
Platform to fetch game list for (default: Mac on
|
||||
macOS, otherwise Windows)
|
||||
--include-ue Also include Unreal Engine content
|
||||
(Engine/Marketplace) in list
|
||||
--include-non-installable
|
||||
-T, --third-party, --include-non-installable
|
||||
Include apps that are not installable (e.g. that have
|
||||
to be activated on Origin)
|
||||
--csv List games in CSV format
|
||||
--tsv List games in TSV format
|
||||
--json List games in JSON format
|
||||
|
||||
|
||||
Command: list-installed
|
||||
usage: legendary list-installed [-h] [--check-updates] [--csv] [--tsv] [--json]
|
||||
[--show-dirs]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--check-updates Check for updates for installed games
|
||||
--csv List games in CSV format
|
||||
--tsv List games in TSV format
|
||||
--json List games in JSON format
|
||||
--show-dirs Print installation directory in output
|
||||
--force-refresh Force a refresh of all game metadata
|
||||
|
||||
|
||||
Command: list-files
|
||||
|
@ -365,7 +563,7 @@ optional arguments:
|
|||
-h, --help show this help message and exit
|
||||
--force-download Always download instead of using on-disk manifest
|
||||
--platform <Platform>
|
||||
Platform override for download (disables install)
|
||||
Platform (default: Mac on macOS, otherwise Windows)
|
||||
--manifest <uri> Manifest URL or path to use instead of the CDN one
|
||||
--csv Output in CSV format
|
||||
--tsv Output in TSV format
|
||||
|
@ -375,6 +573,19 @@ optional arguments:
|
|||
--install-tag <tag> Show only files with specified install tag
|
||||
|
||||
|
||||
Command: list-installed
|
||||
usage: legendary list-installed [-h] [--check-updates] [--csv] [--tsv]
|
||||
[--json] [--show-dirs]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--check-updates Check for updates for installed games
|
||||
--csv List games in CSV format
|
||||
--tsv List games in TSV format
|
||||
--json List games in JSON format
|
||||
--show-dirs Print installation directory in output
|
||||
|
||||
|
||||
Command: list-saves
|
||||
usage: legendary list-saves [-h] [<App Name>]
|
||||
|
||||
|
@ -385,14 +596,26 @@ optional arguments:
|
|||
-h, --help show this help message and exit
|
||||
|
||||
|
||||
Command: download-saves
|
||||
usage: legendary download-saves [-h] [<App Name>]
|
||||
Command: move
|
||||
usage: legendary move [-h] [--skip-move] <App Name> <New Base Path>
|
||||
|
||||
positional arguments:
|
||||
<App Name> Name of the app (optional)
|
||||
<App Name> Name of the app
|
||||
<New Base Path> Directory to move game folder to
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--skip-move Only change legendary database, do not move files (e.g. if
|
||||
already moved)
|
||||
|
||||
|
||||
Command: status
|
||||
usage: legendary status [-h] [--offline] [--json]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--offline Only print offline status information, do not login
|
||||
--json Show status in JSON format
|
||||
|
||||
|
||||
Command: sync-saves
|
||||
|
@ -415,88 +638,28 @@ optional arguments:
|
|||
--disable-filters Disable save game file filtering
|
||||
|
||||
|
||||
Command: verify-game
|
||||
usage: legendary verify-game [-h] <App Name>
|
||||
Command: uninstall
|
||||
usage: legendary uninstall [-h] [--keep-files] <App Name>
|
||||
|
||||
positional arguments:
|
||||
<App Name> Name of the app
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--keep-files Keep files but remove game from Legendary database
|
||||
|
||||
|
||||
Command: verify
|
||||
usage: legendary verify [-h] <App Name>
|
||||
|
||||
positional arguments:
|
||||
<App Name> Name of the app
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
|
||||
|
||||
Command: import-game
|
||||
usage: legendary import-game [-h] [--disable-check]
|
||||
<App Name> <Installation directory>
|
||||
|
||||
positional arguments:
|
||||
<App Name> Name of the app
|
||||
<Installation directory>
|
||||
Path where the game is installed
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--disable-check Disables completeness check of the to-be-imported game
|
||||
installation (useful if the imported game is a much
|
||||
older version or missing files)
|
||||
|
||||
|
||||
Command: egl-sync
|
||||
usage: legendary egl-sync [-h] [--egl-manifest-path EGL_MANIFEST_PATH]
|
||||
[--egl-wine-prefix EGL_WINE_PREFIX] [--enable-sync]
|
||||
[--disable-sync] [--one-shot] [--import-only]
|
||||
[--export-only] [--unlink]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--egl-manifest-path EGL_MANIFEST_PATH
|
||||
Path to the Epic Games Launcher's Manifests folder,
|
||||
should point to
|
||||
/ProgramData/Epic/EpicGamesLauncher/Data/Manifests
|
||||
--egl-wine-prefix EGL_WINE_PREFIX
|
||||
Path to the WINE prefix the Epic Games Launcher is
|
||||
installed in
|
||||
--enable-sync Enable automatic EGL <-> Legendary sync
|
||||
--disable-sync Disable automatic sync and exit
|
||||
--one-shot Sync once, do not ask to setup automatic sync
|
||||
--import-only Only import games from EGL (no export)
|
||||
--export-only Only export games to EGL (no import)
|
||||
--unlink Disable sync and remove EGL metadata from installed
|
||||
games
|
||||
|
||||
|
||||
Command: status
|
||||
usage: legendary status [-h] [--offline] [--json]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--offline Only print offline status information, do not login
|
||||
--json Show status in JSON format
|
||||
|
||||
|
||||
Command: cleanup
|
||||
usage: legendary cleanup [-h] [--keep-manifests]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--keep-manifests Do not delete old manifests
|
||||
````
|
||||
|
||||
|
||||
## Environment variables
|
||||
|
||||
Legendary supports overriding certain things via environment variables,
|
||||
it also passes through any environment variables set before it is called.
|
||||
|
||||
Legendary specific environment variables:
|
||||
+ `LGDRY_WINE_BINARY` - specifies wine binary
|
||||
+ `LGDRY_WINE_PREFIX` - specified wine prefix
|
||||
+ `LGDRY_NO_WINE` - disables wine
|
||||
+ `LGDRY_WRAPPER` - specifies wrapper binary/command line
|
||||
|
||||
Note that the priority for settings that occur multiple times is:
|
||||
command line > environment variables > config variables.
|
||||
|
||||
## Config file
|
||||
|
||||
Legendary supports some options as well as game specific configuration in `~/.config/legendary/config.ini`:
|
||||
|
@ -504,8 +667,8 @@ Legendary supports some options as well as game specific configuration in `~/.co
|
|||
[Legendary]
|
||||
log_level = debug
|
||||
; maximum shared memory (in MiB) to use for installation
|
||||
max_memory = 1024
|
||||
; maximum number of worker processes when downloading (fewer workers will be slower, but also use fewer system resources)
|
||||
max_memory = 2048
|
||||
; maximum number of worker processes when downloading (fewer workers will be slower, but also use less system resources)
|
||||
max_workers = 8
|
||||
; default install directory
|
||||
install_dir = /mnt/tank/games
|
||||
|
@ -519,13 +682,39 @@ egl_programdata = /home/user/Games/epic-games-store/drive_c/...
|
|||
preferred_cdn = epicgames-download1.akamaized.net
|
||||
; disable HTTPS for downloads (e.g. to use a LanCache)
|
||||
disable_https = false
|
||||
; Disables the automatic update check
|
||||
disable_update_check = false
|
||||
; Disables the notice about an available update on exit
|
||||
disable_update_notice = false
|
||||
; Disable automatically-generated aliases
|
||||
disable_auto_aliasing = false
|
||||
|
||||
; default settings to use (currently limited to WINE executable)
|
||||
; macOS specific settings
|
||||
; Default application platform to use (default: Mac on macOS, Windows elsewhere)
|
||||
default_platform = Windows
|
||||
; Fallback to "Windows" platform if native version unavailable
|
||||
install_platform_fallback = true
|
||||
; (macOS) Disable automatic CrossOver use
|
||||
disable_auto_crossover = false
|
||||
; Default directory for native Mac applications (.app packages)
|
||||
mac_install_dir = /User/legendary/Applications
|
||||
|
||||
[Legendary.aliases]
|
||||
; List of aliases for simpler CLI use, in the format `<alias> = <app name>`
|
||||
HITMAN 3 = Eider
|
||||
gtav = 9d2d0eb64d5c44529cece33fe2a46482
|
||||
|
||||
; default settings to use for all apps (unless overridden in the app's config section)
|
||||
; Note that only the settings listed below are supported.
|
||||
[default]
|
||||
; (linux) specify wine executable to use
|
||||
; (all) wrapper to run the game with (e.g. "gamemode")
|
||||
wrapper = gamemode
|
||||
; (linux/macOS) Wine executable and prefix
|
||||
wine_executable = wine
|
||||
; wine prefix (alternative to using environment variable)
|
||||
wine_prefix = /home/user/.wine
|
||||
; (macOS) CrossOver options
|
||||
crossover_app = /Applications/CrossOver.app
|
||||
crossover_bottle = Legendary
|
||||
|
||||
; default environment variables to set (overridden by game specific ones)
|
||||
[default.env]
|
||||
|
@ -539,9 +728,10 @@ offline = true
|
|||
skip_update_check = true
|
||||
; start parameters to use (in addition to the required ones)
|
||||
start_params = -windowed
|
||||
wine_executable = /path/to/proton/wine64
|
||||
; override language with two-letter language code
|
||||
language = fr
|
||||
; Override Wine version for this app
|
||||
wine_executable = /path/to/wine64
|
||||
|
||||
[AppName.env]
|
||||
; environment variables to set for this game (mostly useful on linux)
|
||||
|
@ -551,9 +741,20 @@ DXVK_CONFIG_FILE = /mnt/tank/games/Game/dxvk.conf
|
|||
[AppName2]
|
||||
; Use a wrapper to run this script
|
||||
; Note that the path might have to be quoted if it contains spaces
|
||||
wrapper = "/path/to/Proton 5.0/proton" run
|
||||
wrapper = "/path/with spaces/gamemoderun"
|
||||
; Do not run this executable with WINE (e.g. when the wrapper handles that)
|
||||
no_wine = true
|
||||
; Override the executable launched for this game, for example to bypass a launcher (e.g. Borderlands)
|
||||
override_exe = relative/path/to/file.exe
|
||||
; Disable selective downloading for this title
|
||||
disable_sdl = true
|
||||
|
||||
[AppName3]
|
||||
; Command to run before launching the gmae
|
||||
pre_launch_command = /path/to/script.sh
|
||||
; Whether or not to wait for command to finish running
|
||||
pre_launch_wait = false
|
||||
; (macOS) override crossover settings
|
||||
crossover_app = /Applications/CrossOver Nightly.app
|
||||
crossover_bottle = SomethingElse
|
||||
````
|
||||
|
|
BIN
assets/windows_icon.ico
Normal file
BIN
assets/windows_icon.ico
Normal file
Binary file not shown.
After Width: | Height: | Size: 25 KiB |
264
doc/ue_manifest.ksy
Normal file
264
doc/ue_manifest.ksy
Normal file
|
@ -0,0 +1,264 @@
|
|||
meta:
|
||||
id: ue_manifest
|
||||
title: Binary Unreal Engine Manifest Version 20
|
||||
application: Epic Games Launcher
|
||||
file-extension: manifest
|
||||
license: 0BSD
|
||||
endian: le
|
||||
seq:
|
||||
- id: header
|
||||
type: header
|
||||
- id: body_compressed
|
||||
size: header.size_compressed
|
||||
type: body
|
||||
process: zlib
|
||||
if: header.is_compressed
|
||||
- id: body_uncompressed
|
||||
size: header.size_uncompressed
|
||||
type: body
|
||||
if: not header.is_compressed
|
||||
- id: unknown
|
||||
size-eos: true
|
||||
types:
|
||||
header:
|
||||
seq:
|
||||
- id: magic
|
||||
contents: [0x0C, 0xC0, 0xBE, 0x44]
|
||||
- id: header_size
|
||||
type: u4
|
||||
- id: size_uncompressed
|
||||
type: u4
|
||||
- id: size_compressed
|
||||
type: u4
|
||||
- id: sha1_hash
|
||||
size: 20
|
||||
- id: stored_as
|
||||
type: u1
|
||||
enum: stored_as_flag
|
||||
- id: version
|
||||
type: u4
|
||||
- id: unknown
|
||||
size: header_size - 41
|
||||
instances:
|
||||
is_compressed:
|
||||
value: stored_as.to_i & 1 == 1
|
||||
body:
|
||||
seq:
|
||||
- id: meta_size
|
||||
type: u4
|
||||
- id: metadata
|
||||
size: meta_size - 4
|
||||
type: metadata
|
||||
- id: cdl_size
|
||||
type: u4
|
||||
- id: chunk_data_list
|
||||
type: chunk_data_list
|
||||
size: cdl_size - 4
|
||||
- id: fml_size
|
||||
type: u4
|
||||
- id: file_manifest_list
|
||||
type: file_manifest_list
|
||||
size: fml_size - 4
|
||||
- id: custom_data_size
|
||||
type: u4
|
||||
- id: custom_fields
|
||||
type: custom_fields
|
||||
size: custom_data_size - 4
|
||||
- id: unknown
|
||||
size-eos: true
|
||||
metadata:
|
||||
seq:
|
||||
- id: data_version
|
||||
type: u1
|
||||
- id: feature_level
|
||||
type: u4
|
||||
- id: is_file_data
|
||||
type: u1
|
||||
- id: app_id
|
||||
type: u4
|
||||
- id: app_name
|
||||
type: fstring
|
||||
- id: build_version
|
||||
type: fstring
|
||||
- id: launch_exe
|
||||
type: fstring
|
||||
- id: launch_command
|
||||
type: fstring
|
||||
- id: prereq_ids_num
|
||||
type: u4
|
||||
- id: prereq_ids
|
||||
type: fstring
|
||||
repeat: expr
|
||||
repeat-expr: prereq_ids_num
|
||||
- id: prereq_name
|
||||
type: fstring
|
||||
- id: prereq_path
|
||||
type: fstring
|
||||
- id: prereq_args
|
||||
type: fstring
|
||||
- id: build_id
|
||||
type: fstring
|
||||
if: data_version > 0
|
||||
- id: unknown
|
||||
size-eos: true
|
||||
chunk_data_list:
|
||||
seq:
|
||||
- id: version
|
||||
type: u1
|
||||
- id: count
|
||||
type: u4
|
||||
- id: guids
|
||||
size: 16
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: ue_hashes
|
||||
type: u8
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: sha_hashes
|
||||
size: 20
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: group_nums
|
||||
type: u1
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: window_sizes
|
||||
type: u4
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: file_sizes
|
||||
type: s8
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: unknown
|
||||
size-eos: true
|
||||
file_manifest_list:
|
||||
seq:
|
||||
- id: version
|
||||
type: u1
|
||||
- id: count
|
||||
type: u4
|
||||
- id: filenames
|
||||
type: fstring
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: symlink_targets
|
||||
type: fstring
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: sha_hashes
|
||||
size: 20
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: flags
|
||||
type: u1
|
||||
enum: file_flags
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: tags
|
||||
type: tags
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: chunk_parts
|
||||
type: chunk_parts
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: md5_hashes
|
||||
if: version > 0
|
||||
type: md5_hash
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: mime_types
|
||||
if: version > 0
|
||||
type: fstring
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: sha256_hashes
|
||||
if: version > 1
|
||||
size: 32
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: unknown
|
||||
size-eos: true
|
||||
custom_fields:
|
||||
seq:
|
||||
- id: version
|
||||
type: u1
|
||||
- id: count
|
||||
type: u4
|
||||
- id: keys
|
||||
type: fstring
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: values
|
||||
type: fstring
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
- id: unknown
|
||||
size-eos: true
|
||||
fstring:
|
||||
seq:
|
||||
- id: length
|
||||
type: s4
|
||||
- id: value_ascii
|
||||
size: length
|
||||
type: str
|
||||
encoding: 'ASCII'
|
||||
if: length >= 0
|
||||
- id: value_utf16
|
||||
size: -2 * length
|
||||
type: str
|
||||
encoding: 'UTF-16LE'
|
||||
if: length < 0
|
||||
instances:
|
||||
value:
|
||||
value: 'length >= 0 ? value_ascii : value_utf16'
|
||||
if: length >= 0 or length < 0
|
||||
tags:
|
||||
seq:
|
||||
- id: count
|
||||
type: u4
|
||||
- id: tag
|
||||
type: fstring
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
chunk_parts:
|
||||
seq:
|
||||
- id: count
|
||||
type: u4
|
||||
- id: elements
|
||||
type: chunk_part
|
||||
repeat: expr
|
||||
repeat-expr: count
|
||||
chunk_part:
|
||||
seq:
|
||||
- id: entry_size
|
||||
type: u4
|
||||
- id: guid
|
||||
size: 16
|
||||
- id: offset
|
||||
type: u4
|
||||
- id: size
|
||||
type: u4
|
||||
- id: unknown
|
||||
size: entry_size - 28
|
||||
md5_hash:
|
||||
seq:
|
||||
- id: has_md5
|
||||
type: u4
|
||||
- id: md5
|
||||
size: 16
|
||||
if: has_md5 != 0
|
||||
instances:
|
||||
body:
|
||||
value: 'header.is_compressed ? body_compressed : body_uncompressed'
|
||||
enums:
|
||||
stored_as_flag:
|
||||
0x0: uncompressed
|
||||
0x1: compressed
|
||||
file_flags:
|
||||
0x0: none
|
||||
0x1: read_only
|
||||
0x2: compressed
|
||||
0x4: unix_executable
|
|
@ -1,4 +1,4 @@
|
|||
"""Legendary!"""
|
||||
|
||||
__version__ = '0.20.7'
|
||||
__codename__ = 'Route Kanal'
|
||||
__version__ = '0.20.34'
|
||||
__codename__ = 'Direct Intervention'
|
||||
|
|
|
@ -1,19 +1,25 @@
|
|||
# !/usr/bin/env python
|
||||
# coding: utf-8
|
||||
|
||||
import urllib.parse
|
||||
|
||||
import requests
|
||||
import requests.adapters
|
||||
import logging
|
||||
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
from legendary.models.exceptions import InvalidCredentialsError
|
||||
from legendary.models.gql import *
|
||||
|
||||
|
||||
class EPCAPI:
|
||||
_user_agent = 'UELauncher/11.0.1-14907503+++Portal+Release-Live Windows/10.0.19041.1.256.64bit'
|
||||
_store_user_agent = 'EpicGamesLauncher/14.0.8-22004686+++Portal+Release-Live'
|
||||
# required for the oauth request
|
||||
_user_basic = '34a02cf8f4414e29b15921876da36f9a'
|
||||
_pw_basic = 'daafbccc737745039dffe53d94fc76cf'
|
||||
_label = 'Live-EternalKnight'
|
||||
|
||||
_oauth_host = 'account-public-service-prod03.ol.epicgames.com'
|
||||
_launcher_host = 'launcher-public-service-prod06.ol.epicgames.com'
|
||||
|
@ -22,13 +28,23 @@ class EPCAPI:
|
|||
_ecommerce_host = 'ecommerceintegration-public-service-ecomprod02.ol.epicgames.com'
|
||||
_datastorage_host = 'datastorage-public-service-liveegs.live.use1a.on.epicgames.com'
|
||||
_library_host = 'library-service.live.use1a.on.epicgames.com'
|
||||
# Using the actual store host with a user-agent newer than 14.0.8 leads to a CF verification page,
|
||||
# but the dedicated graphql host works fine.
|
||||
# _store_gql_host = 'launcher.store.epicgames.com'
|
||||
_store_gql_host = 'graphql.epicgames.com'
|
||||
_artifact_service_host = 'artifact-public-service-prod.beee.live.use1a.on.epicgames.com'
|
||||
|
||||
def __init__(self, lc='en', cc='US'):
|
||||
self.session = requests.session()
|
||||
def __init__(self, lc='en', cc='US', timeout=10.0):
|
||||
self.log = logging.getLogger('EPCAPI')
|
||||
self.unauth_session = requests.session()
|
||||
|
||||
self.session = requests.session()
|
||||
self.session.headers['User-Agent'] = self._user_agent
|
||||
# increase maximum pool size for multithreaded metadata requests
|
||||
self.session.mount('https://', requests.adapters.HTTPAdapter(pool_maxsize=16))
|
||||
|
||||
self.unauth_session = requests.session()
|
||||
self.unauth_session.headers['User-Agent'] = self._user_agent
|
||||
|
||||
self._oauth_basic = HTTPBasicAuth(self._user_basic, self._pw_basic)
|
||||
|
||||
self.access_token = None
|
||||
|
@ -37,9 +53,33 @@ class EPCAPI:
|
|||
self.language_code = lc
|
||||
self.country_code = cc
|
||||
|
||||
self.request_timeout = timeout if timeout > 0 else None
|
||||
|
||||
def get_auth_url(self):
|
||||
login_url = 'https://www.epicgames.com/id/login?redirectUrl='
|
||||
redirect_url = f'https://www.epicgames.com/id/api/redirect?clientId={self._user_basic}&responseType=code'
|
||||
return login_url + urllib.parse.quote(redirect_url)
|
||||
|
||||
def update_egs_params(self, egs_params):
|
||||
# update user-agent
|
||||
if version := egs_params['version']:
|
||||
self._user_agent = f'UELauncher/{version} Windows/10.0.19041.1.256.64bit'
|
||||
self._store_user_agent = f'EpicGamesLauncher/{version}'
|
||||
self.session.headers['User-Agent'] = self._user_agent
|
||||
self.unauth_session.headers['User-Agent'] = self._user_agent
|
||||
# update label
|
||||
if label := egs_params['label']:
|
||||
self._label = label
|
||||
# update client credentials
|
||||
if 'client_id' in egs_params and 'client_secret' in egs_params:
|
||||
self._user_basic = egs_params['client_id']
|
||||
self._pw_basic = egs_params['client_secret']
|
||||
self._oauth_basic = HTTPBasicAuth(self._user_basic, self._pw_basic)
|
||||
|
||||
def resume_session(self, session):
|
||||
self.session.headers['Authorization'] = f'bearer {session["access_token"]}'
|
||||
r = self.session.get(f'https://{self._oauth_host}/account/api/oauth/verify')
|
||||
r = self.session.get(f'https://{self._oauth_host}/account/api/oauth/verify',
|
||||
timeout=self.request_timeout)
|
||||
if r.status_code >= 500:
|
||||
r.raise_for_status()
|
||||
|
||||
|
@ -53,7 +93,8 @@ class EPCAPI:
|
|||
self.user = session
|
||||
return self.user
|
||||
|
||||
def start_session(self, refresh_token: str = None, exchange_token: str = None) -> dict:
|
||||
def start_session(self, refresh_token: str = None, exchange_token: str = None,
|
||||
authorization_code: str = None, client_credentials: bool = False) -> dict:
|
||||
if refresh_token:
|
||||
params = dict(grant_type='refresh_token',
|
||||
refresh_token=refresh_token,
|
||||
|
@ -62,29 +103,49 @@ class EPCAPI:
|
|||
params = dict(grant_type='exchange_code',
|
||||
exchange_code=exchange_token,
|
||||
token_type='eg1')
|
||||
elif authorization_code:
|
||||
params = dict(grant_type='authorization_code',
|
||||
code=authorization_code,
|
||||
token_type='eg1')
|
||||
elif client_credentials:
|
||||
params = dict(grant_type='client_credentials',
|
||||
token_type='eg1')
|
||||
else:
|
||||
raise ValueError('At least one token type must be specified!')
|
||||
|
||||
r = self.session.post(f'https://{self._oauth_host}/account/api/oauth/token',
|
||||
data=params, auth=self._oauth_basic)
|
||||
data=params, auth=self._oauth_basic,
|
||||
timeout=self.request_timeout)
|
||||
# Only raise HTTP exceptions on server errors
|
||||
if r.status_code >= 500:
|
||||
r.raise_for_status()
|
||||
|
||||
j = r.json()
|
||||
if 'error' in j:
|
||||
self.log.warning(f'Login to EGS API failed with errorCode: {j["errorCode"]}')
|
||||
if 'errorCode' in j:
|
||||
if j['errorCode'] == 'errors.com.epicgames.oauth.corrective_action_required':
|
||||
self.log.error(f'{j["errorMessage"]} ({j["correctiveAction"]}), '
|
||||
f'open the following URL to take action: {j["continuationUrl"]}')
|
||||
else:
|
||||
self.log.error(f'Login to EGS API failed with errorCode: {j["errorCode"]}')
|
||||
raise InvalidCredentialsError(j['errorCode'])
|
||||
elif r.status_code >= 400:
|
||||
self.log.error(f'EGS API responded with status {r.status_code} but no error in response: {j}')
|
||||
raise InvalidCredentialsError('Unknown error')
|
||||
|
||||
self.user = j
|
||||
self.session.headers['Authorization'] = f'bearer {self.user["access_token"]}'
|
||||
return self.user
|
||||
self.session.headers['Authorization'] = f'bearer {j["access_token"]}'
|
||||
# only set user info when using non-anonymous login
|
||||
if not client_credentials:
|
||||
self.user = j
|
||||
|
||||
return j
|
||||
|
||||
def invalidate_session(self): # unused
|
||||
r = self.session.delete(f'https://{self._oauth_host}/account/api/oauth/sessions/kill/{self.access_token}')
|
||||
_ = self.session.delete(f'https://{self._oauth_host}/account/api/oauth/sessions/kill/{self.access_token}',
|
||||
timeout=self.request_timeout)
|
||||
|
||||
def get_game_token(self):
|
||||
r = self.session.get(f'https://{self._oauth_host}/account/api/oauth/exchange')
|
||||
r = self.session.get(f'https://{self._oauth_host}/account/api/oauth/exchange',
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
|
@ -92,41 +153,90 @@ class EPCAPI:
|
|||
user_id = self.user.get('account_id')
|
||||
r = self.session.post(f'https://{self._ecommerce_host}/ecommerceintegration/api/public/'
|
||||
f'platforms/EPIC/identities/{user_id}/ownershipToken',
|
||||
data=dict(nsCatalogItemId=f'{namespace}:{catalog_item_id}'))
|
||||
data=dict(nsCatalogItemId=f'{namespace}:{catalog_item_id}'),
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.content
|
||||
|
||||
def get_external_auths(self):
|
||||
user_id = self.user.get('account_id')
|
||||
r = self.session.get(f'https://{self._oauth_host}/account/api/public/account/{user_id}/externalAuths',
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def get_game_assets(self, platform='Windows', label='Live'):
|
||||
r = self.session.get(f'https://{self._launcher_host}/launcher/api/public/assets/{platform}',
|
||||
params=dict(label=label))
|
||||
params=dict(label=label), timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def get_game_manifest(self, namespace, catalog_item_id, app_name, platform='Windows', label='Live'):
|
||||
r = self.session.get(f'https://{self._launcher_host}/launcher/api/public/assets/v2/platform'
|
||||
f'/{platform}/namespace/{namespace}/catalogItem/{catalog_item_id}/app'
|
||||
f'/{app_name}/label/{label}')
|
||||
f'/{app_name}/label/{label}',
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def get_user_entitlements(self):
|
||||
def get_launcher_manifests(self, platform='Windows', label=None):
|
||||
r = self.session.get(f'https://{self._launcher_host}/launcher/api/public/assets/v2/platform/'
|
||||
f'{platform}/launcher', timeout=self.request_timeout,
|
||||
params=dict(label=label if label else self._label))
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def get_user_entitlements(self, start=0):
|
||||
user_id = self.user.get('account_id')
|
||||
r = self.session.get(f'https://{self._entitlements_host}/entitlement/api/account/{user_id}/entitlements',
|
||||
params=dict(start=0, count=5000))
|
||||
params=dict(start=start, count=1000), timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def get_game_info(self, namespace, catalog_item_id):
|
||||
def get_user_entitlements_full(self):
|
||||
ret = []
|
||||
|
||||
while True:
|
||||
resp = self.get_user_entitlements(start=len(ret))
|
||||
ret.extend(resp)
|
||||
if len(resp) < 1000:
|
||||
break
|
||||
|
||||
return ret
|
||||
|
||||
def get_game_info(self, namespace, catalog_item_id, timeout=None):
|
||||
r = self.session.get(f'https://{self._catalog_host}/catalog/api/shared/namespace/{namespace}/bulk/items',
|
||||
params=dict(id=catalog_item_id, includeDLCDetails=True, includeMainGameDetails=True,
|
||||
country=self.country_code, locale=self.language_code))
|
||||
country=self.country_code, locale=self.language_code),
|
||||
timeout=timeout or self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json().get(catalog_item_id, None)
|
||||
|
||||
def get_artifact_service_ticket(self, sandbox_id: str, artifact_id: str, label='Live', platform='Windows'):
|
||||
# Based on EOS Helper Windows service implementation. Only works with anonymous EOSH session.
|
||||
# sandbox_id is the same as the namespace, artifact_id is the same as the app name
|
||||
r = self.session.post(f'https://{self._artifact_service_host}/artifact-service/api/public/v1/dependency/'
|
||||
f'sandbox/{sandbox_id}/artifact/{artifact_id}/ticket',
|
||||
json=dict(label=label, expiresInSeconds=300, platform=platform),
|
||||
params=dict(useSandboxAwareLabel='false'),
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def get_game_manifest_by_ticket(self, artifact_id: str, signed_ticket: str, label='Live', platform='Windows'):
|
||||
# Based on EOS Helper Windows service implementation.
|
||||
r = self.session.post(f'https://{self._launcher_host}/launcher/api/public/assets/v2/'
|
||||
f'by-ticket/app/{artifact_id}',
|
||||
json=dict(platform=platform, label=label, signedTicket=signed_ticket),
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def get_library_items(self, include_metadata=True):
|
||||
records = []
|
||||
r = self.session.get(f'https://{self._library_host}/library/api/public/items',
|
||||
params=dict(includeMetadata=include_metadata))
|
||||
params=dict(includeMetadata=include_metadata),
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
j = r.json()
|
||||
records.extend(j['records'])
|
||||
|
@ -134,7 +244,8 @@ class EPCAPI:
|
|||
# Fetch remaining library entries as long as there is a cursor
|
||||
while cursor := j['responseMetadata'].get('nextCursor', None):
|
||||
r = self.session.get(f'https://{self._library_host}/library/api/public/items',
|
||||
params=dict(includeMetadata=include_metadata, cursor=cursor))
|
||||
params=dict(includeMetadata=include_metadata, cursor=cursor),
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
j = r.json()
|
||||
records.extend(j['records'])
|
||||
|
@ -142,26 +253,60 @@ class EPCAPI:
|
|||
return records
|
||||
|
||||
def get_user_cloud_saves(self, app_name='', manifests=False, filenames=None):
|
||||
if app_name and manifests:
|
||||
app_name += '/manifests/'
|
||||
elif app_name:
|
||||
app_name += '/'
|
||||
if app_name:
|
||||
app_name += '/manifests/' if manifests else '/'
|
||||
|
||||
user_id = self.user.get('account_id')
|
||||
|
||||
if filenames:
|
||||
r = self.session.post(f'https://{self._datastorage_host}/api/v1/access/egstore/savesync/'
|
||||
f'{user_id}/{app_name}', json=dict(files=filenames))
|
||||
f'{user_id}/{app_name}',
|
||||
json=dict(files=filenames),
|
||||
timeout=self.request_timeout)
|
||||
else:
|
||||
r = self.session.get(f'https://{self._datastorage_host}/api/v1/access/egstore/savesync/'
|
||||
f'{user_id}/{app_name}')
|
||||
f'{user_id}/{app_name}',
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
|
||||
def create_game_cloud_saves(self, app_name, filenames):
|
||||
return self.get_user_cloud_saves(app_name, filenames=filenames)
|
||||
|
||||
def delete_game_cloud_save_file(self, path):
|
||||
url = f'https://{self._datastorage_host}/api/v1/data/egstore/{path}'
|
||||
r = self.session.delete(url)
|
||||
r = self.session.delete(url, timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
|
||||
def store_get_uplay_codes(self):
|
||||
user_id = self.user.get('account_id')
|
||||
r = self.session.post(f'https://{self._store_gql_host}/graphql',
|
||||
headers={'user-agent': self._store_user_agent},
|
||||
json=dict(query=uplay_codes_query,
|
||||
variables=dict(accountId=user_id)),
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def store_claim_uplay_code(self, uplay_id, game_id):
|
||||
user_id = self.user.get('account_id')
|
||||
r = self.session.post(f'https://{self._store_gql_host}/graphql',
|
||||
headers={'user-agent': self._store_user_agent},
|
||||
json=dict(query=uplay_claim_query,
|
||||
variables=dict(accountId=user_id,
|
||||
uplayAccountId=uplay_id,
|
||||
gameId=game_id)),
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def store_redeem_uplay_codes(self, uplay_id):
|
||||
user_id = self.user.get('account_id')
|
||||
r = self.session.post(f'https://{self._store_gql_host}/graphql',
|
||||
headers={'user-agent': self._store_user_agent},
|
||||
json=dict(query=uplay_redeem_query,
|
||||
variables=dict(accountId=user_id,
|
||||
uplayAccountId=uplay_id)),
|
||||
timeout=self.request_timeout)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
|
30
legendary/api/lgd.py
Normal file
30
legendary/api/lgd.py
Normal file
|
@ -0,0 +1,30 @@
|
|||
# !/usr/bin/env python
|
||||
# coding: utf-8
|
||||
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from platform import system
|
||||
from legendary import __version__
|
||||
|
||||
|
||||
class LGDAPI:
|
||||
_user_agent = f'Legendary/{__version__} ({system()})'
|
||||
_api_host = 'api.legendary.gl'
|
||||
|
||||
def __init__(self):
|
||||
self.session = requests.session()
|
||||
self.log = logging.getLogger('LGDAPI')
|
||||
self.session.headers['User-Agent'] = self._user_agent
|
||||
|
||||
def get_version_information(self):
|
||||
r = self.session.get(f'https://{self._api_host}/v1/version.json',
|
||||
timeout=10.0)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def get_sdl_config(self, app_name):
|
||||
r = self.session.get(f'https://{self._api_host}/v1/sdl/{app_name}.json',
|
||||
timeout=10.0)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
2201
legendary/cli.py
2201
legendary/cli.py
File diff suppressed because it is too large
Load diff
1366
legendary/core.py
1366
legendary/core.py
File diff suppressed because it is too large
Load diff
|
@ -22,14 +22,14 @@ from legendary.models.manifest import ManifestComparison, Manifest
|
|||
class DLManager(Process):
|
||||
def __init__(self, download_dir, base_url, cache_dir=None, status_q=None,
|
||||
max_workers=0, update_interval=1.0, dl_timeout=10, resume_file=None,
|
||||
max_shared_memory=1024 * 1024 * 1024):
|
||||
max_shared_memory=1024 * 1024 * 1024, bind_ip=None):
|
||||
super().__init__(name='DLManager')
|
||||
self.log = logging.getLogger('DLM')
|
||||
self.proc_debug = False
|
||||
|
||||
self.base_url = base_url
|
||||
self.dl_dir = download_dir
|
||||
self.cache_dir = cache_dir if cache_dir else os.path.join(download_dir, '.cache')
|
||||
self.cache_dir = cache_dir or os.path.join(download_dir, '.cache')
|
||||
|
||||
# All the queues!
|
||||
self.logging_queue = None
|
||||
|
@ -37,8 +37,11 @@ class DLManager(Process):
|
|||
self.writer_queue = None
|
||||
self.dl_result_q = None
|
||||
self.writer_result_q = None
|
||||
self.max_workers = max_workers if max_workers else min(cpu_count() * 2, 16)
|
||||
|
||||
# Worker stuff
|
||||
self.max_workers = max_workers or min(cpu_count() * 2, 16)
|
||||
self.dl_timeout = dl_timeout
|
||||
self.bind_ips = [] if not bind_ip else bind_ip.split(',')
|
||||
|
||||
# Analysis stuff
|
||||
self.analysis = None
|
||||
|
@ -113,7 +116,7 @@ class DLManager(Process):
|
|||
mismatch = 0
|
||||
completed_files = set()
|
||||
|
||||
for line in open(self.resume_file).readlines():
|
||||
for line in open(self.resume_file, encoding='utf-8').readlines():
|
||||
file_hash, _, filename = line.strip().partition(':')
|
||||
_p = os.path.join(self.dl_dir, filename)
|
||||
if not os.path.exists(_p):
|
||||
|
@ -137,6 +140,24 @@ class DLManager(Process):
|
|||
except Exception as e:
|
||||
self.log.warning(f'Reading resume file failed: {e!r}, continuing as normal...')
|
||||
|
||||
elif resume:
|
||||
# Basic check if files exist locally, put all missing files into "added"
|
||||
# This allows new SDL tags to be installed without having to do a repair as well.
|
||||
missing_files = set()
|
||||
|
||||
for fm in manifest.file_manifest_list.elements:
|
||||
if fm.filename in mc.added:
|
||||
continue
|
||||
|
||||
local_path = os.path.join(self.dl_dir, fm.filename)
|
||||
if not os.path.exists(local_path):
|
||||
missing_files.add(fm.filename)
|
||||
|
||||
self.log.info(f'Found {len(missing_files)} missing files.')
|
||||
mc.added |= missing_files
|
||||
mc.changed -= missing_files
|
||||
mc.unchanged -= missing_files
|
||||
|
||||
# Install tags are used for selective downloading, e.g. for language packs
|
||||
additional_deletion_tasks = []
|
||||
if file_install_tag is not None:
|
||||
|
@ -208,6 +229,8 @@ class DLManager(Process):
|
|||
fmlist = sorted(manifest.file_manifest_list.elements,
|
||||
key=lambda a: a.filename.lower())
|
||||
|
||||
# Create reference count for chunks and calculate additional/temporary disk size required for install
|
||||
current_tmp_size = 0
|
||||
for fm in fmlist:
|
||||
self.hash_map[fm.filename] = fm.sha_hash.hex()
|
||||
|
||||
|
@ -219,6 +242,20 @@ class DLManager(Process):
|
|||
for cp in fm.chunk_parts:
|
||||
references[cp.guid_num] += 1
|
||||
|
||||
if fm.filename in mc.added:
|
||||
# if the file was added, it just adds to the delta
|
||||
current_tmp_size += fm.file_size
|
||||
analysis_res.disk_space_delta = max(current_tmp_size, analysis_res.disk_space_delta)
|
||||
elif fm.filename in mc.changed:
|
||||
# if the file was changed, we need temporary space equal to the full size,
|
||||
# but then subtract the size of the old file as it's deleted on write completion.
|
||||
current_tmp_size += fm.file_size
|
||||
analysis_res.disk_space_delta = max(current_tmp_size, analysis_res.disk_space_delta)
|
||||
current_tmp_size -= old_manifest.file_manifest_list.get_file_by_path(fm.filename).file_size
|
||||
|
||||
# clamp to 0
|
||||
self.log.debug(f'Disk space delta: {analysis_res.disk_space_delta/1024/1024:.02f} MiB')
|
||||
|
||||
if processing_optimization:
|
||||
s_time = time.time()
|
||||
# reorder the file manifest list to group files that share many chunks
|
||||
|
@ -352,6 +389,9 @@ class DLManager(Process):
|
|||
self.tasks.extend(chunk_tasks)
|
||||
self.tasks.append(FileTask(current_file.filename, flags=TaskFlags.CLOSE_FILE))
|
||||
|
||||
if current_file.executable:
|
||||
self.tasks.append(FileTask(current_file.filename, flags=TaskFlags.MAKE_EXECUTABLE))
|
||||
|
||||
# check if runtime cache size has changed
|
||||
if current_cache_size > last_cache_size:
|
||||
self.log.debug(f' * New maximum cache size: {current_cache_size / 1024 / 1024:.02f} MiB')
|
||||
|
@ -527,8 +567,8 @@ class DLManager(Process):
|
|||
|
||||
file_hash = self.hash_map[res.filename]
|
||||
# write last completed file to super simple resume file
|
||||
with open(self.resume_file, 'ab') as rf:
|
||||
rf.write(f'{file_hash}:{res.filename}\n'.encode('utf-8'))
|
||||
with open(self.resume_file, 'a', encoding='utf-8') as rf:
|
||||
rf.write(f'{file_hash}:{res.filename}\n')
|
||||
|
||||
if not res.success:
|
||||
# todo make this kill the installation process or at least skip the file and mark it as failed
|
||||
|
@ -582,6 +622,12 @@ class DLManager(Process):
|
|||
if t.is_alive():
|
||||
self.log.warning(f'Thread did not terminate! {repr(t)}')
|
||||
|
||||
# forcibly kill DL workers that are not actually dead yet
|
||||
for child in self.children:
|
||||
child.join(timeout=5.0)
|
||||
if child.exitcode is None:
|
||||
child.terminate()
|
||||
|
||||
# clean up all the queues, otherwise this process won't terminate properly
|
||||
for name, q in zip(('Download jobs', 'Writer jobs', 'Download results', 'Writer results'),
|
||||
(self.dl_worker_queue, self.writer_queue, self.dl_result_q, self.writer_result_q)):
|
||||
|
@ -612,10 +658,15 @@ class DLManager(Process):
|
|||
self.writer_result_q = MPQueue(-1)
|
||||
|
||||
self.log.info(f'Starting download workers...')
|
||||
|
||||
bind_ip = None
|
||||
for i in range(self.max_workers):
|
||||
if self.bind_ips:
|
||||
bind_ip = self.bind_ips[i % len(self.bind_ips)]
|
||||
|
||||
w = DLWorker(f'DLWorker {i + 1}', self.dl_worker_queue, self.dl_result_q,
|
||||
self.shared_memory.name, logging_queue=self.logging_queue,
|
||||
dl_timeout=self.dl_timeout)
|
||||
dl_timeout=self.dl_timeout, bind_addr=bind_ip)
|
||||
self.children.append(w)
|
||||
w.start()
|
||||
|
||||
|
@ -685,13 +736,13 @@ class DLManager(Process):
|
|||
total_used = (num_shared_memory_segments - total_avail) * (self.analysis.biggest_chunk / 1024 / 1024)
|
||||
|
||||
if runtime and processed_chunks:
|
||||
rt_hours, runtime = int(runtime // 3600), runtime % 3600
|
||||
rt_minutes, rt_seconds = int(runtime // 60), int(runtime % 60)
|
||||
|
||||
average_speed = processed_chunks / runtime
|
||||
estimate = (num_chunk_tasks - processed_chunks) / average_speed
|
||||
hours, estimate = int(estimate // 3600), estimate % 3600
|
||||
minutes, seconds = int(estimate // 60), int(estimate % 60)
|
||||
|
||||
rt_hours, runtime = int(runtime // 3600), runtime % 3600
|
||||
rt_minutes, rt_seconds = int(runtime // 60), int(runtime % 60)
|
||||
else:
|
||||
hours = minutes = seconds = 0
|
||||
rt_hours = rt_minutes = rt_seconds = 0
|
||||
|
@ -701,7 +752,7 @@ class DLManager(Process):
|
|||
f'ETA: {hours:02d}:{minutes:02d}:{seconds:02d}')
|
||||
self.log.info(f' - Downloaded: {total_dl / 1024 / 1024:.02f} MiB, '
|
||||
f'Written: {total_write / 1024 / 1024:.02f} MiB')
|
||||
self.log.info(f' - Cache usage: {total_used} MiB, active tasks: {self.active_tasks}')
|
||||
self.log.info(f' - Cache usage: {total_used:.02f} MiB, active tasks: {self.active_tasks}')
|
||||
self.log.info(f' + Download\t- {dl_speed / 1024 / 1024:.02f} MiB/s (raw) '
|
||||
f'/ {dl_unc_speed / 1024 / 1024:.02f} MiB/s (decompressed)')
|
||||
self.log.info(f' + Disk\t- {w_speed / 1024 / 1024:.02f} MiB/s (write) / '
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import requests
|
||||
import time
|
||||
import logging
|
||||
|
||||
|
@ -10,6 +9,9 @@ from multiprocessing import Process
|
|||
from multiprocessing.shared_memory import SharedMemory
|
||||
from queue import Empty
|
||||
|
||||
import requests
|
||||
from requests.adapters import HTTPAdapter, DEFAULT_POOLBLOCK
|
||||
|
||||
from legendary.models.chunk import Chunk
|
||||
from legendary.models.downloading import (
|
||||
DownloaderTask, DownloaderTaskResult,
|
||||
|
@ -18,9 +20,22 @@ from legendary.models.downloading import (
|
|||
)
|
||||
|
||||
|
||||
class BindingHTTPAdapter(HTTPAdapter):
|
||||
def __init__(self, addr):
|
||||
self.__attrs__.append('addr')
|
||||
self.addr = addr
|
||||
super().__init__()
|
||||
|
||||
def init_poolmanager(
|
||||
self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
|
||||
):
|
||||
pool_kwargs['source_address'] = (self.addr, 0)
|
||||
super().init_poolmanager(connections, maxsize, block, **pool_kwargs)
|
||||
|
||||
|
||||
class DLWorker(Process):
|
||||
def __init__(self, name, queue, out_queue, shm, max_retries=5,
|
||||
logging_queue=None, dl_timeout=10):
|
||||
def __init__(self, name, queue, out_queue, shm, max_retries=7,
|
||||
logging_queue=None, dl_timeout=10, bind_addr=None):
|
||||
super().__init__(name=name)
|
||||
self.q = queue
|
||||
self.o_q = out_queue
|
||||
|
@ -34,6 +49,12 @@ class DLWorker(Process):
|
|||
self.logging_queue = logging_queue
|
||||
self.dl_timeout = float(dl_timeout) if dl_timeout else 10.0
|
||||
|
||||
# optionally bind an address
|
||||
if bind_addr:
|
||||
adapter = BindingHTTPAdapter(bind_addr)
|
||||
self.session.mount('https://', adapter)
|
||||
self.session.mount('http://', adapter)
|
||||
|
||||
def run(self):
|
||||
# we have to fix up the logger before we can start
|
||||
_root = logging.getLogger()
|
||||
|
@ -51,12 +72,12 @@ class DLWorker(Process):
|
|||
empty = False
|
||||
except Empty:
|
||||
if not empty:
|
||||
logger.debug(f'Queue Empty, waiting for more...')
|
||||
logger.debug('Queue Empty, waiting for more...')
|
||||
empty = True
|
||||
continue
|
||||
|
||||
if isinstance(job, TerminateWorkerTask): # let worker die
|
||||
logger.debug(f'Worker received termination signal, shutting down...')
|
||||
logger.debug('Worker received termination signal, shutting down...')
|
||||
break
|
||||
|
||||
tries = 0
|
||||
|
@ -65,6 +86,12 @@ class DLWorker(Process):
|
|||
|
||||
try:
|
||||
while tries < self.max_retries:
|
||||
# retry once immediately, otherwise do exponential backoff
|
||||
if tries > 1:
|
||||
sleep_time = 2**(tries-1)
|
||||
logger.info(f'Sleeping {sleep_time} seconds before retrying.')
|
||||
time.sleep(sleep_time)
|
||||
|
||||
# print('Downloading', job.url)
|
||||
logger.debug(f'Downloading {job.url}')
|
||||
|
||||
|
@ -93,17 +120,18 @@ class DLWorker(Process):
|
|||
break
|
||||
|
||||
if not chunk:
|
||||
logger.warning(f'Chunk somehow None?')
|
||||
logger.warning('Chunk somehow None?')
|
||||
self.o_q.put(DownloaderTaskResult(success=False, **job.__dict__))
|
||||
continue
|
||||
|
||||
# decompress stuff
|
||||
try:
|
||||
size = len(chunk.data)
|
||||
data = chunk.data
|
||||
size = len(data)
|
||||
if size > job.shm.size:
|
||||
logger.fatal(f'Downloaded chunk is longer than SharedMemorySegment!')
|
||||
logger.fatal('Downloaded chunk is longer than SharedMemorySegment!')
|
||||
|
||||
self.shm.buf[job.shm.offset:job.shm.offset + size] = bytes(chunk.data)
|
||||
self.shm.buf[job.shm.offset:job.shm.offset + size] = data
|
||||
del chunk
|
||||
self.o_q.put(DownloaderTaskResult(success=True, size_decompressed=size,
|
||||
size_downloaded=compressed, **job.__dict__))
|
||||
|
@ -124,7 +152,7 @@ class FileWorker(Process):
|
|||
self.q = queue
|
||||
self.o_q = out_queue
|
||||
self.base_path = base_path
|
||||
self.cache_path = cache_path if cache_path else os.path.join(base_path, '.cache')
|
||||
self.cache_path = cache_path or os.path.join(base_path, '.cache')
|
||||
self.shm = SharedMemory(name=shm)
|
||||
self.log_level = logging.getLogger().level
|
||||
self.logging_queue = logging_queue
|
||||
|
@ -137,7 +165,7 @@ class FileWorker(Process):
|
|||
|
||||
logger = logging.getLogger(self.name)
|
||||
logger.setLevel(self.log_level)
|
||||
logger.debug(f'Download worker reporting for duty!')
|
||||
logger.debug('Download worker reporting for duty!')
|
||||
|
||||
last_filename = ''
|
||||
current_file = None
|
||||
|
@ -153,7 +181,7 @@ class FileWorker(Process):
|
|||
if isinstance(j, TerminateWorkerTask):
|
||||
if current_file:
|
||||
current_file.close()
|
||||
logger.debug(f'Worker received termination signal, shutting down...')
|
||||
logger.debug('Worker received termination signal, shutting down...')
|
||||
# send termination task to results halnder as well
|
||||
self.o_q.put(TerminateWorkerTask())
|
||||
break
|
||||
|
@ -224,12 +252,27 @@ class FileWorker(Process):
|
|||
|
||||
self.o_q.put(WriterTaskResult(success=True, **j.__dict__))
|
||||
continue
|
||||
elif j.flags & TaskFlags.MAKE_EXECUTABLE:
|
||||
if current_file:
|
||||
logger.warning('Trying to chmod file without closing first!')
|
||||
current_file.close()
|
||||
current_file = None
|
||||
|
||||
try:
|
||||
st = os.stat(full_path)
|
||||
os.chmod(full_path, st.st_mode | 0o111)
|
||||
except OSError as e:
|
||||
if not j.flags & TaskFlags.SILENT:
|
||||
logger.error(f'chmod\'ing file failed: {e!r}')
|
||||
|
||||
self.o_q.put(WriterTaskResult(success=True, **j.__dict__))
|
||||
continue
|
||||
|
||||
try:
|
||||
if j.shared_memory:
|
||||
shm_offset = j.shared_memory.offset + j.chunk_offset
|
||||
shm_end = shm_offset + j.chunk_size
|
||||
current_file.write(self.shm.buf[shm_offset:shm_end].tobytes())
|
||||
current_file.write(self.shm.buf[shm_offset:shm_end])
|
||||
elif j.cache_file:
|
||||
with open(os.path.join(self.cache_path, j.cache_file), 'rb') as f:
|
||||
if j.chunk_offset:
|
||||
|
|
62
legendary/lfs/crossover.py
Normal file
62
legendary/lfs/crossover.py
Normal file
|
@ -0,0 +1,62 @@
|
|||
import logging
|
||||
import plistlib
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
_logger = logging.getLogger('CXHelpers')
|
||||
|
||||
|
||||
def mac_get_crossover_version(app_path):
|
||||
try:
|
||||
plist = plistlib.load(open(os.path.join(app_path, 'Contents', 'Info.plist'), 'rb'))
|
||||
return plist['CFBundleShortVersionString']
|
||||
except Exception as e:
|
||||
_logger.debug(f'Failed to load plist for "{app_path}" with {e!r}')
|
||||
return None
|
||||
|
||||
|
||||
def mac_find_crossover_apps():
|
||||
paths = ['/Applications/CrossOver.app']
|
||||
try:
|
||||
out = subprocess.check_output(['mdfind', 'kMDItemCFBundleIdentifier="com.codeweavers.CrossOver"'])
|
||||
paths.extend(out.decode('utf-8', 'replace').strip().split('\n'))
|
||||
except Exception as e:
|
||||
_logger.warning(f'Trying to find CrossOver installs via mdfind failed: {e!r}')
|
||||
|
||||
valid = [p for p in paths if os.path.exists(os.path.join(p, 'Contents', 'Info.plist'))]
|
||||
found_tuples = set()
|
||||
|
||||
for path in valid:
|
||||
version = mac_get_crossover_version(path)
|
||||
if not version:
|
||||
continue
|
||||
_logger.debug(f'Found Crossover {version} at "{path}"')
|
||||
found_tuples.add((version, path))
|
||||
|
||||
return sorted(found_tuples, reverse=True)
|
||||
|
||||
|
||||
def mac_get_crossover_bottles():
|
||||
bottles_path = os.path.expanduser('~/Library/Application Support/CrossOver/Bottles')
|
||||
if not os.path.exists(bottles_path):
|
||||
return []
|
||||
return sorted(p for p in os.listdir(bottles_path) if mac_is_valid_bottle(p))
|
||||
|
||||
|
||||
def mac_is_valid_bottle(bottle_name):
|
||||
bottles_path = os.path.expanduser('~/Library/Application Support/CrossOver/Bottles')
|
||||
return os.path.exists(os.path.join(bottles_path, bottle_name, 'cxbottle.conf'))
|
||||
|
||||
|
||||
def mac_get_bottle_path(bottle_name):
|
||||
bottles_path = os.path.expanduser('~/Library/Application Support/CrossOver/Bottles')
|
||||
return os.path.join(bottles_path, bottle_name)
|
||||
|
||||
|
||||
def mac_is_crossover_running():
|
||||
try:
|
||||
out = subprocess.check_output(['launchctl', 'list'])
|
||||
return b'com.codeweavers.CrossOver.' in out
|
||||
except Exception as e:
|
||||
_logger.warning(f'Getting list of running application bundles failed: {e!r}')
|
||||
return True # assume the worst
|
|
@ -10,6 +10,10 @@ from legendary.models.egl import EGLManifest
|
|||
|
||||
|
||||
class EPCLFS:
|
||||
# Known encryption key(s) for JSON user data
|
||||
# Data is encrypted using AES-256-ECB mode
|
||||
data_keys = []
|
||||
|
||||
def __init__(self):
|
||||
if os.name == 'nt':
|
||||
self.appdata_path = os.path.expandvars(
|
||||
|
@ -30,22 +34,32 @@ class EPCLFS:
|
|||
if not self.appdata_path:
|
||||
raise ValueError('EGS AppData path is not set')
|
||||
|
||||
self.config.read(os.path.join(self.appdata_path, 'GameUserSettings.ini'))
|
||||
if not os.path.isdir(self.appdata_path):
|
||||
raise ValueError('EGS AppData path does not exist')
|
||||
|
||||
self.config.read(os.path.join(self.appdata_path, 'GameUserSettings.ini'), encoding='utf-8')
|
||||
|
||||
def save_config(self):
|
||||
if not self.appdata_path:
|
||||
raise ValueError('EGS AppData path is not set')
|
||||
|
||||
with open(os.path.join(self.appdata_path, 'GameUserSettings.ini'), 'w') as f:
|
||||
if not os.path.isdir(self.appdata_path):
|
||||
raise ValueError('EGS AppData path does not exist')
|
||||
|
||||
with open(os.path.join(self.appdata_path, 'GameUserSettings.ini'), 'w', encoding='utf-8') as f:
|
||||
self.config.write(f, space_around_delimiters=False)
|
||||
|
||||
def read_manifests(self):
|
||||
if not self.programdata_path:
|
||||
raise ValueError('EGS ProgramData path is not set')
|
||||
|
||||
if not os.path.isdir(self.programdata_path):
|
||||
# Not sure if we should `raise` here as well
|
||||
return
|
||||
|
||||
for f in os.listdir(self.programdata_path):
|
||||
if f.endswith('.item'):
|
||||
data = json.load(open(os.path.join(self.programdata_path, f)))
|
||||
data = json.load(open(os.path.join(self.programdata_path, f), encoding='utf-8'))
|
||||
self.manifests[data['AppName']] = data
|
||||
|
||||
def get_manifests(self) -> List[EGLManifest]:
|
||||
|
@ -67,9 +81,13 @@ class EPCLFS:
|
|||
if not self.programdata_path:
|
||||
raise ValueError('EGS ProgramData path is not set')
|
||||
|
||||
if not os.path.isdir(self.programdata_path):
|
||||
raise ValueError('EGS ProgramData path does not exist')
|
||||
|
||||
manifest_data = manifest.to_json()
|
||||
self.manifests[manifest.app_name] = manifest_data
|
||||
with open(os.path.join(self.programdata_path, f'{manifest.installation_guid}.item'), 'w') as f:
|
||||
_path = os.path.join(self.programdata_path, f'{manifest.installation_guid}.item')
|
||||
with open(_path, 'w', encoding='utf-8') as f:
|
||||
json.dump(manifest_data, f, indent=4, sort_keys=True)
|
||||
|
||||
def delete_manifest(self, app_name):
|
||||
|
|
147
legendary/lfs/eos.py
Normal file
147
legendary/lfs/eos.py
Normal file
|
@ -0,0 +1,147 @@
|
|||
import os
|
||||
import logging
|
||||
|
||||
from legendary.models.game import Game
|
||||
|
||||
if os.name == 'nt':
|
||||
from legendary.lfs.windows_helpers import *
|
||||
|
||||
logger = logging.getLogger('EOSUtils')
|
||||
# Dummy Game objects to use with Core methods that expect them
|
||||
# Overlay
|
||||
EOSOverlayApp = Game(app_name='98bc04bc842e4906993fd6d6644ffb8d',
|
||||
app_title='Epic Online Services Overlay',
|
||||
metadata=dict(namespace='302e5ede476149b1bc3e4fe6ae45e50e',
|
||||
id='cc15684f44d849e89e9bf4cec0508b68'))
|
||||
# EOS Windows service
|
||||
EOSHApp = Game(app_name='c9e2eb9993a1496c99dc529b49a07339',
|
||||
app_title='Epic Online Services Helper (EOSH)',
|
||||
metadata=dict(namespace='302e5ede476149b1bc3e4fe6ae45e50e',
|
||||
id='1108a9c0af47438da91331753b22ea21'))
|
||||
|
||||
EOS_OVERLAY_KEY = r'SOFTWARE\Epic Games\EOS'
|
||||
WINE_EOS_OVERLAY_KEY = EOS_OVERLAY_KEY.replace('\\', '\\\\')
|
||||
EOS_OVERLAY_VALUE = 'OverlayPath'
|
||||
VULKAN_OVERLAY_KEY = r'SOFTWARE\Khronos\Vulkan\ImplicitLayers'
|
||||
|
||||
|
||||
def query_registry_entries(prefix=None):
|
||||
if os.name == 'nt':
|
||||
# Overlay location for the EOS SDK to load
|
||||
overlay_path = query_registry_value(HKEY_CURRENT_USER, EOS_OVERLAY_KEY, EOS_OVERLAY_VALUE)
|
||||
# Vulkan Layers
|
||||
# HKCU
|
||||
vulkan_hkcu = [i[0] for i in
|
||||
list_registry_values(HKEY_CURRENT_USER, VULKAN_OVERLAY_KEY)
|
||||
if 'EOS' in i[0]]
|
||||
# HKLM 64 & 32 bit
|
||||
vulkan_hklm = [i[0] for i in
|
||||
list_registry_values(HKEY_LOCAL_MACHINE, VULKAN_OVERLAY_KEY)
|
||||
if 'EOS' in i[0]]
|
||||
vulkan_hklm += [i[0] for i in
|
||||
list_registry_values(HKEY_LOCAL_MACHINE, VULKAN_OVERLAY_KEY, use_32bit_view=True)
|
||||
if 'EOS' in i[0]]
|
||||
|
||||
return dict(overlay_path=overlay_path,
|
||||
vulkan_hkcu=vulkan_hkcu,
|
||||
vulkan_hklm=vulkan_hklm)
|
||||
elif prefix:
|
||||
# Only read HKCU since we don't really care for the Vulkan stuff (doesn't work in WINE)
|
||||
use_reg_file = os.path.join(prefix, 'user.reg')
|
||||
if not os.path.exists(use_reg_file):
|
||||
raise ValueError('No user.reg file, invalid path')
|
||||
|
||||
reg_lines = open(use_reg_file, 'r', encoding='utf-8').readlines()
|
||||
for line in reg_lines:
|
||||
if EOS_OVERLAY_VALUE in line:
|
||||
overlay_path = line.partition('=')[2].strip().strip('"')
|
||||
break
|
||||
else:
|
||||
overlay_path = None
|
||||
|
||||
if overlay_path:
|
||||
if overlay_path.startswith('C:'):
|
||||
overlay_path = os.path.join(prefix, 'drive_c', overlay_path[3:])
|
||||
elif overlay_path.startswith('Z:'):
|
||||
overlay_path = overlay_path[2:]
|
||||
|
||||
return dict(overlay_path=overlay_path,
|
||||
vulkan_hkcu=list(),
|
||||
vulkan_hklm=list())
|
||||
else:
|
||||
raise ValueError('No prefix specified on non-Windows platform')
|
||||
|
||||
|
||||
def add_registry_entries(overlay_path, prefix=None):
|
||||
if os.name == 'nt':
|
||||
logger.debug(f'Settings HKCU EOS Overlay Path: {overlay_path}')
|
||||
set_registry_value(HKEY_CURRENT_USER, EOS_OVERLAY_KEY, EOS_OVERLAY_VALUE,
|
||||
overlay_path.replace('\\', '/'), TYPE_STRING)
|
||||
vk_32_path = os.path.join(overlay_path, 'EOSOverlayVkLayer-Win32.json').replace('/', '\\')
|
||||
vk_64_path = os.path.join(overlay_path, 'EOSOverlayVkLayer-Win64.json').replace('/', '\\')
|
||||
# the launcher only sets those in HKCU, th e service sets them in HKLM,
|
||||
# but it's not in use yet, so just do HKCU for now
|
||||
logger.debug(f'Settings HKCU 32-bit Vulkan Layer: {vk_32_path}')
|
||||
set_registry_value(HKEY_CURRENT_USER, VULKAN_OVERLAY_KEY, vk_32_path, 0, TYPE_DWORD)
|
||||
logger.debug(f'Settings HKCU 64-bit Vulkan Layer: {vk_32_path}')
|
||||
set_registry_value(HKEY_CURRENT_USER, VULKAN_OVERLAY_KEY, vk_64_path, 0, TYPE_DWORD)
|
||||
elif prefix:
|
||||
# Again only care for HKCU OverlayPath because Windows Vulkan layers don't work anyway
|
||||
use_reg_file = os.path.join(prefix, 'user.reg')
|
||||
if not os.path.exists(use_reg_file):
|
||||
raise ValueError('No user.reg file, invalid path')
|
||||
|
||||
reg_lines = open(use_reg_file, 'r', encoding='utf-8').readlines()
|
||||
|
||||
overlay_path = overlay_path.replace('\\', '/')
|
||||
if overlay_path.startswith('/'):
|
||||
overlay_path = f'Z:{overlay_path}'
|
||||
|
||||
overlay_line = f'"{EOS_OVERLAY_VALUE}"="{overlay_path}"\n'
|
||||
overlay_idx = None
|
||||
section_idx = None
|
||||
|
||||
for idx, line in enumerate(reg_lines):
|
||||
if EOS_OVERLAY_VALUE in line:
|
||||
reg_lines[idx] = overlay_line
|
||||
break
|
||||
elif WINE_EOS_OVERLAY_KEY in line:
|
||||
section_idx = idx
|
||||
else:
|
||||
if section_idx:
|
||||
reg_lines.insert(section_idx + 1, overlay_line)
|
||||
else:
|
||||
reg_lines.append(f'[{WINE_EOS_OVERLAY_KEY}]\n')
|
||||
reg_lines.append(overlay_line)
|
||||
|
||||
open(use_reg_file, 'w', encoding='utf-8').writelines(reg_lines)
|
||||
else:
|
||||
raise ValueError('No prefix specified on non-Windows platform')
|
||||
|
||||
|
||||
def remove_registry_entries(prefix=None):
|
||||
entries = query_registry_entries(prefix)
|
||||
|
||||
if os.name == 'nt':
|
||||
if entries['overlay_path']:
|
||||
logger.debug('Removing HKCU EOS OverlayPath')
|
||||
remove_registry_value(HKEY_CURRENT_USER, EOS_OVERLAY_KEY, EOS_OVERLAY_VALUE)
|
||||
for value in entries['vulkan_hkcu']:
|
||||
logger.debug(f'Removing HKCU Vulkan Layer: {value}')
|
||||
remove_registry_value(HKEY_CURRENT_USER, VULKAN_OVERLAY_KEY, value)
|
||||
for value in entries['vulkan_hklm']:
|
||||
logger.debug(f'Removing HKLM Vulkan Layer: {value}')
|
||||
remove_registry_value(HKEY_LOCAL_MACHINE, VULKAN_OVERLAY_KEY, value)
|
||||
remove_registry_value(HKEY_LOCAL_MACHINE, VULKAN_OVERLAY_KEY, value, use_32bit_view=True)
|
||||
elif prefix:
|
||||
# Same as above, only HKCU.
|
||||
use_reg_file = os.path.join(prefix, 'user.reg')
|
||||
if not os.path.exists(use_reg_file):
|
||||
raise ValueError('No user.reg file, invalid path')
|
||||
|
||||
if entries['overlay_path']:
|
||||
reg_lines = open(use_reg_file, 'r', encoding='utf-8').readlines()
|
||||
filtered_lines = [line for line in reg_lines if EOS_OVERLAY_VALUE not in line]
|
||||
open(use_reg_file, 'w', encoding='utf-8').writelines(filtered_lines)
|
||||
else:
|
||||
raise ValueError('No prefix specified on non-Windows platform')
|
|
@ -4,18 +4,31 @@ import json
|
|||
import os
|
||||
import logging
|
||||
|
||||
from contextlib import contextmanager
|
||||
from collections import defaultdict
|
||||
from pathlib import Path
|
||||
from time import time
|
||||
|
||||
from filelock import FileLock
|
||||
|
||||
from .utils import clean_filename, LockedJSONData
|
||||
|
||||
from legendary.models.game import *
|
||||
from legendary.utils.config import LGDConf
|
||||
from legendary.utils.lfs import clean_filename
|
||||
from legendary.utils.aliasing import generate_aliases
|
||||
from legendary.models.config import LGDConf
|
||||
from legendary.utils.env import is_windows_mac_or_pyi
|
||||
|
||||
|
||||
FILELOCK_DEBUG = False
|
||||
|
||||
|
||||
class LGDLFS:
|
||||
def __init__(self):
|
||||
def __init__(self, config_file=None):
|
||||
self.log = logging.getLogger('LGDLFS')
|
||||
|
||||
if config_path := os.environ.get('XDG_CONFIG_HOME'):
|
||||
if config_path := os.environ.get('LEGENDARY_CONFIG_PATH'):
|
||||
self.path = config_path
|
||||
elif config_path := os.environ.get('XDG_CONFIG_HOME'):
|
||||
self.path = os.path.join(config_path, 'legendary')
|
||||
else:
|
||||
self.path = os.path.expanduser('~/.config/legendary')
|
||||
|
@ -28,9 +41,24 @@ class LGDLFS:
|
|||
self._assets = None
|
||||
# EGS metadata
|
||||
self._game_metadata = dict()
|
||||
# Legendary update check info
|
||||
self._update_info = None
|
||||
# EOS Overlay install/update check info
|
||||
self._overlay_update_info = None
|
||||
self._overlay_install_info = None
|
||||
# Config with game specific settings (e.g. start parameters, env variables)
|
||||
self.config = LGDConf(comment_prefixes='/', allow_no_value=True)
|
||||
self.config.optionxform = str
|
||||
|
||||
if config_file:
|
||||
# if user specified a valid relative/absolute path use that,
|
||||
# otherwise create file in legendary config directory
|
||||
if os.path.exists(config_file):
|
||||
self.config_path = os.path.abspath(config_file)
|
||||
else:
|
||||
self.config_path = os.path.join(self.path, clean_filename(config_file))
|
||||
self.log.info(f'Using non-default config file "{self.config_path}"')
|
||||
else:
|
||||
self.config_path = os.path.join(self.path, 'config.ini')
|
||||
|
||||
# ensure folders exist.
|
||||
for f in ['', 'manifests', 'metadata', 'tmp']:
|
||||
|
@ -64,22 +92,38 @@ class LGDLFS:
|
|||
self.log.warning(f'Removing "{os.path.join(self.path, "manifests", "old")}" folder failed: '
|
||||
f'{e!r}, please remove manually')
|
||||
|
||||
if not FILELOCK_DEBUG:
|
||||
# Prevent filelock logger from spamming Legendary debug output
|
||||
filelock_logger = logging.getLogger('filelock')
|
||||
filelock_logger.setLevel(logging.INFO)
|
||||
|
||||
# try loading config
|
||||
try:
|
||||
self.config.read(os.path.join(self.path, 'config.ini'))
|
||||
self.config.read(self.config_path)
|
||||
except Exception as e:
|
||||
self.log.error(f'Unable to read configuration file, please ensure that file is valid! '
|
||||
f'(Error: {repr(e)})')
|
||||
self.log.warning(f'Continuing with blank config in safe-mode...')
|
||||
self.log.warning('Continuing with blank config in safe-mode...')
|
||||
self.config.read_only = True
|
||||
|
||||
# make sure "Legendary" section exists
|
||||
if 'Legendary' not in self.config:
|
||||
self.config['Legendary'] = dict()
|
||||
self.config.add_section('Legendary')
|
||||
|
||||
# Add opt-out options with explainers
|
||||
if not self.config.has_option('Legendary', 'disable_update_check'):
|
||||
self.config.set('Legendary', '; Disables the automatic update check')
|
||||
self.config.set('Legendary', 'disable_update_check', 'false')
|
||||
if not self.config.has_option('Legendary', 'disable_update_notice'):
|
||||
self.config.set('Legendary', '; Disables the notice about an available update on exit')
|
||||
self.config.set('Legendary', 'disable_update_notice', 'false' if is_windows_mac_or_pyi() else 'true')
|
||||
|
||||
self._installed_lock = FileLock(os.path.join(self.path, 'installed.json') + '.lock')
|
||||
|
||||
try:
|
||||
self._installed = json.load(open(os.path.join(self.path, 'installed.json')))
|
||||
except Exception as e: # todo do not do this
|
||||
except Exception as e:
|
||||
self.log.debug(f'Loading installed games failed: {e!r}')
|
||||
self._installed = None
|
||||
|
||||
# load existing app metadata
|
||||
|
@ -90,31 +134,46 @@ class LGDLFS:
|
|||
except Exception as e:
|
||||
self.log.debug(f'Loading game meta file "{gm_file}" failed: {e!r}')
|
||||
|
||||
# load auto-aliases if enabled
|
||||
self.aliases = dict()
|
||||
if not self.config.getboolean('Legendary', 'disable_auto_aliasing', fallback=False):
|
||||
try:
|
||||
_j = json.load(open(os.path.join(self.path, 'aliases.json')))
|
||||
for app_name, aliases in _j.items():
|
||||
for alias in aliases:
|
||||
self.aliases[alias] = app_name
|
||||
except Exception as e:
|
||||
self.log.debug(f'Loading aliases failed with {e!r}')
|
||||
|
||||
@property
|
||||
@contextmanager
|
||||
def userdata_lock(self) -> LockedJSONData:
|
||||
"""Wrapper around the lock to automatically update user data when it is released"""
|
||||
with LockedJSONData(os.path.join(self.path, 'user.json')) as lock:
|
||||
try:
|
||||
yield lock
|
||||
finally:
|
||||
self._user_data = lock.data
|
||||
|
||||
@property
|
||||
def userdata(self):
|
||||
if self._user_data is not None:
|
||||
return self._user_data
|
||||
|
||||
try:
|
||||
self._user_data = json.load(open(os.path.join(self.path, 'user.json')))
|
||||
return self._user_data
|
||||
with self.userdata_lock as locked:
|
||||
return locked.data
|
||||
except Exception as e:
|
||||
self.log.debug(f'Failed to load user data: {e!r}')
|
||||
return None
|
||||
|
||||
@userdata.setter
|
||||
def userdata(self, userdata):
|
||||
if userdata is None:
|
||||
raise ValueError('Userdata is none!')
|
||||
|
||||
self._user_data = userdata
|
||||
json.dump(userdata, open(os.path.join(self.path, 'user.json'), 'w'),
|
||||
indent=2, sort_keys=True)
|
||||
raise NotImplementedError('The setter has been removed, use the locked userdata instead.')
|
||||
|
||||
def invalidate_userdata(self):
|
||||
self._user_data = None
|
||||
if os.path.exists(os.path.join(self.path, 'user.json')):
|
||||
os.remove(os.path.join(self.path, 'user.json'))
|
||||
with self.userdata_lock as lock:
|
||||
lock.clear()
|
||||
|
||||
@property
|
||||
def entitlements(self):
|
||||
|
@ -141,8 +200,8 @@ class LGDLFS:
|
|||
def assets(self):
|
||||
if self._assets is None:
|
||||
try:
|
||||
self._assets = [GameAsset.from_json(a) for a in
|
||||
json.load(open(os.path.join(self.path, 'assets.json')))]
|
||||
tmp = json.load(open(os.path.join(self.path, 'assets.json')))
|
||||
self._assets = {k: [GameAsset.from_json(j) for j in v] for k, v in tmp.items()}
|
||||
except Exception as e:
|
||||
self.log.debug(f'Failed to load assets data: {e!r}')
|
||||
return None
|
||||
|
@ -155,27 +214,33 @@ class LGDLFS:
|
|||
raise ValueError('Assets is none!')
|
||||
|
||||
self._assets = assets
|
||||
json.dump([a.__dict__ for a in self._assets],
|
||||
json.dump({platform: [a.__dict__ for a in assets] for platform, assets in self._assets.items()},
|
||||
open(os.path.join(self.path, 'assets.json'), 'w'),
|
||||
indent=2, sort_keys=True)
|
||||
|
||||
def _get_manifest_filename(self, app_name, version):
|
||||
fname = clean_filename(f'{app_name}_{version}')
|
||||
def _get_manifest_filename(self, app_name, version, platform=None):
|
||||
if platform:
|
||||
fname = clean_filename(f'{app_name}_{platform}_{version}')
|
||||
else:
|
||||
fname = clean_filename(f'{app_name}_{version}')
|
||||
return os.path.join(self.path, 'manifests', f'{fname}.manifest')
|
||||
|
||||
def load_manifest(self, app_name, version):
|
||||
def load_manifest(self, app_name, version, platform='Windows'):
|
||||
try:
|
||||
return open(self._get_manifest_filename(app_name, version), 'rb').read()
|
||||
return open(self._get_manifest_filename(app_name, version, platform), 'rb').read()
|
||||
except FileNotFoundError: # all other errors should propagate
|
||||
return None
|
||||
self.log.debug(f'Loading manifest failed, retrying without platform in filename...')
|
||||
try:
|
||||
return open(self._get_manifest_filename(app_name, version), 'rb').read()
|
||||
except FileNotFoundError: # all other errors should propagate
|
||||
return None
|
||||
|
||||
def save_manifest(self, app_name, manifest_data, version):
|
||||
with open(self._get_manifest_filename(app_name, version), 'wb') as f:
|
||||
def save_manifest(self, app_name, manifest_data, version, platform='Windows'):
|
||||
with open(self._get_manifest_filename(app_name, version, platform), 'wb') as f:
|
||||
f.write(manifest_data)
|
||||
|
||||
def get_game_meta(self, app_name):
|
||||
_meta = self._game_metadata.get(app_name, None)
|
||||
if _meta:
|
||||
if _meta := self._game_metadata.get(app_name, None):
|
||||
return Game.from_json(_meta)
|
||||
return None
|
||||
|
||||
|
@ -186,14 +251,17 @@ class LGDLFS:
|
|||
json.dump(json_meta, open(meta_file, 'w'), indent=2, sort_keys=True)
|
||||
|
||||
def delete_game_meta(self, app_name):
|
||||
if app_name in self._game_metadata:
|
||||
del self._game_metadata[app_name]
|
||||
meta_file = os.path.join(self.path, 'metadata', f'{app_name}.json')
|
||||
if os.path.exists(meta_file):
|
||||
os.remove(meta_file)
|
||||
else:
|
||||
if app_name not in self._game_metadata:
|
||||
raise ValueError(f'Game {app_name} does not exist in metadata DB!')
|
||||
|
||||
del self._game_metadata[app_name]
|
||||
meta_file = os.path.join(self.path, 'metadata', f'{app_name}.json')
|
||||
if os.path.exists(meta_file):
|
||||
os.remove(meta_file)
|
||||
|
||||
def get_game_app_names(self):
|
||||
return sorted(self._game_metadata.keys())
|
||||
|
||||
def get_tmp_path(self):
|
||||
return os.path.join(self.path, 'tmp')
|
||||
|
||||
|
@ -214,7 +282,16 @@ class LGDLFS:
|
|||
self.log.warning(f'Failed to delete file "{f}": {e!r}')
|
||||
|
||||
def clean_manifests(self, in_use):
|
||||
in_use_files = set(f'{clean_filename(f"{app_name}_{version}")}.manifest' for app_name, version in in_use)
|
||||
in_use_files = {
|
||||
f'{clean_filename(f"{app_name}_{version}")}.manifest'
|
||||
for app_name, version, _ in in_use
|
||||
}
|
||||
|
||||
in_use_files |= {
|
||||
f'{clean_filename(f"{app_name}_{platform}_{version}")}.manifest'
|
||||
for app_name, version, platform in in_use
|
||||
}
|
||||
|
||||
for f in os.listdir(os.path.join(self.path, 'manifests')):
|
||||
if f not in in_use_files:
|
||||
try:
|
||||
|
@ -222,6 +299,27 @@ class LGDLFS:
|
|||
except Exception as e:
|
||||
self.log.warning(f'Failed to delete file "{f}": {e!r}')
|
||||
|
||||
def lock_installed(self) -> bool:
|
||||
"""
|
||||
Locks the install data. We do not care about releasing this lock.
|
||||
If it is acquired by a Legendary instance it should own the lock until it exits.
|
||||
Some operations such as egl sync may be simply skipped if a lock cannot be acquired
|
||||
"""
|
||||
if self._installed_lock.is_locked:
|
||||
return True
|
||||
|
||||
try:
|
||||
self._installed_lock.acquire(blocking=False)
|
||||
# reload data in case it has been updated elsewhere
|
||||
try:
|
||||
self._installed = json.load(open(os.path.join(self.path, 'installed.json')))
|
||||
except Exception as e:
|
||||
self.log.debug(f'Failed to load installed game data: {e!r}')
|
||||
|
||||
return True
|
||||
except TimeoutError:
|
||||
return False
|
||||
|
||||
def get_installed_game(self, app_name):
|
||||
if self._installed is None:
|
||||
try:
|
||||
|
@ -230,8 +328,7 @@ class LGDLFS:
|
|||
self.log.debug(f'Failed to load installed game data: {e!r}')
|
||||
return None
|
||||
|
||||
game_json = self._installed.get(app_name, None)
|
||||
if game_json:
|
||||
if game_json := self._installed.get(app_name, None):
|
||||
return InstalledGame.from_json(game_json)
|
||||
return None
|
||||
|
||||
|
@ -272,14 +369,123 @@ class LGDLFS:
|
|||
if self.config.read_only or not self.config.modified:
|
||||
return
|
||||
# if config file has been modified externally, back-up the user-modified version before writing
|
||||
if (modtime := int(os.stat(os.path.join(self.path, 'config.ini')).st_mtime)) != self.config.modtime:
|
||||
new_filename = f'config.{modtime}.ini'
|
||||
self.log.warning(f'Configuration file has been modified while legendary was running, '
|
||||
f'user-modified config will be renamed to "{new_filename}"...')
|
||||
os.rename(os.path.join(self.path, 'config.ini'), os.path.join(self.path, new_filename))
|
||||
if os.path.exists(self.config_path):
|
||||
if (modtime := int(os.stat(self.config_path).st_mtime)) != self.config.modtime:
|
||||
new_filename = f'config.{modtime}.ini'
|
||||
self.log.warning(f'Configuration file has been modified while legendary was running, '
|
||||
f'user-modified config will be renamed to "{new_filename}"...')
|
||||
os.rename(self.config_path, os.path.join(os.path.dirname(self.config_path), new_filename))
|
||||
|
||||
with open(os.path.join(self.path, 'config.ini'), 'w') as cf:
|
||||
with open(self.config_path, 'w') as cf:
|
||||
self.config.write(cf)
|
||||
|
||||
def get_dir_size(self):
|
||||
return sum(f.stat().st_size for f in Path(self.path).glob('**/*') if f.is_file())
|
||||
|
||||
def get_cached_version(self):
|
||||
if self._update_info:
|
||||
return self._update_info
|
||||
|
||||
try:
|
||||
self._update_info = json.load(open(os.path.join(self.path, 'version.json')))
|
||||
except Exception as e:
|
||||
self.log.debug(f'Failed to load cached update data: {e!r}')
|
||||
self._update_info = dict(last_update=0, data=None)
|
||||
|
||||
return self._update_info
|
||||
|
||||
def set_cached_version(self, version_data):
|
||||
if not version_data:
|
||||
return
|
||||
self._update_info = dict(last_update=time(), data=version_data)
|
||||
json.dump(self._update_info, open(os.path.join(self.path, 'version.json'), 'w'),
|
||||
indent=2, sort_keys=True)
|
||||
|
||||
def get_cached_sdl_data(self, app_name):
|
||||
try:
|
||||
return json.load(open(os.path.join(self.path, 'tmp', f'{app_name}.json')))
|
||||
except Exception as e:
|
||||
self.log.debug(f'Failed to load cached SDL data: {e!r}')
|
||||
return None
|
||||
|
||||
def set_cached_sdl_data(self, app_name, sdl_version, sdl_data):
|
||||
if not app_name or not sdl_data:
|
||||
return
|
||||
json.dump(dict(version=sdl_version, data=sdl_data),
|
||||
open(os.path.join(self.path, 'tmp', f'{app_name}.json'), 'w'),
|
||||
indent=2, sort_keys=True)
|
||||
|
||||
def get_cached_overlay_version(self):
|
||||
if self._overlay_update_info:
|
||||
return self._overlay_update_info
|
||||
|
||||
try:
|
||||
self._overlay_update_info = json.load(open(
|
||||
os.path.join(self.path, 'overlay_version.json')))
|
||||
except Exception as e:
|
||||
self.log.debug(f'Failed to load cached Overlay update data: {e!r}')
|
||||
self._overlay_update_info = dict(last_update=0, data=None)
|
||||
|
||||
return self._overlay_update_info
|
||||
|
||||
def set_cached_overlay_version(self, version_data):
|
||||
self._overlay_update_info = dict(last_update=time(), data=version_data)
|
||||
json.dump(self._overlay_update_info,
|
||||
open(os.path.join(self.path, 'overlay_version.json'), 'w'),
|
||||
indent=2, sort_keys=True)
|
||||
|
||||
def get_overlay_install_info(self):
|
||||
if not self._overlay_install_info:
|
||||
try:
|
||||
data = json.load(open(os.path.join(self.path, 'overlay_install.json')))
|
||||
self._overlay_install_info = InstalledGame.from_json(data)
|
||||
except Exception as e:
|
||||
self.log.debug(f'Failed to load overlay install data: {e!r}')
|
||||
|
||||
return self._overlay_install_info
|
||||
|
||||
def set_overlay_install_info(self, igame: InstalledGame):
|
||||
self._overlay_install_info = igame
|
||||
json.dump(vars(igame), open(os.path.join(self.path, 'overlay_install.json'), 'w'),
|
||||
indent=2, sort_keys=True)
|
||||
|
||||
def remove_overlay_install_info(self):
|
||||
try:
|
||||
self._overlay_install_info = None
|
||||
os.remove(os.path.join(self.path, 'overlay_install.json'))
|
||||
except Exception as e:
|
||||
self.log.debug(f'Failed to delete overlay install data: {e!r}')
|
||||
|
||||
def generate_aliases(self):
|
||||
self.log.debug('Generating list of aliases...')
|
||||
|
||||
self.aliases = dict()
|
||||
aliases = set()
|
||||
collisions = set()
|
||||
alias_map = defaultdict(set)
|
||||
|
||||
for app_name in self._game_metadata.keys():
|
||||
game = self.get_game_meta(app_name)
|
||||
if game.is_dlc:
|
||||
continue
|
||||
game_folder = game.metadata.get('customAttributes', {}).get('FolderName', {}).get('value', None)
|
||||
_aliases = generate_aliases(game.app_title, game_folder=game_folder, app_name=game.app_name)
|
||||
for alias in _aliases:
|
||||
if alias not in aliases:
|
||||
aliases.add(alias)
|
||||
alias_map[game.app_name].add(alias)
|
||||
else:
|
||||
collisions.add(alias)
|
||||
|
||||
# remove colliding aliases from map and add aliases to lookup table
|
||||
for app_name, aliases in alias_map.items():
|
||||
alias_map[app_name] -= collisions
|
||||
for alias in alias_map[app_name]:
|
||||
self.aliases[alias] = app_name
|
||||
|
||||
def serialise_sets(obj):
|
||||
"""Turn sets into sorted lists for storage"""
|
||||
return sorted(obj) if isinstance(obj, set) else obj
|
||||
|
||||
json.dump(alias_map, open(os.path.join(self.path, 'aliases.json'), 'w', newline='\n'),
|
||||
indent=2, sort_keys=True, default=serialise_sets)
|
||||
|
|
|
@ -3,11 +3,16 @@
|
|||
import os
|
||||
import shutil
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
|
||||
from pathlib import Path
|
||||
from sys import stdout
|
||||
from time import perf_counter
|
||||
from typing import List, Iterator
|
||||
|
||||
from filelock import FileLock
|
||||
|
||||
from legendary.models.game import VerifyResult
|
||||
|
||||
logger = logging.getLogger('LFS Utils')
|
||||
|
@ -38,7 +43,7 @@ def delete_filelist(path: str, filenames: List[str],
|
|||
_dir, _fn = os.path.split(filename)
|
||||
if _dir:
|
||||
dirs.add(_dir)
|
||||
|
||||
|
||||
try:
|
||||
os.remove(os.path.join(path, _dir, _fn))
|
||||
except Exception as e:
|
||||
|
@ -64,25 +69,27 @@ def delete_filelist(path: str, filenames: List[str],
|
|||
if not silent:
|
||||
logger.error(f'Failed removing directory "{_dir}" with {e!r}')
|
||||
no_error = False
|
||||
|
||||
|
||||
if delete_root_directory:
|
||||
try:
|
||||
os.rmdir(path)
|
||||
except Exception as e:
|
||||
if not silent:
|
||||
logger.error(f'Removing game directory failed with {e!r}')
|
||||
|
||||
|
||||
return no_error
|
||||
|
||||
|
||||
def validate_files(base_path: str, filelist: List[tuple], hash_type='sha1') -> Iterator[tuple]:
|
||||
def validate_files(base_path: str, filelist: List[tuple], hash_type='sha1',
|
||||
large_file_threshold=1024 * 1024 * 512) -> Iterator[tuple]:
|
||||
"""
|
||||
Validates the files in filelist in path against the provided hashes
|
||||
|
||||
:param base_path: path in which the files are located
|
||||
:param filelist: list of tuples in format (path, hash [hex])
|
||||
:param hash_type: (optional) type of hash, default is sha1
|
||||
:return: list of files that failed hash check
|
||||
:param large_file_threshold: (optional) threshold for large files, default is 512 MiB
|
||||
:return: yields tuples in format (VerifyResult, path, hash [hex], bytes read)
|
||||
"""
|
||||
|
||||
if not filelist:
|
||||
|
@ -96,23 +103,51 @@ def validate_files(base_path: str, filelist: List[tuple], hash_type='sha1') -> I
|
|||
# logger.debug(f'Checking "{file_path}"...')
|
||||
|
||||
if not os.path.exists(full_path):
|
||||
yield VerifyResult.FILE_MISSING, file_path, ''
|
||||
yield VerifyResult.FILE_MISSING, file_path, '', 0
|
||||
continue
|
||||
|
||||
show_progress = False
|
||||
interval = 0
|
||||
speed = 0.0
|
||||
start_time = 0.0
|
||||
|
||||
try:
|
||||
_size = os.path.getsize(full_path)
|
||||
if _size > large_file_threshold:
|
||||
# enable progress indicator and go to new line
|
||||
stdout.write('\n')
|
||||
show_progress = True
|
||||
interval = (_size / (1024 * 1024)) // 100
|
||||
start_time = perf_counter()
|
||||
|
||||
with open(full_path, 'rb') as f:
|
||||
real_file_hash = hashlib.new(hash_type)
|
||||
i = 0
|
||||
while chunk := f.read(1024*1024):
|
||||
real_file_hash.update(chunk)
|
||||
if show_progress and i % interval == 0:
|
||||
pos = f.tell()
|
||||
perc = (pos / _size) * 100
|
||||
speed = pos / 1024 / 1024 / (perf_counter() - start_time)
|
||||
stdout.write(f'\r=> Verifying large file "{file_path}": {perc:.0f}% '
|
||||
f'({pos / 1024 / 1024:.1f}/{_size / 1024 / 1024:.1f} MiB) '
|
||||
f'[{speed:.1f} MiB/s]\t')
|
||||
stdout.flush()
|
||||
i += 1
|
||||
|
||||
if show_progress:
|
||||
stdout.write(f'\r=> Verifying large file "{file_path}": 100% '
|
||||
f'({_size / 1024 / 1024:.1f}/{_size / 1024 / 1024:.1f} MiB) '
|
||||
f'[{speed:.1f} MiB/s]\t\n')
|
||||
|
||||
result_hash = real_file_hash.hexdigest()
|
||||
if file_hash != result_hash:
|
||||
yield VerifyResult.HASH_MISMATCH, file_path, result_hash
|
||||
yield VerifyResult.HASH_MISMATCH, file_path, result_hash, f.tell()
|
||||
else:
|
||||
yield VerifyResult.HASH_MATCH, file_path, result_hash
|
||||
yield VerifyResult.HASH_MATCH, file_path, result_hash, f.tell()
|
||||
except Exception as e:
|
||||
logger.fatal(f'Could not verify "{file_path}"; opening failed with: {e!r}')
|
||||
yield VerifyResult.OTHER_ERROR, file_path, ''
|
||||
yield VerifyResult.OTHER_ERROR, file_path, '', 0
|
||||
|
||||
|
||||
def clean_filename(filename):
|
||||
|
@ -121,3 +156,45 @@ def clean_filename(filename):
|
|||
|
||||
def get_dir_size(path):
|
||||
return sum(f.stat().st_size for f in Path(path).glob('**/*') if f.is_file())
|
||||
|
||||
|
||||
class LockedJSONData(FileLock):
|
||||
def __init__(self, file_path: str):
|
||||
super().__init__(file_path + '.lock')
|
||||
|
||||
self._file_path = file_path
|
||||
self._data = None
|
||||
self._initial_data = None
|
||||
|
||||
def __enter__(self):
|
||||
super().__enter__()
|
||||
|
||||
if os.path.exists(self._file_path):
|
||||
with open(self._file_path, 'r', encoding='utf-8') as f:
|
||||
self._data = json.load(f)
|
||||
self._initial_data = self._data
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
super().__exit__(exc_type, exc_val, exc_tb)
|
||||
|
||||
if self._data != self._initial_data:
|
||||
if self._data is not None:
|
||||
with open(self._file_path, 'w', encoding='utf-8') as f:
|
||||
json.dump(self._data, f, indent=2, sort_keys=True)
|
||||
else:
|
||||
if os.path.exists(self._file_path):
|
||||
os.remove(self._file_path)
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
return self._data
|
||||
|
||||
@data.setter
|
||||
def data(self, new_data):
|
||||
if new_data is None:
|
||||
raise ValueError('Invalid new data, use clear() explicitly to reset file data')
|
||||
self._data = new_data
|
||||
|
||||
def clear(self):
|
||||
self._data = None
|
96
legendary/lfs/windows_helpers.py
Normal file
96
legendary/lfs/windows_helpers.py
Normal file
|
@ -0,0 +1,96 @@
|
|||
import logging
|
||||
import winreg
|
||||
import ctypes
|
||||
|
||||
_logger = logging.getLogger('WindowsHelpers')
|
||||
|
||||
HKEY_CURRENT_USER = winreg.HKEY_CURRENT_USER
|
||||
HKEY_LOCAL_MACHINE = winreg.HKEY_LOCAL_MACHINE
|
||||
TYPE_STRING = winreg.REG_SZ
|
||||
TYPE_DWORD = winreg.REG_DWORD
|
||||
|
||||
|
||||
def query_registry_value(hive, key, value):
|
||||
ret = None
|
||||
try:
|
||||
k = winreg.OpenKey(hive, key, reserved=0, access=winreg.KEY_READ)
|
||||
except FileNotFoundError:
|
||||
_logger.debug(f'Registry key "{key}" not found')
|
||||
else:
|
||||
try:
|
||||
ret, _ = winreg.QueryValueEx(k, value)
|
||||
except FileNotFoundError:
|
||||
_logger.debug(f'Registry value "{key}":"{value}" not found')
|
||||
winreg.CloseKey(k)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def list_registry_values(hive, key, use_32bit_view=False):
|
||||
ret = []
|
||||
|
||||
access = winreg.KEY_READ
|
||||
if use_32bit_view:
|
||||
access |= winreg.KEY_WOW64_32KEY
|
||||
|
||||
try:
|
||||
k = winreg.OpenKey(hive, key, reserved=0, access=access)
|
||||
except FileNotFoundError:
|
||||
_logger.debug(f'Registry key "{key}" not found')
|
||||
else:
|
||||
idx = 0
|
||||
while True:
|
||||
try:
|
||||
ret.append(winreg.EnumValue(k, idx))
|
||||
except OSError:
|
||||
break
|
||||
idx += 1
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def remove_registry_value(hive, key, value, use_32bit_view=False):
|
||||
access = winreg.KEY_ALL_ACCESS
|
||||
if use_32bit_view:
|
||||
access |= winreg.KEY_WOW64_32KEY
|
||||
|
||||
try:
|
||||
k = winreg.OpenKey(hive, key, reserved=0, access=access)
|
||||
except FileNotFoundError:
|
||||
_logger.debug(f'Registry key "{key}" not found')
|
||||
else:
|
||||
try:
|
||||
winreg.DeleteValue(k, value)
|
||||
except Exception as e:
|
||||
_logger.debug(f'Deleting "{key}":"{value}" failed with {repr(e)}')
|
||||
winreg.CloseKey(k)
|
||||
|
||||
|
||||
def set_registry_value(hive, key, value, data, reg_type=winreg.REG_SZ, use_32bit_view=False):
|
||||
access = winreg.KEY_ALL_ACCESS
|
||||
if use_32bit_view:
|
||||
access |= winreg.KEY_WOW64_32KEY
|
||||
|
||||
try:
|
||||
k = winreg.CreateKeyEx(hive, key, reserved=0, access=access)
|
||||
except Exception as e:
|
||||
_logger.debug(f'Failed creating/opening registry key "{key}" with {repr(e)}')
|
||||
else:
|
||||
try:
|
||||
winreg.SetValueEx(k, value, 0, reg_type, data)
|
||||
except Exception as e:
|
||||
_logger.debug(f'Setting "{key}":"{value}" to "{data}" failed with {repr(e)}')
|
||||
winreg.CloseKey(k)
|
||||
|
||||
|
||||
def double_clicked() -> bool:
|
||||
# Thanks https://stackoverflow.com/a/55476145
|
||||
|
||||
# Load kernel32.dll
|
||||
kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
|
||||
# Create an array to store the processes in. This doesn't actually need to
|
||||
# be large enough to store the whole process list since GetConsoleProcessList()
|
||||
# just returns the number of processes if the array is too small.
|
||||
process_array = (ctypes.c_uint * 1)()
|
||||
num_processes = kernel32.GetConsoleProcessList(process_array, 1)
|
||||
return num_processes < 3
|
93
legendary/lfs/wine_helpers.py
Normal file
93
legendary/lfs/wine_helpers.py
Normal file
|
@ -0,0 +1,93 @@
|
|||
import configparser
|
||||
import logging
|
||||
import os
|
||||
|
||||
logger = logging.getLogger('WineHelpers')
|
||||
|
||||
|
||||
def read_registry(wine_pfx):
|
||||
reg = configparser.ConfigParser(comment_prefixes=(';', '#', '/', 'WINE'), allow_no_value=True, strict=False)
|
||||
reg.optionxform = str
|
||||
reg.read(os.path.join(wine_pfx, 'user.reg'))
|
||||
return reg
|
||||
|
||||
|
||||
def get_shell_folders(registry, wine_pfx):
|
||||
folders = dict()
|
||||
for k, v in registry['Software\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Explorer\\\\Shell Folders'].items():
|
||||
path_cleaned = v.strip('"').strip().replace('\\\\', '/').replace('C:/', '')
|
||||
folders[k.strip('"').strip()] = os.path.join(wine_pfx, 'drive_c', path_cleaned)
|
||||
return folders
|
||||
|
||||
|
||||
def case_insensitive_file_search(path: str) -> str:
|
||||
"""
|
||||
Similar to case_insensitive_path_search: Finds a file case-insensitively
|
||||
Note that this *does* work on Windows, although it's rather pointless
|
||||
"""
|
||||
path_parts = os.path.normpath(path).split(os.sep)
|
||||
# If path_parts[0] is empty, we're on Unix and thus start searching at /
|
||||
if not path_parts[0]:
|
||||
path_parts[0] = '/'
|
||||
|
||||
computed_path = path_parts[0]
|
||||
for part in path_parts[1:]:
|
||||
# If the computed directory does not exist, add all remaining parts as-is to at least return a valid path
|
||||
# at the end
|
||||
if not os.path.exists(computed_path):
|
||||
computed_path = os.path.join(computed_path, part)
|
||||
continue
|
||||
|
||||
# First try to find an exact match
|
||||
actual_file_or_dirname = part if os.path.exists(os.path.join(computed_path, part)) else None
|
||||
|
||||
# If there is no case-sensitive match, find a case-insensitive one
|
||||
if not actual_file_or_dirname:
|
||||
actual_file_or_dirname = next((
|
||||
x for x in os.listdir(computed_path)
|
||||
if x.lower() == part.lower()
|
||||
), part)
|
||||
computed_path = os.path.join(computed_path, actual_file_or_dirname)
|
||||
return computed_path
|
||||
|
||||
|
||||
def case_insensitive_path_search(path):
|
||||
"""
|
||||
Attempts to find a path case-insensitively
|
||||
"""
|
||||
# Legendary's save path resolver always returns absolute paths, so this is not as horrible as it looks
|
||||
path_parts = path.replace('\\', '/').split('/')
|
||||
path_parts[0] = '/'
|
||||
# filter out empty parts
|
||||
path_parts = [i for i in path_parts if i]
|
||||
|
||||
# attempt to find lowest level directory that exists case-sensitively
|
||||
longest_path = ''
|
||||
remaining_parts = []
|
||||
for i in range(len(path_parts), 0, -1):
|
||||
if os.path.exists(os.path.join(*path_parts[:i])):
|
||||
longest_path = path_parts[:i]
|
||||
remaining_parts = path_parts[i:]
|
||||
break
|
||||
logger.debug(f'Longest valid path: {longest_path}')
|
||||
logger.debug(f'Remaining parts: {remaining_parts}')
|
||||
|
||||
# Iterate over remaining parts, find matching directories case-insensitively
|
||||
still_remaining = []
|
||||
for idx, part in enumerate(remaining_parts):
|
||||
for item in os.listdir(os.path.join(*longest_path)):
|
||||
if not os.path.isdir(os.path.join(*longest_path, item)):
|
||||
continue
|
||||
if item.lower() == part.lower():
|
||||
longest_path.append(item)
|
||||
break
|
||||
else:
|
||||
# once we stop finding parts break
|
||||
still_remaining = remaining_parts[idx:]
|
||||
break
|
||||
|
||||
logger.debug(f'New longest path: {longest_path}')
|
||||
logger.debug(f'Still unresolved: {still_remaining}')
|
||||
final_path = os.path.join(*longest_path, *still_remaining)
|
||||
logger.debug(f'Final path: {final_path}')
|
||||
return os.path.realpath(final_path)
|
|
@ -10,7 +10,6 @@ from uuid import uuid4
|
|||
from legendary.utils.rolling_hash import get_hash
|
||||
|
||||
|
||||
# ToDo do some reworking to make this more memory efficient
|
||||
class Chunk:
|
||||
header_magic = 0xB1FE3AA2
|
||||
|
||||
|
@ -114,10 +113,7 @@ class Chunk:
|
|||
return _chunk
|
||||
|
||||
def write(self, fp=None, compress=True):
|
||||
if not fp:
|
||||
bio = BytesIO()
|
||||
else:
|
||||
bio = fp
|
||||
bio = fp or BytesIO()
|
||||
|
||||
self.uncompressed_size = self.compressed_size = len(self.data)
|
||||
if compress or self.compressed:
|
||||
|
@ -144,7 +140,4 @@ class Chunk:
|
|||
# finally, add the data
|
||||
bio.write(self._data)
|
||||
|
||||
if not fp:
|
||||
return bio.getvalue()
|
||||
else:
|
||||
return bio.tell()
|
||||
return bio.tell() if fp else bio.getvalue()
|
||||
|
|
|
@ -9,6 +9,7 @@ class LGDConf(configparser.ConfigParser):
|
|||
self.read_only = False
|
||||
self.modtime = None
|
||||
super().__init__(*args, **kwargs)
|
||||
self.optionxform = str
|
||||
|
||||
def read(self, filename):
|
||||
# if config file exists, save modification time
|
||||
|
@ -22,12 +23,16 @@ class LGDConf(configparser.ConfigParser):
|
|||
super().write(*args, **kwargs)
|
||||
self.modtime = int(time.time())
|
||||
|
||||
def set(self, *args, **kwargs):
|
||||
def set(self, section, option, value=None):
|
||||
if self.read_only:
|
||||
return
|
||||
|
||||
# ensure config section exists
|
||||
if not self.has_section(section):
|
||||
self.add_section(section)
|
||||
|
||||
self.modified = True
|
||||
super().set(*args, **kwargs)
|
||||
super().set(section, option, value)
|
||||
|
||||
def remove_option(self, section, option):
|
||||
if self.read_only:
|
|
@ -62,6 +62,7 @@ class TaskFlags(Flag):
|
|||
CREATE_EMPTY_FILE = auto()
|
||||
RENAME_FILE = auto()
|
||||
RELEASE_MEMORY = auto()
|
||||
MAKE_EXECUTABLE = auto()
|
||||
SILENT = auto()
|
||||
|
||||
|
||||
|
@ -126,6 +127,7 @@ class AnalysisResult:
|
|||
dl_size: int = 0
|
||||
uncompressed_dl_size: int = 0
|
||||
install_size: int = 0
|
||||
disk_space_delta: int = 0
|
||||
reuse_size: int = 0
|
||||
biggest_file_size: int = 0
|
||||
unchanged_size: int = 0
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from copy import deepcopy
|
||||
from distutils.util import strtobool
|
||||
|
||||
from legendary.models.game import InstalledGame, Game
|
||||
from legendary.utils.cli import strtobool
|
||||
|
||||
|
||||
_template = {
|
||||
|
@ -134,8 +134,8 @@ class EGLManifest:
|
|||
tmp.app_version_string = igame.version
|
||||
tmp.base_urls = igame.base_urls
|
||||
tmp.build_label = 'Live'
|
||||
tmp.catalog_item_id = game.asset_info.catalog_item_id
|
||||
tmp.namespace = game.asset_info.namespace
|
||||
tmp.catalog_item_id = game.catalog_item_id
|
||||
tmp.namespace = game.namespace
|
||||
tmp.display_name = igame.title
|
||||
tmp.install_location = igame.install_path
|
||||
tmp.install_size = igame.install_size
|
||||
|
@ -145,9 +145,9 @@ class EGLManifest:
|
|||
tmp.executable = igame.executable
|
||||
tmp.main_game_appname = game.app_name # todo for DLC support this needs to be the base game
|
||||
tmp.app_folder_name = game.metadata.get('customAttributes', {}).get('FolderName', {}).get('value', '')
|
||||
tmp.manifest_location = igame.install_path + '/.egstore'
|
||||
tmp.manifest_location = f'{igame.install_path}/.egstore'
|
||||
tmp.ownership_token = igame.requires_ot
|
||||
tmp.staging_location = igame.install_path + '/.egstore/bps'
|
||||
tmp.staging_location = f'{igame.install_path}/.egstore/bps'
|
||||
tmp.can_run_offline = igame.can_run_offline
|
||||
tmp.is_incomplete_install = False
|
||||
tmp.needs_validation = igame.needs_verification
|
||||
|
|
|
@ -1,12 +1,4 @@
|
|||
# coding: utf-8
|
||||
|
||||
# ToDo more custom exceptions where it makes sense
|
||||
|
||||
|
||||
class CaptchaError(Exception):
|
||||
"""Raised by core if direct login fails"""
|
||||
pass
|
||||
|
||||
|
||||
class InvalidCredentialsError(Exception):
|
||||
pass
|
||||
|
|
|
@ -1,17 +1,23 @@
|
|||
# coding: utf-8
|
||||
|
||||
from datetime import datetime
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from typing import Optional, List, Dict
|
||||
|
||||
|
||||
@dataclass
|
||||
class GameAsset:
|
||||
def __init__(self):
|
||||
self.app_name = ''
|
||||
self.asset_id = ''
|
||||
self.build_version = ''
|
||||
self.catalog_item_id = ''
|
||||
self.label_name = ''
|
||||
self.namespace = ''
|
||||
self.metadata = dict()
|
||||
"""
|
||||
App asset data
|
||||
"""
|
||||
app_name: str = ''
|
||||
asset_id: str = ''
|
||||
build_version: str = ''
|
||||
catalog_item_id: str = ''
|
||||
label_name: str = ''
|
||||
namespace: str = ''
|
||||
metadata: Dict = field(default_factory=dict)
|
||||
|
||||
@classmethod
|
||||
def from_egs_json(cls, json):
|
||||
|
@ -38,98 +44,164 @@ class GameAsset:
|
|||
return tmp
|
||||
|
||||
|
||||
@dataclass
|
||||
class Game:
|
||||
def __init__(self, app_name='', app_title='', asset_info=None, app_version='', metadata=None):
|
||||
self.metadata = dict() if metadata is None else metadata # store metadata from EGS
|
||||
self.asset_info = asset_info if asset_info else GameAsset() # asset info from EGS
|
||||
"""
|
||||
Combination of app asset and app metadata as stored on disk
|
||||
"""
|
||||
app_name: str
|
||||
app_title: str
|
||||
|
||||
self.app_version = app_version
|
||||
self.app_name = app_name
|
||||
self.app_title = app_title
|
||||
self.base_urls = [] # base urls for download, only really used when cached manifest is current
|
||||
asset_infos: Dict[str, GameAsset] = field(default_factory=dict)
|
||||
base_urls: List[str] = field(default_factory=list)
|
||||
metadata: Dict = field(default_factory=dict)
|
||||
|
||||
def app_version(self, platform='Windows'):
|
||||
if platform not in self.asset_infos:
|
||||
return None
|
||||
return self.asset_infos[platform].build_version
|
||||
|
||||
@property
|
||||
def is_dlc(self):
|
||||
return self.metadata and 'mainGameItem' in self.metadata
|
||||
|
||||
@property
|
||||
def third_party_store(self):
|
||||
if not self.metadata:
|
||||
return None
|
||||
return self.metadata.get('customAttributes', {}).get('ThirdPartyManagedApp', {}).get('value', None)
|
||||
|
||||
@property
|
||||
def partner_link_type(self):
|
||||
if not self.metadata:
|
||||
return None
|
||||
return self.metadata.get('customAttributes', {}).get('partnerLinkType', {}).get('value', None)
|
||||
|
||||
@property
|
||||
def partner_link_id(self):
|
||||
if not self.metadata:
|
||||
return None
|
||||
return self.metadata.get('customAttributes', {}).get('partnerLinkId', {}).get('value', None)
|
||||
|
||||
@property
|
||||
def supports_cloud_saves(self):
|
||||
return self.metadata and (self.metadata.get('customAttributes', {}).get('CloudSaveFolder') is not None)
|
||||
|
||||
@property
|
||||
def supports_mac_cloud_saves(self):
|
||||
return self.metadata and (self.metadata.get('customAttributes', {}).get('CloudSaveFolder_MAC') is not None)
|
||||
|
||||
@property
|
||||
def additional_command_line(self):
|
||||
if not self.metadata:
|
||||
return None
|
||||
return self.metadata.get('customAttributes', {}).get('AdditionalCommandLine', {}).get('value', None)
|
||||
|
||||
@property
|
||||
def is_launchable_addon(self):
|
||||
if not self.metadata:
|
||||
return False
|
||||
return any(m['path'] == 'addons/launchable' for m in self.metadata.get('categories', []))
|
||||
|
||||
@property
|
||||
def catalog_item_id(self):
|
||||
if not self.metadata:
|
||||
return None
|
||||
return self.metadata['id']
|
||||
|
||||
@property
|
||||
def namespace(self):
|
||||
if not self.metadata:
|
||||
return None
|
||||
return self.metadata['namespace']
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, json):
|
||||
tmp = cls()
|
||||
tmp = cls(
|
||||
app_name=json.get('app_name', ''),
|
||||
app_title=json.get('app_title', ''),
|
||||
)
|
||||
tmp.metadata = json.get('metadata', dict())
|
||||
tmp.asset_info = GameAsset.from_json(json.get('asset_info', dict()))
|
||||
tmp.app_name = json.get('app_name', 'undefined')
|
||||
tmp.app_title = json.get('app_title', 'undefined')
|
||||
tmp.app_version = json.get('app_version', 'undefined')
|
||||
if 'asset_infos' in json:
|
||||
tmp.asset_infos = {k: GameAsset.from_json(v) for k, v in json['asset_infos'].items()}
|
||||
else:
|
||||
# Migrate old asset_info to new asset_infos
|
||||
tmp.asset_infos['Windows'] = GameAsset.from_json(json.get('asset_info', dict()))
|
||||
|
||||
tmp.base_urls = json.get('base_urls', list())
|
||||
return tmp
|
||||
|
||||
@property
|
||||
def __dict__(self):
|
||||
"""This is just here so asset_info gets turned into a dict as well"""
|
||||
return dict(metadata=self.metadata, asset_info=self.asset_info.__dict__,
|
||||
app_name=self.app_name, app_title=self.app_title,
|
||||
app_version=self.app_version, base_urls=self.base_urls)
|
||||
"""This is just here so asset_infos gets turned into a dict as well"""
|
||||
assets_dictified = {k: v.__dict__ for k, v in self.asset_infos.items()}
|
||||
return dict(metadata=self.metadata, asset_infos=assets_dictified, app_name=self.app_name,
|
||||
app_title=self.app_title, base_urls=self.base_urls)
|
||||
|
||||
|
||||
@dataclass
|
||||
class InstalledGame:
|
||||
def __init__(self, app_name='', title='', version='', manifest_path='', base_urls=None,
|
||||
install_path='', executable='', launch_parameters='', prereq_info=None,
|
||||
can_run_offline=False, requires_ot=False, is_dlc=False, save_path=None,
|
||||
needs_verification=False, install_size=0, egl_guid='', install_tags=None):
|
||||
self.app_name = app_name
|
||||
self.title = title
|
||||
self.version = version
|
||||
"""
|
||||
Local metadata for an installed app
|
||||
"""
|
||||
app_name: str
|
||||
install_path: str
|
||||
title: str
|
||||
version: str
|
||||
|
||||
self.manifest_path = manifest_path
|
||||
self.base_urls = list() if not base_urls else base_urls
|
||||
self.install_path = install_path
|
||||
self.executable = executable
|
||||
self.launch_parameters = launch_parameters
|
||||
self.prereq_info = prereq_info
|
||||
self.can_run_offline = can_run_offline
|
||||
self.requires_ot = requires_ot
|
||||
self.is_dlc = is_dlc
|
||||
self.save_path = save_path
|
||||
self.needs_verification = needs_verification
|
||||
self.install_size = install_size
|
||||
self.egl_guid = egl_guid
|
||||
self.install_tags = install_tags if install_tags else []
|
||||
base_urls: List[str] = field(default_factory=list)
|
||||
can_run_offline: bool = False
|
||||
egl_guid: str = ''
|
||||
executable: str = ''
|
||||
install_size: int = 0
|
||||
install_tags: List[str] = field(default_factory=list)
|
||||
is_dlc: bool = False
|
||||
launch_parameters: str = ''
|
||||
manifest_path: str = ''
|
||||
needs_verification: bool = False
|
||||
platform: str = 'Windows'
|
||||
prereq_info: Optional[Dict] = None
|
||||
uninstaller: Optional[Dict] = None
|
||||
requires_ot: bool = False
|
||||
save_path: Optional[str] = None
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, json):
|
||||
tmp = cls()
|
||||
tmp.app_name = json.get('app_name', '')
|
||||
tmp.version = json.get('version', '')
|
||||
tmp.title = json.get('title', '')
|
||||
tmp = cls(
|
||||
app_name=json.get('app_name', ''),
|
||||
install_path=json.get('install_path', ''),
|
||||
title=json.get('title', ''),
|
||||
version=json.get('version', ''),
|
||||
)
|
||||
|
||||
tmp.manifest_path = json.get('manifest_path', '')
|
||||
tmp.base_urls = json.get('base_urls', list())
|
||||
tmp.install_path = json.get('install_path', '')
|
||||
tmp.executable = json.get('executable', '')
|
||||
tmp.launch_parameters = json.get('launch_parameters', '')
|
||||
tmp.prereq_info = json.get('prereq_info', None)
|
||||
tmp.uninstaller = json.get('uninstaller', None)
|
||||
|
||||
tmp.can_run_offline = json.get('can_run_offline', False)
|
||||
tmp.requires_ot = json.get('requires_ot', False)
|
||||
tmp.is_dlc = json.get('is_dlc', False)
|
||||
tmp.save_path = json.get('save_path', None)
|
||||
tmp.manifest_path = json.get('manifest_path', '')
|
||||
tmp.needs_verification = json.get('needs_verification', False) is True
|
||||
tmp.platform = json.get('platform', 'Windows')
|
||||
tmp.install_size = json.get('install_size', 0)
|
||||
tmp.egl_guid = json.get('egl_guid', '')
|
||||
tmp.install_tags = json.get('install_tags', [])
|
||||
return tmp
|
||||
|
||||
|
||||
@dataclass
|
||||
class SaveGameFile:
|
||||
def __init__(self, app_name='', filename='', manifest='', datetime=None):
|
||||
self.app_name = app_name
|
||||
self.filename = filename
|
||||
self.manifest_name = manifest
|
||||
self.datetime = datetime
|
||||
"""
|
||||
Metadata for a cloud save manifest
|
||||
"""
|
||||
app_name: str
|
||||
filename: str
|
||||
manifest_name: str
|
||||
datetime: Optional[datetime] = None
|
||||
|
||||
|
||||
class SaveGameStatus(Enum):
|
||||
|
@ -144,3 +216,25 @@ class VerifyResult(Enum):
|
|||
HASH_MISMATCH = 1
|
||||
FILE_MISSING = 2
|
||||
OTHER_ERROR = 3
|
||||
|
||||
|
||||
@dataclass
|
||||
class LaunchParameters:
|
||||
"""
|
||||
Parameters for launching a game
|
||||
"""
|
||||
# game-supplied parameters
|
||||
game_parameters: list = field(default_factory=list)
|
||||
game_executable: str = ''
|
||||
game_directory: str = ''
|
||||
# EGL parameters (auth, ovt, etc.)
|
||||
egl_parameters: list = field(default_factory=list)
|
||||
# command line before executable (WINE, gamemode, etc.)
|
||||
launch_command: list = field(default_factory=list)
|
||||
# working directory for launched process
|
||||
working_directory: str = ''
|
||||
# user and environment supplied options
|
||||
user_parameters: list = field(default_factory=list)
|
||||
environment: dict = field(default_factory=dict)
|
||||
pre_launch_command: str = ''
|
||||
pre_launch_wait: bool = False
|
||||
|
|
61
legendary/models/gql.py
Normal file
61
legendary/models/gql.py
Normal file
|
@ -0,0 +1,61 @@
|
|||
# GQL queries needed for the EGS API
|
||||
|
||||
uplay_codes_query = '''
|
||||
query partnerIntegrationQuery($accountId: String!) {
|
||||
PartnerIntegration {
|
||||
accountUplayCodes(accountId: $accountId) {
|
||||
epicAccountId
|
||||
gameId
|
||||
uplayAccountId
|
||||
regionCode
|
||||
redeemedOnUplay
|
||||
redemptionTimestamp
|
||||
}
|
||||
}
|
||||
}
|
||||
'''
|
||||
|
||||
uplay_redeem_query = '''
|
||||
mutation redeemAllPendingCodes($accountId: String!, $uplayAccountId: String!) {
|
||||
PartnerIntegration {
|
||||
redeemAllPendingCodes(accountId: $accountId, uplayAccountId: $uplayAccountId) {
|
||||
data {
|
||||
epicAccountId
|
||||
uplayAccountId
|
||||
redeemedOnUplay
|
||||
redemptionTimestamp
|
||||
}
|
||||
success
|
||||
}
|
||||
}
|
||||
}
|
||||
'''
|
||||
|
||||
uplay_claim_query = '''
|
||||
mutation claimUplayCode($accountId: String!, $uplayAccountId: String!, $gameId: String!) {
|
||||
PartnerIntegration {
|
||||
claimUplayCode(
|
||||
accountId: $accountId
|
||||
uplayAccountId: $uplayAccountId
|
||||
gameId: $gameId
|
||||
) {
|
||||
data {
|
||||
assignmentTimestam
|
||||
epicAccountId
|
||||
epicEntitlement {
|
||||
entitlementId
|
||||
catalogItemId
|
||||
entitlementName
|
||||
country
|
||||
}
|
||||
gameId
|
||||
redeemedOnUplay
|
||||
redemptionTimestamp
|
||||
regionCode
|
||||
uplayAccountId
|
||||
}
|
||||
success
|
||||
}
|
||||
}
|
||||
}
|
||||
'''
|
|
@ -1,5 +1,7 @@
|
|||
# coding: utf-8
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import struct
|
||||
|
@ -7,6 +9,7 @@ import zlib
|
|||
|
||||
from base64 import b64encode
|
||||
from io import BytesIO
|
||||
from typing import Optional
|
||||
|
||||
logger = logging.getLogger('Manifest')
|
||||
|
||||
|
@ -16,8 +19,8 @@ def read_fstring(bio):
|
|||
|
||||
# if the length is negative the string is UTF-16 encoded, this was a pain to figure out.
|
||||
if length < 0:
|
||||
# utf-16 chars are 2 bytes wide but the length is # of characters, not bytes
|
||||
# todo actually make sure utf-16 characters can't be longer than 2 bytes
|
||||
# utf-16 chars are (generally) 2 bytes wide, but the length is # of characters, not bytes.
|
||||
# 4-byte wide chars exist, but best I can tell Epic's (de)serializer doesn't support those.
|
||||
length *= -2
|
||||
s = bio.read(length - 2).decode('utf-16')
|
||||
bio.seek(2, 1) # utf-16 strings have two byte null terminators
|
||||
|
@ -61,6 +64,7 @@ def get_chunk_dir(version):
|
|||
|
||||
class Manifest:
|
||||
header_magic = 0x44BEC00C
|
||||
default_serialisation_version = 17
|
||||
|
||||
def __init__(self):
|
||||
self.header_size = 41
|
||||
|
@ -72,10 +76,10 @@ class Manifest:
|
|||
self.data = b''
|
||||
|
||||
# remainder
|
||||
self.meta = None
|
||||
self.chunk_data_list = None
|
||||
self.file_manifest_list = None
|
||||
self.custom_fields = None
|
||||
self.meta: Optional[ManifestMeta] = None
|
||||
self.chunk_data_list: Optional[CDL] = None
|
||||
self.file_manifest_list: Optional[FML] = None
|
||||
self.custom_fields: Optional[CustomFields] = None
|
||||
|
||||
@property
|
||||
def compressed(self):
|
||||
|
@ -91,8 +95,7 @@ class Manifest:
|
|||
_m.file_manifest_list = FML.read(_tmp)
|
||||
_m.custom_fields = CustomFields.read(_tmp)
|
||||
|
||||
unhandled_data = _tmp.read()
|
||||
if unhandled_data:
|
||||
if unhandled_data := _tmp.read():
|
||||
logger.warning(f'Did not read {len(unhandled_data)} remaining bytes in manifest! '
|
||||
f'This may not be a problem.')
|
||||
|
||||
|
@ -118,10 +121,10 @@ class Manifest:
|
|||
_manifest.version = struct.unpack('<I', bio.read(4))[0]
|
||||
|
||||
if bio.tell() != _manifest.header_size:
|
||||
logger.fatal(f'Did not read entire header {bio.tell()} != {_manifest.header_size}! '
|
||||
f'Header version: {_manifest.version}, please report this on '
|
||||
f'GitHub along with a sample of the problematic manifest!')
|
||||
raise ValueError('Did not read complete manifest header!')
|
||||
logger.warning(f'Did not read entire header {bio.tell()} != {_manifest.header_size}! '
|
||||
f'Header version: {_manifest.version}, please report this on '
|
||||
f'GitHub along with a sample of the problematic manifest!')
|
||||
bio.seek(_manifest.header_size)
|
||||
|
||||
data = bio.read()
|
||||
if _manifest.compressed:
|
||||
|
@ -137,6 +140,26 @@ class Manifest:
|
|||
def write(self, fp=None, compress=True):
|
||||
body_bio = BytesIO()
|
||||
|
||||
# set serialisation version based on enabled features or original version
|
||||
target_version = max(self.default_serialisation_version, self.meta.feature_level)
|
||||
if self.meta.data_version == 2:
|
||||
target_version = max(21, target_version)
|
||||
elif self.file_manifest_list.version == 2:
|
||||
target_version = max(20, target_version)
|
||||
elif self.file_manifest_list.version == 1:
|
||||
target_version = max(19, target_version)
|
||||
elif self.meta.data_version == 1:
|
||||
target_version = max(18, target_version)
|
||||
|
||||
# Downgrade manifest if unknown newer version
|
||||
if target_version > 21:
|
||||
logger.warning(f'Trying to serialise an unknown target version: {target_version},'
|
||||
f'clamping to 21.')
|
||||
target_version = 21
|
||||
|
||||
# Ensure metadata will be correct
|
||||
self.meta.feature_level = target_version
|
||||
|
||||
self.meta.write(body_bio)
|
||||
self.chunk_data_list.write(body_bio)
|
||||
self.file_manifest_list.write(body_bio)
|
||||
|
@ -151,10 +174,7 @@ class Manifest:
|
|||
self.data = zlib.compress(self.data)
|
||||
self.size_compressed = len(self.data)
|
||||
|
||||
if not fp:
|
||||
bio = BytesIO()
|
||||
else:
|
||||
bio = fp
|
||||
bio = fp or BytesIO()
|
||||
|
||||
bio.write(struct.pack('<I', self.header_magic))
|
||||
bio.write(struct.pack('<I', self.header_size))
|
||||
|
@ -162,13 +182,47 @@ class Manifest:
|
|||
bio.write(struct.pack('<I', self.size_compressed))
|
||||
bio.write(self.sha_hash)
|
||||
bio.write(struct.pack('B', self.stored_as))
|
||||
bio.write(struct.pack('<I', self.version))
|
||||
bio.write(struct.pack('<I', target_version))
|
||||
bio.write(self.data)
|
||||
|
||||
if not fp:
|
||||
return bio.getvalue()
|
||||
else:
|
||||
return bio.tell()
|
||||
return bio.tell() if fp else bio.getvalue()
|
||||
|
||||
def apply_delta_manifest(self, delta_manifest: Manifest):
|
||||
added = set()
|
||||
# overwrite file elements with the ones from the delta manifest
|
||||
for idx, file_elem in enumerate(self.file_manifest_list.elements):
|
||||
try:
|
||||
delta_file = delta_manifest.file_manifest_list.get_file_by_path(file_elem.filename)
|
||||
self.file_manifest_list.elements[idx] = delta_file
|
||||
added.add(delta_file.filename)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# add other files that may be missing
|
||||
for delta_file in delta_manifest.file_manifest_list.elements:
|
||||
if delta_file.filename not in added:
|
||||
self.file_manifest_list.elements.append(delta_file)
|
||||
# update count and clear map
|
||||
self.file_manifest_list.count = len(self.file_manifest_list.elements)
|
||||
self.file_manifest_list._path_map = None
|
||||
|
||||
# ensure guid map exists (0 will most likely yield no result, so ignore ValueError)
|
||||
try:
|
||||
self.chunk_data_list.get_chunk_by_guid(0)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# add new chunks from delta manifest to main manifest and again clear maps and update count
|
||||
existing_chunk_guids = self.chunk_data_list._guid_int_map.keys()
|
||||
|
||||
for chunk in delta_manifest.chunk_data_list.elements:
|
||||
if chunk.guid_num not in existing_chunk_guids:
|
||||
self.chunk_data_list.elements.append(chunk)
|
||||
|
||||
self.chunk_data_list.count = len(self.chunk_data_list.elements)
|
||||
self.chunk_data_list._guid_map = None
|
||||
self.chunk_data_list._guid_int_map = None
|
||||
self.chunk_data_list._path_map = None
|
||||
|
||||
|
||||
class ManifestMeta:
|
||||
|
@ -186,6 +240,8 @@ class ManifestMeta:
|
|||
self.prereq_name = ''
|
||||
self.prereq_path = ''
|
||||
self.prereq_args = ''
|
||||
self.uninstall_action_path = ''
|
||||
self.uninstall_action_args = ''
|
||||
# this build id is used for something called "delta file" which I guess I'll have to implement eventually
|
||||
self._build_id = ''
|
||||
|
||||
|
@ -223,19 +279,27 @@ class ManifestMeta:
|
|||
|
||||
# This is a list though I've never seen more than one entry
|
||||
entries = struct.unpack('<I', bio.read(4))[0]
|
||||
for i in range(entries):
|
||||
for _ in range(entries):
|
||||
_meta.prereq_ids.append(read_fstring(bio))
|
||||
|
||||
_meta.prereq_name = read_fstring(bio)
|
||||
_meta.prereq_path = read_fstring(bio)
|
||||
_meta.prereq_args = read_fstring(bio)
|
||||
|
||||
# apparently there's a newer version that actually stores *a* build id.
|
||||
if _meta.data_version > 0:
|
||||
# Manifest version 18 with data version >= 1 stores build ID
|
||||
if _meta.data_version >= 1:
|
||||
_meta._build_id = read_fstring(bio)
|
||||
# Manifest version 21 with data version >= 2 stores uninstall commands
|
||||
if _meta.data_version >= 2:
|
||||
_meta.uninstall_action_path = read_fstring(bio)
|
||||
_meta.uninstall_action_args = read_fstring(bio)
|
||||
|
||||
if bio.tell() != _meta.meta_size:
|
||||
raise ValueError('Did not read entire meta!')
|
||||
if (size_read := bio.tell()) != _meta.meta_size:
|
||||
logger.warning(f'Did not read entire manifest metadata! Version: {_meta.data_version}, '
|
||||
f'{_meta.meta_size - size_read} bytes missing, skipping...')
|
||||
bio.seek(_meta.meta_size - size_read, 1)
|
||||
# downgrade version to prevent issues during serialisation
|
||||
_meta.data_version = 0
|
||||
|
||||
return _meta
|
||||
|
||||
|
@ -260,8 +324,11 @@ class ManifestMeta:
|
|||
write_fstring(bio, self.prereq_path)
|
||||
write_fstring(bio, self.prereq_args)
|
||||
|
||||
if self.data_version > 0:
|
||||
if self.data_version >= 1:
|
||||
write_fstring(bio, self.build_id)
|
||||
if self.data_version >= 2:
|
||||
write_fstring(bio, self.uninstall_action_path)
|
||||
write_fstring(bio, self.uninstall_action_args)
|
||||
|
||||
meta_end = bio.tell()
|
||||
bio.seek(meta_start)
|
||||
|
@ -339,7 +406,7 @@ class CDL:
|
|||
|
||||
# the way this data is stored is rather odd, maybe there's a nicer way to write this...
|
||||
|
||||
for i in range(_cdl.count):
|
||||
for _ in range(_cdl.count):
|
||||
_cdl.elements.append(ChunkInfo(manifest_version=manifest_version))
|
||||
|
||||
# guid, doesn't seem to be a standard like UUID but is fairly straightfoward, 4 bytes, 128 bit.
|
||||
|
@ -366,8 +433,12 @@ class CDL:
|
|||
for chunk in _cdl.elements:
|
||||
chunk.file_size = struct.unpack('<q', bio.read(8))[0]
|
||||
|
||||
if bio.tell() - cdl_start != _cdl.size:
|
||||
raise ValueError('Did not read entire chunk data list!')
|
||||
if (size_read := bio.tell() - cdl_start) != _cdl.size:
|
||||
logger.warning(f'Did not read entire chunk data list! Version: {_cdl.version}, '
|
||||
f'{_cdl.size - size_read} bytes missing, skipping...')
|
||||
bio.seek(_cdl.size - size_read, 1)
|
||||
# downgrade version to prevent issues during serialisation
|
||||
_cdl.version = 0
|
||||
|
||||
return _cdl
|
||||
|
||||
|
@ -480,7 +551,7 @@ class FML:
|
|||
_fml.version = struct.unpack('B', bio.read(1))[0]
|
||||
_fml.count = struct.unpack('<I', bio.read(4))[0]
|
||||
|
||||
for i in range(_fml.count):
|
||||
for _ in range(_fml.count):
|
||||
_fml.elements.append(FileManifest())
|
||||
|
||||
for fm in _fml.elements:
|
||||
|
@ -501,14 +572,14 @@ class FML:
|
|||
# install tags, no idea what they do, I've only seen them in the Fortnite manifest
|
||||
for fm in _fml.elements:
|
||||
_elem = struct.unpack('<I', bio.read(4))[0]
|
||||
for i in range(_elem):
|
||||
for _ in range(_elem):
|
||||
fm.install_tags.append(read_fstring(bio))
|
||||
|
||||
# Each file is made up of "Chunk Parts" that can be spread across the "chunk stream"
|
||||
for fm in _fml.elements:
|
||||
_elem = struct.unpack('<I', bio.read(4))[0]
|
||||
_offset = 0
|
||||
for i in range(_elem):
|
||||
for _ in range(_elem):
|
||||
chunkp = ChunkPart()
|
||||
_start = bio.tell()
|
||||
_size = struct.unpack('<I', bio.read(4))[0]
|
||||
|
@ -522,12 +593,31 @@ class FML:
|
|||
logger.warning(f'Did not read {diff} bytes from chunk part!')
|
||||
bio.seek(diff)
|
||||
|
||||
# MD5 hash + MIME type (Manifest feature level 19)
|
||||
if _fml.version >= 1:
|
||||
for fm in _fml.elements:
|
||||
_has_md5 = struct.unpack('<I', bio.read(4))[0]
|
||||
if _has_md5 != 0:
|
||||
fm.hash_md5 = bio.read(16)
|
||||
|
||||
for fm in _fml.elements:
|
||||
fm.mime_type = read_fstring(bio)
|
||||
|
||||
# SHA256 hash (Manifest feature level 20)
|
||||
if _fml.version >= 2:
|
||||
for fm in _fml.elements:
|
||||
fm.hash_sha256 = bio.read(32)
|
||||
|
||||
# we have to calculate the actual file size ourselves
|
||||
for fm in _fml.elements:
|
||||
fm.file_size = sum(c.size for c in fm.chunk_parts)
|
||||
|
||||
if bio.tell() - fml_start != _fml.size:
|
||||
raise ValueError('Did not read entire chunk data list!')
|
||||
if (size_read := bio.tell() - fml_start) != _fml.size:
|
||||
logger.warning(f'Did not read entire file data list! Version: {_fml.version}, '
|
||||
f'{_fml.size - size_read} bytes missing, skipping...')
|
||||
bio.seek(_fml.size - size_read, 1)
|
||||
# downgrade version to prevent issues during serialisation
|
||||
_fml.version = 0
|
||||
|
||||
return _fml
|
||||
|
||||
|
@ -560,6 +650,20 @@ class FML:
|
|||
bio.write(struct.pack('<I', cp.offset))
|
||||
bio.write(struct.pack('<I', cp.size))
|
||||
|
||||
if self.version >= 1:
|
||||
for fm in self.elements:
|
||||
has_md5 = 1 if fm.hash_md5 else 0
|
||||
bio.write(struct.pack('<I', has_md5))
|
||||
if has_md5:
|
||||
bio.write(fm.hash_md5)
|
||||
|
||||
for fm in self.elements:
|
||||
write_fstring(bio, fm.mime_type)
|
||||
|
||||
if self.version >= 2:
|
||||
for fm in self.elements:
|
||||
bio.write(fm.hash_sha256)
|
||||
|
||||
fml_end = bio.tell()
|
||||
bio.seek(fml_start)
|
||||
bio.write(struct.pack('<I', fml_end - fml_start))
|
||||
|
@ -575,6 +679,9 @@ class FileManifest:
|
|||
self.install_tags = []
|
||||
self.chunk_parts = []
|
||||
self.file_size = 0
|
||||
self.hash_md5 = b''
|
||||
self.mime_type = ''
|
||||
self.hash_sha256 = b''
|
||||
|
||||
@property
|
||||
def read_only(self):
|
||||
|
@ -600,6 +707,7 @@ class FileManifest:
|
|||
_cp.append('[...]')
|
||||
cp_repr = ', '.join(_cp)
|
||||
|
||||
# ToDo add MD5, MIME, SHA256 if those ever become relevant
|
||||
return '<FileManifest (filename="{}", symlink_target="{}", hash={}, flags={}, ' \
|
||||
'install_tags=[{}], chunk_parts=[{}], file_size={})>'.format(
|
||||
self.filename, self.symlink_target, self.hash.hex(), self.flags,
|
||||
|
@ -670,19 +778,16 @@ class CustomFields:
|
|||
_cf.version = struct.unpack('B', bio.read(1))[0]
|
||||
_cf.count = struct.unpack('<I', bio.read(4))[0]
|
||||
|
||||
_keys = []
|
||||
_values = []
|
||||
|
||||
for i in range(_cf.count):
|
||||
_keys.append(read_fstring(bio))
|
||||
|
||||
for i in range(_cf.count):
|
||||
_values.append(read_fstring(bio))
|
||||
|
||||
_keys = [read_fstring(bio) for _ in range(_cf.count)]
|
||||
_values = [read_fstring(bio) for _ in range(_cf.count)]
|
||||
_cf._dict = dict(zip(_keys, _values))
|
||||
|
||||
if bio.tell() - cf_start != _cf.size:
|
||||
raise ValueError('Did not read entire custom fields list!')
|
||||
if (size_read := bio.tell() - cf_start) != _cf.size:
|
||||
logger.warning(f'Did not read entire custom fields part! Version: {_cf.version}, '
|
||||
f'{_cf.size - size_read} bytes missing, skipping...')
|
||||
bio.seek(_cf.size - size_read, 1)
|
||||
# downgrade version to prevent issues during serialisation
|
||||
_cf.version = 0
|
||||
|
||||
return _cf
|
||||
|
||||
|
@ -723,8 +828,7 @@ class ManifestComparison:
|
|||
old_files = {fm.filename: fm.hash for fm in old_manifest.file_manifest_list.elements}
|
||||
|
||||
for fm in manifest.file_manifest_list.elements:
|
||||
old_file_hash = old_files.pop(fm.filename, None)
|
||||
if old_file_hash:
|
||||
if old_file_hash := old_files.pop(fm.filename, None):
|
||||
if fm.hash == old_file_hash:
|
||||
comp.unchanged.add(fm.filename)
|
||||
else:
|
||||
|
|
101
legendary/utils/aliasing.py
Normal file
101
legendary/utils/aliasing.py
Normal file
|
@ -0,0 +1,101 @@
|
|||
from string import ascii_lowercase, digits
|
||||
|
||||
# Aliases generated:
|
||||
# - name lowercase (without TM etc.)
|
||||
# - same, but without spaces
|
||||
# - same, but roman numerals are replaced
|
||||
# if name has >= 2 parts:
|
||||
# - initials
|
||||
# - initials, but roman numerals are intact
|
||||
# - initials, but roman numerals are replaced with number
|
||||
# if ':' in name:
|
||||
# - run previous recursively with everything before ":"
|
||||
# if single 'f' in long word:
|
||||
# - split word (this is mainly for cases like Battlfront -> BF)
|
||||
# the first word longer than 1 character that isn't "the", "for", or "of" will also be added
|
||||
|
||||
allowed_characters = ascii_lowercase+digits
|
||||
roman = {
|
||||
'i': '1',
|
||||
'ii': '2',
|
||||
'iii': '3',
|
||||
'iv': '4',
|
||||
'v': '5',
|
||||
'vi': '6',
|
||||
'vii': '7',
|
||||
'viii': '8',
|
||||
'ix': '9',
|
||||
'x': '10',
|
||||
'xi': '11',
|
||||
'xii': '12',
|
||||
'xiii': '13',
|
||||
'xiv': '14',
|
||||
'xv': '15',
|
||||
'xvi': '16',
|
||||
'xvii': '17',
|
||||
'xviii': '18',
|
||||
'xix': '19',
|
||||
'xx': '20'
|
||||
}
|
||||
|
||||
|
||||
def _filter(input):
|
||||
return ''.join(l for l in input if l in allowed_characters)
|
||||
|
||||
|
||||
def generate_aliases(game_name, game_folder=None, split_words=True, app_name=None):
|
||||
# normalise and split name, then filter for legal characters
|
||||
game_parts = [_filter(p) for p in game_name.lower().split()]
|
||||
# filter out empty parts
|
||||
game_parts = [p for p in game_parts if p]
|
||||
|
||||
_aliases = [
|
||||
game_name.lower().strip(),
|
||||
' '.join(game_parts),
|
||||
''.join(game_parts),
|
||||
''.join(roman.get(p, p) for p in game_parts),
|
||||
]
|
||||
|
||||
# single word abbreviation
|
||||
try:
|
||||
first_word = next(i for i in game_parts if i not in ('for', 'the', 'of'))
|
||||
if len(first_word) > 1:
|
||||
_aliases.append(first_word)
|
||||
except StopIteration:
|
||||
pass
|
||||
|
||||
# remove subtitle from game
|
||||
if ':' in game_name:
|
||||
_aliases.extend(generate_aliases(game_name.partition(':')[0]))
|
||||
if '-' in game_name:
|
||||
_aliases.extend(generate_aliases(game_name.replace('-', ' ')))
|
||||
# include folder name for alternative short forms
|
||||
if game_folder:
|
||||
_aliases.extend(generate_aliases(game_folder, split_words=False))
|
||||
# include lowercase version of app name in aliases
|
||||
if app_name:
|
||||
_aliases.append(app_name.lower())
|
||||
# include initialisms
|
||||
if len(game_parts) > 1:
|
||||
_aliases.append(''.join(p[0] for p in game_parts))
|
||||
_aliases.append(''.join(p[0] if p not in roman else p for p in game_parts))
|
||||
_aliases.append(''.join(roman.get(p, p[0]) for p in game_parts))
|
||||
# Attempt to address cases like "Battlefront" being shortened to "BF"
|
||||
if split_words:
|
||||
new_game_parts = []
|
||||
for word in game_parts:
|
||||
if len(word) >= 8 and word[3:-3].count('f') == 1:
|
||||
word_middle = word[3:-3]
|
||||
word_split = ' f'.join(word_middle.split('f'))
|
||||
word = word[0:3] + word_split + word[-3:]
|
||||
new_game_parts.extend(word.split())
|
||||
else:
|
||||
new_game_parts.append(word)
|
||||
|
||||
if len(new_game_parts) > 1:
|
||||
_aliases.append(''.join(p[0] for p in new_game_parts))
|
||||
_aliases.append(''.join(p[0] if p not in roman else p for p in new_game_parts))
|
||||
_aliases.append(''.join(roman.get(p, p[0]) for p in new_game_parts))
|
||||
|
||||
# return sorted uniques
|
||||
return sorted(set(_aliases))
|
|
@ -1,11 +1,5 @@
|
|||
from legendary.utils.selective_dl import games
|
||||
|
||||
|
||||
def get_boolean_choice(prompt, default=True):
|
||||
if default:
|
||||
yn = 'Y/n'
|
||||
else:
|
||||
yn = 'y/N'
|
||||
yn = 'Y/n' if default else 'y/N'
|
||||
|
||||
choice = input(f'{prompt} [{yn}]: ')
|
||||
if not choice:
|
||||
|
@ -16,29 +10,82 @@ def get_boolean_choice(prompt, default=True):
|
|||
return False
|
||||
|
||||
|
||||
def sdl_prompt(app_name, title):
|
||||
tags = ['']
|
||||
if '__required' in games[app_name]:
|
||||
tags.extend(games[app_name]['__required']['tags'])
|
||||
def get_int_choice(prompt, default=None, min_choice=None, max_choice=None, return_on_invalid=False):
|
||||
if default is not None:
|
||||
prompt = f'{prompt} [{default}]: '
|
||||
else:
|
||||
prompt = f'{prompt}: '
|
||||
|
||||
print(f'You are about to install {title}, this game supports selective downloads.')
|
||||
print('The following optional packs are available:')
|
||||
for tag, info in games[app_name].items():
|
||||
while True:
|
||||
try:
|
||||
if inp := input(prompt):
|
||||
choice = int(inp)
|
||||
else:
|
||||
return default
|
||||
except ValueError:
|
||||
if return_on_invalid:
|
||||
return None
|
||||
return_on_invalid = True
|
||||
continue
|
||||
else:
|
||||
if min_choice is not None and choice < min_choice:
|
||||
print(f'Number must be greater than {min_choice}')
|
||||
if return_on_invalid:
|
||||
return None
|
||||
return_on_invalid = True
|
||||
continue
|
||||
if max_choice is not None and choice > max_choice:
|
||||
print(f'Number must be less than {max_choice}')
|
||||
if return_on_invalid:
|
||||
return None
|
||||
return_on_invalid = True
|
||||
continue
|
||||
return choice
|
||||
|
||||
|
||||
def sdl_prompt(sdl_data, title):
    """Interactively ask which optional selective-download packs to install.

    Returns the list of download tags to use; always includes the required tags.
    """
    # '' selects the base (untagged) files; add required tags if present.
    selected_tags = ['']
    if '__required' in sdl_data:
        selected_tags.extend(sdl_data['__required']['tags'])

    print(f'You are about to install {title}, this application supports selective downloads.')
    print('The following optional packs are available (tag - name):')
    for pack_tag, pack_info in sdl_data.items():
        if pack_tag == '__required':
            continue
        print(' *', pack_tag, '-', pack_info['name'])

    # Show up to two real tags as an input example.
    examples = ', '.join([g for g in sdl_data.keys() if g != '__required'][:2])
    print(f'Please enter tags of pack(s) to install (space/comma-separated, e.g. "{examples}")')
    print('Leave blank to use defaults (only required data will be downloaded).')
    choices = input('Additional packs [Enter to confirm]: ')
    if not choices:
        return selected_tags

    # Accept quotes and both comma- and space-separated lists.
    for token in choices.strip('"').replace(',', ' ').split():
        token = token.strip()
        if token in sdl_data:
            selected_tags.extend(sdl_data[token]['tags'])
        else:
            print('Invalid tag:', token)

    return selected_tags
|
||||
|
||||
|
||||
def strtobool(val):
    """Convert a string representation of truth to true (1) or false (0).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
    'val' is anything else.

    Copied from python standard library as distutils.util.strtobool is deprecated.
    """
    normalized = val.lower()
    if normalized in ('y', 'yes', 't', 'true', 'on', '1'):
        return 1
    if normalized in ('n', 'no', 'f', 'false', 'off', '0'):
        return 0
    raise ValueError("invalid truth value %r" % (val,))
|
||||
|
||||
|
|
|
@ -1,29 +1,28 @@
|
|||
import argparse
|
||||
|
||||
# reference: https://gist.github.com/sampsyo/471779#gistcomment-2886157
|
||||
|
||||
|
||||
class AliasedSubParsersAction(argparse._SubParsersAction):
|
||||
class _AliasedPseudoAction(argparse.Action):
|
||||
def __init__(self, name, aliases, help):
|
||||
dest = name
|
||||
if aliases:
|
||||
dest += ' (%s)' % ','.join(aliases)
|
||||
sup = super(AliasedSubParsersAction._AliasedPseudoAction, self)
|
||||
sup.__init__(option_strings=[], dest=dest, help=help)
|
||||
|
||||
class HiddenAliasSubparsersAction(argparse._SubParsersAction):
    """Subparsers action that accepts aliases but can hide them from help output."""

    def add_parser(self, name, **kwargs):
        # Derive the sub-command's program name from our prefix when not given.
        if kwargs.get('prog') is None:
            kwargs['prog'] = f'{self._prog_prefix} {name}'

        aliases = kwargs.pop('aliases', ())
        hide_aliases = kwargs.pop('hide_aliases', False)

        # create a pseudo-action to hold the choice help
        if 'help' in kwargs:
            help = kwargs.pop('help')
            shown_aliases = None if hide_aliases else aliases
            self._choices_actions.append(self._ChoicesPseudoAction(name, shown_aliases, help))

        # create the parser and add it to the map
        parser = self._parser_class(**kwargs)
        self._name_parser_map[name] = parser

        # make parser available under aliases also
        for alias in aliases:
            self._name_parser_map[alias] = parser

        return parser
|
||||
|
|
248
legendary/utils/egl_crypt.py
Normal file
248
legendary/utils/egl_crypt.py
Normal file
|
@ -0,0 +1,248 @@
|
|||
"""
|
||||
Stripped down version of https://github.com/boppreh/aes which is in turn based
|
||||
on https://github.com/bozhu/AES-Python with ECB decryption added.
|
||||
|
||||
You should practically never roll your own crypto like this.
|
||||
In this case it's just unimportant enough since all it needs to do is decrypt some data from the EGL config file.
|
||||
"""
|
||||
|
||||
import locale
|
||||
|
||||
s_box = (
|
||||
0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,
|
||||
0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,
|
||||
0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,
|
||||
0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,
|
||||
0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,
|
||||
0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,
|
||||
0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,
|
||||
0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,
|
||||
0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,
|
||||
0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,
|
||||
0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,
|
||||
0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,
|
||||
0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,
|
||||
0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,
|
||||
0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,
|
||||
0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16,
|
||||
)
|
||||
|
||||
inv_s_box = (
|
||||
0x52, 0x09, 0x6A, 0xD5, 0x30, 0x36, 0xA5, 0x38, 0xBF, 0x40, 0xA3, 0x9E, 0x81, 0xF3, 0xD7, 0xFB,
|
||||
0x7C, 0xE3, 0x39, 0x82, 0x9B, 0x2F, 0xFF, 0x87, 0x34, 0x8E, 0x43, 0x44, 0xC4, 0xDE, 0xE9, 0xCB,
|
||||
0x54, 0x7B, 0x94, 0x32, 0xA6, 0xC2, 0x23, 0x3D, 0xEE, 0x4C, 0x95, 0x0B, 0x42, 0xFA, 0xC3, 0x4E,
|
||||
0x08, 0x2E, 0xA1, 0x66, 0x28, 0xD9, 0x24, 0xB2, 0x76, 0x5B, 0xA2, 0x49, 0x6D, 0x8B, 0xD1, 0x25,
|
||||
0x72, 0xF8, 0xF6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xD4, 0xA4, 0x5C, 0xCC, 0x5D, 0x65, 0xB6, 0x92,
|
||||
0x6C, 0x70, 0x48, 0x50, 0xFD, 0xED, 0xB9, 0xDA, 0x5E, 0x15, 0x46, 0x57, 0xA7, 0x8D, 0x9D, 0x84,
|
||||
0x90, 0xD8, 0xAB, 0x00, 0x8C, 0xBC, 0xD3, 0x0A, 0xF7, 0xE4, 0x58, 0x05, 0xB8, 0xB3, 0x45, 0x06,
|
||||
0xD0, 0x2C, 0x1E, 0x8F, 0xCA, 0x3F, 0x0F, 0x02, 0xC1, 0xAF, 0xBD, 0x03, 0x01, 0x13, 0x8A, 0x6B,
|
||||
0x3A, 0x91, 0x11, 0x41, 0x4F, 0x67, 0xDC, 0xEA, 0x97, 0xF2, 0xCF, 0xCE, 0xF0, 0xB4, 0xE6, 0x73,
|
||||
0x96, 0xAC, 0x74, 0x22, 0xE7, 0xAD, 0x35, 0x85, 0xE2, 0xF9, 0x37, 0xE8, 0x1C, 0x75, 0xDF, 0x6E,
|
||||
0x47, 0xF1, 0x1A, 0x71, 0x1D, 0x29, 0xC5, 0x89, 0x6F, 0xB7, 0x62, 0x0E, 0xAA, 0x18, 0xBE, 0x1B,
|
||||
0xFC, 0x56, 0x3E, 0x4B, 0xC6, 0xD2, 0x79, 0x20, 0x9A, 0xDB, 0xC0, 0xFE, 0x78, 0xCD, 0x5A, 0xF4,
|
||||
0x1F, 0xDD, 0xA8, 0x33, 0x88, 0x07, 0xC7, 0x31, 0xB1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xEC, 0x5F,
|
||||
0x60, 0x51, 0x7F, 0xA9, 0x19, 0xB5, 0x4A, 0x0D, 0x2D, 0xE5, 0x7A, 0x9F, 0x93, 0xC9, 0x9C, 0xEF,
|
||||
0xA0, 0xE0, 0x3B, 0x4D, 0xAE, 0x2A, 0xF5, 0xB0, 0xC8, 0xEB, 0xBB, 0x3C, 0x83, 0x53, 0x99, 0x61,
|
||||
0x17, 0x2B, 0x04, 0x7E, 0xBA, 0x77, 0xD6, 0x26, 0xE1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0C, 0x7D,
|
||||
)
|
||||
|
||||
|
||||
def sub_bytes(s):
    """Replace every byte of the 4x4 state with its AES S-box substitution, in place."""
    for row in s:
        for j, value in enumerate(row):
            row[j] = s_box[value]
||||
|
||||
|
||||
def inv_sub_bytes(s):
    """Replace every byte of the 4x4 state with its inverse S-box substitution, in place."""
    for row in s:
        for j, value in enumerate(row):
            row[j] = inv_s_box[value]
||||
|
||||
|
||||
def shift_rows(s):
    """AES ShiftRows: rotate column i of the state upwards by i positions, in place.

    (The state is stored row-major, so AES "rows" appear here as columns.)
    """
    for col in range(1, 4):
        column = [s[row][col] for row in range(4)]
        for row in range(4):
            s[row][col] = column[(row + col) % 4]
||||
|
||||
|
||||
def inv_shift_rows(s):
    """Inverse of AES ShiftRows: rotate column i of the state downwards by i positions, in place."""
    for col in range(1, 4):
        column = [s[row][col] for row in range(4)]
        for row in range(4):
            s[row][col] = column[(row - col) % 4]
||||
|
||||
|
||||
def add_round_key(s, k):
    """XOR the 4x4 round-key matrix k into the state s, in place."""
    for state_row, key_row in zip(s, k):
        for j in range(4):
            state_row[j] ^= key_row[j]
||||
|
||||
|
||||
# learned from http://cs.ucsb.edu/~koc/cs178/projects/JT/aes.c
def xtime(a):
    """Multiply a GF(2^8) element by x (0x02), reducing modulo the AES polynomial."""
    doubled = a << 1
    if a & 0x80:
        doubled = (doubled ^ 0x1B) & 0xFF
    return doubled


def mix_single_column(a):
    """Apply the AES MixColumns transform to one 4-byte column, in place."""
    # see Sec 4.1.2 in The Design of Rijndael
    t = a[0] ^ a[1] ^ a[2] ^ a[3]
    before = list(a)
    for i in range(4):
        a[i] ^= t ^ xtime(before[i] ^ before[(i + 1) % 4])


def mix_columns(s):
    """Apply MixColumns to every column of the state, in place."""
    for column in s:
        mix_single_column(column)


def inv_mix_columns(s):
    """Apply the inverse MixColumns transform to the state, in place."""
    # see Sec 4.1.3 in The Design of Rijndael
    # Pre-condition each column so the forward MixColumns completes the inverse.
    for column in s:
        u = xtime(xtime(column[0] ^ column[2]))
        v = xtime(xtime(column[1] ^ column[3]))
        column[0] ^= u
        column[1] ^= v
        column[2] ^= u
        column[3] ^= v

    mix_columns(s)
|
||||
|
||||
|
||||
r_con = (
|
||||
0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,
|
||||
0x80, 0x1B, 0x36, 0x6C, 0xD8, 0xAB, 0x4D, 0x9A,
|
||||
0x2F, 0x5E, 0xBC, 0x63, 0xC6, 0x97, 0x35, 0x6A,
|
||||
0xD4, 0xB3, 0x7D, 0xFA, 0xEF, 0xC5, 0x91, 0x39,
|
||||
)
|
||||
|
||||
|
||||
def bytes2matrix(text):
    """ Converts a 16-byte array into a 4x4 matrix. """
    matrix = []
    for offset in range(0, len(text), 4):
        matrix.append(list(text[offset:offset + 4]))
    return matrix
|
||||
|
||||
|
||||
def matrix2bytes(matrix):
    """ Converts a 4x4 matrix into a 16-byte array. """
    flat = []
    for row in matrix:
        flat.extend(row)
    return bytes(flat)
|
||||
|
||||
|
||||
def xor_bytes(a, b):
    """ Returns a new byte array with the elements xor'ed. """
    out = bytearray()
    for left, right in zip(a, b):
        out.append(left ^ right)
    return bytes(out)
|
||||
|
||||
|
||||
def unpad(plaintext):
    """
    Removes a PKCS#7 padding, returning the unpadded text and ensuring the
    padding was correct.
    """
    padding_len = plaintext[-1]
    assert padding_len > 0
    message = plaintext[:-padding_len]
    padding = plaintext[-padding_len:]
    # Every padding byte must equal the padding length.
    for pad_byte in padding:
        assert pad_byte == padding_len
    return message
|
||||
|
||||
|
||||
def split_blocks(message, block_size=16, require_padding=True):
    """
    Split `message` into consecutive `block_size`-byte chunks.

    :param message: bytes-like data to split
    :param block_size: chunk length in bytes (default: the AES block size)
    :param require_padding: if True, assert the message length is an exact multiple of block_size
    :return: list of chunks; the last one may be short when require_padding is False
    """
    assert len(message) % block_size == 0 or not require_padding
    # Fix: the original sliced a hard-coded 16 bytes per chunk while stepping by
    # block_size, producing wrong chunks for any non-default block size.
    return [message[i:i + block_size] for i in range(0, len(message), block_size)]
|
||||
|
||||
|
||||
class AES:
    """
    Minimal AES implementation (decryption only, ECB mode).

    This is a raw implementation of AES, without key stretching or IV
    management. Key sizes of 16, 24 and 32 bytes (AES-128/192/256) are supported.
    """
    # Number of cipher rounds for each supported key length.
    rounds_by_key_size = {16: 10, 24: 12, 32: 14}

    def __init__(self, master_key):
        """
        Initializes the object with a given key, pre-expanding the round keys.
        """
        assert len(master_key) in AES.rounds_by_key_size
        self.n_rounds = AES.rounds_by_key_size[len(master_key)]
        self._key_matrices = self._expand_key(master_key)

    def _expand_key(self, master_key):
        """
        Expands and returns a list of 4x4 round-key matrices for the given master_key.
        """
        # Initialize round keys with raw key material.
        key_columns = bytes2matrix(master_key)
        iteration_size = len(master_key) // 4

        # Each iteration has exactly as many columns as the key material.
        rcon_index = 1
        while len(key_columns) < (self.n_rounds + 1) * 4:
            # Start from a copy of the previous word.
            word = list(key_columns[-1])

            if len(key_columns) % iteration_size == 0:
                # Key schedule core: rotate, substitute, XOR the round constant.
                word.append(word.pop(0))
                word = [s_box[b] for b in word]
                # Only the first byte of R-CON is non-zero.
                word[0] ^= r_con[rcon_index]
                rcon_index += 1
            elif len(master_key) == 32 and len(key_columns) % iteration_size == 4:
                # AES-256 additionally substitutes the fourth word of each iteration.
                word = [s_box[b] for b in word]

            # XOR with the equivalent word from the previous iteration.
            key_columns.append(xor_bytes(word, key_columns[-iteration_size]))

        # Group the flat list of words into 4x4 matrices, one per round.
        return [key_columns[4 * i: 4 * (i + 1)] for i in range(len(key_columns) // 4)]

    def decrypt_block(self, ciphertext):
        """
        Decrypts a single block of 16 byte long ciphertext.
        """
        assert len(ciphertext) == 16

        state = bytes2matrix(ciphertext)

        # Undo the final (short) round.
        add_round_key(state, self._key_matrices[-1])
        inv_shift_rows(state)
        inv_sub_bytes(state)

        # Undo the full middle rounds in reverse order.
        for i in range(self.n_rounds - 1, 0, -1):
            add_round_key(state, self._key_matrices[i])
            inv_mix_columns(state)
            inv_shift_rows(state)
            inv_sub_bytes(state)

        # Undo the initial key addition.
        add_round_key(state, self._key_matrices[0])

        return matrix2bytes(state)

    def decrypt_ecb(self, ciphertext):
        """
        Decrypts `ciphertext` using ECB mode (each block independently).
        """
        return b''.join(self.decrypt_block(block)
                        for block in split_blocks(ciphertext, require_padding=False))
|
||||
|
||||
|
||||
def decrypt_epic_data(key, encrypted):
    """
    Decrypt EGL config data and decode it to a string.

    :param key: ASCII string key used for AES-ECB decryption
    :param encrypted: raw ciphertext bytes
    :return: decrypted data decoded with the first encoding that succeeds
    :raises ValueError: if no known encoding can decode the decrypted bytes
    """
    decrypted = unpad(AES(key.encode('ascii')).decrypt_ecb(encrypted)).strip(b'\x00')
    # try various encodings, just to be sure
    for encoding in (locale.getpreferredencoding(), 'cp1252', 'cp932', 'ascii', 'utf-8'):
        try:
            return decrypted.decode(encoding)
        except (UnicodeDecodeError, LookupError):
            # Fix: was a bare `except:` which also swallowed KeyboardInterrupt etc.;
            # only decode failures / unknown encodings should fall through here.
            continue
    raise ValueError('Failed to decode decrypted data')
|
14
legendary/utils/env.py
Normal file
14
legendary/utils/env.py
Normal file
|
@ -0,0 +1,14 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def is_pyinstaller():
    """True when running inside a PyInstaller bundle (sys is frozen with _MEIPASS set)."""
    return getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS')


def is_windows_or_pyi():
    """True on Windows or when running from a PyInstaller bundle."""
    if is_pyinstaller():
        return True
    return os.name == 'nt'


def is_windows_mac_or_pyi():
    """True on Windows, macOS, or when running from a PyInstaller bundle."""
    if is_pyinstaller():
        return True
    return os.name == 'nt' or sys.platform == 'darwin'
|
|
@ -1,5 +1,7 @@
|
|||
# coding: utf-8
|
||||
|
||||
from sys import platform
|
||||
|
||||
# games where the download order optimizations are enabled by default
|
||||
# a set() of versions can be specified, empty set means all versions.
|
||||
_optimize_default = {
|
||||
|
@ -11,9 +13,32 @@ _optimize_default = {
|
|||
}
|
||||
}
|
||||
|
||||
# Some games use launchers that don't work with Legendary, these are overriden here
|
||||
_exe_overrides = {
|
||||
'kinglet': {
|
||||
'darwin': 'Base/Binaries/Win64EOS/CivilizationVI.exe',
|
||||
'linux': 'Base/Binaries/Win64EOS/CivilizationVI.exe',
|
||||
'win32': 'LaunchPad/LaunchPad.exe'
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def is_opt_enabled(app_name, version):
    """Whether download-order optimisation is enabled for this app/version."""
    versions = _optimize_default.get(app_name.lower())
    if versions is None:
        return False
    # An empty version set means the optimisation applies to every version.
    return not versions or version in versions
|
||||
|
||||
|
||||
def get_exe_override(app_name):
    """Return the platform-specific executable override for app_name, or None."""
    overrides = _exe_overrides.get(app_name.lower(), {})
    return overrides.get(platform, None)
|
||||
|
||||
|
||||
def update_workarounds(api_data):
    """Replace the bundled workaround tables with data fetched from the Legendary API."""
    for api_key, table in (('reorder_optimization', _optimize_default),
                           ('executable_override', _exe_overrides)):
        if api_key in api_data:
            table.clear()
            table.update(api_data[api_key])
|
||||
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
from legendary.models.manifest import Manifest
|
||||
|
||||
|
||||
def combine_manifests(base_manifest: Manifest, delta_manifest: Manifest):
    """
    Merge `delta_manifest` into `base_manifest` in place.

    Files present in both are overwritten with the delta's version; files and
    chunks that only exist in the delta are appended. Counts are refreshed and
    cached lookup maps invalidated afterwards.

    :param base_manifest: manifest to be updated in place
    :param delta_manifest: manifest whose files/chunks take precedence
    """
    file_list = base_manifest.file_manifest_list
    overridden = set()
    # overwrite file elements with the ones from the delta manifest
    for idx, file_elem in enumerate(file_list.elements):
        try:
            delta_file = delta_manifest.file_manifest_list.get_file_by_path(file_elem.filename)
        except ValueError:
            # file does not exist in the delta manifest, keep the base version
            continue
        file_list.elements[idx] = delta_file
        overridden.add(delta_file.filename)

    # add other files that may be missing
    for delta_file in delta_manifest.file_manifest_list.elements:
        if delta_file.filename not in overridden:
            file_list.elements.append(delta_file)
    # update count and clear cached path map
    file_list.count = len(file_list.elements)
    file_list._path_map = None

    # ensure the chunk guid map has been built (lookup side effect)
    try:
        base_manifest.chunk_data_list.get_chunk_by_guid(0)
    except Exception:
        # Fix: was a bare `except:` (also caught KeyboardInterrupt/SystemExit);
        # any lookup failure here is expected and only the map build matters.
        pass

    # add new chunks from delta manifest to main manifest and again clear maps and update count
    chunk_list = base_manifest.chunk_data_list
    existing_chunk_guids = chunk_list._guid_int_map.keys()

    for chunk in delta_manifest.chunk_data_list.elements:
        if chunk.guid_num not in existing_chunk_guids:
            chunk_list.elements.append(chunk)

    chunk_list.count = len(chunk_list.elements)
    chunk_list._guid_map = None
    chunk_list._guid_int_map = None
    chunk_list._path_map = None
|
|
@ -22,11 +22,14 @@ def _filename_matches(filename, patterns):
|
|||
"""
|
||||
|
||||
for pattern in patterns:
|
||||
if pattern.endswith('/'):
|
||||
# pat is a directory, check if path starts with it
|
||||
if filename.startswith(pattern):
|
||||
return True
|
||||
elif fnmatch(filename, pattern):
|
||||
# Pattern is a directory, just check if path starts with it
|
||||
if pattern.endswith('/') and filename.startswith(pattern):
|
||||
return True
|
||||
# Check if pattern is a suffix of filename
|
||||
if filename.endswith(pattern):
|
||||
return True
|
||||
# Check if pattern with wildcards ('*') matches
|
||||
if fnmatch(filename, pattern):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
@ -51,8 +54,8 @@ class SaveGameHelper:
|
|||
_tmp_file.seek(0)
|
||||
return ci
|
||||
|
||||
def package_savegame(self, input_folder: str, app_name: str = '',
|
||||
epic_id: str = '', cloud_folder: str = '',
|
||||
def package_savegame(self, input_folder: str, app_name: str = '', epic_id: str = '',
|
||||
cloud_folder: str = '', cloud_folder_mac: str = '',
|
||||
include_filter: list = None,
|
||||
exclude_filter: list = None,
|
||||
manifest_dt: datetime = None):
|
||||
|
@ -61,6 +64,7 @@ class SaveGameHelper:
|
|||
:param app_name: App name for savegame being stored
|
||||
:param epic_id: Epic account ID
|
||||
:param cloud_folder: Folder the savegame resides in (based on game metadata)
|
||||
:param cloud_folder_mac: Folder the macOS savegame resides in (based on game metadata)
|
||||
:param include_filter: list of patterns for files to include (excludes all others)
|
||||
:param exclude_filter: list of patterns for files to exclude (includes all others)
|
||||
:param manifest_dt: datetime for the manifest name (optional)
|
||||
|
@ -77,6 +81,8 @@ class SaveGameHelper:
|
|||
manifest_dt = datetime.utcnow()
|
||||
m.meta.build_version = manifest_dt.strftime('%Y.%m.%d-%H.%M.%S')
|
||||
m.custom_fields['CloudSaveFolder'] = cloud_folder
|
||||
if cloud_folder_mac:
|
||||
m.custom_fields['CloudSaveFolder_MAC'] = cloud_folder_mac
|
||||
|
||||
self.log.info(f'Packing savegame for "{app_name}", input folder: {input_folder}')
|
||||
files = []
|
||||
|
@ -130,7 +136,7 @@ class SaveGameHelper:
|
|||
self.log.warning(f'Got EOF for "{f.filename}" with {remaining} bytes remaining! '
|
||||
f'File may have been corrupted/modified.')
|
||||
break
|
||||
|
||||
|
||||
cur_buffer.write(_tmp)
|
||||
fhash.update(_tmp) # update sha1 hash with new data
|
||||
f.chunk_parts.append(cp)
|
||||
|
@ -164,3 +170,21 @@ class SaveGameHelper:
|
|||
|
||||
# return dict with created files for uploading/whatever
|
||||
return self.files
|
||||
|
||||
def get_deletion_list(self, save_folder, include_filter=None, exclude_filter=None):
|
||||
files = []
|
||||
for _dir, _, _files in os.walk(save_folder):
|
||||
for _file in _files:
|
||||
_file_path = os.path.join(_dir, _file)
|
||||
_file_path_rel = os.path.relpath(_file_path, save_folder).replace('\\', '/')
|
||||
|
||||
if include_filter and not _filename_matches(_file_path_rel, include_filter):
|
||||
self.log.debug(f'Excluding "{_file_path_rel}" (does not match include filter)')
|
||||
continue
|
||||
elif exclude_filter and _filename_matches(_file_path_rel, exclude_filter):
|
||||
self.log.debug(f'Excluding "{_file_path_rel}" (does match exclude filter)')
|
||||
continue
|
||||
|
||||
files.append(_file_path_rel)
|
||||
|
||||
return files
|
||||
|
|
|
@ -33,6 +33,9 @@ games = {
|
|||
|
||||
def get_sdl_appname(app_name):
    """Return the SDL data key that app_name belongs to (skipping _Mac variants), or None."""
    for candidate in games:
        if candidate.endswith('_Mac'):
            continue
        if app_name.startswith(candidate):
            return candidate
    return None
|
||||
|
|
155
legendary/utils/webview_login.py
Normal file
155
legendary/utils/webview_login.py
Normal file
|
@ -0,0 +1,155 @@
|
|||
import logging
|
||||
import json
|
||||
import os
|
||||
import webbrowser
|
||||
|
||||
from legendary import __version__
|
||||
|
||||
logger = logging.getLogger('WebViewHelper')
|
||||
webview_available = True
|
||||
|
||||
try:
|
||||
import webview
|
||||
|
||||
# silence logger
|
||||
webview.logger.setLevel(logging.FATAL)
|
||||
gui = webview.initialize()
|
||||
if gui and os.name == 'nt' and gui.renderer not in ('edgechromium', 'cef'):
|
||||
raise NotImplementedError(f'Renderer {gui.renderer} not supported on Windows.')
|
||||
except Exception as e:
|
||||
logger.debug(f'Webview unavailable, disabling webview login (Exception: {e!r}).')
|
||||
webview_available = False
|
||||
|
||||
login_url = 'https://www.epicgames.com/id/login'
|
||||
sid_url = 'https://www.epicgames.com/id/api/redirect?'
|
||||
logout_url = f'https://www.epicgames.com/id/logout?productName=epic-games&redirectUrl={login_url}'
|
||||
goodbye_url = 'https://legendary.gl/goodbye'
|
||||
window_js = '''
|
||||
window.ue = {
|
||||
signinprompt: {
|
||||
requestexchangecodesignin: pywebview.api.set_exchange_code,
|
||||
registersignincompletecallback: pywebview.api.trigger_sid_exchange
|
||||
},
|
||||
common: {
|
||||
launchexternalurl: pywebview.api.open_url_external
|
||||
}
|
||||
}
|
||||
'''
|
||||
|
||||
get_sid_js = '''
|
||||
function on_loaded() {
|
||||
pywebview.api.login_sid(this.responseText);
|
||||
}
|
||||
|
||||
var sid_req = new XMLHttpRequest();
|
||||
sid_req.addEventListener("load", on_loaded);
|
||||
sid_req.open("GET", "/id/api/redirect?");
|
||||
sid_req.send();
|
||||
'''
|
||||
|
||||
|
||||
class MockLauncher:
    """Stand-in for the Epic launcher's JS API surface, used to capture login tokens."""

    def __init__(self, callback_sid, callback_code):
        # Callbacks into the caller that turn a SID / exchange code into a session.
        self.callback_sid = callback_sid
        self.callback_code = callback_code
        self.window = None
        # Whether window_js should be injected on the next page load.
        self.inject_js = True
        # When set, the next completed page load closes the window.
        self.destroy_on_load = False
        # Result of the exchange-code callback, read by do_webview_login().
        self.callback_result = None

    def on_loaded(self):
        current_url = self.window.get_current_url()
        logger.debug(f'Loaded url: {current_url.partition("?")[0]}')

        if self.destroy_on_load:
            logger.info('Closing login window...')
            self.window.destroy()
            return

        # Inject JS so required window.ue stuff is available
        if self.inject_js:
            self.window.evaluate_js(window_js)

        if 'logout' in current_url:
            if self.callback_sid:
                # prepare to close browser after logout redirect
                self.destroy_on_load = True
            else:
                self.inject_js = True

    def nop(self, *args, **kwargs):
        # Dummy handler for JS API calls we do not care about.
        return

    def open_url_external(self, url):
        # Open links the embedded page requests in the user's real browser.
        webbrowser.open(url)

    def set_exchange_code(self, exchange_code):
        self.inject_js = False
        logger.debug('Got exchange code (stage 1)!')
        # The default Windows webview retains cookies, GTK/Qt do not. Therefore we can
        # skip logging out on those platforms and directly use the exchange code we're given.
        # On windows we have to do a little dance with the SID to create a session that
        # remains valid after logging out in the embedded browser.
        # Update: Epic broke SID login, we'll also do this on Windows now
        self.destroy_on_load = True
        try:
            self.callback_result = self.callback_code(exchange_code)
        except Exception as e:
            logger.error(f'Logging in via exchange-code failed with {e!r}')
        finally:
            # We cannot destroy the browser from here,
            # so we'll load a small goodbye site first.
            self.window.load_url(goodbye_url)

    def trigger_sid_exchange(self, *args, **kwargs):
        # check if code-based login hasn't already set the destroy flag
        if not self.destroy_on_load:
            logger.debug('Injecting SID JS')
            # inject JS to get SID API response and call our API
            self.window.evaluate_js(get_sid_js)

    def login_sid(self, sid_json):
        # Try SID login, then log out
        try:
            sid = json.loads(sid_json)['sid']
            logger.debug(f'Got SID (stage 2)! Executing sid login callback...')
            exchange_code = self.callback_sid(sid)
            if exchange_code:
                self.callback_result = self.callback_code(exchange_code)
        except Exception as e:
            logger.error(f'SID login failed with {e!r}')
        finally:
            logger.debug('Starting browser logout...')
            self.window.load_url(logout_url)
|
||||
|
||||
|
||||
def do_webview_login(callback_sid=None, callback_code=None, user_agent=None):
    """Run the embedded-browser Epic login flow.

    Returns the login callback's result, or None if the webview failed to start
    or the user aborted the login.
    """
    api = MockLauncher(callback_sid=callback_sid, callback_code=callback_code)
    start_url = login_url

    if os.name == 'nt':
        # On Windows we open the logout URL first to invalidate the current cookies (if any).
        # Additionally, we have to disable JS injection for the first load, as otherwise the user
        # will get an error for some reason.
        start_url = logout_url
        api.inject_js = False

    logger.info('Opening Epic Games login window...')
    # Open logout URL first to remove existing cookies, then redirect to login.
    window = webview.create_window(f'Legendary {__version__} - Epic Games Account Login',
                                   url=start_url, width=768, height=1024, js_api=api)
    api.window = window
    window.events.loaded += api.on_loaded

    try:
        webview.start(user_agent=user_agent)
    except Exception as we:
        logger.error(f'Running webview failed with {we!r}. If this error persists try the manual '
                     f'login process by adding --disable-webview to your command line.')
        return None

    if api.callback_result is None:
        logger.error('Login aborted by user.')

    return api.callback_result
|
|
@ -1,17 +0,0 @@
|
|||
import os
|
||||
import configparser
|
||||
|
||||
|
||||
def read_registry(wine_pfx):
|
||||
reg = configparser.ConfigParser(comment_prefixes=(';', '#', '/', 'WINE'), allow_no_value=True)
|
||||
reg.optionxform = str
|
||||
reg.read(os.path.join(wine_pfx, 'user.reg'))
|
||||
return reg
|
||||
|
||||
|
||||
def get_shell_folders(registry, wine_pfx):
|
||||
folders = dict()
|
||||
for k, v in registry['Software\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Explorer\\\\Shell Folders'].items():
|
||||
path_cleaned = v.strip('"').strip().replace('\\\\', '/').replace('C:/', '')
|
||||
folders[k.strip('"').strip()] = os.path.join(wine_pfx, 'drive_c', path_cleaned)
|
||||
return folders
|
|
@ -1 +1,2 @@
|
|||
requests<3.0
|
||||
filelock
|
||||
|
|
17
setup.py
17
setup.py
|
@ -8,8 +8,8 @@ from setuptools import setup
|
|||
|
||||
from legendary import __version__ as legendary_version
|
||||
|
||||
if sys.version_info < (3, 8):
|
||||
sys.exit('python 3.8 or higher is required for legendary')
|
||||
if sys.version_info < (3, 9):
|
||||
sys.exit('python 3.9 or higher is required for legendary')
|
||||
|
||||
with open("README.md", "r") as fh:
|
||||
long_description_l = fh.readlines()
|
||||
|
@ -26,6 +26,7 @@ setup(
|
|||
'legendary',
|
||||
'legendary.api',
|
||||
'legendary.downloader',
|
||||
'legendary.downloader.mp',
|
||||
'legendary.lfs',
|
||||
'legendary.models',
|
||||
'legendary.utils',
|
||||
|
@ -36,22 +37,26 @@ setup(
|
|||
install_requires=[
|
||||
'requests<3.0',
|
||||
'setuptools',
|
||||
'wheel'
|
||||
'wheel',
|
||||
'filelock'
|
||||
],
|
||||
extras_require=dict(
|
||||
webview=['pywebview>=3.4'],
|
||||
webview_gtk=['pywebview>=3.4', 'PyGObject']
|
||||
),
|
||||
url='https://github.com/derrod/legendary',
|
||||
description='Free and open-source replacement for the Epic Games Launcher application',
|
||||
long_description=long_description,
|
||||
long_description_content_type="text/markdown",
|
||||
python_requires='>=3.8',
|
||||
python_requires='>=3.9',
|
||||
classifiers=[
|
||||
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Operating System :: POSIX :: Linux',
|
||||
'Operating System :: Microsoft',
|
||||
'Intended Audience :: End Users/Desktop',
|
||||
'Topic :: Games/Entertainment',
|
||||
'Development Status :: 4 - Beta',
|
||||
],
|
||||
]
|
||||
)
|
||||
|
|
Loading…
Reference in a new issue