Signed-off-by: Tommy <contact@tommytran.io>
Tommy 2022-07-16 19:40:48 -04:00
parent dbd248041b
commit 3004366831
No known key found for this signature in database
GPG Key ID: 060B29EB996BD9F2
20 changed files with 644 additions and 28 deletions

.DS_Store vendored (binary file, not shown)

.firebaserc Normal file

@@ -0,0 +1,5 @@
{
  "projects": {
    "default": "privsec-356523"
  }
}


@@ -0,0 +1,19 @@
# This file was auto-generated by the Firebase CLI
# https://github.com/firebase/firebase-tools

name: Deploy to Firebase Hosting on merge
'on':
  push:
    branches:
      - main
jobs:
  build_and_deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: FirebaseExtended/action-hosting-deploy@v0
        with:
          repoToken: '${{ secrets.GITHUB_TOKEN }}'
          firebaseServiceAccount: '${{ secrets.FIREBASE_SERVICE_ACCOUNT_PRIVSEC_356523 }}'
          channelId: live
          projectId: privsec-356523


@@ -0,0 +1,16 @@
# This file was auto-generated by the Firebase CLI
# https://github.com/firebase/firebase-tools

name: Deploy to Firebase Hosting on PR
'on': pull_request
jobs:
  build_and_preview:
    if: '${{ github.event.pull_request.head.repo.full_name == github.repository }}'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: FirebaseExtended/action-hosting-deploy@v0
        with:
          repoToken: '${{ secrets.GITHUB_TOKEN }}'
          firebaseServiceAccount: '${{ secrets.FIREBASE_SERVICE_ACCOUNT_PRIVSEC_356523 }}'
          projectId: privsec-356523

.gitignore vendored Normal file

@@ -0,0 +1,66 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
firebase-debug.log*
firebase-debug.*.log*
# Firebase cache
.firebase/
# Firebase config
# Uncomment this if you'd like others to create their own Firebase project.
# For a team working on the same Firebase project(s), it is recommended to leave
# it commented so all members can deploy to the same project(s) in .firebaserc.
# .firebaserc
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env


@@ -4,9 +4,6 @@ paginate: 5
theme: PaperMod
enableRobotsTXT: true
buildDrafts: false
buildFuture: false
buildExpired: false
minify:
disableXML: true
@@ -14,33 +11,18 @@ minify:
params:
env: production # to enable google analytics, opengraph, twitter-cards and schema.
title: PrivSec.dev
description: "A practical approach to privacy and security"
keywords: [Privacy, Security]
author: PrivSec.dev Team
author: PrivSec Team
# author: ["Me", "You"] # multiple authors
images: ["<link or path of image for opengraph, twitter-cards>"]
DateFormat: "January 2, 2006"
defaultTheme: auto # dark, light
defaultTheme: dark # dark, light
disableThemeToggle: false
ShowReadingTime: true
ShowShareButtons: true
ShowPostNavLinks: true
ShowBreadCrumbs: true
ShowCodeCopyButtons: false
ShowWordCount: true
showtoc: true
ShowRssButtonInSectionTermList: true
UseHugoToc: true
disableSpecial1stPost: false
disableScrollToTop: false
comments: false
hidemeta: false
hideSummary: false
showtoc: false
tocopen: false
defaultTheme: dark
disableThemeToggle: true
ShowCodeCopyButtons: true
assets:
# disableHLJS: true # to disable highlight.js
@@ -59,6 +41,13 @@ params:
enabled: true # needs to be explicitly set
title: PrivSec.dev
subtitle: "A practical approach to privacy and security"
buttons:
- name: About
url: about
- name: Donate
url: donate
- name: Tags
url: tags
socialIcons:
- name: matrix
@@ -74,7 +63,7 @@
hiddenInSingle: true # hide on single page
editPost:
URL: "https://github.com/<path_to_repo>/content"
URL: "https://github.com/PrivSec-dev/privsec.dev/content"
Text: "Suggest Changes" # edit text
appendFilePath: true # to append file path to Edit link
@@ -92,7 +81,7 @@ menu:
main:
- identifier: Knowledge Base
name: Knowledge Base
url: /Knowledge Base/
url: /knowledge/
weight: 10
- identifier: Operating Systems
name: Operating Systems
@@ -106,7 +95,7 @@ menu:
name: Providers
url: /providers/
weight: 40
- identifier: Self Hosting
name: Self Hosting
url: /self-hosting/
weight: 50
privacy:
youtube:
privacyEnhanced: true

content/about.md Normal file

@@ -0,0 +1,45 @@
---
title: "About Us"
---
PrivSec.dev is made by a group of enthusiastic individuals looking to provide practical privacy and security advice for the end user. We are security researchers, developers, system administrators... generally people with technical knowledge who work in the field.
We focus on in-depth system configuration, security analysis, and software/hardware recommendations. Our site is based on technical merit, not ideology or politics.
---
### Tommy
System Administrator. Benevolent dictator for life @privsec.dev.
**Website**: [tommytran.io](https://tommytran.io)\
**Matrix**: [@tommy:arcticfoxes.net](https://matrix.to/#/@tommy:arcticfoxes.net)\
**Email**: [tommy@privsec.dev](mailto:tommy@privsec.dev)
### June
Security Researcher. GrapheneOS Developer. Excessively obsessed with Homestuck.
**Matrix**: [@june:grapheneos.org](https://matrix.to/#/@june:grapheneos.org)\
**Email**: [june@privsec.dev](mailto:june@privsec.dev)
### Wonderfall
Random guy passing by on the Internet who is interested in all kinds of things. Total nerd.
**Websites**: [wonderfall.space](https://wonderfall.space) and [wonderfall.dev](https://wonderfall.dev)\
**Matrix**: [@wonderfall:lysergide.dev](https://matrix.to/#/@wonderfall:lysergide.dev)
### Lberrymage
Accrescent developer. Rust shill and man who can't stop recycling the names of his previous projects.
**Website**: [accrescent.app](https://accrescent.app/)\
**Matrix**: [@lberrymage:matrix.org](https://matrix.to/#/@lberrymage:matrix.org)
### Randomhydrosol
GrapheneOS Developer. Friendliest Indian tech support on planet Earth.
**Matrix**: [@randomhydrosol:grapheneos.org](https://matrix.to/#/@randomhydrosol:grapheneos.org)
### Madaidan
Security Researcher. Whonix developer. Uses Firefox, Telegram, and Linux like a true IRA terrorist.
**Website**: [https://madaidans-insecurities.github.io/](https://madaidans-insecurities.github.io/)\
**Matrix**: [@madaidan.:matrix.org](https://matrix.to/#/@madaidan.:matrix.org)

content/donate.md Normal file

@@ -0,0 +1,17 @@
---
title: "Donate"
---
The domain costs us $12/year to renew from Google. Our repository is hosted for free on GitHub, and our site is hosted for free on Firebase. It costs Tommy ~$20/month to run the mail server, but that server is used for a bunch of his projects, not just PrivSec, and we doubt it will be used much anyway. The point is, this website does not cost much to run, and as such we will not be accepting donations as a project.
The real cost is the time and energy we put into writing, testing, and fact-checking the content. Some of our members may want to accept donations, and you can donate to them individually.
### June
**XMR**: `49b1PUPeHJEDwZqaaQm4MQUjycY8ckEko53jvTcPB5yE2QKoS5haNe3Fnbg1Le7nSkgUkm4tcpj4Z2YmtaT3j6KVUVgBGw2`
---
Alternatively, please consider donating to the projects below. These are projects which we rely on for our own digital safety and recommend to our readers. They are vital for the privacy, security, and safety of thousands of people.
### GrapheneOS
**Donation Link**: [grapheneos.org/donate](https://grapheneos.org/donate)


@@ -0,0 +1,81 @@
---
title: "Multi-factor Authentication"
tags: ['knowledge base', 'security']
author: Tommy
---
**Multi-factor authentication** is a security mechanism that requires additional verification beyond your username (or email) and password. This usually comes in the form of a one-time passcode, a push notification, or plugging in and tapping a hardware security key.
## Common protocols
### Email and SMS MFA
Email and SMS MFA are examples of the weaker MFA protocols. Email MFA is not great, as whoever controls your email account can typically both reset your password and receive your MFA verification. SMS, on the other hand, is problematic due to the lack of any kind of encryption, making it vulnerable to sniffing. [SIM swap](https://en.wikipedia.org/wiki/SIM_swap_scam) attacks, if carried out successfully, will allow an attacker to receive your one-time passcode while locking you out of your own account. In certain cases, websites or services may also allow the user to reset their account login by calling them using the phone number used for MFA, which could be faked with a [spoofed caller ID](https://en.wikipedia.org/wiki/Caller_ID_spoofing).
Only use these protocols when they are the only option you have, and be very careful with SMS MFA, as it could actually worsen your security.
### Push Confirmations
Push confirmation MFA is typically a notification being sent to an app on your phone asking you to confirm new account logins. This method is a lot better than SMS or email, since an attacker typically wouldn't be able to get these push notifications without having an already logged-in device.
Push confirmation in most cases relies on a third-party provider like [Duo](https://duo.com/). This means that trust is placed in a server that neither you nor your service provider controls. A malicious push confirmation server could compromise your MFA or profile you based on which websites and accounts you use with the service.
Even if the push notification application and server are provided by a first party, as is the case with Microsoft logins and [Microsoft Authenticator](https://www.microsoft.com/en-us/security/mobile-authenticator-app), there is still a risk of you accidentally tapping the confirmation button.
### Time-based One-time Password (TOTP)
TOTP is one of the most common forms of MFA available. When you set up TOTP, you establish a "[shared secret](https://en.wikipedia.org/wiki/Shared_secret)" with the service that you intend to use and store it in your authentication app.
The time-limited code is then derived from the shared secret and the current time. As the code is only valid for a short time, without access to the shared secret, an adversary cannot generate new codes.
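To make the derivation concrete, here is a minimal sketch of the RFC 6238 algorithm in Python (assuming a Base32-encoded shared secret and the common 6-digit, 30-second settings; real authenticator apps and servers perform this kind of computation for you):
```
import base64, hashlib, hmac, struct, time

def totp(shared_secret_b32: str, digits: int = 6, period: int = 30) -> str:
    # The shared secret is usually exchanged as a Base32 string (e.g. inside the QR code).
    key = base64.b32decode(shared_secret_b32.upper())
    # The moving factor is the number of time steps elapsed since the Unix epoch.
    counter = struct.pack(">Q", int(time.time()) // period)
    mac = hmac.new(key, counter, hashlib.sha1).digest()
    # Dynamic truncation (RFC 4226): take 4 bytes at an offset given by the last nibble.
    offset = mac[-1] & 0x0F
    code = (int.from_bytes(mac[offset:offset + 4], "big") & 0x7FFFFFFF) % (10 ** digits)
    return str(code).zfill(digits)

print(totp("JBSWY3DPEHPK3PXP"))  # example secret; client and server compute the same code
```
Because both sides derive the code from the same secret and the current time, anyone who obtains the shared secret can generate valid codes indefinitely, which is why the secret itself must be stored carefully.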
If you have a [YubiKey](https://www.yubico.com/), you should store the "shared secrets" on the key itself using the [Yubico Authenticator](https://www.yubico.com/products/yubico-authenticator/) app. After the initial setup, the Yubico Authenticator will only expose the 6-digit code to the machine it is running on, not the shared secret. Additional security can be gained by requiring touch confirmation, which protects codes that are not currently in use from a compromised operating system.
Unlike [WebAuthn](#fido-fast-identity-online), TOTP offers no protection against [phishing](https://en.wikipedia.org/wiki/Phishing) or reuse attacks. If an adversary obtains a valid code from you, they may use it as many times as they like until it expires (generally 60 seconds + grace period).
Despite its shortcomings, TOTP is considered better and safer than push confirmations.
### Yubico OTP
Yubico OTP is an authentication protocol typically implemented in hardware security keys. When you decide to use Yubico OTP, the key will generate a public ID, a private ID, and a secret key, which is then uploaded to the Yubico OTP server.
When logging into a website, all you need to do is to physically touch the security key. The security key will emulate a keyboard and print out a one-time password into the password field.
The service will then forward the one-time password to the Yubico OTP server for validation. A counter is incremented both on the key and Yubico's validation server. The OTP can only be used once, and when a successful authentication occurs, the counter is increased which prevents reuse of the OTP. Yubico provides a [detailed document](https://developers.yubico.com/OTP/OTPs_Explained.html) about the process.
![Yubico OTP](/yubico-otp.png)
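The replay protection described above boils down to the validation server accepting an OTP only if its counter is strictly greater than the last counter it has seen for that key. A rough sketch of that server-side check in Python (the field names and the already-decrypted OTP are simplifications for illustration, not Yubico's actual implementation):
```
# Last counter value seen per public ID, e.g. loaded from a database.
last_seen: dict[str, int] = {}

def accept_otp(public_id: str, counter: int) -> bool:
    """Accept an already-decrypted OTP only if its counter is strictly increasing."""
    if counter <= last_seen.get(public_id, -1):
        return False  # replayed or stale OTP: reject
    last_seen[public_id] = counter
    return True
```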
The Yubico validation server is a cloud based service, and you're placing trust in Yubico that their server won't be used to bypass your MFA or profile you. The public ID associated with Yubico OTP is reused on every website and could be another avenue for third-parties to profile you. Like TOTP, Yubico OTP does not provide phishing resistance.
Yubico OTP is an inferior protocol compared to TOTP, since TOTP does not need trust in a third-party server and most security keys that support Yubico OTP (namely the YubiKey and OnlyKey) support TOTP anyway. Yubico OTP is still better than push confirmation, however.
### FIDO (Fast IDentity Online)
[FIDO](https://en.wikipedia.org/wiki/FIDO_Alliance) includes a number of standards: first there was U2F, and then later [FIDO2](https://en.wikipedia.org/wiki/FIDO2_Project), which includes the web standard [WebAuthn](https://en.wikipedia.org/wiki/WebAuthn).
U2F and FIDO2 refer to the [Client to Authenticator Protocol](https://en.wikipedia.org/wiki/Client_to_Authenticator_Protocol), which is the protocol between the security key and the computer, such as a laptop or phone. It complements WebAuthn which is the component used to authenticate with the website (the "Relying Party") you're trying to log in on.
WebAuthn is the most secure and private form of second factor authentication. While the authentication experience is similar to Yubico OTP, the key does not print out a one-time password and validate with a third-party server. Instead, it uses [public key cryptography](https://en.wikipedia.org/wiki/Public-key_cryptography) for authentication.
When you create an account, the public key is sent to the service, then when you log in, the service will require you to "sign" some data with your private key. The benefit of this is that no password data is ever stored by the service, so there is nothing for an adversary to steal.
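At its core this is an ordinary challenge-response signature. A minimal sketch in Python using the third-party `cryptography` package (this only illustrates the concept and leaves out WebAuthn specifics such as origin binding, attestation, and the signature counter):
```
import os
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.exceptions import InvalidSignature

# Registration: the authenticator generates a key pair; only the public key goes to the service.
private_key = ec.generate_private_key(ec.SECP256R1())  # stays on the security key
public_key = private_key.public_key()                  # stored by the service

# Login: the service sends a random challenge and the authenticator signs it.
challenge = os.urandom(32)
signature = private_key.sign(challenge, ec.ECDSA(hashes.SHA256()))

# The service verifies the signature with the stored public key; no secret is ever shared.
try:
    public_key.verify(signature, challenge, ec.ECDSA(hashes.SHA256()))
    print("login accepted")
except InvalidSignature:
    print("login rejected")
```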
{{< youtube id="aMo4ZlWznao">}}
FIDO2 and WebAuthn have superior security and privacy properties when compared to any other MFA method.
Typically for web services it is used with WebAuthn which is a part of the [W3C recommendations](https://en.wikipedia.org/wiki/World_Wide_Web_Consortium#W3C_recommendation_(REC)). It uses public key authentication and is more secure than shared secrets used in Yubico OTP and TOTP methods, as it includes the origin name (usually, the domain name) during authentication. Attestation is provided to protect you from phishing attacks, as it helps you to determine that you are using the authentic service and not a fake copy.
Unlike Yubico OTP, WebAuthn does not use any public ID, so the key is **not** identifiable across different websites. It also does not use any third-party cloud server for authentication. All communication is completed between the key and the website you are logging into. FIDO also uses a counter which is incremented upon use in order to prevent session reuse and cloned keys.
If a website or service supports WebAuthn for authentication, it is highly recommended that you use it over any other form of MFA.
## Notes
### Initial Set Up
When buying a security key, it is important that you change the default credentials, set up password protection for the key, and enable touch confirmation if your key supports it. Products such as the YubiKey have multiple interfaces with separate credentials for each one of them, so you should go over each interface and set up protection as well.
### Backups
You should always have backups for your MFA method. Hardware security keys can get lost, stolen or simply stop working over time. It is recommended that you have a pair of hardware security keys with the same access to your accounts instead of just one.
When using TOTP with an authenticator app, be sure to back up your recovery keys or the app itself, or copy the "shared secrets" to another instance of the app on a different phone or to an encrypted container (e.g. [VeraCrypt](../encryption.md#veracrypt)).


@@ -0,0 +1,3 @@
---
title: Knowledge Base
---


@@ -0,0 +1,10 @@
---
title: "Linux Insecurities"
tags: ['knowledge base', 'operating system', 'security', 'linux']
author: Tommy
---
There is a common misconception among privacy communities that Linux is one of the more secure operating systems, either because it is open source or because it is widely used in the cloud. This is, however, a far cry from reality.
There is already a very in-depth technical blog post by Madaidan, [Whonix](https://www.whonix.org/)'s security researcher, explaining the various security weaknesses of Linux. This page will attempt to address some of the questions commonly raised in reaction to his blog post. You can find the original article [here](https://madaidans-insecurities.github.io/linux.html).

content/os/_index.md Normal file

@@ -0,0 +1,3 @@
---
title: Operating Systems
---


@@ -0,0 +1,3 @@
---
title: Providers
---


@@ -0,0 +1,213 @@
---
title: "F-Droid Security Analysis"
date: 2022-01-02T21:28:31Z
tags: ['software', 'android', 'security']
author: Wonderfall
canonicalURL: https://wonderfall.dev/fdroid-issues
ShowCanonicalLink: true
---
F-Droid is a popular alternative app repository for Android, especially known for its main repository dedicated to free and open-source software. F-Droid is often recommended among security and privacy enthusiasts, but how does it stack up against Play Store in practice? This write-up will attempt to emphasize major security issues with F-Droid that you should consider.
Before we start, a few things to keep in mind:
- The main goal of this write-up was to inform users so they can make responsible choices, not to trash someone else's work. I have respect for any work done in the name of good intentions. Likewise, please don't misinterpret the intentions of this article.
- You have your own reasons for using open-source or free/libre/whatever software which won't be discussed here. A development model shouldn't be an excuse for bad practices and shouldn't lure you into believing that it can provide strong guarantees it cannot.
- A lot of information in this article is sourced from official and trusted sources, but you're welcome to do your own research.
- These analyses do not account for threat models and personal preferences. As the author of this article, I'm only interested in facts and not ideologies.
*This is not an in-depth security review, nor is it exhaustive.*
## 1. The trusted party problem
To understand why this is a problem, you'll have to understand a bit about F-Droid's architecture, the things it does very differently from other app repositories, and the [Android platform security model](https://arxiv.org/pdf/1904.05572.pdf) (some of the issues listed in this article are somewhat out of the scope of the OS security model, but the majority are).
Unlike other repositories, F-Droid signs all the apps in the main repository with **its own signing keys** (unique per app), with the exception of the very few [reproducible builds](https://f-droid.org/en/docs/Reproducible_Builds/). A signature is a mathematical scheme that guarantees the authenticity of the applications you download. Upon the installation of an app, Android pins the signature across the entire OS (including user profiles): that's what we call a *trust-on-first-use* model, since all subsequent updates of the app must have the corresponding signature to be installed.
Normally, the developer is supposed to sign their own app prior to its upload on a distribution channel, whether that is a website or a traditional repository (or both). You don't have to trust the source (usually recommended by the developer) except for the first installation: future updates will have their authenticity cryptographically guaranteed. The issue with F-Droid is that all apps are signed by the same party (F-Droid) which is also not the developer. You're now adding another party you'll have to trust since **you still have to trust the developer** anyway, which isn't ideal: **the fewer parties, the better**.
On the other hand, Play Store now manages the app signing keys too, as [Play App Signing](https://developer.android.com/studio/publish/app-signing#app-signing-google-play) is required for app bundles which are required for new apps since August 2021. These signing keys can be uploaded or automatically generated, and are securely stored by [Google Cloud Key Management Service](https://services.google.com/fh/files/misc/security_whitepapers_march2018.pdf). It should be noted that the developer still has to sign the app with **an upload key** so that Google can verify its authenticity before signing it with the app signing key. For apps created before August 2021 that may have [not opted in Play App Signing](https://developer.android.com/studio/publish/app-signing#opt-out) yet, the developer still manages the private key and is responsible for its security, as a compromised private key can allow a third party to sign and distribute malicious code.
F-Droid requires that the source code of the app be free of any proprietary library or ad service, according to their [inclusion policy](https://f-droid.org/en/docs/Inclusion_Policy/). Usually, that means that some developers will have to maintain a slightly different version of their codebase that complies with F-Droid's requirements. Besides, their "quality control" offers **close to no guarantees**, as having access to the source code doesn't mean it can be easily proofread. Saying Play Store is filled with malicious apps is beside the point: the **false sense of security** is a real issue. Users should not think of the F-Droid main repository as free of malicious apps, yet unfortunately many are inclined to believe this.
> But... can't I just trust F-Droid and be done with it?
[You don't have to take my word for it](https://forum.f-droid.org/t/is-it-as-safe-as-it-is-from-fdroid-official-repo/15956/12): they openly admit themselves it's a [very basic process](https://forum.f-droid.org/t/is-it-as-safe-as-it-is-from-fdroid-official-repo/15956/2) relying on badness enumeration (which doesn't work, by the way) that consists of a few scripts scanning the code for proprietary blobs and known trackers. You are therefore not exempted from trusting upstream developers, and that goes for any repository.
*A tempting idea would be to compare F-Droid to the desktop Linux model where users trust their distribution maintainers out-of-the-box (this can be sane if you're already trusting the OS anyway), but the desktop platform is intrinsically chaotic and heterogeneous for better and for worse. It really shouldn't be compared to the Android platform in any way.*
While we've seen that F-Droid controls the signing servers (much like Play App Signing), F-Droid also fully controls the build servers that run the disposable VMs used for building apps. And [as of July 2022](https://gitlab.com/groups/fdroid/-/milestones/5#tab-issues), their guest VM image officially runs a version of Debian which reached EOL. Undoubtedly, this raises questions about their whole infrastructure security.
> How can you be sure that the app repository can be held to account for the code it delivers?
F-Droid's answer, interesting yet largely unused, is [build reproducibility](https://f-droid.org/en/docs/Reproducible_Builds/). While deterministic builds are a neat idea in theory, it requires the developer to make their toolchain match with what F-Droid provides. It's additional work on both ends sometimes resulting in [apps severely lagging behind in updates](https://code.briarproject.org/briar/briar/-/issues/1612), so reproducible builds are not as common as we would have wanted. It should be noted that reproducible builds in the main repository can be exclusively developer-signed.
Google's approach is [code transparency for app bundles](https://developer.android.com/guide/app-bundle/code-transparency), which is a simple idea addressing some of the concerns with Play App Signing. A JSON Web Token (JWT) signed by a key private to the developer is included in the app bundle before its upload to Play Store. This token contains a list of DEX files and native `.so` libraries and their hashes, allowing end-users to verify that the running code was built and signed by the app developer. Code transparency has known limitations, however: not all resources can be verified, and this verification can only be done manually since it's not part of the Android platform itself (so requiring a code transparency file cannot be enforced by the OS right now). Despite its incompleteness, code transparency is still helpful, easy to implement, and thus something we should see more often as time goes by.
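Conceptually, verifying code transparency means recomputing the hashes of the DEX and native files actually present in the delivered app and comparing them with the list carried in the signature-verified token. A rough Python sketch of that comparison step, assuming the token payload has already been verified and decoded into a `path -> sha256` mapping (the exact field names and the extraction step differ in practice):
```
import hashlib
from pathlib import Path

def verify_code_hashes(extracted_apk_dir: str, expected: dict[str, str]) -> bool:
    """Compare SHA-256 digests of extracted DEX/native files against the transparency list."""
    for rel_path, expected_sha256 in expected.items():
        file_path = Path(extracted_apk_dir) / rel_path
        if not file_path.is_file():
            return False  # a listed file is missing
        if hashlib.sha256(file_path.read_bytes()).hexdigest() != expected_sha256:
            return False  # the running code differs from what the developer signed
    return True
```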
> What about other app repositories such as Amazon?
[To my current knowledge](https://developer.amazon.com/docs/app-submission/understanding-submission.html#code_wrapper), the Amazon Appstore has always been wrapping APKs with their own code (including their own trackers), and this means they were effectively resigning submitted APKs.
If you have understood the information above correctly, Google can't do this for apps that haven't opted in to Play App Signing. As for apps covered by Play App Signing, while Google could technically introduce their own code like Amazon does, they wouldn't do so silently, since it would be easily noticeable by the developer and, more broadly, by researchers. They have other means on the Android app development platform to do so anyway. Believing they won't do that based on this principle is not a strong guarantee, however: hence the above paragraph about code transparency for app bundles.
Huawei AppGallery seems to have a [similar approach](https://developer.huawei.com/consumer/en/doc/distribution/app/20210812) to Google, where submitted apps could be developer-signed, but newer apps will be resigned by Huawei.
## 2. Slow and irregular updates
Since you're adding one more party to the mix, that party is now responsible for delivering proper builds of the app: it's a common thing among traditional Linux distributions and their packaging system. They have to catch up with *upstream* on a regular basis, but very few do it well (Arch Linux comes to my mind). Others, like Debian, prefer making extensive *downstream* changes and delivering security fixes for a subset of vulnerabilities assigned to a CVE (yeah, it's as bad as it sounds, but that's another topic).
Not only does F-Droid require specific changes for the app to comply with its inclusion policy, which often leads to more maintenance work, it also has a rather strange way of triggering new builds. Part of its build process seems to be [automated](https://f-droid.org/en/docs/FAQ_-_App_Developers/), which is the least you could expect. Now here's the thing: app signing keys are on an **air-gapped server** (meaning it's disconnected from any network, at least that's what they claim: see [their recommendations](https://f-droid.org/docs/Building_a_Signing_Server/) for reference), which forces an irregular update cycle where a human has to manually trigger the signing process. It is far from an ideal situation, and you may argue it's the least that should be expected, since entrusting all the signing keys to one party also introduces a single point of failure. Should their system be compromised (whether from the inside or the outside), this could lead to serious security issues affecting plenty of users.
*This is one of the main reasons why Signal refused to support the inclusion of a third-party build in the F-Droid official repository. While [this GitHub issue](https://github.com/signalapp/Signal-Android/issues/127) is quite old, many points still hold true today.*
Considering all this, and the fact that their build process is often broken using outdated tools, you have to expect **far slower updates** compared to a traditional distribution system. Slow updates mean that you will be exposed to security vulnerabilities more often than you should've been. It would be unwise to have a full browser updated through the F-Droid official repository, for instance. F-Droid third-party repositories somewhat mitigate the issue of slow updates since they can be managed directly by the developer. It isn't ideal either as you will see below.
## 3. Low target API level (SDK) for client & apps
SDK stands for *Software Development Kit* and is the collection of software to build apps for a given platform. On Android, a higher SDK level means you'll be able to make use of modern API levels of which each iteration brings **security and privacy improvements**. For instance, API level 31 makes use of all these improvements on Android 12.
As you may already know, Android has a strong sandboxing model where each application is sandboxed. You could say that an app compiled with the highest API level benefits from all the latest improvements brought to the app sandbox; as opposed to outdated apps compiled with older API levels, which have a **weaker sandbox**.
```
# b/35917228 - /proc/misc access
# This will go away in a future Android release
allow untrusted_app_25 proc_misc:file r_file_perms;
# Access to /proc/tty/drivers, to allow apps to determine if they
# are running in an emulated environment.
# b/33214085 b/33814662 b/33791054 b/33211769
# https://github.com/strazzere/anti-emulator/blob/master/AntiEmulator/src/diff/strazzere/anti/emulator/FindEmulator.java
# This will go away in a future Android release
allow untrusted_app_25 proc_tty_drivers:file r_file_perms;
```
This is a mere sample of the [SELinux exceptions](https://android.googlesource.com/platform/system/sepolicy/+/refs/tags/android-12.0.0_r21/private) that have to be made on older API levels so that you can understand why it matters.
It turns out the official F-Droid client doesn't care much about this since it lags behind quite a bit, **[targeting API level 25](https://gitlab.com/fdroid/fdroidclient/-/blob/2a8b16683a2dbee16d624a58e7dd3ea1da772fbd/app/build.gradle#L33)** (Android 7.1), for which some SELinux exceptions were shown above. As a workaround, some users recommended third-party clients such as [Foxy Droid](https://f-droid.org/en/packages/nya.kitsunyan.foxydroid/) or [Aurora Droid](https://f-droid.org/en/packages/com.aurora.adroid/). While these clients might be technically better, some of them are poorly maintained, and they also introduce yet another party to the mix. [Droid-ify](https://github.com/Iamlooker/Droid-ify) (recently rebranded to Neo-Store) seems to be a better option than the official client in most aspects.
Furthermore, F-Droid **doesn't enforce a minimum target SDK** for the official repository. Play Store [does that quite aggressively](https://developer.android.com/google/play/requirements/target-sdk) for new apps and app updates:
- Since August 2021, Play Store requires new apps to target at least API level 30.
- Since November 2021, existing apps must at least target API level 30 for updates to be submitted.
While it may seem bothersome, it's a necessity to keep the **app ecosystem modern and healthy**. Here, F-Droid sends the wrong message to developers (and even users) because they should care about it, and this is why many of us think it may even be harmful to the FOSS ecosystem. Backward compatibility is often the enemy of security, and while there's a middle ground between convenience and obsolescence, it shouldn't be exaggerated. As a result of this philosophy, the main repository of F-Droid is filled with obsolete apps from another era, just so these apps can run on the more than ten-year-old Android 4.0 Ice Cream Sandwich. Let's not make the same mistake as the desktop platforms: instead, complain to your vendors for selling devices with no decent OS/firmware support.
There is little practical reason for developers not to increase the target SDK version (`targetSdkVersion`) along with each Android release. This attribute matches the version of the platform an app is targeting, and allows access to modern improvements, rules and features on a modern OS. The app can still ensure backwards compatibility in such a way that it can run on older platforms: the `minSdkVersion` attribute informs the system about the minimum API level required for the application to run. Setting it too low isn't practical though, because this requires having a lot of fallback code (most of it is handled by common libraries) and separate code paths.
At the time of writing:
- Android 9 is the oldest Android version that is [getting security updates](https://endoflife.date/android).
- [~80% of the Android devices](https://developer.android.com/about/dashboards) used in the world are **at least** running 8.0 Oreo.
*Overall statistics do not reflect real-world usage of a given app (people using old devices are not necessarily using your app). If anything, it should be viewed as an underestimation.*
## 4. General lack of good practices
The F-Droid client allows multiple repositories to coexist within the same app. Many of the issues highlighted above were focused on the main official repository which most of the F-Droid users will use anyway. However, having **other repositories in a single app also violates the security model of Android** which was not designed for this at all. The OS expects you to trust **an app repository as a single source** of apps, yet F-Droid isn't that by design as it mixes several repositories in one single app. This is important because the OS management APIs and features (such as [UserManager](https://developer.android.com/reference/android/os/UserManager)) are not meant for this and see F-Droid as a single source, so you're trusting the app client to not mess up far more than you should, especially when the privileged extension comes into the picture. This is also a problem with the OS first-party source feature.
On that note, it is also worth noting that the repository metadata format isn't properly signed, as it lacks whole-file signing and key rotation. [Their index v1](https://f-droid.org/2021/02/05/apis-for-all-the-things.html#the-repo-index) format [uses JAR signing](https://gitlab.com/fdroid/fdroidserver/-/blob/3182b77d180b2313f4fdb101af96c035380abfd7/fdroidserver/signindex.py) with `jarsigner`, which has serious security flaws. It seems that [work is in progress on a v2 format](https://gitlab.com/fdroid/fdroidserver/-/commit/3182b77d180b2313f4fdb101af96c035380abfd7) with support for `apksigner`, although the final implementation remains to be seen. This just seems to be an over-engineered and flawed approach, since better-suited tools such as `signify` could be used to sign the metadata JSON.
As a matter of fact, the [new unattended update API](https://developer.android.com/reference/android/Manifest.permission#UPDATE_PACKAGES_WITHOUT_USER_ACTION) added in API level 31 (Android 12) that allows seamless app updates for app repositories without [privileged access](https://f-droid.org/en/packages/org.fdroid.fdroid.privileged/) to the system (such an approach is not compatible with the security model) won't work with F-Droid "as is". It should be mentioned that the aforementioned third-party client [Neo-Store](https://github.com/Iamlooker/Droid-ify/issues/20) supports this API, although the underlying issues about the F-Droid infrastructure largely remain. Indeed, this secure API allowing for unprivileged unattended updates not only requires for the app repository client to target API level 31, but the apps to be updated also have to at least target API level 29.
Their client also lacks **TLS certificate pinning**, unlike Play Store which improves security for all connections to Google (they generally use a limited set of root CAs including [their own](https://pki.goog/)). Certificate pinning is a way for apps to increase the security of their connection to services [by providing a set of public key hashes](https://developer.android.com/training/articles/security-config#CertificatePinning) of known-good certificates for these services instead of trusting pre-installed CAs. This can avoid some cases where an interception (*man-in-the-middle* attack) could be possible and lead to various security issues considering you're trusting the app to deliver you other apps.
It is an important security feature that is also straightforward to implement using the [declarative network security configuration](https://developer.android.com/training/articles/security-config) available since Android 7.0 (API level 24). See how GrapheneOS pins both root and CA certificates in their [app repository client](https://github.com/GrapheneOS/Apps):
```
<!-- res/xml/network_security_config.xml -->
<network-security-config>
    <base-config cleartextTrafficPermitted="false"/>
    <domain-config>
        <domain includeSubdomains="true">apps.grapheneos.org</domain>
        <pin-set>
            <!-- ISRG Root X1 -->
            <pin digest="SHA-256">C5+lpZ7tcVwmwQIMcRtPbsQtWLABXhQzejna0wHFr8M=</pin>
            <!-- ISRG Root X2 -->
            <pin digest="SHA-256">diGVwiVYbubAI3RW4hB9xU8e/CH2GnkuvVFZE8zmgzI=</pin>
            <!-- Let's Encrypt R3 -->
            <pin digest="SHA-256">jQJTbIh0grw0/1TkHSumWb+Fs0Ggogr621gT3PvPKG0=</pin>
            <!-- Let's Encrypt E1 -->
            <pin digest="SHA-256">J2/oqMTsdhFWW/n85tys6b4yDBtb6idZayIEBx7QTxA=</pin>
            ...
        </pin-set>
    </domain-config>
</network-security-config>
```
To be fair, they've thought several times about adding certificate pinning to their client [at least for the default repositories](https://gitlab.com/fdroid/fdroidclient/-/issues/105). [Relics of preliminary work](https://gitlab.com/fdroid/fdroidclient/-/blob/1.14-alpha4/app/src/main/java/org/fdroid/fdroid/FDroidCertPins.java) can even be found in their current codebase, but it's unfortunate that they haven't been able to find [any working implementation](https://github.com/f-droid/fdroidclient/commit/7f78b46664981b9b73cadbfdda6391f6fe939c77) so far. Given the overly complex nature of F-Droid, that's largely understandable.
F-Droid also has a problem regarding the adoption of **[new signature schemes](https://source.android.com/security/apksigning)** as they [held out on the v1 signature scheme](https://forum.f-droid.org/t/why-f-droid-is-still-using-apk-signature-scheme-v1/10602) (which was [horrible](https://www.xda-developers.com/janus-vulnerability-android-apps/) and deprecated since 2017) until they were forced by Android 11 requirements to support the newer v2/v3 schemes (v2 was introduced in Android 7.0). Quite frankly, this is straight-up bad, and **signing APKs with GPG** is no better considering [how bad PGP and its reference implementation GPG are](https://latacora.micro.blog/2019/07/16/the-pgp-problem.html) (even Debian [is trying to move away from it](https://wiki.debian.org/Teams/Apt/Spec/AptSign)). Ideally, F-Droid should fully move on to newer signature schemes, and should completely phase out the legacy signature schemes which are still being used for some apps and metadata.
## 5. Confusing UX
It is worth mentioning that their website has (for some reason) always been hosting an [outdated APK of F-Droid](https://forum.f-droid.org/t/why-does-the-f-droid-website-nearly-always-host-an-outdated-f-droid-apk/6234), and this is still the case today, leading to many users wondering why they can't install F-Droid on their secondary user profile (due to the downgrade prevention enforced by Android). "Stability" seems to be the main reason mentioned on their part, which doesn't make sense: either your version isn't ready to be published in a stable channel, or it is and new users should be able to access it easily.
F-Droid should enforce the approach of prefixing the package name of their alternate builds with `org.f-droid` for instance (or add a `.fdroid` suffix as some already have). Building and signing while **reusing the package name** ([application ID](https://developer.android.com/studio/build/configure-app-module)) is bad practice as it causes **signature verification errors** when some users try to update/install these apps from other sources, even directly from the developer. That is again due to the security model of Android which enforces a signature check when installing app updates (or installing them again in a secondary user profile). Note that this is going to be an issue with Play App Signing as well, and developers are encouraged to follow this approach should they intend to distribute their apps through different distribution channels.
This results in a confusing user experience where it's hard to keep track of who signs each app, and from which repository the app should be downloaded or updated.
## 6. Misleading permissions approach
F-Droid shows a list of the [low-level permissions](https://developer.android.com/reference/android/Manifest.permission) for each app: these low-level permissions are usually grouped in the standard high-level permissions (Location, Microphone, Camera, etc.) and special toggles (nearby Wi-Fi networks, Bluetooth devices, etc.) that are explicitly based on a type of sensitive data. While showing a list of low-level permissions could be useful information for a developer, it's often a **misleading** and inaccurate approach for the end-user. Since Android 6, apps have to [request the standard permissions at runtime](https://developer.android.com/guide/topics/permissions/overview#runtime) and do not get them simply by being installed, so showing all the "under the hood" permissions without proper context is not useful and makes the permission model unnecessarily confusing.
F-Droid claims that these low-level permissions are relevant because they support Android 5.1+, meaning they support very outdated versions of Android where apps could have [install-time permissions](https://source.android.com/devices/tech/config/runtime_perms). Anyway, if a technical user wants to see all the manifest permissions for some reason, they can access the app manifest pretty easily (in fact, exposing the raw manifest would be less misleading). But this is already beyond the scope of this article, because anyone who cares about privacy and security wouldn't run an 8-year-old version of Android that has not received security updates for years.
*To clear up confusion: even apps targeting an API level below 23 (Android 5.1 or older) do not have permissions granted at install time on modern Android, which instead displays a legacy permission grant dialog. Whether or not permissions are granted at install time does not just depend on the app's `targetSdkVersion`. And even if this were the case, the OS package installer on modern Android would've been designed to show the requested permissions for those legacy apps.*
For example, the low-level permission `RECEIVE_BOOT_COMPLETED` is referred to in F-Droid by the *run at startup* description, when in fact this permission is not needed to start at boot; it just lets the app receive a broadcast the system sends once it finishes booting, and is not about background usage (though power usage may be a valid concern). To be fair, these short summaries used to be provided by the Android documentation years ago, but the permission model has drastically evolved since then and most of them aren't accurate anymore.
> *Allows the app to have itself started as soon as the system has finished booting. This can make it take longer to start the phone and allow the app to slow down the overall phone by always running.*
In modern Android, the background restriction toggle is what really provides the ability for apps to run in the background. Some low-level permissions don't even have a security/privacy impact and shouldn't be misinterpreted as having one. Anyhow, you can be sure that each dangerous low-level permission has a **high-level representation** that is **disabled by default** and needs to be **granted dynamically** to the app (by a toggle or explicit user consent in general).
Another example to illustrate the shortcomings of this approach would be the `QUERY_ALL_PACKAGES` low-level permission, which is referred to as the *query all packages* permission that "allows an app to see all installed packages". While this is somewhat correct, this can also be misleading: apps do not need `QUERY_ALL_PACKAGES` to list other apps within the same user profile. Even without this permission, some apps are visible automatically (visibility is restricted by default [since Android 11](https://developer.android.com/training/package-visibility)). If an app needs more visibility, it will declare a `<queries>` element in its manifest file: in other words, `QUERY_ALL_PACKAGES` is only one way to achieve visibility. Again, this goes to show low-level manifest permissions are not intended to be interpreted as high-level permissions the user should fully comprehend.
Play Store, for instance, conveys permissions in a far less misleading way: the main low-level permissions are first grouped into their high-level user-facing toggles, and the rest is shown under "Other". This permission list can only be accessed by tapping "About this app" and then "App permissions - See more" at the bottom of the page. Play Store will say that the app may request access to the following permissions: this kind of wording is more important than it seems. *Update: since July 2022, Play Store doesn't offer a way to display low-level permissions anymore.*
Moreover, [Play Store restricts the use of highly invasive permissions](https://support.google.com/googleplay/android-developer/answer/9888170) such as `MANAGE_EXTERNAL_STORAGE` which allows apps to opt out of scoped storage if they can't work with [more privacy friendly approaches](https://developer.android.com/guide/topics/providers/document-provider) (like a file explorer). Apps that can't justify their use of this permission (which again has to be granted dynamically) may be removed from Play Store. This is where an app repository can actually be useful in their review process to protect end-users from installing poorly made apps that might compromise their privacy. Not that it matters much if these apps target very old API levels that are inclined to require invasive permissions in the first place...
## Conclusion: what should you do?
So far, you have been presented with referenced facts that are easily verifiable. In the next part, I'll allow myself to express my own thoughts and opinions. You're free to disagree with them, but don't let that overshadow the rest.
While some improvements could easily be made, I don't think F-Droid is in an ideal situation to solve all of these issues because some of them are **inherent flaws** in their architecture. I'd also argue that their core philosophy is not aligned with some security principles expressed in this article. In any case, I can only wish for them to improve since they're one of the most popular alternatives to commercial app repositories, and are therefore trusted by a large userbase.
F-Droid is often seen as the only way to get and support open-source apps: that is not the case. Sure, F-Droid could help you in finding FOSS apps that you wouldn't otherwise have known existed. Many developers also publish their FOSS apps on the **Play Store** or their website directly. Most of the time, releases are available on **GitHub**, which is great since each GitHub releases page has an Atom feed. If downloading APKs from regular websites, you can use `apksigner` to validate the authenticity by comparing the certificate fingerprint against the fingerprint from another source (it wouldn't matter otherwise).
This is how you may proceed to get the app certificate:
```
apksigner verify --print-certs --verbose myCoolApp.apk
```
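If you want to script that comparison between two sources, a small sketch along these lines works (it assumes `apksigner` is on your `PATH` and that its output contains the `certificate SHA-256 digest` lines printed by recent build-tools releases):
```
import re, subprocess, sys

def cert_digests(apk: str) -> set[str]:
    # Collect the signer certificate SHA-256 digests reported by apksigner.
    out = subprocess.run(
        ["apksigner", "verify", "--print-certs", apk],
        capture_output=True, text=True, check=True,
    ).stdout
    return {d.lower() for d in re.findall(r"certificate SHA-256 digest: ([0-9a-fA-F]+)", out)}

a, b = sys.argv[1], sys.argv[2]
print("same signer" if cert_digests(a) & cert_digests(b) else "DIFFERENT signer")
```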
Also, as written above: the OS pins the app signature (for all profiles) upon installation, and enforces signature check for app updates. In practice, this means the source doesn't matter as much after the initial installation.
For most people, I'd recommend just **sticking with Play Store**. Play Store isn't quite flawless, but emphasises the adoption of modern security standards which in turn encourages better privacy practices; as strange as it may sound, Google is not always doing bad things in that regard.
*Note: this article obviously can't address all the flaws related to Play Store itself. Again, the main topic of this article is about F-Droid and should not be seen as an exhaustive comparison between different app repositories.*
> Should I really care?
**It's up to your threat model**, and of course your personal preferences. Most likely, your phone won't turn into a nuclear weapon if you install F-Droid on it - and this is far from the point that this article is trying to make. Still, I believe the information presented will be valuable for anyone who values a **practical approach to privacy** (rather than an ideological one). Such an approach is partially described below.
> But there is more malware in Play Store! How can you say that it's more secure?
As explained above, it doesn't matter, as you shouldn't rely on any quality control to be the sole guarantee that a piece of software is free of malicious or exploitable code. Play Store and even the Apple App Store may have a considerable amount of malware because a full reverse engineering of every uploaded app isn't realistically feasible. However, they fulfill their role quite well, and that is all that is expected of them.
> With Play App Signing being effectively enforced for new apps, isn't Play Store as "flawed" as F-Droid?
I've seen this comment repeatedly, and it would be dismissing all the other points made in this article. Also, I strongly suggest that you carefully read the sections related to Play App Signing, and preferably the official documentation on this matter. It's not a black and white question and there are many more nuances to it.
> Aren't open-source apps more secure? Doesn't it make F-Droid safer?
You can still find and get your open-source apps elsewhere. And no, open-source apps [aren't necessarily more private or secure](https://seirdy.one/2022/02/02/floss-security.html). Instead, you should rely on the strong security and privacy guarantees provided by a modern operating system with **a robust sandboxing/permission model**, namely modern Android, GrapheneOS and iOS. Pay close attention to the permissions you grant, and avoid legacy apps as they could require invasive permissions to run.
When it comes to *trackers* (this really comes up a lot), you shouldn't believe in the flawed idea that you can enumerate all of them. The *enumerating badness* approach is [known to be flawed in the security field](https://www.ranum.com/security/computer_security/editorials/dumb/), and the same applies to privacy. You shouldn't believe that a random script can detect every single line of code that can be used for data exfiltration. Data exfiltration can be properly prevented in the first place by the permission model, which again **denies access to sensitive data by default**: this is a simple, yet rigorous and effective approach.
No app should be unnecessarily entrusted with any kind of permission. It is only if you deem it necessary that you should allow access to a type of data, and this access should be as fine-grained as possible. That's the way the Android platform works (regular apps run in the explicit `untrusted_app` domain) and continues evolving. Contrary to some popular beliefs, usability and most productivity tasks can still be achieved in a secure and private way.
> Isn't Google evil? Isn't Play Store spyware?
Some people tend to exaggerate the importance of Google in their threat model, at the cost of pragmatism and security/privacy good practices. Play Store isn't spyware and can run unprivileged like it does on GrapheneOS (including with unattended updates support). On the vast majority of devices though, Google Play is a privileged app and a core part of the OS that provides low-level system modules. In that case, the trust issues involved with Play App Signing could be considered less important since Google Play is already trusted as a privileged component.
**Play Store evidently has some privacy issues** given it's a proprietary service which requires an account (this cannot be circumvented), and Google services have a history of nagging users to enable privacy-invasive features. Again, some of these privacy issues can be mitigated by setting up the [Play services compatibility layer from GrapheneOS](https://grapheneos.org/usage#sandboxed-google-play) which runs Play services and Play Store in the regular app sandbox (the `untrusted_app` domain). [ProtonAOSP also shares that feature](https://protonaosp.org/features#privacy-and-security). This solution could very well be ported to other Android-based operating systems. If you want to go further, consider using a properly configured account with the least amount of personally identifiable information possible (note that the phone number requirement appears to be region-dependent).
If you don't have Play services installed, you can use a third-party Play Store client called **[Aurora Store](https://auroraoss.com/)**. Aurora Store has some issues of its own, and some of them overlap in fact with F-Droid. Aurora Store somehow still requires [the legacy storage permission](https://gitlab.com/AuroraOSS/AuroraStore/-/blob/26f5d4fd558263a89baee4c3cbe1d220913da104/app/src/main/AndroidManifest.xml#L28-32), has yet to [implement certificate pinning](https://gitlab.com/AuroraOSS/AuroraStore/-/issues/697), has been known to sometimes retrieve wrong versions of apps, and [distributed account tokens](https://gitlab.com/AuroraOSS/AuroraStore/-/issues/722) over [cleartext HTTP](https://gitlab.com/AuroraOSS/AuroraStore/-/issues/734) until fairly recently; not that it matters much since tokens were designed to be shared between users, which is already concerning. I'd recommend against using the shared "anonymous" accounts feature: you should make your own throwaway account with minimal information.
You should also keep an eye on the great work **GrapheneOS** does on [their future app repository](https://github.com/GrapheneOS/Apps). It will be a simple, secure, modern app repository for a curated list of high-quality apps, some of which will have their own builds (for instance, Signal still uses their [original 1024-bit RSA key](https://github.com/signalapp/Signal-Android/issues/9362) that has never been rotated). Inspired by this work, a GrapheneOS community member is developing a more generic app repository called [Accrescent](https://twitter.com/lberrymage/status/1475307653089792003). Hopefully, we'll see well-made alternatives like these flourish.
*Thanks to the GrapheneOS community for proofreading this article. Bear in mind that these are not official recommendations from the GrapheneOS project.*
*Post-publication note: it's unfortunate that the release of this article mostly triggered a negative response from the F-Droid team which prefers to dismiss this article on several occasions rather than bringing relevant counterpoints. Some of their core members are also involved in a harassment campaign towards projects and security researchers that do not share their views. While this article remains a technical one, there are definitely ethical concerns to take into consideration.*


@@ -0,0 +1,3 @@
---
title: Software
---

external-blogs.sh Executable file

@@ -0,0 +1,11 @@
#!/bin/bash
#F-Droid Issues Blog
rm -rf './content/software/F-Droid Security Analysis.md'
curl https://raw.githubusercontent.com/Wonderfall/wonderfall.github.io/main/content/posts/fdroid-issues.md -o './content/software/F-Droid Security Analysis.md'
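# Rewrite the upstream front matter (title, tags, author, canonical URL) to fit this site's layout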
sed -i 's/title: "A brief and informal analysis of F-Droid security"/title: "F-Droid Security Analysis"/' './content/software/F-Droid Security Analysis.md'
sed -i '/draft: false/d' './content/software/F-Droid Security Analysis.md'
sed -i "s/tags:.*/tags: ['software', 'android', 'security']/" './content/software/F-Droid Security Analysis.md'
sed -i '/^tags:.*/a ShowCanonicalLink: true' './content/software/F-Droid Security Analysis.md'
sed -i '/^tags:.*/a canonicalURL: https://wonderfall.dev/fdroid-issues' './content/software/F-Droid Security Analysis.md'
sed -i '/^tags:.*/a author: Wonderfall' './content/software/F-Droid Security Analysis.md'

firebase.json Normal file

@@ -0,0 +1,10 @@
{
  "hosting": {
    "public": "public",
    "ignore": [
      "firebase.json",
      "**/.*",
      "**/node_modules/**"
    ]
  }
}

public/404.html Normal file

@@ -0,0 +1,33 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Page Not Found</title>
<style media="screen">
body { background: #ECEFF1; color: rgba(0,0,0,0.87); font-family: Roboto, Helvetica, Arial, sans-serif; margin: 0; padding: 0; }
#message { background: white; max-width: 360px; margin: 100px auto 16px; padding: 32px 24px 16px; border-radius: 3px; }
#message h3 { color: #888; font-weight: normal; font-size: 16px; margin: 16px 0 12px; }
#message h2 { color: #ffa100; font-weight: bold; font-size: 16px; margin: 0 0 8px; }
#message h1 { font-size: 22px; font-weight: 300; color: rgba(0,0,0,0.6); margin: 0 0 16px;}
#message p { line-height: 140%; margin: 16px 0 24px; font-size: 14px; }
#message a { display: block; text-align: center; background: #039be5; text-transform: uppercase; text-decoration: none; color: white; padding: 16px; border-radius: 4px; }
#message, #message a { box-shadow: 0 1px 3px rgba(0,0,0,0.12), 0 1px 2px rgba(0,0,0,0.24); }
#load { color: rgba(0,0,0,0.4); text-align: center; font-size: 13px; }
@media (max-width: 600px) {
body, #message { margin-top: 0; background: white; box-shadow: none; }
body { border-top: 16px solid #ffa100; }
}
</style>
</head>
<body>
<div id="message">
<h2>404</h2>
<h1>Page Not Found</h1>
<p>The specified file was not found on this website. Please check the URL for mistakes and try again.</p>
<h3>Why am I seeing this?</h3>
<p>This page was generated by the Firebase Command-Line Interface. To modify it, edit the <code>404.html</code> file in your project's configured <code>public</code> directory.</p>
</div>
</body>
</html>

public/index.html Normal file

@@ -0,0 +1,89 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Welcome to Firebase Hosting</title>
<!-- update the version number as needed -->
<script defer src="/__/firebase/9.9.0/firebase-app-compat.js"></script>
<!-- include only the Firebase features as you need -->
<script defer src="/__/firebase/9.9.0/firebase-auth-compat.js"></script>
<script defer src="/__/firebase/9.9.0/firebase-database-compat.js"></script>
<script defer src="/__/firebase/9.9.0/firebase-firestore-compat.js"></script>
<script defer src="/__/firebase/9.9.0/firebase-functions-compat.js"></script>
<script defer src="/__/firebase/9.9.0/firebase-messaging-compat.js"></script>
<script defer src="/__/firebase/9.9.0/firebase-storage-compat.js"></script>
<script defer src="/__/firebase/9.9.0/firebase-analytics-compat.js"></script>
<script defer src="/__/firebase/9.9.0/firebase-remote-config-compat.js"></script>
<script defer src="/__/firebase/9.9.0/firebase-performance-compat.js"></script>
<!--
initialize the SDK after all desired features are loaded, set useEmulator to false
to avoid connecting the SDK to running emulators.
-->
<script defer src="/__/firebase/init.js?useEmulator=true"></script>
<style media="screen">
body { background: #ECEFF1; color: rgba(0,0,0,0.87); font-family: Roboto, Helvetica, Arial, sans-serif; margin: 0; padding: 0; }
#message { background: white; max-width: 360px; margin: 100px auto 16px; padding: 32px 24px; border-radius: 3px; }
#message h2 { color: #ffa100; font-weight: bold; font-size: 16px; margin: 0 0 8px; }
#message h1 { font-size: 22px; font-weight: 300; color: rgba(0,0,0,0.6); margin: 0 0 16px;}
#message p { line-height: 140%; margin: 16px 0 24px; font-size: 14px; }
#message a { display: block; text-align: center; background: #039be5; text-transform: uppercase; text-decoration: none; color: white; padding: 16px; border-radius: 4px; }
#message, #message a { box-shadow: 0 1px 3px rgba(0,0,0,0.12), 0 1px 2px rgba(0,0,0,0.24); }
#load { color: rgba(0,0,0,0.4); text-align: center; font-size: 13px; }
@media (max-width: 600px) {
body, #message { margin-top: 0; background: white; box-shadow: none; }
body { border-top: 16px solid #ffa100; }
}
</style>
</head>
<body>
<div id="message">
<h2>Welcome</h2>
<h1>Firebase Hosting Setup Complete</h1>
<p>You're seeing this because you've successfully setup Firebase Hosting. Now it's time to go build something extraordinary!</p>
<a target="_blank" href="https://firebase.google.com/docs/hosting/">Open Hosting Documentation</a>
</div>
<p id="load">Firebase SDK Loading&hellip;</p>
<script>
document.addEventListener('DOMContentLoaded', function() {
const loadEl = document.querySelector('#load');
// // 🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥
// // The Firebase SDK is initialized and available here!
//
// firebase.auth().onAuthStateChanged(user => { });
// firebase.database().ref('/path/to/ref').on('value', snapshot => { });
// firebase.firestore().doc('/foo/bar').get().then(() => { });
// firebase.functions().httpsCallable('yourFunction')().then(() => { });
// firebase.messaging().requestPermission().then(() => { });
// firebase.storage().ref('/path/to/ref').getDownloadURL().then(() => { });
// firebase.analytics(); // call to activate
// firebase.analytics().logEvent('tutorial_completed');
// firebase.performance(); // call to activate
//
// // 🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥
try {
let app = firebase.app();
let features = [
'auth',
'database',
'firestore',
'functions',
'messaging',
'storage',
'analytics',
'remoteConfig',
'performance',
].filter(feature => typeof app[feature] === 'function');
loadEl.textContent = `Firebase SDK loaded with ${features.join(', ')}`;
} catch (e) {
console.error(e);
loadEl.textContent = 'Error loading the Firebase SDK, check the console.';
}
});
</script>
</body>
</html>

static/yubico-otp.png Normal file (binary image, 160 KiB; not shown)