PramUkesh 2023-07-12 12:58:44 +00:00 committed by GitHub
commit e44ff512fa
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
350 changed files with 1100 additions and 74652 deletions

.github/FUNDING.yml (vendored, 11 lines changed)

@@ -1 +1,10 @@
custom: https://zeronet.io/docs/help_zeronet/donate/
github: canewsin
patreon: # Replace with a single Patreon username e.g., user1
open_collective: # Replace with a single Open Collective username e.g., user1
ko_fi: canewsin
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: canewsin
issuehunt: # Replace with a single IssueHunt username e.g., user1
otechie: # Replace with a single Otechie username e.g., user1
custom: ['https://paypal.me/PramUkesh', 'https://zerolink.ml/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/']

.github/workflows/codeql-analysis.yml (vendored, new file, 72 lines)

@@ -0,0 +1,72 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ py3-latest ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ py3-latest ]
schedule:
- cron: '32 19 * * 2'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'javascript', 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# and modify them (or add more) to build your code if your project requires it; please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

.github/workflows/tests.yml

@@ -4,46 +4,48 @@ on: [push, pull_request]
jobs:
test:
runs-on: ubuntu-16.04
runs-on: ubuntu-20.04
strategy:
max-parallel: 16
matrix:
python-version: [3.5, 3.6, 3.7, 3.8, 3.9]
python-version: ["3.7", "3.8", "3.9"]
steps:
- uses: actions/checkout@v2
- name: Checkout ZeroNet
uses: actions/checkout@v2
with:
submodules: "true"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Prepare for installation
run: |
python3 -m pip install setuptools
python3 -m pip install --upgrade pip wheel
python3 -m pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium
- name: Install
run: |
python3 -m pip install --upgrade -r requirements.txt
python3 -m pip list
- name: Prepare for tests
run: |
openssl version -a
echo 0 | sudo tee /proc/sys/net/ipv6/conf/all/disable_ipv6
- name: Test
run: |
catchsegv python3 -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini
export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python3 -m pytest -x plugins/CryptMessage/Test
export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python3 -m pytest -x plugins/Bigfile/Test
export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python3 -m pytest -x plugins/AnnounceLocal/Test
export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python3 -m pytest -x plugins/OptionalManager/Test
export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test
export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test
find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')"
find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')"
flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/
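For clarity, the JSON sanity check packed into those `find | xargs` one-liners is equivalent to this standalone Python sketch (a hypothetical helper script, not part of the workflow):

```python
import json
import pathlib
import sys

# Validate every .json file under src/ and plugins/, printing [OK] per file
# and failing loudly on the first malformed one, as the workflow's Test step does.
for root in ("src", "plugins"):
    for path in pathlib.Path(root).rglob("*.json"):
        try:
            json.loads(path.read_text())
            print(path, "[OK]")
        except ValueError as err:
            sys.exit(f"{path}: invalid JSON ({err})")
```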

.gitmodules (vendored, new file, 3 lines)

@@ -0,0 +1,3 @@
[submodule "plugins"]
path = plugins
url = https://github.com/ZeroNetX/ZeroNet-Plugins.git
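With the plugins tracked as a submodule, a fresh checkout needs `git clone --recursive` (or a follow-up `git submodule update --init`) to pull them in, which is why the tests workflow above checks out with `submodules: "true"`.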

CHANGELOG.md

@@ -1,6 +1,85 @@
### ZeroNet 0.7.2 (2020-09-?) Rev4206?
### ZeroNet 0.9.0 (2023-07-12) Rev4630
- Fix RDos Issue in Plugins https://github.com/ZeroNetX/ZeroNet-Plugins/pull/9
- Add trackers to Config.py as a failsafe in case trackers.txt is missing
- Added Proxy links
- Fix pysha3 dep installation issue
- FileRequest -> Remove Unnecessary check, Fix error wording
- Fix Response when site is missing for `actionAs`
### ZeroNet 0.8.5 (2023-02-12) Rev4625
- Fix(https://github.com/ZeroNetX/ZeroNet/pull/202) for SSL cert gen failed on Windows.
- Default theme-class for missing values in `users.json`.
- Fetch Stats Plugin changes.
### ZeroNet 0.8.4 (2022-12-12) Rev4620
- Increase Minimum Site size to 25MB.
### ZeroNet 0.8.3 (2022-12-11) Rev4611
- main.py -> Fix accessing unassigned variable
- ContentManager -> Support for multiSig
- SiteStorage.py -> Fix accessing unassigned variable
- ContentManager.py Improve Logging of Valid Signers
### ZeroNet 0.8.2 (2022-11-01) Rev4610
- Fix Startup Error when plugins dir missing
- Move trackers to separate file & Add more trackers
- Config:: Skip loading missing tracker files
- Added documentation for getRandomPort fn
### ZeroNet 0.8.1 (2022-10-01) Rev4600
- fix readdress loop (cherry-pick previously added commit from conservancy)
- Remove Patreon badge
- Update README-ru.md (#177)
- Include inner_path of failed request for signing in error msg and response
- Don't Fail Silently When Cert is Not Selected
- Console Log Updates, Specify min supported ZeroNet version for Rust version Protocol Compatibility
- Update FUNDING.yml
### ZeroNet 0.8.0 (2022-05-27) Rev4591
- Revert File Open to catch File Access Errors.
### ZeroNet 0.7.9-patch (2022-05-26) Rev4586
- Use xescape(s) from zeronet-conservancy
- actionUpdate response Optimisation
- Fetch Plugins Repo Updates
- Fix Unhandled File Access Errors
- Create codeql-analysis.yml
### ZeroNet 0.7.9 (2022-05-26) Rev4585
- Rust Version Compatibility for update Protocol msg
- Removed non-working trackers.
- Dynamically Load Trackers from Dashboard Site.
- Tracker Supply Improvements.
- Fix Repo Url for Bug Report
- First Party Tracker Update Service using Dashboard Site.
- remove old v2 onion service [#158](https://github.com/ZeroNetX/ZeroNet/pull/158)
### ZeroNet 0.7.8 (2022-03-02) Rev4580
- Update Plugins with some bug fixes and Improvements
### ZeroNet 0.7.6 (2022-01-12) Rev4565
- Sync Plugin Updates
- Clean up tor v3 patch [#115](https://github.com/ZeroNetX/ZeroNet/pull/115)
- Add More Default Plugins to Repo
- Doubled Site Publish Limits
- Update ZeroNet Repo Urls [#103](https://github.com/ZeroNetX/ZeroNet/pull/103)
- UI/UX: Increases Size of Notifications Close Button [#106](https://github.com/ZeroNetX/ZeroNet/pull/106)
- Moved Plugins to Separate Repo
- Added `access_key` variable in Config; it is used to access restricted plugins when the multiuser plugin is enabled. When MultiUserPlugin is enabled, pages like /Stats cannot normally be accessed; this key removes that restriction.
- Added `last_connection_id_current_version` to ConnectionServer, helpful to estimate the number of connections from the current client version.
- Added current version: connections to the /Stats page. See the previous point.
### ZeroNet 0.7.5 (2021-11-28) Rev4560
- Add more default trackers
- Change default homepage address to `1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`
- Change default update site address to `1Update8crprmciJHwp2WXqkx2c4iYp18`
### ZeroNet 0.7.3 (2021-11-28) Rev4555
- Fix xrange is undefined error
- Fix Incorrect viewport on mobile while loading
- Tor-V3 Patch by anonymoose
### ZeroNet 0.7.1 (2019-07-01) Rev4206
### Added

Dockerfile

@@ -1,4 +1,4 @@
FROM alpine:3.11
FROM alpine:3.15
#Base settings
ENV HOME /root
@@ -6,9 +6,9 @@ ENV HOME /root
COPY requirements.txt /root/requirements.txt
#Install ZeroNet
RUN apk --update --no-cache --no-progress add python3 python3-dev gcc libffi-dev musl-dev make tor openssl \
RUN apk --update --no-cache --no-progress add python3 python3-dev py3-pip gcc g++ autoconf automake libtool libffi-dev musl-dev make tor openssl \
&& pip3 install -r /root/requirements.txt \
&& apk del python3-dev gcc libffi-dev musl-dev make \
&& apk del python3-dev gcc g++ autoconf automake libtool libffi-dev musl-dev make \
&& echo "ControlPort 9051" >> /etc/tor/torrc \
&& echo "CookieAuthentication 1" >> /etc/tor/torrc
@@ -22,12 +22,12 @@ COPY . /root
VOLUME /root/data
#Control if Tor proxy is started
ENV ENABLE_TOR false
ENV ENABLE_TOR true
WORKDIR /root
#Set upstart command
CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26552
CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26117
#Expose ports
EXPOSE 43110 26552
EXPOSE 43110 26117
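A typical way to run this image (the volume name here is illustrative) is `docker run -d -v zeronet-data:/root/data -p 127.0.0.1:43110:43110 -p 26117:26117 canewsin/zeronet`, adding `-e ENABLE_TOR=false` to opt out of the bundled Tor proxy.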

README-ru.md

@@ -1,211 +1,133 @@
# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/)
# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet)
[简体中文](./README-zh-cn.md)
[English](./README.md)
Decentralized websites using Bitcoin cryptography and the BitTorrent network - https://zeronet.io
Decentralized websites using Bitcoin cryptography and the BitTorrent protocol - https://zeronet.dev ([Mirror in ZeroNet](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/)). Unlike Bitcoin, ZeroNet doesn't need a blockchain to run; it uses the same cryptography to guarantee data integrity and validation.
## Why?
* We believe in an open, free, and uncensored network and communication.
* No single point of failure: a site is online as long as at least 1 peer is serving it.
* No hosting costs: sites are served by their visitors.
* Impossible to shut down: it's nowhere because it's everywhere.
* Fast and works offline: you can access a site even when the Internet is unavailable.
- We believe in open, free, and censorship-resistant networks and communication.
- No single point of failure: a site stays online as long as at least 1 peer is serving it.
- No hosting costs: sites are served by their visitors.
- Impossible to shut down: it's nowhere because it's everywhere.
- Fast and available offline: you can access a site because a copy of it is stored on your computer and with your peers.
## Features
* Real-time updated sites
* Namecoin .bit domain support
* Easy to set up: unpack & run
* One-click website cloning
* Password-less [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki)
based authorization: your account is protected by the same cryptography as your Bitcoin wallet
* Built-in SQL server with P2P data synchronization: makes site development easier and page loading faster
* Anonymity: full Tor network support with .onion hidden services instead of IPv4 addresses
* TLS encrypted connections
* Automatic uPnP port opening
* Plugin for multiuser (openproxy) support
* Works with any browser and operating system
- Real-time updated sites
- `.bit` domain support ([Namecoin](https://www.namecoin.org))
- Easy to install: just unpack and run
- One-click site cloning
- Password-less [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki)
based authorization: your account is protected by the same cryptography as your Bitcoin wallet
- Built-in SQL server with P2P data synchronization: makes site development easier and page loading faster
- Anonymity: full Tor network support, using `.onion` hidden services instead of IPv4 addresses
- TLS encrypted connections
- Automatic UPnP port opening
- Plugin for multiuser (openproxy) support
- Works with any browser and operating system
## Current limitations
- File transactions are not compressed
- No private sites
## How does it work?
* After starting `zeronet.py` you will be able to visit zeronet sites using the address
`http://127.0.0.1:43110/{zeronet_address}`
(e.g. `http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`).
* When you visit a new zeronet site, it tries to find peers via BitTorrent
so it can download the site files (html, css, js ...) from them.
* Every site you visit is also served by you (i.e. stored on your computer).
* Every site contains a `content.json` file which holds the sha512 hashes of all other files
and a signature created with the site's private key.
* If the site owner (who has the private key for the site address) modifies the site, then he/she
- After starting `zeronet.py` you can visit ZeroNet sites using the address
`http://127.0.0.1:43110/{zeronet_address}`
(e.g. `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`).
- When you visit a new ZeroNet site, it tries to find peers via the BitTorrent protocol
so it can download the site files (HTML, CSS, JS, etc.) from them.
- Once you visit a site, you become one of its peers too.
- Every site contains a `content.json` file which holds the SHA512 hashes of all other files
and a signature created with the site's private key.
- If the site owner (whoever holds the private key for the site address) modifies the site, he
signs the new `content.json` and publishes it to the peers. After that, the peers verify the integrity of `content.json`
(using the signature), download the modified files, and publish the new content to other peers.
#### [Slideshow about ZeroNet cryptography, site updates, multi-user sites »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000)
#### [Frequently asked questions »](https://zeronet.io/docs/faq/)
#### [ZeroNet Developer Documentation »](https://zeronet.io/docs/site_development/getting_started/)
(using the signature), download the changed files, and distribute the new content to other peers.
[Slideshow about ZeroNet cryptography, site updates, multi-user sites »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000)
[Frequently asked questions »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/)
[ZeroNet Developer Documentation »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
## Screenshots
![Screenshot](https://i.imgur.com/H60OAHY.png)
![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png)
[More screenshots in the ZeroNet docs »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/)
#### [More screenshots in the ZeroNet docs »](https://zeronet.io/docs/using_zeronet/sample_sites/)
## How to join?
### Windows
## How to join
- Download and extract the [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) archive (26MB)
- Run `ZeroNet.exe`
* Download the ZeroBundle package:
* [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip)
* [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip)
* [Linux 64-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz)
* [Linux 32-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz)
* Unpack anywhere
* Run `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux)
### macOS
### Linux terminal
- Download and extract the [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) archive (14MB)
- Run `ZeroNet.app`
* `wget https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz`
* `tar xvpfz ZeroBundle-linux64.tar.gz`
* `cd ZeroBundle`
* Start it with `./ZeroNet.sh`
### Linux (64-bit)
It downloads the latest version of ZeroNet and then starts it automatically.
- Download and extract the [ZeroNet-linux.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip) archive (14MB)
- Run `./ZeroNet.sh`
#### Manual install on Debian Linux
> **Note**
> Start it as `./ZeroNet.sh --ui_ip '*' --ui_restrict your_ip_address` to allow remote connections to the web interface.
* `sudo apt-get update`
* `sudo apt-get install msgpack-python python-gevent`
* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz`
* `tar xvpfz master.tar.gz`
* `cd ZeroNet-master`
* Start it with `python2 zeronet.py`
* Open http://127.0.0.1:43110/ in your browser.
### Docker
### [Arch Linux](https://www.archlinux.org)
The official image is available here: https://hub.docker.com/r/canewsin/zeronet/
* `git clone https://aur.archlinux.org/zeronet.git`
* `cd zeronet`
* `makepkg -srci`
* `systemctl start zeronet`
* Open http://127.0.0.1:43110/ in your browser.
### Android (arm, arm64, x86)
See the [ArchWiki](https://wiki.archlinux.org)'s [ZeroNet
article](https://wiki.archlinux.org/index.php/ZeroNet) for further help.
- Requires Android 5.0 Lollipop at minimum
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
alt="Download from Google Play"
height="80">](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile)
- APK download: https://github.com/canewsin/zeronet_mobile/releases
### [Gentoo Linux](https://www.gentoo.org)
### Android (arm, arm64, x86) thin client for preview only (1MB)
* [`layman -a raiagent`](https://github.com/leycec/raiagent)
* `echo '>=net-vpn/zeronet-0.5.4' >> /etc/portage/package.accept_keywords`
* *(Optional)* Enable Tor support: `echo 'net-vpn/zeronet tor' >>
/etc/portage/package.use`
* `emerge zeronet`
* `rc-service zeronet start`
* Open http://127.0.0.1:43110/ in your browser.
- Requires Android 4.1 Jelly Bean at minimum
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
alt="Download from Google Play"
height="80">](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite)
See `/usr/share/doc/zeronet-*/README.gentoo.bz2` for further help.
### Install from source
### [FreeBSD](https://www.freebsd.org/)
* `pkg install zeronet` or `cd /usr/ports/security/zeronet/ && make install clean`
* `sysrc zeronet_enable="YES"`
* `service zeronet start`
* Open http://127.0.0.1:43110/ in your browser.
### [Vagrant](https://www.vagrantup.com/)
* `vagrant up`
* Connect to the VM with `vagrant ssh`
* `cd /vagrant`
* Run `python2 zeronet.py --ui_ip 0.0.0.0`
* Open http://127.0.0.1:43110/ in your browser.
### [Docker](https://www.docker.com/)
* `docker run -d -v <local_data_folder>:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet`
* This Docker image includes a Tor proxy, which is disabled by default.
Beware that some hosting providers may not allow you to run Tor on their servers.
If you want to enable it, set the `ENABLE_TOR` environment variable to `true` (default: `false`), e.g.:
`docker run -d -e "ENABLE_TOR=true" -v <local_data_folder>:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet`
* Open http://127.0.0.1:43110/ in your browser.
### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/)
* `virtualenv env`
* `source env/bin/activate`
* `pip install msgpack gevent`
* `python2 zeronet.py`
* Open http://127.0.0.1:43110/ in your browser.
## Current limitations
* ~~No torrent-like file splitting for big file support~~ (big file support added)
* ~~No more anonymous than BitTorrent~~ (built-in full Tor support added)
* File transactions are not compressed ~~or encrypted yet~~ (TLS encryption added)
* No private sites
## How can I create a ZeroNet site?
Shut down zeronet if it is running
```bash
$ zeronet.py siteCreate
...
- Site private key: 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq
- Site address: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
...
- Site created!
$ zeronet.py
...
```sh
wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip
unzip ZeroNet-src.zip
cd ZeroNet
sudo apt-get update
sudo apt-get install python3-pip
sudo python3 -m pip install -r requirements.txt
```
- Run `python3 zeronet.py`
Congratulations, you're done! Now anyone can access your site at
`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2`
Open the ZeroHello landing page in your browser at http://127.0.0.1:43110/
Next steps: [ZeroNet Developer Documentation](https://zeronet.io/docs/site_development/getting_started/)
## How do I create a site in ZeroNet?
- Click **⋮** > **"Create new, empty site"** in the menu on the [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d) site.
- You will be **redirected** to a brand new site that can only be modified by you!
- You can find and modify your site's content in the **data/[your_site_address]** directory
- After making changes, open your site, drag the "0" button in the top right corner to the left, then press the **sign** and **publish** buttons at the bottom
## How can I modify a ZeroNet site?
* Modify the files located in the data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 directory.
When you are done:
```bash
$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
- Signing site: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2...
Private key (input hidden):
```
* Enter the private key you got when you created the site, then:
```bash
$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
...
Site:13DNDk..bhC2 Publishing to 3/10 peers...
Site:13DNDk..bhC2 Successfully published to 3 peers
- Serving files....
```
* That's it! You have successfully signed and published your changes.
Next steps: [ZeroNet Developer Documentation](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
## Help keep this project alive
- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX
- Paypal: https://zeronet.io/docs/help_zeronet/donate/
### Sponsors
* Better macOS/Safari compatibility made possible by [BrowserStack.com](https://www.browserstack.com)
- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred)
- LiberaPay: https://liberapay.com/PramUkesh
- Paypal: https://paypal.me/PramUkesh
- Others: [Donate](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive)
#### Thank you!
* More info, help, changelog, and zeronet sites: https://www.reddit.com/r/zeronet/
* Come chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/HelloZeroNet/ZeroNet)
* Email: hello@zeronet.io (PGP: CB9613AE)
- More info, help, changelog, and ZeroNet sites: https://www.reddit.com/r/zeronetx/
- Chat with us on [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or in [Gitter](https://gitter.im/canewsin/ZeroNet)
- Email: canews.in@gmail.com

README-zh-cn.md

@@ -1,8 +1,8 @@
# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=py3)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/)
# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet)
[English](./README.md)
Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.io
Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.dev
## Why?
@@ -33,7 +33,7 @@
* After starting `zeronet.py` you can visit sites in zeronet via
`http://127.0.0.1:43110/{zeronet_address}` (e.g.
`http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`)
`http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`)
* When you browse a zeronet site, the client tries to find available peers through the BitTorrent network and downloads the needed files (html, css, js...) from them
* You store every site you have visited
* Every site contains a file named `content.json`, which stores the sha512 hashes of all other files and a signature generated with the site's private key
@@ -41,9 +41,9 @@
then the peers, after verifying the authenticity of `content.json` with the signature, download the modified files and push the new content to other peers
#### [Slideshow about ZeroNet cryptography, site updates, multi-user sites »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000)
#### [Frequently asked questions »](https://zeronet.io/docs/faq/)
#### [Frequently asked questions »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/)
#### [ZeroNet Developer Documentation »](https://zeronet.io/docs/site_development/getting_started/)
#### [ZeroNet Developer Documentation »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
## Screenshots
@@ -51,28 +51,28 @@
![Screenshot](https://i.imgur.com/H60OAHY.png)
![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png)
#### [More screenshots in the ZeroNet docs »](https://zeronet.io/docs/using_zeronet/sample_sites/)
#### [More screenshots in the ZeroNet docs »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/)
## How to join
### Windows
- Download [ZeroNet-py3-win64.zip](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist-win64/ZeroNet-py3-win64.zip) (18MB)
- Download [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB)
- Unpack anywhere
- Run `ZeroNet.exe`
### macOS
- Download [ZeroNet-dist-mac.zip](https://github.com/HelloZeroNet/ZeroNet-dist/archive/mac/ZeroNet-dist-mac.zip) (13.2MB)
- Download [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB)
- Unpack anywhere
- Run `ZeroNet.app`
### Linux (x86-64bit)
- `wget https://github.com/HelloZeroNet/ZeroNet-linux/archive/dist-linux64/ZeroNet-py3-linux64.tar.gz`
- `tar xvpfz ZeroNet-py3-linux64.tar.gz`
- `cd ZeroNet-linux-dist-linux64/`
- `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip`
- `unzip ZeroNet-linux.zip`
- `cd ZeroNet-linux`
- Start with `./ZeroNet.sh`
- Open http://127.0.0.1:43110/ in your browser to reach the ZeroHello page
@@ -80,44 +80,53 @@
### Install from source
- `wget https://github.com/HelloZeroNet/ZeroNet/archive/py3/ZeroNet-py3.tar.gz`
- `tar xvpfz ZeroNet-py3.tar.gz`
- `cd ZeroNet-py3`
- `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip`
- `unzip ZeroNet-src.zip`
- `cd ZeroNet`
- `sudo apt-get update`
- `sudo apt-get install python3-pip`
- `sudo python3 -m pip install -r requirements.txt`
- Start with `python3 zeronet.py`
- Open http://127.0.0.1:43110/ in your browser to reach the ZeroHello page
### Android (arm, arm64, x86)
- minimum Android version supported 21 (Android 5.0 Lollipop)
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
alt="Download from Google Play"
height="80">](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile)
- APK download: https://github.com/canewsin/zeronet_mobile/releases
### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB)
- minimum Android version supported 16 (JellyBean)
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
alt="Download from Google Play"
height="80">](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite)
## Current limitations
* ~~No torrent-like file splitting for big file support~~ (big file support added)
* ~~No more anonymous than BitTorrent~~ (built-in full Tor support added)
* File transfers are not compressed ~~or encrypted~~ (TLS support added)
* File transfers are not compressed
* No private sites
## How to create a ZeroNet site?
* Click the **⋮** > **"Create new, empty site"** menu item on the [ZeroHello](http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D) site
* Click the **⋮** > **"Create new, empty site"** menu item on the [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d) site
* You will be **redirected** to a brand new site that only you can modify
* You can find and modify your site's content in the **data/[your_site_address]** directory
* After making changes, open your site, drag the "0" button in the top right corner to the left, then press the **sign** and **publish** buttons at the bottom
Next steps: [ZeroNet Developer Documentation](https://zeronet.io/docs/site_development/getting_started/)
Next steps: [ZeroNet Developer Documentation](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
## Help keep this project alive
- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred)
- LiberaPay: https://liberapay.com/PramUkesh
- Paypal: https://paypal.me/PramUkesh
- Others: [Donate](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive)
- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX
- Paypal: https://zeronet.io/docs/help_zeronet/donate/
### Sponsors
* Better macOS/Safari compatibility made possible by [BrowserStack.com](https://www.browserstack.com)
#### Thank you!
* More info, help, changelog, and zeronet sites: https://www.reddit.com/r/zeronet/
* Come chat with us on [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or [gitter](https://gitter.im/HelloZeroNet/ZeroNet)
* [Here](https://gitter.im/ZeroNet-zh/Lobby) is a Chinese chat room on gitter
* Email: hello@zeronet.io (PGP: [960F FF2D 6C14 5AA6 13E8 491B 5B63 BAE6 CB96 13AE](https://zeronet.io/files/tamas@zeronet.io_pub.asc))
* More info, help, changelog, and zeronet sites: https://www.reddit.com/r/zeronetx/
* Come chat with us on [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or [gitter](https://gitter.im/canewsin/ZeroNet)
* [Here](https://gitter.im/canewsin/ZeroNet) is a Chinese chat room on gitter
* Email: canews.in@gmail.com

README.md

@@ -1,6 +1,6 @@
# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=py3)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/) ![tests](https://github.com/HelloZeroNet/ZeroNet/workflows/tests/badge.svg) [![Docker Pulls](https://img.shields.io/docker/pulls/nofish/zeronet)](https://hub.docker.com/r/nofish/zeronet)
Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.io / [onion](http://zeronet34m3r5ngdu54uj57dcafpgdjhxsgq5kla5con4qvcmfzpvhad.onion)
# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet)
<!--TODO: Update Onion Site -->
Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.dev / [ZeroNet Site](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/). Unlike Bitcoin, ZeroNet doesn't need a blockchain to run; it uses the same cryptography as BTC to ensure data integrity and validation.
## Why?
@@ -33,22 +33,22 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/
* After starting `zeronet.py` you will be able to visit zeronet sites using
`http://127.0.0.1:43110/{zeronet_address}` (eg.
`http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`).
`http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`).
* When you visit a new zeronet site, it tries to find peers using the BitTorrent
network so it can download the site files (html, css, js...) from them.
* Each visited site is also served by you.
* Every site contains a `content.json` file which holds all other files in a sha512 hash
and a signature generated using the site's private key.
* If the site owner (who has the private key for the site address) modifies the
site, then he/she signs the new `content.json` and publishes it to the peers.
site and signs the new `content.json` and publishes it to the peers.
Afterwards, the peers verify the `content.json` integrity (using the
signature), they download the modified files and publish the new content to
other peers.
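To make the `content.json` mechanism concrete, here is a minimal verification sketch (a hypothetical helper, not ZeroNet's actual implementation; it assumes the conventional `files` map of sha512 hex digests in `content.json` and omits the signature check):

```python
import hashlib
import json
from pathlib import Path

def verify_site_files(site_dir: str) -> None:
    """Compare each file listed in content.json against its recorded sha512.

    The recorded value may be a truncated hex digest, so only its prefix is
    compared; verifying the signature against the site key is out of scope.
    """
    site = Path(site_dir)
    content = json.loads((site / "content.json").read_text())
    for rel_path, info in content.get("files", {}).items():
        digest = hashlib.sha512((site / rel_path).read_bytes()).hexdigest()
        ok = digest.startswith(info["sha512"])
        print(rel_path, "[OK]" if ok else "[MISMATCH]")

verify_site_files("data/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d")
```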
#### [Slideshow about ZeroNet cryptography, site updates, multi-user sites »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000)
#### [Frequently asked questions »](https://zeronet.io/docs/faq/)
#### [Frequently asked questions »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/)
#### [ZeroNet Developer Documentation »](https://zeronet.io/docs/site_development/getting_started/)
#### [ZeroNet Developer Documentation »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
## Screenshots
@@ -56,48 +56,72 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/
![Screenshot](https://i.imgur.com/H60OAHY.png)
![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png)
#### [More screenshots in ZeroNet docs »](https://zeronet.io/docs/using_zeronet/sample_sites/)
#### [More screenshots in ZeroNet docs »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/)
## How to join
### Windows
- Download [ZeroNet-py3-win64.zip](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist-win64/ZeroNet-py3-win64.zip) (18MB)
- Download [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB)
- Unpack anywhere
- Run `ZeroNet.exe`
### macOS
- Download [ZeroNet-dist-mac.zip](https://github.com/HelloZeroNet/ZeroNet-dist/archive/mac/ZeroNet-dist-mac.zip) (13.2MB)
- Download [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB)
- Unpack anywhere
- Run `ZeroNet.app`
### Linux (x86-64bit)
- `wget https://github.com/HelloZeroNet/ZeroNet-linux/archive/dist-linux64/ZeroNet-py3-linux64.tar.gz`
- `tar xvpfz ZeroNet-py3-linux64.tar.gz`
- `cd ZeroNet-linux-dist-linux64/`
- `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip`
- `unzip ZeroNet-linux.zip`
- `cd ZeroNet-linux`
- Start with: `./ZeroNet.sh`
- Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/
__Tip:__ Start with `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address` to allow remote connections on the web interface.
### Android (arm, arm64, x86)
- minimum Android version supported 16 (JellyBean)
- minimum Android version supported 21 (Android 5.0 Lollipop)
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
alt="Download from Google Play"
height="80">](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile)
- APK download: https://github.com/canewsin/zeronet_mobile/releases
- XDA Labs: https://labs.xda-developers.com/store/app/in.canews.zeronet
### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB)
- minimum Android version supported 16 (JellyBean)
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
alt="Download from Google Play"
height="80">](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite)
#### Docker
There is an official image, built from source at: https://hub.docker.com/r/nofish/zeronet/
There is an official image, built from source at: https://hub.docker.com/r/canewsin/zeronet/
### Online Proxies
Proxies are like seed boxes for sites (i.e. ZNX runs on a cloud VPS); you can try the zeronet experience through a proxy. Add your proxy below if you have one.
#### Official ZNX Proxy:
https://proxy.zeronet.dev/
https://zeronet.dev/
#### From Community
https://0net-preview.com/
https://portal.ngnoid.tv/
https://zeronet.ipfsscan.io/
### Install from source
- `wget https://github.com/HelloZeroNet/ZeroNet/archive/py3/ZeroNet-py3.tar.gz`
- `tar xvpfz ZeroNet-py3.tar.gz`
- `cd ZeroNet-py3`
- `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip`
- `unzip ZeroNet-src.zip`
- `cd ZeroNet`
- `sudo apt-get update`
- `sudo apt-get install python3-pip`
- `sudo python3 -m pip install -r requirements.txt`
@@ -106,32 +130,27 @@ There is an official image, built from source at: https://hub.docker.com/r/nofis
## Current limitations
* ~~No torrent-like file splitting for big file support~~ (big file support added)
* ~~No more anonymous than Bittorrent~~ (built-in full Tor support added)
* File transactions are not compressed ~~or encrypted yet~~ (TLS encryption added)
* File transactions are not compressed
* No private sites
## How can I create a ZeroNet site?
* Click on **⋮** > **"Create new, empty site"** menu item on the site [ZeroHello](http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D).
* Click on **⋮** > **"Create new, empty site"** menu item on the site [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d).
* You will be **redirected** to a completely new site that is only modifiable by you!
* You can find and modify your site's content in **data/[yoursiteaddress]** directory
* After the modifications, open your site, drag the top-right "0" button to the left, then press the **sign** and **publish** buttons at the bottom
Next steps: [ZeroNet Developer Documentation](https://zeronet.io/docs/site_development/getting_started/)
Next steps: [ZeroNet Developer Documentation](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
## Help keep this project alive
- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX
- Paypal: https://zeronet.io/docs/help_zeronet/donate/
### Sponsors
* Better macOS/Safari compatibility made possible by [BrowserStack.com](https://www.browserstack.com)
- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred)
- LiberaPay: https://liberapay.com/PramUkesh
- Paypal: https://paypal.me/PramUkesh
- Others: [Donate](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive)
#### Thank you!
* More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronet/
* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/HelloZeroNet/ZeroNet)
* Email: hello@zeronet.io (PGP: [960F FF2D 6C14 5AA6 13E8 491B 5B63 BAE6 CB96 13AE](https://zeronet.io/files/tamas@zeronet.io_pub.asc))
* More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronetx/
* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/canewsin/ZeroNet)
* Email: canews.in@gmail.com

plugins (submodule, 1 line changed)

@@ -0,0 +1 @@
Subproject commit 689d9309f73371f4681191b125ec3f2e14075eeb

plugins/AnnounceBitTorrent/AnnounceBitTorrentPlugin.py (deleted)

@@ -1,148 +0,0 @@
import time
import urllib.request
import struct
import socket
import lib.bencode_open as bencode_open
from lib.subtl.subtl import UdpTrackerClient
import socks
import sockshandler
import gevent
from Plugin import PluginManager
from Config import config
from Debug import Debug
from util import helper
# We can only import plugin host classes after the plugins are loaded
@PluginManager.afterLoad
def importHostClasses():
global Peer, AnnounceError
from Peer import Peer
from Site.SiteAnnouncer import AnnounceError
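# registerTo() mixes this class into the named host class (here SiteAnnouncer),
# so these methods override the originals and super() reaches the host's implementation.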
@PluginManager.registerTo("SiteAnnouncer")
class SiteAnnouncerPlugin(object):
def getSupportedTrackers(self):
trackers = super(SiteAnnouncerPlugin, self).getSupportedTrackers()
if config.disable_udp or config.trackers_proxy != "disable":
trackers = [tracker for tracker in trackers if not tracker.startswith("udp://")]
return trackers
def getTrackerHandler(self, protocol):
if protocol == "udp":
handler = self.announceTrackerUdp
elif protocol == "http":
handler = self.announceTrackerHttp
elif protocol == "https":
handler = self.announceTrackerHttps
else:
handler = super(SiteAnnouncerPlugin, self).getTrackerHandler(protocol)
return handler
def announceTrackerUdp(self, tracker_address, mode="start", num_want=10):
s = time.time()
if config.disable_udp:
raise AnnounceError("Udp disabled by config")
if config.trackers_proxy != "disable":
raise AnnounceError("Udp trackers not available with proxies")
ip, port = tracker_address.split("/")[0].split(":")
tracker = UdpTrackerClient(ip, int(port))
if helper.getIpType(ip) in self.getOpenedServiceTypes():
tracker.peer_port = self.fileserver_port
else:
tracker.peer_port = 0
tracker.connect()
if not tracker.poll_once():
raise AnnounceError("Could not connect")
tracker.announce(info_hash=self.site.address_sha1, num_want=num_want, left=431102370)
back = tracker.poll_once()
if not back:
raise AnnounceError("No response after %.0fs" % (time.time() - s))
elif type(back) is dict and "response" in back:
peers = back["response"]["peers"]
else:
raise AnnounceError("Invalid response: %r" % back)
return peers
def httpRequest(self, url):
headers = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'
}
req = urllib.request.Request(url, headers=headers)
if config.trackers_proxy == "tor":
tor_manager = self.site.connection_server.tor_manager
handler = sockshandler.SocksiPyHandler(socks.SOCKS5, tor_manager.proxy_ip, tor_manager.proxy_port)
opener = urllib.request.build_opener(handler)
return opener.open(req, timeout=50)
elif config.trackers_proxy == "disable":
return urllib.request.urlopen(req, timeout=25)
else:
proxy_ip, proxy_port = config.trackers_proxy.split(":")
handler = sockshandler.SocksiPyHandler(socks.SOCKS5, proxy_ip, int(proxy_port))
opener = urllib.request.build_opener(handler)
return opener.open(req, timeout=50)
def announceTrackerHttps(self, *args, **kwargs):
kwargs["protocol"] = "https"
return self.announceTrackerHttp(*args, **kwargs)
def announceTrackerHttp(self, tracker_address, mode="start", num_want=10, protocol="http"):
tracker_ip, tracker_port = tracker_address.rsplit(":", 1)
if helper.getIpType(tracker_ip) in self.getOpenedServiceTypes():
port = self.fileserver_port
else:
port = 1
params = {
'info_hash': self.site.address_sha1,
'peer_id': self.peer_id, 'port': port,
'uploaded': 0, 'downloaded': 0, 'left': 431102370, 'compact': 1, 'numwant': num_want,
'event': 'started'
}
url = protocol + "://" + tracker_address + "?" + urllib.parse.urlencode(params)
s = time.time()
response = None
# Load url
if config.tor == "always" or config.trackers_proxy != "disable":
timeout = 60
else:
timeout = 30
with gevent.Timeout(timeout, False): # Make sure of timeout
req = self.httpRequest(url)
response = req.read()
req.close()
req = None
if not response:
raise AnnounceError("No response after %.0fs" % (time.time() - s))
# Decode peers
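# Compact tracker response: each peer entry is 6 bytes, a 4-byte big-endian
# IPv4 address followed by a 2-byte big-endian port, hence struct "!LH" below.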
try:
peer_data = bencode_open.loads(response)[b"peers"]
response = None
peer_count = int(len(peer_data) / 6)
peers = []
for peer_offset in range(peer_count):
off = 6 * peer_offset
peer = peer_data[off:off + 6]
addr, port = struct.unpack('!LH', peer)
peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port})
except Exception as err:
raise AnnounceError("Invalid response: %r (%s)" % (response, Debug.formatException(err)))
return peers

plugins/AnnounceBitTorrent/__init__.py (deleted)

@@ -1 +0,0 @@
from . import AnnounceBitTorrentPlugin

plugins/AnnounceBitTorrent/plugin.json (deleted)

@@ -1,5 +0,0 @@
{
"name": "AnnounceBitTorrent",
"description": "Discover new peers using BitTorrent trackers.",
"default": "enabled"
}

plugins/AnnounceLocal/AnnounceLocalPlugin.py (deleted)

@@ -1,147 +0,0 @@
import time
import gevent
from Plugin import PluginManager
from Config import config
from . import BroadcastServer
@PluginManager.registerTo("SiteAnnouncer")
class SiteAnnouncerPlugin(object):
def announce(self, force=False, *args, **kwargs):
local_announcer = self.site.connection_server.local_announcer
thread = None
if local_announcer and (force or time.time() - local_announcer.last_discover > 5 * 60):
thread = gevent.spawn(local_announcer.discover, force=force)
back = super(SiteAnnouncerPlugin, self).announce(force=force, *args, **kwargs)
if thread:
thread.join()
return back
class LocalAnnouncer(BroadcastServer.BroadcastServer):
def __init__(self, server, listen_port):
super(LocalAnnouncer, self).__init__("zeronet", listen_port=listen_port)
self.server = server
self.sender_info["peer_id"] = self.server.peer_id
self.sender_info["port"] = self.server.port
self.sender_info["broadcast_port"] = listen_port
self.sender_info["rev"] = config.rev
self.known_peers = {}
self.last_discover = 0
def discover(self, force=False):
self.log.debug("Sending discover request (force: %s)" % force)
self.last_discover = time.time()
if force: # Probably new site added, clean cache
self.known_peers = {}
for peer_id, known_peer in list(self.known_peers.items()):
if time.time() - known_peer["found"] > 20 * 60:
del(self.known_peers[peer_id])
self.log.debug("Timeout, removing from known_peers: %s" % peer_id)
self.broadcast({"cmd": "discoverRequest", "params": {}}, port=self.listen_port)
def actionDiscoverRequest(self, sender, params):
back = {
"cmd": "discoverResponse",
"params": {
"sites_changed": self.server.site_manager.sites_changed
}
}
if sender["peer_id"] not in self.known_peers:
self.known_peers[sender["peer_id"]] = {"added": time.time(), "sites_changed": 0, "updated": 0, "found": time.time()}
self.log.debug("Got discover request from unknown peer %s (%s), time to refresh known peers" % (sender["ip"], sender["peer_id"]))
gevent.spawn_later(1.0, self.discover) # Let the response arrive first to the requester
return back
def actionDiscoverResponse(self, sender, params):
if sender["peer_id"] in self.known_peers:
self.known_peers[sender["peer_id"]]["found"] = time.time()
if params["sites_changed"] != self.known_peers.get(sender["peer_id"], {}).get("sites_changed"):
# Peer's site list changed, request the list of new sites
return {"cmd": "siteListRequest"}
else:
# Peer's site list is the same
for site in self.server.sites.values():
peer = site.peers.get("%s:%s" % (sender["ip"], sender["port"]))
if peer:
peer.found("local")
def actionSiteListRequest(self, sender, params):
back = []
sites = list(self.server.sites.values())
# Split addresses into groups of 100 to avoid the UDP size limit
site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)]
for site_group in site_groups:
res = {}
res["sites_changed"] = self.server.site_manager.sites_changed
res["sites"] = [site.address_hash for site in site_group]
back.append({"cmd": "siteListResponse", "params": res})
return back
def actionSiteListResponse(self, sender, params):
s = time.time()
peer_sites = set(params["sites"])
num_found = 0
added_sites = []
for site in self.server.sites.values():
if site.address_hash in peer_sites:
added = site.addPeer(sender["ip"], sender["port"], source="local")
num_found += 1
if added:
site.worker_manager.onPeers()
site.updateWebsocket(peers_added=1)
added_sites.append(site)
# Save sites changed value to avoid unnecessary site list download
if sender["peer_id"] not in self.known_peers:
self.known_peers[sender["peer_id"]] = {"added": time.time()}
self.known_peers[sender["peer_id"]]["sites_changed"] = params["sites_changed"]
self.known_peers[sender["peer_id"]]["updated"] = time.time()
self.known_peers[sender["peer_id"]]["found"] = time.time()
self.log.debug(
"Tracker result: Discover from %s response parsed in %.3fs, found: %s added: %s of %s" %
(sender["ip"], time.time() - s, num_found, added_sites, len(peer_sites))
)
@PluginManager.registerTo("FileServer")
class FileServerPlugin(object):
def __init__(self, *args, **kwargs):
super(FileServerPlugin, self).__init__(*args, **kwargs)
if config.broadcast_port and config.tor != "always" and not config.disable_udp:
self.local_announcer = LocalAnnouncer(self, config.broadcast_port)
else:
self.local_announcer = None
def start(self, *args, **kwargs):
if self.local_announcer:
gevent.spawn(self.local_announcer.start)
return super(FileServerPlugin, self).start(*args, **kwargs)
def stop(self):
if self.local_announcer:
self.local_announcer.stop()
res = super(FileServerPlugin, self).stop()
return res
@PluginManager.registerTo("ConfigPlugin")
class ConfigPlugin(object):
def createArguments(self):
group = self.parser.add_argument_group("AnnounceLocal plugin")
group.add_argument('--broadcast_port', help='UDP broadcasting port for local peer discovery', default=1544, type=int, metavar='port')
return super(ConfigPlugin, self).createArguments()

plugins/AnnounceLocal/BroadcastServer.py (deleted)

@@ -1,139 +0,0 @@
import socket
import logging
import time
from contextlib import closing
from Debug import Debug
from util import UpnpPunch
from util import Msgpack
class BroadcastServer(object):
def __init__(self, service_name, listen_port=1544, listen_ip=''):
self.log = logging.getLogger("BroadcastServer")
self.listen_port = listen_port
self.listen_ip = listen_ip
self.running = False
self.sock = None
self.sender_info = {"service": service_name}
def createBroadcastSocket(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, 'SO_REUSEPORT'):
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except Exception as err:
self.log.warning("Error setting SO_REUSEPORT: %s" % err)
binded = False
for retry in range(3):
try:
sock.bind((self.listen_ip, self.listen_port))
binded = True
break
except Exception as err:
self.log.error(
"Socket bind to %s:%s error: %s, retry #%s" %
(self.listen_ip, self.listen_port, Debug.formatException(err), retry)
)
time.sleep(retry)
if binded:
return sock
else:
return False
def start(self): # Listens for discover requests
self.sock = self.createBroadcastSocket()
if not self.sock:
self.log.error("Unable to listen on port %s" % self.listen_port)
return
self.log.debug("Started on port %s" % self.listen_port)
self.running = True
while self.running:
try:
data, addr = self.sock.recvfrom(8192)
except Exception as err:
if self.running:
self.log.error("Listener receive error: %s" % err)
continue
if not self.running:
break
try:
message = Msgpack.unpack(data)
response_addr, message = self.handleMessage(addr, message)
if message:
self.send(response_addr, message)
except Exception as err:
self.log.error("Handlemessage error: %s" % Debug.formatException(err))
self.log.debug("Stopped listening on port %s" % self.listen_port)
def stop(self):
self.log.debug("Stopping, socket: %s" % self.sock)
self.running = False
if self.sock:
self.sock.close()
def send(self, addr, message):
if type(message) is not list:
message = [message]
for message_part in message:
message_part["sender"] = self.sender_info
self.log.debug("Send to %s: %s" % (addr, message_part["cmd"]))
with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock:
sock.sendto(Msgpack.pack(message_part), addr)
def getMyIps(self):
return UpnpPunch._get_local_ips()
def broadcast(self, message, port=None):
if not port:
port = self.listen_port
my_ips = self.getMyIps()
addr = ("255.255.255.255", port)
message["sender"] = self.sender_info
self.log.debug("Broadcast using ips %s on port %s: %s" % (my_ips, port, message["cmd"]))
for my_ip in my_ips:
try:
with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
sock.bind((my_ip, 0))
sock.sendto(Msgpack.pack(message), addr)
except Exception as err:
self.log.warning("Error sending broadcast using ip %s: %s" % (my_ip, err))
def handleMessage(self, addr, message):
self.log.debug("Got from %s: %s" % (addr, message["cmd"]))
cmd = message["cmd"]
params = message.get("params", {})
sender = message["sender"]
sender["ip"] = addr[0]
func_name = "action" + cmd[0].upper() + cmd[1:]
func = getattr(self, func_name, None)
if sender["service"] != "zeronet" or sender["peer_id"] == self.sender_info["peer_id"]:
# Skip messages not for us or sent by us
message = None
elif func:
message = func(sender, params)
else:
self.log.debug("Unknown cmd: %s" % cmd)
message = None
return (sender["ip"], sender["broadcast_port"]), message

plugins/AnnounceLocal/Test/TestAnnounce.py (deleted)

@@ -1,113 +0,0 @@
import time
import copy
import gevent
import pytest
import mock
from AnnounceLocal import AnnounceLocalPlugin
from File import FileServer
from Test import Spy
@pytest.fixture
def announcer(file_server, site):
file_server.sites[site.address] = site
announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server, listen_port=1100)
file_server.local_announcer = announcer
announcer.listen_port = 1100
announcer.sender_info["broadcast_port"] = 1100
announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"])
announcer.discover = mock.MagicMock(return_value=False) # Don't send discover requests automatically
gevent.spawn(announcer.start)
time.sleep(0.5)
assert file_server.local_announcer.running
return file_server.local_announcer
@pytest.fixture
def announcer_remote(request, site_temp):
file_server_remote = FileServer("127.0.0.1", 1545)
file_server_remote.sites[site_temp.address] = site_temp
announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server_remote, listen_port=1101)
file_server_remote.local_announcer = announcer
announcer.listen_port = 1101
announcer.sender_info["broadcast_port"] = 1101
announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"])
announcer.discover = mock.MagicMock(return_value=False) # Don't send discover requests automatically
gevent.spawn(announcer.start)
time.sleep(0.5)
assert file_server_remote.local_announcer.running
def cleanup():
file_server_remote.stop()
request.addfinalizer(cleanup)
return file_server_remote.local_announcer
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestAnnounce:
def testSenderInfo(self, announcer):
sender_info = announcer.sender_info
assert sender_info["port"] > 0
assert len(sender_info["peer_id"]) == 20
assert sender_info["rev"] > 0
def testIgnoreSelfMessages(self, announcer):
# No response to messages that have the same peer_id as the server
assert not announcer.handleMessage(("0.0.0.0", 123), {"cmd": "discoverRequest", "sender": announcer.sender_info, "params": {}})[1]
# Response to messages with different peer id
sender_info = copy.copy(announcer.sender_info)
sender_info["peer_id"] += "-"
addr, res = announcer.handleMessage(("0.0.0.0", 123), {"cmd": "discoverRequest", "sender": sender_info, "params": {}})
assert res["params"]["sites_changed"] > 0
def testDiscoverRequest(self, announcer, announcer_remote):
assert len(announcer_remote.known_peers) == 0
with Spy.Spy(announcer_remote, "handleMessage") as responses:
announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port)
time.sleep(0.1)
response_cmds = [response[1]["cmd"] for response in responses]
assert response_cmds == ["discoverResponse", "siteListResponse"]
assert len(responses[-1][1]["params"]["sites"]) == 1
# It should only request siteList if the sites_changed value differs from the last response
with Spy.Spy(announcer_remote, "handleMessage") as responses:
announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port)
time.sleep(0.1)
response_cmds = [response[1]["cmd"] for response in responses]
assert response_cmds == ["discoverResponse"]
def testPeerDiscover(self, announcer, announcer_remote, site):
assert announcer.server.peer_id != announcer_remote.server.peer_id
assert len(list(announcer.server.sites.values())[0].peers) == 0
announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port)
time.sleep(0.1)
assert len(list(announcer.server.sites.values())[0].peers) == 1
def testRecentPeerList(self, announcer, announcer_remote, site):
assert len(site.peers_recent) == 0
assert len(site.peers) == 0
with Spy.Spy(announcer, "handleMessage") as responses:
announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
time.sleep(0.1)
assert [response[1]["cmd"] for response in responses] == ["discoverResponse", "siteListResponse"]
assert len(site.peers_recent) == 1
assert len(site.peers) == 1
# It should update the peer without a siteListResponse
last_time_found = list(site.peers.values())[0].time_found
site.peers_recent.clear()
with Spy.Spy(announcer, "handleMessage") as responses:
announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
time.sleep(0.1)
assert [response[1]["cmd"] for response in responses] == ["discoverResponse"]
assert len(site.peers_recent) == 1
assert list(site.peers.values())[0].time_found > last_time_found


@ -1,4 +0,0 @@
from src.Test.conftest import *
from Config import config
config.broadcast_port = 0


@ -1,5 +0,0 @@
[pytest]
python_files = Test*.py
addopts = -rsxX -v --durations=6
markers =
webtest: mark a test as a webtest.


@ -1 +0,0 @@
from . import AnnounceLocalPlugin


@ -1,5 +0,0 @@
{
"name": "AnnounceLocal",
"description": "Discover LAN clients using UDP broadcasting.",
"default": "enabled"
}


@ -1,190 +0,0 @@
import time
import os
import logging
import json
import atexit
import gevent
from Config import config
from Plugin import PluginManager
from util import helper
class TrackerStorage(object):
def __init__(self):
self.log = logging.getLogger("TrackerStorage")
self.file_path = "%s/trackers.json" % config.data_dir
self.load()
self.time_discover = 0.0
atexit.register(self.save)
def getDefaultFile(self):
return {"shared": {}}
def onTrackerFound(self, tracker_address, type="shared", my=False):
if not tracker_address.startswith("zero://"):
return False
trackers = self.getTrackers()
added = False
if tracker_address not in trackers:
trackers[tracker_address] = {
"time_added": time.time(),
"time_success": 0,
"latency": 99.0,
"num_error": 0,
"my": False
}
self.log.debug("New tracker found: %s" % tracker_address)
added = True
trackers[tracker_address]["time_found"] = time.time()
trackers[tracker_address]["my"] = my
return added
def onTrackerSuccess(self, tracker_address, latency):
trackers = self.getTrackers()
if tracker_address not in trackers:
return False
trackers[tracker_address]["latency"] = latency
trackers[tracker_address]["time_success"] = time.time()
trackers[tracker_address]["num_error"] = 0
def onTrackerError(self, tracker_address):
trackers = self.getTrackers()
if tracker_address not in trackers:
return False
trackers[tracker_address]["time_error"] = time.time()
trackers[tracker_address]["num_error"] += 1
if len(self.getWorkingTrackers()) >= config.working_shared_trackers_limit:
error_limit = 5
else:
error_limit = 30
if trackers[tracker_address]["num_error"] > error_limit and trackers[tracker_address]["time_success"] < time.time() - 60 * 60:
self.log.debug("Tracker %s looks down, removing." % tracker_address)
del trackers[tracker_address]
def getTrackers(self, type="shared"):
return self.file_content.setdefault(type, {})
def getWorkingTrackers(self, type="shared"):
trackers = {
key: tracker for key, tracker in self.getTrackers(type).items()
if tracker["time_success"] > time.time() - 60 * 60
}
return trackers
def getFileContent(self):
if not os.path.isfile(self.file_path):
open(self.file_path, "w").write("{}")
return self.getDefaultFile()
try:
return json.load(open(self.file_path))
except Exception as err:
self.log.error("Error loading trackers list: %s" % err)
return self.getDefaultFile()
def load(self):
self.file_content = self.getFileContent()
trackers = self.getTrackers()
self.log.debug("Loaded %s shared trackers" % len(trackers))
for address, tracker in list(trackers.items()):
tracker["num_error"] = 0
if not address.startswith("zero://"):
del trackers[address]
def save(self):
s = time.time()
helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True).encode("utf8"))
self.log.debug("Saved in %.3fs" % (time.time() - s))
def discoverTrackers(self, peers):
if len(self.getWorkingTrackers()) > config.working_shared_trackers_limit:
return False
s = time.time()
num_success = 0
for peer in peers:
if peer.connection and peer.connection.handshake.get("rev", 0) < 3560:
continue # Not supported
res = peer.request("getTrackers")
if not res or "error" in res:
continue
num_success += 1
for tracker_address in res["trackers"]:
if type(tracker_address) is bytes: # Backward compatibility
tracker_address = tracker_address.decode("utf8")
added = self.onTrackerFound(tracker_address)
if added: # Only add one tracker from one source
break
if not num_success and len(peers) < 20:
self.time_discover = 0.0
if num_success:
self.save()
self.log.debug("Trackers discovered from %s/%s peers in %.3fs" % (num_success, len(peers), time.time() - s))
if "tracker_storage" not in locals():
tracker_storage = TrackerStorage()
@PluginManager.registerTo("SiteAnnouncer")
class SiteAnnouncerPlugin(object):
def getTrackers(self):
if tracker_storage.time_discover < time.time() - 5 * 60:
tracker_storage.time_discover = time.time()
gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers())
trackers = super(SiteAnnouncerPlugin, self).getTrackers()
shared_trackers = list(tracker_storage.getTrackers("shared").keys())
if shared_trackers:
return trackers + shared_trackers
else:
return trackers
def announceTracker(self, tracker, *args, **kwargs):
res = super(SiteAnnouncerPlugin, self).announceTracker(tracker, *args, **kwargs)
if res:
latency = res
tracker_storage.onTrackerSuccess(tracker, latency)
elif res is False:
tracker_storage.onTrackerError(tracker)
return res
@PluginManager.registerTo("FileRequest")
class FileRequestPlugin(object):
def actionGetTrackers(self, params):
shared_trackers = list(tracker_storage.getWorkingTrackers("shared").keys())
self.response({"trackers": shared_trackers})
@PluginManager.registerTo("FileServer")
class FileServerPlugin(object):
def portCheck(self, *args, **kwargs):
res = super(FileServerPlugin, self).portCheck(*args, **kwargs)
if res and not config.tor == "always" and "Bootstrapper" in PluginManager.plugin_manager.plugin_names:
for ip in self.ip_external_list:
my_tracker_address = "zero://%s:%s" % (ip, config.fileserver_port)
tracker_storage.onTrackerFound(my_tracker_address, my=True)
return res
@PluginManager.registerTo("ConfigPlugin")
class ConfigPlugin(object):
def createArguments(self):
group = self.parser.add_argument_group("AnnounceShare plugin")
group.add_argument('--working_shared_trackers_limit', help='Stop discovering new shared trackers after this number of shared trackers is reached', default=5, type=int, metavar='limit')
return super(ConfigPlugin, self).createArguments()


@ -1,24 +0,0 @@
import pytest
from AnnounceShare import AnnounceSharePlugin
from Peer import Peer
from Config import config
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestAnnounceShare:
def testAnnounceList(self, file_server):
open("%s/trackers.json" % config.data_dir, "w").write("{}")
tracker_storage = AnnounceSharePlugin.tracker_storage
tracker_storage.load()
peer = Peer(file_server.ip, 1544, connection_server=file_server)
assert peer.request("getTrackers")["trackers"] == []
tracker_storage.onTrackerFound("zero://%s:15441" % file_server.ip)
assert peer.request("getTrackers")["trackers"] == []
# It needs at least one successful announce before being shared with other peers
tracker_storage.onTrackerSuccess("zero://%s:15441" % file_server.ip, 1.0)
assert peer.request("getTrackers")["trackers"] == ["zero://%s:15441" % file_server.ip]


@ -1,3 +0,0 @@
from src.Test.conftest import *
from Config import config


@ -1,5 +0,0 @@
[pytest]
python_files = Test*.py
addopts = -rsxX -v --durations=6
markers =
webtest: mark a test as a webtest.


@ -1 +0,0 @@
from . import AnnounceSharePlugin


@ -1,5 +0,0 @@
{
"name": "AnnounceShare",
"description": "Share possible trackers between clients.",
"default": "enabled"
}


@ -1,140 +0,0 @@
import time
import itertools
from Plugin import PluginManager
from util import helper
from Crypt import CryptRsa
allow_reload = False # No source reload supported in this plugin
time_full_announced = {} # Tracker address: Last announced all site to tracker
connection_pool = {} # Tracker address: Peer object
# We can only import plugin host classes after the plugins are loaded
@PluginManager.afterLoad
def importHostClasses():
global Peer, AnnounceError
from Peer import Peer
from Site.SiteAnnouncer import AnnounceError
# Process the result received from the tracker
def processPeerRes(tracker_address, site, peers):
added = 0
# Onion
found_onion = 0
for packed_address in peers["onion"]:
found_onion += 1
peer_onion, peer_port = helper.unpackOnionAddress(packed_address)
if site.addPeer(peer_onion, peer_port, source="tracker"):
added += 1
# Ip4
found_ipv4 = 0
peers_normal = itertools.chain(peers.get("ip4", []), peers.get("ipv4", []), peers.get("ipv6", []))
for packed_address in peers_normal:
found_ipv4 += 1
peer_ip, peer_port = helper.unpackAddress(packed_address)
if site.addPeer(peer_ip, peer_port, source="tracker"):
added += 1
if added:
site.worker_manager.onPeers()
site.updateWebsocket(peers_added=added)
return added
@PluginManager.registerTo("SiteAnnouncer")
class SiteAnnouncerPlugin(object):
def getTrackerHandler(self, protocol):
if protocol == "zero":
return self.announceTrackerZero
else:
return super(SiteAnnouncerPlugin, self).getTrackerHandler(protocol)
def announceTrackerZero(self, tracker_address, mode="start", num_want=10):
global time_full_announced
s = time.time()
need_types = ["ip4"] # ip4 for backward compatibility reasons
need_types += self.site.connection_server.supported_ip_types
if self.site.connection_server.tor_manager.enabled:
need_types.append("onion")
if mode == "start" or mode == "more": # Single: Announce only this site
sites = [self.site]
full_announce = False
else: # Multi: Announce all currently served sites
full_announce = True
if time.time() - time_full_announced.get(tracker_address, 0) < 60 * 15: # Don't reannounce all sites within a short time
return None
time_full_announced[tracker_address] = time.time()
from Site import SiteManager
sites = [site for site in SiteManager.site_manager.sites.values() if site.isServing()]
# Create request
add_types = self.getOpenedServiceTypes()
request = {
"hashes": [], "onions": [], "port": self.fileserver_port, "need_types": need_types, "need_num": 20, "add": add_types
}
for site in sites:
if "onion" in add_types:
onion = self.site.connection_server.tor_manager.getOnion(site.address)
request["onions"].append(onion)
request["hashes"].append(site.address_hash)
# Tracker can remove sites that we don't announce
if full_announce:
request["delete"] = True
# Send the request to the tracker
tracker_peer = connection_pool.get(tracker_address) # Re-use tracker connection if possible
if not tracker_peer:
tracker_ip, tracker_port = tracker_address.rsplit(":", 1)
tracker_peer = Peer(str(tracker_ip), int(tracker_port), connection_server=self.site.connection_server)
tracker_peer.is_tracker_connection = True
connection_pool[tracker_address] = tracker_peer
res = tracker_peer.request("announce", request)
if not res or "peers" not in res:
if full_announce:
time_full_announced[tracker_address] = 0
raise AnnounceError("Invalid response: %s" % res)
# Add peers from response to site
site_index = 0
peers_added = 0
for site_res in res["peers"]:
site = sites[site_index]
peers_added += processPeerRes(tracker_address, site, site_res)
site_index += 1
# Check if we need to sign to prove ownership of the onion addresses
if "onion_sign_this" in res:
self.site.log.debug("Signing %s for %s to add %s onions" % (res["onion_sign_this"], tracker_address, len(sites)))
request["onion_signs"] = {}
request["onion_sign_this"] = res["onion_sign_this"]
request["need_num"] = 0
for site in sites:
onion = self.site.connection_server.tor_manager.getOnion(site.address)
publickey = self.site.connection_server.tor_manager.getPublickey(onion)
if publickey not in request["onion_signs"]:
sign = CryptRsa.sign(res["onion_sign_this"].encode("utf8"), self.site.connection_server.tor_manager.getPrivatekey(onion))
request["onion_signs"][publickey] = sign
res = tracker_peer.request("announce", request)
if not res or "onion_sign_this" in res:
if full_announce:
time_full_announced[tracker_address] = 0
raise AnnounceError("Announce onion address to failed: %s" % res)
if full_announce:
tracker_peer.remove() # Close the connection, we won't need it in the next 5 minutes
self.site.log.debug(
"Tracker announce result: zero://%s (sites: %s, new peers: %s, add: %s, mode: %s) in %.3fs" %
(tracker_address, site_index, peers_added, add_types, mode, time.time() - s)
)
return True
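
# Sketch of the packed IPv4 peer-address format consumed by processPeerRes()
# above through helper.unpackAddress (assumed layout: 4-byte address followed
# by a 2-byte port, mirroring ZeroNet's util.helper; pack_address and
# unpack_address are hypothetical stand-in names).
import socket
import struct

def pack_address(ip, port):
    return socket.inet_aton(ip) + struct.pack("H", port)

def unpack_address(packed):
    return socket.inet_ntoa(packed[:4]), struct.unpack("H", packed[4:])[0]

if __name__ == "__main__":
    assert unpack_address(pack_address("1.2.3.4", 15441)) == ("1.2.3.4", 15441)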


@ -1 +0,0 @@
from . import AnnounceZeroPlugin


@ -1,5 +0,0 @@
{
"name": "AnnounceZero",
"description": "Announce using ZeroNet protocol.",
"default": "enabled"
}


@ -1,143 +0,0 @@
import os
import json
import contextlib
import time
from Plugin import PluginManager
from Config import config
@PluginManager.registerTo("Actions")
class ActionsPlugin:
def getBenchmarkTests(self, online=False):
tests = super().getBenchmarkTests(online)
tests.extend([
{"func": self.testDbConnect, "num": 10, "time_standard": 0.27},
{"func": self.testDbInsert, "num": 10, "time_standard": 0.91},
{"func": self.testDbInsertMultiuser, "num": 1, "time_standard": 0.57},
{"func": self.testDbQueryIndexed, "num": 1000, "time_standard": 0.84},
{"func": self.testDbQueryNotIndexed, "num": 1000, "time_standard": 1.30}
])
return tests
@contextlib.contextmanager
def getTestDb(self):
from Db import Db
path = "%s/benchmark.db" % config.data_dir
if os.path.isfile(path):
os.unlink(path)
schema = {
"db_name": "TestDb",
"db_file": path,
"maps": {
".*": {
"to_table": {
"test": "test"
}
}
},
"tables": {
"test": {
"cols": [
["test_id", "INTEGER"],
["title", "TEXT"],
["json_id", "INTEGER REFERENCES json (json_id)"]
],
"indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
"schema_changed": 1426195822
}
}
}
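# The ".*" entry in "maps" above routes the "test" key of any json file passed
# to db.updateJson() into the `test` table; the insert benchmarks below rely on it.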
db = Db.Db(schema, path)
yield db
db.close()
if os.path.isfile(path):
os.unlink(path)
def testDbConnect(self, num_run=1):
import sqlite3
for i in range(num_run):
with self.getTestDb() as db:
db.checkTables()
yield "."
yield "(SQLite version: %s, API: %s)" % (sqlite3.sqlite_version, sqlite3.version)
def testDbInsert(self, num_run=1):
yield "x 1000 lines "
for u in range(num_run):
with self.getTestDb() as db:
db.checkTables()
data = {"test": []}
for i in range(1000): # 1000 lines of data
data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
db.updateJson("%s/test_%s.json" % (config.data_dir, u))
os.unlink("%s/test_%s.json" % (config.data_dir, u))
assert db.execute("SELECT COUNT(*) FROM test").fetchone()[0] == 1000
yield "."
def fillTestDb(self, db):
db.checkTables()
cur = db.getCursor()
cur.logging = False
for u in range(100, 200): # 100 users
data = {"test": []}
for i in range(100): # 100 lines of data each
data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
db.updateJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
os.unlink("%s/test_%s.json" % (config.data_dir, u))
if u % 10 == 0:
yield "."
def testDbInsertMultiuser(self, num_run=1):
yield "x 100 users x 100 lines "
for u in range(num_run):
with self.getTestDb() as db:
for progress in self.fillTestDb(db):
yield progress
num_rows = db.execute("SELECT COUNT(*) FROM test").fetchone()[0]
assert num_rows == 10000, "%s != 10000" % num_rows
def testDbQueryIndexed(self, num_run=1):
s = time.time()
with self.getTestDb() as db:
for progress in self.fillTestDb(db):
pass
yield " (Db warmup done in %.3fs) " % (time.time() - s)
found_total = 0
for i in range(num_run): # 1000x by test_id
found = 0
res = db.execute("SELECT * FROM test WHERE test_id = %s" % (i % 100))
for row in res:
found_total += 1
found += 1
del res
yield "."
assert found == 100, "%s != 100 (i: %s)" % (found, i)
yield "Found: %s" % found_total
def testDbQueryNotIndexed(self, num_run=1):
s = time.time()
with self.getTestDb() as db:
for progress in self.fillTestDb(db):
pass
yield " (Db warmup done in %.3fs) " % (time.time() - s)
found_total = 0
for i in range(num_run): # 1000x by test_id
found = 0
res = db.execute("SELECT * FROM test WHERE json_id = %s" % i)
for row in res:
found_total += 1
found += 1
yield "."
del res
if i == 0 or i > 100:
assert found == 0, "%s != 0 (i: %s)" % (found, i)
else:
assert found == 100, "%s != 100 (i: %s)" % (found, i)
yield "Found: %s" % found_total


@ -1,183 +0,0 @@
import os
import io
from collections import OrderedDict
from Plugin import PluginManager
from Config import config
from util import Msgpack
@PluginManager.registerTo("Actions")
class ActionsPlugin:
def createZipFile(self, path):
import zipfile
test_data = b"Test" * 1024
file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
with zipfile.ZipFile(path, 'w') as archive:
for y in range(100):
zip_info = zipfile.ZipInfo(file_name % y, (1980, 1, 1, 0, 0, 0))
zip_info.compress_type = zipfile.ZIP_DEFLATED
zip_info.create_system = 3
zip_info.flag_bits = 0
zip_info.external_attr = 25165824
archive.writestr(zip_info, test_data)
def testPackZip(self, num_run=1):
"""
Test zip file creating
"""
yield "x 100 x 5KB "
from Crypt import CryptHash
zip_path = '%s/test.zip' % config.data_dir
for i in range(num_run):
self.createZipFile(zip_path)
yield "."
archive_size = os.path.getsize(zip_path) / 1024
yield "(Generated file size: %.2fkB)" % archive_size
hash = CryptHash.sha512sum(open(zip_path, "rb"))
valid = "cb32fb43783a1c06a2170a6bc5bb228a032b67ff7a1fd7a5efb9b467b400f553"
assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)
os.unlink(zip_path)
def testUnpackZip(self, num_run=1):
"""
Test zip file reading
"""
yield "x 100 x 5KB "
import zipfile
zip_path = '%s/test.zip' % config.data_dir
test_data = b"Test" * 1024
file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91".decode("utf8")
self.createZipFile(zip_path)
for i in range(num_run):
with zipfile.ZipFile(zip_path) as archive:
for f in archive.filelist:
assert f.filename.startswith(file_name), "Invalid filename: %s != %s" % (f.filename, file_name)
data = archive.open(f.filename).read()
assert data == test_data, "Invalid data: %s..." % data[0:30]
yield "."
os.unlink(zip_path)
def createArchiveFile(self, path, archive_type="gz"):
import tarfile
import gzip
# Monkey patch _write_gzip_header to use a fixed date in order to keep the hash independent of the current time
def nodate_write_gzip_header(self):
self._write_mtime = 0
original_write_gzip_header(self)
test_data_io = io.BytesIO(b"Test" * 1024)
file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
original_write_gzip_header = gzip.GzipFile._write_gzip_header
gzip.GzipFile._write_gzip_header = nodate_write_gzip_header
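# Note: gzip stores a 4-byte modification time in its header (RFC 1952), so
# archives built from identical input would hash differently on every run;
# pinning the mtime to 0 keeps the expected hashes below stable.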
with tarfile.open(path, 'w:%s' % archive_type) as archive:
for y in range(100):
test_data_io.seek(0)
tar_info = tarfile.TarInfo(file_name % y)
tar_info.size = 4 * 1024
archive.addfile(tar_info, test_data_io)
def testPackArchive(self, num_run=1, archive_type="gz"):
"""
Test creating tar archive files
"""
yield "x 100 x 5KB "
from Crypt import CryptHash
hash_valid_db = {
"gz": "92caec5121a31709cbbc8c11b0939758e670b055bbbe84f9beb3e781dfde710f",
"bz2": "b613f41e6ee947c8b9b589d3e8fa66f3e28f63be23f4faf015e2f01b5c0b032d",
"xz": "ae43892581d770959c8d993daffab25fd74490b7cf9fafc7aaee746f69895bcb",
}
archive_path = '%s/test.tar.%s' % (config.data_dir, archive_type)
for i in range(num_run):
self.createArchiveFile(archive_path, archive_type=archive_type)
yield "."
archive_size = os.path.getsize(archive_path) / 1024
yield "(Generated file size: %.2fkB)" % archive_size
hash = CryptHash.sha512sum(open("%s/test.tar.%s" % (config.data_dir, archive_type), "rb"))
valid = hash_valid_db[archive_type]
assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)
if os.path.isfile(archive_path):
os.unlink(archive_path)
def testUnpackArchive(self, num_run=1, archive_type="gz"):
"""
Test reading tar archive files
"""
yield "x 100 x 5KB "
import tarfile
test_data = b"Test" * 1024
file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
archive_path = '%s/test.tar.%s' % (config.data_dir, archive_type)
self.createArchiveFile(archive_path, archive_type=archive_type)
for i in range(num_run):
with tarfile.open(archive_path, 'r:%s' % archive_type) as archive:
for y in range(100):
assert archive.extractfile(file_name % y).read() == test_data
yield "."
if os.path.isfile(archive_path):
os.unlink(archive_path)
def testPackMsgpack(self, num_run=1):
"""
Test msgpack encoding
"""
yield "x 100 x 5KB "
binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
data = OrderedDict(
sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
)
data_packed_valid = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00'
data_packed_valid += b'hello' * 1024
for y in range(num_run):
for i in range(100):
data_packed = Msgpack.pack(data)
yield "."
assert data_packed == data_packed_valid, "%s<br>!=<br>%s" % (repr(data_packed), repr(data_packed_valid))
def testUnpackMsgpack(self, num_run=1):
"""
Test msgpack decoding
"""
yield "x 5KB "
binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
data = OrderedDict(
sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
)
data_packed = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00'
data_packed += b'hello' * 1024
for y in range(num_run):
data_unpacked = Msgpack.unpack(data_packed, decode=False)
yield "."
assert data_unpacked == data, "%s<br>!=<br>%s" % (data_unpacked, data)
def testUnpackMsgpackStreaming(self, num_run=1, fallback=False):
"""
Test streaming msgpack decoding
"""
yield "x 1000 x 5KB "
binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
data = OrderedDict(
sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
)
data_packed = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00'
data_packed += b'hello' * 1024
for i in range(num_run):
unpacker = Msgpack.getUnpacker(decode=False, fallback=fallback)
for y in range(1000):
unpacker.feed(data_packed)
for data_unpacked in unpacker:
pass
yield "."
assert data == data_unpacked, "%s != %s" % (data_unpacked, data)
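
# The feed/iterate streaming pattern exercised above, shown with the plain
# `msgpack` package (assumption: util.Msgpack wraps msgpack-python; this
# sketch calls the public msgpack API directly).
import msgpack

if __name__ == "__main__":
    unpacker = msgpack.Unpacker(raw=True)
    packed = msgpack.packb({b"cmd": b"ping"}) + msgpack.packb({b"cmd": b"pong"})
    unpacker.feed(packed)  # bytes may arrive in arbitrary-sized chunks
    for obj in unpacker:  # yields one object per completely received message
        print(obj)  # {b'cmd': b'ping'} then {b'cmd': b'pong'}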


@ -1,428 +0,0 @@
import os
import time
import io
import math
import hashlib
import re
import sys
from Config import config
from Crypt import CryptHash
from Plugin import PluginManager
from Debug import Debug
from util import helper
plugin_dir = os.path.dirname(__file__)
benchmark_key = None
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
@helper.encodeResponse
def actionBenchmark(self):
global benchmark_key
script_nonce = self.getScriptNonce()
if not benchmark_key:
benchmark_key = CryptHash.random(encoding="base64")
self.sendHeader(script_nonce=script_nonce)
if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
yield "This function is disabled on this proxy"
return
data = self.render(
plugin_dir + "/media/benchmark.html",
script_nonce=script_nonce,
benchmark_key=benchmark_key,
filter=re.sub("[^A-Za-z0-9]", "", self.get.get("filter", ""))
)
yield data
@helper.encodeResponse
def actionBenchmarkResult(self):
global benchmark_key
if self.get.get("benchmark_key", "") != benchmark_key:
return self.error403("Invalid benchmark key")
self.sendHeader(content_type="text/plain", noscript=True)
if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
yield "This function is disabled on this proxy"
return
yield " " * 1024 # Head (required for streaming)
import main
s = time.time()
for part in main.actions.testBenchmark(filter=self.get.get("filter", "")):
yield part
yield "\n - Total time: %.3fs" % (time.time() - s)
@PluginManager.registerTo("Actions")
class ActionsPlugin:
def getMultiplerTitle(self, multipler):
if multipler < 0.3:
multipler_title = "Sloooow"
elif multipler < 0.6:
multipler_title = "Ehh"
elif multipler < 0.8:
multipler_title = "Goodish"
elif multipler < 1.2:
multipler_title = "OK"
elif multipler < 1.7:
multipler_title = "Fine"
elif multipler < 2.5:
multipler_title = "Fast"
elif multipler < 3.5:
multipler_title = "WOW"
else:
multipler_title = "Insane!!"
return multipler_title
def formatResult(self, taken, standard):
if not standard:
return " Done in %.3fs" % taken
if taken > 0:
multipler = standard / taken
else:
multipler = 99
multipler_title = self.getMultiplerTitle(multipler)
return " Done in %.3fs = %s (%.2fx)" % (taken, multipler_title, multipler)
def getBenchmarkTests(self, online=False):
if hasattr(super(), "getBenchmarkTests"):
tests = super().getBenchmarkTests(online)
else:
tests = []
tests.extend([
{"func": self.testHdPrivatekey, "num": 50, "time_standard": 0.57},
{"func": self.testSign, "num": 20, "time_standard": 0.46},
{"func": self.testVerify, "kwargs": {"lib_verify": "sslcrypto_fallback"}, "num": 20, "time_standard": 0.38},
{"func": self.testVerify, "kwargs": {"lib_verify": "sslcrypto"}, "num": 200, "time_standard": 0.30},
{"func": self.testVerify, "kwargs": {"lib_verify": "libsecp256k1"}, "num": 200, "time_standard": 0.10},
{"func": self.testPackMsgpack, "num": 100, "time_standard": 0.35},
{"func": self.testUnpackMsgpackStreaming, "kwargs": {"fallback": False}, "num": 100, "time_standard": 0.35},
{"func": self.testUnpackMsgpackStreaming, "kwargs": {"fallback": True}, "num": 10, "time_standard": 0.5},
{"func": self.testPackZip, "num": 5, "time_standard": 0.065},
{"func": self.testPackArchive, "kwargs": {"archive_type": "gz"}, "num": 5, "time_standard": 0.08},
{"func": self.testPackArchive, "kwargs": {"archive_type": "bz2"}, "num": 5, "time_standard": 0.68},
{"func": self.testPackArchive, "kwargs": {"archive_type": "xz"}, "num": 5, "time_standard": 0.47},
{"func": self.testUnpackZip, "num": 20, "time_standard": 0.25},
{"func": self.testUnpackArchive, "kwargs": {"archive_type": "gz"}, "num": 20, "time_standard": 0.28},
{"func": self.testUnpackArchive, "kwargs": {"archive_type": "bz2"}, "num": 20, "time_standard": 0.83},
{"func": self.testUnpackArchive, "kwargs": {"archive_type": "xz"}, "num": 20, "time_standard": 0.38},
{"func": self.testCryptHash, "kwargs": {"hash_type": "sha256"}, "num": 10, "time_standard": 0.50},
{"func": self.testCryptHash, "kwargs": {"hash_type": "sha512"}, "num": 10, "time_standard": 0.33},
{"func": self.testCryptHashlib, "kwargs": {"hash_type": "sha3_256"}, "num": 10, "time_standard": 0.33},
{"func": self.testCryptHashlib, "kwargs": {"hash_type": "sha3_512"}, "num": 10, "time_standard": 0.65},
{"func": self.testRandom, "num": 100, "time_standard": 0.08},
])
if online:
tests += [
{"func": self.testHttps, "num": 1, "time_standard": 2.1}
]
return tests
def testBenchmark(self, num_multipler=1, online=False, num_run=None, filter=None):
"""
Run benchmark on client functions
"""
tests = self.getBenchmarkTests(online=online)
if filter:
tests = [test for test in tests[:] if filter.lower() in test["func"].__name__.lower()]
yield "\n"
res = {}
res_time_taken = {}
multiplers = []
for test in tests:
s = time.time()
if num_run:
num_run_test = num_run
else:
num_run_test = math.ceil(test["num"] * num_multipler)
func = test["func"]
func_name = func.__name__
kwargs = test.get("kwargs", {})
key = "%s %s" % (func_name, kwargs)
if kwargs:
yield "* Running %s (%s) x %s " % (func_name, kwargs, num_run_test)
else:
yield "* Running %s x %s " % (func_name, num_run_test)
i = 0
try:
for progress in func(num_run_test, **kwargs):
i += 1
if num_run_test > 10:
should_print = i % (num_run_test / 10) == 0 or progress != "."
else:
should_print = True
if should_print:
if num_run_test == 1 and progress == ".":
progress = "..."
yield progress
time_taken = time.time() - s
if num_run:
time_standard = 0
else:
time_standard = test["time_standard"] * num_multipler
yield self.formatResult(time_taken, time_standard)
yield "\n"
res[key] = "ok"
res_time_taken[key] = time_taken
multiplers.append(time_standard / max(time_taken, 0.001))
except Exception as err:
res[key] = err
yield "Failed!\n! Error: %s\n\n" % Debug.formatException(err)
yield "\n== Result ==\n"
# Check verification speed
if "testVerify {'lib_verify': 'sslcrypto'}" in res_time_taken:
speed_order = ["sslcrypto_fallback", "sslcrypto", "libsecp256k1"]
time_taken = {}
for lib_verify in speed_order:
time_taken[lib_verify] = res_time_taken["testVerify {'lib_verify': '%s'}" % lib_verify]
time_taken["sslcrypto_fallback"] *= 10 # fallback benchmark only run 20 times instead of 200
speedup_sslcrypto = time_taken["sslcrypto_fallback"] / time_taken["sslcrypto"]
speedup_libsecp256k1 = time_taken["sslcrypto_fallback"] / time_taken["libsecp256k1"]
yield "\n* Verification speedup:\n"
yield " - OpenSSL: %.1fx (reference: 7.0x)\n" % speedup_sslcrypto
yield " - libsecp256k1: %.1fx (reference: 23.8x)\n" % speedup_libsecp256k1
if speedup_sslcrypto < 2:
res["Verification speed"] = "error: OpenSSL speedup low: %.1fx" % speedup_sslcrypto
if speedup_libsecp256k1 < speedup_sslcrypto:
res["Verification speed"] = "error: libsecp256k1 speedup low: %.1fx" % speedup_libsecp256k1
if not res:
yield "! No tests found"
if config.action == "test":
sys.exit(1)
else:
num_failed = len([res_key for res_key, res_val in res.items() if res_val != "ok"])
num_success = len([res_key for res_key, res_val in res.items() if res_val == "ok"])
yield "\n* Tests:\n"
yield " - Total: %s tests\n" % len(res)
yield " - Success: %s tests\n" % num_success
yield " - Failed: %s tests\n" % num_failed
if any(multiplers):
multipler_avg = sum(multiplers) / len(multiplers)
multipler_title = self.getMultiplerTitle(multipler_avg)
yield " - Average speed factor: %.2fx (%s)\n" % (multipler_avg, multipler_title)
# Display errors
for res_key, res_val in res.items():
if res_val != "ok":
yield " ! %s %s\n" % (res_key, res_val)
if num_failed != 0 and config.action == "test":
sys.exit(1)
def testHttps(self, num_run=1):
"""
Test https connection with valid and invalid certs
"""
import urllib.request
import urllib.error
body = urllib.request.urlopen("https://google.com").read()
assert len(body) > 100
yield "."
badssl_urls = [
"https://expired.badssl.com/",
"https://wrong.host.badssl.com/",
"https://self-signed.badssl.com/",
"https://untrusted-root.badssl.com/"
]
for badssl_url in badssl_urls:
try:
body = urllib.request.urlopen(badssl_url).read()
https_err = None
except urllib.error.URLError as err:
https_err = err
assert https_err
yield "."
def testCryptHash(self, num_run=1, hash_type="sha256"):
"""
Test hashing functions
"""
yield "(5MB) "
from Crypt import CryptHash
hash_types = {
"sha256": {"func": CryptHash.sha256sum, "hash_valid": "8cd629d9d6aff6590da8b80782a5046d2673d5917b99d5603c3dcb4005c45ffa"},
"sha512": {"func": CryptHash.sha512sum, "hash_valid": "9ca7e855d430964d5b55b114e95c6bbb114a6d478f6485df93044d87b108904d"}
}
hash_func = hash_types[hash_type]["func"]
hash_valid = hash_types[hash_type]["hash_valid"]
data = io.BytesIO(b"Hello" * 1024 * 1024) # 5MB
for i in range(num_run):
data.seek(0)
hash = hash_func(data)
yield "."
assert hash == hash_valid, "%s != %s" % (hash, hash_valid)
def testCryptHashlib(self, num_run=1, hash_type="sha3_256"):
"""
Test SHA3 hashing functions
"""
yield "x 5MB "
hash_types = {
"sha3_256": {"func": hashlib.sha3_256, "hash_valid": "c8aeb3ef9fe5d6404871c0d2a4410a4d4e23268e06735648c9596f436c495f7e"},
"sha3_512": {"func": hashlib.sha3_512, "hash_valid": "b75dba9472d8af3cc945ce49073f3f8214d7ac12086c0453fb08944823dee1ae83b3ffbc87a53a57cc454521d6a26fe73ff0f3be38dddf3f7de5d7692ebc7f95"},
}
hash_func = hash_types[hash_type]["func"]
hash_valid = hash_types[hash_type]["hash_valid"]
data = io.BytesIO(b"Hello" * 1024 * 1024) # 5MB
for i in range(num_run):
data.seek(0)
h = hash_func()
while 1:
buff = data.read(1024 * 64)
if not buff:
break
h.update(buff)
hash = h.hexdigest()
yield "."
assert hash == hash_valid, "%s != %s" % (hash, hash_valid)
def testRandom(self, num_run=1):
"""
Test generating random data
"""
yield "x 1000 x 256 bytes "
for i in range(num_run):
data_last = None
for y in range(1000):
data = os.urandom(256)
assert data != data_last
assert len(data) == 256
data_last = data
yield "."
def testHdPrivatekey(self, num_run=2):
"""
Test generating deterministic private keys from a master seed
"""
from Crypt import CryptBitcoin
seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"
privatekeys = []
for i in range(num_run):
privatekeys.append(CryptBitcoin.hdPrivatekey(seed, i * 10))
yield "."
valid = "5JSbeF5PevdrsYjunqpg7kAGbnCVYa1T4APSL3QRu8EoAmXRc7Y"
assert privatekeys[0] == valid, "%s != %s" % (privatekeys[0], valid)
if len(privatekeys) > 1:
assert privatekeys[0] != privatekeys[-1]
def testSign(self, num_run=1):
"""
Test signing data using a private key
"""
from Crypt import CryptBitcoin
data = "Hello" * 1024
privatekey = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
for i in range(num_run):
yield "."
sign = CryptBitcoin.sign(data, privatekey)
valid = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
assert sign == valid, "%s != %s" % (sign, valid)
def testVerify(self, num_run=1, lib_verify="sslcrypto"):
"""
Test verification of generated signatures
"""
from Crypt import CryptBitcoin
CryptBitcoin.loadLib(lib_verify, silent=True)
data = "Hello" * 1024
privatekey = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
address = CryptBitcoin.privatekeyToAddress(privatekey)
sign = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
for i in range(num_run):
ok = CryptBitcoin.verify(data, address, sign, lib_verify=lib_verify)
yield "."
assert ok, "does not verify from %s" % address
if lib_verify == "sslcrypto":
yield("(%s)" % CryptBitcoin.sslcrypto.ecc.get_backend())
def testPortCheckers(self):
"""
Test all active open port checkers
"""
from Peer import PeerPortchecker
for ip_type, func_names in PeerPortchecker.PeerPortchecker.checker_functions.items():
yield "\n- %s:" % ip_type
for func_name in func_names:
yield "\n - Tracker %s: " % func_name
try:
for res in self.testPortChecker(func_name):
yield res
except Exception as err:
yield Debug.formatException(err)
def testPortChecker(self, func_name):
"""
Test single open port checker
"""
from Peer import PeerPortchecker
peer_portchecker = PeerPortchecker.PeerPortchecker(None)
announce_func = getattr(peer_portchecker, func_name)
res = announce_func(3894)
yield res
def testAll(self):
"""
Run all tests to check system compatibility with ZeroNet functions
"""
for progress in self.testBenchmark(online=not config.offline, num_run=1):
yield progress
@PluginManager.registerTo("ConfigPlugin")
class ConfigPlugin(object):
def createArguments(self):
back = super(ConfigPlugin, self).createArguments()
if self.getCmdlineValue("test") == "benchmark":
self.test_parser.add_argument(
'--num_multipler', help='Benchmark run time multiplier',
default=1.0, type=float, metavar='num'
)
self.test_parser.add_argument(
'--filter', help='Filter running benchmark',
default=None, metavar='test name'
)
elif self.getCmdlineValue("test") == "portChecker":
self.test_parser.add_argument(
'--func_name', help='Name of open port checker function',
default=None, metavar='func_name'
)
return back


@ -1,3 +0,0 @@
from . import BenchmarkPlugin
from . import BenchmarkDb
from . import BenchmarkPack


@ -1,123 +0,0 @@
<html>
<script nonce="{script_nonce}">
window.benchmark_key = "{benchmark_key}";
function setState(elem, text) {
var formatted = text
var parts = text.match(/\* Running (.*?)(\n|$)/g)
if (parts) {
for (var i=0; i < parts.length; i++) {
var part = parts[i];
var details = part.match(/\* Running (.*?) (\.+|$)(.*)/);
if (details) {
var title = details[1]
var progress = details[2]
var result = details[3]
var result_parts = result.match(/(.*) Done in ([0-9\.]+)s = (.*?) \(([0-9\.]+)x\)/)
var percent = Math.min(100, progress.length * 10)
if (result_parts) percent = 100
var style = "background-image: linear-gradient(90deg, #FFF " + percent + "%, #FFF 0%, #d9d5de 0%);"
var part_formatted = "<div class='test' style='" + style + "'>"
part_formatted += "<span class='title'>" + title + "</span><span class='percent percent-" + percent + "'>" + percent + "%</span> "
if (result_parts) {
var result_extra = result_parts[1]
var taken = result_parts[2]
var multipler_title = result_parts[3]
var multipler = result_parts[4]
part_formatted += "<div class='result result-" + multipler_title.replace(/[^A-Za-z]/g, "") + "'>"
part_formatted += " <span class='taken'>" + taken + "s</span>"
part_formatted += " <span class='multipler'>" + multipler + "x</span>"
part_formatted += " <span class='multipler-title'>" + multipler_title + "</span>"
part_formatted += "</div>"
} else {
part_formatted += "<div class='result'>" + result + "</div>"
}
part_formatted += "</div>"
formatted = formatted.replace(part, part_formatted);
}
}
}
formatted = formatted.replace(/(\! Error:.*)/, "<div class='test error'>$1</div>");
formatted = formatted.replace(/(\== Result ==[^]*)/, "<div class='test summary'>$1</div>");
var is_bottom = document.body.scrollTop + document.body.clientHeight >= document.body.scrollHeight - 5;
elem.innerHTML = formatted.trim();
if (is_bottom)
document.body.scrollTop = document.body.scrollHeight;
}
function stream(url, elem) {
document.getElementById("h1").innerText = "Benchmark: Starting..."
var xhr = new XMLHttpRequest();
xhr.open('GET', url, true);
xhr.setRequestHeader('Accept', 'text/html');
xhr.send(null);
xhr.onreadystatechange = function(state) {
document.getElementById("h1").innerText = "Benchmark: Running..."
setState(elem, xhr.responseText);
if (xhr.readyState == 4) {
document.getElementById("h1").innerText = "Benchmark: Done."
}
}
}
</script>
<body>
<style>
body {
background-color: #3c3546;
background-image: url("data:image/svg+xml,%3Csvg width='60' height='60' viewBox='0 0 60 60' xmlns='http://www.w3.org/2000/svg'%3E%3Cg fill='none' fill-rule='evenodd'%3E%3Cg fill='%23cfcfcf' fill-opacity='0.09'%3E%3Cpath d='M36 34v-4h-2v4h-4v2h4v4h2v-4h4v-2h-4zm0-30V0h-2v4h-4v2h4v4h2V6h4V4h-4zM6 34v-4H4v4H0v2h4v4h2v-4h4v-2H6zM6 4V0H4v4H0v2h4v4h2V6h4V4H6z'/%3E%3C/g%3E%3C/g%3E%3C/svg%3E");}
h1 {
font-family: monospace; color: white; font-weight: normal; text-transform: uppercase;
max-width: 690px; margin: 30px auto; margin-bottom: 10px;
}
#out {
white-space: pre-line; background-color: #ffffff1a; padding: 20px; font-family: Consolas, monospace;
font-size: 11px; width: 90%; margin: auto; max-width: 650px; box-shadow: 0px 10px 30px -10px #5c5c5c6b;
}
.test { padding: 12px; box-shadow: 0px 5px 13px -5px #5c5c5c6b; margin-bottom: -2px; background-color: white; border: 1px solid #dbdbdb; }
.test .percent { float: right; }
.test .percent-100 { display: none; }
.test .result { float: right; }
.test .title { max-width: calc(100% - 150px); display: inline-block; }
.test .multipler-title { display: inline-block; width: 50px; text-align: right; }
.test:last-child { margin-bottom: 15px; border-color: #c1c1c1; }
.test .result-Sloooow { color: red; }
.test .result-Ehh { color: #ad1457; }
.test .result-Goodish { color: #ef6c00; }
.test .result-Ok { color: #00cf03; }
.test .result-Fine { color: #00bcd4; }
.test .result-Fast { color: #4b78ff; }
.test .result-WOW { color: #9c27b0; }
.test .result-Insane { color: #d603f4; }
.test.summary { margin-top: 20px; text-transform: uppercase; border-left: 10px solid #00ff63; border-color: #00ff63; }
.test.error { background-color: #ff2259; color: white; border-color: red; }
#start { text-align: center }
.button {
background-color: white; padding: 10px 20px; display: inline-block; border-radius: 5px;
text-decoration: none; color: #673AB7; text-transform: uppercase; margin-bottom: 11px; border-bottom: 2px solid #c1bff8;
}
.button:hover { border-bottom-color: #673AB7; }
.button:active { transform: translateY(1px) }
small { text-transform: uppercase; opacity: 0.7; color: white; letter-spacing: 1px; }
</style>
<h1 id="h1">Benchmark</h1>
<div id="out">
<div id="start">
<a href="#Start" class="button" id="start_button">Start benchmark</a>
<small>(It will take around 20 sec)</small>
</div>
</div>
<script nonce="{script_nonce}">
function start() {
stream("/BenchmarkResult?benchmark_key={benchmark_key}&filter={filter}", document.getElementById("out"));
return false;
}
document.getElementById("start_button").onclick = start
</script>
</body>
</html>


@ -1,5 +0,0 @@
{
"name": "Benchmark",
"description": "Test and benchmark database and cryptographic functions related to ZeroNet.",
"default": "enabled"
}


@ -1,170 +0,0 @@
import array
def packPiecefield(data):
if not isinstance(data, bytes) and not isinstance(data, bytearray):
raise Exception("Invalid data type: %s" % type(data))
res = []
if not data:
return array.array("H", b"")
if data[0] == b"\x00":
res.append(0)
find = b"\x01"
else:
find = b"\x00"
last_pos = 0
pos = 0
while 1:
pos = data.find(find, pos)
if find == b"\x00":
find = b"\x01"
else:
find = b"\x00"
if pos == -1:
res.append(len(data) - last_pos)
break
res.append(pos - last_pos)
last_pos = pos
return array.array("H", res)
def unpackPiecefield(data):
if not data:
return b""
res = []
char = b"\x01"
for times in data:
if times > 10000:
return b""
res.append(char * times)
if char == b"\x01":
char = b"\x00"
else:
char = b"\x01"
return b"".join(res)
def spliceBit(data, idx, bit):
if bit != b"\x00" and bit != b"\x01":
raise Exception("Invalid bit: %s" % bit)
if len(data) < idx:
data = data.ljust(idx + 1, b"\x00")
return data[:idx] + bit + data[idx + 1:]
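# Worked example (illustrative) of the run-length encoding above:
#   packPiecefield(b"\x01\x01\x00\x00\x00\x01") -> array("H", [2, 3, 1])
#     (alternating run lengths, starting with the \x01-run; a leading 0 is
#     emitted when the data starts with \x00)
#   unpackPiecefield(array.array("H", [2, 3, 1])) -> b"\x01\x01\x00\x00\x00\x01"
#   spliceBit(b"\x00\x00", 3, b"\x01") -> b"\x00\x00\x00\x01" (pads, then sets)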
class Piecefield(object):
def tostring(self):
return "".join(["1" if b else "0" for b in self.tobytes()])
class BigfilePiecefield(Piecefield):
__slots__ = ["data"]
def __init__(self):
self.data = b""
def frombytes(self, s):
if not isinstance(s, bytes) and not isinstance(s, bytearray):
raise Exception("Invalid type: %s" % type(s))
self.data = s
def tobytes(self):
return self.data
def pack(self):
return packPiecefield(self.data).tobytes()
def unpack(self, s):
self.data = unpackPiecefield(array.array("H", s))
def __getitem__(self, key):
try:
return self.data[key]
except IndexError:
return False
def __setitem__(self, key, value):
self.data = spliceBit(self.data, key, value)
class BigfilePiecefieldPacked(Piecefield):
__slots__ = ["data"]
def __init__(self):
self.data = b""
def frombytes(self, data):
if not isinstance(data, bytes) and not isinstance(data, bytearray):
raise Exception("Invalid type: %s" % type(data))
self.data = packPiecefield(data).tobytes()
def tobytes(self):
return unpackPiecefield(array.array("H", self.data))
def pack(self):
return array.array("H", self.data).tobytes()
def unpack(self, data):
self.data = data
def __getitem__(self, key):
try:
return self.tobytes()[key]
except IndexError:
return False
def __setitem__(self, key, value):
data = spliceBit(self.tobytes(), key, value)
self.frombytes(data)
if __name__ == "__main__":
import os
import psutil
import time
testdata = b"\x01" * 100 + b"\x00" * 900 + b"\x01" * 4000 + b"\x00" * 4999 + b"\x01"
meminfo = psutil.Process(os.getpid()).memory_info
for storage in [BigfilePiecefieldPacked, BigfilePiecefield]:
print("-- Testing storage: %s --" % storage)
m = meminfo()[0]
s = time.time()
piecefields = {}
for i in range(10000):
piecefield = storage()
piecefield.frombytes(testdata[:i] + b"\x00" + testdata[i + 1:])
piecefields[i] = piecefield
print("Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
m = meminfo()[0]
s = time.time()
for piecefield in list(piecefields.values()):
val = piecefield[1000]
print("Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
m = meminfo()[0]
s = time.time()
for piecefield in list(piecefields.values()):
piecefield[1000] = b"\x01"
print("Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
m = meminfo()[0]
s = time.time()
for piecefield in list(piecefields.values()):
packed = piecefield.pack()
print("Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)))
m = meminfo()[0]
s = time.time()
for piecefield in list(piecefields.values()):
piecefield.unpack(packed)
print("Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
piecefields = {}


@ -1,843 +0,0 @@
import time
import os
import subprocess
import shutil
import collections
import math
import warnings
import base64
import binascii
import json
import gevent
import gevent.lock
from Plugin import PluginManager
from Debug import Debug
from Crypt import CryptHash
with warnings.catch_warnings():
warnings.filterwarnings("ignore") # Ignore missing sha3 warning
import merkletools
from util import helper
from util import Msgpack
from util.Flag import flag
import util
from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
# We can only import plugin host classes after the plugins are loaded
@PluginManager.afterLoad
def importPluginnedClasses():
global VerifyError, config
from Content.ContentManager import VerifyError
from Config import config
if "upload_nonces" not in locals():
upload_nonces = {}
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
def isCorsAllowed(self, path):
if path == "/ZeroNet-Internal/BigfileUpload":
return True
else:
return super(UiRequestPlugin, self).isCorsAllowed(path)
@helper.encodeResponse
def actionBigfileUpload(self):
nonce = self.get.get("upload_nonce")
if nonce not in upload_nonces:
return self.error403("Upload nonce error.")
upload_info = upload_nonces[nonce]
del upload_nonces[nonce]
self.sendHeader(200, "text/html", noscript=True, extra_headers={
"Access-Control-Allow-Origin": "null",
"Access-Control-Allow-Credentials": "true"
})
self.readMultipartHeaders(self.env['wsgi.input']) # Skip http headers
result = self.handleBigfileUpload(upload_info, self.env['wsgi.input'].read)
return json.dumps(result)
def actionBigfileUploadWebsocket(self):
ws = self.env.get("wsgi.websocket")
if not ws:
self.start_response("400 Bad Request", [])
return [b"Not a websocket request!"]
nonce = self.get.get("upload_nonce")
if nonce not in upload_nonces:
return self.error403("Upload nonce error.")
upload_info = upload_nonces[nonce]
del upload_nonces[nonce]
ws.send("poll")
buffer = b""
def read(size):
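# Blocks until `size` bytes have arrived, buffering websocket frames and
# sending "poll" after each one so the client keeps streaming chunks.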
nonlocal buffer
while len(buffer) < size:
buffer += ws.receive()
ws.send("poll")
part, buffer = buffer[:size], buffer[size:]
return part
result = self.handleBigfileUpload(upload_info, read)
ws.send(json.dumps(result))
def handleBigfileUpload(self, upload_info, read):
site = upload_info["site"]
inner_path = upload_info["inner_path"]
with site.storage.open(inner_path, "wb", create_dirs=True) as out_file:
merkle_root, piece_size, piecemap_info = site.content_manager.hashBigfile(
read, upload_info["size"], upload_info["piece_size"], out_file
)
if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split
hash = binascii.hexlify(piecemap_info["sha512_pieces"][0])
hash_id = site.content_manager.hashfield.getHashId(hash)
site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True)
else: # Big file
file_name = helper.getFilename(inner_path)
site.storage.open(upload_info["piecemap"], "wb").write(Msgpack.pack({file_name: piecemap_info}))
# Find piecemap and file relative path to content.json
file_info = site.content_manager.getFileInfo(inner_path, new_file=True)
content_inner_path_dir = helper.getDirname(file_info["content_inner_path"])
piecemap_relative_path = upload_info["piecemap"][len(content_inner_path_dir):]
file_relative_path = inner_path[len(content_inner_path_dir):]
# Add file to content.json
if site.storage.isFile(file_info["content_inner_path"]):
content = site.storage.loadJson(file_info["content_inner_path"])
else:
content = {}
if "files_optional" not in content:
content["files_optional"] = {}
content["files_optional"][file_relative_path] = {
"sha512": merkle_root,
"size": upload_info["size"],
"piecemap": piecemap_relative_path,
"piece_size": piece_size
}
merkle_root_hash_id = site.content_manager.hashfield.getHashId(merkle_root)
site.content_manager.optionalDownloaded(inner_path, merkle_root_hash_id, upload_info["size"], own=True)
site.storage.writeJson(file_info["content_inner_path"], content)
site.content_manager.contents.loadItem(file_info["content_inner_path"]) # reload cache
return {
"merkle_root": merkle_root,
"piece_num": len(piecemap_info["sha512_pieces"]),
"piece_size": piece_size,
"inner_path": inner_path
}
def readMultipartHeaders(self, wsgi_input):
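# Consume header lines until the blank CRLF line that ends the multipart part headers (give up after 100 lines)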
found = False
for i in range(100):
line = wsgi_input.readline()
if line == b"\r\n":
found = True
break
if not found:
raise Exception("No multipart header found")
return i
def actionFile(self, file_path, *args, **kwargs):
if kwargs.get("file_size", 0) > 1024 * 1024 and kwargs.get("path_parts"): # Only check files larger than 1MB
path_parts = kwargs["path_parts"]
site = self.server.site_manager.get(path_parts["address"])
big_file = site.storage.openBigfile(path_parts["inner_path"], prebuffer=2 * 1024 * 1024)
if big_file:
kwargs["file_obj"] = big_file
kwargs["file_size"] = big_file.size
return super(UiRequestPlugin, self).actionFile(file_path, *args, **kwargs)
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
def actionBigfileUploadInit(self, to, inner_path, size, protocol="xhr"):
valid_signers = self.site.content_manager.getValidSigners(inner_path)
auth_address = self.user.getAuthAddress(self.site.address)
if not self.site.settings["own"] and auth_address not in valid_signers:
self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers))
return self.response(to, {"error": "Forbidden, you can only modify your own files"})
nonce = CryptHash.random()
piece_size = 1024 * 1024
inner_path = self.site.content_manager.sanitizePath(inner_path)
file_info = self.site.content_manager.getFileInfo(inner_path, new_file=True)
content_inner_path_dir = helper.getDirname(file_info["content_inner_path"])
file_relative_path = inner_path[len(content_inner_path_dir):]
upload_nonces[nonce] = {
"added": time.time(),
"site": self.site,
"inner_path": inner_path,
"websocket_client": self,
"size": size,
"piece_size": piece_size,
"piecemap": inner_path + ".piecemap.msgpack"
}
if protocol == "xhr":
return {
"url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce,
"piece_size": piece_size,
"inner_path": inner_path,
"file_relative_path": file_relative_path
}
elif protocol == "websocket":
server_url = self.request.getWsServerUrl()
if server_url:
proto, host = server_url.split("://")
origin = proto.replace("http", "ws") + "://" + host
else:
origin = "{origin}"
return {
"url": origin + "/ZeroNet-Internal/BigfileUploadWebsocket?upload_nonce=" + nonce,
"piece_size": piece_size,
"inner_path": inner_path,
"file_relative_path": file_relative_path
}
else:
return {"error": "Unknown protocol"}
@flag.no_multiuser
def actionSiteSetAutodownloadBigfileLimit(self, to, limit):
permissions = self.getPermissions(to)
if "ADMIN" not in permissions:
return self.response(to, "You don't have permission to run this command")
self.site.settings["autodownload_bigfile_size_limit"] = int(limit)
self.response(to, "ok")
def actionFileDelete(self, to, inner_path):
piecemap_inner_path = inner_path + ".piecemap.msgpack"
if self.hasFilePermission(inner_path) and self.site.storage.isFile(piecemap_inner_path):
# Also delete .piecemap.msgpack file if exists
self.log.debug("Deleting piecemap: %s" % piecemap_inner_path)
file_info = self.site.content_manager.getFileInfo(piecemap_inner_path)
if file_info:
content_json = self.site.storage.loadJson(file_info["content_inner_path"])
relative_path = file_info["relative_path"]
if relative_path in content_json.get("files_optional", {}):
del content_json["files_optional"][relative_path]
self.site.storage.writeJson(file_info["content_inner_path"], content_json)
self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True)
try:
self.site.storage.delete(piecemap_inner_path)
except Exception as err:
self.log.error("File %s delete error: %s" % (piecemap_inner_path, err))
return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path)
@PluginManager.registerTo("ContentManager")
class ContentManagerPlugin(object):
def getFileInfo(self, inner_path, *args, **kwargs):
if "|" not in inner_path:
return super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs)
inner_path, file_range = inner_path.split("|")
pos_from, pos_to = map(int, file_range.split("-"))
file_info = super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs)
return file_info
def readFile(self, read_func, size, buff_size=1024 * 64):
part_num = 0
recv_left = size
while 1:
part_num += 1
read_size = min(buff_size, recv_left)
part = read_func(read_size)
if not part:
break
yield part
if part_num % 100 == 0: # Avoid blocking ZeroNet execution during upload
time.sleep(0.001)
recv_left -= read_size
if recv_left <= 0:
break
def hashBigfile(self, read_func, size, piece_size=1024 * 1024, file_out=None):
self.site.settings["has_bigfile"] = True
recv = 0
try:
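# sha512t is assumed to be SHA-512 truncated to 256 bits (ZeroNet's standard content hash); it is used for both piece hashes and merkle leaves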
piece_hash = CryptHash.sha512t()
piece_hashes = []
piece_recv = 0
mt = merkletools.MerkleTools()
mt.hash_function = CryptHash.sha512t
part = ""
for part in self.readFile(read_func, size):
if file_out:
file_out.write(part)
recv += len(part)
piece_recv += len(part)
piece_hash.update(part)
if piece_recv >= piece_size:
piece_digest = piece_hash.digest()
piece_hashes.append(piece_digest)
mt.leaves.append(piece_digest)
piece_hash = CryptHash.sha512t()
piece_recv = 0
if len(piece_hashes) % 100 == 0 or recv == size:
self.log.info("- [HASHING:%.0f%%] Pieces: %s, %.1fMB/%.1fMB" % (
float(recv) / size * 100, len(piece_hashes), recv / 1024 / 1024, size / 1024 / 1024
))
part = ""
if len(part) > 0:
piece_digest = piece_hash.digest()
piece_hashes.append(piece_digest)
mt.leaves.append(piece_digest)
except Exception as err:
raise err
finally:
if file_out:
file_out.close()
mt.make_tree()
merkle_root = mt.get_merkle_root()
if type(merkle_root) is bytes: # Python <3.5
merkle_root = merkle_root.decode()
return merkle_root, piece_size, {
"sha512_pieces": piece_hashes
}
def hashFile(self, dir_inner_path, file_relative_path, optional=False):
inner_path = dir_inner_path + file_relative_path
file_size = self.site.storage.getSize(inner_path)
# Only care about optional files >1MB
if not optional or file_size < 1 * 1024 * 1024:
return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)
back = {}
content = self.contents.get(dir_inner_path + "content.json")
hash = None
piecemap_relative_path = None
piece_size = None
# Don't re-hash if it's already in content.json
if content and file_relative_path in content.get("files_optional", {}):
file_node = content["files_optional"][file_relative_path]
if file_node["size"] == file_size:
self.log.info("- [SAME SIZE] %s" % file_relative_path)
hash = file_node.get("sha512")
piecemap_relative_path = file_node.get("piecemap")
piece_size = file_node.get("piece_size")
if not hash or not piecemap_relative_path: # Not in content.json yet
if file_size < 5 * 1024 * 1024: # Don't create piecemap automatically for files smaller than 5MB
return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)
self.log.info("- [HASHING] %s" % file_relative_path)
merkle_root, piece_size, piecemap_info = self.hashBigfile(self.site.storage.open(inner_path, "rb").read, file_size)
if not hash:
hash = merkle_root
if not piecemap_relative_path:
file_name = helper.getFilename(file_relative_path)
piecemap_relative_path = file_relative_path + ".piecemap.msgpack"
piecemap_inner_path = inner_path + ".piecemap.msgpack"
self.site.storage.open(piecemap_inner_path, "wb").write(Msgpack.pack({file_name: piecemap_info}))
back.update(super(ContentManagerPlugin, self).hashFile(dir_inner_path, piecemap_relative_path, optional=True))
piece_num = int(math.ceil(float(file_size) / piece_size))
# Add the merkle root to hashfield
hash_id = self.site.content_manager.hashfield.getHashId(hash)
self.optionalDownloaded(inner_path, hash_id, file_size, own=True)
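# The file was hashed from local storage, so mark every piece as present in the piecefield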
self.site.storage.piecefields[hash].frombytes(b"\x01" * piece_num)
back[file_relative_path] = {"sha512": hash, "size": file_size, "piecemap": piecemap_relative_path, "piece_size": piece_size}
return back
def getPiecemap(self, inner_path):
file_info = self.site.content_manager.getFileInfo(inner_path)
piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"]
self.site.needFile(piecemap_inner_path, priority=20)
piecemap = Msgpack.unpack(self.site.storage.open(piecemap_inner_path, "rb").read())[helper.getFilename(inner_path)]
piecemap["piece_size"] = file_info["piece_size"]
return piecemap
def verifyPiece(self, inner_path, pos, piece):
try:
piecemap = self.getPiecemap(inner_path)
except Exception as err:
raise VerifyError("Unable to download piecemap: %s" % Debug.formatException(err))
piece_i = int(pos / piecemap["piece_size"])
if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
raise VerifyError("Invalid hash")
return True
def verifyFile(self, inner_path, file, ignore_same=True):
if "|" not in inner_path:
return super(ContentManagerPlugin, self).verifyFile(inner_path, file, ignore_same)
inner_path, file_range = inner_path.split("|")
pos_from, pos_to = map(int, file_range.split("-"))
return self.verifyPiece(inner_path, pos_from, file)
def optionalDownloaded(self, inner_path, hash_id, size=None, own=False):
if "|" in inner_path:
inner_path, file_range = inner_path.split("|")
pos_from, pos_to = map(int, file_range.split("-"))
file_info = self.getFileInfo(inner_path)
# Mark piece downloaded
piece_i = int(pos_from / file_info["piece_size"])
self.site.storage.piecefields[file_info["sha512"]][piece_i] = b"\x01"
# Only add to site size on first request
if hash_id in self.hashfield:
size = 0
elif size > 1024 * 1024:
file_info = self.getFileInfo(inner_path)
if file_info and "sha512" in file_info: # We already have the file, but not in piecefield
sha512 = file_info["sha512"]
if sha512 not in self.site.storage.piecefields:
self.site.storage.checkBigfile(inner_path)
return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own)
def optionalRemoved(self, inner_path, hash_id, size=None):
if size and size > 1024 * 1024:
file_info = self.getFileInfo(inner_path)
sha512 = file_info["sha512"]
if sha512 in self.site.storage.piecefields:
del self.site.storage.piecefields[sha512]
# Also remove other pieces of the file from download queue
for key in list(self.site.bad_files.keys()):
if key.startswith(inner_path + "|"):
del self.site.bad_files[key]
self.site.worker_manager.removeSolvedFileTasks()
return super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size)
@PluginManager.registerTo("SiteStorage")
class SiteStoragePlugin(object):
def __init__(self, *args, **kwargs):
super(SiteStoragePlugin, self).__init__(*args, **kwargs)
self.piecefields = collections.defaultdict(BigfilePiecefield)
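# Restore piecefields that were cached (packed, base64-encoded) in the site settings by getSettingsCache()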
if "piecefields" in self.site.settings.get("cache", {}):
for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items():
if piecefield_packed:
self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed))
self.site.settings["cache"]["piecefields"] = {}
def createSparseFile(self, inner_path, size, sha512=None):
file_path = self.getPath(inner_path)
self.ensureDir(os.path.dirname(inner_path))
f = open(file_path, 'wb')
f.truncate(min(1024 * 1024 * 5, size)) # Only pre-allocate up to 5MB
f.close()
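# On Windows, flag the file as sparse so the truncated-but-unwritten ranges don't take up disk space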
if os.name == "nt":
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
subprocess.call(["fsutil", "sparse", "setflag", file_path], close_fds=True, startupinfo=startupinfo)
if sha512 and sha512 in self.piecefields:
self.log.debug("%s: File not exists, but has piecefield. Deleting piecefield." % inner_path)
del self.piecefields[sha512]
def write(self, inner_path, content):
if "|" not in inner_path:
return super(SiteStoragePlugin, self).write(inner_path, content)
# Write to a specific position by appending |{pos_from}-{pos_to} to the filename
inner_path, file_range = inner_path.split("|")
pos_from, pos_to = map(int, file_range.split("-"))
file_path = self.getPath(inner_path)
# Create the directory if it does not exist
self.ensureDir(os.path.dirname(inner_path))
if not os.path.isfile(file_path):
file_info = self.site.content_manager.getFileInfo(inner_path)
self.createSparseFile(inner_path, file_info["size"])
# Write file
with open(file_path, "rb+") as file:
file.seek(pos_from)
if hasattr(content, 'read'): # File-like object
shutil.copyfileobj(content, file) # Write buff to disk
else: # Simple string
file.write(content)
del content
self.onUpdated(inner_path)
def checkBigfile(self, inner_path):
file_info = self.site.content_manager.getFileInfo(inner_path)
if not file_info or "piecemap" not in file_info: # It's not a big file
return False
self.site.settings["has_bigfile"] = True
file_path = self.getPath(inner_path)
sha512 = file_info["sha512"]
piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"]))
if os.path.isfile(file_path):
if sha512 not in self.piecefields:
if open(file_path, "rb").read(128) == b"\0" * 128:
piece_data = b"\x00"
else:
piece_data = b"\x01"
self.log.debug("%s: File exists, but not in piecefield. Filling piecefiled with %s * %s." % (inner_path, piece_num, piece_data))
self.piecefields[sha512].frombytes(piece_data * piece_num)
else:
self.log.debug("Creating bigfile: %s" % inner_path)
self.createSparseFile(inner_path, file_info["size"], sha512)
self.piecefields[sha512].frombytes(b"\x00" * piece_num)
self.log.debug("Created bigfile: %s" % inner_path)
return True
def openBigfile(self, inner_path, prebuffer=0):
if not self.checkBigfile(inner_path):
return False
self.site.needFile(inner_path, blocking=False) # Download piecemap
return BigFile(self.site, inner_path, prebuffer=prebuffer)
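# Minimal usage sketch (the path below is illustrative, not part of the plugin):
# pieces are requested from peers on demand as the returned object is read.
#
# f = site.storage.openBigfile("data/big.iso", prebuffer=2 * 1024 * 1024)
# if f: # openBigfile() returns False if the path is not a bigfile
#     f.seek(5 * 1024 * 1024) # Seeking is local, nothing is downloaded yet
#     data = f.read(64 * 1024) # Downloads the missing piece(s) covering this range
#     f.close()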
class BigFile(object):
def __init__(self, site, inner_path, prebuffer=0):
self.site = site
self.inner_path = inner_path
file_path = site.storage.getPath(inner_path)
file_info = self.site.content_manager.getFileInfo(inner_path)
self.piece_size = file_info["piece_size"]
self.sha512 = file_info["sha512"]
self.size = file_info["size"]
self.prebuffer = prebuffer
self.read_bytes = 0
self.piecefield = self.site.storage.piecefields[self.sha512]
self.f = open(file_path, "rb+")
self.read_lock = gevent.lock.Semaphore()
def read(self, buff=64 * 1024):
with self.read_lock:
pos = self.f.tell()
read_until = min(self.size, pos + buff)
requests = []
# Request all required blocks
while 1:
piece_i = int(pos / self.piece_size)
if piece_i * self.piece_size >= read_until:
break
pos_from = piece_i * self.piece_size
pos_to = pos_from + self.piece_size
if not self.piecefield[piece_i]:
requests.append(self.site.needFile("%s|%s-%s" % (self.inner_path, pos_from, pos_to), blocking=False, update=True, priority=10))
pos += self.piece_size
if not all(requests):
return None
# Request prebuffer
if self.prebuffer:
prebuffer_until = min(self.size, read_until + self.prebuffer)
priority = 3
while 1:
piece_i = int(pos / self.piece_size)
if piece_i * self.piece_size >= prebuffer_until:
break
pos_from = piece_i * self.piece_size
pos_to = pos_from + self.piece_size
if not self.piecefield[piece_i]:
self.site.needFile("%s|%s-%s" % (self.inner_path, pos_from, pos_to), blocking=False, update=True, priority=max(0, priority))
priority -= 1
pos += self.piece_size
gevent.joinall(requests)
self.read_bytes += buff
# Increase buffer for long reads
if self.read_bytes > 7 * 1024 * 1024 and self.prebuffer < 5 * 1024 * 1024:
self.site.log.debug("%s: Increasing bigfile buffer size to 5MB..." % self.inner_path)
self.prebuffer = 5 * 1024 * 1024
return self.f.read(buff)
def seek(self, pos, whence=0):
with self.read_lock:
if whence == 2: # Relative from file end
pos = self.size + pos # Use the real size instead of size on the disk
whence = 0
return self.f.seek(pos, whence)
def seekable(self):
return self.f.seekable()
def tell(self):
return self.f.tell()
def close(self):
self.f.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
@PluginManager.registerTo("WorkerManager")
class WorkerManagerPlugin(object):
def addTask(self, inner_path, *args, **kwargs):
file_info = kwargs.get("file_info")
if file_info and "piecemap" in file_info: # Bigfile
self.site.settings["has_bigfile"] = True
piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"]
piecemap_task = None
if not self.site.storage.isFile(piecemap_inner_path):
# Start downloading the piecemap
piecemap_task = super(WorkerManagerPlugin, self).addTask(piecemap_inner_path, priority=30)
autodownload_bigfile_size_limit = self.site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit)
if "|" not in inner_path and self.site.isDownloadable(inner_path) and file_info["size"] / 1024 / 1024 <= autodownload_bigfile_size_limit:
gevent.spawn_later(0.1, self.site.needFile, inner_path + "|all") # Download all pieces
if "|" in inner_path:
# Start downloading the piece
task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs)
inner_path, file_range = inner_path.split("|")
pos_from, pos_to = map(int, file_range.split("-"))
task["piece_i"] = int(pos_from / file_info["piece_size"])
task["sha512"] = file_info["sha512"]
else:
if inner_path in self.site.bad_files:
del self.site.bad_files[inner_path]
if piecemap_task:
task = piecemap_task
else:
fake_evt = gevent.event.AsyncResult() # Don't download anything if no range specified
fake_evt.set(True)
task = {"evt": fake_evt}
if not self.site.storage.isFile(inner_path):
self.site.storage.createSparseFile(inner_path, file_info["size"], file_info["sha512"])
piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"]))
self.site.storage.piecefields[file_info["sha512"]].frombytes(b"\x00" * piece_num)
else:
task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs)
return task
def taskAddPeer(self, task, peer):
if "piece_i" in task:
if not peer.piecefields[task["sha512"]][task["piece_i"]]:
if task["sha512"] not in peer.piecefields:
gevent.spawn(peer.updatePiecefields, force=True)
elif not task["peers"]:
gevent.spawn(peer.updatePiecefields)
return False # Deny to add peers to task if file not in piecefield
return super(WorkerManagerPlugin, self).taskAddPeer(task, peer)
@PluginManager.registerTo("FileRequest")
class FileRequestPlugin(object):
def isReadable(self, site, inner_path, file, pos):
# Peek into file
if file.read(10) == b"\0" * 10:
# Looks empty, but make sure we really don't have that piece
file_info = site.content_manager.getFileInfo(inner_path)
if "piece_size" in file_info:
piece_i = int(pos / file_info["piece_size"])
if not site.storage.piecefields[file_info["sha512"]][piece_i]:
return False
# Seek back to position we want to read
file.seek(pos)
return super(FileRequestPlugin, self).isReadable(site, inner_path, file, pos)
def actionGetPiecefields(self, params):
site = self.sites.get(params["site"])
if not site or not site.isServing(): # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
# Add peer to site if not added before
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True)
if not peer.connection: # Just added
peer.connect(self.connection) # Assign current connection to peer
piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()}
self.response({"piecefields_packed": piecefields_packed})
def actionSetPiecefields(self, params):
site = self.sites.get(params["site"])
if not site or not site.isServing(): # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(5)
return False
# Add or get peer
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection)
if not peer.connection:
peer.connect(self.connection)
peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
for sha512, piecefield_packed in params["piecefields_packed"].items():
peer.piecefields[sha512].unpack(piecefield_packed)
site.settings["has_bigfile"] = True
self.response({"ok": "Updated"})
@PluginManager.registerTo("Peer")
class PeerPlugin(object):
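# Lazily create per-peer piecefield state on first attribute access (keeps memory low for peers of sites without bigfiles)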
def __getattr__(self, key):
if key == "piecefields":
self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
return self.piecefields
elif key == "time_piecefields_updated":
self.time_piecefields_updated = None
return self.time_piecefields_updated
else:
return super(PeerPlugin, self).__getattr__(key)
@util.Noparallel(ignore_args=True)
def updatePiecefields(self, force=False):
if self.connection and self.connection.handshake.get("rev", 0) < 2190:
return False # Not supported
# Don't update piecefield again in 1 min
if self.time_piecefields_updated and time.time() - self.time_piecefields_updated < 60 and not force:
return False
self.time_piecefields_updated = time.time()
res = self.request("getPiecefields", {"site": self.site.address})
if not res or "error" in res:
return False
self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
try:
for sha512, piecefield_packed in res["piecefields_packed"].items():
self.piecefields[sha512].unpack(piecefield_packed)
except Exception as err:
self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err))
return self.piecefields
def sendMyHashfield(self, *args, **kwargs):
return super(PeerPlugin, self).sendMyHashfield(*args, **kwargs)
def updateHashfield(self, *args, **kwargs):
if self.site.settings.get("has_bigfile"):
thread = gevent.spawn(self.updatePiecefields, *args, **kwargs)
back = super(PeerPlugin, self).updateHashfield(*args, **kwargs)
thread.join()
return back
else:
return super(PeerPlugin, self).updateHashfield(*args, **kwargs)
def getFile(self, site, inner_path, *args, **kwargs):
if "|" in inner_path:
inner_path, file_range = inner_path.split("|")
pos_from, pos_to = map(int, file_range.split("-"))
kwargs["pos_from"] = pos_from
kwargs["pos_to"] = pos_to
return super(PeerPlugin, self).getFile(site, inner_path, *args, **kwargs)
@PluginManager.registerTo("Site")
class SitePlugin(object):
def isFileDownloadAllowed(self, inner_path, file_info):
if "piecemap" in file_info:
file_size_mb = file_info["size"] / 1024 / 1024
if config.bigfile_size_limit and file_size_mb > config.bigfile_size_limit:
self.log.debug(
"Bigfile size %s too large: %sMB > %sMB, skipping..." %
(inner_path, file_size_mb, config.bigfile_size_limit)
)
return False
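# Bigfiles are fetched piece by piece, so apply the size limit check per piece rather than to the whole file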
file_info = file_info.copy()
file_info["size"] = file_info["piece_size"]
return super(SitePlugin, self).isFileDownloadAllowed(inner_path, file_info)
def getSettingsCache(self):
back = super(SitePlugin, self).getSettingsCache()
if self.storage.piecefields:
back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()}
return back
def needFile(self, inner_path, *args, **kwargs):
if inner_path.endswith("|all"):
@util.Pooled(20)
def pooledNeedBigfile(inner_path, *args, **kwargs):
if inner_path not in self.bad_files:
self.log.debug("Cancelled piece, skipping %s" % inner_path)
return False
return self.needFile(inner_path, *args, **kwargs)
inner_path = inner_path.replace("|all", "")
file_info = self.needFileInfo(inner_path)
# Use default function to download non-optional file
if "piece_size" not in file_info:
return super(SitePlugin, self).needFile(inner_path, *args, **kwargs)
file_size = file_info["size"]
piece_size = file_info["piece_size"]
piece_num = int(math.ceil(float(file_size) / piece_size))
file_threads = []
piecefield = self.storage.piecefields.get(file_info["sha512"])
for piece_i in range(piece_num):
piece_from = piece_i * piece_size
piece_to = min(file_size, piece_from + piece_size)
if not piecefield or not piecefield[piece_i]:
inner_path_piece = "%s|%s-%s" % (inner_path, piece_from, piece_to)
self.bad_files[inner_path_piece] = self.bad_files.get(inner_path_piece, 1)
res = pooledNeedBigfile(inner_path_piece, blocking=False)
if res is not True and res is not False:
file_threads.append(res)
gevent.joinall(file_threads)
else:
return super(SitePlugin, self).needFile(inner_path, *args, **kwargs)
@PluginManager.registerTo("ConfigPlugin")
class ConfigPlugin(object):
def createArguments(self):
group = self.parser.add_argument_group("Bigfile plugin")
group.add_argument('--autodownload_bigfile_size_limit', help='Also download bigfiles smaller than this limit if the "help distribute" option is checked', default=10, metavar="MB", type=int)
group.add_argument('--bigfile_size_limit', help='Maximum size of downloaded big files', default=False, metavar="MB", type=int)
return super(ConfigPlugin, self).createArguments()

View File

@@ -1,574 +0,0 @@
import time
import io
import binascii
import pytest
import mock
from Connection import ConnectionServer
from Content.ContentManager import VerifyError
from File import FileServer
from File import FileRequest
from Worker import WorkerManager
from Peer import Peer
from Bigfile import BigfilePiecefield, BigfilePiecefieldPacked
from Test import Spy
from util import Msgpack
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestBigfile:
privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
piece_size = 1024 * 1024
def createBigfile(self, site, inner_path="data/optional.any.iso", pieces=10):
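# Each loop iteration writes 10,000 bytes, so the file is pieces * 1,000,000 bytes; the default 10MB file hashes into 10 pieces at the 1MB piece size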
f = site.storage.open(inner_path, "w")
for i in range(pieces * 100):
f.write(("Test%s" % i).ljust(10, "-") * 1000)
f.close()
assert site.content_manager.sign("content.json", self.privatekey)
return inner_path
def testPiecemapCreate(self, site):
inner_path = self.createBigfile(site)
content = site.storage.loadJson("content.json")
assert "data/optional.any.iso" in content["files_optional"]
file_node = content["files_optional"][inner_path]
assert file_node["size"] == 10 * 1000 * 1000
assert file_node["sha512"] == "47a72cde3be80b4a829e7674f72b7c6878cf6a70b0c58c6aa6c17d7e9948daf6"
assert file_node["piecemap"] == inner_path + ".piecemap.msgpack"
piecemap = Msgpack.unpack(site.storage.open(file_node["piecemap"], "rb").read())["optional.any.iso"]
assert len(piecemap["sha512_pieces"]) == 10
assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1]
assert binascii.hexlify(piecemap["sha512_pieces"][0]) == b"a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
def testVerifyPiece(self, site):
inner_path = self.createBigfile(site)
# Verify all 10 pieces
f = site.storage.open(inner_path, "rb")
for i in range(10):
piece = io.BytesIO(f.read(1024 * 1024))
piece.seek(0)
site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
f.close()
# Try to verify piece 0 with piece 1 hash
with pytest.raises(VerifyError) as err:
i = 1
f = site.storage.open(inner_path, "rb")
piece = io.BytesIO(f.read(1024 * 1024))
f.close()
site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
assert "Invalid hash" in str(err.value)
def testSparseFile(self, site):
inner_path = "sparsefile"
# Create a 100MB sparse file
site.storage.createSparseFile(inner_path, 100 * 1024 * 1024)
# Write to file beginning
s = time.time()
f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024)
time_write_start = time.time() - s
# Write to file end
s = time.time()
f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024)
time_write_end = time.time() - s
# Verify writes
f = site.storage.open(inner_path)
assert f.read(10) == b"hellostart"
f.seek(99 * 1024 * 1024)
assert f.read(8) == b"helloend"
f.close()
site.storage.delete(inner_path)
# Writing to the end should not take much longer than writing to the start
assert time_write_end <= max(0.1, time_write_start * 1.1)
def testRangedFileRequest(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
file_server.sites[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites[site_temp.address] = site_temp
site_temp.connection_server = client
connection = client.getConnection(file_server.ip, 1544)
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size
assert buff.getvalue().startswith(b"Test524") # Correct data
buff.seek(0)
assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash
connection.close()
client.stop()
def testRangedFileDownload(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Make sure the file and the piecemap are in the optional hashfield
file_info = site.content_manager.getFileInfo(inner_path)
assert site.content_manager.hashfield.hasHash(file_info["sha512"])
piecemap_hash = site.content_manager.getFileInfo(file_info["piecemap"])["sha512"]
assert site.content_manager.hashfield.hasHash(piecemap_hash)
# Init client server
client = ConnectionServer(file_server.ip, 1545)
site_temp.connection_server = client
peer_client = site_temp.addPeer(file_server.ip, 1544)
# Download site
site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
assert not bad_files
# client_piecefield = peer_client.piecefields[file_info["sha512"]].tostring()
# assert client_piecefield == "1" * 10
# Download the blocks at the 5MB and 9MB offsets
site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024))
# Verify the first block was not downloaded
f = site_temp.storage.open(inner_path)
assert f.read(10) == b"\0" * 10
# Verify the requested blocks were downloaded
f.seek(5 * 1024 * 1024)
assert f.read(7) == b"Test524"
f.seek(9 * 1024 * 1024)
assert f.read(7) == b"943---T"
# Verify hashfield
assert set(site_temp.content_manager.hashfield) == set([18343, 43727]) # 18343: data/optional.any.iso, 43727: data/optional.any.iso.piecemap.msgpack
def testOpenBigfile(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = ConnectionServer(file_server.ip, 1545)
site_temp.connection_server = client
site_temp.addPeer(file_server.ip, 1544)
# Download site
site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
# Open virtual file
assert not site_temp.storage.isFile(inner_path)
with site_temp.storage.openBigfile(inner_path) as f:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == b"Test524"
f.seek(9 * 1024 * 1024)
assert f.read(7) == b"943---T"
assert len(requests) == 4 # 1x piecemap + 1x getPiecefields + 2x for pieces
assert set(site_temp.content_manager.hashfield) == set([18343, 43727])
assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
assert f.sha512 in site_temp.getSettingsCache()["piecefields"]
# Test requesting already downloaded
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == b"Test524"
assert len(requests) == 0
# Test requesting multi-block overflow reads
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024) # We already have this block
data = f.read(1024 * 1024 * 3) # The read overflows into the next two blocks
assert data.startswith(b"Test524")
assert data.endswith(b"Test838-")
assert b"\0" not in data # No null bytes allowed
assert len(requests) == 2 # Two block download
# Test out of range request
f.seek(5 * 1024 * 1024)
data = f.read(1024 * 1024 * 30)
assert len(data) == 10 * 1000 * 1000 - (5 * 1024 * 1024)
f.seek(30 * 1024 * 1024)
data = f.read(1024 * 1024 * 30)
assert len(data) == 0
@pytest.mark.parametrize("piecefield_obj", [BigfilePiecefield, BigfilePiecefieldPacked])
def testPiecefield(self, piecefield_obj, site):
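# Both implementations expose the same frombytes/tobytes/pack/unpack interface; the Packed variant is assumed to keep a more compact (bit-packed) in-memory form at some CPU cost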
testdatas = [
b"\x01" * 100 + b"\x00" * 900 + b"\x01" * 4000 + b"\x00" * 4999 + b"\x01",
b"\x00\x01\x00\x01\x00\x01" * 10 + b"\x00\x01" * 90 + b"\x01\x00" * 400 + b"\x00" * 4999,
b"\x01" * 10000,
b"\x00" * 10000
]
for testdata in testdatas:
piecefield = piecefield_obj()
piecefield.frombytes(testdata)
assert piecefield.tobytes() == testdata
assert piecefield[0] == testdata[0]
assert piecefield[100] == testdata[100]
assert piecefield[1000] == testdata[1000]
assert piecefield[len(testdata) - 1] == testdata[len(testdata) - 1]
packed = piecefield.pack()
piecefield_new = piecefield_obj()
piecefield_new.unpack(packed)
assert piecefield.tobytes() == piecefield_new.tobytes()
assert piecefield_new.tobytes() == testdata
def testFileGet(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
site_temp.connection_server = FileServer(file_server.ip, 1545)
site_temp.connection_server.sites[site_temp.address] = site_temp
site_temp.addPeer(file_server.ip, 1544)
# Download site
site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
# Download second block
with site_temp.storage.openBigfile(inner_path) as f:
f.seek(1024 * 1024)
assert f.read(1024)[0:1] != b"\0"
# Make sure the first block was not downloaded
with site_temp.storage.open(inner_path) as f:
assert f.read(1024)[0:1] == b"\0"
peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
# Should fail for the first block request (peer2 has not downloaded it)
assert not peer2.getFile(site.address, "%s|0-%s" % (inner_path, 1024 * 1024 * 1))
# Should succeed for the second block request
assert peer2.getFile(site.address, "%s|%s-%s" % (inner_path, 1024 * 1024 * 1, 1024 * 1024 * 2))
def benchmarkPeerMemory(self, site, file_server):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
import psutil, os
meminfo = psutil.Process(os.getpid()).memory_info
mem_s = meminfo()[0]
s = time.time()
for i in range(25000):
site.addPeer(file_server.ip, i)
print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024)) # 0.082s MEM: + 6800KB
print(list(site.peers.values())[0].piecefields)
def testUpdatePiecefield(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
server1 = file_server
server1.sites[site.address] = site
server2 = FileServer(file_server.ip, 1545)
server2.sites[site_temp.address] = site_temp
site_temp.connection_server = server2
# Add file_server as peer to client
server2_peer1 = site_temp.addPeer(file_server.ip, 1544)
# Testing piecefield sync
assert len(server2_peer1.piecefields) == 0
assert server2_peer1.updatePiecefields() # Query piecefields from peer
assert len(server2_peer1.piecefields) > 0
def testWorkerManagerPiecefieldDeny(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
server1 = file_server
server1.sites[site.address] = site
server2 = FileServer(file_server.ip, 1545)
server2.sites[site_temp.address] = site_temp
site_temp.connection_server = server2
# Add file_server as peer to client
server2_peer1 = site_temp.addPeer(file_server.ip, 1544) # Working
site_temp.downloadContent("content.json", download_files=False)
site_temp.needFile("data/optional.any.iso.piecemap.msgpack")
# Add fake peers with optional files downloaded
for i in range(5):
fake_peer = site_temp.addPeer("127.0.1.%s" % i, 1544)
fake_peer.hashfield = site.content_manager.hashfield
fake_peer.has_hashfield = True
with Spy.Spy(WorkerManager, "addWorker") as requests:
site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
site_temp.needFile("%s|%s-%s" % (inner_path, 6 * 1024 * 1024, 7 * 1024 * 1024))
# It should only request parts from server2_peer1, as the other peers do not have the requested parts in their piecefields
assert len([request[1] for request in requests if request[1] != server2_peer1]) == 0
def testWorkerManagerPiecefieldDownload(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
server1 = file_server
server1.sites[site.address] = site
server2 = FileServer(file_server.ip, 1545)
server2.sites[site_temp.address] = site_temp
site_temp.connection_server = server2
sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]
# Create 10 fake peers, one for each piece
for i in range(10):
peer = Peer(file_server.ip, 1544, site_temp, server2)
peer.piecefields[sha512][i] = b"\x01"
peer.updateHashfield = mock.MagicMock(return_value=False)
peer.updatePiecefields = mock.MagicMock(return_value=False)
peer.findHashIds = mock.MagicMock(return_value={"nope": []})
peer.hashfield = site.content_manager.hashfield
peer.has_hashfield = True
peer.key = "Peer:%s" % i
site_temp.peers["Peer:%s" % i] = peer
site_temp.downloadContent("content.json", download_files=False)
site_temp.needFile("data/optional.any.iso.piecemap.msgpack")
with Spy.Spy(Peer, "getFile") as requests:
for i in range(10):
site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024, (i + 1) * 1024 * 1024))
assert len(requests) == 10
for i in range(10):
assert requests[i][0] == site_temp.peers["Peer:%s" % i] # Every part should be requested from the peer that owns that piece
def testDownloadStats(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = ConnectionServer(file_server.ip, 1545)
site_temp.connection_server = client
site_temp.addPeer(file_server.ip, 1544)
# Download site
site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
# Open virtual file
assert not site_temp.storage.isFile(inner_path)
# Check size before downloads
assert site_temp.settings["size"] < 10 * 1024 * 1024
assert site_temp.settings["optional_downloaded"] == 0
size_piecemap = site_temp.content_manager.getFileInfo(inner_path + ".piecemap.msgpack")["size"]
size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"]
with site_temp.storage.openBigfile(inner_path) as f:
assert b"\0" not in f.read(1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
with site_temp.storage.openBigfile(inner_path) as f:
# Don't count twice
assert b"\0" not in f.read(1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
# Add second block
assert b"\0" not in f.read(1024 * 1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
def testPrebuffer(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = ConnectionServer(file_server.ip, 1545)
site_temp.connection_server = client
site_temp.addPeer(file_server.ip, 1544)
# Download site
site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
# Open virtual file
assert not site_temp.storage.isFile(inner_path)
with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == b"Test524"
# assert len(requests) == 3 # 1x piecemap + 1x getpiecefield + 1x for pieces
assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2
time.sleep(0.5) # Wait for the prebuffer download
sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]
assert site_temp.storage.piecefields[sha512].tostring() == "0000011100"
# No prebuffer beyond end of the file
f.seek(9 * 1024 * 1024)
assert b"\0" not in f.read(7)
assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0
def testDownloadAllPieces(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = ConnectionServer(file_server.ip, 1545)
site_temp.connection_server = client
site_temp.addPeer(file_server.ip, 1544)
# Download site
site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
# Open virtual file
assert not site_temp.storage.isFile(inner_path)
with Spy.Spy(FileRequest, "route") as requests:
site_temp.needFile("%s|all" % inner_path)
assert len(requests) == 12 # piecemap.msgpack, getPiecefields, 10 x piece
# Don't re-download pieces we already have
with Spy.Spy(FileRequest, "route") as requests:
site_temp.needFile("%s|all" % inner_path)
assert len(requests) == 0
def testFileSize(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = ConnectionServer(file_server.ip, 1545)
site_temp.connection_server = client
site_temp.addPeer(file_server.ip, 1544)
# Download site
site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
# Open virtual file
assert not site_temp.storage.isFile(inner_path)
# Download first block
site_temp.needFile("%s|%s-%s" % (inner_path, 0 * 1024 * 1024, 1 * 1024 * 1024))
assert site_temp.storage.getSize(inner_path) < 1000 * 1000 * 10 # Size on the disk should be smaller than the real size
site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024))
assert site_temp.storage.getSize(inner_path) == site.storage.getSize(inner_path)
def testFileRename(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
site_temp.connection_server = FileServer(file_server.ip, 1545)
site_temp.connection_server.sites[site_temp.address] = site_temp
site_temp.addPeer(file_server.ip, 1544)
# Download site
site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
with Spy.Spy(FileRequest, "route") as requests:
site_temp.needFile("%s|%s-%s" % (inner_path, 0, 1 * self.piece_size))
assert len([req for req in requests if req[1] == "streamFile"]) == 2 # 1 piece + piecemap
# Rename the file
inner_path_new = inner_path.replace(".iso", "-new.iso")
site.storage.rename(inner_path, inner_path_new)
site.storage.delete("data/optional.any.iso.piecemap.msgpack")
assert site.content_manager.sign("content.json", self.privatekey, remove_missing_optional=True)
files_optional = site.content_manager.contents["content.json"]["files_optional"].keys()
assert "data/optional.any-new.iso.piecemap.msgpack" in files_optional
assert "data/optional.any.iso.piecemap.msgpack" not in files_optional
assert "data/optional.any.iso" not in files_optional
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10) # Wait for download
assert len([req[1] for req in requests if req[1] == "streamFile"]) == 0
with site_temp.storage.openBigfile(inner_path_new, prebuffer=0) as f:
f.read(1024)
# First piece already downloaded
assert [req for req in requests if req[1] == "streamFile"] == []
# Second piece needs to be downloaded + changed piecemap
f.seek(self.piece_size)
f.read(1024)
assert [req[3]["inner_path"] for req in requests if req[1] == "streamFile"] == [inner_path_new + ".piecemap.msgpack", inner_path_new]
@pytest.mark.parametrize("size", [1024 * 3, 1024 * 1024 * 3, 1024 * 1024 * 30])
def testNullFileRead(self, file_server, site, site_temp, size):
inner_path = "data/optional.iso"
f = site.storage.open(inner_path, "w")
f.write("\0" * size)
f.close()
assert site.content_manager.sign("content.json", self.privatekey)
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
site_temp.connection_server = FileServer(file_server.ip, 1545)
site_temp.connection_server.sites[site_temp.address] = site_temp
site_temp.addPeer(file_server.ip, 1544)
# Download site
site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
if "piecemap" in site.content_manager.getFileInfo(inner_path): # Bigfile
site_temp.needFile(inner_path + "|all")
else:
site_temp.needFile(inner_path)
assert site_temp.storage.getSize(inner_path) == size

View File

@@ -1 +0,0 @@
from src.Test.conftest import *

View File

@@ -1,5 +0,0 @@
[pytest]
python_files = Test*.py
addopts = -rsxX -v --durations=6
markers =
webtest: mark a test as a webtest.

View File

@@ -1,2 +0,0 @@
from . import BigfilePlugin
from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked

View File

@@ -1,181 +0,0 @@
import time
import sys
import collections
import itertools
import logging
import gevent
from util import helper
from Config import config
class ChartCollector(object):
def __init__(self, db):
self.db = db
if config.action == "main":
gevent.spawn_later(60 * 3, self.collector)
self.log = logging.getLogger("ChartCollector")
self.last_values = collections.defaultdict(dict)
def setInitialLastValues(self, sites):
# Recover the last values of per-site bytes received/sent
for site in sites:
self.last_values["site:" + site.address]["site_bytes_recv"] = site.settings.get("bytes_recv", 0)
self.last_values["site:" + site.address]["site_bytes_sent"] = site.settings.get("bytes_sent", 0)
def getCollectors(self):
collectors = {}
import main
file_server = main.file_server
sites = file_server.sites
if not sites:
return collectors
content_db = list(sites.values())[0].content_manager.contents.db
# Connection stats
collectors["connection"] = lambda: len(file_server.connections)
collectors["connection_in"] = (
lambda: len([1 for connection in file_server.connections if connection.type == "in"])
)
collectors["connection_onion"] = (
lambda: len([1 for connection in file_server.connections if connection.ip.endswith(".onion")])
)
collectors["connection_ping_avg"] = (
lambda: round(1000 * helper.avg(
[connection.last_ping_delay for connection in file_server.connections if connection.last_ping_delay]
))
)
collectors["connection_ping_min"] = (
lambda: round(1000 * min(
[connection.last_ping_delay for connection in file_server.connections if connection.last_ping_delay]
))
)
collectors["connection_rev_avg"] = (
lambda: helper.avg(
[connection.handshake["rev"] for connection in file_server.connections if connection.handshake]
)
)
# Request stats
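# Keys ending in "|change" are recorded as deltas relative to the previous sample (see collectDatas)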
collectors["file_bytes_recv|change"] = lambda: file_server.bytes_recv
collectors["file_bytes_sent|change"] = lambda: file_server.bytes_sent
collectors["request_num_recv|change"] = lambda: file_server.num_recv
collectors["request_num_sent|change"] = lambda: file_server.num_sent
# Limit
collectors["optional_limit"] = lambda: content_db.getOptionalLimitBytes()
collectors["optional_used"] = lambda: content_db.getOptionalUsedBytes()
collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()])
# Peers
collectors["peer"] = lambda peers: len(peers)
collectors["peer_onion"] = lambda peers: len([True for peer in peers if ".onion" in peer])
# Size
collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()])
collectors["size_optional"] = lambda: sum([site.settings.get("size_optional", 0) for site in sites.values()])
collectors["content"] = lambda: sum([len(site.content_manager.contents) for site in sites.values()])
return collectors
def getSiteCollectors(self):
site_collectors = {}
# Size
site_collectors["site_size"] = lambda site: site.settings.get("size", 0)
site_collectors["site_size_optional"] = lambda site: site.settings.get("size_optional", 0)
site_collectors["site_optional_downloaded"] = lambda site: site.settings.get("optional_downloaded", 0)
site_collectors["site_content"] = lambda site: len(site.content_manager.contents)
# Data transfer
site_collectors["site_bytes_recv|change"] = lambda site: site.settings.get("bytes_recv", 0)
site_collectors["site_bytes_sent|change"] = lambda site: site.settings.get("bytes_sent", 0)
# Peers
site_collectors["site_peer"] = lambda site: len(site.peers)
site_collectors["site_peer_onion"] = lambda site: len(
[True for peer in site.peers.values() if peer.ip.endswith(".onion")]
)
site_collectors["site_peer_connected"] = lambda site: len([True for peer in site.peers.values() if peer.connection])
return site_collectors
def getUniquePeers(self):
import main
sites = main.file_server.sites
return set(itertools.chain.from_iterable(
[site.peers.keys() for site in sites.values()]
))
def collectDatas(self, collectors, last_values, site=None):
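# Global collectors take no argument, "peer*" collectors receive the deduplicated peer set, and site collectors receive the site object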
if site is None:
peers = self.getUniquePeers()
datas = {}
for key, collector in collectors.items():
try:
if site:
value = collector(site)
elif key.startswith("peer"):
value = collector(peers)
else:
value = collector()
except ValueError:
value = None
except Exception as err:
self.log.info("Collector %s error: %s" % (key, err))
value = None
if "|change" in key: # Store changes relative to last value
key = key.replace("|change", "")
last_value = last_values.get(key, 0)
last_values[key] = value
value = value - last_value
if value is None:
datas[key] = None
else:
datas[key] = round(value, 3)
return datas
def collectGlobal(self, collectors, last_values):
now = int(time.time())
s = time.time()
datas = self.collectDatas(collectors, last_values["global"])
values = []
for key, value in datas.items():
values.append((self.db.getTypeId(key), value, now))
self.log.debug("Global collectors done in %.3fs" % (time.time() - s))
s = time.time()
cur = self.db.getCursor()
cur.executemany("INSERT INTO data (type_id, value, date_added) VALUES (?, ?, ?)", values)
self.log.debug("Global collectors inserted in %.3fs" % (time.time() - s))
def collectSites(self, sites, collectors, last_values):
now = int(time.time())
s = time.time()
values = []
for address, site in list(sites.items()):
site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site)
for key, value in site_datas.items():
values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now))
time.sleep(0.001)
self.log.debug("Site collections done in %.3fs" % (time.time() - s))
s = time.time()
cur = self.db.getCursor()
cur.executemany("INSERT INTO data (type_id, site_id, value, date_added) VALUES (?, ?, ?, ?)", values)
self.log.debug("Site collectors inserted in %.3fs" % (time.time() - s))
def collector(self):
collectors = self.getCollectors()
site_collectors = self.getSiteCollectors()
import main
sites = main.file_server.sites
i = 0
while 1:
self.collectGlobal(collectors, self.last_values)
if i % 12 == 0: # Only collect site data every hour (12 x 5 minutes)
self.collectSites(sites, site_collectors, self.last_values)
time.sleep(60 * 5)
i += 1

View File

@@ -1,133 +0,0 @@
from Config import config
from Db.Db import Db
import time
class ChartDb(Db):
def __init__(self):
self.version = 2
super(ChartDb, self).__init__(self.getSchema(), "%s/chart.db" % config.data_dir)
self.foreign_keys = True
self.checkTables()
self.sites = self.loadSites()
self.types = self.loadTypes()
def getSchema(self):
schema = {}
schema["db_name"] = "Chart"
schema["tables"] = {}
schema["tables"]["data"] = {
"cols": [
["data_id", "INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE"],
["type_id", "INTEGER NOT NULL"],
["site_id", "INTEGER"],
["value", "INTEGER"],
["date_added", "DATETIME DEFAULT (CURRENT_TIMESTAMP)"]
],
"indexes": [
"CREATE INDEX site_id ON data (site_id)",
"CREATE INDEX date_added ON data (date_added)"
],
"schema_changed": 2
}
schema["tables"]["type"] = {
"cols": [
["type_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"],
["name", "TEXT"]
],
"schema_changed": 1
}
schema["tables"]["site"] = {
"cols": [
["site_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"],
["address", "TEXT"]
],
"schema_changed": 1
}
return schema
def getTypeId(self, name):
if name not in self.types:
res = self.execute("INSERT INTO type ?", {"name": name})
self.types[name] = res.lastrowid
return self.types[name]
def getSiteId(self, address):
if address not in self.sites:
res = self.execute("INSERT INTO site ?", {"address": address})
self.sites[address] = res.lastrowid
return self.sites[address]
def loadSites(self):
sites = {}
for row in self.execute("SELECT * FROM site"):
sites[row["address"]] = row["site_id"]
return sites
def loadTypes(self):
types = {}
for row in self.execute("SELECT * FROM type"):
types[row["name"]] = row["type_id"]
return types
def deleteSite(self, address):
if address in self.sites:
site_id = self.sites[address]
del self.sites[address]
self.execute("DELETE FROM site WHERE ?", {"site_id": site_id})
self.execute("DELETE FROM data WHERE ?", {"site_id": site_id})
def archive(self):
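# Walk back week by week, collapsing raw samples into hourly aggregates, until a week yields nothing left to merge; then trim old rows and VACUUM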
week_back = 1
while 1:
s = time.time()
date_added_from = time.time() - 60 * 60 * 24 * 7 * (week_back + 1)
date_added_to = date_added_from + 60 * 60 * 24 * 7
res = self.execute("""
SELECT
MAX(date_added) AS date_added,
SUM(value) AS value,
GROUP_CONCAT(data_id) AS data_ids,
type_id,
site_id,
COUNT(*) AS num
FROM data
WHERE
site_id IS NULL AND
date_added > :date_added_from AND
date_added < :date_added_to
GROUP BY strftime('%Y-%m-%d %H', date_added, 'unixepoch', 'localtime'), type_id
""", {"date_added_from": date_added_from, "date_added_to": date_added_to})
num_archived = 0
cur = self.getCursor()
for row in res:
if row["num"] == 1:
continue
cur.execute("INSERT INTO data ?", {
"type_id": row["type_id"],
"site_id": row["site_id"],
"value": row["value"],
"date_added": row["date_added"]
})
cur.execute("DELETE FROM data WHERE data_id IN (%s)" % row["data_ids"])
num_archived += row["num"]
self.log.debug("Archived %s data from %s weeks ago in %.3fs" % (num_archived, week_back, time.time() - s))
week_back += 1
time.sleep(0.1)
if num_archived == 0:
break
# Only keep 6 months of global stats
self.execute(
"DELETE FROM data WHERE site_id IS NULL AND date_added < :date_added_limit",
{"date_added_limit": time.time() - 60 * 60 * 24 * 30 * 6 }
)
# Only keep 1 month of site stats
self.execute(
"DELETE FROM data WHERE site_id IS NOT NULL AND date_added < :date_added_limit",
{"date_added_limit": time.time() - 60 * 60 * 24 * 30 }
)
if week_back > 1:
self.execute("VACUUM")

View File

@@ -1,57 +0,0 @@
import time
import itertools
import gevent
from Config import config
from util import helper
from util.Flag import flag
from Plugin import PluginManager
from .ChartDb import ChartDb
from .ChartCollector import ChartCollector
if "db" not in locals().keys(): # Share on reloads
db = ChartDb()
gevent.spawn_later(10 * 60, db.archive)
helper.timer(60 * 60 * 6, db.archive)
collector = ChartCollector(db)
@PluginManager.registerTo("SiteManager")
class SiteManagerPlugin(object):
def load(self, *args, **kwargs):
back = super(SiteManagerPlugin, self).load(*args, **kwargs)
collector.setInitialLastValues(self.sites.values())
return back
def delete(self, address, *args, **kwargs):
db.deleteSite(address)
return super(SiteManagerPlugin, self).delete(address, *args, **kwargs)
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
@flag.admin
def actionChartDbQuery(self, to, query, params=None):
if config.debug or config.verbose:
s = time.time()
rows = []
try:
if not query.strip().upper().startswith("SELECT"):
raise Exception("Only SELECT query supported")
res = db.execute(query, params)
except Exception as err: # Return the error to the client
self.log.error("ChartDbQuery error: %s" % err)
return {"error": str(err)}
# Convert result to dict
for row in res:
rows.append(dict(row))
if config.verbose and time.time() - s > 0.1: # Log slow query
self.log.debug("Slow query: %s (%.3fs)" % (query, time.time() - s))
return rows
@flag.admin
def actionChartGetPeerLocations(self, to):
peers = {}
for site in self.server.sites.values():
peers.update(site.peers)
peer_locations = self.getPeerLocations(peers)
return peer_locations

View File

@@ -1 +0,0 @@
from . import ChartPlugin

View File

@@ -1,5 +0,0 @@
{
"name": "Chart",
"description": "Collect and provide stats of client information.",
"default": "enabled"
}

View File

@@ -1,262 +0,0 @@
import time
import re
import html
import os
from Plugin import PluginManager
from Translate import Translate
from Config import config
from util.Flag import flag
from .ContentFilterStorage import ContentFilterStorage
plugin_dir = os.path.dirname(__file__)
if "_" not in locals():
_ = Translate(plugin_dir + "/languages/")
@PluginManager.registerTo("SiteManager")
class SiteManagerPlugin(object):
def load(self, *args, **kwargs):
global filter_storage
super(SiteManagerPlugin, self).load(*args, **kwargs)
filter_storage = ContentFilterStorage(site_manager=self)
def add(self, address, *args, **kwargs):
should_ignore_block = kwargs.get("ignore_block") or kwargs.get("settings")
if should_ignore_block:
block_details = None
elif filter_storage.isSiteblocked(address):
block_details = filter_storage.getSiteblockDetails(address)
else:
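# Blocklists may also store a hashed form of the address, so a filter list can block a site without publishing its plain address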
address_hashed = filter_storage.getSiteAddressHashed(address)
if filter_storage.isSiteblocked(address_hashed):
block_details = filter_storage.getSiteblockDetails(address_hashed)
else:
block_details = None
if block_details:
raise Exception("Site blocked: %s" % html.escape(block_details.get("reason", "unknown reason")))
else:
return super(SiteManagerPlugin, self).add(address, *args, **kwargs)
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
# Mute
def cbMuteAdd(self, to, auth_address, cert_user_id, reason):
filter_storage.file_content["mutes"][auth_address] = {
"cert_user_id": cert_user_id, "reason": reason, "source": self.site.address, "date_added": time.time()
}
filter_storage.save()
filter_storage.changeDbs(auth_address, "remove")
self.response(to, "ok")
@flag.no_multiuser
def actionMuteAdd(self, to, auth_address, cert_user_id, reason):
if "ADMIN" in self.getPermissions(to):
self.cbMuteAdd(to, auth_address, cert_user_id, reason)
else:
self.cmd(
"confirm",
[_["Hide all content from <b>%s</b>?"] % html.escape(cert_user_id), _["Mute"]],
lambda res: self.cbMuteAdd(to, auth_address, cert_user_id, reason)
)
@flag.no_multiuser
def cbMuteRemove(self, to, auth_address):
del filter_storage.file_content["mutes"][auth_address]
filter_storage.save()
filter_storage.changeDbs(auth_address, "load")
self.response(to, "ok")
@flag.no_multiuser
def actionMuteRemove(self, to, auth_address):
if "ADMIN" in self.getPermissions(to):
self.cbMuteRemove(to, auth_address)
else:
cert_user_id = html.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"])
self.cmd(
"confirm",
[_["Unmute <b>%s</b>?"] % cert_user_id, _["Unmute"]],
lambda res: self.cbMuteRemove(to, auth_address)
)
@flag.admin
def actionMuteList(self, to):
self.response(to, filter_storage.file_content["mutes"])
# Siteblock
@flag.no_multiuser
@flag.admin
def actionSiteblockIgnoreAddSite(self, to, site_address):
if site_address in filter_storage.site_manager.sites:
return {"error": "Site already added"}
else:
if filter_storage.site_manager.need(site_address, ignore_block=True):
return "ok"
else:
return {"error": "Invalid address"}
@flag.no_multiuser
@flag.admin
def actionSiteblockAdd(self, to, site_address, reason=None):
filter_storage.file_content["siteblocks"][site_address] = {"date_added": time.time(), "reason": reason}
filter_storage.save()
self.response(to, "ok")
@flag.no_multiuser
@flag.admin
def actionSiteblockRemove(self, to, site_address):
del filter_storage.file_content["siteblocks"][site_address]
filter_storage.save()
self.response(to, "ok")
@flag.admin
def actionSiteblockList(self, to):
self.response(to, filter_storage.file_content["siteblocks"])
@flag.admin
def actionSiteblockGet(self, to, site_address):
if filter_storage.isSiteblocked(site_address):
res = filter_storage.getSiteblockDetails(site_address)
else:
site_address_hashed = filter_storage.getSiteAddressHashed(site_address)
if filter_storage.isSiteblocked(site_address_hashed):
res = filter_storage.getSiteblockDetails(site_address_hashed)
else:
res = {"error": "Site block not found"}
self.response(to, res)
# Include
@flag.no_multiuser
def actionFilterIncludeAdd(self, to, inner_path, description=None, address=None):
if address:
if "ADMIN" not in self.getPermissions(to):
return self.response(to, {"error": "Forbidden: Only ADMIN sites can manage different site include"})
site = self.server.sites[address]
else:
address = self.site.address
site = self.site
if "ADMIN" in self.getPermissions(to):
self.cbFilterIncludeAdd(to, True, address, inner_path, description)
else:
content = site.storage.loadJson(inner_path)
title = _["New shared global content filter: <b>%s</b> (%s sites, %s users)"] % (
html.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
)
self.cmd(
"confirm",
[title, "Add"],
lambda res: self.cbFilterIncludeAdd(to, res, address, inner_path, description)
)
def cbFilterIncludeAdd(self, to, res, address, inner_path, description):
if not res:
self.response(to, res)
return False
filter_storage.includeAdd(address, inner_path, description)
self.response(to, "ok")
@flag.no_multiuser
def actionFilterIncludeRemove(self, to, inner_path, address=None):
if address:
if "ADMIN" not in self.getPermissions(to):
return self.response(to, {"error": "Forbidden: Only ADMIN sites can manage different site include"})
else:
address = self.site.address
key = "%s/%s" % (address, inner_path)
if key not in filter_storage.file_content["includes"]:
self.response(to, {"error": "Include not found"})
filter_storage.includeRemove(address, inner_path)
self.response(to, "ok")
def actionFilterIncludeList(self, to, all_sites=False, filters=False):
if all_sites and "ADMIN" not in self.getPermissions(to):
return self.response(to, {"error": "Forbidden: Only ADMIN sites can list all sites includes"})
back = []
includes = filter_storage.file_content.get("includes", {}).values()
for include in includes:
if not all_sites and include["address"] != self.site.address:
continue
if filters:
include = dict(include) # Don't modify original file_content
include_site = filter_storage.site_manager.get(include["address"])
if not include_site:
continue
content = include_site.storage.loadJson(include["inner_path"])
include["mutes"] = content.get("mutes", {})
include["siteblocks"] = content.get("siteblocks", {})
back.append(include)
self.response(to, back)
@PluginManager.registerTo("SiteStorage")
class SiteStoragePlugin(object):
def updateDbFile(self, inner_path, file=None, cur=None):
if file is not False: # File deletion always allowed
# Search for bitcoin addresses in the file path
matches = re.findall("/(1[A-Za-z0-9]{26,35})/", inner_path)
# Check if any of the addresses are in the mute list
for auth_address in matches:
if filter_storage.isMuted(auth_address):
self.log.debug("Mute match: %s, ignoring %s" % (auth_address, inner_path))
return False
return super(SiteStoragePlugin, self).updateDbFile(inner_path, file=file, cur=cur)
def onUpdated(self, inner_path, file=None):
file_path = "%s/%s" % (self.site.address, inner_path)
if file_path in filter_storage.file_content["includes"]:
self.log.debug("Filter file updated: %s" % inner_path)
filter_storage.includeUpdateAll()
return super(SiteStoragePlugin, self).onUpdated(inner_path, file=file)
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
def actionWrapper(self, path, extra_headers=None):
match = re.match(r"/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
if not match:
return False
address = match.group("address")
if self.server.site_manager.get(address): # Site already exists
return super(UiRequestPlugin, self).actionWrapper(path, extra_headers)
if self.isDomain(address):
address = self.resolveDomain(address)
if address:
address_hashed = filter_storage.getSiteAddressHashed(address)
else:
address_hashed = None
if filter_storage.isSiteblocked(address) or filter_storage.isSiteblocked(address_hashed):
site = self.server.site_manager.get(config.homepage)
if not extra_headers:
extra_headers = {}
script_nonce = self.getScriptNonce()
self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce)
return iter([super(UiRequestPlugin, self).renderWrapper(
site, path, "uimedia/plugins/contentfilter/blocklisted.html?address=" + address,
"Blacklisted site", extra_headers, show_loadingscreen=False, script_nonce=script_nonce
)])
else:
return super(UiRequestPlugin, self).actionWrapper(path, extra_headers)
def actionUiMedia(self, path, *args, **kwargs):
if path.startswith("/uimedia/plugins/contentfilter/"):
file_path = path.replace("/uimedia/plugins/contentfilter/", plugin_dir + "/media/")
return self.actionFile(file_path)
else:
return super(UiRequestPlugin, self).actionUiMedia(path)
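For reference, a standalone sketch of the address extraction used by SiteStoragePlugin.updateDbFile above; the sample path reuses the auth address from the plugin's test suite:

import re

inner_path = "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"
matches = re.findall("/(1[A-Za-z0-9]{26,35})/", inner_path)
print(matches)  # ['1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C']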

View File

@ -1,164 +0,0 @@
import os
import json
import logging
import collections
import time
import hashlib
from Debug import Debug
from Plugin import PluginManager
from Config import config
from util import helper
class ContentFilterStorage(object):
def __init__(self, site_manager):
self.log = logging.getLogger("ContentFilterStorage")
self.file_path = "%s/filters.json" % config.data_dir
self.site_manager = site_manager
self.file_content = self.load()
# Set default values for filters.json
if not self.file_content:
self.file_content = {}
# Site blacklist renamed to site blocks
if "site_blacklist" in self.file_content:
self.file_content["siteblocks"] = self.file_content["site_blacklist"]
del self.file_content["site_blacklist"]
for key in ["mutes", "siteblocks", "includes"]:
if key not in self.file_content:
self.file_content[key] = {}
self.include_filters = collections.defaultdict(set) # Merged list of mutes and blacklists from all includes
self.includeUpdateAll(update_site_dbs=False)
def load(self):
# Rename previously used mutes.json -> filters.json
if os.path.isfile("%s/mutes.json" % config.data_dir):
self.log.info("Renaming mutes.json to filters.json...")
os.rename("%s/mutes.json" % config.data_dir, self.file_path)
if os.path.isfile(self.file_path):
try:
with open(self.file_path) as f:
return json.load(f)
except Exception as err:
self.log.error("Error loading filters.json: %s" % err)
return None
else:
return None
def includeUpdateAll(self, update_site_dbs=True):
s = time.time()
new_include_filters = collections.defaultdict(set)
# Load all include files data into a merged set
for include_path in self.file_content["includes"]:
address, inner_path = include_path.split("/", 1)
try:
content = self.site_manager.get(address).storage.loadJson(inner_path)
except Exception as err:
self.log.warning(
"Error loading include %s: %s" %
(include_path, Debug.formatException(err))
)
continue
for key, val in content.items():
if type(val) is not dict:
continue
new_include_filters[key].update(val.keys())
mutes_added = new_include_filters["mutes"].difference(self.include_filters["mutes"])
mutes_removed = self.include_filters["mutes"].difference(new_include_filters["mutes"])
self.include_filters = new_include_filters
if update_site_dbs:
for auth_address in mutes_added:
self.changeDbs(auth_address, "remove")
for auth_address in mutes_removed:
if not self.isMuted(auth_address):
self.changeDbs(auth_address, "load")
num_mutes = len(self.include_filters["mutes"])
num_siteblocks = len(self.include_filters["siteblocks"])
self.log.debug(
"Loaded %s mutes, %s blocked sites from %s includes in %.3fs" %
(num_mutes, num_siteblocks, len(self.file_content["includes"]), time.time() - s)
)
def includeAdd(self, address, inner_path, description=None):
self.file_content["includes"]["%s/%s" % (address, inner_path)] = {
"date_added": time.time(),
"address": address,
"description": description,
"inner_path": inner_path
}
self.includeUpdateAll()
self.save()
def includeRemove(self, address, inner_path):
del self.file_content["includes"]["%s/%s" % (address, inner_path)]
self.includeUpdateAll()
self.save()
def save(self):
s = time.time()
helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True).encode("utf8"))
self.log.debug("Saved in %.3fs" % (time.time() - s))
def isMuted(self, auth_address):
if auth_address in self.file_content["mutes"] or auth_address in self.include_filters["mutes"]:
return True
else:
return False
def getSiteAddressHashed(self, address):
return "0x" + hashlib.sha256(address.encode("ascii")).hexdigest()
def isSiteblocked(self, address):
if address in self.file_content["siteblocks"] or address in self.include_filters["siteblocks"]:
return True
return False
def getSiteblockDetails(self, address):
details = self.file_content["siteblocks"].get(address)
if not details:
address_sha256 = self.getSiteAddressHashed(address)
details = self.file_content["siteblocks"].get(address_sha256)
if not details:
includes = self.file_content.get("includes", {}).values()
for include in includes:
include_site = self.site_manager.get(include["address"])
if not include_site:
continue
content = include_site.storage.loadJson(include["inner_path"])
details = content.get("siteblocks", {}).get(address)
if details:
details["include"] = include
break
return details
# Search and remove or re-add files of a user
def changeDbs(self, auth_address, action):
self.log.debug("Mute action %s on user %s" % (action, auth_address))
res = list(self.site_manager.list().values())[0].content_manager.contents.db.execute(
"SELECT * FROM content LEFT JOIN site USING (site_id) WHERE inner_path LIKE :inner_path",
{"inner_path": "%%/%s/%%" % auth_address}
)
for row in res:
site = self.site_manager.sites.get(row["address"])
if not site:
continue
dir_inner_path = helper.getDirname(row["inner_path"])
for file_name in site.storage.walk(dir_inner_path):
if action == "remove":
site.storage.onUpdated(dir_inner_path + file_name, False)
else:
site.storage.onUpdated(dir_inner_path + file_name)
site.onFileDone(dir_inner_path + file_name)
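A small sketch of the hashed-address convention implemented by getSiteAddressHashed above, which lets shared blocklists carry entries without revealing the blocked address; the sample address is hypothetical:

import hashlib

def site_address_hashed(address):
    # Same derivation as ContentFilterStorage.getSiteAddressHashed
    return "0x" + hashlib.sha256(address.encode("ascii")).hexdigest()

print(site_address_hashed("1ExampleBlockedSiteAddressXXXX"))  # hypothetical address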

View File

@ -1,82 +0,0 @@
import pytest
from ContentFilter import ContentFilterPlugin
from Site import SiteManager
@pytest.fixture
def filter_storage():
ContentFilterPlugin.filter_storage = ContentFilterPlugin.ContentFilterStorage(SiteManager.site_manager)
return ContentFilterPlugin.filter_storage
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestContentFilter:
def createInclude(self, site):
site.storage.writeJson("filters.json", {
"mutes": {"1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C": {}},
"siteblocks": {site.address: {}}
})
def testIncludeLoad(self, site, filter_storage):
self.createInclude(site)
filter_storage.file_content["includes"]["%s/%s" % (site.address, "filters.json")] = {
"date_added": 1528295893,
}
assert not filter_storage.include_filters["mutes"]
assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
assert not filter_storage.isSiteblocked(site.address)
filter_storage.includeUpdateAll(update_site_dbs=False)
assert len(filter_storage.include_filters["mutes"]) == 1
assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
assert filter_storage.isSiteblocked(site.address)
def testIncludeAdd(self, site, filter_storage):
self.createInclude(site)
query_num_json = "SELECT COUNT(*) AS num FROM json WHERE directory = 'users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C'"
assert not filter_storage.isSiteblocked(site.address)
assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
assert site.storage.query(query_num_json).fetchone()["num"] == 2
# Add include
filter_storage.includeAdd(site.address, "filters.json")
assert filter_storage.isSiteblocked(site.address)
assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
assert site.storage.query(query_num_json).fetchone()["num"] == 0
# Remove include
filter_storage.includeRemove(site.address, "filters.json")
assert not filter_storage.isSiteblocked(site.address)
assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
assert site.storage.query(query_num_json).fetchone()["num"] == 2
def testIncludeChange(self, site, filter_storage):
self.createInclude(site)
filter_storage.includeAdd(site.address, "filters.json")
assert filter_storage.isSiteblocked(site.address)
assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
# Add new blocked site
assert not filter_storage.isSiteblocked("1Hello")
filter_content = site.storage.loadJson("filters.json")
filter_content["siteblocks"]["1Hello"] = {}
site.storage.writeJson("filters.json", filter_content)
assert filter_storage.isSiteblocked("1Hello")
# Add new muted user
query_num_json = "SELECT COUNT(*) AS num FROM json WHERE directory = 'users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q'"
assert not filter_storage.isMuted("1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
assert site.storage.query(query_num_json).fetchone()["num"] == 2
filter_content["mutes"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"] = {}
site.storage.writeJson("filters.json", filter_content)
assert filter_storage.isMuted("1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
assert site.storage.query(query_num_json).fetchone()["num"] == 0

View File

@ -1 +0,0 @@
from src.Test.conftest import *

View File

@ -1,5 +0,0 @@
[pytest]
python_files = Test*.py
addopts = -rsxX -v --durations=6
markers =
webtest: mark a test as a webtest.

View File

@ -1 +0,0 @@
from . import ContentFilterPlugin

View File

@ -1,6 +0,0 @@
{
"Hide all content from <b>%s</b>?": "<b>%s</b> tartalmaniak elrejtése?",
"Mute": "Elnémítás",
"Unmute <b>%s</b>?": "<b>%s</b> tartalmaniak megjelenítése?",
"Unmute": "Némítás visszavonása"
}

View File

@ -1,6 +0,0 @@
{
"Hide all content from <b>%s</b>?": "<b>%s</b> Vuoi nascondere i contenuti di questo utente ?",
"Mute": "Attiva Silenzia",
"Unmute <b>%s</b>?": "<b>%s</b> Vuoi mostrare i contenuti di questo utente ?",
"Unmute": "Disattiva Silenzia"
}

View File

@ -1,6 +0,0 @@
{
"Hide all content from <b>%s</b>?": "<b>%s</b> のコンテンツをすべて隠しますか?",
"Mute": "ミュート",
"Unmute <b>%s</b>?": "<b>%s</b> のミュートを解除しますか?",
"Unmute": "ミュート解除"
}

View File

@ -1,6 +0,0 @@
{
"Hide all content from <b>%s</b>?": "<b>%s</b> Ocultar todo o conteúdo de ?",
"Mute": "Ativar o Silêncio",
"Unmute <b>%s</b>?": "<b>%s</b> Você quer mostrar o conteúdo deste usuário ?",
"Unmute": "Desligar o silêncio"
}

View File

@ -1,6 +0,0 @@
{
"Hide all content from <b>%s</b>?": "屏蔽 <b>%s</b> 的所有內容?",
"Mute": "屏蔽",
"Unmute <b>%s</b>?": "對 <b>%s</b> 解除屏蔽?",
"Unmute": "解除屏蔽"
}

View File

@ -1,6 +0,0 @@
{
"Hide all content from <b>%s</b>?": "屏蔽 <b>%s</b> 的所有内容?",
"Mute": "屏蔽",
"Unmute <b>%s</b>?": "对 <b>%s</b> 解除屏蔽?",
"Unmute": "解除屏蔽"
}

View File

@ -1,89 +0,0 @@
<html>
<body>
<style>
.content { line-height: 24px; font-family: monospace; font-size: 14px; color: #636363; text-transform: uppercase; top: 38%; position: relative; text-align: center; perspective: 1000px }
.content h1, .content h2 { font-weight: normal; letter-spacing: 1px; }
.content h2 { font-size: 15px; }
.content #details {
text-align: left; display: inline-block; width: 350px; background-color: white; padding: 17px 27px; border-radius: 0px;
box-shadow: 0px 2px 7px -1px #d8d8d8; text-transform: none; margin: 15px; transform: scale(0) rotateX(90deg); transition: all 0.6s cubic-bezier(0.785, 0.135, 0.15, 0.86);
}
.content #details #added { font-size: 12px; text-align: right; color: #a9a9a9; }
#button { transition: all 1s cubic-bezier(0.075, 0.82, 0.165, 1); opacity: 0; transform: translateY(50px); transition-delay: 0.5s }
.button {
padding: 8px 20px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; border-radius: 2px;
text-decoration: none; transition: all 0.5s; background-position: left center; display: inline-block; margin-top: 10px; color: black;
}
.button:hover { background-color: #FFF400; border-bottom: 2px solid #4D4D4C; transition: none; }
.button:active { position: relative; top: 1px; }
.button:focus { outline: none; }
.textbutton { color: #999; margin-top: 25px; display: inline-block; text-transform: none; font-family: Arial, Helvetica; text-decoration: none; padding: 5px 15px; }
.textbutton-main { background-color: #FFF; color: #333; border-radius: 5px; }
.textbutton:hover { text-decoration: underline; color: #333; transition: none !important; }
.textbutton:active { background-color: #fafbfc; }
</style>
<div class="content">
<h1>Site blocked</h1>
<h2>This site is on your blocklist:</h2>
<div id="details">
<div id="reason">Too much image</div>
<div id="added">on 2015-01-25 12:32:11</div>
</div>
<div id="buttons">
<a href="/" class="textbutton textbutton-main" id="back">Back to homepage</a>
<a href="#Visit+Site" class="textbutton" id="visit">Remove from blocklist and visit the site</a>
</div>
</div>
<script type="text/javascript" src="js/ZeroFrame.js"></script>
<script>
function buf2hex(buffer) {
return Array.prototype.map.call(new Uint8Array(buffer), x => ('00' + x.toString(16)).slice(-2)).join('');
}
async function sha256hex(s) {
var buff = new TextEncoder("utf-8").encode(s)
return "0x" + buf2hex(await crypto.subtle.digest("SHA-256", buff))
}
class Page extends ZeroFrame {
onOpenWebsocket () {
this.cmd("wrapperSetTitle", "Visiting a blocked site - ZeroNet")
this.cmd("siteInfo", {}, (site_info) => {
this.site_info = site_info
})
var address = document.location.search.match(/address=(.*?)[&\?]/)[1]
this.updateSiteblockDetails(address)
}
async updateSiteblockDetails(address) {
var block = await this.cmdp("siteblockGet", address)
var reason = block["reason"]
if (!reason) reason = "Unknown reason"
var date = new Date(block["date_added"] * 1000)
document.getElementById("reason").innerText = reason
document.getElementById("added").innerText = "at " + date.toLocaleDateString() + " " + date.toLocaleTimeString()
if (block["include"]) {
document.getElementById("added").innerText += " from a shared blocklist"
document.getElementById("visit").innerText = "Ignore blocking and visit the site"
}
document.getElementById("details").style.transform = "scale(1) rotateX(0deg)"
document.getElementById("visit").style.transform = "translateY(0)"
document.getElementById("visit").style.opacity = "1"
document.getElementById("visit").onclick = () => {
if (block["include"])
this.cmd("siteblockIgnoreAddSite", address, () => { this.cmd("wrapperReload") })
else
this.cmd("siteblockRemove", address, () => { this.cmd("wrapperReload") })
}
}
}
page = new Page()
</script>
</body>
</html>

View File

@ -1,119 +0,0 @@
// Version 1.0.0 - Initial release
// Version 1.1.0 (2017-08-02) - Added cmdp function that returns promise instead of using callback
// Version 1.2.0 (2017-08-02) - Added Ajax monkey patch to emulate XMLHttpRequest over ZeroFrame API
const CMD_INNER_READY = 'innerReady'
const CMD_RESPONSE = 'response'
const CMD_WRAPPER_READY = 'wrapperReady'
const CMD_PING = 'ping'
const CMD_PONG = 'pong'
const CMD_WRAPPER_OPENED_WEBSOCKET = 'wrapperOpenedWebsocket'
const CMD_WRAPPER_CLOSE_WEBSOCKET = 'wrapperClosedWebsocket'
class ZeroFrame {
constructor(url) {
this.url = url
this.waiting_cb = {}
this.wrapper_nonce = document.location.href.replace(/.*wrapper_nonce=([A-Za-z0-9]+).*/, "$1")
this.connect()
this.next_message_id = 1
this.init()
}
init() {
return this
}
connect() {
this.target = window.parent
window.addEventListener('message', e => this.onMessage(e), false)
this.cmd(CMD_INNER_READY)
}
onMessage(e) {
let message = e.data
let cmd = message.cmd
if (cmd === CMD_RESPONSE) {
if (this.waiting_cb[message.to] !== undefined) {
this.waiting_cb[message.to](message.result)
}
else {
this.log("Websocket callback not found:", message)
}
} else if (cmd === CMD_WRAPPER_READY) {
this.cmd(CMD_INNER_READY)
} else if (cmd === CMD_PING) {
this.response(message.id, CMD_PONG)
} else if (cmd === CMD_WRAPPER_OPENED_WEBSOCKET) {
this.onOpenWebsocket()
} else if (cmd === CMD_WRAPPER_CLOSE_WEBSOCKET) {
this.onCloseWebsocket()
} else {
this.onRequest(cmd, message)
}
}
onRequest(cmd, message) {
this.log("Unknown request", message)
}
response(to, result) {
this.send({
cmd: CMD_RESPONSE,
to: to,
result: result
})
}
cmd(cmd, params={}, cb=null) {
this.send({
cmd: cmd,
params: params
}, cb)
}
cmdp(cmd, params={}) {
return new Promise((resolve, reject) => {
this.cmd(cmd, params, (res) => {
if (res && res.error) {
reject(res.error)
} else {
resolve(res)
}
})
})
}
send(message, cb=null) {
message.wrapper_nonce = this.wrapper_nonce
message.id = this.next_message_id
this.next_message_id++
this.target.postMessage(message, '*')
if (cb) {
this.waiting_cb[message.id] = cb
}
}
log(...args) {
console.log.apply(console, ['[ZeroFrame]'].concat(args))
}
onOpenWebsocket() {
this.log('Websocket open')
}
onCloseWebsocket() {
this.log('Websocket close')
}
monkeyPatchAjax() {
var page = this
XMLHttpRequest.prototype.realOpen = XMLHttpRequest.prototype.open
this.cmd("wrapperGetAjaxKey", [], (res) => { this.ajax_key = res })
var newOpen = function (method, url, async) {
url += "?ajax_key=" + page.ajax_key
return this.realOpen(method, url, async)
}
XMLHttpRequest.prototype.open = newOpen
}
}

View File

@ -1,5 +0,0 @@
{
"name": "ContentFilter",
"description": "Manage site and user block list.",
"default": "enabled"
}

View File

@ -1,139 +0,0 @@
import re
import html
import copy
import os
import gevent
from Plugin import PluginManager
from Translate import Translate
plugin_dir = os.path.dirname(__file__)
if "_" not in locals():
_ = Translate(plugin_dir + "/languages/")
def getCorsPath(site, inner_path):
match = re.match("^cors-([A-Za-z0-9]{26,35})/(.*)", inner_path)
if not match:
raise Exception("Invalid cors path: %s" % inner_path)
cors_address = match.group(1)
cors_inner_path = match.group(2)
if not "Cors:%s" % cors_address in site.settings["permissions"]:
raise Exception("This site has no permission to access site %s" % cors_address)
return cors_address, cors_inner_path
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
def hasSitePermission(self, address, cmd=None):
if super(UiWebsocketPlugin, self).hasSitePermission(address, cmd=cmd):
return True
allowed_commands = [
"fileGet", "fileList", "dirList", "fileRules", "optionalFileInfo",
"fileQuery", "dbQuery", "userGetSettings", "siteInfo"
]
if not "Cors:%s" % address in self.site.settings["permissions"] or cmd not in allowed_commands:
return False
else:
return True
# Add cors support for file commands
def corsFuncWrapper(self, func_name, to, inner_path, *args, **kwargs):
if inner_path.startswith("cors-"):
cors_address, cors_inner_path = getCorsPath(self.site, inner_path)
req_self = copy.copy(self)
req_self.site = self.server.sites.get(cors_address) # Change the site to the merged one
if not req_self.site:
return {"error": "No site found"}
func = getattr(super(UiWebsocketPlugin, req_self), func_name)
back = func(to, cors_inner_path, *args, **kwargs)
return back
else:
func = getattr(super(UiWebsocketPlugin, self), func_name)
return func(to, inner_path, *args, **kwargs)
def actionFileGet(self, to, inner_path, *args, **kwargs):
return self.corsFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs)
def actionFileList(self, to, inner_path, *args, **kwargs):
return self.corsFuncWrapper("actionFileList", to, inner_path, *args, **kwargs)
def actionDirList(self, to, inner_path, *args, **kwargs):
return self.corsFuncWrapper("actionDirList", to, inner_path, *args, **kwargs)
def actionFileRules(self, to, inner_path, *args, **kwargs):
return self.corsFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs)
def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs):
return self.corsFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs)
def actionCorsPermission(self, to, address):
if isinstance(address, list):
addresses = address
else:
addresses = [address]
button_title = _["Grant"]
site_names = []
site_addresses = []
for address in addresses:
site = self.server.sites.get(address)
if site:
site_name = site.content_manager.contents.get("content.json", {}).get("title", address)
else:
site_name = address
# If at least one site is not downloaded yet, show "Grant & Add" instead
button_title = _["Grant & Add"]
if not (site and "Cors:" + address in self.permissions):
# No site or no permission
site_names.append(site_name)
site_addresses.append(address)
if len(site_names) == 0:
return "ignored"
self.cmd(
"confirm",
[_["This site requests <b>read</b> permission to: <b>%s</b>"] % ", ".join(map(html.escape, site_names)), button_title],
lambda res: self.cbCorsPermission(to, site_addresses)
)
def cbCorsPermission(self, to, addresses):
# Add permissions
for address in addresses:
permission = "Cors:" + address
if permission not in self.site.settings["permissions"]:
self.site.settings["permissions"].append(permission)
self.site.saveSettings()
self.site.updateWebsocket(permission_added=permission)
self.response(to, "ok")
for address in addresses:
site = self.server.sites.get(address)
if not site:
gevent.spawn(self.server.site_manager.need, address)
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
# Allow loading cross-origin files using /cors-address/file.jpg
def parsePath(self, path):
path_parts = super(UiRequestPlugin, self).parsePath(path)
if "cors-" not in path: # Optimization
return path_parts
site = self.server.sites[path_parts["address"]]
try:
path_parts["address"], path_parts["inner_path"] = getCorsPath(site, path_parts["inner_path"])
except Exception:
return None
return path_parts
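A standalone sketch of the cors path convention getCorsPath parses (/cors-<address>/<inner path>); the address below is hypothetical and the permission check is omitted:

import re

def parse_cors_path(inner_path):
    # Same pattern as getCorsPath, minus the site permission check
    match = re.match("^cors-([A-Za-z0-9]{26,35})/(.*)", inner_path)
    if not match:
        raise ValueError("Invalid cors path: %s" % inner_path)
    return match.group(1), match.group(2)

print(parse_cors_path("cors-1CorsExampleAddressXXXXXXXXXXXX/img/logo.png"))
# -> ('1CorsExampleAddressXXXXXXXXXXXX', 'img/logo.png')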

View File

@ -1 +0,0 @@
from . import CorsPlugin

View File

@ -1,5 +0,0 @@
{
"name": "Cors",
"description": "Cross site resource read.",
"default": "enabled"
}

View File

@ -1,58 +0,0 @@
import hashlib
import base64
import struct
from lib import sslcrypto
from Crypt import Crypt
curve = sslcrypto.ecc.get_curve("secp256k1")
def eciesEncrypt(data, pubkey, ciphername="aes-256-cbc"):
ciphertext, key_e = curve.encrypt(
data,
base64.b64decode(pubkey),
algo=ciphername,
derivation="sha512",
return_aes_key=True
)
return key_e, ciphertext
@Crypt.thread_pool_crypt.wrap
def eciesDecryptMulti(encrypted_datas, privatekey):
texts = [] # Decoded texts
for encrypted_data in encrypted_datas:
try:
text = eciesDecrypt(encrypted_data, privatekey).decode("utf8")
texts.append(text)
except Exception:
texts.append(None)
return texts
def eciesDecrypt(ciphertext, privatekey):
return curve.decrypt(base64.b64decode(ciphertext), curve.wif_to_private(privatekey.encode()), derivation="sha512")
def decodePubkey(pubkey):
i = 0
curve = struct.unpack('!H', pubkey[i:i + 2])[0]
i += 2
tmplen = struct.unpack('!H', pubkey[i:i + 2])[0]
i += 2
pubkey_x = pubkey[i:i + tmplen]
i += tmplen
tmplen = struct.unpack('!H', pubkey[i:i + 2])[0]
i += 2
pubkey_y = pubkey[i:i + tmplen]
i += tmplen
return curve, pubkey_x, pubkey_y, i
def split(encrypted):
iv = encrypted[0:16]
curve, pubkey_x, pubkey_y, i = decodePubkey(encrypted[16:])
ciphertext = encrypted[16 + i:-32]
return iv, ciphertext
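A minimal roundtrip sketch of the helpers above, reusing the keypair from the plugin's own test suite; it assumes this module is importable as CryptMessage and the bundled sslcrypto library is available:

import base64
from CryptMessage import CryptMessage

publickey = "A3HatibU4S6eZfIQhVs2u7GLN5G9wXa9WwlkyYIfwYaj"
privatekey = "5JBiKFYBm94EUdbxtnuLi6cvNcPzcKymCUHBDf2B6aq19vvG3rL"

aes_key, encrypted = CryptMessage.eciesEncrypt(b"hello", publickey)
assert len(aes_key) == 32  # 256-bit AES key derived during encryption
assert CryptMessage.eciesDecrypt(base64.b64encode(encrypted), privatekey) == b"hello"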

View File

@ -1,225 +0,0 @@
import base64
import os
import gevent
from Plugin import PluginManager
from Crypt import CryptBitcoin, CryptHash
from Config import config
import sslcrypto
from . import CryptMessage
curve = sslcrypto.ecc.get_curve("secp256k1")
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
# - Actions -
# Returns user's public key unique to site
# Return: Public key
def actionUserPublickey(self, to, index=0):
self.response(to, self.user.getEncryptPublickey(self.site.address, index))
# Encrypt a text using the given publickey or the user's site-specific publickey
# Return: Encrypted text using base64 encoding
def actionEciesEncrypt(self, to, text, publickey=0, return_aes_key=False):
if type(publickey) is int: # Encrypt using user's publickey
publickey = self.user.getEncryptPublickey(self.site.address, publickey)
aes_key, encrypted = CryptMessage.eciesEncrypt(text.encode("utf8"), publickey)
if return_aes_key:
self.response(to, [base64.b64encode(encrypted).decode("utf8"), base64.b64encode(aes_key).decode("utf8")])
else:
self.response(to, base64.b64encode(encrypted).decode("utf8"))
# Decrypt a text using the given privatekey or the user's site-specific private key
# Return: Decrypted text or list of decrypted texts
def actionEciesDecrypt(self, to, param, privatekey=0):
if type(privatekey) is int: # Decrypt using user's privatekey
privatekey = self.user.getEncryptPrivatekey(self.site.address, privatekey)
if type(param) == list:
encrypted_texts = param
else:
encrypted_texts = [param]
texts = CryptMessage.eciesDecryptMulti(encrypted_texts, privatekey)
if type(param) == list:
self.response(to, texts)
else:
self.response(to, texts[0])
# Encrypt a text using AES
# Return: AES key, Iv, Encrypted text
def actionAesEncrypt(self, to, text, key=None):
if key:
key = base64.b64decode(key)
else:
key = sslcrypto.aes.new_key()
if text:
encrypted, iv = sslcrypto.aes.encrypt(text.encode("utf8"), key)
else:
encrypted, iv = b"", b""
res = [base64.b64encode(item).decode("utf8") for item in [key, iv, encrypted]]
self.response(to, res)
# Decrypt a text using AES
# Return: Decrypted text
def actionAesDecrypt(self, to, *args):
if len(args) == 3: # Single decrypt
encrypted_texts = [(args[0], args[1])]
keys = [args[2]]
else: # Batch decrypt
encrypted_texts, keys = args
texts = [] # Decoded texts
for iv, encrypted_text in encrypted_texts:
encrypted_text = base64.b64decode(encrypted_text)
iv = base64.b64decode(iv)
text = None
for key in keys:
try:
decrypted = sslcrypto.aes.decrypt(encrypted_text, iv, base64.b64decode(key))
if decrypted and decrypted.decode("utf8"): # Valid text decoded
text = decrypted.decode("utf8")
except Exception as err:
pass
texts.append(text)
if len(args) == 3:
self.response(to, texts[0])
else:
self.response(to, texts)
# Sign data using ECDSA
# Return: Signature
def actionEcdsaSign(self, to, data, privatekey=None):
if privatekey is None: # Sign using user's privatekey
privatekey = self.user.getAuthPrivatekey(self.site.address)
self.response(to, CryptBitcoin.sign(data, privatekey))
# Verify data using ECDSA (address is either an address or an array of addresses)
# Return: bool
def actionEcdsaVerify(self, to, data, address, signature):
self.response(to, CryptBitcoin.verify(data, address, signature))
# Gets the publickey of a given privatekey
def actionEccPrivToPub(self, to, privatekey):
self.response(to, curve.private_to_public(curve.wif_to_private(privatekey.encode())))
# Gets the address of a given publickey
def actionEccPubToAddr(self, to, publickey):
self.response(to, curve.public_to_address(bytes.fromhex(publickey)))
@PluginManager.registerTo("User")
class UserPlugin(object):
def getEncryptPrivatekey(self, address, param_index=0):
if param_index < 0 or param_index > 1000:
raise Exception("Param_index out of range")
site_data = self.getSiteData(address)
if site_data.get("cert"): # Different privatekey for different cert provider
index = param_index + self.getAddressAuthIndex(site_data["cert"])
else:
index = param_index
if "encrypt_privatekey_%s" % index not in site_data:
address_index = self.getAddressAuthIndex(address)
crypt_index = address_index + 1000 + index
site_data["encrypt_privatekey_%s" % index] = CryptBitcoin.hdPrivatekey(self.master_seed, crypt_index)
self.log.debug("New encrypt privatekey generated for %s:%s" % (address, index))
return site_data["encrypt_privatekey_%s" % index]
def getEncryptPublickey(self, address, param_index=0):
if param_index < 0 or param_index > 1000:
raise Exception("Param_index out of range")
site_data = self.getSiteData(address)
if site_data.get("cert"): # Different privatekey for different cert provider
index = param_index + self.getAddressAuthIndex(site_data["cert"])
else:
index = param_index
if "encrypt_publickey_%s" % index not in site_data:
privatekey = self.getEncryptPrivatekey(address, param_index).encode()
publickey = curve.private_to_public(curve.wif_to_private(privatekey) + b"\x01")
site_data["encrypt_publickey_%s" % index] = base64.b64encode(publickey).decode("utf8")
return site_data["encrypt_publickey_%s" % index]
@PluginManager.registerTo("Actions")
class ActionsPlugin:
publickey = "A3HatibU4S6eZfIQhVs2u7GLN5G9wXa9WwlkyYIfwYaj"
privatekey = "5JBiKFYBm94EUdbxtnuLi6cvNcPzcKymCUHBDf2B6aq19vvG3rL"
utf8_text = '\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9p'
def getBenchmarkTests(self, online=False):
if hasattr(super(), "getBenchmarkTests"):
tests = super().getBenchmarkTests(online)
else:
tests = []
aes_key, encrypted = CryptMessage.eciesEncrypt(self.utf8_text.encode("utf8"), self.publickey) # Warm-up
tests.extend([
{"func": self.testCryptEciesEncrypt, "kwargs": {}, "num": 100, "time_standard": 1.2},
{"func": self.testCryptEciesDecrypt, "kwargs": {}, "num": 500, "time_standard": 1.3},
{"func": self.testCryptEciesDecryptMulti, "kwargs": {}, "num": 5, "time_standard": 0.68},
{"func": self.testCryptAesEncrypt, "kwargs": {}, "num": 10000, "time_standard": 0.27},
{"func": self.testCryptAesDecrypt, "kwargs": {}, "num": 10000, "time_standard": 0.25}
])
return tests
def testCryptEciesEncrypt(self, num_run=1):
for i in range(num_run):
aes_key, encrypted = CryptMessage.eciesEncrypt(self.utf8_text.encode("utf8"), self.publickey)
assert len(aes_key) == 32
yield "."
def testCryptEciesDecrypt(self, num_run=1):
aes_key, encrypted = CryptMessage.eciesEncrypt(self.utf8_text.encode("utf8"), self.publickey)
for i in range(num_run):
assert len(aes_key) == 32
decrypted = CryptMessage.eciesDecrypt(base64.b64encode(encrypted), self.privatekey)
assert decrypted == self.utf8_text.encode("utf8"), "%s != %s" % (decrypted, self.utf8_text.encode("utf8"))
yield "."
def testCryptEciesDecryptMulti(self, num_run=1):
yield "x 100 (%s threads) " % config.threads_crypt
aes_key, encrypted = CryptMessage.eciesEncrypt(self.utf8_text.encode("utf8"), self.publickey)
threads = []
for i in range(num_run):
assert len(aes_key) == 32
threads.append(gevent.spawn(
CryptMessage.eciesDecryptMulti, [base64.b64encode(encrypted)] * 100, self.privatekey
))
for thread in threads:
res = thread.get()
assert res[0] == self.utf8_text, "%s != %s" % (res[0], self.utf8_text)
assert res[0] == res[-1], "%s != %s" % (res[0], res[-1])
yield "."
gevent.joinall(threads)
def testCryptAesEncrypt(self, num_run=1):
for i in range(num_run):
key = os.urandom(32)
encrypted = sslcrypto.aes.encrypt(self.utf8_text.encode("utf8"), key)
yield "."
def testCryptAesDecrypt(self, num_run=1):
key = os.urandom(32)
encrypted_text, iv = sslcrypto.aes.encrypt(self.utf8_text.encode("utf8"), key)
for i in range(num_run):
decrypted = sslcrypto.aes.decrypt(encrypted_text, iv, key).decode("utf8")
assert decrypted == self.utf8_text
yield "."

View File

@ -1,136 +0,0 @@
import pytest
import base64
from CryptMessage import CryptMessage
@pytest.mark.usefixtures("resetSettings")
class TestCrypt:
publickey = "A3HatibU4S6eZfIQhVs2u7GLN5G9wXa9WwlkyYIfwYaj"
privatekey = "5JBiKFYBm94EUdbxtnuLi6cvNcPzcKymCUHBDf2B6aq19vvG3rL"
utf8_text = '\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9'
ecies_encrypted_text = "R5J1RFIDOzE5bnWopvccmALKACCk/CRcd/KSE9OgExJKASyMbZ57JVSUenL2TpABMmcT+wAgr2UrOqClxpOWvIUwvwwupXnMbRTzthhIJJrTRW3sCJVaYlGEMn9DAcvbflgEkQX/MVVdLV3tWKySs1Vk8sJC/y+4pGYCrZz7vwDNEEERaqU="
@pytest.mark.parametrize("text", [b"hello", '\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9'.encode("utf8")])
@pytest.mark.parametrize("text_repeat", [1, 10, 128, 1024])
def testEncryptEcies(self, text, text_repeat):
text_repeated = text * text_repeat
aes_key, encrypted = CryptMessage.eciesEncrypt(text_repeated, self.publickey)
assert len(aes_key) == 32
# assert len(encrypted) == 134 + int(len(text) / 16) * 16 # Not always true
assert CryptMessage.eciesDecrypt(base64.b64encode(encrypted), self.privatekey) == text_repeated
def testDecryptEcies(self, user):
assert CryptMessage.eciesDecrypt(self.ecies_encrypted_text, self.privatekey) == b"hello"
def testPublickey(self, ui_websocket):
pub = ui_websocket.testAction("UserPublickey", 0)
assert len(pub) == 44 # Compressed, b64 encoded publickey
# Different pubkey for the specified index
assert ui_websocket.testAction("UserPublickey", 1) != ui_websocket.testAction("UserPublickey", 0)
# Same publickey for same index
assert ui_websocket.testAction("UserPublickey", 2) == ui_websocket.testAction("UserPublickey", 2)
# Different publickey for different cert
site_data = ui_websocket.user.getSiteData(ui_websocket.site.address)
site_data["cert"] = None
pub1 = ui_websocket.testAction("UserPublickey", 0)
site_data = ui_websocket.user.getSiteData(ui_websocket.site.address)
site_data["cert"] = "zeroid.bit"
pub2 = ui_websocket.testAction("UserPublickey", 0)
assert pub1 != pub2
def testEcies(self, ui_websocket):
pub = ui_websocket.testAction("UserPublickey")
encrypted = ui_websocket.testAction("EciesEncrypt", "hello", pub)
assert len(encrypted) == 180
# Don't allow decrypt using other privatekey index
decrypted = ui_websocket.testAction("EciesDecrypt", encrypted, 123)
assert decrypted != "hello"
# Decrypt using correct privatekey
decrypted = ui_websocket.testAction("EciesDecrypt", encrypted)
assert decrypted == "hello"
# Decrypt incorrect text
decrypted = ui_websocket.testAction("EciesDecrypt", "baad")
assert decrypted is None
# Decrypt batch
decrypted = ui_websocket.testAction("EciesDecrypt", [encrypted, "baad", encrypted])
assert decrypted == ["hello", None, "hello"]
def testEciesUtf8(self, ui_websocket):
# Utf8 test
ui_websocket.actionEciesEncrypt(0, self.utf8_text)
encrypted = ui_websocket.ws.getResult()
ui_websocket.actionEciesDecrypt(0, encrypted)
assert ui_websocket.ws.getResult() == self.utf8_text
def testEciesAes(self, ui_websocket):
ui_websocket.actionEciesEncrypt(0, "hello", return_aes_key=True)
ecies_encrypted, aes_key = ui_websocket.ws.getResult()
# Decrypt using Ecies
ui_websocket.actionEciesDecrypt(0, ecies_encrypted)
assert ui_websocket.ws.getResult() == "hello"
# Decrypt using AES
aes_iv, aes_encrypted = CryptMessage.split(base64.b64decode(ecies_encrypted))
ui_websocket.actionAesDecrypt(0, base64.b64encode(aes_iv), base64.b64encode(aes_encrypted), aes_key)
assert ui_websocket.ws.getResult() == "hello"
def testEciesAesLongpubkey(self, ui_websocket):
privatekey = "5HwVS1bTFnveNk9EeGaRenWS1QFzLFb5kuncNbiY3RiHZrVR6ok"
ecies_encrypted, aes_key = ["lWiXfEikIjw1ac3J/RaY/gLKACALRUfksc9rXYRFyKDSaxhwcSFBYCgAdIyYlY294g/6VgAf/68PYBVMD3xKH1n7Zbo+ge8b4i/XTKmCZRJvy0eutMKWckYCMVcxgIYNa/ZL1BY1kvvH7omgzg1wBraoLfdbNmVtQgdAZ9XS8PwRy6OB2Q==", "Rvlf7zsMuBFHZIGHcbT1rb4If+YTmsWDv6kGwcvSeMM="]
# Decrypt using Ecies
ui_websocket.actionEciesDecrypt(0, ecies_encrypted, privatekey)
assert ui_websocket.ws.getResult() == "hello"
# Decrypt using AES
aes_iv, aes_encrypted = CryptMessage.split(base64.b64decode(ecies_encrypted))
ui_websocket.actionAesDecrypt(0, base64.b64encode(aes_iv), base64.b64encode(aes_encrypted), aes_key)
assert ui_websocket.ws.getResult() == "hello"
def testAes(self, ui_websocket):
ui_websocket.actionAesEncrypt(0, "hello")
key, iv, encrypted = ui_websocket.ws.getResult()
assert len(key) == 44
assert len(iv) == 24
assert len(encrypted) == 24
# Single decrypt
ui_websocket.actionAesDecrypt(0, iv, encrypted, key)
assert ui_websocket.ws.getResult() == "hello"
# Batch decrypt
ui_websocket.actionAesEncrypt(0, "hello")
key2, iv2, encrypted2 = ui_websocket.ws.getResult()
assert [key, iv, encrypted] != [key2, iv2, encrypted2]
# 2 correct keys
ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key])
assert ui_websocket.ws.getResult() == ["hello", "hello", None, None]
# 3 keys
ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key, key2])
assert ui_websocket.ws.getResult() == ["hello", "hello", None, "hello"]
def testAesUtf8(self, ui_websocket):
ui_websocket.actionAesEncrypt(0, self.utf8_text)
key, iv, encrypted = ui_websocket.ws.getResult()
ui_websocket.actionAesDecrypt(0, iv, encrypted, key)
assert ui_websocket.ws.getResult() == self.utf8_text

View File

@ -1 +0,0 @@
from src.Test.conftest import *

View File

@ -1,5 +0,0 @@
[pytest]
python_files = Test*.py
addopts = -rsxX -v --durations=6
markers =
webtest: mark a test as a webtest.

View File

@ -1 +0,0 @@
from . import CryptMessagePlugin

View File

@ -1,5 +0,0 @@
{
"name": "CryptMessage",
"description": "Cryptographic functions of ECIES and AES data encryption/decryption.",
"default": "enabled"
}

View File

@ -1,193 +0,0 @@
import os
import re
import gevent
from Plugin import PluginManager
from Config import config
from Debug import Debug
# Keep archives open for faster response times on large sites
archive_cache = {}
def closeArchive(archive_path):
if archive_path in archive_cache:
del archive_cache[archive_path]
def openArchive(archive_path, file_obj=None):
if archive_path not in archive_cache:
if archive_path.endswith("tar.gz"):
import tarfile
archive_cache[archive_path] = tarfile.open(archive_path, fileobj=file_obj, mode="r:gz")
else:
import zipfile
archive_cache[archive_path] = zipfile.ZipFile(file_obj or archive_path)
gevent.spawn_later(5, lambda: closeArchive(archive_path)) # Close after 5 sec
archive = archive_cache[archive_path]
return archive
def openArchiveFile(archive_path, path_within, file_obj=None):
archive = openArchive(archive_path, file_obj=file_obj)
if archive_path.endswith(".zip"):
return archive.open(path_within)
else:
return archive.extractfile(path_within)
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
def actionSiteMedia(self, path, **kwargs):
if ".zip/" in path or ".tar.gz/" in path:
file_obj = None
path_parts = self.parsePath(path)
file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"])
match = re.match("^(.*\.(?:tar.gz|zip))/(.*)", file_path)
archive_path, path_within = match.groups()
if archive_path not in archive_cache:
site = self.server.site_manager.get(path_parts["address"])
if not site:
return self.actionSiteAddPrompt(path)
archive_inner_path = site.storage.getInnerPath(archive_path)
if not os.path.isfile(archive_path):
# Wait until file downloads
result = site.needFile(archive_inner_path, priority=10)
# Send virtual file path download finished event to remove the loading screen
site.updateWebsocket(file_done=archive_inner_path)
if not result:
return self.error404(archive_inner_path)
file_obj = site.storage.openBigfile(archive_inner_path)
if file_obj == False:
file_obj = None
header_allow_ajax = False
if self.get.get("ajax_key"):
requester_site = self.server.site_manager.get(path_parts["request_address"])
if self.get["ajax_key"] == requester_site.settings["ajax_key"]:
header_allow_ajax = True
else:
return self.error403("Invalid ajax_key")
try:
file = openArchiveFile(archive_path, path_within, file_obj=file_obj)
content_type = self.getContentType(file_path)
self.sendHeader(200, content_type=content_type, noscript=kwargs.get("header_noscript", False), allow_ajax=header_allow_ajax)
return self.streamFile(file)
except Exception as err:
self.log.debug("Error opening archive file: %s" % Debug.formatException(err))
return self.error404(path)
return super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs)
def streamFile(self, file):
# PEP 479: raising StopIteration inside a generator is a RuntimeError on Python 3.7+,
# so break out of the loop once the file is exhausted, then close it
for i in range(100): # Read max 6MB
block = file.read(60 * 1024)
if block:
yield block
else:
break
file.close()
@PluginManager.registerTo("SiteStorage")
class SiteStoragePlugin(object):
def isFile(self, inner_path):
if ".zip/" in inner_path or ".tar.gz/" in inner_path:
match = re.match("^(.*\.(?:tar.gz|zip))/(.*)", inner_path)
archive_inner_path, path_within = match.groups()
return super(SiteStoragePlugin, self).isFile(archive_inner_path)
else:
return super(SiteStoragePlugin, self).isFile(inner_path)
def openArchive(self, inner_path):
archive_path = self.getPath(inner_path)
file_obj = None
if archive_path not in archive_cache:
if not os.path.isfile(archive_path):
result = self.site.needFile(inner_path, priority=10)
self.site.updateWebsocket(file_done=inner_path)
if not result:
raise Exception("Unable to download file")
file_obj = self.site.storage.openBigfile(inner_path)
if file_obj == False:
file_obj = None
try:
archive = openArchive(archive_path, file_obj=file_obj)
except Exception as err:
raise Exception("Unable to download file: %s" % Debug.formatException(err))
return archive
def walk(self, inner_path, *args, **kwargs):
if ".zip" in inner_path or ".tar.gz" in inner_path:
match = re.match("^(.*\.(?:tar.gz|zip))(.*)", inner_path)
archive_inner_path, path_within = match.groups()
archive = self.openArchive(archive_inner_path)
path_within = path_within.lstrip("/")
if archive_inner_path.endswith(".zip"):
namelist = [name for name in archive.namelist() if not name.endswith("/")]
else:
namelist = [item.name for item in archive.getmembers() if not item.isdir()]
namelist_relative = []
for name in namelist:
if not name.startswith(path_within):
continue
name_relative = name.replace(path_within, "", 1).rstrip("/")
namelist_relative.append(name_relative)
return namelist_relative
else:
return super(SiteStoragePlugin, self).walk(inner_path, *args, **kwargs)
def list(self, inner_path, *args, **kwargs):
if ".zip" in inner_path or ".tar.gz" in inner_path:
match = re.match("^(.*\.(?:tar.gz|zip))(.*)", inner_path)
archive_inner_path, path_within = match.groups()
archive = self.openArchive(archive_inner_path)
path_within = path_within.lstrip("/")
if archive_inner_path.endswith(".zip"):
namelist = [name for name in archive.namelist()]
else:
namelist = [item.name for item in archive.getmembers()]
namelist_relative = []
for name in namelist:
if not name.startswith(path_within):
continue
name_relative = name.replace(path_within, "", 1).rstrip("/")
if "/" in name_relative: # File is in sub-directory
continue
namelist_relative.append(name_relative)
return namelist_relative
else:
return super(SiteStoragePlugin, self).list(inner_path, *args, **kwargs)
def read(self, inner_path, mode="rb", **kwargs):
if ".zip/" in inner_path or ".tar.gz/" in inner_path:
match = re.match("^(.*\.(?:tar.gz|zip))(.*)", inner_path)
archive_inner_path, path_within = match.groups()
archive = self.openArchive(archive_inner_path)
path_within = path_within.lstrip("/")
if archive_inner_path.endswith(".zip"):
return archive.open(path_within).read()
else:
return archive.extractfile(path_within).read()
else:
return super(SiteStoragePlugin, self).read(inner_path, mode, **kwargs)
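A standalone sketch of the archive path convention the plugin parses throughout (<archive>.zip/<path within> or <archive>.tar.gz/<path within>); the sample path is hypothetical:

import re

def split_archive_path(path):
    # Same pattern as above: the archive file first, then the path inside it
    match = re.match(r"^(.*\.(?:tar\.gz|zip))/(.*)", path)
    if not match:
        raise ValueError("Not an archive path: %s" % path)
    return match.groups()

print(split_archive_path("data/assets.zip/css/all.css"))
# -> ('data/assets.zip', 'css/all.css')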

View File

@ -1 +0,0 @@
from . import FilePackPlugin

View File

@ -1,5 +0,0 @@
{
"name": "FilePack",
"description": "Transparent web access for Zip and Tar.gz files.",
"default": "enabled"
}

View File

@ -1,399 +0,0 @@
import re
import time
import copy
import os
from Plugin import PluginManager
from Translate import Translate
from util import RateLimit
from util import helper
from util.Flag import flag
from Debug import Debug
try:
import OptionalManager.UiWebsocketPlugin # To make optionalFileInfo merger sites compatible
except Exception:
pass
if "merger_db" not in locals().keys(): # To keep merger_sites between module reloads
merger_db = {} # Sites that allowed to list other sites {address: [type1, type2...]}
merged_db = {} # Sites that allowed to be merged to other sites {address: type, ...}
merged_to_merger = {} # {address: [site1, site2, ...]} cache
site_manager = None # Site manager for merger sites
plugin_dir = os.path.dirname(__file__)
if "_" not in locals():
_ = Translate(plugin_dir + "/languages/")
# Check if the site has permission to access this merged path
def checkMergerPath(address, inner_path):
merged_match = re.match("^merged-(.*?)/([A-Za-z0-9]{26,35})/", inner_path)
if merged_match:
merger_type = merged_match.group(1)
# Check if merged site is allowed to include other sites
if merger_type in merger_db.get(address, []):
# Check if included site allows to include
merged_address = merged_match.group(2)
if merged_db.get(merged_address) == merger_type:
inner_path = re.sub("^merged-(.*?)/([A-Za-z0-9]{26,35})/", "", inner_path)
return merged_address, inner_path
else:
raise Exception(
"Merger site (%s) does not have permission for merged site: %s (%s)" %
(merger_type, merged_address, merged_db.get(merged_address))
)
else:
raise Exception("No merger (%s) permission to load: <br>%s (%s not in %s)" % (
address, inner_path, merger_type, merger_db.get(address, []))
)
else:
raise Exception("Invalid merger path: %s" % inner_path)
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
# Download new site
def actionMergerSiteAdd(self, to, addresses):
if type(addresses) != list:
# Single site add
addresses = [addresses]
# Check if the site has merger permission
merger_types = merger_db.get(self.site.address)
if not merger_types:
return self.response(to, {"error": "Not a merger site"})
if RateLimit.isAllowed(self.site.address + "-MergerSiteAdd", 10) and len(addresses) == 1:
# Without confirmation if only one site address and not called in last 10 sec
self.cbMergerSiteAdd(to, addresses)
else:
self.cmd(
"confirm",
[_["Add <b>%s</b> new site?"] % len(addresses), "Add"],
lambda res: self.cbMergerSiteAdd(to, addresses)
)
self.response(to, "ok")
# Callback of adding new site confirmation
def cbMergerSiteAdd(self, to, addresses):
added = 0
for address in addresses:
try:
site_manager.need(address)
added += 1
except Exception as err:
self.cmd("notification", ["error", _["Adding <b>%s</b> failed: %s"] % (address, err)])
if added:
self.cmd("notification", ["done", _["Added <b>%s</b> new site"] % added, 5000])
RateLimit.called(self.site.address + "-MergerSiteAdd")
site_manager.updateMergerSites()
# Delete a merged site
@flag.no_multiuser
def actionMergerSiteDelete(self, to, address):
site = self.server.sites.get(address)
if not site:
return self.response(to, {"error": "No site found: %s" % address})
merger_types = merger_db.get(self.site.address)
if not merger_types:
return self.response(to, {"error": "Not a merger site"})
if merged_db.get(address) not in merger_types:
return self.response(to, {"error": "Merged type (%s) not in %s" % (merged_db.get(address), merger_types)})
self.cmd("notification", ["done", _["Site deleted: <b>%s</b>"] % address, 5000])
self.response(to, "ok")
# Lists merged sites
def actionMergerSiteList(self, to, query_site_info=False):
merger_types = merger_db.get(self.site.address)
ret = {}
if not merger_types:
return self.response(to, {"error": "Not a merger site"})
for address, merged_type in merged_db.items():
if merged_type not in merger_types:
continue # Site not for us
if query_site_info:
site = self.server.sites.get(address)
ret[address] = self.formatSiteInfo(site, create_user=False)
else:
ret[address] = merged_type
self.response(to, ret)
def hasSitePermission(self, address, *args, **kwargs):
if super(UiWebsocketPlugin, self).hasSitePermission(address, *args, **kwargs):
return True
else:
if self.site.address in [merger_site.address for merger_site in merged_to_merger.get(address, [])]:
return True
else:
return False
# Add merger site support for file commands
def mergerFuncWrapper(self, func_name, to, inner_path, *args, **kwargs):
if inner_path.startswith("merged-"):
merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path)
# Set the same cert for merged site
merger_cert = self.user.getSiteData(self.site.address).get("cert")
if merger_cert and self.user.getSiteData(merged_address).get("cert") != merger_cert:
self.user.setCert(merged_address, merger_cert)
req_self = copy.copy(self)
req_self.site = self.server.sites.get(merged_address) # Change the site to the merged one
func = getattr(super(UiWebsocketPlugin, req_self), func_name)
return func(to, merged_inner_path, *args, **kwargs)
else:
func = getattr(super(UiWebsocketPlugin, self), func_name)
return func(to, inner_path, *args, **kwargs)
def actionFileList(self, to, inner_path, *args, **kwargs):
return self.mergerFuncWrapper("actionFileList", to, inner_path, *args, **kwargs)
def actionDirList(self, to, inner_path, *args, **kwargs):
return self.mergerFuncWrapper("actionDirList", to, inner_path, *args, **kwargs)
def actionFileGet(self, to, inner_path, *args, **kwargs):
return self.mergerFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs)
def actionFileWrite(self, to, inner_path, *args, **kwargs):
return self.mergerFuncWrapper("actionFileWrite", to, inner_path, *args, **kwargs)
def actionFileDelete(self, to, inner_path, *args, **kwargs):
return self.mergerFuncWrapper("actionFileDelete", to, inner_path, *args, **kwargs)
def actionFileRules(self, to, inner_path, *args, **kwargs):
return self.mergerFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs)
def actionFileNeed(self, to, inner_path, *args, **kwargs):
return self.mergerFuncWrapper("actionFileNeed", to, inner_path, *args, **kwargs)
def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs):
return self.mergerFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs)
def actionOptionalFileDelete(self, to, inner_path, *args, **kwargs):
return self.mergerFuncWrapper("actionOptionalFileDelete", to, inner_path, *args, **kwargs)
def actionBigfileUploadInit(self, to, inner_path, *args, **kwargs):
back = self.mergerFuncWrapper("actionBigfileUploadInit", to, inner_path, *args, **kwargs)
if inner_path.startswith("merged-"):
merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path)
back["inner_path"] = "merged-%s/%s/%s" % (merged_db[merged_address], merged_address, back["inner_path"])
return back
# Add merger site support for file commands with a privatekey parameter
def mergerFuncWrapperWithPrivatekey(self, func_name, to, privatekey, inner_path, *args, **kwargs):
func = getattr(super(UiWebsocketPlugin, self), func_name)
if inner_path.startswith("merged-"):
merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path)
merged_site = self.server.sites.get(merged_address)
# Set the same cert for merged site
merger_cert = self.user.getSiteData(self.site.address).get("cert")
if merger_cert:
self.user.setCert(merged_address, merger_cert)
site_before = self.site # Save to be able to change it back after we ran the command
self.site = merged_site # Change the site to the merged one
try:
back = func(to, privatekey, merged_inner_path, *args, **kwargs)
finally:
self.site = site_before # Change back to original site
return back
else:
return func(to, privatekey, inner_path, *args, **kwargs)
def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
return self.mergerFuncWrapperWithPrivatekey("actionSiteSign", to, privatekey, inner_path, *args, **kwargs)
def actionSitePublish(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
return self.mergerFuncWrapperWithPrivatekey("actionSitePublish", to, privatekey, inner_path, *args, **kwargs)
def actionPermissionAdd(self, to, permission):
super(UiWebsocketPlugin, self).actionPermissionAdd(to, permission)
if permission.startswith("Merger"):
self.site.storage.rebuildDb()
def actionPermissionDetails(self, to, permission):
if not permission.startswith("Merger"):
return super(UiWebsocketPlugin, self).actionPermissionDetails(to, permission)
merger_type = permission.replace("Merger:", "")
if not re.match("^[A-Za-z0-9-]+$", merger_type):
raise Exception("Invalid merger_type: %s" % merger_type)
merged_sites = []
for address, merged_type in merged_db.items():
if merged_type != merger_type:
continue
site = self.server.sites.get(address)
try:
merged_sites.append(site.content_manager.contents.get("content.json").get("title", address))
except Exception:
merged_sites.append(address)
details = _["Read and write permissions to sites with merged type of <b>%s</b> "] % merger_type
details += _["(%s sites)"] % len(merged_sites)
details += "<div style='white-space: normal; max-width: 400px'>%s</div>" % ", ".join(merged_sites)
self.response(to, details)
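# For reference, a minimal sketch of what the checkMergerPath() helper used
# above is assumed to do (the real implementation lives earlier in this plugin
# file and also validates the merged_type against merged_db): it maps a
# virtual "merged-TYPE/ADDRESS/inner_path" path to the real merged site.
def checkMergerPathSketch(address, inner_path):
    # Join address and inner_path so both caller styles work:
    # UiWebsocket passes inner_path="merged-TYPE/ADDR/...",
    # UiRequest passes address="merged-TYPE" and inner_path="ADDR/...".
    virtual_path = address + "/" + inner_path
    merged_match = re.match(r"^(?:[^/]+/)?merged-([A-Za-z0-9-]+)/([A-Za-z0-9]+)/(.*)$", virtual_path)
    if not merged_match:
        raise Exception("Invalid merger path: %s" % virtual_path)
    # Return the real merged site address and the path inside that site
    return merged_match.group(2), merged_match.group(3)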
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
# Allow loading merged site files using /merged-ZeroMe/address/file.jpg
def parsePath(self, path):
path_parts = super(UiRequestPlugin, self).parsePath(path)
if "merged-" not in path: # Optimization
return path_parts
path_parts["address"], path_parts["inner_path"] = checkMergerPath(path_parts["address"], path_parts["inner_path"])
return path_parts
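# Example (hypothetical addresses): a request for
#   /merged-ZeroMe/1MergedSiteAddr.../data/avatar.jpg
# first parses as address="merged-ZeroMe", then checkMergerPath() rewrites
# path_parts to the real merged site address and inner_path="data/avatar.jpg".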
@PluginManager.registerTo("SiteStorage")
class SiteStoragePlugin(object):
# Also rebuild from merged sites
def getDbFiles(self):
merger_types = merger_db.get(self.site.address)
# First return the site's own db files
for item in super(SiteStoragePlugin, self).getDbFiles():
yield item
# Not a merger site, that's all
if not merger_types:
return
merged_sites = [
site_manager.sites[address]
for address, merged_type in merged_db.items()
if merged_type in merger_types
]
found = 0
for merged_site in merged_sites:
self.log.debug("Loading merged site: %s" % merged_site)
merged_type = merged_db[merged_site.address]
for content_inner_path, content in merged_site.content_manager.contents.items():
# content.json file itself
if merged_site.storage.isFile(content_inner_path): # The content.json file exists on disk
merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path)
yield merged_inner_path, merged_site.storage.getPath(content_inner_path)
else:
merged_site.log.error("[MISSING] %s" % content_inner_path)
# Data files in content.json
content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
if not file_relative_path.endswith(".json"):
continue # We are only interested in .json files
file_inner_path = content_inner_path_dir + file_relative_path # File path relative to the site dir
file_inner_path = file_inner_path.strip("/") # Strip leading/trailing /
if merged_site.storage.isFile(file_inner_path):
merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, file_inner_path)
yield merged_inner_path, merged_site.storage.getPath(file_inner_path)
else:
merged_site.log.error("[MISSING] %s" % file_inner_path)
found += 1
if found % 100 == 0:
time.sleep(0.001) # Context switch to avoid UI block
# Also notify merger sites when a merged site file changes
def onUpdated(self, inner_path, file=None):
super(SiteStoragePlugin, self).onUpdated(inner_path, file)
merged_type = merged_db.get(self.site.address)
for merger_site in merged_to_merger.get(self.site.address, []):
if merger_site.address == self.site.address: # Avoid infinite loop
continue
virtual_path = "merged-%s/%s/%s" % (merged_type, self.site.address, inner_path)
if inner_path.endswith(".json"):
if file is not None:
merger_site.storage.onUpdated(virtual_path, file=file)
else:
merger_site.storage.onUpdated(virtual_path, file=self.open(inner_path))
else:
merger_site.storage.onUpdated(virtual_path)
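# Example (hypothetical address): when a merged ZeroMe site writes
# data/users/1ABC/data.json, every merger site following it receives an
# onUpdated() call for the virtual path
#   merged-ZeroMe/1MergedSiteAddr.../data/users/1ABC/data.json
# which lets it refresh its merged database rows for that file.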
@PluginManager.registerTo("Site")
class SitePlugin(object):
def fileDone(self, inner_path):
super(SitePlugin, self).fileDone(inner_path)
for merger_site in merged_to_merger.get(self.address, []):
if merger_site.address == self.address:
continue
for ws in merger_site.websockets:
ws.event("siteChanged", self, {"event": ["file_done", inner_path]})
def fileFailed(self, inner_path):
super(SitePlugin, self).fileFailed(inner_path)
for merger_site in merged_to_merger.get(self.address, []):
if merger_site.address == self.address:
continue
for ws in merger_site.websockets:
ws.event("siteChanged", self, {"event": ["file_failed", inner_path]})
@PluginManager.registerTo("SiteManager")
class SiteManagerPlugin(object):
# Rebuild the merger/merged site mappings
def updateMergerSites(self):
global merger_db, merged_db, merged_to_merger, site_manager
s = time.time()
merger_db_new = {}
merged_db_new = {}
merged_to_merger_new = {}
site_manager = self
if not self.sites:
return
for site in self.sites.values():
# Update merged sites
try:
merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type")
except Exception as err:
self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err)))
continue
if merged_type:
merged_db_new[site.address] = merged_type
# Update merger sites
for permission in site.settings["permissions"]:
if not permission.startswith("Merger:"):
continue
if merged_type:
self.log.error(
"Removing permission %s from %s: Merger and merged at the same time." %
(permission, site.address)
)
site.settings["permissions"].remove(permission)
continue
merger_type = permission.replace("Merger:", "")
if site.address not in merger_db_new:
merger_db_new[site.address] = []
merger_db_new[site.address].append(merger_type)
site_manager.sites[site.address] = site
# Update merged to merger
if merged_type:
for merger_site in self.sites.values():
if "Merger:" + merged_type in merger_site.settings["permissions"]:
if site.address not in merged_to_merger_new:
merged_to_merger_new[site.address] = []
merged_to_merger_new[site.address].append(merger_site)
# Update globals
merger_db = merger_db_new
merged_db = merged_db_new
merged_to_merger = merged_to_merger_new
self.log.debug("Updated merger sites in %.3fs" % (time.time() - s))
def load(self, *args, **kwargs):
super(SiteManagerPlugin, self).load(*args, **kwargs)
self.updateMergerSites()
def saveDelayed(self, *args, **kwargs):
super(SiteManagerPlugin, self).saveDelayed(*args, **kwargs)
self.updateMergerSites()
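# Shapes of the three module-level mappings rebuilt above (addresses are
# hypothetical; inferred from the code, a sketch rather than a spec):
#   merger_db        = {"1MergerAddr...": ["ZeroMe"]}   # merger site -> followed merged types
#   merged_db        = {"1MergedAddr...": "ZeroMe"}     # merged site -> its merged_type
#   merged_to_merger = {"1MergedAddr...": [<Site 1MergerAddr...>]}  # merged site -> merger Site objects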


@ -1 +0,0 @@
from . import MergerSitePlugin


@ -1,5 +0,0 @@
{
"Add <b>%s</b> new site?": "¿Agregar <b>%s</b> nuevo sitio?",
"Added <b>%s</b> new site": "Sitio <b>%s</b> agregado",
"Site deleted: <b>%s</b>": "Sitio removido: <b>%s</b>"
}


@ -1,5 +0,0 @@
{
"Add <b>%s</b> new site?": "Ajouter le site <b>%s</b> ?",
"Added <b>%s</b> new site": "Site <b>%s</b> ajouté",
"Site deleted: <b>%s</b>": "Site <b>%s</b> supprimé"
}


@ -1,5 +0,0 @@
{
"Add <b>%s</b> new site?": "Új oldal hozzáadása: <b>%s</b>?",
"Added <b>%s</b> new site": "Új oldal hozzáadva: <b>%s</b>",
"Site deleted: <b>%s</b>": "Oldal törölve: <b>%s</b>"
}


@ -1,5 +0,0 @@
{
"Add <b>%s</b> new site?": "Aggiungere <b>%s</b> nuovo sito ?",
"Added <b>%s</b> new site": "Sito <b>%s</b> aggiunto",
"Site deleted: <b>%s</b>": "Sito <b>%s</b> eliminato"
}


@ -1,5 +0,0 @@
{
"Add <b>%s</b> new site?": "サイト: <b>%s</b> を追加しますか?",
"Added <b>%s</b> new site": "サイト: <b>%s</b> を追加しました",
"Site deleted: <b>%s</b>": "サイト: <b>%s</b> を削除しました"
}


@ -1,5 +0,0 @@
{
"Add <b>%s</b> new site?": "Adicionar <b>%s</b> novo site?",
"Added <b>%s</b> new site": "Site <b>%s</b> adicionado",
"Site deleted: <b>%s</b>": "Site removido: <b>%s</b>"
}


@ -1,5 +0,0 @@
{
"Add <b>%s</b> new site?": "<b>%s</b> sitesi eklensin mi?",
"Added <b>%s</b> new site": "<b>%s</b> sitesi eklendi",
"Site deleted: <b>%s</b>": "<b>%s</b> sitesi silindi"
}


@ -1,5 +0,0 @@
{
"Add <b>%s</b> new site?": "添加新網站: <b>%s</b>",
"Added <b>%s</b> new site": "已添加到新網站:<b>%s</b>",
"Site deleted: <b>%s</b>": "網站已刪除:<b>%s</b>"
}


@ -1,5 +0,0 @@
{
"Add <b>%s</b> new site?": "添加新站点: <b>%s</b>",
"Added <b>%s</b> new site": "已添加到新站点:<b>%s</b>",
"Site deleted: <b>%s</b>": "站点已删除:<b>%s</b>"
}


@ -1,187 +0,0 @@
import time
import re
from Plugin import PluginManager
from Db.DbQuery import DbQuery
from Debug import Debug
from util import helper
from util.Flag import flag
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
def formatSiteInfo(self, site, create_user=True):
site_info = super(UiWebsocketPlugin, self).formatSiteInfo(site, create_user=create_user)
feed_following = self.user.sites.get(site.address, {}).get("follow", None)
if feed_following is None:
site_info["feed_follow_num"] = None
else:
site_info["feed_follow_num"] = len(feed_following)
return site_info
def actionFeedFollow(self, to, feeds):
self.user.setFeedFollow(self.site.address, feeds)
self.user.save()
self.response(to, "ok")
def actionFeedListFollow(self, to):
feeds = self.user.sites.get(self.site.address, {}).get("follow", {})
self.response(to, feeds)
@flag.admin
def actionFeedQuery(self, to, limit=10, day_limit=3):
from Site import SiteManager
rows = []
stats = []
total_s = time.time()
num_sites = 0
for address, site_data in list(self.user.sites.items()):
feeds = site_data.get("follow")
if not feeds:
continue
if type(feeds) is not dict:
self.log.debug("Invalid feed for site %s" % address)
continue
num_sites += 1
for name, query_set in feeds.items():
site = SiteManager.site_manager.get(address)
if not site or not site.storage.has_db:
continue
s = time.time()
try:
query_raw, params = query_set
query_parts = re.split(r"UNION(?:\s+ALL|)", query_raw)
for i, query_part in enumerate(query_parts):
db_query = DbQuery(query_part)
if day_limit:
where = " WHERE %s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit)
if "WHERE" in query_part:
query_part = re.sub("WHERE (.*?)(?=$| GROUP BY)", where+" AND (\\1)", query_part)
else:
query_part += where
query_parts[i] = query_part
query = " UNION ".join(query_parts)
if ":params" in query:
query_params = map(helper.sqlquote, params)
query = query.replace(":params", ",".join(query_params))
res = site.storage.query(query + " ORDER BY date_added DESC LIMIT %s" % limit)
except Exception as err: # Log error
self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
stats.append({"site": site.address, "feed_name": name, "error": str(err)})
continue
for row in res:
row = dict(row)
if "date_added" not in row or not isinstance(row["date_added"], (int, float)):
self.log.debug("Invalid date_added from site %s: %r" % (address, row.get("date_added")))
continue
if row["date_added"] > 1000000000000: # Formatted as millseconds
row["date_added"] = row["date_added"] / 1000
if "date_added" not in row or row["date_added"] > time.time() + 120:
self.log.debug("Newsfeed item from the future from from site %s" % address)
continue # Feed item is in the future, skip it
row["site"] = address
row["feed_name"] = name
rows.append(row)
stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)})
time.sleep(0.001)
return self.response(to, {"rows": rows, "stats": stats, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3)})
def parseSearch(self, search):
parts = re.split("(site|type):", search)
if len(parts) > 1: # Found filter
search_text = parts[0]
parts = [part.strip() for part in parts]
filters = dict(zip(parts[1::2], parts[2::2]))
else:
search_text = search
filters = {}
return [search_text, filters]
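# Example:
#   parseSearch("cats site:ZeroTalk type:post")
#   -> ["cats ", {"site": "ZeroTalk", "type": "post"}]
# (the free-text part keeps its trailing whitespace; filter values are stripped)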
def actionFeedSearch(self, to, search, limit=30, day_limit=30):
if "ADMIN" not in self.site.settings["permissions"]:
return self.response(to, "FeedSearch not allowed")
from Site import SiteManager
rows = []
stats = []
num_sites = 0
total_s = time.time()
search_text, filters = self.parseSearch(search)
for address, site in SiteManager.site_manager.list().items():
if not site.storage.has_db:
continue
if "site" in filters:
if filters["site"].lower() not in [site.address, site.content_manager.contents["content.json"].get("title").lower()]:
continue
if site.storage.db: # Database loaded
feeds = site.storage.db.schema.get("feeds")
else:
try:
feeds = site.storage.loadJson("dbschema.json").get("feeds")
except Exception:
continue
if not feeds:
continue
num_sites += 1
for name, query in feeds.items():
s = time.time()
try:
db_query = DbQuery(query)
params = []
# Filters
if search_text:
db_query.wheres.append("(%s LIKE ? OR %s LIKE ?)" % (db_query.fields["body"], db_query.fields["title"]))
search_like = "%" + search_text.replace(" ", "%") + "%"
params.append(search_like)
params.append(search_like)
if filters.get("type") and filters["type"] not in query:
continue
if day_limit:
db_query.wheres.append(
"%s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit)
)
# Order
db_query.parts["ORDER BY"] = "date_added DESC"
db_query.parts["LIMIT"] = str(limit)
res = site.storage.query(str(db_query), params)
except Exception as err:
self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query})
continue
for row in res:
row = dict(row)
if not row["date_added"] or row["date_added"] > time.time() + 120:
continue # Feed item is in the future, skip it
row["site"] = address
row["feed_name"] = name
rows.append(row)
stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)})
return self.response(to, {"rows": rows, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3), "stats": stats})
@PluginManager.registerTo("User")
class UserPlugin(object):
# Set the feed queries that the user follows
def setFeedFollow(self, address, feeds):
site_data = self.getSiteData(address)
site_data["follow"] = feeds
self.save()
return site_data


@ -1 +0,0 @@
from . import NewsfeedPlugin


@ -1,414 +0,0 @@
import time
import collections
import itertools
import re
import gevent
from util import helper
from Plugin import PluginManager
from Config import config
from Debug import Debug
if "content_db" not in locals().keys(): # To keep between module reloads
content_db = None
@PluginManager.registerTo("ContentDb")
class ContentDbPlugin(object):
def __init__(self, *args, **kwargs):
global content_db
content_db = self
self.filled = {} # Site addresses that already filled from content.json
self.need_filling = False # file_optional table just created, fill data from content.json files
self.time_peer_numbers_updated = 0
self.my_optional_files = {} # Last 50 site_address/inner_path called by fileWrite (auto-pinning these files)
self.optional_files = collections.defaultdict(dict)
self.optional_files_loaded = False
self.timer_check_optional = helper.timer(60 * 5, self.checkOptionalLimit)
super(ContentDbPlugin, self).__init__(*args, **kwargs)
def getSchema(self):
schema = super(ContentDbPlugin, self).getSchema()
# Need file_optional table
schema["tables"]["file_optional"] = {
"cols": [
["file_id", "INTEGER PRIMARY KEY UNIQUE NOT NULL"],
["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"],
["inner_path", "TEXT"],
["hash_id", "INTEGER"],
["size", "INTEGER"],
["peer", "INTEGER DEFAULT 0"],
["uploaded", "INTEGER DEFAULT 0"],
["is_downloaded", "INTEGER DEFAULT 0"],
["is_pinned", "INTEGER DEFAULT 0"],
["time_added", "INTEGER DEFAULT 0"],
["time_downloaded", "INTEGER DEFAULT 0"],
["time_accessed", "INTEGER DEFAULT 0"]
],
"indexes": [
"CREATE UNIQUE INDEX file_optional_key ON file_optional (site_id, inner_path)",
"CREATE INDEX is_downloaded ON file_optional (is_downloaded)"
],
"schema_changed": 11
}
return schema
def initSite(self, site):
super(ContentDbPlugin, self).initSite(site)
if self.need_filling:
self.fillTableFileOptional(site)
def checkTables(self):
changed_tables = super(ContentDbPlugin, self).checkTables()
if "file_optional" in changed_tables:
self.need_filling = True
return changed_tables
# Load optional file path endings (the last 8 chars) into memory
def loadFilesOptional(self):
s = time.time()
num = 0
total = 0
total_downloaded = 0
res = content_db.execute("SELECT site_id, inner_path, size, is_downloaded FROM file_optional")
site_sizes = collections.defaultdict(lambda: collections.defaultdict(int))
for row in res:
self.optional_files[row["site_id"]][row["inner_path"][-8:]] = 1
num += 1
# Update site size stats
site_sizes[row["site_id"]]["size_optional"] += row["size"]
if row["is_downloaded"]:
site_sizes[row["site_id"]]["optional_downloaded"] += row["size"]
# Save site size stats to sites.json settings
site_ids_reverse = {val: key for key, val in self.site_ids.items()}
for site_id, stats in site_sizes.items():
site_address = site_ids_reverse.get(site_id)
if not site_address or site_address not in self.sites:
self.log.error("Not found site_id: %s" % site_id)
continue
site = self.sites[site_address]
site.settings["size_optional"] = stats["size_optional"]
site.settings["optional_downloaded"] = stats["optional_downloaded"]
total += stats["size_optional"]
total_downloaded += stats["optional_downloaded"]
self.log.info(
"Loaded %s optional files: %.2fMB, downloaded: %.2fMB in %.3fs" %
(num, float(total) / 1024 / 1024, float(total_downloaded) / 1024 / 1024, time.time() - s)
)
if self.need_filling and self.getOptionalLimitBytes() >= 0 and self.getOptionalLimitBytes() < total_downloaded:
limit_bytes = self.getOptionalLimitBytes()
limit_new = round((float(total_downloaded) / 1024 / 1024 / 1024) * 1.1, 2) # Downloaded size + 10%
self.log.info(
"First startup after update and limit is smaller than downloaded files size (%.2fGB), increasing it from %.2fGB to %.2fGB" %
(float(total_downloaded) / 1024 / 1024 / 1024, float(limit_bytes) / 1024 / 1024 / 1024, limit_new)
)
config.saveValue("optional_limit", limit_new)
config.optional_limit = str(limit_new)
# Predict whether a file is optional (by its path ending)
def isOptionalFile(self, site_id, inner_path):
return self.optional_files[site_id].get(inner_path[-8:])
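# Example: only the last 8 characters of the inner path are used as the key,
# so "data/users/1ABC/avatar.png" is looked up as "atar.png". This keeps the
# in-memory index tiny; a rare suffix collision only causes a harmless false
# "is optional" guess, hence "predict" above.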
# Fill file_optional table with optional files found in sites
def fillTableFileOptional(self, site):
s = time.time()
site_id = self.site_ids.get(site.address)
if not site_id:
return False
cur = self.getCursor()
res = cur.execute("SELECT * FROM content WHERE size_files_optional > 0 AND site_id = %s" % site_id)
num = 0
for row in res.fetchall():
content = site.content_manager.contents[row["inner_path"]]
try:
num += self.setContentFilesOptional(site, row["inner_path"], content, cur=cur)
except Exception as err:
self.log.error("Error loading %s into file_optional: %s" % (row["inner_path"], err))
cur.close()
# Set my files to pinned
from User import UserManager
user = UserManager.user_manager.get()
if not user:
user = UserManager.user_manager.create()
auth_address = user.getAuthAddress(site.address)
res = self.execute(
"UPDATE file_optional SET is_pinned = 1 WHERE site_id = :site_id AND inner_path LIKE :inner_path",
{"site_id": site_id, "inner_path": "%%/%s/%%" % auth_address}
)
self.log.debug(
"Filled file_optional table for %s in %.3fs (loaded: %s, is_pinned: %s)" %
(site.address, time.time() - s, num, res.rowcount)
)
self.filled[site.address] = True
def setContentFilesOptional(self, site, content_inner_path, content, cur=None):
if not cur:
cur = self
num = 0
site_id = self.site_ids[site.address]
content_inner_dir = helper.getDirname(content_inner_path)
for relative_inner_path, file in content.get("files_optional", {}).items():
file_inner_path = content_inner_dir + relative_inner_path
hash_id = int(file["sha512"][0:4], 16)
if hash_id in site.content_manager.hashfield:
is_downloaded = 1
else:
is_downloaded = 0
if site.address + "/" + content_inner_dir in self.my_optional_files:
is_pinned = 1
else:
is_pinned = 0
cur.insertOrUpdate("file_optional", {
"hash_id": hash_id,
"size": int(file["size"])
}, {
"site_id": site_id,
"inner_path": file_inner_path
}, oninsert={
"time_added": int(time.time()),
"time_downloaded": int(time.time()) if is_downloaded else 0,
"is_downloaded": is_downloaded,
"peer": is_downloaded,
"is_pinned": is_pinned
})
self.optional_files[site_id][file_inner_path[-8:]] = 1
num += 1
return num
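# Example: hash_id is the first 4 hex digits of the file's sha512 as an int,
#   int("aaaabbbbcccc"[0:4], 16) == int("aaaa", 16) == 43690
# so it lives in the same 0..65535 space as the peer hashfield entries.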
def setContent(self, site, inner_path, content, size=0):
super(ContentDbPlugin, self).setContent(site, inner_path, content, size=size)
old_content = site.content_manager.contents.get(inner_path, {})
if (not self.need_filling or self.filled.get(site.address)) and ("files_optional" in content or "files_optional" in old_content):
self.setContentFilesOptional(site, inner_path, content)
# Check deleted files
if old_content:
old_files = old_content.get("files_optional", {}).keys()
new_files = content.get("files_optional", {}).keys()
content_inner_dir = helper.getDirname(inner_path)
deleted = [content_inner_dir + key for key in old_files if key not in new_files]
if deleted:
site_id = self.site_ids[site.address]
self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": deleted})
def deleteContent(self, site, inner_path):
content = site.content_manager.contents.get(inner_path)
if content and "files_optional" in content:
site_id = self.site_ids[site.address]
content_inner_dir = helper.getDirname(inner_path)
optional_inner_paths = [
content_inner_dir + relative_inner_path
for relative_inner_path in content.get("files_optional", {}).keys()
]
self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": optional_inner_paths})
super(ContentDbPlugin, self).deleteContent(site, inner_path)
def updatePeerNumbers(self):
s = time.time()
num_file = 0
num_updated = 0
num_site = 0
for site in list(self.sites.values()):
if not site.content_manager.has_optional_files:
continue
if not site.isServing():
continue
has_updated_hashfield = next((
peer
for peer in site.peers.values()
if peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated
), None)
if not has_updated_hashfield and site.content_manager.hashfield.time_changed < self.time_peer_numbers_updated:
continue
hashfield_peers = itertools.chain.from_iterable(
peer.hashfield.storage
for peer in site.peers.values()
if peer.has_hashfield
)
peer_nums = collections.Counter(
itertools.chain(
hashfield_peers,
site.content_manager.hashfield
)
)
site_id = self.site_ids[site.address]
if not site_id:
continue
res = self.execute("SELECT file_id, hash_id, peer FROM file_optional WHERE ?", {"site_id": site_id})
updates = {}
for row in res:
peer_num = peer_nums.get(row["hash_id"], 0)
if peer_num != row["peer"]:
updates[row["file_id"]] = peer_num
for file_id, peer_num in updates.items():
self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id))
num_updated += len(updates)
num_file += len(peer_nums)
num_site += 1
self.time_peer_numbers_updated = time.time()
self.log.debug("%s/%s peer number for %s site updated in %.3fs" % (num_updated, num_file, num_site, time.time() - s))
def queryDeletableFiles(self):
# First return files with at least 10 seeders, preferring ones not accessed recently
query = """
SELECT * FROM file_optional
WHERE peer > 10 AND %s
ORDER BY time_accessed < %s DESC, uploaded / size
""" % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7))
limit_start = 0
while 1:
num = 0
res = self.execute("%s LIMIT %s, 50" % (query, limit_start))
for row in res:
yield row
num += 1
if num < 50:
break
limit_start += 50
self.log.debug("queryDeletableFiles returning less-seeded files")
# Then return files with fewer seeders, still preferring ones not accessed recently
query = """
SELECT * FROM file_optional
WHERE peer <= 10 AND %s
ORDER BY peer DESC, time_accessed < %s DESC, uploaded / size
""" % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7))
limit_start = 0
while 1:
num = 0
res = self.execute("%s LIMIT %s, 50" % (query, limit_start))
for row in res:
yield row
num += 1
if num < 50:
break
limit_start += 50
self.log.debug("queryDeletableFiles returning everyting")
# At the end return all files
query = """
SELECT * FROM file_optional
WHERE peer <= 10 AND %s
ORDER BY peer DESC, time_accessed, uploaded / size
""" % self.getOptionalUsedWhere()
limit_start = 0
while 1:
num = 0
res = self.execute("%s LIMIT %s, 50" % (query, limit_start))
for row in res:
yield row
num += 1
if num < 50:
break
limit_start += 50
def getOptionalLimitBytes(self):
if config.optional_limit.endswith("%"):
limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit))
limit_bytes = helper.getFreeSpace() * (limit_percent / 100)
else:
limit_bytes = float(re.sub("[^0-9.]", "", config.optional_limit)) * 1024 * 1024 * 1024
return limit_bytes
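# Examples (helper.getFreeSpace() is assumed to return bytes):
#   optional_limit = "10%" with 200 GB free -> limit_bytes = 20 GB
#   optional_limit = "5"  (i.e. 5 GB)       -> limit_bytes = 5 * 1024 ** 3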
def getOptionalUsedWhere(self):
maxsize = config.optional_limit_exclude_minsize * 1024 * 1024
query = "is_downloaded = 1 AND is_pinned = 0 AND size < %s" % maxsize
# Don't delete optional files from owned sites
my_site_ids = []
for address, site in self.sites.items():
if site.settings["own"]:
my_site_ids.append(str(self.site_ids[address]))
if my_site_ids:
query += " AND site_id NOT IN (%s)" % ", ".join(my_site_ids)
return query
def getOptionalUsedBytes(self):
size = self.execute("SELECT SUM(size) FROM file_optional WHERE %s" % self.getOptionalUsedWhere()).fetchone()[0]
if not size:
size = 0
return size
def getOptionalNeedDelete(self, size):
if config.optional_limit.endswith("%"):
limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit))
need_delete = size - ((helper.getFreeSpace() + size) * (limit_percent / 100))
else:
need_delete = size - self.getOptionalLimitBytes()
return need_delete
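# Worked example with optional_limit = "10%": if downloaded optional files use
# 20 GB and 90 GB is free, then
#   need_delete = 20 - ((90 + 20) * 0.10) = 9 GB
# i.e. the percent limit is measured against the space the optional files
# would free up (current free space plus their own size).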
def checkOptionalLimit(self, limit=None):
if not limit:
limit = self.getOptionalLimitBytes()
if limit < 0:
self.log.debug("Invalid limit for optional files: %s" % limit)
return False
size = self.getOptionalUsedBytes()
need_delete = self.getOptionalNeedDelete(size)
self.log.debug(
"Optional size: %.1fMB/%.1fMB, Need delete: %.1fMB" %
(float(size) / 1024 / 1024, float(limit) / 1024 / 1024, float(need_delete) / 1024 / 1024)
)
if need_delete <= 0:
return False
self.updatePeerNumbers()
site_ids_reverse = {val: key for key, val in self.site_ids.items()}
deleted_file_ids = []
for row in self.queryDeletableFiles():
site_address = site_ids_reverse.get(row["site_id"])
site = self.sites.get(site_address)
if not site:
self.log.error("No site found for id: %s" % row["site_id"])
continue
site.log.debug("Deleting %s %.3f MB left" % (row["inner_path"], float(need_delete) / 1024 / 1024))
deleted_file_ids.append(row["file_id"])
try:
site.content_manager.optionalRemoved(row["inner_path"], row["hash_id"], row["size"])
site.storage.delete(row["inner_path"])
need_delete -= row["size"]
except Exception as err:
site.log.error("Error deleting %s: %s" % (row["inner_path"], err))
if need_delete <= 0:
break
cur = self.getCursor()
for file_id in deleted_file_ids:
cur.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"file_id": file_id})
cur.close()
@PluginManager.registerTo("SiteManager")
class SiteManagerPlugin(object):
def load(self, *args, **kwargs):
back = super(SiteManagerPlugin, self).load(*args, **kwargs)
if self.sites and not content_db.optional_files_loaded and content_db.conn:
content_db.optional_files_loaded = True
content_db.loadFilesOptional()
return back


@ -1,253 +0,0 @@
import time
import re
import collections
import gevent
from util import helper
from Plugin import PluginManager
from . import ContentDbPlugin
# We can only import plugin host classes after the plugins are loaded
@PluginManager.afterLoad
def importPluginnedClasses():
global config
from Config import config
def processAccessLog():
global access_log
if access_log:
content_db = ContentDbPlugin.content_db
if not content_db.conn:
return False
s = time.time()
access_log_prev = access_log
access_log = collections.defaultdict(dict)
now = int(time.time())
num = 0
for site_id in access_log_prev:
content_db.execute(
"UPDATE file_optional SET time_accessed = %s WHERE ?" % now,
{"site_id": site_id, "inner_path": list(access_log_prev[site_id].keys())}
)
num += len(access_log_prev[site_id])
content_db.log.debug("Inserted %s web request stat in %.3fs" % (num, time.time() - s))
def processRequestLog():
global request_log
if request_log:
content_db = ContentDbPlugin.content_db
if not content_db.conn:
return False
s = time.time()
request_log_prev = request_log
request_log = collections.defaultdict(lambda: collections.defaultdict(int)) # {site_id: {inner_path: uploaded_bytes}}
num = 0
for site_id in request_log_prev:
for inner_path, uploaded in request_log_prev[site_id].items():
content_db.execute(
"UPDATE file_optional SET uploaded = uploaded + %s WHERE ?" % uploaded,
{"site_id": site_id, "inner_path": inner_path}
)
num += 1
content_db.log.debug("Inserted %s file request stat in %.3fs" % (num, time.time() - s))
if "access_log" not in locals().keys(): # To keep between module reloads
access_log = collections.defaultdict(dict) # {site_id: {inner_path1: 1, inner_path2: 1...}}
request_log = collections.defaultdict(lambda: collections.defaultdict(int)) # {site_id: {inner_path: uploaded_bytes}}
helper.timer(61, processAccessLog)
helper.timer(60, processRequestLog)
@PluginManager.registerTo("ContentManager")
class ContentManagerPlugin(object):
def __init__(self, *args, **kwargs):
self.cache_is_pinned = {}
super(ContentManagerPlugin, self).__init__(*args, **kwargs)
def optionalDownloaded(self, inner_path, hash_id, size=None, own=False):
if "|" in inner_path: # Big file piece
file_inner_path, file_range = inner_path.split("|")
else:
file_inner_path = inner_path
self.contents.db.executeDelayed(
"UPDATE file_optional SET time_downloaded = :now, is_downloaded = 1, peer = peer + 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 0",
{"now": int(time.time()), "site_id": self.contents.db.site_ids[self.site.address], "inner_path": file_inner_path}
)
return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own)
def optionalRemoved(self, inner_path, hash_id, size=None):
res = self.contents.db.execute(
"UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 1",
{"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path}
)
if res.rowcount > 0:
back = super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size)
# Re-add to hashfield if we have another file with the same hash_id
if self.isDownloaded(hash_id=hash_id, force_check_db=True):
self.hashfield.appendHashId(hash_id)
else:
back = False
self.cache_is_pinned = {}
return back
def optionalRenamed(self, inner_path_old, inner_path_new):
back = super(ContentManagerPlugin, self).optionalRenamed(inner_path_old, inner_path_new)
self.cache_is_pinned = {}
self.contents.db.execute(
"UPDATE file_optional SET inner_path = :inner_path_new WHERE site_id = :site_id AND inner_path = :inner_path_old",
{"site_id": self.contents.db.site_ids[self.site.address], "inner_path_old": inner_path_old, "inner_path_new": inner_path_new}
)
return back
def isDownloaded(self, inner_path=None, hash_id=None, force_check_db=False):
if hash_id and not force_check_db and hash_id not in self.hashfield:
return False
if inner_path:
res = self.contents.db.execute(
"SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1",
{"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path}
)
else:
res = self.contents.db.execute(
"SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND hash_id = :hash_id AND is_downloaded = 1 LIMIT 1",
{"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id}
)
row = res.fetchone()
if row and row["is_downloaded"]:
return True
else:
return False
def isPinned(self, inner_path):
if inner_path in self.cache_is_pinned:
self.site.log.debug("Cached is pinned: %s" % inner_path)
return self.cache_is_pinned[inner_path]
res = self.contents.db.execute(
"SELECT is_pinned FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1",
{"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path}
)
row = res.fetchone()
if row and row[0]:
is_pinned = True
else:
is_pinned = False
self.cache_is_pinned[inner_path] = is_pinned
self.site.log.debug("Cache set is pinned: %s %s" % (inner_path, is_pinned))
return is_pinned
def setPin(self, inner_path, is_pinned):
content_db = self.contents.db
site_id = content_db.site_ids[self.site.address]
content_db.execute("UPDATE file_optional SET is_pinned = %d WHERE ?" % is_pinned, {"site_id": site_id, "inner_path": inner_path})
self.cache_is_pinned = {}
def optionalDelete(self, inner_path):
if self.isPinned(inner_path):
self.site.log.debug("Skip deleting pinned optional file: %s" % inner_path)
return False
else:
return super(ContentManagerPlugin, self).optionalDelete(inner_path)
@PluginManager.registerTo("WorkerManager")
class WorkerManagerPlugin(object):
def doneTask(self, task):
super(WorkerManagerPlugin, self).doneTask(task)
if task["optional_hash_id"] and not self.tasks: # Execute delayed queries immedietly after tasks finished
ContentDbPlugin.content_db.processDelayed()
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
def parsePath(self, path):
global access_log
path_parts = super(UiRequestPlugin, self).parsePath(path)
if path_parts:
site_id = ContentDbPlugin.content_db.site_ids.get(path_parts["request_address"])
if site_id:
if ContentDbPlugin.content_db.isOptionalFile(site_id, path_parts["inner_path"]):
access_log[site_id][path_parts["inner_path"]] = 1
return path_parts
@PluginManager.registerTo("FileRequest")
class FileRequestPlugin(object):
def actionGetFile(self, params):
stats = super(FileRequestPlugin, self).actionGetFile(params)
self.recordFileRequest(params["site"], params["inner_path"], stats)
return stats
def actionStreamFile(self, params):
stats = super(FileRequestPlugin, self).actionStreamFile(params)
self.recordFileRequest(params["site"], params["inner_path"], stats)
return stats
def recordFileRequest(self, site_address, inner_path, stats):
if not stats:
# Only track the last request of files
return False
site_id = ContentDbPlugin.content_db.site_ids[site_address]
if site_id and ContentDbPlugin.content_db.isOptionalFile(site_id, inner_path):
request_log[site_id][inner_path] += stats["bytes_sent"]
@PluginManager.registerTo("Site")
class SitePlugin(object):
def isDownloadable(self, inner_path):
is_downloadable = super(SitePlugin, self).isDownloadable(inner_path)
if is_downloadable:
return is_downloadable
for path in self.settings.get("optional_help", {}).keys():
if inner_path.startswith(path):
return True
return False
def fileForgot(self, inner_path):
if "|" in inner_path and self.content_manager.isPinned(re.sub(r"\|.*", "", inner_path)):
self.log.debug("File %s is pinned, no fileForgot" % inner_path)
return False
else:
return super(SitePlugin, self).fileForgot(inner_path)
def fileDone(self, inner_path):
if "|" in inner_path and self.bad_files.get(inner_path, 0) > 5: # Idle optional file done
inner_path_file = re.sub(r"\|.*", "", inner_path)
num_changed = 0
for key, val in self.bad_files.items():
if key.startswith(inner_path_file) and val > 1:
self.bad_files[key] = 1
num_changed += 1
self.log.debug("Idle optional file piece done, changed retry number of %s pieces." % num_changed)
if num_changed:
gevent.spawn(self.retryBadFiles)
return super(SitePlugin, self).fileDone(inner_path)
@PluginManager.registerTo("ConfigPlugin")
class ConfigPlugin(object):
def createArguments(self):
group = self.parser.add_argument_group("OptionalManager plugin")
group.add_argument('--optional_limit', help='Limit total size of optional files', default="10%", metavar="GB or free space %")
group.add_argument('--optional_limit_exclude_minsize', help='Exclude files larger than this limit from optional size limit calculation', default=20, metavar="MB", type=int)
return super(ConfigPlugin, self).createArguments()
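# Example startup flags registered above (a usage sketch; defaults are in the code):
#   --optional_limit 20%                  # cap optional files at 20% of free space
#   --optional_limit_exclude_minsize 50   # files above 50 MB don't count toward the cap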


@ -1,158 +0,0 @@
import copy
import pytest
@pytest.mark.usefixtures("resetSettings")
class TestOptionalManager:
def testDbFill(self, site):
contents = site.content_manager.contents
assert len(site.content_manager.hashfield) > 0
assert contents.db.execute("SELECT COUNT(*) FROM file_optional WHERE is_downloaded = 1").fetchone()[0] == len(site.content_manager.hashfield)
def testSetContent(self, site):
contents = site.content_manager.contents
# Add new file
new_content = copy.deepcopy(contents["content.json"])
new_content["files_optional"]["testfile"] = {
"size": 1234,
"sha512": "aaaabbbbcccc"
}
num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0]
contents["content.json"] = new_content
assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] > num_optional_files_before
# Remove file
new_content = copy.deepcopy(contents["content.json"])
del new_content["files_optional"]["testfile"]
num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0]
contents["content.json"] = new_content
assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before
def testDeleteContent(self, site):
contents = site.content_manager.contents
num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0]
del contents["content.json"]
assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before
def testVerifyFiles(self, site):
contents = site.content_manager.contents
# Add new file
new_content = copy.deepcopy(contents["content.json"])
new_content["files_optional"]["testfile"] = {
"size": 1234,
"sha512": "aaaabbbbcccc"
}
contents["content.json"] = new_content
file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone()
assert not file_row["is_downloaded"]
# Write file from outside of ZeroNet
site.storage.open("testfile", "wb").write(b"A" * 1234) # For quick check hash does not matter only file size
hashfield_len_before = len(site.content_manager.hashfield)
site.storage.verifyFiles(quick_check=True)
assert len(site.content_manager.hashfield) == hashfield_len_before + 1
file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone()
assert file_row["is_downloaded"]
# Delete file outside of ZeroNet
site.storage.delete("testfile")
site.storage.verifyFiles(quick_check=True)
file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone()
assert not file_row["is_downloaded"]
def testVerifyFilesSameHashId(self, site):
contents = site.content_manager.contents
new_content = copy.deepcopy(contents["content.json"])
# Add two files with same hashid (first 4 character)
new_content["files_optional"]["testfile1"] = {
"size": 1234,
"sha512": "aaaabbbbcccc"
}
new_content["files_optional"]["testfile2"] = {
"size": 2345,
"sha512": "aaaabbbbdddd"
}
contents["content.json"] = new_content
assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd")
# Write files from outside of ZeroNet (for quick_check, the hash does not matter, only the file size)
site.storage.open("testfile1", "wb").write(b"A" * 1234)
site.storage.open("testfile2", "wb").write(b"B" * 2345)
site.storage.verifyFiles(quick_check=True)
# Make sure that both are downloaded
assert site.content_manager.isDownloaded("testfile1")
assert site.content_manager.isDownloaded("testfile2")
assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") in site.content_manager.hashfield
# Delete one of the files
site.storage.delete("testfile1")
site.storage.verifyFiles(quick_check=True)
assert not site.content_manager.isDownloaded("testfile1")
assert site.content_manager.isDownloaded("testfile2")
assert site.content_manager.hashfield.getHashId("aaaabbbbdddd") in site.content_manager.hashfield
def testIsPinned(self, site):
assert not site.content_manager.isPinned("data/img/zerotalk-upvote.png")
site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
assert site.content_manager.isPinned("data/img/zerotalk-upvote.png")
assert len(site.content_manager.cache_is_pinned) == 1
site.content_manager.cache_is_pinned = {}
assert site.content_manager.isPinned("data/img/zerotalk-upvote.png")
def testBigfilePieceReset(self, site):
site.bad_files = {
"data/fake_bigfile.mp4|0-1024": 10,
"data/fake_bigfile.mp4|1024-2048": 10,
"data/fake_bigfile.mp4|2048-3064": 10
}
site.onFileDone("data/fake_bigfile.mp4|0-1024")
assert site.bad_files["data/fake_bigfile.mp4|1024-2048"] == 1
assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1
def testOptionalDelete(self, site):
contents = site.content_manager.contents
site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
site.content_manager.setPin("data/img/zeroid.png", False)
new_content = copy.deepcopy(contents["content.json"])
del new_content["files_optional"]["data/img/zerotalk-upvote.png"]
del new_content["files_optional"]["data/img/zeroid.png"]
assert site.storage.isFile("data/img/zerotalk-upvote.png")
assert site.storage.isFile("data/img/zeroid.png")
site.storage.writeJson("content.json", new_content)
site.content_manager.loadContent("content.json", force=True)
assert not site.storage.isFile("data/img/zeroid.png")
assert site.storage.isFile("data/img/zerotalk-upvote.png")
def testOptionalRename(self, site):
contents = site.content_manager.contents
site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
new_content = copy.deepcopy(contents["content.json"])
new_content["files_optional"]["data/img/zerotalk-upvote-new.png"] = new_content["files_optional"]["data/img/zerotalk-upvote.png"]
del new_content["files_optional"]["data/img/zerotalk-upvote.png"]
assert site.storage.isFile("data/img/zerotalk-upvote.png")
assert site.content_manager.isPinned("data/img/zerotalk-upvote.png")
site.storage.writeJson("content.json", new_content)
site.content_manager.loadContent("content.json", force=True)
assert not site.storage.isFile("data/img/zerotalk-upvote.png")
assert not site.content_manager.isPinned("data/img/zerotalk-upvote.png")
assert site.content_manager.isPinned("data/img/zerotalk-upvote-new.png")
assert site.storage.isFile("data/img/zerotalk-upvote-new.png")


@ -1 +0,0 @@
from src.Test.conftest import *


@ -1,5 +0,0 @@
[pytest]
python_files = Test*.py
addopts = -rsxX -v --durations=6
markers =
webtest: mark a test as a webtest.


@ -1,396 +0,0 @@
import re
import time
import html
import os
import gevent
from Plugin import PluginManager
from Config import config
from util import helper
from util.Flag import flag
from Translate import Translate
plugin_dir = os.path.dirname(__file__)
if "_" not in locals():
_ = Translate(plugin_dir + "/languages/")
bigfile_sha512_cache = {}
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
def __init__(self, *args, **kwargs):
self.time_peer_numbers_updated = 0
super(UiWebsocketPlugin, self).__init__(*args, **kwargs)
def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
# Add file to content.db and set it as pinned
content_db = self.site.content_manager.contents.db
content_inner_dir = helper.getDirname(inner_path)
content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time()
if len(content_db.my_optional_files) > 50: # Keep only last 50
oldest_key = min(
iter(content_db.my_optional_files.keys()),
key=(lambda key: content_db.my_optional_files[key])
)
del content_db.my_optional_files[oldest_key]
return super(UiWebsocketPlugin, self).actionSiteSign(to, privatekey, inner_path, *args, **kwargs)
def updatePeerNumbers(self):
self.site.updateHashfield()
content_db = self.site.content_manager.contents.db
content_db.updatePeerNumbers()
self.site.updateWebsocket(peernumber_updated=True)
def addBigfileInfo(self, row):
global bigfile_sha512_cache
content_db = self.site.content_manager.contents.db
site = content_db.sites[row["address"]]
if not site.settings.get("has_bigfile"):
return False
file_key = row["address"] + "/" + row["inner_path"]
sha512 = bigfile_sha512_cache.get(file_key)
file_info = None
if not sha512:
file_info = site.content_manager.getFileInfo(row["inner_path"])
if not file_info or not file_info.get("piece_size"):
return False
sha512 = file_info["sha512"]
bigfile_sha512_cache[file_key] = sha512
if sha512 in site.storage.piecefields:
piecefield = site.storage.piecefields[sha512].tobytes()
else:
piecefield = None
if piecefield:
row["pieces"] = len(piecefield)
row["pieces_downloaded"] = piecefield.count(b"\x01")
row["downloaded_percent"] = 100 * row["pieces_downloaded"] / row["pieces"]
if row["pieces_downloaded"]:
if row["pieces"] == row["pieces_downloaded"]:
row["bytes_downloaded"] = row["size"]
else:
if not file_info:
file_info = site.content_manager.getFileInfo(row["inner_path"])
row["bytes_downloaded"] = row["pieces_downloaded"] * file_info.get("piece_size", 0)
else:
row["bytes_downloaded"] = 0
row["is_downloading"] = bool(next((inner_path for inner_path in site.bad_files if inner_path.startswith(row["inner_path"])), False))
# Add leech / seed stats
row["peer_seed"] = 0
row["peer_leech"] = 0
for peer in site.peers.values():
if not peer.time_piecefields_updated or sha512 not in peer.piecefields:
continue
peer_piecefield = peer.piecefields[sha512].tobytes()
if not peer_piecefield:
continue
if peer_piecefield == b"\x01" * len(peer_piecefield):
row["peer_seed"] += 1
else:
row["peer_leech"] += 1
# Add myself
if piecefield:
if row["pieces_downloaded"] == row["pieces"]:
row["peer_seed"] += 1
else:
row["peer_leech"] += 1
return True
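# Example: for a big file split into 100 pieces with 40 local piece flags set
# (b"\x01"), the fields above become
#   pieces = 100, pieces_downloaded = 40, downloaded_percent = 40.0
#   bytes_downloaded = 40 * piece_size  (row["size"] once every piece arrived)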
# Optional file functions
def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC", limit=10, filter="downloaded", filter_inner_path=None):
if not address:
address = self.site.address
# Update peer numbers if necessary
content_db = self.site.content_manager.contents.db
if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5:
# Start in a new greenlet to avoid blocking
self.time_peer_numbers_updated = time.time()
gevent.spawn(self.updatePeerNumbers)
if address == "all" and "ADMIN" not in self.permissions:
return self.response(to, {"error": "Forbidden"})
if not self.hasSitePermission(address):
return self.response(to, {"error": "Forbidden"})
if not all([re.match("^[a-z_*/+-]+( DESC| ASC|)$", part.strip()) for part in orderby.split(",")]):
return self.response(to, "Invalid order_by")
if not isinstance(limit, int):
return self.response(to, "Invalid limit")
back = []
content_db = self.site.content_manager.contents.db
wheres = {}
wheres_raw = []
if "bigfile" in filter:
wheres["size >"] = 1024 * 1024 * 1
if "downloaded" in filter:
wheres_raw.append("(is_downloaded = 1 OR is_pinned = 1)")
if "pinned" in filter:
wheres["is_pinned"] = 1
if filter_inner_path:
wheres["inner_path__like"] = filter_inner_path
if address == "all":
join = "LEFT JOIN site USING (site_id)"
else:
wheres["site_id"] = content_db.site_ids[address]
join = ""
if wheres_raw:
query_wheres_raw = "AND" + " AND ".join(wheres_raw)
else:
query_wheres_raw = ""
query = "SELECT * FROM file_optional %s WHERE ? %s ORDER BY %s LIMIT %s" % (join, query_wheres_raw, orderby, limit)
for row in content_db.execute(query, wheres):
row = dict(row)
if address != "all":
row["address"] = address
if row["size"] > 1024 * 1024:
has_bigfile_info = self.addBigfileInfo(row)
else:
has_bigfile_info = False
if not has_bigfile_info and "bigfile" in filter:
continue
if not has_bigfile_info:
if row["is_downloaded"]:
row["bytes_downloaded"] = row["size"]
row["downloaded_percent"] = 100
else:
row["bytes_downloaded"] = 0
row["downloaded_percent"] = 0
back.append(row)
self.response(to, back)
def actionOptionalFileInfo(self, to, inner_path):
content_db = self.site.content_manager.contents.db
site_id = content_db.site_ids[self.site.address]
# Update peer numbers if necessary
if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5:
# Start in a new greenlet to avoid blocking
self.time_peer_numbers_updated = time.time()
gevent.spawn(self.updatePeerNumbers)
query = "SELECT * FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1"
res = content_db.execute(query, {"site_id": site_id, "inner_path": inner_path})
row = next(res, None)
if row:
row = dict(row)
if row["size"] > 1024 * 1024:
row["address"] = self.site.address
self.addBigfileInfo(row)
self.response(to, row)
else:
self.response(to, None)
def setPin(self, inner_path, is_pinned, address=None):
if not address:
address = self.site.address
if not self.hasSitePermission(address):
return {"error": "Forbidden"}
site = self.server.sites[address]
site.content_manager.setPin(inner_path, is_pinned)
return "ok"
@flag.no_multiuser
def actionOptionalFilePin(self, to, inner_path, address=None):
if type(inner_path) is not list:
inner_path = [inner_path]
back = self.setPin(inner_path, 1, address)
num_file = len(inner_path)
if back == "ok":
if num_file == 1:
self.cmd("notification", ["done", _["Pinned %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
else:
self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000])
self.response(to, back)
@flag.no_multiuser
def actionOptionalFileUnpin(self, to, inner_path, address=None):
if type(inner_path) is not list:
inner_path = [inner_path]
back = self.setPin(inner_path, 0, address)
num_file = len(inner_path)
if back == "ok":
if num_file == 1:
self.cmd("notification", ["done", _["Removed pin from %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
else:
self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000])
self.response(to, back)
@flag.no_multiuser
def actionOptionalFileDelete(self, to, inner_path, address=None):
if not address:
address = self.site.address
if not self.hasSitePermission(address):
return self.response(to, {"error": "Forbidden"})
site = self.server.sites[address]
content_db = site.content_manager.contents.db
site_id = content_db.site_ids[site.address]
res = content_db.execute("SELECT * FROM file_optional WHERE ? LIMIT 1", {"site_id": site_id, "inner_path": inner_path, "is_downloaded": 1})
row = next(res, None)
if not row:
return self.response(to, {"error": "Not found in content.db"})
removed = site.content_manager.optionalRemoved(inner_path, row["hash_id"], row["size"])
# if not removed:
# return self.response(to, {"error": "Not found in hash_id: %s" % row["hash_id"]})
content_db.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"site_id": site_id, "inner_path": inner_path})
try:
site.storage.delete(inner_path)
except Exception as err:
return self.response(to, {"error": "File delete error: %s" % err})
site.updateWebsocket(file_delete=inner_path)
if inner_path in site.content_manager.cache_is_pinned:
site.content_manager.cache_is_pinned = {}
self.response(to, "ok")
# Limit functions
@flag.admin
def actionOptionalLimitStats(self, to):
back = {}
back["limit"] = config.optional_limit
back["used"] = self.site.content_manager.contents.db.getOptionalUsedBytes()
back["free"] = helper.getFreeSpace()
self.response(to, back)
@flag.no_multiuser
@flag.admin
def actionOptionalLimitSet(self, to, limit):
config.optional_limit = re.sub(r"\.0+$", "", limit) # Strip an unnecessary trailing .0 from the value
config.saveValue("optional_limit", limit)
self.response(to, "ok")
# Distribute help functions
def actionOptionalHelpList(self, to, address=None):
if not address:
address = self.site.address
if not self.hasSitePermission(address):
return self.response(to, {"error": "Forbidden"})
site = self.server.sites[address]
self.response(to, site.settings.get("optional_help", {}))
@flag.no_multiuser
def actionOptionalHelp(self, to, directory, title, address=None):
if not address:
address = self.site.address
if not self.hasSitePermission(address):
return self.response(to, {"error": "Forbidden"})
site = self.server.sites[address]
content_db = site.content_manager.contents.db
site_id = content_db.site_ids[address]
if "optional_help" not in site.settings:
site.settings["optional_help"] = {}
stats = content_db.execute(
"SELECT COUNT(*) AS num, SUM(size) AS size FROM file_optional WHERE site_id = :site_id AND inner_path LIKE :inner_path",
{"site_id": site_id, "inner_path": directory + "%"}
).fetchone()
stats = dict(stats)
if not stats["size"]:
stats["size"] = 0
if not stats["num"]:
stats["num"] = 0
self.cmd("notification", [
"done",
_["You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>"] %
(html.escape(title), html.escape(directory)),
10000
])
site.settings["optional_help"][directory] = title
self.response(to, dict(stats))
@flag.no_multiuser
def actionOptionalHelpRemove(self, to, directory, address=None):
if not address:
address = self.site.address
if not self.hasSitePermission(address):
return self.response(to, {"error": "Forbidden"})
site = self.server.sites[address]
try:
del site.settings["optional_help"][directory]
self.response(to, "ok")
except Exception:
self.response(to, {"error": "Not found"})
def cbOptionalHelpAll(self, to, site, value):
site.settings["autodownloadoptional"] = value
self.response(to, value)
@flag.no_multiuser
def actionOptionalHelpAll(self, to, value, address=None):
if not address:
address = self.site.address
if not self.hasSitePermission(address):
return self.response(to, {"error": "Forbidden"})
site = self.server.sites[address]
if value:
if "ADMIN" in self.site.settings["permissions"]:
self.cbOptionalHelpAll(to, site, True)
else:
site_title = site.content_manager.contents["content.json"].get("title", address)
self.cmd(
"confirm",
[
_["Help distribute all new optional files on site <b>%s</b>"] % html.escape(site_title),
_["Yes, I want to help!"]
],
lambda res: self.cbOptionalHelpAll(to, site, True)
)
else:
site.settings["autodownloadoptional"] = False
self.response(to, False)


@ -1,2 +0,0 @@
from . import OptionalManagerPlugin
from . import UiWebsocketPlugin


@ -1,7 +0,0 @@
{
"Pinned %s files": "Archivos %s fijados",
"Removed pin from %s files": "Archivos %s que no estan fijados",
"You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>": "Tu empezaste a ayudar a distribuir <b>%s</b>.<br><small>Directorio: %s</small>",
"Help distribute all new optional files on site <b>%s</b>": "Ayude a distribuir todos los archivos opcionales en el sitio <b>%s</b>",
"Yes, I want to help!": "¡Si, yo quiero ayudar!"
}


@ -1,7 +0,0 @@
{
"Pinned %s files": "Fichiers %s épinglés",
"Removed pin from %s files": "Fichiers %s ne sont plus épinglés",
"You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>": "Vous avez commencé à aider à distribuer <b>%s</b>.<br><small>Dossier : %s</small>",
"Help distribute all new optional files on site <b>%s</b>": "Aider à distribuer tous les fichiers optionnels du site <b>%s</b>",
"Yes, I want to help!": "Oui, je veux aider !"
}


@ -1,7 +0,0 @@
{
"Pinned %s files": "%s fájl rögzítve",
"Removed pin from %s files": "%s fájl rögzítés eltávolítva",
"You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>": "Új segítség a terjesztésben: <b>%s</b>.<br><small>Könyvtár: %s</small>",
"Help distribute all new optional files on site <b>%s</b>": "Segítség az összes új opcionális fájl terjesztésében az <b>%s</b> oldalon",
"Yes, I want to help!": "Igen, segíteni akarok!"
}


@ -1,7 +0,0 @@
{
"Pinned %s files": "%s 件のファイルを固定",
"Removed pin from %s files": "%s 件のファイルの固定を解除",
"You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>": "あなたはサイト: <b>%s</b> の配布の援助を開始しました。<br><small>ディレクトリ: %s</small>",
"Help distribute all new optional files on site <b>%s</b>": "サイト: <b>%s</b> のすべての新しいオプションファイルの配布を援助しますか?",
"Yes, I want to help!": "はい、やります!"
}


@ -1,7 +0,0 @@
{
"Pinned %s files": "Arquivos %s fixados",
"Removed pin from %s files": "Arquivos %s não estão fixados",
"You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>": "Você começou a ajudar a distribuir <b>%s</b>.<br><small>Pasta: %s</small>",
"Help distribute all new optional files on site <b>%s</b>": "Ajude a distribuir todos os novos arquivos opcionais no site <b>%s</b>",
"Yes, I want to help!": "Sim, eu quero ajudar!"
}


@ -1,7 +0,0 @@
{
"Pinned %s files": "已固定 %s 個檔",
"Removed pin from %s files": "已解除固定 %s 個檔",
"You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>": "你已經開始幫助分發 <b>%s</b> 。<br><small>目錄:%s</small>",
"Help distribute all new optional files on site <b>%s</b>": "你想要幫助分發 <b>%s</b> 網站的所有檔嗎?",
"Yes, I want to help!": "是,我想要幫助!"
}

Some files were not shown because too many files have changed in this diff.