From 829e825fdd6596c131c1ec6070063567c8aaa386 Mon Sep 17 00:00:00 2001
From: wupg98 <106410199+wupg98@users.noreply.github.com>
Date: Sat, 2 Sep 2023 07:46:33 +0200
Subject: [PATCH] Delete plugins directory
---
.../AnnounceBitTorrentPlugin.py | 148 -
plugins/AnnounceBitTorrent/__init__.py | 1 -
plugins/AnnounceBitTorrent/plugin_info.json | 5 -
plugins/AnnounceLocal/AnnounceLocalPlugin.py | 147 -
plugins/AnnounceLocal/BroadcastServer.py | 139 -
plugins/AnnounceLocal/Test/TestAnnounce.py | 113 -
plugins/AnnounceLocal/Test/conftest.py | 4 -
plugins/AnnounceLocal/Test/pytest.ini | 5 -
plugins/AnnounceLocal/__init__.py | 1 -
plugins/AnnounceLocal/plugin_info.json | 5 -
plugins/AnnounceShare/AnnounceSharePlugin.py | 190 -
.../AnnounceShare/Test/TestAnnounceShare.py | 24 -
plugins/AnnounceShare/Test/conftest.py | 3 -
plugins/AnnounceShare/Test/pytest.ini | 5 -
plugins/AnnounceShare/__init__.py | 1 -
plugins/AnnounceShare/plugin_info.json | 5 -
plugins/AnnounceZero/AnnounceZeroPlugin.py | 140 -
plugins/AnnounceZero/__init__.py | 1 -
plugins/AnnounceZero/plugin_info.json | 5 -
plugins/Benchmark/BenchmarkDb.py | 143 -
plugins/Benchmark/BenchmarkPack.py | 183 -
plugins/Benchmark/BenchmarkPlugin.py | 428 -
plugins/Benchmark/__init__.py | 3 -
plugins/Benchmark/media/benchmark.html | 123 -
plugins/Benchmark/plugin_info.json | 5 -
plugins/Bigfile/BigfilePiecefield.py | 170 -
plugins/Bigfile/BigfilePlugin.py | 843 -
plugins/Bigfile/Test/TestBigfile.py | 574 -
plugins/Bigfile/Test/conftest.py | 1 -
plugins/Bigfile/Test/pytest.ini | 5 -
plugins/Bigfile/__init__.py | 2 -
plugins/Chart/ChartCollector.py | 181 -
plugins/Chart/ChartDb.py | 133 -
plugins/Chart/ChartPlugin.py | 57 -
plugins/Chart/__init__.py | 1 -
plugins/Chart/plugin_info.json | 5 -
plugins/ContentFilter/ContentFilterPlugin.py | 262 -
plugins/ContentFilter/ContentFilterStorage.py | 164 -
.../ContentFilter/Test/TestContentFilter.py | 82 -
plugins/ContentFilter/Test/conftest.py | 1 -
plugins/ContentFilter/Test/pytest.ini | 5 -
plugins/ContentFilter/__init__.py | 1 -
plugins/ContentFilter/languages/hu.json | 6 -
plugins/ContentFilter/languages/it.json | 6 -
plugins/ContentFilter/languages/jp.json | 6 -
plugins/ContentFilter/languages/pt-br.json | 6 -
plugins/ContentFilter/languages/zh-tw.json | 6 -
plugins/ContentFilter/languages/zh.json | 6 -
plugins/ContentFilter/media/blocklisted.html | 89 -
plugins/ContentFilter/media/js/ZeroFrame.js | 119 -
plugins/ContentFilter/plugin_info.json | 5 -
plugins/Cors/CorsPlugin.py | 139 -
plugins/Cors/__init__.py | 1 -
plugins/Cors/plugin_info.json | 5 -
plugins/CryptMessage/CryptMessage.py | 58 -
plugins/CryptMessage/CryptMessagePlugin.py | 225 -
plugins/CryptMessage/Test/TestCrypt.py | 136 -
plugins/CryptMessage/Test/conftest.py | 1 -
plugins/CryptMessage/Test/pytest.ini | 5 -
plugins/CryptMessage/__init__.py | 1 -
plugins/CryptMessage/plugin_info.json | 5 -
plugins/FilePack/FilePackPlugin.py | 193 -
plugins/FilePack/__init__.py | 1 -
plugins/FilePack/plugin_info.json | 5 -
plugins/MergerSite/MergerSitePlugin.py | 399 -
plugins/MergerSite/__init__.py | 1 -
plugins/MergerSite/languages/es.json | 5 -
plugins/MergerSite/languages/fr.json | 5 -
plugins/MergerSite/languages/hu.json | 5 -
plugins/MergerSite/languages/it.json | 5 -
plugins/MergerSite/languages/jp.json | 5 -
plugins/MergerSite/languages/pt-br.json | 5 -
plugins/MergerSite/languages/tr.json | 5 -
plugins/MergerSite/languages/zh-tw.json | 5 -
plugins/MergerSite/languages/zh.json | 5 -
plugins/Newsfeed/NewsfeedPlugin.py | 187 -
plugins/Newsfeed/__init__.py | 1 -
plugins/OptionalManager/ContentDbPlugin.py | 414 -
.../OptionalManager/OptionalManagerPlugin.py | 253 -
.../Test/TestOptionalManager.py | 158 -
plugins/OptionalManager/Test/conftest.py | 1 -
plugins/OptionalManager/Test/pytest.ini | 5 -
plugins/OptionalManager/UiWebsocketPlugin.py | 396 -
plugins/OptionalManager/__init__.py | 2 -
plugins/OptionalManager/languages/es.json | 7 -
plugins/OptionalManager/languages/fr.json | 7 -
plugins/OptionalManager/languages/hu.json | 7 -
plugins/OptionalManager/languages/jp.json | 7 -
plugins/OptionalManager/languages/pt-br.json | 7 -
plugins/OptionalManager/languages/zh-tw.json | 7 -
plugins/OptionalManager/languages/zh.json | 7 -
plugins/PeerDb/PeerDbPlugin.py | 100 -
plugins/PeerDb/__init__.py | 2 -
plugins/PeerDb/plugin_info.json | 5 -
plugins/Sidebar/ConsolePlugin.py | 101 -
plugins/Sidebar/SidebarPlugin.py | 805 -
plugins/Sidebar/ZipStream.py | 59 -
plugins/Sidebar/__init__.py | 2 -
plugins/Sidebar/languages/da.json | 81 -
plugins/Sidebar/languages/de.json | 81 -
plugins/Sidebar/languages/es.json | 79 -
plugins/Sidebar/languages/fr.json | 82 -
plugins/Sidebar/languages/hu.json | 82 -
plugins/Sidebar/languages/it.json | 81 -
plugins/Sidebar/languages/jp.json | 104 -
plugins/Sidebar/languages/pl.json | 82 -
plugins/Sidebar/languages/pt-br.json | 97 -
plugins/Sidebar/languages/ru.json | 82 -
plugins/Sidebar/languages/tr.json | 82 -
plugins/Sidebar/languages/zh-tw.json | 83 -
plugins/Sidebar/languages/zh.json | 101 -
plugins/Sidebar/media/Class.coffee | 23 -
plugins/Sidebar/media/Console.coffee | 201 -
plugins/Sidebar/media/Console.css | 31 -
plugins/Sidebar/media/Menu.coffee | 49 -
plugins/Sidebar/media/Menu.css | 19 -
plugins/Sidebar/media/Prototypes.coffee | 9 -
plugins/Sidebar/media/RateLimit.coffee | 14 -
plugins/Sidebar/media/Scrollable.js | 91 -
plugins/Sidebar/media/Scrollbable.css | 44 -
plugins/Sidebar/media/Sidebar.coffee | 644 -
plugins/Sidebar/media/Sidebar.css | 169 -
plugins/Sidebar/media/all.css | 281 -
plugins/Sidebar/media/all.js | 1770 --
plugins/Sidebar/media/morphdom.js | 340 -
plugins/Sidebar/media_globe/Detector.js | 60 -
plugins/Sidebar/media_globe/Tween.js | 12 -
plugins/Sidebar/media_globe/all.js | 1345 --
plugins/Sidebar/media_globe/globe.js | 436 -
plugins/Sidebar/media_globe/three.min.js | 814 -
plugins/Sidebar/media_globe/world.jpg | Bin 94795 -> 0 bytes
plugins/Sidebar/plugin_info.json | 5 -
plugins/Stats/StatsPlugin.py | 627 -
plugins/Stats/__init__.py | 1 -
plugins/Stats/plugin_info.json | 5 -
plugins/TranslateSite/TranslateSitePlugin.py | 80 -
plugins/TranslateSite/__init__.py | 1 -
plugins/TranslateSite/plugin_info.json | 5 -
plugins/Trayicon/TrayiconPlugin.py | 171 -
plugins/Trayicon/__init__.py | 4 -
plugins/Trayicon/languages/es.json | 14 -
plugins/Trayicon/languages/fr.json | 14 -
plugins/Trayicon/languages/hu.json | 14 -
plugins/Trayicon/languages/it.json | 14 -
plugins/Trayicon/languages/jp.json | 14 -
plugins/Trayicon/languages/pl.json | 14 -
plugins/Trayicon/languages/pt-br.json | 14 -
plugins/Trayicon/languages/tr.json | 14 -
plugins/Trayicon/languages/zh-tw.json | 14 -
plugins/Trayicon/languages/zh.json | 14 -
plugins/Trayicon/lib/__init__.py | 0
plugins/Trayicon/lib/notificationicon.py | 730 -
plugins/Trayicon/lib/winfolders.py | 54 -
plugins/Trayicon/plugin_info.json | 5 -
plugins/Trayicon/trayicon.ico | Bin 1150 -> 0 bytes
plugins/UiConfig/UiConfigPlugin.py | 72 -
plugins/UiConfig/__init__.py | 1 -
plugins/UiConfig/languages/hu.json | 33 -
plugins/UiConfig/languages/jp.json | 62 -
plugins/UiConfig/languages/pl.json | 62 -
plugins/UiConfig/languages/pt-br.json | 56 -
plugins/UiConfig/languages/zh.json | 62 -
plugins/UiConfig/media/config.html | 20 -
plugins/UiConfig/media/css/Config.css | 68 -
plugins/UiConfig/media/css/all.css | 124 -
plugins/UiConfig/media/css/button.css | 12 -
plugins/UiConfig/media/css/fonts.css | 30 -
plugins/UiConfig/media/img/loading.gif | Bin 723 -> 0 bytes
.../UiConfig/media/js/ConfigStorage.coffee | 222 -
plugins/UiConfig/media/js/ConfigView.coffee | 124 -
plugins/UiConfig/media/js/UiConfig.coffee | 129 -
plugins/UiConfig/media/js/all.js | 2066 --
plugins/UiConfig/media/js/lib/Class.coffee | 23 -
plugins/UiConfig/media/js/lib/Promise.coffee | 74 -
.../UiConfig/media/js/lib/Prototypes.coffee | 8 -
plugins/UiConfig/media/js/lib/maquette.js | 770 -
.../UiConfig/media/js/utils/Animation.coffee | 138 -
plugins/UiConfig/media/js/utils/Dollar.coffee | 3 -
.../UiConfig/media/js/utils/ZeroFrame.coffee | 85 -
plugins/UiConfig/plugin_info.json | 5 -
plugins/UiFileManager/UiFileManagerPlugin.py | 90 -
plugins/UiFileManager/__init__.py | 1 -
plugins/UiFileManager/languages/hu.json | 20 -
plugins/UiFileManager/languages/jp.json | 20 -
.../UiFileManager/media/codemirror/LICENSE | 21 -
.../UiFileManager/media/codemirror/all.css | 678 -
plugins/UiFileManager/media/codemirror/all.js | 19964 ----------------
.../media/codemirror/base/codemirror.css | 349 -
.../media/codemirror/base/codemirror.js | 9778 --------
.../codemirror/extension/dialog/dialog.css | 32 -
.../codemirror/extension/dialog/dialog.js | 163 -
.../extension/edit/closebrackets.js | 191 -
.../codemirror/extension/edit/closetag.js | 184 -
.../codemirror/extension/edit/continuelist.js | 101 -
.../extension/edit/matchbrackets.js | 158 -
.../codemirror/extension/edit/matchtags.js | 66 -
.../extension/edit/trailingspace.js | 27 -
.../codemirror/extension/fold/brace-fold.js | 105 -
.../codemirror/extension/fold/comment-fold.js | 59 -
.../codemirror/extension/fold/foldcode.js | 157 -
.../codemirror/extension/fold/foldgutter.css | 20 -
.../codemirror/extension/fold/foldgutter.js | 163 -
.../codemirror/extension/fold/indent-fold.js | 48 -
.../extension/fold/markdown-fold.js | 49 -
.../codemirror/extension/fold/xml-fold.js | 184 -
.../codemirror/extension/hint/anyword-hint.js | 41 -
.../codemirror/extension/hint/html-hint.js | 350 -
.../codemirror/extension/hint/show-hint.css | 36 -
.../codemirror/extension/hint/show-hint.js | 479 -
.../codemirror/extension/hint/sql-hint.js | 304 -
.../codemirror/extension/hint/xml-hint.js | 123 -
.../codemirror/extension/lint/json-lint.js | 40 -
.../codemirror/extension/lint/jsonlint.js | 1 -
.../media/codemirror/extension/lint/lint.css | 73 -
.../media/codemirror/extension/lint/lint.js | 255 -
.../codemirror/extension/mdn-like-custom.css | 44 -
.../extension/scroll/annotatescrollbar.js | 122 -
.../extension/scroll/scrollpastend.js | 48 -
.../extension/scroll/simplescrollbars.css | 66 -
.../extension/scroll/simplescrollbars.js | 152 -
.../extension/search/jump-to-line.js | 50 -
.../extension/search/match-highlighter.js | 167 -
.../extension/search/matchesonscrollbar.css | 8 -
.../extension/search/matchesonscrollbar.js | 97 -
.../codemirror/extension/search/search.js | 260 -
.../extension/search/searchcursor.js | 296 -
.../extension/selection/active-line.js | 72 -
.../extension/selection/mark-selection.js | 119 -
.../extension/selection/selection-pointer.js | 98 -
.../media/codemirror/extension/simple.js | 216 -
.../media/codemirror/extension/sublime.js | 714 -
.../media/codemirror/mode/coffeescript.js | 359 -
.../media/codemirror/mode/css.js | 860 -
.../UiFileManager/media/codemirror/mode/go.js | 187 -
.../media/codemirror/mode/htmlembedded.js | 37 -
.../media/codemirror/mode/htmlmixed.js | 152 -
.../media/codemirror/mode/javascript.js | 934 -
.../media/codemirror/mode/markdown.js | 886 -
.../media/codemirror/mode/python.js | 399 -
.../media/codemirror/mode/rust.js | 72 -
.../media/codemirror/mode/xml.js | 413 -
plugins/UiFileManager/media/css/Menu.css | 33 -
plugins/UiFileManager/media/css/Selectbar.css | 17 -
.../UiFileManager/media/css/UiFileManager.css | 148 -
plugins/UiFileManager/media/css/all.css | 211 -
plugins/UiFileManager/media/img/loading.gif | Bin 723 -> 0 bytes
plugins/UiFileManager/media/js/Config.coffee | 15 -
.../UiFileManager/media/js/FileEditor.coffee | 179 -
.../media/js/FileItemList.coffee | 194 -
.../UiFileManager/media/js/FileList.coffee | 268 -
.../media/js/UiFileManager.coffee | 79 -
plugins/UiFileManager/media/js/all.js | 3042 ---
.../media/js/lib/Animation.coffee | 138 -
.../UiFileManager/media/js/lib/Class.coffee | 23 -
.../UiFileManager/media/js/lib/Dollar.coffee | 3 -
.../media/js/lib/ItemList.coffee | 26 -
.../UiFileManager/media/js/lib/Menu.coffee | 110 -
.../UiFileManager/media/js/lib/Promise.coffee | 74 -
.../media/js/lib/Prototypes.coffee | 9 -
.../media/js/lib/RateLimitCb.coffee | 62 -
.../UiFileManager/media/js/lib/Text.coffee | 147 -
.../UiFileManager/media/js/lib/Time.coffee | 59 -
.../media/js/lib/ZeroFrame.coffee | 85 -
.../UiFileManager/media/js/lib/maquette.js | 770 -
plugins/UiFileManager/media/list.html | 18 -
.../UiPluginManager/UiPluginManagerPlugin.py | 221 -
plugins/UiPluginManager/__init__.py | 1 -
.../media/css/PluginManager.css | 75 -
plugins/UiPluginManager/media/css/all.css | 129 -
plugins/UiPluginManager/media/css/button.css | 12 -
plugins/UiPluginManager/media/css/fonts.css | 30 -
plugins/UiPluginManager/media/img/loading.gif | Bin 723 -> 0 bytes
.../media/js/PluginList.coffee | 132 -
.../media/js/UiPluginManager.coffee | 71 -
plugins/UiPluginManager/media/js/all.js | 1606 --
.../UiPluginManager/media/js/lib/Class.coffee | 23 -
.../media/js/lib/Promise.coffee | 74 -
.../media/js/lib/Prototypes.coffee | 8 -
.../UiPluginManager/media/js/lib/maquette.js | 770 -
.../media/js/utils/Animation.coffee | 138 -
.../media/js/utils/Dollar.coffee | 3 -
.../media/js/utils/ZeroFrame.coffee | 85 -
.../UiPluginManager/media/plugin_manager.html | 19 -
plugins/Zeroname/README.md | 55 -
plugins/Zeroname/SiteManagerPlugin.py | 69 -
plugins/Zeroname/__init__.py | 1 -
plugins/Zeroname/updater/zeroname_updater.py | 249 -
plugins/__init__.py | 0
.../disabled-Bootstrapper/BootstrapperDb.py | 156 -
.../BootstrapperPlugin.py | 156 -
.../Test/TestBootstrapper.py | 246 -
.../disabled-Bootstrapper/Test/conftest.py | 1 -
plugins/disabled-Bootstrapper/Test/pytest.ini | 6 -
plugins/disabled-Bootstrapper/__init__.py | 1 -
.../disabled-Bootstrapper/plugin_info.json | 5 -
.../disabled-Dnschain/SiteManagerPlugin.py | 153 -
plugins/disabled-Dnschain/UiRequestPlugin.py | 34 -
plugins/disabled-Dnschain/__init__.py | 3 -
.../DonationMessagePlugin.py | 22 -
plugins/disabled-DonationMessage/__init__.py | 1 -
plugins/disabled-Multiuser/MultiuserPlugin.py | 275 -
.../disabled-Multiuser/Test/TestMultiuser.py | 14 -
plugins/disabled-Multiuser/Test/conftest.py | 1 -
plugins/disabled-Multiuser/Test/pytest.ini | 5 -
plugins/disabled-Multiuser/UserPlugin.py | 35 -
plugins/disabled-Multiuser/__init__.py | 1 -
plugins/disabled-Multiuser/plugin_info.json | 5 -
plugins/disabled-StemPort/StemPortPlugin.py | 135 -
plugins/disabled-StemPort/__init__.py | 10 -
.../disabled-UiPassword/UiPasswordPlugin.py | 183 -
plugins/disabled-UiPassword/__init__.py | 1 -
plugins/disabled-UiPassword/login.html | 116 -
plugins/disabled-UiPassword/plugin_info.json | 5 -
.../SiteManagerPlugin.py | 180 -
.../disabled-ZeronameLocal/UiRequestPlugin.py | 39 -
plugins/disabled-ZeronameLocal/__init__.py | 2 -
316 files changed, 74299 deletions(-)
delete mode 100644 plugins/AnnounceBitTorrent/AnnounceBitTorrentPlugin.py
delete mode 100644 plugins/AnnounceBitTorrent/__init__.py
delete mode 100644 plugins/AnnounceBitTorrent/plugin_info.json
delete mode 100644 plugins/AnnounceLocal/AnnounceLocalPlugin.py
delete mode 100644 plugins/AnnounceLocal/BroadcastServer.py
delete mode 100644 plugins/AnnounceLocal/Test/TestAnnounce.py
delete mode 100644 plugins/AnnounceLocal/Test/conftest.py
delete mode 100644 plugins/AnnounceLocal/Test/pytest.ini
delete mode 100644 plugins/AnnounceLocal/__init__.py
delete mode 100644 plugins/AnnounceLocal/plugin_info.json
delete mode 100644 plugins/AnnounceShare/AnnounceSharePlugin.py
delete mode 100644 plugins/AnnounceShare/Test/TestAnnounceShare.py
delete mode 100644 plugins/AnnounceShare/Test/conftest.py
delete mode 100644 plugins/AnnounceShare/Test/pytest.ini
delete mode 100644 plugins/AnnounceShare/__init__.py
delete mode 100644 plugins/AnnounceShare/plugin_info.json
delete mode 100644 plugins/AnnounceZero/AnnounceZeroPlugin.py
delete mode 100644 plugins/AnnounceZero/__init__.py
delete mode 100644 plugins/AnnounceZero/plugin_info.json
delete mode 100644 plugins/Benchmark/BenchmarkDb.py
delete mode 100644 plugins/Benchmark/BenchmarkPack.py
delete mode 100644 plugins/Benchmark/BenchmarkPlugin.py
delete mode 100644 plugins/Benchmark/__init__.py
delete mode 100644 plugins/Benchmark/media/benchmark.html
delete mode 100644 plugins/Benchmark/plugin_info.json
delete mode 100644 plugins/Bigfile/BigfilePiecefield.py
delete mode 100644 plugins/Bigfile/BigfilePlugin.py
delete mode 100644 plugins/Bigfile/Test/TestBigfile.py
delete mode 100644 plugins/Bigfile/Test/conftest.py
delete mode 100644 plugins/Bigfile/Test/pytest.ini
delete mode 100644 plugins/Bigfile/__init__.py
delete mode 100644 plugins/Chart/ChartCollector.py
delete mode 100644 plugins/Chart/ChartDb.py
delete mode 100644 plugins/Chart/ChartPlugin.py
delete mode 100644 plugins/Chart/__init__.py
delete mode 100644 plugins/Chart/plugin_info.json
delete mode 100644 plugins/ContentFilter/ContentFilterPlugin.py
delete mode 100644 plugins/ContentFilter/ContentFilterStorage.py
delete mode 100644 plugins/ContentFilter/Test/TestContentFilter.py
delete mode 100644 plugins/ContentFilter/Test/conftest.py
delete mode 100644 plugins/ContentFilter/Test/pytest.ini
delete mode 100644 plugins/ContentFilter/__init__.py
delete mode 100644 plugins/ContentFilter/languages/hu.json
delete mode 100644 plugins/ContentFilter/languages/it.json
delete mode 100644 plugins/ContentFilter/languages/jp.json
delete mode 100644 plugins/ContentFilter/languages/pt-br.json
delete mode 100644 plugins/ContentFilter/languages/zh-tw.json
delete mode 100644 plugins/ContentFilter/languages/zh.json
delete mode 100644 plugins/ContentFilter/media/blocklisted.html
delete mode 100644 plugins/ContentFilter/media/js/ZeroFrame.js
delete mode 100644 plugins/ContentFilter/plugin_info.json
delete mode 100644 plugins/Cors/CorsPlugin.py
delete mode 100644 plugins/Cors/__init__.py
delete mode 100644 plugins/Cors/plugin_info.json
delete mode 100644 plugins/CryptMessage/CryptMessage.py
delete mode 100644 plugins/CryptMessage/CryptMessagePlugin.py
delete mode 100644 plugins/CryptMessage/Test/TestCrypt.py
delete mode 100644 plugins/CryptMessage/Test/conftest.py
delete mode 100644 plugins/CryptMessage/Test/pytest.ini
delete mode 100644 plugins/CryptMessage/__init__.py
delete mode 100644 plugins/CryptMessage/plugin_info.json
delete mode 100644 plugins/FilePack/FilePackPlugin.py
delete mode 100644 plugins/FilePack/__init__.py
delete mode 100644 plugins/FilePack/plugin_info.json
delete mode 100644 plugins/MergerSite/MergerSitePlugin.py
delete mode 100644 plugins/MergerSite/__init__.py
delete mode 100644 plugins/MergerSite/languages/es.json
delete mode 100644 plugins/MergerSite/languages/fr.json
delete mode 100644 plugins/MergerSite/languages/hu.json
delete mode 100644 plugins/MergerSite/languages/it.json
delete mode 100644 plugins/MergerSite/languages/jp.json
delete mode 100644 plugins/MergerSite/languages/pt-br.json
delete mode 100644 plugins/MergerSite/languages/tr.json
delete mode 100644 plugins/MergerSite/languages/zh-tw.json
delete mode 100644 plugins/MergerSite/languages/zh.json
delete mode 100644 plugins/Newsfeed/NewsfeedPlugin.py
delete mode 100644 plugins/Newsfeed/__init__.py
delete mode 100644 plugins/OptionalManager/ContentDbPlugin.py
delete mode 100644 plugins/OptionalManager/OptionalManagerPlugin.py
delete mode 100644 plugins/OptionalManager/Test/TestOptionalManager.py
delete mode 100644 plugins/OptionalManager/Test/conftest.py
delete mode 100644 plugins/OptionalManager/Test/pytest.ini
delete mode 100644 plugins/OptionalManager/UiWebsocketPlugin.py
delete mode 100644 plugins/OptionalManager/__init__.py
delete mode 100644 plugins/OptionalManager/languages/es.json
delete mode 100644 plugins/OptionalManager/languages/fr.json
delete mode 100644 plugins/OptionalManager/languages/hu.json
delete mode 100644 plugins/OptionalManager/languages/jp.json
delete mode 100644 plugins/OptionalManager/languages/pt-br.json
delete mode 100644 plugins/OptionalManager/languages/zh-tw.json
delete mode 100644 plugins/OptionalManager/languages/zh.json
delete mode 100644 plugins/PeerDb/PeerDbPlugin.py
delete mode 100644 plugins/PeerDb/__init__.py
delete mode 100644 plugins/PeerDb/plugin_info.json
delete mode 100644 plugins/Sidebar/ConsolePlugin.py
delete mode 100644 plugins/Sidebar/SidebarPlugin.py
delete mode 100644 plugins/Sidebar/ZipStream.py
delete mode 100644 plugins/Sidebar/__init__.py
delete mode 100644 plugins/Sidebar/languages/da.json
delete mode 100644 plugins/Sidebar/languages/de.json
delete mode 100644 plugins/Sidebar/languages/es.json
delete mode 100644 plugins/Sidebar/languages/fr.json
delete mode 100644 plugins/Sidebar/languages/hu.json
delete mode 100644 plugins/Sidebar/languages/it.json
delete mode 100644 plugins/Sidebar/languages/jp.json
delete mode 100644 plugins/Sidebar/languages/pl.json
delete mode 100644 plugins/Sidebar/languages/pt-br.json
delete mode 100644 plugins/Sidebar/languages/ru.json
delete mode 100644 plugins/Sidebar/languages/tr.json
delete mode 100644 plugins/Sidebar/languages/zh-tw.json
delete mode 100644 plugins/Sidebar/languages/zh.json
delete mode 100644 plugins/Sidebar/media/Class.coffee
delete mode 100644 plugins/Sidebar/media/Console.coffee
delete mode 100644 plugins/Sidebar/media/Console.css
delete mode 100644 plugins/Sidebar/media/Menu.coffee
delete mode 100644 plugins/Sidebar/media/Menu.css
delete mode 100644 plugins/Sidebar/media/Prototypes.coffee
delete mode 100644 plugins/Sidebar/media/RateLimit.coffee
delete mode 100644 plugins/Sidebar/media/Scrollable.js
delete mode 100644 plugins/Sidebar/media/Scrollbable.css
delete mode 100644 plugins/Sidebar/media/Sidebar.coffee
delete mode 100644 plugins/Sidebar/media/Sidebar.css
delete mode 100644 plugins/Sidebar/media/all.css
delete mode 100644 plugins/Sidebar/media/all.js
delete mode 100644 plugins/Sidebar/media/morphdom.js
delete mode 100644 plugins/Sidebar/media_globe/Detector.js
delete mode 100644 plugins/Sidebar/media_globe/Tween.js
delete mode 100644 plugins/Sidebar/media_globe/all.js
delete mode 100644 plugins/Sidebar/media_globe/globe.js
delete mode 100644 plugins/Sidebar/media_globe/three.min.js
delete mode 100644 plugins/Sidebar/media_globe/world.jpg
delete mode 100644 plugins/Sidebar/plugin_info.json
delete mode 100644 plugins/Stats/StatsPlugin.py
delete mode 100644 plugins/Stats/__init__.py
delete mode 100644 plugins/Stats/plugin_info.json
delete mode 100644 plugins/TranslateSite/TranslateSitePlugin.py
delete mode 100644 plugins/TranslateSite/__init__.py
delete mode 100644 plugins/TranslateSite/plugin_info.json
delete mode 100644 plugins/Trayicon/TrayiconPlugin.py
delete mode 100644 plugins/Trayicon/__init__.py
delete mode 100644 plugins/Trayicon/languages/es.json
delete mode 100644 plugins/Trayicon/languages/fr.json
delete mode 100644 plugins/Trayicon/languages/hu.json
delete mode 100644 plugins/Trayicon/languages/it.json
delete mode 100644 plugins/Trayicon/languages/jp.json
delete mode 100644 plugins/Trayicon/languages/pl.json
delete mode 100644 plugins/Trayicon/languages/pt-br.json
delete mode 100644 plugins/Trayicon/languages/tr.json
delete mode 100644 plugins/Trayicon/languages/zh-tw.json
delete mode 100644 plugins/Trayicon/languages/zh.json
delete mode 100644 plugins/Trayicon/lib/__init__.py
delete mode 100644 plugins/Trayicon/lib/notificationicon.py
delete mode 100644 plugins/Trayicon/lib/winfolders.py
delete mode 100644 plugins/Trayicon/plugin_info.json
delete mode 100644 plugins/Trayicon/trayicon.ico
delete mode 100644 plugins/UiConfig/UiConfigPlugin.py
delete mode 100644 plugins/UiConfig/__init__.py
delete mode 100644 plugins/UiConfig/languages/hu.json
delete mode 100644 plugins/UiConfig/languages/jp.json
delete mode 100644 plugins/UiConfig/languages/pl.json
delete mode 100644 plugins/UiConfig/languages/pt-br.json
delete mode 100644 plugins/UiConfig/languages/zh.json
delete mode 100644 plugins/UiConfig/media/config.html
delete mode 100644 plugins/UiConfig/media/css/Config.css
delete mode 100644 plugins/UiConfig/media/css/all.css
delete mode 100644 plugins/UiConfig/media/css/button.css
delete mode 100644 plugins/UiConfig/media/css/fonts.css
delete mode 100644 plugins/UiConfig/media/img/loading.gif
delete mode 100644 plugins/UiConfig/media/js/ConfigStorage.coffee
delete mode 100644 plugins/UiConfig/media/js/ConfigView.coffee
delete mode 100644 plugins/UiConfig/media/js/UiConfig.coffee
delete mode 100644 plugins/UiConfig/media/js/all.js
delete mode 100644 plugins/UiConfig/media/js/lib/Class.coffee
delete mode 100644 plugins/UiConfig/media/js/lib/Promise.coffee
delete mode 100644 plugins/UiConfig/media/js/lib/Prototypes.coffee
delete mode 100644 plugins/UiConfig/media/js/lib/maquette.js
delete mode 100644 plugins/UiConfig/media/js/utils/Animation.coffee
delete mode 100644 plugins/UiConfig/media/js/utils/Dollar.coffee
delete mode 100644 plugins/UiConfig/media/js/utils/ZeroFrame.coffee
delete mode 100644 plugins/UiConfig/plugin_info.json
delete mode 100644 plugins/UiFileManager/UiFileManagerPlugin.py
delete mode 100644 plugins/UiFileManager/__init__.py
delete mode 100644 plugins/UiFileManager/languages/hu.json
delete mode 100644 plugins/UiFileManager/languages/jp.json
delete mode 100644 plugins/UiFileManager/media/codemirror/LICENSE
delete mode 100644 plugins/UiFileManager/media/codemirror/all.css
delete mode 100644 plugins/UiFileManager/media/codemirror/all.js
delete mode 100644 plugins/UiFileManager/media/codemirror/base/codemirror.css
delete mode 100644 plugins/UiFileManager/media/codemirror/base/codemirror.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/dialog/dialog.css
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/dialog/dialog.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/edit/closebrackets.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/edit/closetag.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/edit/continuelist.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/edit/matchbrackets.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/edit/matchtags.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/edit/trailingspace.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/fold/brace-fold.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/fold/comment-fold.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/fold/foldcode.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/fold/foldgutter.css
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/fold/foldgutter.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/fold/indent-fold.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/fold/markdown-fold.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/fold/xml-fold.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/hint/anyword-hint.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/hint/html-hint.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/hint/show-hint.css
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/hint/show-hint.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/hint/sql-hint.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/hint/xml-hint.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/lint/json-lint.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/lint/jsonlint.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/lint/lint.css
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/lint/lint.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/mdn-like-custom.css
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/scroll/annotatescrollbar.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/scroll/scrollpastend.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/scroll/simplescrollbars.css
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/scroll/simplescrollbars.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/search/jump-to-line.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/search/match-highlighter.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/search/matchesonscrollbar.css
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/search/matchesonscrollbar.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/search/search.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/search/searchcursor.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/selection/active-line.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/selection/mark-selection.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/selection/selection-pointer.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/simple.js
delete mode 100644 plugins/UiFileManager/media/codemirror/extension/sublime.js
delete mode 100644 plugins/UiFileManager/media/codemirror/mode/coffeescript.js
delete mode 100644 plugins/UiFileManager/media/codemirror/mode/css.js
delete mode 100644 plugins/UiFileManager/media/codemirror/mode/go.js
delete mode 100644 plugins/UiFileManager/media/codemirror/mode/htmlembedded.js
delete mode 100644 plugins/UiFileManager/media/codemirror/mode/htmlmixed.js
delete mode 100644 plugins/UiFileManager/media/codemirror/mode/javascript.js
delete mode 100644 plugins/UiFileManager/media/codemirror/mode/markdown.js
delete mode 100644 plugins/UiFileManager/media/codemirror/mode/python.js
delete mode 100644 plugins/UiFileManager/media/codemirror/mode/rust.js
delete mode 100644 plugins/UiFileManager/media/codemirror/mode/xml.js
delete mode 100644 plugins/UiFileManager/media/css/Menu.css
delete mode 100644 plugins/UiFileManager/media/css/Selectbar.css
delete mode 100644 plugins/UiFileManager/media/css/UiFileManager.css
delete mode 100644 plugins/UiFileManager/media/css/all.css
delete mode 100644 plugins/UiFileManager/media/img/loading.gif
delete mode 100644 plugins/UiFileManager/media/js/Config.coffee
delete mode 100644 plugins/UiFileManager/media/js/FileEditor.coffee
delete mode 100644 plugins/UiFileManager/media/js/FileItemList.coffee
delete mode 100644 plugins/UiFileManager/media/js/FileList.coffee
delete mode 100644 plugins/UiFileManager/media/js/UiFileManager.coffee
delete mode 100644 plugins/UiFileManager/media/js/all.js
delete mode 100644 plugins/UiFileManager/media/js/lib/Animation.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/Class.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/Dollar.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/ItemList.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/Menu.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/Promise.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/Prototypes.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/RateLimitCb.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/Text.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/Time.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/ZeroFrame.coffee
delete mode 100644 plugins/UiFileManager/media/js/lib/maquette.js
delete mode 100644 plugins/UiFileManager/media/list.html
delete mode 100644 plugins/UiPluginManager/UiPluginManagerPlugin.py
delete mode 100644 plugins/UiPluginManager/__init__.py
delete mode 100644 plugins/UiPluginManager/media/css/PluginManager.css
delete mode 100644 plugins/UiPluginManager/media/css/all.css
delete mode 100644 plugins/UiPluginManager/media/css/button.css
delete mode 100644 plugins/UiPluginManager/media/css/fonts.css
delete mode 100644 plugins/UiPluginManager/media/img/loading.gif
delete mode 100644 plugins/UiPluginManager/media/js/PluginList.coffee
delete mode 100644 plugins/UiPluginManager/media/js/UiPluginManager.coffee
delete mode 100644 plugins/UiPluginManager/media/js/all.js
delete mode 100644 plugins/UiPluginManager/media/js/lib/Class.coffee
delete mode 100644 plugins/UiPluginManager/media/js/lib/Promise.coffee
delete mode 100644 plugins/UiPluginManager/media/js/lib/Prototypes.coffee
delete mode 100644 plugins/UiPluginManager/media/js/lib/maquette.js
delete mode 100644 plugins/UiPluginManager/media/js/utils/Animation.coffee
delete mode 100644 plugins/UiPluginManager/media/js/utils/Dollar.coffee
delete mode 100644 plugins/UiPluginManager/media/js/utils/ZeroFrame.coffee
delete mode 100644 plugins/UiPluginManager/media/plugin_manager.html
delete mode 100644 plugins/Zeroname/README.md
delete mode 100644 plugins/Zeroname/SiteManagerPlugin.py
delete mode 100644 plugins/Zeroname/__init__.py
delete mode 100644 plugins/Zeroname/updater/zeroname_updater.py
delete mode 100644 plugins/__init__.py
delete mode 100644 plugins/disabled-Bootstrapper/BootstrapperDb.py
delete mode 100644 plugins/disabled-Bootstrapper/BootstrapperPlugin.py
delete mode 100644 plugins/disabled-Bootstrapper/Test/TestBootstrapper.py
delete mode 100644 plugins/disabled-Bootstrapper/Test/conftest.py
delete mode 100644 plugins/disabled-Bootstrapper/Test/pytest.ini
delete mode 100644 plugins/disabled-Bootstrapper/__init__.py
delete mode 100644 plugins/disabled-Bootstrapper/plugin_info.json
delete mode 100644 plugins/disabled-Dnschain/SiteManagerPlugin.py
delete mode 100644 plugins/disabled-Dnschain/UiRequestPlugin.py
delete mode 100644 plugins/disabled-Dnschain/__init__.py
delete mode 100644 plugins/disabled-DonationMessage/DonationMessagePlugin.py
delete mode 100644 plugins/disabled-DonationMessage/__init__.py
delete mode 100644 plugins/disabled-Multiuser/MultiuserPlugin.py
delete mode 100644 plugins/disabled-Multiuser/Test/TestMultiuser.py
delete mode 100644 plugins/disabled-Multiuser/Test/conftest.py
delete mode 100644 plugins/disabled-Multiuser/Test/pytest.ini
delete mode 100644 plugins/disabled-Multiuser/UserPlugin.py
delete mode 100644 plugins/disabled-Multiuser/__init__.py
delete mode 100644 plugins/disabled-Multiuser/plugin_info.json
delete mode 100644 plugins/disabled-StemPort/StemPortPlugin.py
delete mode 100644 plugins/disabled-StemPort/__init__.py
delete mode 100644 plugins/disabled-UiPassword/UiPasswordPlugin.py
delete mode 100644 plugins/disabled-UiPassword/__init__.py
delete mode 100644 plugins/disabled-UiPassword/login.html
delete mode 100644 plugins/disabled-UiPassword/plugin_info.json
delete mode 100644 plugins/disabled-ZeronameLocal/SiteManagerPlugin.py
delete mode 100644 plugins/disabled-ZeronameLocal/UiRequestPlugin.py
delete mode 100644 plugins/disabled-ZeronameLocal/__init__.py
diff --git a/plugins/AnnounceBitTorrent/AnnounceBitTorrentPlugin.py b/plugins/AnnounceBitTorrent/AnnounceBitTorrentPlugin.py
deleted file mode 100644
index fab7bb1f..00000000
--- a/plugins/AnnounceBitTorrent/AnnounceBitTorrentPlugin.py
+++ /dev/null
@@ -1,148 +0,0 @@
-import time
-import urllib.request
-import struct
-import socket
-
-import lib.bencode_open as bencode_open
-from lib.subtl.subtl import UdpTrackerClient
-import socks
-import sockshandler
-import gevent
-
-from Plugin import PluginManager
-from Config import config
-from Debug import Debug
-from util import helper
-
-
-# We can only import plugin host clases after the plugins are loaded
-@PluginManager.afterLoad
-def importHostClasses():
- global Peer, AnnounceError
- from Peer import Peer
- from Site.SiteAnnouncer import AnnounceError
-
-
-@PluginManager.registerTo("SiteAnnouncer")
-class SiteAnnouncerPlugin(object):
- def getSupportedTrackers(self):
- trackers = super(SiteAnnouncerPlugin, self).getSupportedTrackers()
- if config.disable_udp or config.trackers_proxy != "disable":
- trackers = [tracker for tracker in trackers if not tracker.startswith("udp://")]
-
- return trackers
-
- def getTrackerHandler(self, protocol):
- if protocol == "udp":
- handler = self.announceTrackerUdp
- elif protocol == "http":
- handler = self.announceTrackerHttp
- elif protocol == "https":
- handler = self.announceTrackerHttps
- else:
- handler = super(SiteAnnouncerPlugin, self).getTrackerHandler(protocol)
- return handler
-
- def announceTrackerUdp(self, tracker_address, mode="start", num_want=10):
- s = time.time()
- if config.disable_udp:
- raise AnnounceError("Udp disabled by config")
- if config.trackers_proxy != "disable":
- raise AnnounceError("Udp trackers not available with proxies")
-
- ip, port = tracker_address.split("/")[0].split(":")
- tracker = UdpTrackerClient(ip, int(port))
- if helper.getIpType(ip) in self.getOpenedServiceTypes():
- tracker.peer_port = self.fileserver_port
- else:
- tracker.peer_port = 0
- tracker.connect()
- if not tracker.poll_once():
- raise AnnounceError("Could not connect")
- tracker.announce(info_hash=self.site.address_sha1, num_want=num_want, left=431102370)
- back = tracker.poll_once()
- if not back:
- raise AnnounceError("No response after %.0fs" % (time.time() - s))
- elif type(back) is dict and "response" in back:
- peers = back["response"]["peers"]
- else:
- raise AnnounceError("Invalid response: %r" % back)
-
- return peers
-
- def httpRequest(self, url):
- headers = {
- 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
- 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
- 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
- 'Accept-Encoding': 'none',
- 'Accept-Language': 'en-US,en;q=0.8',
- 'Connection': 'keep-alive'
- }
-
- req = urllib.request.Request(url, headers=headers)
-
- if config.trackers_proxy == "tor":
- tor_manager = self.site.connection_server.tor_manager
- handler = sockshandler.SocksiPyHandler(socks.SOCKS5, tor_manager.proxy_ip, tor_manager.proxy_port)
- opener = urllib.request.build_opener(handler)
- return opener.open(req, timeout=50)
- elif config.trackers_proxy == "disable":
- return urllib.request.urlopen(req, timeout=25)
- else:
- proxy_ip, proxy_port = config.trackers_proxy.split(":")
- handler = sockshandler.SocksiPyHandler(socks.SOCKS5, proxy_ip, int(proxy_port))
- opener = urllib.request.build_opener(handler)
- return opener.open(req, timeout=50)
-
- def announceTrackerHttps(self, *args, **kwargs):
- kwargs["protocol"] = "https"
- return self.announceTrackerHttp(*args, **kwargs)
-
- def announceTrackerHttp(self, tracker_address, mode="start", num_want=10, protocol="http"):
- tracker_ip, tracker_port = tracker_address.rsplit(":", 1)
- if helper.getIpType(tracker_ip) in self.getOpenedServiceTypes():
- port = self.fileserver_port
- else:
- port = 1
- params = {
- 'info_hash': self.site.address_sha1,
- 'peer_id': self.peer_id, 'port': port,
- 'uploaded': 0, 'downloaded': 0, 'left': 431102370, 'compact': 1, 'numwant': num_want,
- 'event': 'started'
- }
-
- url = protocol + "://" + tracker_address + "?" + urllib.parse.urlencode(params)
-
- s = time.time()
- response = None
- # Load url
- if config.tor == "always" or config.trackers_proxy != "disable":
- timeout = 60
- else:
- timeout = 30
-
- with gevent.Timeout(timeout, False): # Make sure of timeout
- req = self.httpRequest(url)
- response = req.read()
- req.close()
- req = None
-
- if not response:
- raise AnnounceError("No response after %.0fs" % (time.time() - s))
-
- # Decode peers
- try:
- peer_data = bencode_open.loads(response)[b"peers"]
- response = None
- peer_count = int(len(peer_data) / 6)
- peers = []
- for peer_offset in range(peer_count):
- off = 6 * peer_offset
- peer = peer_data[off:off + 6]
- addr, port = struct.unpack('!LH', peer)
- peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port})
- except Exception as err:
- raise AnnounceError("Invalid response: %r (%s)" % (response, Debug.formatException(err)))
-
- return peers
diff --git a/plugins/AnnounceBitTorrent/__init__.py b/plugins/AnnounceBitTorrent/__init__.py
deleted file mode 100644
index c7422855..00000000
--- a/plugins/AnnounceBitTorrent/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import AnnounceBitTorrentPlugin
\ No newline at end of file
diff --git a/plugins/AnnounceBitTorrent/plugin_info.json b/plugins/AnnounceBitTorrent/plugin_info.json
deleted file mode 100644
index 824749ee..00000000
--- a/plugins/AnnounceBitTorrent/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "AnnounceBitTorrent",
- "description": "Discover new peers using BitTorrent trackers.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/AnnounceLocal/AnnounceLocalPlugin.py b/plugins/AnnounceLocal/AnnounceLocalPlugin.py
deleted file mode 100644
index b9225966..00000000
--- a/plugins/AnnounceLocal/AnnounceLocalPlugin.py
+++ /dev/null
@@ -1,147 +0,0 @@
-import time
-
-import gevent
-
-from Plugin import PluginManager
-from Config import config
-from . import BroadcastServer
-
-
-@PluginManager.registerTo("SiteAnnouncer")
-class SiteAnnouncerPlugin(object):
- def announce(self, force=False, *args, **kwargs):
- local_announcer = self.site.connection_server.local_announcer
-
- thread = None
- if local_announcer and (force or time.time() - local_announcer.last_discover > 5 * 60):
- thread = gevent.spawn(local_announcer.discover, force=force)
- back = super(SiteAnnouncerPlugin, self).announce(force=force, *args, **kwargs)
-
- if thread:
- thread.join()
-
- return back
-
-
-class LocalAnnouncer(BroadcastServer.BroadcastServer):
- def __init__(self, server, listen_port):
- super(LocalAnnouncer, self).__init__("zeronet", listen_port=listen_port)
- self.server = server
-
- self.sender_info["peer_id"] = self.server.peer_id
- self.sender_info["port"] = self.server.port
- self.sender_info["broadcast_port"] = listen_port
- self.sender_info["rev"] = config.rev
-
- self.known_peers = {}
- self.last_discover = 0
-
- def discover(self, force=False):
- self.log.debug("Sending discover request (force: %s)" % force)
- self.last_discover = time.time()
- if force: # Probably new site added, clean cache
- self.known_peers = {}
-
- for peer_id, known_peer in list(self.known_peers.items()):
- if time.time() - known_peer["found"] > 20 * 60:
- del(self.known_peers[peer_id])
- self.log.debug("Timeout, removing from known_peers: %s" % peer_id)
- self.broadcast({"cmd": "discoverRequest", "params": {}}, port=self.listen_port)
-
- def actionDiscoverRequest(self, sender, params):
- back = {
- "cmd": "discoverResponse",
- "params": {
- "sites_changed": self.server.site_manager.sites_changed
- }
- }
-
- if sender["peer_id"] not in self.known_peers:
- self.known_peers[sender["peer_id"]] = {"added": time.time(), "sites_changed": 0, "updated": 0, "found": time.time()}
- self.log.debug("Got discover request from unknown peer %s (%s), time to refresh known peers" % (sender["ip"], sender["peer_id"]))
- gevent.spawn_later(1.0, self.discover) # Let the response arrive first to the requester
-
- return back
-
- def actionDiscoverResponse(self, sender, params):
- if sender["peer_id"] in self.known_peers:
- self.known_peers[sender["peer_id"]]["found"] = time.time()
- if params["sites_changed"] != self.known_peers.get(sender["peer_id"], {}).get("sites_changed"):
- # Peer's site list changed, request the list of new sites
- return {"cmd": "siteListRequest"}
- else:
- # Peer's site list is the same
- for site in self.server.sites.values():
- peer = site.peers.get("%s:%s" % (sender["ip"], sender["port"]))
- if peer:
- peer.found("local")
-
- def actionSiteListRequest(self, sender, params):
- back = []
- sites = list(self.server.sites.values())
-
- # Split adresses to group of 100 to avoid UDP size limit
- site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)]
- for site_group in site_groups:
- res = {}
- res["sites_changed"] = self.server.site_manager.sites_changed
- res["sites"] = [site.address_hash for site in site_group]
- back.append({"cmd": "siteListResponse", "params": res})
- return back
-
- def actionSiteListResponse(self, sender, params):
- s = time.time()
- peer_sites = set(params["sites"])
- num_found = 0
- added_sites = []
- for site in self.server.sites.values():
- if site.address_hash in peer_sites:
- added = site.addPeer(sender["ip"], sender["port"], source="local")
- num_found += 1
- if added:
- site.worker_manager.onPeers()
- site.updateWebsocket(peers_added=1)
- added_sites.append(site)
-
- # Save sites changed value to avoid unnecessary site list download
- if sender["peer_id"] not in self.known_peers:
- self.known_peers[sender["peer_id"]] = {"added": time.time()}
-
- self.known_peers[sender["peer_id"]]["sites_changed"] = params["sites_changed"]
- self.known_peers[sender["peer_id"]]["updated"] = time.time()
- self.known_peers[sender["peer_id"]]["found"] = time.time()
-
- self.log.debug(
- "Tracker result: Discover from %s response parsed in %.3fs, found: %s added: %s of %s" %
- (sender["ip"], time.time() - s, num_found, added_sites, len(peer_sites))
- )
-
-
-@PluginManager.registerTo("FileServer")
-class FileServerPlugin(object):
- def __init__(self, *args, **kwargs):
- super(FileServerPlugin, self).__init__(*args, **kwargs)
- if config.broadcast_port and config.tor != "always" and not config.disable_udp:
- self.local_announcer = LocalAnnouncer(self, config.broadcast_port)
- else:
- self.local_announcer = None
-
- def start(self, *args, **kwargs):
- if self.local_announcer:
- gevent.spawn(self.local_announcer.start)
- return super(FileServerPlugin, self).start(*args, **kwargs)
-
- def stop(self):
- if self.local_announcer:
- self.local_announcer.stop()
- res = super(FileServerPlugin, self).stop()
- return res
-
-
-@PluginManager.registerTo("ConfigPlugin")
-class ConfigPlugin(object):
- def createArguments(self):
- group = self.parser.add_argument_group("AnnounceLocal plugin")
- group.add_argument('--broadcast_port', help='UDP broadcasting port for local peer discovery', default=1544, type=int, metavar='port')
-
- return super(ConfigPlugin, self).createArguments()
diff --git a/plugins/AnnounceLocal/BroadcastServer.py b/plugins/AnnounceLocal/BroadcastServer.py
deleted file mode 100644
index 74678896..00000000
--- a/plugins/AnnounceLocal/BroadcastServer.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import socket
-import logging
-import time
-from contextlib import closing
-
-from Debug import Debug
-from util import UpnpPunch
-from util import Msgpack
-
-
-class BroadcastServer(object):
- def __init__(self, service_name, listen_port=1544, listen_ip=''):
- self.log = logging.getLogger("BroadcastServer")
- self.listen_port = listen_port
- self.listen_ip = listen_ip
-
- self.running = False
- self.sock = None
- self.sender_info = {"service": service_name}
-
- def createBroadcastSocket(self):
- sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- if hasattr(socket, 'SO_REUSEPORT'):
- try:
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
- except Exception as err:
- self.log.warning("Error setting SO_REUSEPORT: %s" % err)
-
- binded = False
- for retry in range(3):
- try:
- sock.bind((self.listen_ip, self.listen_port))
- binded = True
- break
- except Exception as err:
- self.log.error(
- "Socket bind to %s:%s error: %s, retry #%s" %
- (self.listen_ip, self.listen_port, Debug.formatException(err), retry)
- )
- time.sleep(retry)
-
- if binded:
- return sock
- else:
- return False
-
- def start(self): # Listens for discover requests
- self.sock = self.createBroadcastSocket()
- if not self.sock:
- self.log.error("Unable to listen on port %s" % self.listen_port)
- return
-
- self.log.debug("Started on port %s" % self.listen_port)
-
- self.running = True
-
- while self.running:
- try:
- data, addr = self.sock.recvfrom(8192)
- except Exception as err:
- if self.running:
- self.log.error("Listener receive error: %s" % err)
- continue
-
- if not self.running:
- break
-
- try:
- message = Msgpack.unpack(data)
- response_addr, message = self.handleMessage(addr, message)
- if message:
- self.send(response_addr, message)
- except Exception as err:
- self.log.error("Handlemessage error: %s" % Debug.formatException(err))
- self.log.debug("Stopped listening on port %s" % self.listen_port)
-
- def stop(self):
- self.log.debug("Stopping, socket: %s" % self.sock)
- self.running = False
- if self.sock:
- self.sock.close()
-
- def send(self, addr, message):
- if type(message) is not list:
- message = [message]
-
- for message_part in message:
- message_part["sender"] = self.sender_info
-
- self.log.debug("Send to %s: %s" % (addr, message_part["cmd"]))
- with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock:
- sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- sock.sendto(Msgpack.pack(message_part), addr)
-
- def getMyIps(self):
- return UpnpPunch._get_local_ips()
-
- def broadcast(self, message, port=None):
- if not port:
- port = self.listen_port
-
- my_ips = self.getMyIps()
- addr = ("255.255.255.255", port)
-
- message["sender"] = self.sender_info
- self.log.debug("Broadcast using ips %s on port %s: %s" % (my_ips, port, message["cmd"]))
-
- for my_ip in my_ips:
- try:
- with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock:
- sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
- sock.bind((my_ip, 0))
- sock.sendto(Msgpack.pack(message), addr)
- except Exception as err:
- self.log.warning("Error sending broadcast using ip %s: %s" % (my_ip, err))
-
- def handleMessage(self, addr, message):
- self.log.debug("Got from %s: %s" % (addr, message["cmd"]))
- cmd = message["cmd"]
- params = message.get("params", {})
- sender = message["sender"]
- sender["ip"] = addr[0]
-
- func_name = "action" + cmd[0].upper() + cmd[1:]
- func = getattr(self, func_name, None)
-
- if sender["service"] != "zeronet" or sender["peer_id"] == self.sender_info["peer_id"]:
- # Skip messages not for us or sent by us
- message = None
- elif func:
- message = func(sender, params)
- else:
- self.log.debug("Unknown cmd: %s" % cmd)
- message = None
-
- return (sender["ip"], sender["broadcast_port"]), message
diff --git a/plugins/AnnounceLocal/Test/TestAnnounce.py b/plugins/AnnounceLocal/Test/TestAnnounce.py
deleted file mode 100644
index 4def02ed..00000000
--- a/plugins/AnnounceLocal/Test/TestAnnounce.py
+++ /dev/null
@@ -1,113 +0,0 @@
-import time
-import copy
-
-import gevent
-import pytest
-import mock
-
-from AnnounceLocal import AnnounceLocalPlugin
-from File import FileServer
-from Test import Spy
-
-@pytest.fixture
-def announcer(file_server, site):
- file_server.sites[site.address] = site
- announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server, listen_port=1100)
- file_server.local_announcer = announcer
- announcer.listen_port = 1100
- announcer.sender_info["broadcast_port"] = 1100
- announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"])
- announcer.discover = mock.MagicMock(return_value=False) # Don't send discover requests automatically
- gevent.spawn(announcer.start)
- time.sleep(0.5)
-
- assert file_server.local_announcer.running
- return file_server.local_announcer
-
-@pytest.fixture
-def announcer_remote(request, site_temp):
- file_server_remote = FileServer("127.0.0.1", 1545)
- file_server_remote.sites[site_temp.address] = site_temp
- announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server_remote, listen_port=1101)
- file_server_remote.local_announcer = announcer
- announcer.listen_port = 1101
- announcer.sender_info["broadcast_port"] = 1101
- announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"])
- announcer.discover = mock.MagicMock(return_value=False) # Don't send discover requests automatically
- gevent.spawn(announcer.start)
- time.sleep(0.5)
-
- assert file_server_remote.local_announcer.running
-
- def cleanup():
- file_server_remote.stop()
- request.addfinalizer(cleanup)
-
-
- return file_server_remote.local_announcer
-
-@pytest.mark.usefixtures("resetSettings")
-@pytest.mark.usefixtures("resetTempSettings")
-class TestAnnounce:
- def testSenderInfo(self, announcer):
- sender_info = announcer.sender_info
- assert sender_info["port"] > 0
- assert len(sender_info["peer_id"]) == 20
- assert sender_info["rev"] > 0
-
- def testIgnoreSelfMessages(self, announcer):
- # No response to messages that has same peer_id as server
- assert not announcer.handleMessage(("0.0.0.0", 123), {"cmd": "discoverRequest", "sender": announcer.sender_info, "params": {}})[1]
-
- # Response to messages with different peer id
- sender_info = copy.copy(announcer.sender_info)
- sender_info["peer_id"] += "-"
- addr, res = announcer.handleMessage(("0.0.0.0", 123), {"cmd": "discoverRequest", "sender": sender_info, "params": {}})
- assert res["params"]["sites_changed"] > 0
-
- def testDiscoverRequest(self, announcer, announcer_remote):
- assert len(announcer_remote.known_peers) == 0
- with Spy.Spy(announcer_remote, "handleMessage") as responses:
- announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port)
- time.sleep(0.1)
-
- response_cmds = [response[1]["cmd"] for response in responses]
- assert response_cmds == ["discoverResponse", "siteListResponse"]
- assert len(responses[-1][1]["params"]["sites"]) == 1
-
- # It should only request siteList if sites_changed value is different from last response
- with Spy.Spy(announcer_remote, "handleMessage") as responses:
- announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port)
- time.sleep(0.1)
-
- response_cmds = [response[1]["cmd"] for response in responses]
- assert response_cmds == ["discoverResponse"]
-
- def testPeerDiscover(self, announcer, announcer_remote, site):
- assert announcer.server.peer_id != announcer_remote.server.peer_id
- assert len(list(announcer.server.sites.values())[0].peers) == 0
- announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port)
- time.sleep(0.1)
- assert len(list(announcer.server.sites.values())[0].peers) == 1
-
- def testRecentPeerList(self, announcer, announcer_remote, site):
- assert len(site.peers_recent) == 0
- assert len(site.peers) == 0
- with Spy.Spy(announcer, "handleMessage") as responses:
- announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
- time.sleep(0.1)
- assert [response[1]["cmd"] for response in responses] == ["discoverResponse", "siteListResponse"]
- assert len(site.peers_recent) == 1
- assert len(site.peers) == 1
-
- # It should update peer without siteListResponse
- last_time_found = list(site.peers.values())[0].time_found
- site.peers_recent.clear()
- with Spy.Spy(announcer, "handleMessage") as responses:
- announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
- time.sleep(0.1)
- assert [response[1]["cmd"] for response in responses] == ["discoverResponse"]
- assert len(site.peers_recent) == 1
- assert list(site.peers.values())[0].time_found > last_time_found
-
-
diff --git a/plugins/AnnounceLocal/Test/conftest.py b/plugins/AnnounceLocal/Test/conftest.py
deleted file mode 100644
index a88c642c..00000000
--- a/plugins/AnnounceLocal/Test/conftest.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from src.Test.conftest import *
-
-from Config import config
-config.broadcast_port = 0
diff --git a/plugins/AnnounceLocal/Test/pytest.ini b/plugins/AnnounceLocal/Test/pytest.ini
deleted file mode 100644
index d09210d1..00000000
--- a/plugins/AnnounceLocal/Test/pytest.ini
+++ /dev/null
@@ -1,5 +0,0 @@
-[pytest]
-python_files = Test*.py
-addopts = -rsxX -v --durations=6
-markers =
- webtest: mark a test as a webtest.
\ No newline at end of file
diff --git a/plugins/AnnounceLocal/__init__.py b/plugins/AnnounceLocal/__init__.py
deleted file mode 100644
index 5b80abd2..00000000
--- a/plugins/AnnounceLocal/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import AnnounceLocalPlugin
\ No newline at end of file
diff --git a/plugins/AnnounceLocal/plugin_info.json b/plugins/AnnounceLocal/plugin_info.json
deleted file mode 100644
index 2908cbf1..00000000
--- a/plugins/AnnounceLocal/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "AnnounceLocal",
- "description": "Discover LAN clients using UDP broadcasting.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/AnnounceShare/AnnounceSharePlugin.py b/plugins/AnnounceShare/AnnounceSharePlugin.py
deleted file mode 100644
index 057ce55a..00000000
--- a/plugins/AnnounceShare/AnnounceSharePlugin.py
+++ /dev/null
@@ -1,190 +0,0 @@
-import time
-import os
-import logging
-import json
-import atexit
-
-import gevent
-
-from Config import config
-from Plugin import PluginManager
-from util import helper
-
-
-class TrackerStorage(object):
- def __init__(self):
- self.log = logging.getLogger("TrackerStorage")
- self.file_path = "%s/trackers.json" % config.data_dir
- self.load()
- self.time_discover = 0.0
- atexit.register(self.save)
-
- def getDefaultFile(self):
- return {"shared": {}}
-
- def onTrackerFound(self, tracker_address, type="shared", my=False):
- if not tracker_address.startswith("zero://"):
- return False
-
- trackers = self.getTrackers()
- added = False
- if tracker_address not in trackers:
- trackers[tracker_address] = {
- "time_added": time.time(),
- "time_success": 0,
- "latency": 99.0,
- "num_error": 0,
- "my": False
- }
- self.log.debug("New tracker found: %s" % tracker_address)
- added = True
-
- trackers[tracker_address]["time_found"] = time.time()
- trackers[tracker_address]["my"] = my
- return added
-
- def onTrackerSuccess(self, tracker_address, latency):
- trackers = self.getTrackers()
- if tracker_address not in trackers:
- return False
-
- trackers[tracker_address]["latency"] = latency
- trackers[tracker_address]["time_success"] = time.time()
- trackers[tracker_address]["num_error"] = 0
-
- def onTrackerError(self, tracker_address):
- trackers = self.getTrackers()
- if tracker_address not in trackers:
- return False
-
- trackers[tracker_address]["time_error"] = time.time()
- trackers[tracker_address]["num_error"] += 1
-
- if len(self.getWorkingTrackers()) >= config.working_shared_trackers_limit:
- error_limit = 5
- else:
- error_limit = 30
- error_limit
-
- if trackers[tracker_address]["num_error"] > error_limit and trackers[tracker_address]["time_success"] < time.time() - 60 * 60:
- self.log.debug("Tracker %s looks down, removing." % tracker_address)
- del trackers[tracker_address]
-
- def getTrackers(self, type="shared"):
- return self.file_content.setdefault(type, {})
-
- def getWorkingTrackers(self, type="shared"):
- trackers = {
- key: tracker for key, tracker in self.getTrackers(type).items()
- if tracker["time_success"] > time.time() - 60 * 60
- }
- return trackers
-
- def getFileContent(self):
- if not os.path.isfile(self.file_path):
- open(self.file_path, "w").write("{}")
- return self.getDefaultFile()
- try:
- return json.load(open(self.file_path))
- except Exception as err:
- self.log.error("Error loading trackers list: %s" % err)
- return self.getDefaultFile()
-
- def load(self):
- self.file_content = self.getFileContent()
-
- trackers = self.getTrackers()
- self.log.debug("Loaded %s shared trackers" % len(trackers))
- for address, tracker in list(trackers.items()):
- tracker["num_error"] = 0
- if not address.startswith("zero://"):
- del trackers[address]
-
- def save(self):
- s = time.time()
- helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True).encode("utf8"))
- self.log.debug("Saved in %.3fs" % (time.time() - s))
-
- def discoverTrackers(self, peers):
- if len(self.getWorkingTrackers()) > config.working_shared_trackers_limit:
- return False
- s = time.time()
- num_success = 0
- for peer in peers:
- if peer.connection and peer.connection.handshake.get("rev", 0) < 3560:
- continue # Not supported
-
- res = peer.request("getTrackers")
- if not res or "error" in res:
- continue
-
- num_success += 1
- for tracker_address in res["trackers"]:
- if type(tracker_address) is bytes: # Backward compatibilitys
- tracker_address = tracker_address.decode("utf8")
- added = self.onTrackerFound(tracker_address)
- if added: # Only add one tracker from one source
- break
-
- if not num_success and len(peers) < 20:
- self.time_discover = 0.0
-
- if num_success:
- self.save()
-
- self.log.debug("Trackers discovered from %s/%s peers in %.3fs" % (num_success, len(peers), time.time() - s))
-
-
-if "tracker_storage" not in locals():
- tracker_storage = TrackerStorage()
-
-
-@PluginManager.registerTo("SiteAnnouncer")
-class SiteAnnouncerPlugin(object):
- def getTrackers(self):
- if tracker_storage.time_discover < time.time() - 5 * 60:
- tracker_storage.time_discover = time.time()
- gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers())
- trackers = super(SiteAnnouncerPlugin, self).getTrackers()
- shared_trackers = list(tracker_storage.getTrackers("shared").keys())
- if shared_trackers:
- return trackers + shared_trackers
- else:
- return trackers
-
- def announceTracker(self, tracker, *args, **kwargs):
- res = super(SiteAnnouncerPlugin, self).announceTracker(tracker, *args, **kwargs)
- if res:
- latency = res
- tracker_storage.onTrackerSuccess(tracker, latency)
- elif res is False:
- tracker_storage.onTrackerError(tracker)
-
- return res
-
-
-@PluginManager.registerTo("FileRequest")
-class FileRequestPlugin(object):
- def actionGetTrackers(self, params):
- shared_trackers = list(tracker_storage.getWorkingTrackers("shared").keys())
- self.response({"trackers": shared_trackers})
-
-
-@PluginManager.registerTo("FileServer")
-class FileServerPlugin(object):
- def portCheck(self, *args, **kwargs):
- res = super(FileServerPlugin, self).portCheck(*args, **kwargs)
- if res and not config.tor == "always" and "Bootstrapper" in PluginManager.plugin_manager.plugin_names:
- for ip in self.ip_external_list:
- my_tracker_address = "zero://%s:%s" % (ip, config.fileserver_port)
- tracker_storage.onTrackerFound(my_tracker_address, my=True)
- return res
-
-
-@PluginManager.registerTo("ConfigPlugin")
-class ConfigPlugin(object):
- def createArguments(self):
- group = self.parser.add_argument_group("AnnounceShare plugin")
- group.add_argument('--working_shared_trackers_limit', help='Stop discovering new shared trackers after this number of shared trackers reached', default=5, type=int, metavar='limit')
-
- return super(ConfigPlugin, self).createArguments()
diff --git a/plugins/AnnounceShare/Test/TestAnnounceShare.py b/plugins/AnnounceShare/Test/TestAnnounceShare.py
deleted file mode 100644
index 7178eac8..00000000
--- a/plugins/AnnounceShare/Test/TestAnnounceShare.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import pytest
-
-from AnnounceShare import AnnounceSharePlugin
-from Peer import Peer
-from Config import config
-
-
-@pytest.mark.usefixtures("resetSettings")
-@pytest.mark.usefixtures("resetTempSettings")
-class TestAnnounceShare:
- def testAnnounceList(self, file_server):
- open("%s/trackers.json" % config.data_dir, "w").write("{}")
- tracker_storage = AnnounceSharePlugin.tracker_storage
- tracker_storage.load()
- peer = Peer(file_server.ip, 1544, connection_server=file_server)
- assert peer.request("getTrackers")["trackers"] == []
-
- tracker_storage.onTrackerFound("zero://%s:15441" % file_server.ip)
- assert peer.request("getTrackers")["trackers"] == []
-
- # It needs to have at least one successfull announce to be shared to other peers
- tracker_storage.onTrackerSuccess("zero://%s:15441" % file_server.ip, 1.0)
- assert peer.request("getTrackers")["trackers"] == ["zero://%s:15441" % file_server.ip]
-
diff --git a/plugins/AnnounceShare/Test/conftest.py b/plugins/AnnounceShare/Test/conftest.py
deleted file mode 100644
index 5abd4dd6..00000000
--- a/plugins/AnnounceShare/Test/conftest.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from src.Test.conftest import *
-
-from Config import config
diff --git a/plugins/AnnounceShare/Test/pytest.ini b/plugins/AnnounceShare/Test/pytest.ini
deleted file mode 100644
index d09210d1..00000000
--- a/plugins/AnnounceShare/Test/pytest.ini
+++ /dev/null
@@ -1,5 +0,0 @@
-[pytest]
-python_files = Test*.py
-addopts = -rsxX -v --durations=6
-markers =
- webtest: mark a test as a webtest.
\ No newline at end of file
diff --git a/plugins/AnnounceShare/__init__.py b/plugins/AnnounceShare/__init__.py
deleted file mode 100644
index dc1e40bd..00000000
--- a/plugins/AnnounceShare/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import AnnounceSharePlugin
diff --git a/plugins/AnnounceShare/plugin_info.json b/plugins/AnnounceShare/plugin_info.json
deleted file mode 100644
index 0ad07e71..00000000
--- a/plugins/AnnounceShare/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "AnnounceShare",
- "description": "Share possible trackers between clients.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/AnnounceZero/AnnounceZeroPlugin.py b/plugins/AnnounceZero/AnnounceZeroPlugin.py
deleted file mode 100644
index 623cd4b5..00000000
--- a/plugins/AnnounceZero/AnnounceZeroPlugin.py
+++ /dev/null
@@ -1,140 +0,0 @@
-import time
-import itertools
-
-from Plugin import PluginManager
-from util import helper
-from Crypt import CryptRsa
-
-allow_reload = False # No source reload supported in this plugin
-time_full_announced = {} # Tracker address: Last announced all site to tracker
-connection_pool = {} # Tracker address: Peer object
-
-
-# We can only import plugin host clases after the plugins are loaded
-@PluginManager.afterLoad
-def importHostClasses():
- global Peer, AnnounceError
- from Peer import Peer
- from Site.SiteAnnouncer import AnnounceError
-
-
-# Process result got back from tracker
-def processPeerRes(tracker_address, site, peers):
- added = 0
-
- # Onion
- found_onion = 0
- for packed_address in peers["onion"]:
- found_onion += 1
- peer_onion, peer_port = helper.unpackOnionAddress(packed_address)
- if site.addPeer(peer_onion, peer_port, source="tracker"):
- added += 1
-
- # Ip4
- found_ipv4 = 0
- peers_normal = itertools.chain(peers.get("ip4", []), peers.get("ipv4", []), peers.get("ipv6", []))
- for packed_address in peers_normal:
- found_ipv4 += 1
- peer_ip, peer_port = helper.unpackAddress(packed_address)
- if site.addPeer(peer_ip, peer_port, source="tracker"):
- added += 1
-
- if added:
- site.worker_manager.onPeers()
- site.updateWebsocket(peers_added=added)
- return added
-
-
-@PluginManager.registerTo("SiteAnnouncer")
-class SiteAnnouncerPlugin(object):
- def getTrackerHandler(self, protocol):
- if protocol == "zero":
- return self.announceTrackerZero
- else:
- return super(SiteAnnouncerPlugin, self).getTrackerHandler(protocol)
-
- def announceTrackerZero(self, tracker_address, mode="start", num_want=10):
- global time_full_announced
- s = time.time()
-
- need_types = ["ip4"] # ip4 for backward compatibility reasons
- need_types += self.site.connection_server.supported_ip_types
- if self.site.connection_server.tor_manager.enabled:
- need_types.append("onion")
-
- if mode == "start" or mode == "more": # Single: Announce only this site
- sites = [self.site]
- full_announce = False
- else: # Multi: Announce all currently serving site
- full_announce = True
- if time.time() - time_full_announced.get(tracker_address, 0) < 60 * 15: # No reannounce all sites within short time
- return None
- time_full_announced[tracker_address] = time.time()
- from Site import SiteManager
- sites = [site for site in SiteManager.site_manager.sites.values() if site.isServing()]
-
- # Create request
- add_types = self.getOpenedServiceTypes()
- request = {
- "hashes": [], "onions": [], "port": self.fileserver_port, "need_types": need_types, "need_num": 20, "add": add_types
- }
- for site in sites:
- if "onion" in add_types:
- onion = self.site.connection_server.tor_manager.getOnion(site.address)
- request["onions"].append(onion)
- request["hashes"].append(site.address_hash)
-
- # Tracker can remove sites that we don't announce
- if full_announce:
- request["delete"] = True
-
- # Sent request to tracker
- tracker_peer = connection_pool.get(tracker_address) # Re-use tracker connection if possible
- if not tracker_peer:
- tracker_ip, tracker_port = tracker_address.rsplit(":", 1)
- tracker_peer = Peer(str(tracker_ip), int(tracker_port), connection_server=self.site.connection_server)
- tracker_peer.is_tracker_connection = True
- connection_pool[tracker_address] = tracker_peer
-
- res = tracker_peer.request("announce", request)
-
- if not res or "peers" not in res:
- if full_announce:
- time_full_announced[tracker_address] = 0
- raise AnnounceError("Invalid response: %s" % res)
-
- # Add peers from response to site
- site_index = 0
- peers_added = 0
- for site_res in res["peers"]:
- site = sites[site_index]
- peers_added += processPeerRes(tracker_address, site, site_res)
- site_index += 1
-
- # Check if we need to sign prove the onion addresses
- if "onion_sign_this" in res:
- self.site.log.debug("Signing %s for %s to add %s onions" % (res["onion_sign_this"], tracker_address, len(sites)))
- request["onion_signs"] = {}
- request["onion_sign_this"] = res["onion_sign_this"]
- request["need_num"] = 0
- for site in sites:
- onion = self.site.connection_server.tor_manager.getOnion(site.address)
- publickey = self.site.connection_server.tor_manager.getPublickey(onion)
- if publickey not in request["onion_signs"]:
- sign = CryptRsa.sign(res["onion_sign_this"].encode("utf8"), self.site.connection_server.tor_manager.getPrivatekey(onion))
- request["onion_signs"][publickey] = sign
- res = tracker_peer.request("announce", request)
- if not res or "onion_sign_this" in res:
- if full_announce:
- time_full_announced[tracker_address] = 0
- raise AnnounceError("Announce onion address to failed: %s" % res)
-
- if full_announce:
- tracker_peer.remove() # Close connection, we don't need it in next 5 minute
-
- self.site.log.debug(
- "Tracker announce result: zero://%s (sites: %s, new peers: %s, add: %s, mode: %s) in %.3fs" %
- (tracker_address, site_index, peers_added, add_types, mode, time.time() - s)
- )
-
- return True
diff --git a/plugins/AnnounceZero/__init__.py b/plugins/AnnounceZero/__init__.py
deleted file mode 100644
index 8aec5ddb..00000000
--- a/plugins/AnnounceZero/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import AnnounceZeroPlugin
\ No newline at end of file
diff --git a/plugins/AnnounceZero/plugin_info.json b/plugins/AnnounceZero/plugin_info.json
deleted file mode 100644
index 50e7cf7f..00000000
--- a/plugins/AnnounceZero/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "AnnounceZero",
- "description": "Announce using ZeroNet protocol.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/Benchmark/BenchmarkDb.py b/plugins/Benchmark/BenchmarkDb.py
deleted file mode 100644
index a767a3f4..00000000
--- a/plugins/Benchmark/BenchmarkDb.py
+++ /dev/null
@@ -1,143 +0,0 @@
-import os
-import json
-import contextlib
-import time
-
-from Plugin import PluginManager
-from Config import config
-
-
-@PluginManager.registerTo("Actions")
-class ActionsPlugin:
- def getBenchmarkTests(self, online=False):
- tests = super().getBenchmarkTests(online)
- tests.extend([
- {"func": self.testDbConnect, "num": 10, "time_standard": 0.27},
- {"func": self.testDbInsert, "num": 10, "time_standard": 0.91},
- {"func": self.testDbInsertMultiuser, "num": 1, "time_standard": 0.57},
- {"func": self.testDbQueryIndexed, "num": 1000, "time_standard": 0.84},
- {"func": self.testDbQueryNotIndexed, "num": 1000, "time_standard": 1.30}
- ])
- return tests
-
-
- @contextlib.contextmanager
- def getTestDb(self):
- from Db import Db
- path = "%s/benchmark.db" % config.data_dir
- if os.path.isfile(path):
- os.unlink(path)
- schema = {
- "db_name": "TestDb",
- "db_file": path,
- "maps": {
- ".*": {
- "to_table": {
- "test": "test"
- }
- }
- },
- "tables": {
- "test": {
- "cols": [
- ["test_id", "INTEGER"],
- ["title", "TEXT"],
- ["json_id", "INTEGER REFERENCES json (json_id)"]
- ],
- "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
- "schema_changed": 1426195822
- }
- }
- }
-
- db = Db.Db(schema, path)
-
- yield db
-
- db.close()
- if os.path.isfile(path):
- os.unlink(path)
-
- def testDbConnect(self, num_run=1):
- import sqlite3
- for i in range(num_run):
- with self.getTestDb() as db:
- db.checkTables()
- yield "."
- yield "(SQLite version: %s, API: %s)" % (sqlite3.sqlite_version, sqlite3.version)
-
- def testDbInsert(self, num_run=1):
- yield "x 1000 lines "
- for u in range(num_run):
- with self.getTestDb() as db:
- db.checkTables()
- data = {"test": []}
- for i in range(1000): # 1000 line of data
- data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
- json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
- db.updateJson("%s/test_%s.json" % (config.data_dir, u))
- os.unlink("%s/test_%s.json" % (config.data_dir, u))
- assert db.execute("SELECT COUNT(*) FROM test").fetchone()[0] == 1000
- yield "."
-
- def fillTestDb(self, db):
- db.checkTables()
- cur = db.getCursor()
- cur.logging = False
- for u in range(100, 200): # 100 user
- data = {"test": []}
- for i in range(100): # 1000 line of data
- data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
- json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
- db.updateJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
- os.unlink("%s/test_%s.json" % (config.data_dir, u))
- if u % 10 == 0:
- yield "."
-
- def testDbInsertMultiuser(self, num_run=1):
- yield "x 100 users x 100 lines "
- for u in range(num_run):
- with self.getTestDb() as db:
- for progress in self.fillTestDb(db):
- yield progress
- num_rows = db.execute("SELECT COUNT(*) FROM test").fetchone()[0]
- assert num_rows == 10000, "%s != 10000" % num_rows
-
- def testDbQueryIndexed(self, num_run=1):
- s = time.time()
- with self.getTestDb() as db:
- for progress in self.fillTestDb(db):
- pass
- yield " (Db warmup done in %.3fs) " % (time.time() - s)
- found_total = 0
- for i in range(num_run): # 1000x by test_id
- found = 0
- res = db.execute("SELECT * FROM test WHERE test_id = %s" % (i % 100))
- for row in res:
- found_total += 1
- found += 1
- del(res)
- yield "."
- assert found == 100, "%s != 100 (i: %s)" % (found, i)
- yield "Found: %s" % found_total
-
- def testDbQueryNotIndexed(self, num_run=1):
- s = time.time()
- with self.getTestDb() as db:
- for progress in self.fillTestDb(db):
- pass
- yield " (Db warmup done in %.3fs) " % (time.time() - s)
- found_total = 0
- for i in range(num_run): # 1000x by test_id
- found = 0
- res = db.execute("SELECT * FROM test WHERE json_id = %s" % i)
- for row in res:
- found_total += 1
- found += 1
- yield "."
- del(res)
- if i == 0 or i > 100:
- assert found == 0, "%s != 0 (i: %s)" % (found, i)
- else:
- assert found == 100, "%s != 100 (i: %s)" % (found, i)
- yield "Found: %s" % found_total
diff --git a/plugins/Benchmark/BenchmarkPack.py b/plugins/Benchmark/BenchmarkPack.py
deleted file mode 100644
index 6b92e43a..00000000
--- a/plugins/Benchmark/BenchmarkPack.py
+++ /dev/null
@@ -1,183 +0,0 @@
-import os
-import io
-from collections import OrderedDict
-
-from Plugin import PluginManager
-from Config import config
-from util import Msgpack
-
-
-@PluginManager.registerTo("Actions")
-class ActionsPlugin:
- def createZipFile(self, path):
- import zipfile
- test_data = b"Test" * 1024
- file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
- with zipfile.ZipFile(path, 'w') as archive:
- for y in range(100):
- zip_info = zipfile.ZipInfo(file_name % y, (1980, 1, 1, 0, 0, 0))
- zip_info.compress_type = zipfile.ZIP_DEFLATED
- zip_info.create_system = 3
- zip_info.flag_bits = 0
- zip_info.external_attr = 25165824
- archive.writestr(zip_info, test_data)
-
- def testPackZip(self, num_run=1):
- """
- Test zip file creating
- """
- yield "x 100 x 5KB "
- from Crypt import CryptHash
- zip_path = '%s/test.zip' % config.data_dir
- for i in range(num_run):
- self.createZipFile(zip_path)
- yield "."
-
- archive_size = os.path.getsize(zip_path) / 1024
- yield "(Generated file size: %.2fkB)" % archive_size
-
- hash = CryptHash.sha512sum(open(zip_path, "rb"))
- valid = "cb32fb43783a1c06a2170a6bc5bb228a032b67ff7a1fd7a5efb9b467b400f553"
- assert hash == valid, "Invalid hash: %s != %s " % (hash, valid)
- os.unlink(zip_path)
-
- def testUnpackZip(self, num_run=1):
- """
- Test zip file reading
- """
- yield "x 100 x 5KB "
- import zipfile
- zip_path = '%s/test.zip' % config.data_dir
- test_data = b"Test" * 1024
- file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91".decode("utf8")
-
- self.createZipFile(zip_path)
- for i in range(num_run):
- with zipfile.ZipFile(zip_path) as archive:
- for f in archive.filelist:
- assert f.filename.startswith(file_name), "Invalid filename: %s != %s" % (f.filename, file_name)
- data = archive.open(f.filename).read()
- assert archive.open(f.filename).read() == test_data, "Invalid data: %s..." % data[0:30]
- yield "."
-
- os.unlink(zip_path)
-
- def createArchiveFile(self, path, archive_type="gz"):
- import tarfile
- import gzip
-
- # Monkey patch _init_write_gz to use fixed date in order to keep the hash independent from datetime
- def nodate_write_gzip_header(self):
- self._write_mtime = 0
- original_write_gzip_header(self)
-
- test_data_io = io.BytesIO(b"Test" * 1024)
- file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
-
- original_write_gzip_header = gzip.GzipFile._write_gzip_header
- gzip.GzipFile._write_gzip_header = nodate_write_gzip_header
- with tarfile.open(path, 'w:%s' % archive_type) as archive:
- for y in range(100):
- test_data_io.seek(0)
- tar_info = tarfile.TarInfo(file_name % y)
- tar_info.size = 4 * 1024
- archive.addfile(tar_info, test_data_io)
-
- def testPackArchive(self, num_run=1, archive_type="gz"):
- """
- Test creating tar archive files
- """
- yield "x 100 x 5KB "
- from Crypt import CryptHash
-
- hash_valid_db = {
- "gz": "92caec5121a31709cbbc8c11b0939758e670b055bbbe84f9beb3e781dfde710f",
- "bz2": "b613f41e6ee947c8b9b589d3e8fa66f3e28f63be23f4faf015e2f01b5c0b032d",
- "xz": "ae43892581d770959c8d993daffab25fd74490b7cf9fafc7aaee746f69895bcb",
- }
- archive_path = '%s/test.tar.%s' % (config.data_dir, archive_type)
- for i in range(num_run):
- self.createArchiveFile(archive_path, archive_type=archive_type)
- yield "."
-
- archive_size = os.path.getsize(archive_path) / 1024
- yield "(Generated file size: %.2fkB)" % archive_size
-
- hash = CryptHash.sha512sum(open("%s/test.tar.%s" % (config.data_dir, archive_type), "rb"))
- valid = hash_valid_db[archive_type]
- assert hash == valid, "Invalid hash: %s != %s " % (hash, valid)
-
- if os.path.isfile(archive_path):
- os.unlink(archive_path)
-
- def testUnpackArchive(self, num_run=1, archive_type="gz"):
- """
- Test reading tar archive files
- """
- yield "x 100 x 5KB "
- import tarfile
-
- test_data = b"Test" * 1024
- file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
- archive_path = '%s/test.tar.%s' % (config.data_dir, archive_type)
- self.createArchiveFile(archive_path, archive_type=archive_type)
- for i in range(num_run):
- with tarfile.open(archive_path, 'r:%s' % archive_type) as archive:
- for y in range(100):
- assert archive.extractfile(file_name % y).read() == test_data
- yield "."
- if os.path.isfile(archive_path):
- os.unlink(archive_path)
-
- def testPackMsgpack(self, num_run=1):
- """
- Test msgpack encoding
- """
- yield "x 100 x 5KB "
- binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
- data = OrderedDict(
- sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
- )
- data_packed_valid = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00'
- data_packed_valid += b'hello' * 1024
- for y in range(num_run):
- for i in range(100):
- data_packed = Msgpack.pack(data)
- yield "."
- assert data_packed == data_packed_valid, "%s != %s" % (repr(data_packed), repr(data_packed_valid))
-
- def testUnpackMsgpack(self, num_run=1):
- """
- Test msgpack decoding
- """
- yield "x 5KB "
- binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
- data = OrderedDict(
- sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
- )
- data_packed = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00'
- data_packed += b'hello' * 1024
- for y in range(num_run):
- data_unpacked = Msgpack.unpack(data_packed, decode=False)
- yield "."
- assert data_unpacked == data, "%s != %s" % (data_unpacked, data)
-
- def testUnpackMsgpackStreaming(self, num_run=1, fallback=False):
- """
- Test streaming msgpack decoding
- """
- yield "x 1000 x 5KB "
- binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
- data = OrderedDict(
- sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
- )
- data_packed = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00'
- data_packed += b'hello' * 1024
- for i in range(num_run):
- unpacker = Msgpack.getUnpacker(decode=False, fallback=fallback)
- for y in range(1000):
- unpacker.feed(data_packed)
- for data_unpacked in unpacker:
- pass
- yield "."
- assert data == data_unpacked, "%s != %s" % (data_unpacked, data)
diff --git a/plugins/Benchmark/BenchmarkPlugin.py b/plugins/Benchmark/BenchmarkPlugin.py
deleted file mode 100644
index fd6cacf3..00000000
--- a/plugins/Benchmark/BenchmarkPlugin.py
+++ /dev/null
@@ -1,428 +0,0 @@
-import os
-import time
-import io
-import math
-import hashlib
-import re
-import sys
-
-from Config import config
-from Crypt import CryptHash
-from Plugin import PluginManager
-from Debug import Debug
-from util import helper
-
-plugin_dir = os.path.dirname(__file__)
-
-benchmark_key = None
-
-
-@PluginManager.registerTo("UiRequest")
-class UiRequestPlugin(object):
- @helper.encodeResponse
- def actionBenchmark(self):
- global benchmark_key
- script_nonce = self.getScriptNonce()
- if not benchmark_key:
- benchmark_key = CryptHash.random(encoding="base64")
- self.sendHeader(script_nonce=script_nonce)
-
- if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
- yield "This function is disabled on this proxy"
- return
-
- data = self.render(
- plugin_dir + "/media/benchmark.html",
- script_nonce=script_nonce,
- benchmark_key=benchmark_key,
- filter=re.sub("[^A-Za-z0-9]", "", self.get.get("filter", ""))
- )
- yield data
-
- @helper.encodeResponse
- def actionBenchmarkResult(self):
- global benchmark_key
- if self.get.get("benchmark_key", "") != benchmark_key:
- return self.error403("Invalid benchmark key")
-
- self.sendHeader(content_type="text/plain", noscript=True)
-
- if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
- yield "This function is disabled on this proxy"
- return
-
- yield " " * 1024 # Head (required for streaming)
-
- import main
- s = time.time()
-
- for part in main.actions.testBenchmark(filter=self.get.get("filter", "")):
- yield part
-
- yield "\n - Total time: %.3fs" % (time.time() - s)
-
-
-@PluginManager.registerTo("Actions")
-class ActionsPlugin:
- def getMultiplerTitle(self, multipler):
- if multipler < 0.3:
- multipler_title = "Sloooow"
- elif multipler < 0.6:
- multipler_title = "Ehh"
- elif multipler < 0.8:
- multipler_title = "Goodish"
- elif multipler < 1.2:
- multipler_title = "OK"
- elif multipler < 1.7:
- multipler_title = "Fine"
- elif multipler < 2.5:
- multipler_title = "Fast"
- elif multipler < 3.5:
- multipler_title = "WOW"
- else:
- multipler_title = "Insane!!"
- return multipler_title
-
- def formatResult(self, taken, standard):
- if not standard:
- return " Done in %.3fs" % taken
-
- if taken > 0:
- multipler = standard / taken
- else:
- multipler = 99
- multipler_title = self.getMultiplerTitle(multipler)
-
- return " Done in %.3fs = %s (%.2fx)" % (taken, multipler_title, multipler)
-
- def getBenchmarkTests(self, online=False):
- if hasattr(super(), "getBenchmarkTests"):
- tests = super().getBenchmarkTests(online)
- else:
- tests = []
-
- tests.extend([
- {"func": self.testHdPrivatekey, "num": 50, "time_standard": 0.57},
- {"func": self.testSign, "num": 20, "time_standard": 0.46},
- {"func": self.testVerify, "kwargs": {"lib_verify": "sslcrypto_fallback"}, "num": 20, "time_standard": 0.38},
- {"func": self.testVerify, "kwargs": {"lib_verify": "sslcrypto"}, "num": 200, "time_standard": 0.30},
- {"func": self.testVerify, "kwargs": {"lib_verify": "libsecp256k1"}, "num": 200, "time_standard": 0.10},
-
- {"func": self.testPackMsgpack, "num": 100, "time_standard": 0.35},
- {"func": self.testUnpackMsgpackStreaming, "kwargs": {"fallback": False}, "num": 100, "time_standard": 0.35},
- {"func": self.testUnpackMsgpackStreaming, "kwargs": {"fallback": True}, "num": 10, "time_standard": 0.5},
-
- {"func": self.testPackZip, "num": 5, "time_standard": 0.065},
- {"func": self.testPackArchive, "kwargs": {"archive_type": "gz"}, "num": 5, "time_standard": 0.08},
- {"func": self.testPackArchive, "kwargs": {"archive_type": "bz2"}, "num": 5, "time_standard": 0.68},
- {"func": self.testPackArchive, "kwargs": {"archive_type": "xz"}, "num": 5, "time_standard": 0.47},
- {"func": self.testUnpackZip, "num": 20, "time_standard": 0.25},
- {"func": self.testUnpackArchive, "kwargs": {"archive_type": "gz"}, "num": 20, "time_standard": 0.28},
- {"func": self.testUnpackArchive, "kwargs": {"archive_type": "bz2"}, "num": 20, "time_standard": 0.83},
- {"func": self.testUnpackArchive, "kwargs": {"archive_type": "xz"}, "num": 20, "time_standard": 0.38},
-
- {"func": self.testCryptHash, "kwargs": {"hash_type": "sha256"}, "num": 10, "time_standard": 0.50},
- {"func": self.testCryptHash, "kwargs": {"hash_type": "sha512"}, "num": 10, "time_standard": 0.33},
- {"func": self.testCryptHashlib, "kwargs": {"hash_type": "sha3_256"}, "num": 10, "time_standard": 0.33},
- {"func": self.testCryptHashlib, "kwargs": {"hash_type": "sha3_512"}, "num": 10, "time_standard": 0.65},
-
- {"func": self.testRandom, "num": 100, "time_standard": 0.08},
- ])
-
- if online:
- tests += [
- {"func": self.testHttps, "num": 1, "time_standard": 2.1}
- ]
- return tests
-
- def testBenchmark(self, num_multipler=1, online=False, num_run=None, filter=None):
- """
- Run benchmark on client functions
- """
- tests = self.getBenchmarkTests(online=online)
-
- if filter:
- tests = [test for test in tests[:] if filter.lower() in test["func"].__name__.lower()]
-
- yield "\n"
- res = {}
- res_time_taken = {}
- multiplers = []
- for test in tests:
- s = time.time()
- if num_run:
- num_run_test = num_run
- else:
- num_run_test = math.ceil(test["num"] * num_multipler)
- func = test["func"]
- func_name = func.__name__
- kwargs = test.get("kwargs", {})
- key = "%s %s" % (func_name, kwargs)
- if kwargs:
- yield "* Running %s (%s) x %s " % (func_name, kwargs, num_run_test)
- else:
- yield "* Running %s x %s " % (func_name, num_run_test)
- i = 0
- try:
- for progress in func(num_run_test, **kwargs):
- i += 1
- if num_run_test > 10:
- should_print = i % (num_run_test / 10) == 0 or progress != "."
- else:
- should_print = True
-
- if should_print:
- if num_run_test == 1 and progress == ".":
- progress = "..."
- yield progress
- time_taken = time.time() - s
- if num_run:
- time_standard = 0
- else:
- time_standard = test["time_standard"] * num_multipler
- yield self.formatResult(time_taken, time_standard)
- yield "\n"
- res[key] = "ok"
- res_time_taken[key] = time_taken
- multiplers.append(time_standard / max(time_taken, 0.001))
- except Exception as err:
- res[key] = err
- yield "Failed!\n! Error: %s\n\n" % Debug.formatException(err)
-
- yield "\n== Result ==\n"
-
- # Check verification speed
- if "testVerify {'lib_verify': 'sslcrypto'}" in res_time_taken:
- speed_order = ["sslcrypto_fallback", "sslcrypto", "libsecp256k1"]
- time_taken = {}
- for lib_verify in speed_order:
- time_taken[lib_verify] = res_time_taken["testVerify {'lib_verify': '%s'}" % lib_verify]
-
- time_taken["sslcrypto_fallback"] *= 10 # fallback benchmark only run 20 times instead of 200
- speedup_sslcrypto = time_taken["sslcrypto_fallback"] / time_taken["sslcrypto"]
- speedup_libsecp256k1 = time_taken["sslcrypto_fallback"] / time_taken["libsecp256k1"]
-
- yield "\n* Verification speedup:\n"
- yield " - OpenSSL: %.1fx (reference: 7.0x)\n" % speedup_sslcrypto
- yield " - libsecp256k1: %.1fx (reference: 23.8x)\n" % speedup_libsecp256k1
-
- if speedup_sslcrypto < 2:
- res["Verification speed"] = "error: OpenSSL speedup low: %.1fx" % speedup_sslcrypto
-
- if speedup_libsecp256k1 < speedup_sslcrypto:
- res["Verification speed"] = "error: libsecp256k1 speedup low: %.1fx" % speedup_libsecp256k1
-
- if not res:
- yield "! No tests found"
- if config.action == "test":
- sys.exit(1)
- else:
- num_failed = len([res_key for res_key, res_val in res.items() if res_val != "ok"])
- num_success = len([res_key for res_key, res_val in res.items() if res_val == "ok"])
- yield "\n* Tests:\n"
- yield " - Total: %s tests\n" % len(res)
- yield " - Success: %s tests\n" % num_success
- yield " - Failed: %s tests\n" % num_failed
- if any(multiplers):
- multipler_avg = sum(multiplers) / len(multiplers)
- multipler_title = self.getMultiplerTitle(multipler_avg)
- yield " - Average speed factor: %.2fx (%s)\n" % (multipler_avg, multipler_title)
-
- # Display errors
- for res_key, res_val in res.items():
- if res_val != "ok":
- yield " ! %s %s\n" % (res_key, res_val)
-
- if num_failed != 0 and config.action == "test":
- sys.exit(1)
-
- def testHttps(self, num_run=1):
- """
- Test https connection with valid and invalid certs
- """
- import urllib.request
- import urllib.error
-
- body = urllib.request.urlopen("https://google.com").read()
- assert len(body) > 100
- yield "."
-
- badssl_urls = [
- "https://expired.badssl.com/",
- "https://wrong.host.badssl.com/",
- "https://self-signed.badssl.com/",
- "https://untrusted-root.badssl.com/"
- ]
- for badssl_url in badssl_urls:
- try:
- body = urllib.request.urlopen(badssl_url).read()
- https_err = None
- except urllib.error.URLError as err:
- https_err = err
- assert https_err
- yield "."
-
- def testCryptHash(self, num_run=1, hash_type="sha256"):
- """
- Test hashing functions
- """
- yield "(5MB) "
-
- from Crypt import CryptHash
-
- hash_types = {
- "sha256": {"func": CryptHash.sha256sum, "hash_valid": "8cd629d9d6aff6590da8b80782a5046d2673d5917b99d5603c3dcb4005c45ffa"},
- "sha512": {"func": CryptHash.sha512sum, "hash_valid": "9ca7e855d430964d5b55b114e95c6bbb114a6d478f6485df93044d87b108904d"}
- }
- hash_func = hash_types[hash_type]["func"]
- hash_valid = hash_types[hash_type]["hash_valid"]
-
- data = io.BytesIO(b"Hello" * 1024 * 1024) # 5MB
- for i in range(num_run):
- data.seek(0)
- hash = hash_func(data)
- yield "."
- assert hash == hash_valid, "%s != %s" % (hash, hash_valid)
-
- def testCryptHashlib(self, num_run=1, hash_type="sha3_256"):
- """
- Test SHA3 hashing functions
- """
- yield "x 5MB "
-
- hash_types = {
- "sha3_256": {"func": hashlib.sha3_256, "hash_valid": "c8aeb3ef9fe5d6404871c0d2a4410a4d4e23268e06735648c9596f436c495f7e"},
- "sha3_512": {"func": hashlib.sha3_512, "hash_valid": "b75dba9472d8af3cc945ce49073f3f8214d7ac12086c0453fb08944823dee1ae83b3ffbc87a53a57cc454521d6a26fe73ff0f3be38dddf3f7de5d7692ebc7f95"},
- }
-
- hash_func = hash_types[hash_type]["func"]
- hash_valid = hash_types[hash_type]["hash_valid"]
-
- data = io.BytesIO(b"Hello" * 1024 * 1024) # 5MB
- for i in range(num_run):
- data.seek(0)
- h = hash_func()
- while 1:
- buff = data.read(1024 * 64)
- if not buff:
- break
- h.update(buff)
- hash = h.hexdigest()
- yield "."
- assert hash == hash_valid, "%s != %s" % (hash, hash_valid)
-
- def testRandom(self, num_run=1):
- """
- Test generating random data
- """
- yield "x 1000 x 256 bytes "
- for i in range(num_run):
- data_last = None
- for y in range(1000):
- data = os.urandom(256)
- assert data != data_last
- assert len(data) == 256
- data_last = data
- yield "."
-
- def testHdPrivatekey(self, num_run=2):
- """
- Test generating deterministic private keys from a master seed
- """
- from Crypt import CryptBitcoin
- seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"
- privatekeys = []
- for i in range(num_run):
- privatekeys.append(CryptBitcoin.hdPrivatekey(seed, i * 10))
- yield "."
- valid = "5JSbeF5PevdrsYjunqpg7kAGbnCVYa1T4APSL3QRu8EoAmXRc7Y"
- assert privatekeys[0] == valid, "%s != %s" % (privatekeys[0], valid)
- if len(privatekeys) > 1:
- assert privatekeys[0] != privatekeys[-1]
-
- def testSign(self, num_run=1):
- """
- Test signing data using a private key
- """
- from Crypt import CryptBitcoin
- data = "Hello" * 1024
- privatekey = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
- for i in range(num_run):
- yield "."
- sign = CryptBitcoin.sign(data, privatekey)
- valid = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
- assert sign == valid, "%s != %s" % (sign, valid)
-
- def testVerify(self, num_run=1, lib_verify="sslcrypto"):
- """
- Test verification of generated signatures
- """
- from Crypt import CryptBitcoin
- CryptBitcoin.loadLib(lib_verify, silent=True)
-
-
- data = "Hello" * 1024
- privatekey = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
- address = CryptBitcoin.privatekeyToAddress(privatekey)
- sign = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
-
- for i in range(num_run):
- ok = CryptBitcoin.verify(data, address, sign, lib_verify=lib_verify)
- yield "."
- assert ok, "does not verify from %s" % address
-
- if lib_verify == "sslcrypto":
- yield("(%s)" % CryptBitcoin.sslcrypto.ecc.get_backend())
-
- def testPortCheckers(self):
- """
- Test all active open port checker
- """
- from Peer import PeerPortchecker
- for ip_type, func_names in PeerPortchecker.PeerPortchecker.checker_functions.items():
- yield "\n- %s:" % ip_type
- for func_name in func_names:
- yield "\n - Tracker %s: " % func_name
- try:
- for res in self.testPortChecker(func_name):
- yield res
- except Exception as err:
- yield Debug.formatException(err)
-
- def testPortChecker(self, func_name):
- """
- Test single open port checker
- """
- from Peer import PeerPortchecker
- peer_portchecker = PeerPortchecker.PeerPortchecker(None)
- announce_func = getattr(peer_portchecker, func_name)
- res = announce_func(3894)
- yield res
-
- def testAll(self):
- """
- Run all tests to check system compatibility with ZeroNet functions
- """
- for progress in self.testBenchmark(online=not config.offline, num_run=1):
- yield progress
-
-
-@PluginManager.registerTo("ConfigPlugin")
-class ConfigPlugin(object):
- def createArguments(self):
- back = super(ConfigPlugin, self).createArguments()
- if self.getCmdlineValue("test") == "benchmark":
- self.test_parser.add_argument(
- '--num_multipler', help='Benchmark run time multipler',
- default=1.0, type=float, metavar='num'
- )
- self.test_parser.add_argument(
- '--filter', help='Filter running benchmark',
- default=None, metavar='test name'
- )
- elif self.getCmdlineValue("test") == "portChecker":
- self.test_parser.add_argument(
- '--func_name', help='Name of open port checker function',
- default=None, metavar='func_name'
- )
- return back
diff --git a/plugins/Benchmark/__init__.py b/plugins/Benchmark/__init__.py
deleted file mode 100644
index 76a5ae9c..00000000
--- a/plugins/Benchmark/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from . import BenchmarkPlugin
-from . import BenchmarkDb
-from . import BenchmarkPack
diff --git a/plugins/Benchmark/media/benchmark.html b/plugins/Benchmark/media/benchmark.html
deleted file mode 100644
index 73571367..00000000
--- a/plugins/Benchmark/media/benchmark.html
+++ /dev/null
@@ -1,123 +0,0 @@
-
-
-
-
-
-Benchmark
-
-
-
-
-
\ No newline at end of file
diff --git a/plugins/Benchmark/plugin_info.json b/plugins/Benchmark/plugin_info.json
deleted file mode 100644
index f3f57417..00000000
--- a/plugins/Benchmark/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "Benchmark",
- "description": "Test and benchmark database and cryptographic functions related to ZeroNet.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/Bigfile/BigfilePiecefield.py b/plugins/Bigfile/BigfilePiecefield.py
deleted file mode 100644
index 9a6f370b..00000000
--- a/plugins/Bigfile/BigfilePiecefield.py
+++ /dev/null
@@ -1,170 +0,0 @@
-import array
-
-
-def packPiecefield(data):
- if not isinstance(data, bytes) and not isinstance(data, bytearray):
- raise Exception("Invalid data type: %s" % type(data))
-
- res = []
- if not data:
- return array.array("H", b"")
-
- if data[0] == b"\x00":
- res.append(0)
- find = b"\x01"
- else:
- find = b"\x00"
- last_pos = 0
- pos = 0
- while 1:
- pos = data.find(find, pos)
- if find == b"\x00":
- find = b"\x01"
- else:
- find = b"\x00"
- if pos == -1:
- res.append(len(data) - last_pos)
- break
- res.append(pos - last_pos)
- last_pos = pos
- return array.array("H", res)
-
-
-def unpackPiecefield(data):
- if not data:
- return b""
-
- res = []
- char = b"\x01"
- for times in data:
- if times > 10000:
- return b""
- res.append(char * times)
- if char == b"\x01":
- char = b"\x00"
- else:
- char = b"\x01"
- return b"".join(res)
-
-
-def spliceBit(data, idx, bit):
- if bit != b"\x00" and bit != b"\x01":
- raise Exception("Invalid bit: %s" % bit)
-
- if len(data) < idx:
- data = data.ljust(idx + 1, b"\x00")
- return data[:idx] + bit + data[idx+ 1:]
-
-class Piecefield(object):
- def tostring(self):
- return "".join(["1" if b else "0" for b in self.tobytes()])
-
-
-class BigfilePiecefield(Piecefield):
- __slots__ = ["data"]
-
- def __init__(self):
- self.data = b""
-
- def frombytes(self, s):
- if not isinstance(s, bytes) and not isinstance(s, bytearray):
- raise Exception("Invalid type: %s" % type(s))
- self.data = s
-
- def tobytes(self):
- return self.data
-
- def pack(self):
- return packPiecefield(self.data).tobytes()
-
- def unpack(self, s):
- self.data = unpackPiecefield(array.array("H", s))
-
- def __getitem__(self, key):
- try:
- return self.data[key]
- except IndexError:
- return False
-
- def __setitem__(self, key, value):
- self.data = spliceBit(self.data, key, value)
-
-class BigfilePiecefieldPacked(Piecefield):
- __slots__ = ["data"]
-
- def __init__(self):
- self.data = b""
-
- def frombytes(self, data):
- if not isinstance(data, bytes) and not isinstance(data, bytearray):
- raise Exception("Invalid type: %s" % type(data))
- self.data = packPiecefield(data).tobytes()
-
- def tobytes(self):
- return unpackPiecefield(array.array("H", self.data))
-
- def pack(self):
- return array.array("H", self.data).tobytes()
-
- def unpack(self, data):
- self.data = data
-
- def __getitem__(self, key):
- try:
- return self.tobytes()[key]
- except IndexError:
- return False
-
- def __setitem__(self, key, value):
- data = spliceBit(self.tobytes(), key, value)
- self.frombytes(data)
-
-
-if __name__ == "__main__":
- import os
- import psutil
- import time
- testdata = b"\x01" * 100 + b"\x00" * 900 + b"\x01" * 4000 + b"\x00" * 4999 + b"\x01"
- meminfo = psutil.Process(os.getpid()).memory_info
-
- for storage in [BigfilePiecefieldPacked, BigfilePiecefield]:
- print("-- Testing storage: %s --" % storage)
- m = meminfo()[0]
- s = time.time()
- piecefields = {}
- for i in range(10000):
- piecefield = storage()
- piecefield.frombytes(testdata[:i] + b"\x00" + testdata[i + 1:])
- piecefields[i] = piecefield
-
- print("Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
-
- m = meminfo()[0]
- s = time.time()
- for piecefield in list(piecefields.values()):
- val = piecefield[1000]
-
- print("Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
-
- m = meminfo()[0]
- s = time.time()
- for piecefield in list(piecefields.values()):
- piecefield[1000] = b"\x01"
-
- print("Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
-
- m = meminfo()[0]
- s = time.time()
- for piecefield in list(piecefields.values()):
- packed = piecefield.pack()
-
- print("Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)))
-
- m = meminfo()[0]
- s = time.time()
- for piecefield in list(piecefields.values()):
- piecefield.unpack(packed)
-
- print("Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
-
- piecefields = {}
diff --git a/plugins/Bigfile/BigfilePlugin.py b/plugins/Bigfile/BigfilePlugin.py
deleted file mode 100644
index 78a27b05..00000000
--- a/plugins/Bigfile/BigfilePlugin.py
+++ /dev/null
@@ -1,843 +0,0 @@
-import time
-import os
-import subprocess
-import shutil
-import collections
-import math
-import warnings
-import base64
-import binascii
-import json
-
-import gevent
-import gevent.lock
-
-from Plugin import PluginManager
-from Debug import Debug
-from Crypt import CryptHash
-with warnings.catch_warnings():
- warnings.filterwarnings("ignore") # Ignore missing sha3 warning
- import merkletools
-
-from util import helper
-from util import Msgpack
-from util.Flag import flag
-import util
-from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
-
-
-# We can only import plugin host clases after the plugins are loaded
-@PluginManager.afterLoad
-def importPluginnedClasses():
- global VerifyError, config
- from Content.ContentManager import VerifyError
- from Config import config
-
-
-if "upload_nonces" not in locals():
- upload_nonces = {}
-
-
-@PluginManager.registerTo("UiRequest")
-class UiRequestPlugin(object):
- def isCorsAllowed(self, path):
- if path == "/ZeroNet-Internal/BigfileUpload":
- return True
- else:
- return super(UiRequestPlugin, self).isCorsAllowed(path)
-
- @helper.encodeResponse
- def actionBigfileUpload(self):
- nonce = self.get.get("upload_nonce")
- if nonce not in upload_nonces:
- return self.error403("Upload nonce error.")
-
- upload_info = upload_nonces[nonce]
- del upload_nonces[nonce]
-
- self.sendHeader(200, "text/html", noscript=True, extra_headers={
- "Access-Control-Allow-Origin": "null",
- "Access-Control-Allow-Credentials": "true"
- })
-
- self.readMultipartHeaders(self.env['wsgi.input']) # Skip http headers
- result = self.handleBigfileUpload(upload_info, self.env['wsgi.input'].read)
- return json.dumps(result)
-
- def actionBigfileUploadWebsocket(self):
- ws = self.env.get("wsgi.websocket")
-
- if not ws:
- self.start_response("400 Bad Request", [])
- return [b"Not a websocket request!"]
-
- nonce = self.get.get("upload_nonce")
- if nonce not in upload_nonces:
- return self.error403("Upload nonce error.")
-
- upload_info = upload_nonces[nonce]
- del upload_nonces[nonce]
-
- ws.send("poll")
-
- buffer = b""
- def read(size):
- nonlocal buffer
- while len(buffer) < size:
- buffer += ws.receive()
- ws.send("poll")
- part, buffer = buffer[:size], buffer[size:]
- return part
-
- result = self.handleBigfileUpload(upload_info, read)
- ws.send(json.dumps(result))
-
- def handleBigfileUpload(self, upload_info, read):
- site = upload_info["site"]
- inner_path = upload_info["inner_path"]
-
- with site.storage.open(inner_path, "wb", create_dirs=True) as out_file:
- merkle_root, piece_size, piecemap_info = site.content_manager.hashBigfile(
- read, upload_info["size"], upload_info["piece_size"], out_file
- )
-
- if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split
- hash = binascii.hexlify(piecemap_info["sha512_pieces"][0])
- hash_id = site.content_manager.hashfield.getHashId(hash)
- site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True)
-
- else: # Big file
- file_name = helper.getFilename(inner_path)
- site.storage.open(upload_info["piecemap"], "wb").write(Msgpack.pack({file_name: piecemap_info}))
-
- # Find piecemap and file relative path to content.json
- file_info = site.content_manager.getFileInfo(inner_path, new_file=True)
- content_inner_path_dir = helper.getDirname(file_info["content_inner_path"])
- piecemap_relative_path = upload_info["piecemap"][len(content_inner_path_dir):]
- file_relative_path = inner_path[len(content_inner_path_dir):]
-
- # Add file to content.json
- if site.storage.isFile(file_info["content_inner_path"]):
- content = site.storage.loadJson(file_info["content_inner_path"])
- else:
- content = {}
- if "files_optional" not in content:
- content["files_optional"] = {}
-
- content["files_optional"][file_relative_path] = {
- "sha512": merkle_root,
- "size": upload_info["size"],
- "piecemap": piecemap_relative_path,
- "piece_size": piece_size
- }
-
- merkle_root_hash_id = site.content_manager.hashfield.getHashId(merkle_root)
- site.content_manager.optionalDownloaded(inner_path, merkle_root_hash_id, upload_info["size"], own=True)
- site.storage.writeJson(file_info["content_inner_path"], content)
-
- site.content_manager.contents.loadItem(file_info["content_inner_path"]) # reload cache
-
- return {
- "merkle_root": merkle_root,
- "piece_num": len(piecemap_info["sha512_pieces"]),
- "piece_size": piece_size,
- "inner_path": inner_path
- }
-
- def readMultipartHeaders(self, wsgi_input):
- found = False
- for i in range(100):
- line = wsgi_input.readline()
- if line == b"\r\n":
- found = True
- break
- if not found:
- raise Exception("No multipart header found")
- return i
-
- def actionFile(self, file_path, *args, **kwargs):
- if kwargs.get("file_size", 0) > 1024 * 1024 and kwargs.get("path_parts"): # Only check files larger than 1MB
- path_parts = kwargs["path_parts"]
- site = self.server.site_manager.get(path_parts["address"])
- big_file = site.storage.openBigfile(path_parts["inner_path"], prebuffer=2 * 1024 * 1024)
- if big_file:
- kwargs["file_obj"] = big_file
- kwargs["file_size"] = big_file.size
-
- return super(UiRequestPlugin, self).actionFile(file_path, *args, **kwargs)
-
-
-@PluginManager.registerTo("UiWebsocket")
-class UiWebsocketPlugin(object):
- def actionBigfileUploadInit(self, to, inner_path, size, protocol="xhr"):
- valid_signers = self.site.content_manager.getValidSigners(inner_path)
- auth_address = self.user.getAuthAddress(self.site.address)
- if not self.site.settings["own"] and auth_address not in valid_signers:
- self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers))
- return self.response(to, {"error": "Forbidden, you can only modify your own files"})
-
- nonce = CryptHash.random()
- piece_size = 1024 * 1024
- inner_path = self.site.content_manager.sanitizePath(inner_path)
- file_info = self.site.content_manager.getFileInfo(inner_path, new_file=True)
-
- content_inner_path_dir = helper.getDirname(file_info["content_inner_path"])
- file_relative_path = inner_path[len(content_inner_path_dir):]
-
- upload_nonces[nonce] = {
- "added": time.time(),
- "site": self.site,
- "inner_path": inner_path,
- "websocket_client": self,
- "size": size,
- "piece_size": piece_size,
- "piecemap": inner_path + ".piecemap.msgpack"
- }
-
- if protocol == "xhr":
- return {
- "url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce,
- "piece_size": piece_size,
- "inner_path": inner_path,
- "file_relative_path": file_relative_path
- }
- elif protocol == "websocket":
- server_url = self.request.getWsServerUrl()
- if server_url:
- proto, host = server_url.split("://")
- origin = proto.replace("http", "ws") + "://" + host
- else:
- origin = "{origin}"
- return {
- "url": origin + "/ZeroNet-Internal/BigfileUploadWebsocket?upload_nonce=" + nonce,
- "piece_size": piece_size,
- "inner_path": inner_path,
- "file_relative_path": file_relative_path
- }
- else:
- return {"error": "Unknown protocol"}
-
- @flag.no_multiuser
- def actionSiteSetAutodownloadBigfileLimit(self, to, limit):
- permissions = self.getPermissions(to)
- if "ADMIN" not in permissions:
- return self.response(to, "You don't have permission to run this command")
-
- self.site.settings["autodownload_bigfile_size_limit"] = int(limit)
- self.response(to, "ok")
-
- def actionFileDelete(self, to, inner_path):
- piecemap_inner_path = inner_path + ".piecemap.msgpack"
- if self.hasFilePermission(inner_path) and self.site.storage.isFile(piecemap_inner_path):
- # Also delete .piecemap.msgpack file if exists
- self.log.debug("Deleting piecemap: %s" % piecemap_inner_path)
- file_info = self.site.content_manager.getFileInfo(piecemap_inner_path)
- if file_info:
- content_json = self.site.storage.loadJson(file_info["content_inner_path"])
- relative_path = file_info["relative_path"]
- if relative_path in content_json.get("files_optional", {}):
- del content_json["files_optional"][relative_path]
- self.site.storage.writeJson(file_info["content_inner_path"], content_json)
- self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True)
- try:
- self.site.storage.delete(piecemap_inner_path)
- except Exception as err:
- self.log.error("File %s delete error: %s" % (piecemap_inner_path, err))
-
- return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path)
-
-
-@PluginManager.registerTo("ContentManager")
-class ContentManagerPlugin(object):
- def getFileInfo(self, inner_path, *args, **kwargs):
- if "|" not in inner_path:
- return super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs)
-
- inner_path, file_range = inner_path.split("|")
- pos_from, pos_to = map(int, file_range.split("-"))
- file_info = super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs)
- return file_info
-
- def readFile(self, read_func, size, buff_size=1024 * 64):
- part_num = 0
- recv_left = size
-
- while 1:
- part_num += 1
- read_size = min(buff_size, recv_left)
- part = read_func(read_size)
-
- if not part:
- break
- yield part
-
- if part_num % 100 == 0: # Avoid blocking ZeroNet execution during upload
- time.sleep(0.001)
-
- recv_left -= read_size
- if recv_left <= 0:
- break
-
- def hashBigfile(self, read_func, size, piece_size=1024 * 1024, file_out=None):
- self.site.settings["has_bigfile"] = True
-
- recv = 0
- try:
- piece_hash = CryptHash.sha512t()
- piece_hashes = []
- piece_recv = 0
-
- mt = merkletools.MerkleTools()
- mt.hash_function = CryptHash.sha512t
-
- part = ""
- for part in self.readFile(read_func, size):
- if file_out:
- file_out.write(part)
-
- recv += len(part)
- piece_recv += len(part)
- piece_hash.update(part)
- if piece_recv >= piece_size:
- piece_digest = piece_hash.digest()
- piece_hashes.append(piece_digest)
- mt.leaves.append(piece_digest)
- piece_hash = CryptHash.sha512t()
- piece_recv = 0
-
- if len(piece_hashes) % 100 == 0 or recv == size:
- self.log.info("- [HASHING:%.0f%%] Pieces: %s, %.1fMB/%.1fMB" % (
- float(recv) / size * 100, len(piece_hashes), recv / 1024 / 1024, size / 1024 / 1024
- ))
- part = ""
- if len(part) > 0:
- piece_digest = piece_hash.digest()
- piece_hashes.append(piece_digest)
- mt.leaves.append(piece_digest)
- except Exception as err:
- raise err
- finally:
- if file_out:
- file_out.close()
-
- mt.make_tree()
- merkle_root = mt.get_merkle_root()
- if type(merkle_root) is bytes: # Python <3.5
- merkle_root = merkle_root.decode()
- return merkle_root, piece_size, {
- "sha512_pieces": piece_hashes
- }
-
- def hashFile(self, dir_inner_path, file_relative_path, optional=False):
- inner_path = dir_inner_path + file_relative_path
-
- file_size = self.site.storage.getSize(inner_path)
- # Only care about optional files >1MB
- if not optional or file_size < 1 * 1024 * 1024:
- return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)
-
- back = {}
- content = self.contents.get(dir_inner_path + "content.json")
-
- hash = None
- piecemap_relative_path = None
- piece_size = None
-
- # Don't re-hash if it's already in content.json
- if content and file_relative_path in content.get("files_optional", {}):
- file_node = content["files_optional"][file_relative_path]
- if file_node["size"] == file_size:
- self.log.info("- [SAME SIZE] %s" % file_relative_path)
- hash = file_node.get("sha512")
- piecemap_relative_path = file_node.get("piecemap")
- piece_size = file_node.get("piece_size")
-
- if not hash or not piecemap_relative_path: # Not in content.json yet
- if file_size < 5 * 1024 * 1024: # Don't create piecemap automatically for files smaller than 5MB
- return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)
-
- self.log.info("- [HASHING] %s" % file_relative_path)
- merkle_root, piece_size, piecemap_info = self.hashBigfile(self.site.storage.open(inner_path, "rb").read, file_size)
- if not hash:
- hash = merkle_root
-
- if not piecemap_relative_path:
- file_name = helper.getFilename(file_relative_path)
- piecemap_relative_path = file_relative_path + ".piecemap.msgpack"
- piecemap_inner_path = inner_path + ".piecemap.msgpack"
-
- self.site.storage.open(piecemap_inner_path, "wb").write(Msgpack.pack({file_name: piecemap_info}))
-
- back.update(super(ContentManagerPlugin, self).hashFile(dir_inner_path, piecemap_relative_path, optional=True))
-
- piece_num = int(math.ceil(float(file_size) / piece_size))
-
- # Add the merkle root to hashfield
- hash_id = self.site.content_manager.hashfield.getHashId(hash)
- self.optionalDownloaded(inner_path, hash_id, file_size, own=True)
- self.site.storage.piecefields[hash].frombytes(b"\x01" * piece_num)
-
- back[file_relative_path] = {"sha512": hash, "size": file_size, "piecemap": piecemap_relative_path, "piece_size": piece_size}
- return back
-
- def getPiecemap(self, inner_path):
- file_info = self.site.content_manager.getFileInfo(inner_path)
- piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"]
- self.site.needFile(piecemap_inner_path, priority=20)
- piecemap = Msgpack.unpack(self.site.storage.open(piecemap_inner_path, "rb").read())[helper.getFilename(inner_path)]
- piecemap["piece_size"] = file_info["piece_size"]
- return piecemap
-
- def verifyPiece(self, inner_path, pos, piece):
- try:
- piecemap = self.getPiecemap(inner_path)
- except Exception as err:
- raise VerifyError("Unable to download piecemap: %s" % Debug.formatException(err))
-
- piece_i = int(pos / piecemap["piece_size"])
- if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
- raise VerifyError("Invalid hash")
- return True
-
- def verifyFile(self, inner_path, file, ignore_same=True):
- if "|" not in inner_path:
- return super(ContentManagerPlugin, self).verifyFile(inner_path, file, ignore_same)
-
- inner_path, file_range = inner_path.split("|")
- pos_from, pos_to = map(int, file_range.split("-"))
-
- return self.verifyPiece(inner_path, pos_from, file)
-
- def optionalDownloaded(self, inner_path, hash_id, size=None, own=False):
- if "|" in inner_path:
- inner_path, file_range = inner_path.split("|")
- pos_from, pos_to = map(int, file_range.split("-"))
- file_info = self.getFileInfo(inner_path)
-
- # Mark piece downloaded
- piece_i = int(pos_from / file_info["piece_size"])
- self.site.storage.piecefields[file_info["sha512"]][piece_i] = b"\x01"
-
- # Only add to site size on first request
- if hash_id in self.hashfield:
- size = 0
- elif size > 1024 * 1024:
- file_info = self.getFileInfo(inner_path)
- if file_info and "sha512" in file_info: # We already have the file, but not in piecefield
- sha512 = file_info["sha512"]
- if sha512 not in self.site.storage.piecefields:
- self.site.storage.checkBigfile(inner_path)
-
- return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own)
-
- def optionalRemoved(self, inner_path, hash_id, size=None):
- if size and size > 1024 * 1024:
- file_info = self.getFileInfo(inner_path)
- sha512 = file_info["sha512"]
- if sha512 in self.site.storage.piecefields:
- del self.site.storage.piecefields[sha512]
-
- # Also remove other pieces of the file from download queue
- for key in list(self.site.bad_files.keys()):
- if key.startswith(inner_path + "|"):
- del self.site.bad_files[key]
- self.site.worker_manager.removeSolvedFileTasks()
- return super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size)
-
-
-@PluginManager.registerTo("SiteStorage")
-class SiteStoragePlugin(object):
- def __init__(self, *args, **kwargs):
- super(SiteStoragePlugin, self).__init__(*args, **kwargs)
- self.piecefields = collections.defaultdict(BigfilePiecefield)
- if "piecefields" in self.site.settings.get("cache", {}):
- for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items():
- if piecefield_packed:
- self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed))
- self.site.settings["cache"]["piecefields"] = {}
-
- def createSparseFile(self, inner_path, size, sha512=None):
- file_path = self.getPath(inner_path)
-
- self.ensureDir(os.path.dirname(inner_path))
-
- f = open(file_path, 'wb')
- f.truncate(min(1024 * 1024 * 5, size)) # Only pre-allocate up to 5MB
- f.close()
- if os.name == "nt":
- startupinfo = subprocess.STARTUPINFO()
- startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
- subprocess.call(["fsutil", "sparse", "setflag", file_path], close_fds=True, startupinfo=startupinfo)
-
- if sha512 and sha512 in self.piecefields:
- self.log.debug("%s: File not exists, but has piecefield. Deleting piecefield." % inner_path)
- del self.piecefields[sha512]
-
- def write(self, inner_path, content):
- if "|" not in inner_path:
- return super(SiteStoragePlugin, self).write(inner_path, content)
-
- # Write to specific position by passing |{pos} after the filename
- inner_path, file_range = inner_path.split("|")
- pos_from, pos_to = map(int, file_range.split("-"))
- file_path = self.getPath(inner_path)
-
- # Create dir if not exist
- self.ensureDir(os.path.dirname(inner_path))
-
- if not os.path.isfile(file_path):
- file_info = self.site.content_manager.getFileInfo(inner_path)
- self.createSparseFile(inner_path, file_info["size"])
-
- # Write file
- with open(file_path, "rb+") as file:
- file.seek(pos_from)
- if hasattr(content, 'read'): # File-like object
- shutil.copyfileobj(content, file) # Write buff to disk
- else: # Simple string
- file.write(content)
- del content
- self.onUpdated(inner_path)
-
- def checkBigfile(self, inner_path):
- file_info = self.site.content_manager.getFileInfo(inner_path)
- if not file_info or (file_info and "piecemap" not in file_info): # It's not a big file
- return False
-
- self.site.settings["has_bigfile"] = True
- file_path = self.getPath(inner_path)
- sha512 = file_info["sha512"]
- piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"]))
- if os.path.isfile(file_path):
- if sha512 not in self.piecefields:
- if open(file_path, "rb").read(128) == b"\0" * 128:
- piece_data = b"\x00"
- else:
- piece_data = b"\x01"
- self.log.debug("%s: File exists, but not in piecefield. Filling piecefiled with %s * %s." % (inner_path, piece_num, piece_data))
- self.piecefields[sha512].frombytes(piece_data * piece_num)
- else:
- self.log.debug("Creating bigfile: %s" % inner_path)
- self.createSparseFile(inner_path, file_info["size"], sha512)
- self.piecefields[sha512].frombytes(b"\x00" * piece_num)
- self.log.debug("Created bigfile: %s" % inner_path)
- return True
-
- def openBigfile(self, inner_path, prebuffer=0):
- if not self.checkBigfile(inner_path):
- return False
- self.site.needFile(inner_path, blocking=False) # Download piecemap
- return BigFile(self.site, inner_path, prebuffer=prebuffer)
-
-
-class BigFile(object):
- def __init__(self, site, inner_path, prebuffer=0):
- self.site = site
- self.inner_path = inner_path
- file_path = site.storage.getPath(inner_path)
- file_info = self.site.content_manager.getFileInfo(inner_path)
- self.piece_size = file_info["piece_size"]
- self.sha512 = file_info["sha512"]
- self.size = file_info["size"]
- self.prebuffer = prebuffer
- self.read_bytes = 0
-
- self.piecefield = self.site.storage.piecefields[self.sha512]
- self.f = open(file_path, "rb+")
- self.read_lock = gevent.lock.Semaphore()
-
- def read(self, buff=64 * 1024):
- with self.read_lock:
- pos = self.f.tell()
- read_until = min(self.size, pos + buff)
- requests = []
- # Request all required blocks
- while 1:
- piece_i = int(pos / self.piece_size)
- if piece_i * self.piece_size >= read_until:
- break
- pos_from = piece_i * self.piece_size
- pos_to = pos_from + self.piece_size
- if not self.piecefield[piece_i]:
- requests.append(self.site.needFile("%s|%s-%s" % (self.inner_path, pos_from, pos_to), blocking=False, update=True, priority=10))
- pos += self.piece_size
-
- if not all(requests):
- return None
-
- # Request prebuffer
- if self.prebuffer:
- prebuffer_until = min(self.size, read_until + self.prebuffer)
- priority = 3
- while 1:
- piece_i = int(pos / self.piece_size)
- if piece_i * self.piece_size >= prebuffer_until:
- break
- pos_from = piece_i * self.piece_size
- pos_to = pos_from + self.piece_size
- if not self.piecefield[piece_i]:
- self.site.needFile("%s|%s-%s" % (self.inner_path, pos_from, pos_to), blocking=False, update=True, priority=max(0, priority))
- priority -= 1
- pos += self.piece_size
-
- gevent.joinall(requests)
- self.read_bytes += buff
-
- # Increase buffer for long reads
- if self.read_bytes > 7 * 1024 * 1024 and self.prebuffer < 5 * 1024 * 1024:
- self.site.log.debug("%s: Increasing bigfile buffer size to 5MB..." % self.inner_path)
- self.prebuffer = 5 * 1024 * 1024
-
- return self.f.read(buff)
-
- def seek(self, pos, whence=0):
- with self.read_lock:
- if whence == 2: # Relative from file end
- pos = self.size + pos # Use the real size instead of size on the disk
- whence = 0
- return self.f.seek(pos, whence)
-
- def seekable(self):
- return self.f.seekable()
-
- def tell(self):
- return self.f.tell()
-
- def close(self):
- self.f.close()
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self.close()
-
-
-@PluginManager.registerTo("WorkerManager")
-class WorkerManagerPlugin(object):
- def addTask(self, inner_path, *args, **kwargs):
- file_info = kwargs.get("file_info")
- if file_info and "piecemap" in file_info: # Bigfile
- self.site.settings["has_bigfile"] = True
-
- piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"]
- piecemap_task = None
- if not self.site.storage.isFile(piecemap_inner_path):
- # Start download piecemap
- piecemap_task = super(WorkerManagerPlugin, self).addTask(piecemap_inner_path, priority=30)
- autodownload_bigfile_size_limit = self.site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit)
- if "|" not in inner_path and self.site.isDownloadable(inner_path) and file_info["size"] / 1024 / 1024 <= autodownload_bigfile_size_limit:
- gevent.spawn_later(0.1, self.site.needFile, inner_path + "|all") # Download all pieces
-
- if "|" in inner_path:
- # Start download piece
- task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs)
-
- inner_path, file_range = inner_path.split("|")
- pos_from, pos_to = map(int, file_range.split("-"))
- task["piece_i"] = int(pos_from / file_info["piece_size"])
- task["sha512"] = file_info["sha512"]
- else:
- if inner_path in self.site.bad_files:
- del self.site.bad_files[inner_path]
- if piecemap_task:
- task = piecemap_task
- else:
- fake_evt = gevent.event.AsyncResult() # Don't download anything if no range specified
- fake_evt.set(True)
- task = {"evt": fake_evt}
-
- if not self.site.storage.isFile(inner_path):
- self.site.storage.createSparseFile(inner_path, file_info["size"], file_info["sha512"])
- piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"]))
- self.site.storage.piecefields[file_info["sha512"]].frombytes(b"\x00" * piece_num)
- else:
- task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs)
- return task
-
- def taskAddPeer(self, task, peer):
- if "piece_i" in task:
- if not peer.piecefields[task["sha512"]][task["piece_i"]]:
- if task["sha512"] not in peer.piecefields:
- gevent.spawn(peer.updatePiecefields, force=True)
- elif not task["peers"]:
- gevent.spawn(peer.updatePiecefields)
-
- return False # Deny to add peers to task if file not in piecefield
- return super(WorkerManagerPlugin, self).taskAddPeer(task, peer)
-
-
-@PluginManager.registerTo("FileRequest")
-class FileRequestPlugin(object):
- def isReadable(self, site, inner_path, file, pos):
- # Peek into file
- if file.read(10) == b"\0" * 10:
- # Looks empty, but makes sures we don't have that piece
- file_info = site.content_manager.getFileInfo(inner_path)
- if "piece_size" in file_info:
- piece_i = int(pos / file_info["piece_size"])
- if not site.storage.piecefields[file_info["sha512"]][piece_i]:
- return False
- # Seek back to position we want to read
- file.seek(pos)
- return super(FileRequestPlugin, self).isReadable(site, inner_path, file, pos)
-
- def actionGetPiecefields(self, params):
- site = self.sites.get(params["site"])
- if not site or not site.isServing(): # Site unknown or not serving
- self.response({"error": "Unknown site"})
- return False
-
- # Add peer to site if not added before
- peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True)
- if not peer.connection: # Just added
- peer.connect(self.connection) # Assign current connection to peer
-
- piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()}
- self.response({"piecefields_packed": piecefields_packed})
-
- def actionSetPiecefields(self, params):
- site = self.sites.get(params["site"])
- if not site or not site.isServing(): # Site unknown or not serving
- self.response({"error": "Unknown site"})
- self.connection.badAction(5)
- return False
-
- # Add or get peer
- peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection)
- if not peer.connection:
- peer.connect(self.connection)
-
- peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
- for sha512, piecefield_packed in params["piecefields_packed"].items():
- peer.piecefields[sha512].unpack(piecefield_packed)
- site.settings["has_bigfile"] = True
-
- self.response({"ok": "Updated"})
-
-
-@PluginManager.registerTo("Peer")
-class PeerPlugin(object):
- def __getattr__(self, key):
- if key == "piecefields":
- self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
- return self.piecefields
- elif key == "time_piecefields_updated":
- self.time_piecefields_updated = None
- return self.time_piecefields_updated
- else:
- return super(PeerPlugin, self).__getattr__(key)
-
- @util.Noparallel(ignore_args=True)
- def updatePiecefields(self, force=False):
- if self.connection and self.connection.handshake.get("rev", 0) < 2190:
- return False # Not supported
-
- # Don't update piecefield again in 1 min
- if self.time_piecefields_updated and time.time() - self.time_piecefields_updated < 60 and not force:
- return False
-
- self.time_piecefields_updated = time.time()
- res = self.request("getPiecefields", {"site": self.site.address})
- if not res or "error" in res:
- return False
-
- self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
- try:
- for sha512, piecefield_packed in res["piecefields_packed"].items():
- self.piecefields[sha512].unpack(piecefield_packed)
- except Exception as err:
- self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err))
-
- return self.piecefields
-
- def sendMyHashfield(self, *args, **kwargs):
- return super(PeerPlugin, self).sendMyHashfield(*args, **kwargs)
-
- def updateHashfield(self, *args, **kwargs):
- if self.site.settings.get("has_bigfile"):
- thread = gevent.spawn(self.updatePiecefields, *args, **kwargs)
- back = super(PeerPlugin, self).updateHashfield(*args, **kwargs)
- thread.join()
- return back
- else:
- return super(PeerPlugin, self).updateHashfield(*args, **kwargs)
-
- def getFile(self, site, inner_path, *args, **kwargs):
- if "|" in inner_path:
- inner_path, file_range = inner_path.split("|")
- pos_from, pos_to = map(int, file_range.split("-"))
- kwargs["pos_from"] = pos_from
- kwargs["pos_to"] = pos_to
- return super(PeerPlugin, self).getFile(site, inner_path, *args, **kwargs)
-
-
-@PluginManager.registerTo("Site")
-class SitePlugin(object):
- def isFileDownloadAllowed(self, inner_path, file_info):
- if "piecemap" in file_info:
- file_size_mb = file_info["size"] / 1024 / 1024
- if config.bigfile_size_limit and file_size_mb > config.bigfile_size_limit:
- self.log.debug(
- "Bigfile size %s too large: %sMB > %sMB, skipping..." %
- (inner_path, file_size_mb, config.bigfile_size_limit)
- )
- return False
-
- file_info = file_info.copy()
- file_info["size"] = file_info["piece_size"]
- return super(SitePlugin, self).isFileDownloadAllowed(inner_path, file_info)
-
- def getSettingsCache(self):
- back = super(SitePlugin, self).getSettingsCache()
- if self.storage.piecefields:
- back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()}
- return back
-
- def needFile(self, inner_path, *args, **kwargs):
- if inner_path.endswith("|all"):
- @util.Pooled(20)
- def pooledNeedBigfile(inner_path, *args, **kwargs):
- if inner_path not in self.bad_files:
- self.log.debug("Cancelled piece, skipping %s" % inner_path)
- return False
- return self.needFile(inner_path, *args, **kwargs)
-
- inner_path = inner_path.replace("|all", "")
- file_info = self.needFileInfo(inner_path)
-
- # Use default function to download non-optional file
- if "piece_size" not in file_info:
- return super(SitePlugin, self).needFile(inner_path, *args, **kwargs)
-
- file_size = file_info["size"]
- piece_size = file_info["piece_size"]
-
- piece_num = int(math.ceil(float(file_size) / piece_size))
-
- file_threads = []
-
- piecefield = self.storage.piecefields.get(file_info["sha512"])
-
- for piece_i in range(piece_num):
- piece_from = piece_i * piece_size
- piece_to = min(file_size, piece_from + piece_size)
- if not piecefield or not piecefield[piece_i]:
- inner_path_piece = "%s|%s-%s" % (inner_path, piece_from, piece_to)
- self.bad_files[inner_path_piece] = self.bad_files.get(inner_path_piece, 1)
- res = pooledNeedBigfile(inner_path_piece, blocking=False)
- if res is not True and res is not False:
- file_threads.append(res)
- gevent.joinall(file_threads)
- else:
- return super(SitePlugin, self).needFile(inner_path, *args, **kwargs)
-
-
-@PluginManager.registerTo("ConfigPlugin")
-class ConfigPlugin(object):
- def createArguments(self):
- group = self.parser.add_argument_group("Bigfile plugin")
- group.add_argument('--autodownload_bigfile_size_limit', help='Also download bigfiles smaller than this limit if help distribute option is checked', default=10, metavar="MB", type=int)
- group.add_argument('--bigfile_size_limit', help='Maximum size of downloaded big files', default=False, metavar="MB", type=int)
-
- return super(ConfigPlugin, self).createArguments()
diff --git a/plugins/Bigfile/Test/TestBigfile.py b/plugins/Bigfile/Test/TestBigfile.py
deleted file mode 100644
index 402646a6..00000000
--- a/plugins/Bigfile/Test/TestBigfile.py
+++ /dev/null
@@ -1,574 +0,0 @@
-import time
-import io
-import binascii
-
-import pytest
-import mock
-
-from Connection import ConnectionServer
-from Content.ContentManager import VerifyError
-from File import FileServer
-from File import FileRequest
-from Worker import WorkerManager
-from Peer import Peer
-from Bigfile import BigfilePiecefield, BigfilePiecefieldPacked
-from Test import Spy
-from util import Msgpack
-
-
-@pytest.mark.usefixtures("resetSettings")
-@pytest.mark.usefixtures("resetTempSettings")
-class TestBigfile:
- privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
- piece_size = 1024 * 1024
-
- def createBigfile(self, site, inner_path="data/optional.any.iso", pieces=10):
- f = site.storage.open(inner_path, "w")
- for i in range(pieces * 100):
- f.write(("Test%s" % i).ljust(10, "-") * 1000)
- f.close()
- assert site.content_manager.sign("content.json", self.privatekey)
- return inner_path
-
- def testPiecemapCreate(self, site):
- inner_path = self.createBigfile(site)
- content = site.storage.loadJson("content.json")
- assert "data/optional.any.iso" in content["files_optional"]
- file_node = content["files_optional"][inner_path]
- assert file_node["size"] == 10 * 1000 * 1000
- assert file_node["sha512"] == "47a72cde3be80b4a829e7674f72b7c6878cf6a70b0c58c6aa6c17d7e9948daf6"
- assert file_node["piecemap"] == inner_path + ".piecemap.msgpack"
-
- piecemap = Msgpack.unpack(site.storage.open(file_node["piecemap"], "rb").read())["optional.any.iso"]
- assert len(piecemap["sha512_pieces"]) == 10
- assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1]
- assert binascii.hexlify(piecemap["sha512_pieces"][0]) == b"a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
-
- def testVerifyPiece(self, site):
- inner_path = self.createBigfile(site)
-
- # Verify all 10 piece
- f = site.storage.open(inner_path, "rb")
- for i in range(10):
- piece = io.BytesIO(f.read(1024 * 1024))
- piece.seek(0)
- site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
- f.close()
-
- # Try to verify piece 0 with piece 1 hash
- with pytest.raises(VerifyError) as err:
- i = 1
- f = site.storage.open(inner_path, "rb")
- piece = io.BytesIO(f.read(1024 * 1024))
- f.close()
- site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
- assert "Invalid hash" in str(err.value)
-
- def testSparseFile(self, site):
- inner_path = "sparsefile"
-
- # Create a 100MB sparse file
- site.storage.createSparseFile(inner_path, 100 * 1024 * 1024)
-
- # Write to file beginning
- s = time.time()
- f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024)
- time_write_start = time.time() - s
-
- # Write to file end
- s = time.time()
- f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024)
- time_write_end = time.time() - s
-
- # Verify writes
- f = site.storage.open(inner_path)
- assert f.read(10) == b"hellostart"
- f.seek(99 * 1024 * 1024)
- assert f.read(8) == b"helloend"
- f.close()
-
- site.storage.delete(inner_path)
-
- # Writing to end shold not take much longer, than writing to start
- assert time_write_end <= max(0.1, time_write_start * 1.1)
-
- def testRangedFileRequest(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- file_server.sites[site.address] = site
- client = FileServer(file_server.ip, 1545)
- client.sites[site_temp.address] = site_temp
- site_temp.connection_server = client
- connection = client.getConnection(file_server.ip, 1544)
-
- # Add file_server as peer to client
- peer_file_server = site_temp.addPeer(file_server.ip, 1544)
-
- buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
-
- assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size
- assert buff.getvalue().startswith(b"Test524") # Correct data
- buff.seek(0)
- assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash
-
- connection.close()
- client.stop()
-
- def testRangedFileDownload(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Make sure the file and the piecemap in the optional hashfield
- file_info = site.content_manager.getFileInfo(inner_path)
- assert site.content_manager.hashfield.hasHash(file_info["sha512"])
-
- piecemap_hash = site.content_manager.getFileInfo(file_info["piecemap"])["sha512"]
- assert site.content_manager.hashfield.hasHash(piecemap_hash)
-
- # Init client server
- client = ConnectionServer(file_server.ip, 1545)
- site_temp.connection_server = client
- peer_client = site_temp.addPeer(file_server.ip, 1544)
-
- # Download site
- site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
-
- bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
- assert not bad_files
-
- # client_piecefield = peer_client.piecefields[file_info["sha512"]].tostring()
- # assert client_piecefield == "1" * 10
-
- # Download 5. and 10. block
-
- site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
- site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024))
-
- # Verify 0. block not downloaded
- f = site_temp.storage.open(inner_path)
- assert f.read(10) == b"\0" * 10
- # Verify 5. and 10. block downloaded
- f.seek(5 * 1024 * 1024)
- assert f.read(7) == b"Test524"
- f.seek(9 * 1024 * 1024)
- assert f.read(7) == b"943---T"
-
- # Verify hashfield
- assert set(site_temp.content_manager.hashfield) == set([18343, 43727]) # 18343: data/optional.any.iso, 43727: data/optional.any.iso.hashmap.msgpack
-
- def testOpenBigfile(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = ConnectionServer(file_server.ip, 1545)
- site_temp.connection_server = client
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site
- site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
-
- # Open virtual file
- assert not site_temp.storage.isFile(inner_path)
-
- with site_temp.storage.openBigfile(inner_path) as f:
- with Spy.Spy(FileRequest, "route") as requests:
- f.seek(5 * 1024 * 1024)
- assert f.read(7) == b"Test524"
-
- f.seek(9 * 1024 * 1024)
- assert f.read(7) == b"943---T"
-
- assert len(requests) == 4 # 1x peicemap + 1x getpiecefield + 2x for pieces
-
- assert set(site_temp.content_manager.hashfield) == set([18343, 43727])
-
- assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
- assert f.sha512 in site_temp.getSettingsCache()["piecefields"]
-
- # Test requesting already downloaded
- with Spy.Spy(FileRequest, "route") as requests:
- f.seek(5 * 1024 * 1024)
- assert f.read(7) == b"Test524"
-
- assert len(requests) == 0
-
- # Test requesting multi-block overflow reads
- with Spy.Spy(FileRequest, "route") as requests:
- f.seek(5 * 1024 * 1024) # We already have this block
- data = f.read(1024 * 1024 * 3) # Our read overflow to 6. and 7. block
- assert data.startswith(b"Test524")
- assert data.endswith(b"Test838-")
- assert b"\0" not in data # No null bytes allowed
-
- assert len(requests) == 2 # Two block download
-
- # Test out of range request
- f.seek(5 * 1024 * 1024)
- data = f.read(1024 * 1024 * 30)
- assert len(data) == 10 * 1000 * 1000 - (5 * 1024 * 1024)
-
- f.seek(30 * 1024 * 1024)
- data = f.read(1024 * 1024 * 30)
- assert len(data) == 0
-
- @pytest.mark.parametrize("piecefield_obj", [BigfilePiecefield, BigfilePiecefieldPacked])
- def testPiecefield(self, piecefield_obj, site):
- testdatas = [
- b"\x01" * 100 + b"\x00" * 900 + b"\x01" * 4000 + b"\x00" * 4999 + b"\x01",
- b"\x00\x01\x00\x01\x00\x01" * 10 + b"\x00\x01" * 90 + b"\x01\x00" * 400 + b"\x00" * 4999,
- b"\x01" * 10000,
- b"\x00" * 10000
- ]
- for testdata in testdatas:
- piecefield = piecefield_obj()
-
- piecefield.frombytes(testdata)
- assert piecefield.tobytes() == testdata
- assert piecefield[0] == testdata[0]
- assert piecefield[100] == testdata[100]
- assert piecefield[1000] == testdata[1000]
- assert piecefield[len(testdata) - 1] == testdata[len(testdata) - 1]
-
- packed = piecefield.pack()
- piecefield_new = piecefield_obj()
- piecefield_new.unpack(packed)
- assert piecefield.tobytes() == piecefield_new.tobytes()
- assert piecefield_new.tobytes() == testdata
-
- def testFileGet(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- site_temp.connection_server = FileServer(file_server.ip, 1545)
- site_temp.connection_server.sites[site_temp.address] = site_temp
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site
- site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
-
- # Download second block
- with site_temp.storage.openBigfile(inner_path) as f:
- f.seek(1024 * 1024)
- assert f.read(1024)[0:1] != b"\0"
-
- # Make sure first block not download
- with site_temp.storage.open(inner_path) as f:
- assert f.read(1024)[0:1] == b"\0"
-
- peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
-
- # Should drop error on first block request
- assert not peer2.getFile(site.address, "%s|0-%s" % (inner_path, 1024 * 1024 * 1))
-
- # Should not drop error for second block request
- assert peer2.getFile(site.address, "%s|%s-%s" % (inner_path, 1024 * 1024 * 1, 1024 * 1024 * 2))
-
- def benchmarkPeerMemory(self, site, file_server):
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- import psutil, os
- meminfo = psutil.Process(os.getpid()).memory_info
-
- mem_s = meminfo()[0]
- s = time.time()
- for i in range(25000):
- site.addPeer(file_server.ip, i)
- print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024)) # 0.082s MEM: + 6800KB
- print(list(site.peers.values())[0].piecefields)
-
- def testUpdatePiecefield(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- server1 = file_server
- server1.sites[site.address] = site
- server2 = FileServer(file_server.ip, 1545)
- server2.sites[site_temp.address] = site_temp
- site_temp.connection_server = server2
-
- # Add file_server as peer to client
- server2_peer1 = site_temp.addPeer(file_server.ip, 1544)
-
- # Testing piecefield sync
- assert len(server2_peer1.piecefields) == 0
- assert server2_peer1.updatePiecefields() # Query piecefields from peer
- assert len(server2_peer1.piecefields) > 0
-
- def testWorkerManagerPiecefieldDeny(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- server1 = file_server
- server1.sites[site.address] = site
- server2 = FileServer(file_server.ip, 1545)
- server2.sites[site_temp.address] = site_temp
- site_temp.connection_server = server2
-
- # Add file_server as peer to client
- server2_peer1 = site_temp.addPeer(file_server.ip, 1544) # Working
-
- site_temp.downloadContent("content.json", download_files=False)
- site_temp.needFile("data/optional.any.iso.piecemap.msgpack")
-
- # Add fake peers with optional files downloaded
- for i in range(5):
- fake_peer = site_temp.addPeer("127.0.1.%s" % i, 1544)
- fake_peer.hashfield = site.content_manager.hashfield
- fake_peer.has_hashfield = True
-
- with Spy.Spy(WorkerManager, "addWorker") as requests:
- site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
- site_temp.needFile("%s|%s-%s" % (inner_path, 6 * 1024 * 1024, 7 * 1024 * 1024))
-
- # It should only request parts from peer1 as the other peers does not have the requested parts in piecefields
- assert len([request[1] for request in requests if request[1] != server2_peer1]) == 0
-
- def testWorkerManagerPiecefieldDownload(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- server1 = file_server
- server1.sites[site.address] = site
- server2 = FileServer(file_server.ip, 1545)
- server2.sites[site_temp.address] = site_temp
- site_temp.connection_server = server2
- sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]
-
- # Create 10 fake peer for each piece
- for i in range(10):
- peer = Peer(file_server.ip, 1544, site_temp, server2)
- peer.piecefields[sha512][i] = b"\x01"
- peer.updateHashfield = mock.MagicMock(return_value=False)
- peer.updatePiecefields = mock.MagicMock(return_value=False)
- peer.findHashIds = mock.MagicMock(return_value={"nope": []})
- peer.hashfield = site.content_manager.hashfield
- peer.has_hashfield = True
- peer.key = "Peer:%s" % i
- site_temp.peers["Peer:%s" % i] = peer
-
- site_temp.downloadContent("content.json", download_files=False)
- site_temp.needFile("data/optional.any.iso.piecemap.msgpack")
-
- with Spy.Spy(Peer, "getFile") as requests:
- for i in range(10):
- site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024, (i + 1) * 1024 * 1024))
-
- assert len(requests) == 10
- for i in range(10):
- assert requests[i][0] == site_temp.peers["Peer:%s" % i] # Every part should be requested from piece owner peer
-
- def testDownloadStats(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = ConnectionServer(file_server.ip, 1545)
- site_temp.connection_server = client
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site
- site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
-
- # Open virtual file
- assert not site_temp.storage.isFile(inner_path)
-
- # Check size before downloads
- assert site_temp.settings["size"] < 10 * 1024 * 1024
- assert site_temp.settings["optional_downloaded"] == 0
- size_piecemap = site_temp.content_manager.getFileInfo(inner_path + ".piecemap.msgpack")["size"]
- size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"]
-
- with site_temp.storage.openBigfile(inner_path) as f:
- assert b"\0" not in f.read(1024)
- assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
-
- with site_temp.storage.openBigfile(inner_path) as f:
- # Don't count twice
- assert b"\0" not in f.read(1024)
- assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
-
- # Add second block
- assert b"\0" not in f.read(1024 * 1024)
- assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
-
- def testPrebuffer(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = ConnectionServer(file_server.ip, 1545)
- site_temp.connection_server = client
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site
- site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
-
- # Open virtual file
- assert not site_temp.storage.isFile(inner_path)
-
- with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f:
- with Spy.Spy(FileRequest, "route") as requests:
- f.seek(5 * 1024 * 1024)
- assert f.read(7) == b"Test524"
- # assert len(requests) == 3 # 1x piecemap + 1x getpiecefield + 1x for pieces
- assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2
-
- time.sleep(0.5) # Wait prebuffer download
-
- sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]
- assert site_temp.storage.piecefields[sha512].tostring() == "0000011100"
-
- # No prebuffer beyond end of the file
- f.seek(9 * 1024 * 1024)
- assert b"\0" not in f.read(7)
-
- assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0
-
- def testDownloadAllPieces(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = ConnectionServer(file_server.ip, 1545)
- site_temp.connection_server = client
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site
- site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
-
- # Open virtual file
- assert not site_temp.storage.isFile(inner_path)
-
- with Spy.Spy(FileRequest, "route") as requests:
- site_temp.needFile("%s|all" % inner_path)
-
- assert len(requests) == 12 # piecemap.msgpack, getPiecefields, 10 x piece
-
- # Don't re-download already got pieces
- with Spy.Spy(FileRequest, "route") as requests:
- site_temp.needFile("%s|all" % inner_path)
-
- assert len(requests) == 0
-
- def testFileSize(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = ConnectionServer(file_server.ip, 1545)
- site_temp.connection_server = client
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site
- site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
-
- # Open virtual file
- assert not site_temp.storage.isFile(inner_path)
-
- # Download first block
- site_temp.needFile("%s|%s-%s" % (inner_path, 0 * 1024 * 1024, 1 * 1024 * 1024))
- assert site_temp.storage.getSize(inner_path) < 1000 * 1000 * 10 # Size on the disk should be smaller than the real size
-
- site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024))
- assert site_temp.storage.getSize(inner_path) == site.storage.getSize(inner_path)
-
- def testFileRename(self, file_server, site, site_temp):
- inner_path = self.createBigfile(site)
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- site_temp.connection_server = FileServer(file_server.ip, 1545)
- site_temp.connection_server.sites[site_temp.address] = site_temp
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site
- site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
-
- with Spy.Spy(FileRequest, "route") as requests:
- site_temp.needFile("%s|%s-%s" % (inner_path, 0, 1 * self.piece_size))
-
- assert len([req for req in requests if req[1] == "streamFile"]) == 2 # 1 piece + piecemap
-
- # Rename the file
- inner_path_new = inner_path.replace(".iso", "-new.iso")
- site.storage.rename(inner_path, inner_path_new)
- site.storage.delete("data/optional.any.iso.piecemap.msgpack")
- assert site.content_manager.sign("content.json", self.privatekey, remove_missing_optional=True)
-
- files_optional = site.content_manager.contents["content.json"]["files_optional"].keys()
-
- assert "data/optional.any-new.iso.piecemap.msgpack" in files_optional
- assert "data/optional.any.iso.piecemap.msgpack" not in files_optional
- assert "data/optional.any.iso" not in files_optional
-
- with Spy.Spy(FileRequest, "route") as requests:
- site.publish()
- time.sleep(0.1)
- site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10) # Wait for download
-
- assert len([req[1] for req in requests if req[1] == "streamFile"]) == 0
-
- with site_temp.storage.openBigfile(inner_path_new, prebuffer=0) as f:
- f.read(1024)
-
- # First piece already downloaded
- assert [req for req in requests if req[1] == "streamFile"] == []
-
- # Second piece needs to be downloaded + changed piecemap
- f.seek(self.piece_size)
- f.read(1024)
- assert [req[3]["inner_path"] for req in requests if req[1] == "streamFile"] == [inner_path_new + ".piecemap.msgpack", inner_path_new]
-
- @pytest.mark.parametrize("size", [1024 * 3, 1024 * 1024 * 3, 1024 * 1024 * 30])
- def testNullFileRead(self, file_server, site, site_temp, size):
- inner_path = "data/optional.iso"
-
- f = site.storage.open(inner_path, "w")
- f.write("\0" * size)
- f.close()
- assert site.content_manager.sign("content.json", self.privatekey)
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- site_temp.connection_server = FileServer(file_server.ip, 1545)
- site_temp.connection_server.sites[site_temp.address] = site_temp
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site
- site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
-
- if "piecemap" in site.content_manager.getFileInfo(inner_path): # Bigfile
- site_temp.needFile(inner_path + "|all")
- else:
- site_temp.needFile(inner_path)
-
-
- assert site_temp.storage.getSize(inner_path) == size
diff --git a/plugins/Bigfile/Test/conftest.py b/plugins/Bigfile/Test/conftest.py
deleted file mode 100644
index 634e66e2..00000000
--- a/plugins/Bigfile/Test/conftest.py
+++ /dev/null
@@ -1 +0,0 @@
-from src.Test.conftest import *
diff --git a/plugins/Bigfile/Test/pytest.ini b/plugins/Bigfile/Test/pytest.ini
deleted file mode 100644
index d09210d1..00000000
--- a/plugins/Bigfile/Test/pytest.ini
+++ /dev/null
@@ -1,5 +0,0 @@
-[pytest]
-python_files = Test*.py
-addopts = -rsxX -v --durations=6
-markers =
- webtest: mark a test as a webtest.
\ No newline at end of file
diff --git a/plugins/Bigfile/__init__.py b/plugins/Bigfile/__init__.py
deleted file mode 100644
index cf2dcb49..00000000
--- a/plugins/Bigfile/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from . import BigfilePlugin
-from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
\ No newline at end of file
diff --git a/plugins/Chart/ChartCollector.py b/plugins/Chart/ChartCollector.py
deleted file mode 100644
index 215c603c..00000000
--- a/plugins/Chart/ChartCollector.py
+++ /dev/null
@@ -1,181 +0,0 @@
-import time
-import sys
-import collections
-import itertools
-import logging
-
-import gevent
-from util import helper
-from Config import config
-
-
-class ChartCollector(object):
- def __init__(self, db):
- self.db = db
- if config.action == "main":
- gevent.spawn_later(60 * 3, self.collector)
- self.log = logging.getLogger("ChartCollector")
- self.last_values = collections.defaultdict(dict)
-
- def setInitialLastValues(self, sites):
- # Recover last value of site bytes/sent
- for site in sites:
- self.last_values["site:" + site.address]["site_bytes_recv"] = site.settings.get("bytes_recv", 0)
- self.last_values["site:" + site.address]["site_bytes_sent"] = site.settings.get("bytes_sent", 0)
-
- def getCollectors(self):
- collectors = {}
- import main
- file_server = main.file_server
- sites = file_server.sites
- if not sites:
- return collectors
- content_db = list(sites.values())[0].content_manager.contents.db
-
- # Connection stats
- collectors["connection"] = lambda: len(file_server.connections)
- collectors["connection_in"] = (
- lambda: len([1 for connection in file_server.connections if connection.type == "in"])
- )
- collectors["connection_onion"] = (
- lambda: len([1 for connection in file_server.connections if connection.ip.endswith(".onion")])
- )
- collectors["connection_ping_avg"] = (
- lambda: round(1000 * helper.avg(
- [connection.last_ping_delay for connection in file_server.connections if connection.last_ping_delay]
- ))
- )
- collectors["connection_ping_min"] = (
- lambda: round(1000 * min(
- [connection.last_ping_delay for connection in file_server.connections if connection.last_ping_delay]
- ))
- )
- collectors["connection_rev_avg"] = (
- lambda: helper.avg(
- [connection.handshake["rev"] for connection in file_server.connections if connection.handshake]
- )
- )
-
- # Request stats
- collectors["file_bytes_recv|change"] = lambda: file_server.bytes_recv
- collectors["file_bytes_sent|change"] = lambda: file_server.bytes_sent
- collectors["request_num_recv|change"] = lambda: file_server.num_recv
- collectors["request_num_sent|change"] = lambda: file_server.num_sent
-
- # Limit
- collectors["optional_limit"] = lambda: content_db.getOptionalLimitBytes()
- collectors["optional_used"] = lambda: content_db.getOptionalUsedBytes()
- collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()])
-
- # Peers
- collectors["peer"] = lambda peers: len(peers)
- collectors["peer_onion"] = lambda peers: len([True for peer in peers if ".onion" in peer])
-
- # Size
- collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()])
- collectors["size_optional"] = lambda: sum([site.settings.get("size_optional", 0) for site in sites.values()])
- collectors["content"] = lambda: sum([len(site.content_manager.contents) for site in sites.values()])
-
- return collectors
-
- def getSiteCollectors(self):
- site_collectors = {}
-
- # Size
- site_collectors["site_size"] = lambda site: site.settings.get("size", 0)
- site_collectors["site_size_optional"] = lambda site: site.settings.get("size_optional", 0)
- site_collectors["site_optional_downloaded"] = lambda site: site.settings.get("optional_downloaded", 0)
- site_collectors["site_content"] = lambda site: len(site.content_manager.contents)
-
- # Data transfer
- site_collectors["site_bytes_recv|change"] = lambda site: site.settings.get("bytes_recv", 0)
- site_collectors["site_bytes_sent|change"] = lambda site: site.settings.get("bytes_sent", 0)
-
- # Peers
- site_collectors["site_peer"] = lambda site: len(site.peers)
- site_collectors["site_peer_onion"] = lambda site: len(
- [True for peer in site.peers.values() if peer.ip.endswith(".onion")]
- )
- site_collectors["site_peer_connected"] = lambda site: len([True for peer in site.peers.values() if peer.connection])
-
- return site_collectors
-
- def getUniquePeers(self):
- import main
- sites = main.file_server.sites
- return set(itertools.chain.from_iterable(
- [site.peers.keys() for site in sites.values()]
- ))
-
- def collectDatas(self, collectors, last_values, site=None):
- if site is None:
- peers = self.getUniquePeers()
- datas = {}
- for key, collector in collectors.items():
- try:
- if site:
- value = collector(site)
- elif key.startswith("peer"):
- value = collector(peers)
- else:
- value = collector()
- except ValueError:
- value = None
- except Exception as err:
- self.log.info("Collector %s error: %s" % (key, err))
- value = None
-
- if "|change" in key: # Store changes relative to last value
- key = key.replace("|change", "")
- last_value = last_values.get(key, 0)
- last_values[key] = value
- value = value - last_value
-
- if value is None:
- datas[key] = None
- else:
- datas[key] = round(value, 3)
- return datas
-
- def collectGlobal(self, collectors, last_values):
- now = int(time.time())
- s = time.time()
- datas = self.collectDatas(collectors, last_values["global"])
- values = []
- for key, value in datas.items():
- values.append((self.db.getTypeId(key), value, now))
- self.log.debug("Global collectors done in %.3fs" % (time.time() - s))
-
- s = time.time()
- cur = self.db.getCursor()
- cur.executemany("INSERT INTO data (type_id, value, date_added) VALUES (?, ?, ?)", values)
- self.log.debug("Global collectors inserted in %.3fs" % (time.time() - s))
-
- def collectSites(self, sites, collectors, last_values):
- now = int(time.time())
- s = time.time()
- values = []
- for address, site in list(sites.items()):
- site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site)
- for key, value in site_datas.items():
- values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now))
- time.sleep(0.001)
- self.log.debug("Site collections done in %.3fs" % (time.time() - s))
-
- s = time.time()
- cur = self.db.getCursor()
- cur.executemany("INSERT INTO data (type_id, site_id, value, date_added) VALUES (?, ?, ?, ?)", values)
- self.log.debug("Site collectors inserted in %.3fs" % (time.time() - s))
-
- def collector(self):
- collectors = self.getCollectors()
- site_collectors = self.getSiteCollectors()
- import main
- sites = main.file_server.sites
- i = 0
- while 1:
- self.collectGlobal(collectors, self.last_values)
- if i % 12 == 0: # Only collect sites data every hour
- self.collectSites(sites, site_collectors, self.last_values)
- time.sleep(60 * 5)
- i += 1
diff --git a/plugins/Chart/ChartDb.py b/plugins/Chart/ChartDb.py
deleted file mode 100644
index 66a22082..00000000
--- a/plugins/Chart/ChartDb.py
+++ /dev/null
@@ -1,133 +0,0 @@
-from Config import config
-from Db.Db import Db
-import time
-
-
-class ChartDb(Db):
- def __init__(self):
- self.version = 2
- super(ChartDb, self).__init__(self.getSchema(), "%s/chart.db" % config.data_dir)
- self.foreign_keys = True
- self.checkTables()
- self.sites = self.loadSites()
- self.types = self.loadTypes()
-
- def getSchema(self):
- schema = {}
- schema["db_name"] = "Chart"
- schema["tables"] = {}
- schema["tables"]["data"] = {
- "cols": [
- ["data_id", "INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE"],
- ["type_id", "INTEGER NOT NULL"],
- ["site_id", "INTEGER"],
- ["value", "INTEGER"],
- ["date_added", "DATETIME DEFAULT (CURRENT_TIMESTAMP)"]
- ],
- "indexes": [
- "CREATE INDEX site_id ON data (site_id)",
- "CREATE INDEX date_added ON data (date_added)"
- ],
- "schema_changed": 2
- }
- schema["tables"]["type"] = {
- "cols": [
- ["type_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"],
- ["name", "TEXT"]
- ],
- "schema_changed": 1
- }
- schema["tables"]["site"] = {
- "cols": [
- ["site_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"],
- ["address", "TEXT"]
- ],
- "schema_changed": 1
- }
- return schema
-
- def getTypeId(self, name):
- if name not in self.types:
- res = self.execute("INSERT INTO type ?", {"name": name})
- self.types[name] = res.lastrowid
-
- return self.types[name]
-
- def getSiteId(self, address):
- if address not in self.sites:
- res = self.execute("INSERT INTO site ?", {"address": address})
- self.sites[address] = res.lastrowid
-
- return self.sites[address]
-
- def loadSites(self):
- sites = {}
- for row in self.execute("SELECT * FROM site"):
- sites[row["address"]] = row["site_id"]
- return sites
-
- def loadTypes(self):
- types = {}
- for row in self.execute("SELECT * FROM type"):
- types[row["name"]] = row["type_id"]
- return types
-
- def deleteSite(self, address):
- if address in self.sites:
- site_id = self.sites[address]
- del self.sites[address]
- self.execute("DELETE FROM site WHERE ?", {"site_id": site_id})
- self.execute("DELETE FROM data WHERE ?", {"site_id": site_id})
-
- def archive(self):
- week_back = 1
- while 1:
- s = time.time()
- date_added_from = time.time() - 60 * 60 * 24 * 7 * (week_back + 1)
- date_added_to = date_added_from + 60 * 60 * 24 * 7
- res = self.execute("""
- SELECT
- MAX(date_added) AS date_added,
- SUM(value) AS value,
- GROUP_CONCAT(data_id) AS data_ids,
- type_id,
- site_id,
- COUNT(*) AS num
- FROM data
- WHERE
- site_id IS NULL AND
- date_added > :date_added_from AND
- date_added < :date_added_to
- GROUP BY strftime('%Y-%m-%d %H', date_added, 'unixepoch', 'localtime'), type_id
- """, {"date_added_from": date_added_from, "date_added_to": date_added_to})
-
- num_archived = 0
- cur = self.getCursor()
- for row in res:
- if row["num"] == 1:
- continue
- cur.execute("INSERT INTO data ?", {
- "type_id": row["type_id"],
- "site_id": row["site_id"],
- "value": row["value"],
- "date_added": row["date_added"]
- })
- cur.execute("DELETE FROM data WHERE data_id IN (%s)" % row["data_ids"])
- num_archived += row["num"]
- self.log.debug("Archived %s data from %s weeks ago in %.3fs" % (num_archived, week_back, time.time() - s))
- week_back += 1
- time.sleep(0.1)
- if num_archived == 0:
- break
- # Only keep 6 month of global stats
- self.execute(
- "DELETE FROM data WHERE site_id IS NULL AND date_added < :date_added_limit",
- {"date_added_limit": time.time() - 60 * 60 * 24 * 30 * 6 }
- )
- # Only keep 1 month of site stats
- self.execute(
- "DELETE FROM data WHERE site_id IS NOT NULL AND date_added < :date_added_limit",
- {"date_added_limit": time.time() - 60 * 60 * 24 * 30 }
- )
- if week_back > 1:
- self.execute("VACUUM")
diff --git a/plugins/Chart/ChartPlugin.py b/plugins/Chart/ChartPlugin.py
deleted file mode 100644
index 80a4d976..00000000
--- a/plugins/Chart/ChartPlugin.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import time
-import itertools
-
-import gevent
-
-from Config import config
-from util import helper
-from util.Flag import flag
-from Plugin import PluginManager
-from .ChartDb import ChartDb
-from .ChartCollector import ChartCollector
-
-if "db" not in locals().keys(): # Share on reloads
- db = ChartDb()
- gevent.spawn_later(10 * 60, db.archive)
- helper.timer(60 * 60 * 6, db.archive)
- collector = ChartCollector(db)
-
-@PluginManager.registerTo("SiteManager")
-class SiteManagerPlugin(object):
- def load(self, *args, **kwargs):
- back = super(SiteManagerPlugin, self).load(*args, **kwargs)
- collector.setInitialLastValues(self.sites.values())
- return back
-
- def delete(self, address, *args, **kwargs):
- db.deleteSite(address)
- return super(SiteManagerPlugin, self).delete(address, *args, **kwargs)
-
-@PluginManager.registerTo("UiWebsocket")
-class UiWebsocketPlugin(object):
- @flag.admin
- def actionChartDbQuery(self, to, query, params=None):
- if config.debug or config.verbose:
- s = time.time()
- rows = []
- try:
- if not query.strip().upper().startswith("SELECT"):
- raise Exception("Only SELECT query supported")
- res = db.execute(query, params)
- except Exception as err: # Response the error to client
- self.log.error("ChartDbQuery error: %s" % err)
- return {"error": str(err)}
- # Convert result to dict
- for row in res:
- rows.append(dict(row))
- if config.verbose and time.time() - s > 0.1: # Log slow query
- self.log.debug("Slow query: %s (%.3fs)" % (query, time.time() - s))
- return rows
-
- @flag.admin
- def actionChartGetPeerLocations(self, to):
- peers = {}
- for site in self.server.sites.values():
- peers.update(site.peers)
- peer_locations = self.getPeerLocations(peers)
- return peer_locations
diff --git a/plugins/Chart/__init__.py b/plugins/Chart/__init__.py
deleted file mode 100644
index 2c284609..00000000
--- a/plugins/Chart/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import ChartPlugin
\ No newline at end of file
diff --git a/plugins/Chart/plugin_info.json b/plugins/Chart/plugin_info.json
deleted file mode 100644
index 3bdaea8a..00000000
--- a/plugins/Chart/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "Chart",
- "description": "Collect and provide stats of client information.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/ContentFilter/ContentFilterPlugin.py b/plugins/ContentFilter/ContentFilterPlugin.py
deleted file mode 100644
index f2f84b49..00000000
--- a/plugins/ContentFilter/ContentFilterPlugin.py
+++ /dev/null
@@ -1,262 +0,0 @@
-import time
-import re
-import html
-import os
-
-from Plugin import PluginManager
-from Translate import Translate
-from Config import config
-from util.Flag import flag
-
-from .ContentFilterStorage import ContentFilterStorage
-
-
-plugin_dir = os.path.dirname(__file__)
-
-if "_" not in locals():
- _ = Translate(plugin_dir + "/languages/")
-
-
-@PluginManager.registerTo("SiteManager")
-class SiteManagerPlugin(object):
- def load(self, *args, **kwargs):
- global filter_storage
- super(SiteManagerPlugin, self).load(*args, **kwargs)
- filter_storage = ContentFilterStorage(site_manager=self)
-
- def add(self, address, *args, **kwargs):
- should_ignore_block = kwargs.get("ignore_block") or kwargs.get("settings")
- if should_ignore_block:
- block_details = None
- elif filter_storage.isSiteblocked(address):
- block_details = filter_storage.getSiteblockDetails(address)
- else:
- address_hashed = filter_storage.getSiteAddressHashed(address)
- if filter_storage.isSiteblocked(address_hashed):
- block_details = filter_storage.getSiteblockDetails(address_hashed)
- else:
- block_details = None
-
- if block_details:
- raise Exception("Site blocked: %s" % html.escape(block_details.get("reason", "unknown reason")))
- else:
- return super(SiteManagerPlugin, self).add(address, *args, **kwargs)
-
-
-@PluginManager.registerTo("UiWebsocket")
-class UiWebsocketPlugin(object):
- # Mute
- def cbMuteAdd(self, to, auth_address, cert_user_id, reason):
- filter_storage.file_content["mutes"][auth_address] = {
- "cert_user_id": cert_user_id, "reason": reason, "source": self.site.address, "date_added": time.time()
- }
- filter_storage.save()
- filter_storage.changeDbs(auth_address, "remove")
- self.response(to, "ok")
-
- @flag.no_multiuser
- def actionMuteAdd(self, to, auth_address, cert_user_id, reason):
- if "ADMIN" in self.getPermissions(to):
- self.cbMuteAdd(to, auth_address, cert_user_id, reason)
- else:
- self.cmd(
- "confirm",
- [_["Hide all content from %s ?"] % html.escape(cert_user_id), _["Mute"]],
- lambda res: self.cbMuteAdd(to, auth_address, cert_user_id, reason)
- )
-
- @flag.no_multiuser
- def cbMuteRemove(self, to, auth_address):
- del filter_storage.file_content["mutes"][auth_address]
- filter_storage.save()
- filter_storage.changeDbs(auth_address, "load")
- self.response(to, "ok")
-
- @flag.no_multiuser
- def actionMuteRemove(self, to, auth_address):
- if "ADMIN" in self.getPermissions(to):
- self.cbMuteRemove(to, auth_address)
- else:
- cert_user_id = html.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"])
- self.cmd(
- "confirm",
- [_["Unmute %s ?"] % cert_user_id, _["Unmute"]],
- lambda res: self.cbMuteRemove(to, auth_address)
- )
-
- @flag.admin
- def actionMuteList(self, to):
- self.response(to, filter_storage.file_content["mutes"])
-
- # Siteblock
- @flag.no_multiuser
- @flag.admin
- def actionSiteblockIgnoreAddSite(self, to, site_address):
- if site_address in filter_storage.site_manager.sites:
- return {"error": "Site already added"}
- else:
- if filter_storage.site_manager.need(site_address, ignore_block=True):
- return "ok"
- else:
- return {"error": "Invalid address"}
-
- @flag.no_multiuser
- @flag.admin
- def actionSiteblockAdd(self, to, site_address, reason=None):
- filter_storage.file_content["siteblocks"][site_address] = {"date_added": time.time(), "reason": reason}
- filter_storage.save()
- self.response(to, "ok")
-
- @flag.no_multiuser
- @flag.admin
- def actionSiteblockRemove(self, to, site_address):
- del filter_storage.file_content["siteblocks"][site_address]
- filter_storage.save()
- self.response(to, "ok")
-
- @flag.admin
- def actionSiteblockList(self, to):
- self.response(to, filter_storage.file_content["siteblocks"])
-
- @flag.admin
- def actionSiteblockGet(self, to, site_address):
- if filter_storage.isSiteblocked(site_address):
- res = filter_storage.getSiteblockDetails(site_address)
- else:
- site_address_hashed = filter_storage.getSiteAddressHashed(site_address)
- if filter_storage.isSiteblocked(site_address_hashed):
- res = filter_storage.getSiteblockDetails(site_address_hashed)
- else:
- res = {"error": "Site block not found"}
- self.response(to, res)
-
- # Include
- @flag.no_multiuser
- def actionFilterIncludeAdd(self, to, inner_path, description=None, address=None):
- if address:
- if "ADMIN" not in self.getPermissions(to):
- return self.response(to, {"error": "Forbidden: Only ADMIN sites can manage different site include"})
- site = self.server.sites[address]
- else:
- address = self.site.address
- site = self.site
-
- if "ADMIN" in self.getPermissions(to):
- self.cbFilterIncludeAdd(to, True, address, inner_path, description)
- else:
- content = site.storage.loadJson(inner_path)
- title = _["New shared global content filter: %s (%s sites, %s users)"] % (
- html.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
- )
-
- self.cmd(
- "confirm",
- [title, "Add"],
- lambda res: self.cbFilterIncludeAdd(to, res, address, inner_path, description)
- )
-
- def cbFilterIncludeAdd(self, to, res, address, inner_path, description):
- if not res:
- self.response(to, res)
- return False
-
- filter_storage.includeAdd(address, inner_path, description)
- self.response(to, "ok")
-
- @flag.no_multiuser
- def actionFilterIncludeRemove(self, to, inner_path, address=None):
- if address:
- if "ADMIN" not in self.getPermissions(to):
- return self.response(to, {"error": "Forbidden: Only ADMIN sites can manage different site include"})
- else:
- address = self.site.address
-
- key = "%s/%s" % (address, inner_path)
- if key not in filter_storage.file_content["includes"]:
- self.response(to, {"error": "Include not found"})
- filter_storage.includeRemove(address, inner_path)
- self.response(to, "ok")
-
- def actionFilterIncludeList(self, to, all_sites=False, filters=False):
- if all_sites and "ADMIN" not in self.getPermissions(to):
- return self.response(to, {"error": "Forbidden: Only ADMIN sites can list all sites includes"})
-
- back = []
- includes = filter_storage.file_content.get("includes", {}).values()
- for include in includes:
- if not all_sites and include["address"] != self.site.address:
- continue
- if filters:
- include = dict(include) # Don't modify original file_content
- include_site = filter_storage.site_manager.get(include["address"])
- if not include_site:
- continue
- content = include_site.storage.loadJson(include["inner_path"])
- include["mutes"] = content.get("mutes", {})
- include["siteblocks"] = content.get("siteblocks", {})
- back.append(include)
- self.response(to, back)
-
-
-@PluginManager.registerTo("SiteStorage")
-class SiteStoragePlugin(object):
- def updateDbFile(self, inner_path, file=None, cur=None):
- if file is not False: # File deletion always allowed
- # Find for bitcoin addresses in file path
- matches = re.findall("/(1[A-Za-z0-9]{26,35})/", inner_path)
- # Check if any of the adresses are in the mute list
- for auth_address in matches:
- if filter_storage.isMuted(auth_address):
- self.log.debug("Mute match: %s, ignoring %s" % (auth_address, inner_path))
- return False
-
- return super(SiteStoragePlugin, self).updateDbFile(inner_path, file=file, cur=cur)
-
- def onUpdated(self, inner_path, file=None):
- file_path = "%s/%s" % (self.site.address, inner_path)
- if file_path in filter_storage.file_content["includes"]:
- self.log.debug("Filter file updated: %s" % inner_path)
- filter_storage.includeUpdateAll()
- return super(SiteStoragePlugin, self).onUpdated(inner_path, file=file)
-
-
-@PluginManager.registerTo("UiRequest")
-class UiRequestPlugin(object):
- def actionWrapper(self, path, extra_headers=None):
- match = re.match(r"/(?P[A-Za-z0-9\._-]+)(?P/.*|$)", path)
- if not match:
- return False
- address = match.group("address")
-
- if self.server.site_manager.get(address): # Site already exists
- return super(UiRequestPlugin, self).actionWrapper(path, extra_headers)
-
- if self.isDomain(address):
- address = self.resolveDomain(address)
-
- if address:
- address_hashed = filter_storage.getSiteAddressHashed(address)
- else:
- address_hashed = None
-
- if filter_storage.isSiteblocked(address) or filter_storage.isSiteblocked(address_hashed):
- site = self.server.site_manager.get(config.homepage)
- if not extra_headers:
- extra_headers = {}
-
- script_nonce = self.getScriptNonce()
-
- self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce)
- return iter([super(UiRequestPlugin, self).renderWrapper(
- site, path, "uimedia/plugins/contentfilter/blocklisted.html?address=" + address,
- "Blacklisted site", extra_headers, show_loadingscreen=False, script_nonce=script_nonce
- )])
- else:
- return super(UiRequestPlugin, self).actionWrapper(path, extra_headers)
-
- def actionUiMedia(self, path, *args, **kwargs):
- if path.startswith("/uimedia/plugins/contentfilter/"):
- file_path = path.replace("/uimedia/plugins/contentfilter/", plugin_dir + "/media/")
- return self.actionFile(file_path)
- else:
- return super(UiRequestPlugin, self).actionUiMedia(path)
diff --git a/plugins/ContentFilter/ContentFilterStorage.py b/plugins/ContentFilter/ContentFilterStorage.py
deleted file mode 100644
index 289ec2a9..00000000
--- a/plugins/ContentFilter/ContentFilterStorage.py
+++ /dev/null
@@ -1,164 +0,0 @@
-import os
-import json
-import logging
-import collections
-import time
-import hashlib
-
-from Debug import Debug
-from Plugin import PluginManager
-from Config import config
-from util import helper
-
-
-class ContentFilterStorage(object):
- def __init__(self, site_manager):
- self.log = logging.getLogger("ContentFilterStorage")
- self.file_path = "%s/filters.json" % config.data_dir
- self.site_manager = site_manager
- self.file_content = self.load()
-
- # Set default values for filters.json
- if not self.file_content:
- self.file_content = {}
-
- # Site blacklist renamed to site blocks
- if "site_blacklist" in self.file_content:
- self.file_content["siteblocks"] = self.file_content["site_blacklist"]
- del self.file_content["site_blacklist"]
-
- for key in ["mutes", "siteblocks", "includes"]:
- if key not in self.file_content:
- self.file_content[key] = {}
-
- self.include_filters = collections.defaultdict(set) # Merged list of mutes and blacklists from all include
- self.includeUpdateAll(update_site_dbs=False)
-
- def load(self):
- # Rename previously used mutes.json -> filters.json
- if os.path.isfile("%s/mutes.json" % config.data_dir):
- self.log.info("Renaming mutes.json to filters.json...")
- os.rename("%s/mutes.json" % config.data_dir, self.file_path)
- if os.path.isfile(self.file_path):
- try:
- return json.load(open(self.file_path))
- except Exception as err:
- self.log.error("Error loading filters.json: %s" % err)
- return None
- else:
- return None
-
- def includeUpdateAll(self, update_site_dbs=True):
- s = time.time()
- new_include_filters = collections.defaultdict(set)
-
- # Load all include files data into a merged set
- for include_path in self.file_content["includes"]:
- address, inner_path = include_path.split("/", 1)
- try:
- content = self.site_manager.get(address).storage.loadJson(inner_path)
- except Exception as err:
- self.log.warning(
- "Error loading include %s: %s" %
- (include_path, Debug.formatException(err))
- )
- continue
-
- for key, val in content.items():
- if type(val) is not dict:
- continue
-
- new_include_filters[key].update(val.keys())
-
- mutes_added = new_include_filters["mutes"].difference(self.include_filters["mutes"])
- mutes_removed = self.include_filters["mutes"].difference(new_include_filters["mutes"])
-
- self.include_filters = new_include_filters
-
- if update_site_dbs:
- for auth_address in mutes_added:
- self.changeDbs(auth_address, "remove")
-
- for auth_address in mutes_removed:
- if not self.isMuted(auth_address):
- self.changeDbs(auth_address, "load")
-
- num_mutes = len(self.include_filters["mutes"])
- num_siteblocks = len(self.include_filters["siteblocks"])
- self.log.debug(
- "Loaded %s mutes, %s blocked sites from %s includes in %.3fs" %
- (num_mutes, num_siteblocks, len(self.file_content["includes"]), time.time() - s)
- )
-
- def includeAdd(self, address, inner_path, description=None):
- self.file_content["includes"]["%s/%s" % (address, inner_path)] = {
- "date_added": time.time(),
- "address": address,
- "description": description,
- "inner_path": inner_path
- }
- self.includeUpdateAll()
- self.save()
-
- def includeRemove(self, address, inner_path):
- del self.file_content["includes"]["%s/%s" % (address, inner_path)]
- self.includeUpdateAll()
- self.save()
-
- def save(self):
- s = time.time()
- helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True).encode("utf8"))
- self.log.debug("Saved in %.3fs" % (time.time() - s))
-
- def isMuted(self, auth_address):
- if auth_address in self.file_content["mutes"] or auth_address in self.include_filters["mutes"]:
- return True
- else:
- return False
-
- def getSiteAddressHashed(self, address):
- return "0x" + hashlib.sha256(address.encode("ascii")).hexdigest()
-
- def isSiteblocked(self, address):
- if address in self.file_content["siteblocks"] or address in self.include_filters["siteblocks"]:
- return True
- return False
-
- def getSiteblockDetails(self, address):
- details = self.file_content["siteblocks"].get(address)
- if not details:
- address_sha256 = self.getSiteAddressHashed(address)
- details = self.file_content["siteblocks"].get(address_sha256)
-
- if not details:
- includes = self.file_content.get("includes", {}).values()
- for include in includes:
- include_site = self.site_manager.get(include["address"])
- if not include_site:
- continue
- content = include_site.storage.loadJson(include["inner_path"])
- details = content.get("siteblocks", {}).get(address)
- if details:
- details["include"] = include
- break
-
- return details
-
- # Search and remove or readd files of an user
- def changeDbs(self, auth_address, action):
- self.log.debug("Mute action %s on user %s" % (action, auth_address))
- res = list(self.site_manager.list().values())[0].content_manager.contents.db.execute(
- "SELECT * FROM content LEFT JOIN site USING (site_id) WHERE inner_path LIKE :inner_path",
- {"inner_path": "%%/%s/%%" % auth_address}
- )
- for row in res:
- site = self.site_manager.sites.get(row["address"])
- if not site:
- continue
- dir_inner_path = helper.getDirname(row["inner_path"])
- for file_name in site.storage.walk(dir_inner_path):
- if action == "remove":
- site.storage.onUpdated(dir_inner_path + file_name, False)
- else:
- site.storage.onUpdated(dir_inner_path + file_name)
- site.onFileDone(dir_inner_path + file_name)
diff --git a/plugins/ContentFilter/Test/TestContentFilter.py b/plugins/ContentFilter/Test/TestContentFilter.py
deleted file mode 100644
index e1b37b16..00000000
--- a/plugins/ContentFilter/Test/TestContentFilter.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import pytest
-from ContentFilter import ContentFilterPlugin
-from Site import SiteManager
-
-
-@pytest.fixture
-def filter_storage():
- ContentFilterPlugin.filter_storage = ContentFilterPlugin.ContentFilterStorage(SiteManager.site_manager)
- return ContentFilterPlugin.filter_storage
-
-
-@pytest.mark.usefixtures("resetSettings")
-@pytest.mark.usefixtures("resetTempSettings")
-class TestContentFilter:
- def createInclude(self, site):
- site.storage.writeJson("filters.json", {
- "mutes": {"1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C": {}},
- "siteblocks": {site.address: {}}
- })
-
- def testIncludeLoad(self, site, filter_storage):
- self.createInclude(site)
- filter_storage.file_content["includes"]["%s/%s" % (site.address, "filters.json")] = {
- "date_added": 1528295893,
- }
-
- assert not filter_storage.include_filters["mutes"]
- assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
- assert not filter_storage.isSiteblocked(site.address)
- filter_storage.includeUpdateAll(update_site_dbs=False)
- assert len(filter_storage.include_filters["mutes"]) == 1
- assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
- assert filter_storage.isSiteblocked(site.address)
-
- def testIncludeAdd(self, site, filter_storage):
- self.createInclude(site)
- query_num_json = "SELECT COUNT(*) AS num FROM json WHERE directory = 'users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C'"
- assert not filter_storage.isSiteblocked(site.address)
- assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
- assert site.storage.query(query_num_json).fetchone()["num"] == 2
-
- # Add include
- filter_storage.includeAdd(site.address, "filters.json")
-
- assert filter_storage.isSiteblocked(site.address)
- assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
- assert site.storage.query(query_num_json).fetchone()["num"] == 0
-
- # Remove include
- filter_storage.includeRemove(site.address, "filters.json")
-
- assert not filter_storage.isSiteblocked(site.address)
- assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
- assert site.storage.query(query_num_json).fetchone()["num"] == 2
-
- def testIncludeChange(self, site, filter_storage):
- self.createInclude(site)
- filter_storage.includeAdd(site.address, "filters.json")
- assert filter_storage.isSiteblocked(site.address)
- assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C")
-
- # Add new blocked site
- assert not filter_storage.isSiteblocked("1Hello")
-
- filter_content = site.storage.loadJson("filters.json")
- filter_content["siteblocks"]["1Hello"] = {}
- site.storage.writeJson("filters.json", filter_content)
-
- assert filter_storage.isSiteblocked("1Hello")
-
- # Add new muted user
- query_num_json = "SELECT COUNT(*) AS num FROM json WHERE directory = 'users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q'"
- assert not filter_storage.isMuted("1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
- assert site.storage.query(query_num_json).fetchone()["num"] == 2
-
- filter_content["mutes"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"] = {}
- site.storage.writeJson("filters.json", filter_content)
-
- assert filter_storage.isMuted("1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
- assert site.storage.query(query_num_json).fetchone()["num"] == 0
-
-
diff --git a/plugins/ContentFilter/Test/conftest.py b/plugins/ContentFilter/Test/conftest.py
deleted file mode 100644
index 634e66e2..00000000
--- a/plugins/ContentFilter/Test/conftest.py
+++ /dev/null
@@ -1 +0,0 @@
-from src.Test.conftest import *
diff --git a/plugins/ContentFilter/Test/pytest.ini b/plugins/ContentFilter/Test/pytest.ini
deleted file mode 100644
index d09210d1..00000000
--- a/plugins/ContentFilter/Test/pytest.ini
+++ /dev/null
@@ -1,5 +0,0 @@
-[pytest]
-python_files = Test*.py
-addopts = -rsxX -v --durations=6
-markers =
- webtest: mark a test as a webtest.
\ No newline at end of file
diff --git a/plugins/ContentFilter/__init__.py b/plugins/ContentFilter/__init__.py
deleted file mode 100644
index 2cbca8ee..00000000
--- a/plugins/ContentFilter/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import ContentFilterPlugin
diff --git a/plugins/ContentFilter/languages/hu.json b/plugins/ContentFilter/languages/hu.json
deleted file mode 100644
index 9b57e697..00000000
--- a/plugins/ContentFilter/languages/hu.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "Hide all content from %s ?": "%s tartalmaniak elrejtése?",
- "Mute": "Elnémítás",
- "Unmute %s ?": "%s tartalmaniak megjelenítése?",
- "Unmute": "Némítás visszavonása"
-}
diff --git a/plugins/ContentFilter/languages/it.json b/plugins/ContentFilter/languages/it.json
deleted file mode 100644
index 9a2c6761..00000000
--- a/plugins/ContentFilter/languages/it.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "Hide all content from %s ?": "%s Vuoi nascondere i contenuti di questo utente ?",
- "Mute": "Attiva Silenzia",
- "Unmute %s ?": "%s Vuoi mostrare i contenuti di questo utente ?",
- "Unmute": "Disattiva Silenzia"
-}
diff --git a/plugins/ContentFilter/languages/jp.json b/plugins/ContentFilter/languages/jp.json
deleted file mode 100644
index ef586a1a..00000000
--- a/plugins/ContentFilter/languages/jp.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "Hide all content from %s ?": "%s のコンテンツをすべて隠しますか?",
- "Mute": "ミュート",
- "Unmute %s ?": "%s のミュートを解除しますか?",
- "Unmute": "ミュート解除"
-}
diff --git a/plugins/ContentFilter/languages/pt-br.json b/plugins/ContentFilter/languages/pt-br.json
deleted file mode 100644
index 3c6bfbdc..00000000
--- a/plugins/ContentFilter/languages/pt-br.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "Hide all content from %s ?": "%s Ocultar todo o conteúdo de ?",
- "Mute": "Ativar o Silêncio",
- "Unmute %s ?": "%s Você quer mostrar o conteúdo deste usuário ?",
- "Unmute": "Desligar o silêncio"
-}
diff --git a/plugins/ContentFilter/languages/zh-tw.json b/plugins/ContentFilter/languages/zh-tw.json
deleted file mode 100644
index 0995f3a0..00000000
--- a/plugins/ContentFilter/languages/zh-tw.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "Hide all content from %s ?": "屏蔽 %s 的所有內容?",
- "Mute": "屏蔽",
- "Unmute %s ?": "對 %s 解除屏蔽?",
- "Unmute": "解除屏蔽"
-}
diff --git a/plugins/ContentFilter/languages/zh.json b/plugins/ContentFilter/languages/zh.json
deleted file mode 100644
index bf63f107..00000000
--- a/plugins/ContentFilter/languages/zh.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "Hide all content from %s ?": "屏蔽 %s 的所有内容?",
- "Mute": "屏蔽",
- "Unmute %s ?": "对 %s 解除屏蔽?",
- "Unmute": "解除屏蔽"
-}
diff --git a/plugins/ContentFilter/media/blocklisted.html b/plugins/ContentFilter/media/blocklisted.html
deleted file mode 100644
index c9d201a9..00000000
--- a/plugins/ContentFilter/media/blocklisted.html
+++ /dev/null
@@ -1,89 +0,0 @@
-
-
-
-
-
-
-
Site blocked
-
This site is on your blocklist:
-
-
Too much image
-
on 2015-01-25 12:32:11
-
-
-
-
-
-
-
-
-
diff --git a/plugins/ContentFilter/media/js/ZeroFrame.js b/plugins/ContentFilter/media/js/ZeroFrame.js
deleted file mode 100644
index d6facdbf..00000000
--- a/plugins/ContentFilter/media/js/ZeroFrame.js
+++ /dev/null
@@ -1,119 +0,0 @@
-// Version 1.0.0 - Initial release
-// Version 1.1.0 (2017-08-02) - Added cmdp function that returns promise instead of using callback
-// Version 1.2.0 (2017-08-02) - Added Ajax monkey patch to emulate XMLHttpRequest over ZeroFrame API
-
-const CMD_INNER_READY = 'innerReady'
-const CMD_RESPONSE = 'response'
-const CMD_WRAPPER_READY = 'wrapperReady'
-const CMD_PING = 'ping'
-const CMD_PONG = 'pong'
-const CMD_WRAPPER_OPENED_WEBSOCKET = 'wrapperOpenedWebsocket'
-const CMD_WRAPPER_CLOSE_WEBSOCKET = 'wrapperClosedWebsocket'
-
-class ZeroFrame {
- constructor(url) {
- this.url = url
- this.waiting_cb = {}
- this.wrapper_nonce = document.location.href.replace(/.*wrapper_nonce=([A-Za-z0-9]+).*/, "$1")
- this.connect()
- this.next_message_id = 1
- this.init()
- }
-
- init() {
- return this
- }
-
- connect() {
- this.target = window.parent
- window.addEventListener('message', e => this.onMessage(e), false)
- this.cmd(CMD_INNER_READY)
- }
-
- onMessage(e) {
- let message = e.data
- let cmd = message.cmd
- if (cmd === CMD_RESPONSE) {
- if (this.waiting_cb[message.to] !== undefined) {
- this.waiting_cb[message.to](message.result)
- }
- else {
- this.log("Websocket callback not found:", message)
- }
- } else if (cmd === CMD_WRAPPER_READY) {
- this.cmd(CMD_INNER_READY)
- } else if (cmd === CMD_PING) {
- this.response(message.id, CMD_PONG)
- } else if (cmd === CMD_WRAPPER_OPENED_WEBSOCKET) {
- this.onOpenWebsocket()
- } else if (cmd === CMD_WRAPPER_CLOSE_WEBSOCKET) {
- this.onCloseWebsocket()
- } else {
- this.onRequest(cmd, message)
- }
- }
-
- onRequest(cmd, message) {
- this.log("Unknown request", message)
- }
-
- response(to, result) {
- this.send({
- cmd: CMD_RESPONSE,
- to: to,
- result: result
- })
- }
-
- cmd(cmd, params={}, cb=null) {
- this.send({
- cmd: cmd,
- params: params
- }, cb)
- }
-
- cmdp(cmd, params={}) {
- return new Promise((resolve, reject) => {
- this.cmd(cmd, params, (res) => {
- if (res && res.error) {
- reject(res.error)
- } else {
- resolve(res)
- }
- })
- })
- }
-
- send(message, cb=null) {
- message.wrapper_nonce = this.wrapper_nonce
- message.id = this.next_message_id
- this.next_message_id++
- this.target.postMessage(message, '*')
- if (cb) {
- this.waiting_cb[message.id] = cb
- }
- }
-
- log(...args) {
- console.log.apply(console, ['[ZeroFrame]'].concat(args))
- }
-
- onOpenWebsocket() {
- this.log('Websocket open')
- }
-
- onCloseWebsocket() {
- this.log('Websocket close')
- }
-
- monkeyPatchAjax() {
- var page = this
- XMLHttpRequest.prototype.realOpen = XMLHttpRequest.prototype.open
- this.cmd("wrapperGetAjaxKey", [], (res) => { this.ajax_key = res })
- var newOpen = function (method, url, async) {
- url += "?ajax_key=" + page.ajax_key
- return this.realOpen(method, url, async)
- }
- XMLHttpRequest.prototype.open = newOpen
- }
-}
diff --git a/plugins/ContentFilter/plugin_info.json b/plugins/ContentFilter/plugin_info.json
deleted file mode 100644
index f63bc984..00000000
--- a/plugins/ContentFilter/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "ContentFilter",
- "description": "Manage site and user block list.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/Cors/CorsPlugin.py b/plugins/Cors/CorsPlugin.py
deleted file mode 100644
index c9437538..00000000
--- a/plugins/Cors/CorsPlugin.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import re
-import html
-import copy
-import os
-import gevent
-
-from Plugin import PluginManager
-from Translate import Translate
-
-
-plugin_dir = os.path.dirname(__file__)
-
-if "_" not in locals():
- _ = Translate(plugin_dir + "/languages/")
-
-
-def getCorsPath(site, inner_path):
- match = re.match("^cors-([A-Za-z0-9]{26,35})/(.*)", inner_path)
- if not match:
- raise Exception("Invalid cors path: %s" % inner_path)
- cors_address = match.group(1)
- cors_inner_path = match.group(2)
-
- if not "Cors:%s" % cors_address in site.settings["permissions"]:
- raise Exception("This site has no permission to access site %s" % cors_address)
-
- return cors_address, cors_inner_path
-
-
-@PluginManager.registerTo("UiWebsocket")
-class UiWebsocketPlugin(object):
- def hasSitePermission(self, address, cmd=None):
- if super(UiWebsocketPlugin, self).hasSitePermission(address, cmd=cmd):
- return True
-
- allowed_commands = [
- "fileGet", "fileList", "dirList", "fileRules", "optionalFileInfo",
- "fileQuery", "dbQuery", "userGetSettings", "siteInfo"
- ]
- if not "Cors:%s" % address in self.site.settings["permissions"] or cmd not in allowed_commands:
- return False
- else:
- return True
-
- # Add cors support for file commands
- def corsFuncWrapper(self, func_name, to, inner_path, *args, **kwargs):
- if inner_path.startswith("cors-"):
- cors_address, cors_inner_path = getCorsPath(self.site, inner_path)
-
- req_self = copy.copy(self)
- req_self.site = self.server.sites.get(cors_address) # Change the site to the merged one
- if not req_self.site:
- return {"error": "No site found"}
-
- func = getattr(super(UiWebsocketPlugin, req_self), func_name)
- back = func(to, cors_inner_path, *args, **kwargs)
- return back
- else:
- func = getattr(super(UiWebsocketPlugin, self), func_name)
- return func(to, inner_path, *args, **kwargs)
-
- def actionFileGet(self, to, inner_path, *args, **kwargs):
- return self.corsFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs)
-
- def actionFileList(self, to, inner_path, *args, **kwargs):
- return self.corsFuncWrapper("actionFileList", to, inner_path, *args, **kwargs)
-
- def actionDirList(self, to, inner_path, *args, **kwargs):
- return self.corsFuncWrapper("actionDirList", to, inner_path, *args, **kwargs)
-
- def actionFileRules(self, to, inner_path, *args, **kwargs):
- return self.corsFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs)
-
- def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs):
- return self.corsFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs)
-
- def actionCorsPermission(self, to, address):
- if isinstance(address, list):
- addresses = address
- else:
- addresses = [address]
-
- button_title = _["Grant"]
- site_names = []
- site_addresses = []
- for address in addresses:
- site = self.server.sites.get(address)
- if site:
- site_name = site.content_manager.contents.get("content.json", {}).get("title", address)
- else:
- site_name = address
- # If at least one site is not downloaded yet, show "Grant & Add" instead
- button_title = _["Grant & Add"]
-
- if not (site and "Cors:" + address in self.permissions):
- # No site or no permission
- site_names.append(site_name)
- site_addresses.append(address)
-
- if len(site_names) == 0:
- return "ignored"
-
- self.cmd(
- "confirm",
- [_["This site requests read permission to: %s "] % ", ".join(map(html.escape, site_names)), button_title],
- lambda res: self.cbCorsPermission(to, site_addresses)
- )
-
- def cbCorsPermission(self, to, addresses):
- # Add permissions
- for address in addresses:
- permission = "Cors:" + address
- if permission not in self.site.settings["permissions"]:
- self.site.settings["permissions"].append(permission)
-
- self.site.saveSettings()
- self.site.updateWebsocket(permission_added=permission)
-
- self.response(to, "ok")
-
- for address in addresses:
- site = self.server.sites.get(address)
- if not site:
- gevent.spawn(self.server.site_manager.need, address)
-
-
-@PluginManager.registerTo("UiRequest")
-class UiRequestPlugin(object):
- # Allow to load cross origin files using /cors-address/file.jpg
- def parsePath(self, path):
- path_parts = super(UiRequestPlugin, self).parsePath(path)
- if "cors-" not in path: # Optimization
- return path_parts
- site = self.server.sites[path_parts["address"]]
- try:
- path_parts["address"], path_parts["inner_path"] = getCorsPath(site, path_parts["inner_path"])
- except Exception:
- return None
- return path_parts
diff --git a/plugins/Cors/__init__.py b/plugins/Cors/__init__.py
deleted file mode 100644
index bcaa502b..00000000
--- a/plugins/Cors/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import CorsPlugin
\ No newline at end of file
diff --git a/plugins/Cors/plugin_info.json b/plugins/Cors/plugin_info.json
deleted file mode 100644
index f8af18fa..00000000
--- a/plugins/Cors/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "Cors",
- "description": "Cross site resource read.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/CryptMessage/CryptMessage.py b/plugins/CryptMessage/CryptMessage.py
deleted file mode 100644
index 8349809c..00000000
--- a/plugins/CryptMessage/CryptMessage.py
+++ /dev/null
@@ -1,58 +0,0 @@
-import hashlib
-import base64
-import struct
-from lib import sslcrypto
-from Crypt import Crypt
-
-
-curve = sslcrypto.ecc.get_curve("secp256k1")
-
-
-def eciesEncrypt(data, pubkey, ciphername="aes-256-cbc"):
- ciphertext, key_e = curve.encrypt(
- data,
- base64.b64decode(pubkey),
- algo=ciphername,
- derivation="sha512",
- return_aes_key=True
- )
- return key_e, ciphertext
-
-
-@Crypt.thread_pool_crypt.wrap
-def eciesDecryptMulti(encrypted_datas, privatekey):
- texts = [] # Decoded texts
- for encrypted_data in encrypted_datas:
- try:
- text = eciesDecrypt(encrypted_data, privatekey).decode("utf8")
- texts.append(text)
- except Exception:
- texts.append(None)
- return texts
-
-
-def eciesDecrypt(ciphertext, privatekey):
- return curve.decrypt(base64.b64decode(ciphertext), curve.wif_to_private(privatekey.encode()), derivation="sha512")
-
-
-def decodePubkey(pubkey):
- i = 0
- curve = struct.unpack('!H', pubkey[i:i + 2])[0]
- i += 2
- tmplen = struct.unpack('!H', pubkey[i:i + 2])[0]
- i += 2
- pubkey_x = pubkey[i:i + tmplen]
- i += tmplen
- tmplen = struct.unpack('!H', pubkey[i:i + 2])[0]
- i += 2
- pubkey_y = pubkey[i:i + tmplen]
- i += tmplen
- return curve, pubkey_x, pubkey_y, i
-
-
-def split(encrypted):
- iv = encrypted[0:16]
- curve, pubkey_x, pubkey_y, i = decodePubkey(encrypted[16:])
- ciphertext = encrypted[16 + i:-32]
-
- return iv, ciphertext
diff --git a/plugins/CryptMessage/CryptMessagePlugin.py b/plugins/CryptMessage/CryptMessagePlugin.py
deleted file mode 100644
index 7c24f730..00000000
--- a/plugins/CryptMessage/CryptMessagePlugin.py
+++ /dev/null
@@ -1,225 +0,0 @@
-import base64
-import os
-
-import gevent
-
-from Plugin import PluginManager
-from Crypt import CryptBitcoin, CryptHash
-from Config import config
-import sslcrypto
-
-from . import CryptMessage
-
-curve = sslcrypto.ecc.get_curve("secp256k1")
-
-
-@PluginManager.registerTo("UiWebsocket")
-class UiWebsocketPlugin(object):
- # - Actions -
-
- # Returns user's public key unique to site
- # Return: Public key
- def actionUserPublickey(self, to, index=0):
- self.response(to, self.user.getEncryptPublickey(self.site.address, index))
-
- # Encrypt a text using the publickey or user's sites unique publickey
- # Return: Encrypted text using base64 encoding
- def actionEciesEncrypt(self, to, text, publickey=0, return_aes_key=False):
- if type(publickey) is int: # Encrypt using user's publickey
- publickey = self.user.getEncryptPublickey(self.site.address, publickey)
- aes_key, encrypted = CryptMessage.eciesEncrypt(text.encode("utf8"), publickey)
- if return_aes_key:
- self.response(to, [base64.b64encode(encrypted).decode("utf8"), base64.b64encode(aes_key).decode("utf8")])
- else:
- self.response(to, base64.b64encode(encrypted).decode("utf8"))
-
- # Decrypt a text using privatekey or the user's site unique private key
- # Return: Decrypted text or list of decrypted texts
- def actionEciesDecrypt(self, to, param, privatekey=0):
- if type(privatekey) is int: # Decrypt using user's privatekey
- privatekey = self.user.getEncryptPrivatekey(self.site.address, privatekey)
-
- if type(param) == list:
- encrypted_texts = param
- else:
- encrypted_texts = [param]
-
- texts = CryptMessage.eciesDecryptMulti(encrypted_texts, privatekey)
-
- if type(param) == list:
- self.response(to, texts)
- else:
- self.response(to, texts[0])
-
- # Encrypt a text using AES
- # Return: Iv, AES key, Encrypted text
- def actionAesEncrypt(self, to, text, key=None):
- if key:
- key = base64.b64decode(key)
- else:
- key = sslcrypto.aes.new_key()
-
- if text:
- encrypted, iv = sslcrypto.aes.encrypt(text.encode("utf8"), key)
- else:
- encrypted, iv = b"", b""
-
- res = [base64.b64encode(item).decode("utf8") for item in [key, iv, encrypted]]
- self.response(to, res)
-
- # Decrypt a text using AES
- # Return: Decrypted text
- def actionAesDecrypt(self, to, *args):
- if len(args) == 3: # Single decrypt
- encrypted_texts = [(args[0], args[1])]
- keys = [args[2]]
- else: # Batch decrypt
- encrypted_texts, keys = args
-
- texts = [] # Decoded texts
- for iv, encrypted_text in encrypted_texts:
- encrypted_text = base64.b64decode(encrypted_text)
- iv = base64.b64decode(iv)
- text = None
- for key in keys:
- try:
- decrypted = sslcrypto.aes.decrypt(encrypted_text, iv, base64.b64decode(key))
- if decrypted and decrypted.decode("utf8"): # Valid text decoded
- text = decrypted.decode("utf8")
- except Exception as err:
- pass
- texts.append(text)
-
- if len(args) == 3:
- self.response(to, texts[0])
- else:
- self.response(to, texts)
-
- # Sign data using ECDSA
- # Return: Signature
- def actionEcdsaSign(self, to, data, privatekey=None):
- if privatekey is None: # Sign using user's privatekey
- privatekey = self.user.getAuthPrivatekey(self.site.address)
-
- self.response(to, CryptBitcoin.sign(data, privatekey))
-
- # Verify data using ECDSA (address is either a address or array of addresses)
- # Return: bool
- def actionEcdsaVerify(self, to, data, address, signature):
- self.response(to, CryptBitcoin.verify(data, address, signature))
-
- # Gets the publickey of a given privatekey
- def actionEccPrivToPub(self, to, privatekey):
- self.response(to, curve.private_to_public(curve.wif_to_private(privatekey.encode())))
-
- # Gets the address of a given publickey
- def actionEccPubToAddr(self, to, publickey):
- self.response(to, curve.public_to_address(bytes.fromhex(publickey)))
-
-
-@PluginManager.registerTo("User")
-class UserPlugin(object):
- def getEncryptPrivatekey(self, address, param_index=0):
- if param_index < 0 or param_index > 1000:
- raise Exception("Param_index out of range")
-
- site_data = self.getSiteData(address)
-
- if site_data.get("cert"): # Different privatekey for different cert provider
- index = param_index + self.getAddressAuthIndex(site_data["cert"])
- else:
- index = param_index
-
- if "encrypt_privatekey_%s" % index not in site_data:
- address_index = self.getAddressAuthIndex(address)
- crypt_index = address_index + 1000 + index
- site_data["encrypt_privatekey_%s" % index] = CryptBitcoin.hdPrivatekey(self.master_seed, crypt_index)
- self.log.debug("New encrypt privatekey generated for %s:%s" % (address, index))
- return site_data["encrypt_privatekey_%s" % index]
-
- def getEncryptPublickey(self, address, param_index=0):
- if param_index < 0 or param_index > 1000:
- raise Exception("Param_index out of range")
-
- site_data = self.getSiteData(address)
-
- if site_data.get("cert"): # Different privatekey for different cert provider
- index = param_index + self.getAddressAuthIndex(site_data["cert"])
- else:
- index = param_index
-
- if "encrypt_publickey_%s" % index not in site_data:
- privatekey = self.getEncryptPrivatekey(address, param_index).encode()
- publickey = curve.private_to_public(curve.wif_to_private(privatekey) + b"\x01")
- site_data["encrypt_publickey_%s" % index] = base64.b64encode(publickey).decode("utf8")
- return site_data["encrypt_publickey_%s" % index]
-
-
-@PluginManager.registerTo("Actions")
-class ActionsPlugin:
- publickey = "A3HatibU4S6eZfIQhVs2u7GLN5G9wXa9WwlkyYIfwYaj"
- privatekey = "5JBiKFYBm94EUdbxtnuLi6cvNcPzcKymCUHBDf2B6aq19vvG3rL"
- utf8_text = '\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9p'
-
- def getBenchmarkTests(self, online=False):
- if hasattr(super(), "getBenchmarkTests"):
- tests = super().getBenchmarkTests(online)
- else:
- tests = []
-
- aes_key, encrypted = CryptMessage.eciesEncrypt(self.utf8_text.encode("utf8"), self.publickey) # Warm-up
- tests.extend([
- {"func": self.testCryptEciesEncrypt, "kwargs": {}, "num": 100, "time_standard": 1.2},
- {"func": self.testCryptEciesDecrypt, "kwargs": {}, "num": 500, "time_standard": 1.3},
- {"func": self.testCryptEciesDecryptMulti, "kwargs": {}, "num": 5, "time_standard": 0.68},
- {"func": self.testCryptAesEncrypt, "kwargs": {}, "num": 10000, "time_standard": 0.27},
- {"func": self.testCryptAesDecrypt, "kwargs": {}, "num": 10000, "time_standard": 0.25}
- ])
- return tests
-
- def testCryptEciesEncrypt(self, num_run=1):
- for i in range(num_run):
- aes_key, encrypted = CryptMessage.eciesEncrypt(self.utf8_text.encode("utf8"), self.publickey)
- assert len(aes_key) == 32
- yield "."
-
- def testCryptEciesDecrypt(self, num_run=1):
- aes_key, encrypted = CryptMessage.eciesEncrypt(self.utf8_text.encode("utf8"), self.publickey)
- for i in range(num_run):
- assert len(aes_key) == 32
- decrypted = CryptMessage.eciesDecrypt(base64.b64encode(encrypted), self.privatekey)
- assert decrypted == self.utf8_text.encode("utf8"), "%s != %s" % (decrypted, self.utf8_text.encode("utf8"))
- yield "."
-
- def testCryptEciesDecryptMulti(self, num_run=1):
- yield "x 100 (%s threads) " % config.threads_crypt
- aes_key, encrypted = CryptMessage.eciesEncrypt(self.utf8_text.encode("utf8"), self.publickey)
-
- threads = []
- for i in range(num_run):
- assert len(aes_key) == 32
- threads.append(gevent.spawn(
- CryptMessage.eciesDecryptMulti, [base64.b64encode(encrypted)] * 100, self.privatekey
- ))
-
- for thread in threads:
- res = thread.get()
- assert res[0] == self.utf8_text, "%s != %s" % (res[0], self.utf8_text)
- assert res[0] == res[-1], "%s != %s" % (res[0], res[-1])
- yield "."
- gevent.joinall(threads)
-
- def testCryptAesEncrypt(self, num_run=1):
- for i in range(num_run):
- key = os.urandom(32)
- encrypted = sslcrypto.aes.encrypt(self.utf8_text.encode("utf8"), key)
- yield "."
-
- def testCryptAesDecrypt(self, num_run=1):
- key = os.urandom(32)
- encrypted_text, iv = sslcrypto.aes.encrypt(self.utf8_text.encode("utf8"), key)
-
- for i in range(num_run):
- decrypted = sslcrypto.aes.decrypt(encrypted_text, iv, key).decode("utf8")
- assert decrypted == self.utf8_text
- yield "."
diff --git a/plugins/CryptMessage/Test/TestCrypt.py b/plugins/CryptMessage/Test/TestCrypt.py
deleted file mode 100644
index 25a077d8..00000000
--- a/plugins/CryptMessage/Test/TestCrypt.py
+++ /dev/null
@@ -1,136 +0,0 @@
-import pytest
-import base64
-from CryptMessage import CryptMessage
-
-
-@pytest.mark.usefixtures("resetSettings")
-class TestCrypt:
- publickey = "A3HatibU4S6eZfIQhVs2u7GLN5G9wXa9WwlkyYIfwYaj"
- privatekey = "5JBiKFYBm94EUdbxtnuLi6cvNcPzcKymCUHBDf2B6aq19vvG3rL"
- utf8_text = '\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9'
- ecies_encrypted_text = "R5J1RFIDOzE5bnWopvccmALKACCk/CRcd/KSE9OgExJKASyMbZ57JVSUenL2TpABMmcT+wAgr2UrOqClxpOWvIUwvwwupXnMbRTzthhIJJrTRW3sCJVaYlGEMn9DAcvbflgEkQX/MVVdLV3tWKySs1Vk8sJC/y+4pGYCrZz7vwDNEEERaqU="
-
- @pytest.mark.parametrize("text", [b"hello", '\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9'.encode("utf8")])
- @pytest.mark.parametrize("text_repeat", [1, 10, 128, 1024])
- def testEncryptEcies(self, text, text_repeat):
- text_repeated = text * text_repeat
- aes_key, encrypted = CryptMessage.eciesEncrypt(text_repeated, self.publickey)
- assert len(aes_key) == 32
- # assert len(encrypted) == 134 + int(len(text) / 16) * 16 # Not always true
-
- assert CryptMessage.eciesDecrypt(base64.b64encode(encrypted), self.privatekey) == text_repeated
-
- def testDecryptEcies(self, user):
- assert CryptMessage.eciesDecrypt(self.ecies_encrypted_text, self.privatekey) == b"hello"
-
- def testPublickey(self, ui_websocket):
- pub = ui_websocket.testAction("UserPublickey", 0)
- assert len(pub) == 44 # Compressed, b64 encoded publickey
-
- # Different pubkey for specificed index
- assert ui_websocket.testAction("UserPublickey", 1) != ui_websocket.testAction("UserPublickey", 0)
-
- # Same publickey for same index
- assert ui_websocket.testAction("UserPublickey", 2) == ui_websocket.testAction("UserPublickey", 2)
-
- # Different publickey for different cert
- site_data = ui_websocket.user.getSiteData(ui_websocket.site.address)
- site_data["cert"] = None
- pub1 = ui_websocket.testAction("UserPublickey", 0)
-
- site_data = ui_websocket.user.getSiteData(ui_websocket.site.address)
- site_data["cert"] = "zeroid.bit"
- pub2 = ui_websocket.testAction("UserPublickey", 0)
- assert pub1 != pub2
-
- def testEcies(self, ui_websocket):
- pub = ui_websocket.testAction("UserPublickey")
-
- encrypted = ui_websocket.testAction("EciesEncrypt", "hello", pub)
- assert len(encrypted) == 180
-
- # Don't allow decrypt using other privatekey index
- decrypted = ui_websocket.testAction("EciesDecrypt", encrypted, 123)
- assert decrypted != "hello"
-
- # Decrypt using correct privatekey
- decrypted = ui_websocket.testAction("EciesDecrypt", encrypted)
- assert decrypted == "hello"
-
- # Decrypt incorrect text
- decrypted = ui_websocket.testAction("EciesDecrypt", "baad")
- assert decrypted is None
-
- # Decrypt batch
- decrypted = ui_websocket.testAction("EciesDecrypt", [encrypted, "baad", encrypted])
- assert decrypted == ["hello", None, "hello"]
-
- def testEciesUtf8(self, ui_websocket):
- # Utf8 test
- ui_websocket.actionEciesEncrypt(0, self.utf8_text)
- encrypted = ui_websocket.ws.getResult()
-
- ui_websocket.actionEciesDecrypt(0, encrypted)
- assert ui_websocket.ws.getResult() == self.utf8_text
-
- def testEciesAes(self, ui_websocket):
- ui_websocket.actionEciesEncrypt(0, "hello", return_aes_key=True)
- ecies_encrypted, aes_key = ui_websocket.ws.getResult()
-
- # Decrypt using Ecies
- ui_websocket.actionEciesDecrypt(0, ecies_encrypted)
- assert ui_websocket.ws.getResult() == "hello"
-
- # Decrypt using AES
- aes_iv, aes_encrypted = CryptMessage.split(base64.b64decode(ecies_encrypted))
-
- ui_websocket.actionAesDecrypt(0, base64.b64encode(aes_iv), base64.b64encode(aes_encrypted), aes_key)
- assert ui_websocket.ws.getResult() == "hello"
-
- def testEciesAesLongpubkey(self, ui_websocket):
- privatekey = "5HwVS1bTFnveNk9EeGaRenWS1QFzLFb5kuncNbiY3RiHZrVR6ok"
-
- ecies_encrypted, aes_key = ["lWiXfEikIjw1ac3J/RaY/gLKACALRUfksc9rXYRFyKDSaxhwcSFBYCgAdIyYlY294g/6VgAf/68PYBVMD3xKH1n7Zbo+ge8b4i/XTKmCZRJvy0eutMKWckYCMVcxgIYNa/ZL1BY1kvvH7omgzg1wBraoLfdbNmVtQgdAZ9XS8PwRy6OB2Q==", "Rvlf7zsMuBFHZIGHcbT1rb4If+YTmsWDv6kGwcvSeMM="]
-
- # Decrypt using Ecies
- ui_websocket.actionEciesDecrypt(0, ecies_encrypted, privatekey)
- assert ui_websocket.ws.getResult() == "hello"
-
- # Decrypt using AES
- aes_iv, aes_encrypted = CryptMessage.split(base64.b64decode(ecies_encrypted))
-
- ui_websocket.actionAesDecrypt(0, base64.b64encode(aes_iv), base64.b64encode(aes_encrypted), aes_key)
- assert ui_websocket.ws.getResult() == "hello"
-
- def testAes(self, ui_websocket):
- ui_websocket.actionAesEncrypt(0, "hello")
- key, iv, encrypted = ui_websocket.ws.getResult()
-
- assert len(key) == 44
- assert len(iv) == 24
- assert len(encrypted) == 24
-
- # Single decrypt
- ui_websocket.actionAesDecrypt(0, iv, encrypted, key)
- assert ui_websocket.ws.getResult() == "hello"
-
- # Batch decrypt
- ui_websocket.actionAesEncrypt(0, "hello")
- key2, iv2, encrypted2 = ui_websocket.ws.getResult()
-
- assert [key, iv, encrypted] != [key2, iv2, encrypted2]
-
- # 2 correct key
- ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key])
- assert ui_websocket.ws.getResult() == ["hello", "hello", None, None]
-
- # 3 key
- ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key, key2])
- assert ui_websocket.ws.getResult() == ["hello", "hello", None, "hello"]
-
- def testAesUtf8(self, ui_websocket):
- ui_websocket.actionAesEncrypt(0, self.utf8_text)
- key, iv, encrypted = ui_websocket.ws.getResult()
-
- ui_websocket.actionAesDecrypt(0, iv, encrypted, key)
- assert ui_websocket.ws.getResult() == self.utf8_text
diff --git a/plugins/CryptMessage/Test/conftest.py b/plugins/CryptMessage/Test/conftest.py
deleted file mode 100644
index 8c1df5b2..00000000
--- a/plugins/CryptMessage/Test/conftest.py
+++ /dev/null
@@ -1 +0,0 @@
-from src.Test.conftest import *
\ No newline at end of file
diff --git a/plugins/CryptMessage/Test/pytest.ini b/plugins/CryptMessage/Test/pytest.ini
deleted file mode 100644
index d09210d1..00000000
--- a/plugins/CryptMessage/Test/pytest.ini
+++ /dev/null
@@ -1,5 +0,0 @@
-[pytest]
-python_files = Test*.py
-addopts = -rsxX -v --durations=6
-markers =
- webtest: mark a test as a webtest.
\ No newline at end of file
diff --git a/plugins/CryptMessage/__init__.py b/plugins/CryptMessage/__init__.py
deleted file mode 100644
index 6aeb4e52..00000000
--- a/plugins/CryptMessage/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import CryptMessagePlugin
\ No newline at end of file
diff --git a/plugins/CryptMessage/plugin_info.json b/plugins/CryptMessage/plugin_info.json
deleted file mode 100644
index 96dfdd89..00000000
--- a/plugins/CryptMessage/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "CryptMessage",
- "description": "Cryptographic functions of ECIES and AES data encryption/decryption.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/FilePack/FilePackPlugin.py b/plugins/FilePack/FilePackPlugin.py
deleted file mode 100644
index a095c6d4..00000000
--- a/plugins/FilePack/FilePackPlugin.py
+++ /dev/null
@@ -1,193 +0,0 @@
-import os
-import re
-
-import gevent
-
-from Plugin import PluginManager
-from Config import config
-from Debug import Debug
-
-# Keep archive open for faster reponse times for large sites
-archive_cache = {}
-
-
-def closeArchive(archive_path):
- if archive_path in archive_cache:
- del archive_cache[archive_path]
-
-
-def openArchive(archive_path, file_obj=None):
- if archive_path not in archive_cache:
- if archive_path.endswith("tar.gz"):
- import tarfile
- archive_cache[archive_path] = tarfile.open(archive_path, fileobj=file_obj, mode="r:gz")
- else:
- import zipfile
- archive_cache[archive_path] = zipfile.ZipFile(file_obj or archive_path)
- gevent.spawn_later(5, lambda: closeArchive(archive_path)) # Close after 5 sec
-
- archive = archive_cache[archive_path]
- return archive
-
-
-def openArchiveFile(archive_path, path_within, file_obj=None):
- archive = openArchive(archive_path, file_obj=file_obj)
- if archive_path.endswith(".zip"):
- return archive.open(path_within)
- else:
- return archive.extractfile(path_within)
-
-
-@PluginManager.registerTo("UiRequest")
-class UiRequestPlugin(object):
- def actionSiteMedia(self, path, **kwargs):
- if ".zip/" in path or ".tar.gz/" in path:
- file_obj = None
- path_parts = self.parsePath(path)
- file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"])
- match = re.match("^(.*\.(?:tar.gz|zip))/(.*)", file_path)
- archive_path, path_within = match.groups()
- if archive_path not in archive_cache:
- site = self.server.site_manager.get(path_parts["address"])
- if not site:
- return self.actionSiteAddPrompt(path)
- archive_inner_path = site.storage.getInnerPath(archive_path)
- if not os.path.isfile(archive_path):
- # Wait until file downloads
- result = site.needFile(archive_inner_path, priority=10)
- # Send virutal file path download finished event to remove loading screen
- site.updateWebsocket(file_done=archive_inner_path)
- if not result:
- return self.error404(archive_inner_path)
- file_obj = site.storage.openBigfile(archive_inner_path)
- if file_obj == False:
- file_obj = None
-
- header_allow_ajax = False
- if self.get.get("ajax_key"):
- requester_site = self.server.site_manager.get(path_parts["request_address"])
- if self.get["ajax_key"] == requester_site.settings["ajax_key"]:
- header_allow_ajax = True
- else:
- return self.error403("Invalid ajax_key")
-
- try:
- file = openArchiveFile(archive_path, path_within, file_obj=file_obj)
- content_type = self.getContentType(file_path)
- self.sendHeader(200, content_type=content_type, noscript=kwargs.get("header_noscript", False), allow_ajax=header_allow_ajax)
- return self.streamFile(file)
- except Exception as err:
- self.log.debug("Error opening archive file: %s" % Debug.formatException(err))
- return self.error404(path)
-
- return super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs)
-
- def streamFile(self, file):
- for i in range(100): # Read max 6MB
- try:
- block = file.read(60 * 1024)
- if block:
- yield block
- else:
- raise StopIteration
- except StopIteration:
- file.close()
- break
-
-
-@PluginManager.registerTo("SiteStorage")
-class SiteStoragePlugin(object):
- def isFile(self, inner_path):
- if ".zip/" in inner_path or ".tar.gz/" in inner_path:
- match = re.match("^(.*\.(?:tar.gz|zip))/(.*)", inner_path)
- archive_inner_path, path_within = match.groups()
- return super(SiteStoragePlugin, self).isFile(archive_inner_path)
- else:
- return super(SiteStoragePlugin, self).isFile(inner_path)
-
- def openArchive(self, inner_path):
- archive_path = self.getPath(inner_path)
- file_obj = None
- if archive_path not in archive_cache:
- if not os.path.isfile(archive_path):
- result = self.site.needFile(inner_path, priority=10)
- self.site.updateWebsocket(file_done=inner_path)
- if not result:
- raise Exception("Unable to download file")
- file_obj = self.site.storage.openBigfile(inner_path)
- if file_obj == False:
- file_obj = None
-
- try:
- archive = openArchive(archive_path, file_obj=file_obj)
- except Exception as err:
- raise Exception("Unable to download file: %s" % Debug.formatException(err))
-
- return archive
-
- def walk(self, inner_path, *args, **kwags):
- if ".zip" in inner_path or ".tar.gz" in inner_path:
- match = re.match("^(.*\.(?:tar.gz|zip))(.*)", inner_path)
- archive_inner_path, path_within = match.groups()
- archive = self.openArchive(archive_inner_path)
- path_within = path_within.lstrip("/")
-
- if archive_inner_path.endswith(".zip"):
- namelist = [name for name in archive.namelist() if not name.endswith("/")]
- else:
- namelist = [item.name for item in archive.getmembers() if not item.isdir()]
-
- namelist_relative = []
- for name in namelist:
- if not name.startswith(path_within):
- continue
- name_relative = name.replace(path_within, "", 1).rstrip("/")
- namelist_relative.append(name_relative)
-
- return namelist_relative
-
- else:
- return super(SiteStoragePlugin, self).walk(inner_path, *args, **kwags)
-
- def list(self, inner_path, *args, **kwags):
- if ".zip" in inner_path or ".tar.gz" in inner_path:
- match = re.match("^(.*\.(?:tar.gz|zip))(.*)", inner_path)
- archive_inner_path, path_within = match.groups()
- archive = self.openArchive(archive_inner_path)
- path_within = path_within.lstrip("/")
-
- if archive_inner_path.endswith(".zip"):
- namelist = [name for name in archive.namelist()]
- else:
- namelist = [item.name for item in archive.getmembers()]
-
- namelist_relative = []
- for name in namelist:
- if not name.startswith(path_within):
- continue
- name_relative = name.replace(path_within, "", 1).rstrip("/")
-
- if "/" in name_relative: # File is in sub-directory
- continue
-
- namelist_relative.append(name_relative)
- return namelist_relative
-
- else:
- return super(SiteStoragePlugin, self).list(inner_path, *args, **kwags)
-
- def read(self, inner_path, mode="rb", **kwargs):
- if ".zip/" in inner_path or ".tar.gz/" in inner_path:
- match = re.match("^(.*\.(?:tar.gz|zip))(.*)", inner_path)
- archive_inner_path, path_within = match.groups()
- archive = self.openArchive(archive_inner_path)
- path_within = path_within.lstrip("/")
-
- if archive_inner_path.endswith(".zip"):
- return archive.open(path_within).read()
- else:
- return archive.extractfile(path_within).read()
-
- else:
- return super(SiteStoragePlugin, self).read(inner_path, mode, **kwargs)
-
diff --git a/plugins/FilePack/__init__.py b/plugins/FilePack/__init__.py
deleted file mode 100644
index 660a0920..00000000
--- a/plugins/FilePack/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import FilePackPlugin
\ No newline at end of file
diff --git a/plugins/FilePack/plugin_info.json b/plugins/FilePack/plugin_info.json
deleted file mode 100644
index 42112f95..00000000
--- a/plugins/FilePack/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "FilePack",
- "description": "Transparent web access for Zip and Tar.gz files.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/MergerSite/MergerSitePlugin.py b/plugins/MergerSite/MergerSitePlugin.py
deleted file mode 100644
index 2dccc6de..00000000
--- a/plugins/MergerSite/MergerSitePlugin.py
+++ /dev/null
@@ -1,399 +0,0 @@
-import re
-import time
-import copy
-import os
-
-from Plugin import PluginManager
-from Translate import Translate
-from util import RateLimit
-from util import helper
-from util.Flag import flag
-from Debug import Debug
-try:
- import OptionalManager.UiWebsocketPlugin # To make optioanlFileInfo merger sites compatible
-except Exception:
- pass
-
-if "merger_db" not in locals().keys(): # To keep merger_sites between module reloads
- merger_db = {} # Sites that allowed to list other sites {address: [type1, type2...]}
- merged_db = {} # Sites that allowed to be merged to other sites {address: type, ...}
- merged_to_merger = {} # {address: [site1, site2, ...]} cache
- site_manager = None # Site manager for merger sites
-
-
-plugin_dir = os.path.dirname(__file__)
-
-if "_" not in locals():
- _ = Translate(plugin_dir + "/languages/")
-
-
-# Check if the site has permission to this merger site
-def checkMergerPath(address, inner_path):
- merged_match = re.match("^merged-(.*?)/([A-Za-z0-9]{26,35})/", inner_path)
- if merged_match:
- merger_type = merged_match.group(1)
- # Check if merged site is allowed to include other sites
- if merger_type in merger_db.get(address, []):
- # Check if included site allows to include
- merged_address = merged_match.group(2)
- if merged_db.get(merged_address) == merger_type:
- inner_path = re.sub("^merged-(.*?)/([A-Za-z0-9]{26,35})/", "", inner_path)
- return merged_address, inner_path
- else:
- raise Exception(
- "Merger site (%s) does not have permission for merged site: %s (%s)" %
- (merger_type, merged_address, merged_db.get(merged_address))
- )
- else:
- raise Exception("No merger (%s) permission to load: %s (%s not in %s)" % (
- address, inner_path, merger_type, merger_db.get(address, []))
- )
- else:
- raise Exception("Invalid merger path: %s" % inner_path)
-
-
-@PluginManager.registerTo("UiWebsocket")
-class UiWebsocketPlugin(object):
- # Download new site
- def actionMergerSiteAdd(self, to, addresses):
- if type(addresses) != list:
- # Single site add
- addresses = [addresses]
- # Check if the site has merger permission
- merger_types = merger_db.get(self.site.address)
- if not merger_types:
- return self.response(to, {"error": "Not a merger site"})
-
- if RateLimit.isAllowed(self.site.address + "-MergerSiteAdd", 10) and len(addresses) == 1:
- # Without confirmation if only one site address and not called in last 10 sec
- self.cbMergerSiteAdd(to, addresses)
- else:
- self.cmd(
- "confirm",
- [_["Add %s new site?"] % len(addresses), "Add"],
- lambda res: self.cbMergerSiteAdd(to, addresses)
- )
- self.response(to, "ok")
-
- # Callback of adding new site confirmation
- def cbMergerSiteAdd(self, to, addresses):
- added = 0
- for address in addresses:
- try:
- site_manager.need(address)
- added += 1
- except Exception as err:
- self.cmd("notification", ["error", _["Adding %s failed: %s"] % (address, err)])
- if added:
- self.cmd("notification", ["done", _["Added %s new site"] % added, 5000])
- RateLimit.called(self.site.address + "-MergerSiteAdd")
- site_manager.updateMergerSites()
-
- # Delete a merged site
- @flag.no_multiuser
- def actionMergerSiteDelete(self, to, address):
- site = self.server.sites.get(address)
- if not site:
- return self.response(to, {"error": "No site found: %s" % address})
-
- merger_types = merger_db.get(self.site.address)
- if not merger_types:
- return self.response(to, {"error": "Not a merger site"})
- if merged_db.get(address) not in merger_types:
- return self.response(to, {"error": "Merged type (%s) not in %s" % (merged_db.get(address), merger_types)})
-
- self.cmd("notification", ["done", _["Site deleted: %s "] % address, 5000])
- self.response(to, "ok")
-
- # Lists merged sites
- def actionMergerSiteList(self, to, query_site_info=False):
- merger_types = merger_db.get(self.site.address)
- ret = {}
- if not merger_types:
- return self.response(to, {"error": "Not a merger site"})
- for address, merged_type in merged_db.items():
- if merged_type not in merger_types:
- continue # Site not for us
- if query_site_info:
- site = self.server.sites.get(address)
- ret[address] = self.formatSiteInfo(site, create_user=False)
- else:
- ret[address] = merged_type
- self.response(to, ret)
-
- def hasSitePermission(self, address, *args, **kwargs):
- if super(UiWebsocketPlugin, self).hasSitePermission(address, *args, **kwargs):
- return True
- else:
- if self.site.address in [merger_site.address for merger_site in merged_to_merger.get(address, [])]:
- return True
- else:
- return False
-
- # Add support merger sites for file commands
- def mergerFuncWrapper(self, func_name, to, inner_path, *args, **kwargs):
- if inner_path.startswith("merged-"):
- merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path)
-
- # Set the same cert for merged site
- merger_cert = self.user.getSiteData(self.site.address).get("cert")
- if merger_cert and self.user.getSiteData(merged_address).get("cert") != merger_cert:
- self.user.setCert(merged_address, merger_cert)
-
- req_self = copy.copy(self)
- req_self.site = self.server.sites.get(merged_address) # Change the site to the merged one
-
- func = getattr(super(UiWebsocketPlugin, req_self), func_name)
- return func(to, merged_inner_path, *args, **kwargs)
- else:
- func = getattr(super(UiWebsocketPlugin, self), func_name)
- return func(to, inner_path, *args, **kwargs)
-
- def actionFileList(self, to, inner_path, *args, **kwargs):
- return self.mergerFuncWrapper("actionFileList", to, inner_path, *args, **kwargs)
-
- def actionDirList(self, to, inner_path, *args, **kwargs):
- return self.mergerFuncWrapper("actionDirList", to, inner_path, *args, **kwargs)
-
- def actionFileGet(self, to, inner_path, *args, **kwargs):
- return self.mergerFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs)
-
- def actionFileWrite(self, to, inner_path, *args, **kwargs):
- return self.mergerFuncWrapper("actionFileWrite", to, inner_path, *args, **kwargs)
-
- def actionFileDelete(self, to, inner_path, *args, **kwargs):
- return self.mergerFuncWrapper("actionFileDelete", to, inner_path, *args, **kwargs)
-
- def actionFileRules(self, to, inner_path, *args, **kwargs):
- return self.mergerFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs)
-
- def actionFileNeed(self, to, inner_path, *args, **kwargs):
- return self.mergerFuncWrapper("actionFileNeed", to, inner_path, *args, **kwargs)
-
- def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs):
- return self.mergerFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs)
-
- def actionOptionalFileDelete(self, to, inner_path, *args, **kwargs):
- return self.mergerFuncWrapper("actionOptionalFileDelete", to, inner_path, *args, **kwargs)
-
- def actionBigfileUploadInit(self, to, inner_path, *args, **kwargs):
- back = self.mergerFuncWrapper("actionBigfileUploadInit", to, inner_path, *args, **kwargs)
- if inner_path.startswith("merged-"):
- merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path)
- back["inner_path"] = "merged-%s/%s/%s" % (merged_db[merged_address], merged_address, back["inner_path"])
- return back
-
- # Add support merger sites for file commands with privatekey parameter
- def mergerFuncWrapperWithPrivatekey(self, func_name, to, privatekey, inner_path, *args, **kwargs):
- func = getattr(super(UiWebsocketPlugin, self), func_name)
- if inner_path.startswith("merged-"):
- merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path)
- merged_site = self.server.sites.get(merged_address)
-
- # Set the same cert for merged site
- merger_cert = self.user.getSiteData(self.site.address).get("cert")
- if merger_cert:
- self.user.setCert(merged_address, merger_cert)
-
- site_before = self.site # Save to be able to change it back after we ran the command
- self.site = merged_site # Change the site to the merged one
- try:
- back = func(to, privatekey, merged_inner_path, *args, **kwargs)
- finally:
- self.site = site_before # Change back to original site
- return back
- else:
- return func(to, privatekey, inner_path, *args, **kwargs)
-
- def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
- return self.mergerFuncWrapperWithPrivatekey("actionSiteSign", to, privatekey, inner_path, *args, **kwargs)
-
- def actionSitePublish(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
- return self.mergerFuncWrapperWithPrivatekey("actionSitePublish", to, privatekey, inner_path, *args, **kwargs)
-
- def actionPermissionAdd(self, to, permission):
- super(UiWebsocketPlugin, self).actionPermissionAdd(to, permission)
- if permission.startswith("Merger"):
- self.site.storage.rebuildDb()
-
- def actionPermissionDetails(self, to, permission):
- if not permission.startswith("Merger"):
- return super(UiWebsocketPlugin, self).actionPermissionDetails(to, permission)
-
- merger_type = permission.replace("Merger:", "")
- if not re.match("^[A-Za-z0-9-]+$", merger_type):
- raise Exception("Invalid merger_type: %s" % merger_type)
- merged_sites = []
- for address, merged_type in merged_db.items():
- if merged_type != merger_type:
- continue
- site = self.server.sites.get(address)
- try:
- merged_sites.append(site.content_manager.contents.get("content.json").get("title", address))
- except Exception:
- merged_sites.append(address)
-
- details = _["Read and write permissions to sites with merged type of %s "] % merger_type
- details += _["(%s sites)"] % len(merged_sites)
- details += "%s\n" % ", ".join(merged_sites)
- self.response(to, details)
-
-
-@PluginManager.registerTo("UiRequest")
-class UiRequestPlugin(object):
- # Allow to load merged site files using /merged-ZeroMe/address/file.jpg
- def parsePath(self, path):
- path_parts = super(UiRequestPlugin, self).parsePath(path)
- if "merged-" not in path: # Optimization
- return path_parts
- path_parts["address"], path_parts["inner_path"] = checkMergerPath(path_parts["address"], path_parts["inner_path"])
- return path_parts
-
-
-@PluginManager.registerTo("SiteStorage")
-class SiteStoragePlugin(object):
- # Also rebuild from merged sites
- def getDbFiles(self):
- merger_types = merger_db.get(self.site.address)
-
- # First return the site's own db files
- for item in super(SiteStoragePlugin, self).getDbFiles():
- yield item
-
- # Not a merger site, that's all
- if not merger_types:
- return
-
- merged_sites = [
- site_manager.sites[address]
- for address, merged_type in merged_db.items()
- if merged_type in merger_types
- ]
- found = 0
- for merged_site in merged_sites:
- self.log.debug("Loading merged site: %s" % merged_site)
- merged_type = merged_db[merged_site.address]
- for content_inner_path, content in merged_site.content_manager.contents.items():
- # content.json file itself
- if merged_site.storage.isFile(content_inner_path): # Missing content.json file
- merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path)
- yield merged_inner_path, merged_site.storage.getPath(content_inner_path)
- else:
- merged_site.log.error("[MISSING] %s" % content_inner_path)
- # Data files in content.json
- content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
- for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
- if not file_relative_path.endswith(".json"):
- continue # We only interesed in json files
- file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
- file_inner_path = file_inner_path.strip("/") # Strip leading /
- if merged_site.storage.isFile(file_inner_path):
- merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, file_inner_path)
- yield merged_inner_path, merged_site.storage.getPath(file_inner_path)
- else:
- merged_site.log.error("[MISSING] %s" % file_inner_path)
- found += 1
- if found % 100 == 0:
- time.sleep(0.001) # Context switch to avoid UI block
-
- # Also notice merger sites on a merged site file change
- def onUpdated(self, inner_path, file=None):
- super(SiteStoragePlugin, self).onUpdated(inner_path, file)
-
- merged_type = merged_db.get(self.site.address)
-
- for merger_site in merged_to_merger.get(self.site.address, []):
- if merger_site.address == self.site.address: # Avoid infinite loop
- continue
- virtual_path = "merged-%s/%s/%s" % (merged_type, self.site.address, inner_path)
- if inner_path.endswith(".json"):
- if file is not None:
- merger_site.storage.onUpdated(virtual_path, file=file)
- else:
- merger_site.storage.onUpdated(virtual_path, file=self.open(inner_path))
- else:
- merger_site.storage.onUpdated(virtual_path)
-
-
-@PluginManager.registerTo("Site")
-class SitePlugin(object):
- def fileDone(self, inner_path):
- super(SitePlugin, self).fileDone(inner_path)
-
- for merger_site in merged_to_merger.get(self.address, []):
- if merger_site.address == self.address:
- continue
- for ws in merger_site.websockets:
- ws.event("siteChanged", self, {"event": ["file_done", inner_path]})
-
- def fileFailed(self, inner_path):
- super(SitePlugin, self).fileFailed(inner_path)
-
- for merger_site in merged_to_merger.get(self.address, []):
- if merger_site.address == self.address:
- continue
- for ws in merger_site.websockets:
- ws.event("siteChanged", self, {"event": ["file_failed", inner_path]})
-
-
-@PluginManager.registerTo("SiteManager")
-class SiteManagerPlugin(object):
- # Update merger site for site types
- def updateMergerSites(self):
- global merger_db, merged_db, merged_to_merger, site_manager
- s = time.time()
- merger_db_new = {}
- merged_db_new = {}
- merged_to_merger_new = {}
- site_manager = self
- if not self.sites:
- return
- for site in self.sites.values():
- # Update merged sites
- try:
- merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type")
- except Exception as err:
- self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err)))
- continue
- if merged_type:
- merged_db_new[site.address] = merged_type
-
- # Update merger sites
- for permission in site.settings["permissions"]:
- if not permission.startswith("Merger:"):
- continue
- if merged_type:
- self.log.error(
- "Removing permission %s from %s: Merger and merged at the same time." %
- (permission, site.address)
- )
- site.settings["permissions"].remove(permission)
- continue
- merger_type = permission.replace("Merger:", "")
- if site.address not in merger_db_new:
- merger_db_new[site.address] = []
- merger_db_new[site.address].append(merger_type)
- site_manager.sites[site.address] = site
-
- # Update merged to merger
- if merged_type:
- for merger_site in self.sites.values():
- if "Merger:" + merged_type in merger_site.settings["permissions"]:
- if site.address not in merged_to_merger_new:
- merged_to_merger_new[site.address] = []
- merged_to_merger_new[site.address].append(merger_site)
-
- # Update globals
- merger_db = merger_db_new
- merged_db = merged_db_new
- merged_to_merger = merged_to_merger_new
-
- self.log.debug("Updated merger sites in %.3fs" % (time.time() - s))
-
- def load(self, *args, **kwags):
- super(SiteManagerPlugin, self).load(*args, **kwags)
- self.updateMergerSites()
-
- def saveDelayed(self, *args, **kwags):
- super(SiteManagerPlugin, self).saveDelayed(*args, **kwags)
- self.updateMergerSites()
diff --git a/plugins/MergerSite/__init__.py b/plugins/MergerSite/__init__.py
deleted file mode 100644
index 2cf54611..00000000
--- a/plugins/MergerSite/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import MergerSitePlugin
\ No newline at end of file
diff --git a/plugins/MergerSite/languages/es.json b/plugins/MergerSite/languages/es.json
deleted file mode 100644
index d554c3a9..00000000
--- a/plugins/MergerSite/languages/es.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "Add %s new site?": "¿Agregar %s nuevo sitio?",
- "Added %s new site": "Sitio %s agregado",
- "Site deleted: %s ": "Sitio removido: %s "
-}
diff --git a/plugins/MergerSite/languages/fr.json b/plugins/MergerSite/languages/fr.json
deleted file mode 100644
index 9d59fde9..00000000
--- a/plugins/MergerSite/languages/fr.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "Add %s new site?": "Ajouter le site %s ?",
- "Added %s new site": "Site %s ajouté",
- "Site deleted: %s ": "Site %s supprimé"
-}
diff --git a/plugins/MergerSite/languages/hu.json b/plugins/MergerSite/languages/hu.json
deleted file mode 100644
index 8e377aaa..00000000
--- a/plugins/MergerSite/languages/hu.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "Add %s new site?": "Új oldal hozzáadása: %s ?",
- "Added %s new site": "Új oldal hozzáadva: %s ",
- "Site deleted: %s ": "Oldal törölve: %s "
-}
diff --git a/plugins/MergerSite/languages/it.json b/plugins/MergerSite/languages/it.json
deleted file mode 100644
index d56c9817..00000000
--- a/plugins/MergerSite/languages/it.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "Add %s new site?": "Aggiungere %s nuovo sito ?",
- "Added %s new site": "Sito %s aggiunto",
- "Site deleted: %s ": "Sito %s eliminato"
-}
diff --git a/plugins/MergerSite/languages/jp.json b/plugins/MergerSite/languages/jp.json
deleted file mode 100644
index 7216f268..00000000
--- a/plugins/MergerSite/languages/jp.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "Add %s new site?": "サイト: %s を追加しますか?",
- "Added %s new site": "サイト: %s を追加しました",
- "Site deleted: %s ": "サイト: %s を削除しました"
-}
diff --git a/plugins/MergerSite/languages/pt-br.json b/plugins/MergerSite/languages/pt-br.json
deleted file mode 100644
index cdc298cb..00000000
--- a/plugins/MergerSite/languages/pt-br.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "Add %s new site?": "Adicionar %s novo site?",
- "Added %s new site": "Site %s adicionado",
- "Site deleted: %s ": "Site removido: %s "
-}
diff --git a/plugins/MergerSite/languages/tr.json b/plugins/MergerSite/languages/tr.json
deleted file mode 100644
index 5afb3942..00000000
--- a/plugins/MergerSite/languages/tr.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "Add %s new site?": "%s sitesi eklensin mi?",
- "Added %s new site": "%s sitesi eklendi",
- "Site deleted: %s ": "%s sitesi silindi"
-}
diff --git a/plugins/MergerSite/languages/zh-tw.json b/plugins/MergerSite/languages/zh-tw.json
deleted file mode 100644
index a0684e63..00000000
--- a/plugins/MergerSite/languages/zh-tw.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "Add %s new site?": "添加新網站: %s ?",
- "Added %s new site": "已添加到新網站:%s ",
- "Site deleted: %s ": "網站已刪除:%s "
-}
diff --git a/plugins/MergerSite/languages/zh.json b/plugins/MergerSite/languages/zh.json
deleted file mode 100644
index 127044e6..00000000
--- a/plugins/MergerSite/languages/zh.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "Add %s new site?": "添加新站点: %s ?",
- "Added %s new site": "已添加到新站点:%s ",
- "Site deleted: %s ": "站点已删除:%s "
-}
diff --git a/plugins/Newsfeed/NewsfeedPlugin.py b/plugins/Newsfeed/NewsfeedPlugin.py
deleted file mode 100644
index 3eb14d6c..00000000
--- a/plugins/Newsfeed/NewsfeedPlugin.py
+++ /dev/null
@@ -1,187 +0,0 @@
-import time
-import re
-
-from Plugin import PluginManager
-from Db.DbQuery import DbQuery
-from Debug import Debug
-from util import helper
-from util.Flag import flag
-
-
-@PluginManager.registerTo("UiWebsocket")
-class UiWebsocketPlugin(object):
- def formatSiteInfo(self, site, create_user=True):
- site_info = super(UiWebsocketPlugin, self).formatSiteInfo(site, create_user=create_user)
- feed_following = self.user.sites.get(site.address, {}).get("follow", None)
- if feed_following == None:
- site_info["feed_follow_num"] = None
- else:
- site_info["feed_follow_num"] = len(feed_following)
- return site_info
-
- def actionFeedFollow(self, to, feeds):
- self.user.setFeedFollow(self.site.address, feeds)
- self.user.save()
- self.response(to, "ok")
-
- def actionFeedListFollow(self, to):
- feeds = self.user.sites.get(self.site.address, {}).get("follow", {})
- self.response(to, feeds)
-
- @flag.admin
- def actionFeedQuery(self, to, limit=10, day_limit=3):
- from Site import SiteManager
- rows = []
- stats = []
-
- total_s = time.time()
- num_sites = 0
-
- for address, site_data in list(self.user.sites.items()):
- feeds = site_data.get("follow")
- if not feeds:
- continue
- if type(feeds) is not dict:
- self.log.debug("Invalid feed for site %s" % address)
- continue
- num_sites += 1
- for name, query_set in feeds.items():
- site = SiteManager.site_manager.get(address)
- if not site or not site.storage.has_db:
- continue
-
- s = time.time()
- try:
- query_raw, params = query_set
- query_parts = re.split(r"UNION(?:\s+ALL|)", query_raw)
- for i, query_part in enumerate(query_parts):
- db_query = DbQuery(query_part)
- if day_limit:
- where = " WHERE %s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit)
- if "WHERE" in query_part:
- query_part = re.sub("WHERE (.*?)(?=$| GROUP BY)", where+" AND (\\1)", query_part)
- else:
- query_part += where
- query_parts[i] = query_part
- query = " UNION ".join(query_parts)
-
- if ":params" in query:
- query_params = map(helper.sqlquote, params)
- query = query.replace(":params", ",".join(query_params))
-
- res = site.storage.query(query + " ORDER BY date_added DESC LIMIT %s" % limit)
-
- except Exception as err: # Log error
- self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
- stats.append({"site": site.address, "feed_name": name, "error": str(err)})
- continue
-
- for row in res:
- row = dict(row)
- if not isinstance(row["date_added"], (int, float, complex)):
- self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"]))
- continue
- if row["date_added"] > 1000000000000: # Formatted as millseconds
- row["date_added"] = row["date_added"] / 1000
- if "date_added" not in row or row["date_added"] > time.time() + 120:
- self.log.debug("Newsfeed item from the future from from site %s" % address)
- continue # Feed item is in the future, skip it
- row["site"] = address
- row["feed_name"] = name
- rows.append(row)
- stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)})
- time.sleep(0.001)
- return self.response(to, {"rows": rows, "stats": stats, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3)})
-
- def parseSearch(self, search):
- parts = re.split("(site|type):", search)
- if len(parts) > 1: # Found filter
- search_text = parts[0]
- parts = [part.strip() for part in parts]
- filters = dict(zip(parts[1::2], parts[2::2]))
- else:
- search_text = search
- filters = {}
- return [search_text, filters]
-
- def actionFeedSearch(self, to, search, limit=30, day_limit=30):
- if "ADMIN" not in self.site.settings["permissions"]:
- return self.response(to, "FeedSearch not allowed")
-
- from Site import SiteManager
- rows = []
- stats = []
- num_sites = 0
- total_s = time.time()
-
- search_text, filters = self.parseSearch(search)
-
- for address, site in SiteManager.site_manager.list().items():
- if not site.storage.has_db:
- continue
-
- if "site" in filters:
- if filters["site"].lower() not in [site.address, site.content_manager.contents["content.json"].get("title").lower()]:
- continue
-
- if site.storage.db: # Database loaded
- feeds = site.storage.db.schema.get("feeds")
- else:
- try:
- feeds = site.storage.loadJson("dbschema.json").get("feeds")
- except:
- continue
-
- if not feeds:
- continue
-
- num_sites += 1
-
- for name, query in feeds.items():
- s = time.time()
- try:
- db_query = DbQuery(query)
-
- params = []
- # Filters
- if search_text:
- db_query.wheres.append("(%s LIKE ? OR %s LIKE ?)" % (db_query.fields["body"], db_query.fields["title"]))
- search_like = "%" + search_text.replace(" ", "%") + "%"
- params.append(search_like)
- params.append(search_like)
- if filters.get("type") and filters["type"] not in query:
- continue
-
- if day_limit:
- db_query.wheres.append(
- "%s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit)
- )
-
- # Order
- db_query.parts["ORDER BY"] = "date_added DESC"
- db_query.parts["LIMIT"] = str(limit)
-
- res = site.storage.query(str(db_query), params)
- except Exception as err:
- self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
- stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query})
- continue
- for row in res:
- row = dict(row)
- if not row["date_added"] or row["date_added"] > time.time() + 120:
- continue # Feed item is in the future, skip it
- row["site"] = address
- row["feed_name"] = name
- rows.append(row)
- stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)})
- return self.response(to, {"rows": rows, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3), "stats": stats})
-
-
-@PluginManager.registerTo("User")
-class UserPlugin(object):
- # Set queries that user follows
- def setFeedFollow(self, address, feeds):
- site_data = self.getSiteData(address)
- site_data["follow"] = feeds
- self.save()
- return site_data
diff --git a/plugins/Newsfeed/__init__.py b/plugins/Newsfeed/__init__.py
deleted file mode 100644
index 6e624df6..00000000
--- a/plugins/Newsfeed/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import NewsfeedPlugin
\ No newline at end of file
diff --git a/plugins/OptionalManager/ContentDbPlugin.py b/plugins/OptionalManager/ContentDbPlugin.py
deleted file mode 100644
index f0f8a877..00000000
--- a/plugins/OptionalManager/ContentDbPlugin.py
+++ /dev/null
@@ -1,414 +0,0 @@
-import time
-import collections
-import itertools
-import re
-
-import gevent
-
-from util import helper
-from Plugin import PluginManager
-from Config import config
-from Debug import Debug
-
-if "content_db" not in locals().keys(): # To keep between module reloads
- content_db = None
-
-
-@PluginManager.registerTo("ContentDb")
-class ContentDbPlugin(object):
- def __init__(self, *args, **kwargs):
- global content_db
- content_db = self
- self.filled = {} # Site addresses that already filled from content.json
- self.need_filling = False # file_optional table just created, fill data from content.json files
- self.time_peer_numbers_updated = 0
- self.my_optional_files = {} # Last 50 site_address/inner_path called by fileWrite (auto-pinning these files)
- self.optional_files = collections.defaultdict(dict)
- self.optional_files_loaded = False
- self.timer_check_optional = helper.timer(60 * 5, self.checkOptionalLimit)
- super(ContentDbPlugin, self).__init__(*args, **kwargs)
-
- def getSchema(self):
- schema = super(ContentDbPlugin, self).getSchema()
-
- # Need file_optional table
- schema["tables"]["file_optional"] = {
- "cols": [
- ["file_id", "INTEGER PRIMARY KEY UNIQUE NOT NULL"],
- ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"],
- ["inner_path", "TEXT"],
- ["hash_id", "INTEGER"],
- ["size", "INTEGER"],
- ["peer", "INTEGER DEFAULT 0"],
- ["uploaded", "INTEGER DEFAULT 0"],
- ["is_downloaded", "INTEGER DEFAULT 0"],
- ["is_pinned", "INTEGER DEFAULT 0"],
- ["time_added", "INTEGER DEFAULT 0"],
- ["time_downloaded", "INTEGER DEFAULT 0"],
- ["time_accessed", "INTEGER DEFAULT 0"]
- ],
- "indexes": [
- "CREATE UNIQUE INDEX file_optional_key ON file_optional (site_id, inner_path)",
- "CREATE INDEX is_downloaded ON file_optional (is_downloaded)"
- ],
- "schema_changed": 11
- }
-
- return schema
-
- def initSite(self, site):
- super(ContentDbPlugin, self).initSite(site)
- if self.need_filling:
- self.fillTableFileOptional(site)
-
- def checkTables(self):
- changed_tables = super(ContentDbPlugin, self).checkTables()
- if "file_optional" in changed_tables:
- self.need_filling = True
- return changed_tables
-
- # Load optional files ending
- def loadFilesOptional(self):
- s = time.time()
- num = 0
- total = 0
- total_downloaded = 0
- res = content_db.execute("SELECT site_id, inner_path, size, is_downloaded FROM file_optional")
- site_sizes = collections.defaultdict(lambda: collections.defaultdict(int))
- for row in res:
- self.optional_files[row["site_id"]][row["inner_path"][-8:]] = 1
- num += 1
-
- # Update site size stats
- site_sizes[row["site_id"]]["size_optional"] += row["size"]
- if row["is_downloaded"]:
- site_sizes[row["site_id"]]["optional_downloaded"] += row["size"]
-
- # Site site size stats to sites.json settings
- site_ids_reverse = {val: key for key, val in self.site_ids.items()}
- for site_id, stats in site_sizes.items():
- site_address = site_ids_reverse.get(site_id)
- if not site_address or site_address not in self.sites:
- self.log.error("Not found site_id: %s" % site_id)
- continue
- site = self.sites[site_address]
- site.settings["size_optional"] = stats["size_optional"]
- site.settings["optional_downloaded"] = stats["optional_downloaded"]
- total += stats["size_optional"]
- total_downloaded += stats["optional_downloaded"]
-
- self.log.info(
- "Loaded %s optional files: %.2fMB, downloaded: %.2fMB in %.3fs" %
- (num, float(total) / 1024 / 1024, float(total_downloaded) / 1024 / 1024, time.time() - s)
- )
-
- if self.need_filling and self.getOptionalLimitBytes() >= 0 and self.getOptionalLimitBytes() < total_downloaded:
- limit_bytes = self.getOptionalLimitBytes()
- limit_new = round((float(total_downloaded) / 1024 / 1024 / 1024) * 1.1, 2) # Current limit + 10%
- self.log.info(
- "First startup after update and limit is smaller than downloaded files size (%.2fGB), increasing it from %.2fGB to %.2fGB" %
- (float(total_downloaded) / 1024 / 1024 / 1024, float(limit_bytes) / 1024 / 1024 / 1024, limit_new)
- )
- config.saveValue("optional_limit", limit_new)
- config.optional_limit = str(limit_new)
-
- # Predicts if the file is optional
- def isOptionalFile(self, site_id, inner_path):
- return self.optional_files[site_id].get(inner_path[-8:])
-
- # Fill file_optional table with optional files found in sites
- def fillTableFileOptional(self, site):
- s = time.time()
- site_id = self.site_ids.get(site.address)
- if not site_id:
- return False
- cur = self.getCursor()
- res = cur.execute("SELECT * FROM content WHERE size_files_optional > 0 AND site_id = %s" % site_id)
- num = 0
- for row in res.fetchall():
- content = site.content_manager.contents[row["inner_path"]]
- try:
- num += self.setContentFilesOptional(site, row["inner_path"], content, cur=cur)
- except Exception as err:
- self.log.error("Error loading %s into file_optional: %s" % (row["inner_path"], err))
- cur.close()
-
- # Set my files to pinned
- from User import UserManager
- user = UserManager.user_manager.get()
- if not user:
- user = UserManager.user_manager.create()
- auth_address = user.getAuthAddress(site.address)
- res = self.execute(
- "UPDATE file_optional SET is_pinned = 1 WHERE site_id = :site_id AND inner_path LIKE :inner_path",
- {"site_id": site_id, "inner_path": "%%/%s/%%" % auth_address}
- )
-
- self.log.debug(
- "Filled file_optional table for %s in %.3fs (loaded: %s, is_pinned: %s)" %
- (site.address, time.time() - s, num, res.rowcount)
- )
- self.filled[site.address] = True
-
- def setContentFilesOptional(self, site, content_inner_path, content, cur=None):
- if not cur:
- cur = self
-
- num = 0
- site_id = self.site_ids[site.address]
- content_inner_dir = helper.getDirname(content_inner_path)
- for relative_inner_path, file in content.get("files_optional", {}).items():
- file_inner_path = content_inner_dir + relative_inner_path
- hash_id = int(file["sha512"][0:4], 16)
- if hash_id in site.content_manager.hashfield:
- is_downloaded = 1
- else:
- is_downloaded = 0
- if site.address + "/" + content_inner_dir in self.my_optional_files:
- is_pinned = 1
- else:
- is_pinned = 0
- cur.insertOrUpdate("file_optional", {
- "hash_id": hash_id,
- "size": int(file["size"])
- }, {
- "site_id": site_id,
- "inner_path": file_inner_path
- }, oninsert={
- "time_added": int(time.time()),
- "time_downloaded": int(time.time()) if is_downloaded else 0,
- "is_downloaded": is_downloaded,
- "peer": is_downloaded,
- "is_pinned": is_pinned
- })
- self.optional_files[site_id][file_inner_path[-8:]] = 1
- num += 1
-
- return num
-
- def setContent(self, site, inner_path, content, size=0):
- super(ContentDbPlugin, self).setContent(site, inner_path, content, size=size)
- old_content = site.content_manager.contents.get(inner_path, {})
- if (not self.need_filling or self.filled.get(site.address)) and ("files_optional" in content or "files_optional" in old_content):
- self.setContentFilesOptional(site, inner_path, content)
- # Check deleted files
- if old_content:
- old_files = old_content.get("files_optional", {}).keys()
- new_files = content.get("files_optional", {}).keys()
- content_inner_dir = helper.getDirname(inner_path)
- deleted = [content_inner_dir + key for key in old_files if key not in new_files]
- if deleted:
- site_id = self.site_ids[site.address]
- self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": deleted})
-
- def deleteContent(self, site, inner_path):
- content = site.content_manager.contents.get(inner_path)
- if content and "files_optional" in content:
- site_id = self.site_ids[site.address]
- content_inner_dir = helper.getDirname(inner_path)
- optional_inner_paths = [
- content_inner_dir + relative_inner_path
- for relative_inner_path in content.get("files_optional", {}).keys()
- ]
- self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": optional_inner_paths})
- super(ContentDbPlugin, self).deleteContent(site, inner_path)
-
- def updatePeerNumbers(self):
- s = time.time()
- num_file = 0
- num_updated = 0
- num_site = 0
- for site in list(self.sites.values()):
- if not site.content_manager.has_optional_files:
- continue
- if not site.isServing():
- continue
- has_updated_hashfield = next((
- peer
- for peer in site.peers.values()
- if peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated
- ), None)
-
- if not has_updated_hashfield and site.content_manager.hashfield.time_changed < self.time_peer_numbers_updated:
- continue
-
- hashfield_peers = itertools.chain.from_iterable(
- peer.hashfield.storage
- for peer in site.peers.values()
- if peer.has_hashfield
- )
- peer_nums = collections.Counter(
- itertools.chain(
- hashfield_peers,
- site.content_manager.hashfield
- )
- )
-
- site_id = self.site_ids[site.address]
- if not site_id:
- continue
-
- res = self.execute("SELECT file_id, hash_id, peer FROM file_optional WHERE ?", {"site_id": site_id})
- updates = {}
- for row in res:
- peer_num = peer_nums.get(row["hash_id"], 0)
- if peer_num != row["peer"]:
- updates[row["file_id"]] = peer_num
-
- for file_id, peer_num in updates.items():
- self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id))
-
- num_updated += len(updates)
- num_file += len(peer_nums)
- num_site += 1
-
- self.time_peer_numbers_updated = time.time()
- self.log.debug("%s/%s peer number for %s site updated in %.3fs" % (num_updated, num_file, num_site, time.time() - s))
-
- def queryDeletableFiles(self):
- # First return the files with atleast 10 seeder and not accessed in last week
- query = """
- SELECT * FROM file_optional
- WHERE peer > 10 AND %s
- ORDER BY time_accessed < %s DESC, uploaded / size
- """ % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7))
- limit_start = 0
- while 1:
- num = 0
- res = self.execute("%s LIMIT %s, 50" % (query, limit_start))
- for row in res:
- yield row
- num += 1
- if num < 50:
- break
- limit_start += 50
-
- self.log.debug("queryDeletableFiles returning less-seeded files")
-
- # Then return files less seeder but still not accessed in last week
- query = """
- SELECT * FROM file_optional
- WHERE peer <= 10 AND %s
- ORDER BY peer DESC, time_accessed < %s DESC, uploaded / size
- """ % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7))
- limit_start = 0
- while 1:
- num = 0
- res = self.execute("%s LIMIT %s, 50" % (query, limit_start))
- for row in res:
- yield row
- num += 1
- if num < 50:
- break
- limit_start += 50
-
- self.log.debug("queryDeletableFiles returning everyting")
-
- # At the end return all files
- query = """
- SELECT * FROM file_optional
- WHERE peer <= 10 AND %s
- ORDER BY peer DESC, time_accessed, uploaded / size
- """ % self.getOptionalUsedWhere()
- limit_start = 0
- while 1:
- num = 0
- res = self.execute("%s LIMIT %s, 50" % (query, limit_start))
- for row in res:
- yield row
- num += 1
- if num < 50:
- break
- limit_start += 50
-
- def getOptionalLimitBytes(self):
- if config.optional_limit.endswith("%"):
- limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit))
- limit_bytes = helper.getFreeSpace() * (limit_percent / 100)
- else:
- limit_bytes = float(re.sub("[^0-9.]", "", config.optional_limit)) * 1024 * 1024 * 1024
- return limit_bytes
-
- def getOptionalUsedWhere(self):
- maxsize = config.optional_limit_exclude_minsize * 1024 * 1024
- query = "is_downloaded = 1 AND is_pinned = 0 AND size < %s" % maxsize
-
- # Don't delete optional files from owned sites
- my_site_ids = []
- for address, site in self.sites.items():
- if site.settings["own"]:
- my_site_ids.append(str(self.site_ids[address]))
-
- if my_site_ids:
- query += " AND site_id NOT IN (%s)" % ", ".join(my_site_ids)
- return query
-
- def getOptionalUsedBytes(self):
- size = self.execute("SELECT SUM(size) FROM file_optional WHERE %s" % self.getOptionalUsedWhere()).fetchone()[0]
- if not size:
- size = 0
- return size
-
- def getOptionalNeedDelete(self, size):
- if config.optional_limit.endswith("%"):
- limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit))
- need_delete = size - ((helper.getFreeSpace() + size) * (limit_percent / 100))
- else:
- need_delete = size - self.getOptionalLimitBytes()
- return need_delete
-
- def checkOptionalLimit(self, limit=None):
- if not limit:
- limit = self.getOptionalLimitBytes()
-
- if limit < 0:
- self.log.debug("Invalid limit for optional files: %s" % limit)
- return False
-
- size = self.getOptionalUsedBytes()
-
- need_delete = self.getOptionalNeedDelete(size)
-
- self.log.debug(
- "Optional size: %.1fMB/%.1fMB, Need delete: %.1fMB" %
- (float(size) / 1024 / 1024, float(limit) / 1024 / 1024, float(need_delete) / 1024 / 1024)
- )
- if need_delete <= 0:
- return False
-
- self.updatePeerNumbers()
-
- site_ids_reverse = {val: key for key, val in self.site_ids.items()}
- deleted_file_ids = []
- for row in self.queryDeletableFiles():
- site_address = site_ids_reverse.get(row["site_id"])
- site = self.sites.get(site_address)
- if not site:
- self.log.error("No site found for id: %s" % row["site_id"])
- continue
- site.log.debug("Deleting %s %.3f MB left" % (row["inner_path"], float(need_delete) / 1024 / 1024))
- deleted_file_ids.append(row["file_id"])
- try:
- site.content_manager.optionalRemoved(row["inner_path"], row["hash_id"], row["size"])
- site.storage.delete(row["inner_path"])
- need_delete -= row["size"]
- except Exception as err:
- site.log.error("Error deleting %s: %s" % (row["inner_path"], err))
-
- if need_delete <= 0:
- break
-
- cur = self.getCursor()
- for file_id in deleted_file_ids:
- cur.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"file_id": file_id})
- cur.close()
-
-
-@PluginManager.registerTo("SiteManager")
-class SiteManagerPlugin(object):
- def load(self, *args, **kwargs):
- back = super(SiteManagerPlugin, self).load(*args, **kwargs)
- if self.sites and not content_db.optional_files_loaded and content_db.conn:
- content_db.optional_files_loaded = True
- content_db.loadFilesOptional()
- return back
\ No newline at end of file
diff --git a/plugins/OptionalManager/OptionalManagerPlugin.py b/plugins/OptionalManager/OptionalManagerPlugin.py
deleted file mode 100644
index f01fab65..00000000
--- a/plugins/OptionalManager/OptionalManagerPlugin.py
+++ /dev/null
@@ -1,253 +0,0 @@
-import time
-import re
-import collections
-
-import gevent
-
-from util import helper
-from Plugin import PluginManager
-from . import ContentDbPlugin
-
-
-# We can only import plugin host clases after the plugins are loaded
-@PluginManager.afterLoad
-def importPluginnedClasses():
- global config
- from Config import config
-
-
-def processAccessLog():
- global access_log
- if access_log:
- content_db = ContentDbPlugin.content_db
- if not content_db.conn:
- return False
-
- s = time.time()
- access_log_prev = access_log
- access_log = collections.defaultdict(dict)
- now = int(time.time())
- num = 0
- for site_id in access_log_prev:
- content_db.execute(
- "UPDATE file_optional SET time_accessed = %s WHERE ?" % now,
- {"site_id": site_id, "inner_path": list(access_log_prev[site_id].keys())}
- )
- num += len(access_log_prev[site_id])
-
- content_db.log.debug("Inserted %s web request stat in %.3fs" % (num, time.time() - s))
-
-
-def processRequestLog():
- global request_log
- if request_log:
- content_db = ContentDbPlugin.content_db
- if not content_db.conn:
- return False
-
- s = time.time()
- request_log_prev = request_log
- request_log = collections.defaultdict(lambda: collections.defaultdict(int)) # {site_id: {inner_path1: 1, inner_path2: 1...}}
- num = 0
- for site_id in request_log_prev:
- for inner_path, uploaded in request_log_prev[site_id].items():
- content_db.execute(
- "UPDATE file_optional SET uploaded = uploaded + %s WHERE ?" % uploaded,
- {"site_id": site_id, "inner_path": inner_path}
- )
- num += 1
- content_db.log.debug("Inserted %s file request stat in %.3fs" % (num, time.time() - s))
-
-
-if "access_log" not in locals().keys(): # To keep between module reloads
- access_log = collections.defaultdict(dict) # {site_id: {inner_path1: 1, inner_path2: 1...}}
- request_log = collections.defaultdict(lambda: collections.defaultdict(int)) # {site_id: {inner_path1: 1, inner_path2: 1...}}
- helper.timer(61, processAccessLog)
- helper.timer(60, processRequestLog)
-
-
-@PluginManager.registerTo("ContentManager")
-class ContentManagerPlugin(object):
- def __init__(self, *args, **kwargs):
- self.cache_is_pinned = {}
- super(ContentManagerPlugin, self).__init__(*args, **kwargs)
-
- def optionalDownloaded(self, inner_path, hash_id, size=None, own=False):
- if "|" in inner_path: # Big file piece
- file_inner_path, file_range = inner_path.split("|")
- else:
- file_inner_path = inner_path
-
- self.contents.db.executeDelayed(
- "UPDATE file_optional SET time_downloaded = :now, is_downloaded = 1, peer = peer + 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 0",
- {"now": int(time.time()), "site_id": self.contents.db.site_ids[self.site.address], "inner_path": file_inner_path}
- )
-
- return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own)
-
- def optionalRemoved(self, inner_path, hash_id, size=None):
- res = self.contents.db.execute(
- "UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 1",
- {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path}
- )
-
- if res.rowcount > 0:
- back = super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size)
- # Re-add to hashfield if we have other file with the same hash_id
- if self.isDownloaded(hash_id=hash_id, force_check_db=True):
- self.hashfield.appendHashId(hash_id)
- else:
- back = False
- self.cache_is_pinned = {}
- return back
-
- def optionalRenamed(self, inner_path_old, inner_path_new):
- back = super(ContentManagerPlugin, self).optionalRenamed(inner_path_old, inner_path_new)
- self.cache_is_pinned = {}
- self.contents.db.execute(
- "UPDATE file_optional SET inner_path = :inner_path_new WHERE site_id = :site_id AND inner_path = :inner_path_old",
- {"site_id": self.contents.db.site_ids[self.site.address], "inner_path_old": inner_path_old, "inner_path_new": inner_path_new}
- )
- return back
-
- def isDownloaded(self, inner_path=None, hash_id=None, force_check_db=False):
- if hash_id and not force_check_db and hash_id not in self.hashfield:
- return False
-
- if inner_path:
- res = self.contents.db.execute(
- "SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1",
- {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path}
- )
- else:
- res = self.contents.db.execute(
- "SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND hash_id = :hash_id AND is_downloaded = 1 LIMIT 1",
- {"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id}
- )
- row = res.fetchone()
- if row and row["is_downloaded"]:
- return True
- else:
- return False
-
- def isPinned(self, inner_path):
- if inner_path in self.cache_is_pinned:
- self.site.log.debug("Cached is pinned: %s" % inner_path)
- return self.cache_is_pinned[inner_path]
-
- res = self.contents.db.execute(
- "SELECT is_pinned FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1",
- {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path}
- )
- row = res.fetchone()
-
- if row and row[0]:
- is_pinned = True
- else:
- is_pinned = False
-
- self.cache_is_pinned[inner_path] = is_pinned
- self.site.log.debug("Cache set is pinned: %s %s" % (inner_path, is_pinned))
-
- return is_pinned
-
- def setPin(self, inner_path, is_pinned):
- content_db = self.contents.db
- site_id = content_db.site_ids[self.site.address]
- content_db.execute("UPDATE file_optional SET is_pinned = %d WHERE ?" % is_pinned, {"site_id": site_id, "inner_path": inner_path})
- self.cache_is_pinned = {}
-
- def optionalDelete(self, inner_path):
- if self.isPinned(inner_path):
- self.site.log.debug("Skip deleting pinned optional file: %s" % inner_path)
- return False
- else:
- return super(ContentManagerPlugin, self).optionalDelete(inner_path)
-
-
-@PluginManager.registerTo("WorkerManager")
-class WorkerManagerPlugin(object):
- def doneTask(self, task):
- super(WorkerManagerPlugin, self).doneTask(task)
-
- if task["optional_hash_id"] and not self.tasks: # Execute delayed queries immedietly after tasks finished
- ContentDbPlugin.content_db.processDelayed()
-
-
-@PluginManager.registerTo("UiRequest")
-class UiRequestPlugin(object):
- def parsePath(self, path):
- global access_log
- path_parts = super(UiRequestPlugin, self).parsePath(path)
- if path_parts:
- site_id = ContentDbPlugin.content_db.site_ids.get(path_parts["request_address"])
- if site_id:
- if ContentDbPlugin.content_db.isOptionalFile(site_id, path_parts["inner_path"]):
- access_log[site_id][path_parts["inner_path"]] = 1
- return path_parts
-
-
-@PluginManager.registerTo("FileRequest")
-class FileRequestPlugin(object):
- def actionGetFile(self, params):
- stats = super(FileRequestPlugin, self).actionGetFile(params)
- self.recordFileRequest(params["site"], params["inner_path"], stats)
- return stats
-
- def actionStreamFile(self, params):
- stats = super(FileRequestPlugin, self).actionStreamFile(params)
- self.recordFileRequest(params["site"], params["inner_path"], stats)
- return stats
-
- def recordFileRequest(self, site_address, inner_path, stats):
- if not stats:
- # Only track the last request of files
- return False
- site_id = ContentDbPlugin.content_db.site_ids[site_address]
- if site_id and ContentDbPlugin.content_db.isOptionalFile(site_id, inner_path):
- request_log[site_id][inner_path] += stats["bytes_sent"]
-
-
-@PluginManager.registerTo("Site")
-class SitePlugin(object):
- def isDownloadable(self, inner_path):
- is_downloadable = super(SitePlugin, self).isDownloadable(inner_path)
- if is_downloadable:
- return is_downloadable
-
- for path in self.settings.get("optional_help", {}).keys():
- if inner_path.startswith(path):
- return True
-
- return False
-
- def fileForgot(self, inner_path):
- if "|" in inner_path and self.content_manager.isPinned(re.sub(r"\|.*", "", inner_path)):
- self.log.debug("File %s is pinned, no fileForgot" % inner_path)
- return False
- else:
- return super(SitePlugin, self).fileForgot(inner_path)
-
- def fileDone(self, inner_path):
- if "|" in inner_path and self.bad_files.get(inner_path, 0) > 5: # Idle optional file done
- inner_path_file = re.sub(r"\|.*", "", inner_path)
- num_changed = 0
- for key, val in self.bad_files.items():
- if key.startswith(inner_path_file) and val > 1:
- self.bad_files[key] = 1
- num_changed += 1
- self.log.debug("Idle optional file piece done, changed retry number of %s pieces." % num_changed)
- if num_changed:
- gevent.spawn(self.retryBadFiles)
-
- return super(SitePlugin, self).fileDone(inner_path)
-
-
-@PluginManager.registerTo("ConfigPlugin")
-class ConfigPlugin(object):
- def createArguments(self):
- group = self.parser.add_argument_group("OptionalManager plugin")
- group.add_argument('--optional_limit', help='Limit total size of optional files', default="10%", metavar="GB or free space %")
- group.add_argument('--optional_limit_exclude_minsize', help='Exclude files larger than this limit from optional size limit calculation', default=20, metavar="MB", type=int)
-
- return super(ConfigPlugin, self).createArguments()
diff --git a/plugins/OptionalManager/Test/TestOptionalManager.py b/plugins/OptionalManager/Test/TestOptionalManager.py
deleted file mode 100644
index 4bd44695..00000000
--- a/plugins/OptionalManager/Test/TestOptionalManager.py
+++ /dev/null
@@ -1,158 +0,0 @@
-import copy
-
-import pytest
-
-
-@pytest.mark.usefixtures("resetSettings")
-class TestOptionalManager:
- def testDbFill(self, site):
- contents = site.content_manager.contents
- assert len(site.content_manager.hashfield) > 0
- assert contents.db.execute("SELECT COUNT(*) FROM file_optional WHERE is_downloaded = 1").fetchone()[0] == len(site.content_manager.hashfield)
-
- def testSetContent(self, site):
- contents = site.content_manager.contents
-
- # Add new file
- new_content = copy.deepcopy(contents["content.json"])
- new_content["files_optional"]["testfile"] = {
- "size": 1234,
- "sha512": "aaaabbbbcccc"
- }
- num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0]
- contents["content.json"] = new_content
- assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] > num_optional_files_before
-
- # Remove file
- new_content = copy.deepcopy(contents["content.json"])
- del new_content["files_optional"]["testfile"]
- num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0]
- contents["content.json"] = new_content
- assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before
-
- def testDeleteContent(self, site):
- contents = site.content_manager.contents
- num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0]
- del contents["content.json"]
- assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before
-
- def testVerifyFiles(self, site):
- contents = site.content_manager.contents
-
- # Add new file
- new_content = copy.deepcopy(contents["content.json"])
- new_content["files_optional"]["testfile"] = {
- "size": 1234,
- "sha512": "aaaabbbbcccc"
- }
- contents["content.json"] = new_content
- file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone()
- assert not file_row["is_downloaded"]
-
- # Write file from outside of ZeroNet
- site.storage.open("testfile", "wb").write(b"A" * 1234) # For quick check hash does not matter only file size
-
- hashfield_len_before = len(site.content_manager.hashfield)
- site.storage.verifyFiles(quick_check=True)
- assert len(site.content_manager.hashfield) == hashfield_len_before + 1
-
- file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone()
- assert file_row["is_downloaded"]
-
- # Delete file outside of ZeroNet
- site.storage.delete("testfile")
- site.storage.verifyFiles(quick_check=True)
- file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone()
- assert not file_row["is_downloaded"]
-
- def testVerifyFilesSameHashId(self, site):
- contents = site.content_manager.contents
-
- new_content = copy.deepcopy(contents["content.json"])
-
- # Add two files with same hashid (first 4 character)
- new_content["files_optional"]["testfile1"] = {
- "size": 1234,
- "sha512": "aaaabbbbcccc"
- }
- new_content["files_optional"]["testfile2"] = {
- "size": 2345,
- "sha512": "aaaabbbbdddd"
- }
- contents["content.json"] = new_content
-
- assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd")
-
- # Write files from outside of ZeroNet (For quick check hash does not matter only file size)
- site.storage.open("testfile1", "wb").write(b"A" * 1234)
- site.storage.open("testfile2", "wb").write(b"B" * 2345)
-
- site.storage.verifyFiles(quick_check=True)
-
- # Make sure that both is downloaded
- assert site.content_manager.isDownloaded("testfile1")
- assert site.content_manager.isDownloaded("testfile2")
- assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") in site.content_manager.hashfield
-
- # Delete one of the files
- site.storage.delete("testfile1")
- site.storage.verifyFiles(quick_check=True)
- assert not site.content_manager.isDownloaded("testfile1")
- assert site.content_manager.isDownloaded("testfile2")
- assert site.content_manager.hashfield.getHashId("aaaabbbbdddd") in site.content_manager.hashfield
-
- def testIsPinned(self, site):
- assert not site.content_manager.isPinned("data/img/zerotalk-upvote.png")
- site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
- assert site.content_manager.isPinned("data/img/zerotalk-upvote.png")
-
- assert len(site.content_manager.cache_is_pinned) == 1
- site.content_manager.cache_is_pinned = {}
- assert site.content_manager.isPinned("data/img/zerotalk-upvote.png")
-
- def testBigfilePieceReset(self, site):
- site.bad_files = {
- "data/fake_bigfile.mp4|0-1024": 10,
- "data/fake_bigfile.mp4|1024-2048": 10,
- "data/fake_bigfile.mp4|2048-3064": 10
- }
- site.onFileDone("data/fake_bigfile.mp4|0-1024")
- assert site.bad_files["data/fake_bigfile.mp4|1024-2048"] == 1
- assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1
-
- def testOptionalDelete(self, site):
- contents = site.content_manager.contents
-
- site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
- site.content_manager.setPin("data/img/zeroid.png", False)
- new_content = copy.deepcopy(contents["content.json"])
- del new_content["files_optional"]["data/img/zerotalk-upvote.png"]
- del new_content["files_optional"]["data/img/zeroid.png"]
-
- assert site.storage.isFile("data/img/zerotalk-upvote.png")
- assert site.storage.isFile("data/img/zeroid.png")
-
- site.storage.writeJson("content.json", new_content)
- site.content_manager.loadContent("content.json", force=True)
-
- assert not site.storage.isFile("data/img/zeroid.png")
- assert site.storage.isFile("data/img/zerotalk-upvote.png")
-
- def testOptionalRename(self, site):
- contents = site.content_manager.contents
-
- site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
- new_content = copy.deepcopy(contents["content.json"])
- new_content["files_optional"]["data/img/zerotalk-upvote-new.png"] = new_content["files_optional"]["data/img/zerotalk-upvote.png"]
- del new_content["files_optional"]["data/img/zerotalk-upvote.png"]
-
- assert site.storage.isFile("data/img/zerotalk-upvote.png")
- assert site.content_manager.isPinned("data/img/zerotalk-upvote.png")
-
- site.storage.writeJson("content.json", new_content)
- site.content_manager.loadContent("content.json", force=True)
-
- assert not site.storage.isFile("data/img/zerotalk-upvote.png")
- assert not site.content_manager.isPinned("data/img/zerotalk-upvote.png")
- assert site.content_manager.isPinned("data/img/zerotalk-upvote-new.png")
- assert site.storage.isFile("data/img/zerotalk-upvote-new.png")
diff --git a/plugins/OptionalManager/Test/conftest.py b/plugins/OptionalManager/Test/conftest.py
deleted file mode 100644
index 8c1df5b2..00000000
--- a/plugins/OptionalManager/Test/conftest.py
+++ /dev/null
@@ -1 +0,0 @@
-from src.Test.conftest import *
\ No newline at end of file
diff --git a/plugins/OptionalManager/Test/pytest.ini b/plugins/OptionalManager/Test/pytest.ini
deleted file mode 100644
index d09210d1..00000000
--- a/plugins/OptionalManager/Test/pytest.ini
+++ /dev/null
@@ -1,5 +0,0 @@
-[pytest]
-python_files = Test*.py
-addopts = -rsxX -v --durations=6
-markers =
- webtest: mark a test as a webtest.
\ No newline at end of file
diff --git a/plugins/OptionalManager/UiWebsocketPlugin.py b/plugins/OptionalManager/UiWebsocketPlugin.py
deleted file mode 100644
index 0acc53cf..00000000
--- a/plugins/OptionalManager/UiWebsocketPlugin.py
+++ /dev/null
@@ -1,396 +0,0 @@
-import re
-import time
-import html
-import os
-
-import gevent
-
-from Plugin import PluginManager
-from Config import config
-from util import helper
-from util.Flag import flag
-from Translate import Translate
-
-
-plugin_dir = os.path.dirname(__file__)
-
-if "_" not in locals():
- _ = Translate(plugin_dir + "/languages/")
-
-bigfile_sha512_cache = {}
-
-
-@PluginManager.registerTo("UiWebsocket")
-class UiWebsocketPlugin(object):
- def __init__(self, *args, **kwargs):
- self.time_peer_numbers_updated = 0
- super(UiWebsocketPlugin, self).__init__(*args, **kwargs)
-
- def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
- # Add file to content.db and set it as pinned
- content_db = self.site.content_manager.contents.db
- content_inner_dir = helper.getDirname(inner_path)
- content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time()
- if len(content_db.my_optional_files) > 50: # Keep only last 50
- oldest_key = min(
- iter(content_db.my_optional_files.keys()),
- key=(lambda key: content_db.my_optional_files[key])
- )
- del content_db.my_optional_files[oldest_key]
-
- return super(UiWebsocketPlugin, self).actionSiteSign(to, privatekey, inner_path, *args, **kwargs)
-
- def updatePeerNumbers(self):
- self.site.updateHashfield()
- content_db = self.site.content_manager.contents.db
- content_db.updatePeerNumbers()
- self.site.updateWebsocket(peernumber_updated=True)
-
- def addBigfileInfo(self, row):
- global bigfile_sha512_cache
-
- content_db = self.site.content_manager.contents.db
- site = content_db.sites[row["address"]]
- if not site.settings.get("has_bigfile"):
- return False
-
- file_key = row["address"] + "/" + row["inner_path"]
- sha512 = bigfile_sha512_cache.get(file_key)
- file_info = None
- if not sha512:
- file_info = site.content_manager.getFileInfo(row["inner_path"])
- if not file_info or not file_info.get("piece_size"):
- return False
- sha512 = file_info["sha512"]
- bigfile_sha512_cache[file_key] = sha512
-
- if sha512 in site.storage.piecefields:
- piecefield = site.storage.piecefields[sha512].tobytes()
- else:
- piecefield = None
-
- if piecefield:
- row["pieces"] = len(piecefield)
- row["pieces_downloaded"] = piecefield.count(b"\x01")
- row["downloaded_percent"] = 100 * row["pieces_downloaded"] / row["pieces"]
- if row["pieces_downloaded"]:
- if row["pieces"] == row["pieces_downloaded"]:
- row["bytes_downloaded"] = row["size"]
- else:
- if not file_info:
- file_info = site.content_manager.getFileInfo(row["inner_path"])
- row["bytes_downloaded"] = row["pieces_downloaded"] * file_info.get("piece_size", 0)
- else:
- row["bytes_downloaded"] = 0
-
- row["is_downloading"] = bool(next((inner_path for inner_path in site.bad_files if inner_path.startswith(row["inner_path"])), False))
-
- # Add leech / seed stats
- row["peer_seed"] = 0
- row["peer_leech"] = 0
- for peer in site.peers.values():
- if not peer.time_piecefields_updated or sha512 not in peer.piecefields:
- continue
- peer_piecefield = peer.piecefields[sha512].tobytes()
- if not peer_piecefield:
- continue
- if peer_piecefield == b"\x01" * len(peer_piecefield):
- row["peer_seed"] += 1
- else:
- row["peer_leech"] += 1
-
- # Add myself
- if piecefield:
- if row["pieces_downloaded"] == row["pieces"]:
- row["peer_seed"] += 1
- else:
- row["peer_leech"] += 1
-
- return True
-
- # Optional file functions
-
- def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC", limit=10, filter="downloaded", filter_inner_path=None):
- if not address:
- address = self.site.address
-
- # Update peer numbers if necessary
- content_db = self.site.content_manager.contents.db
- if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5:
- # Start in new thread to avoid blocking
- self.time_peer_numbers_updated = time.time()
- gevent.spawn(self.updatePeerNumbers)
-
- if address == "all" and "ADMIN" not in self.permissions:
- return self.response(to, {"error": "Forbidden"})
-
- if not self.hasSitePermission(address):
- return self.response(to, {"error": "Forbidden"})
-
- if not all([re.match("^[a-z_*/+-]+( DESC| ASC|)$", part.strip()) for part in orderby.split(",")]):
- return self.response(to, "Invalid order_by")
-
- if type(limit) != int:
- return self.response(to, "Invalid limit")
-
- back = []
- content_db = self.site.content_manager.contents.db
-
- wheres = {}
- wheres_raw = []
- if "bigfile" in filter:
- wheres["size >"] = 1024 * 1024 * 1
- if "downloaded" in filter:
- wheres_raw.append("(is_downloaded = 1 OR is_pinned = 1)")
- if "pinned" in filter:
- wheres["is_pinned"] = 1
- if filter_inner_path:
- wheres["inner_path__like"] = filter_inner_path
-
- if address == "all":
- join = "LEFT JOIN site USING (site_id)"
- else:
- wheres["site_id"] = content_db.site_ids[address]
- join = ""
-
- if wheres_raw:
- query_wheres_raw = "AND" + " AND ".join(wheres_raw)
- else:
- query_wheres_raw = ""
-
- query = "SELECT * FROM file_optional %s WHERE ? %s ORDER BY %s LIMIT %s" % (join, query_wheres_raw, orderby, limit)
-
- for row in content_db.execute(query, wheres):
- row = dict(row)
- if address != "all":
- row["address"] = address
-
- if row["size"] > 1024 * 1024:
- has_bigfile_info = self.addBigfileInfo(row)
- else:
- has_bigfile_info = False
-
- if not has_bigfile_info and "bigfile" in filter:
- continue
-
- if not has_bigfile_info:
- if row["is_downloaded"]:
- row["bytes_downloaded"] = row["size"]
- row["downloaded_percent"] = 100
- else:
- row["bytes_downloaded"] = 0
- row["downloaded_percent"] = 0
-
- back.append(row)
- self.response(to, back)
-
- def actionOptionalFileInfo(self, to, inner_path):
- content_db = self.site.content_manager.contents.db
- site_id = content_db.site_ids[self.site.address]
-
- # Update peer numbers if necessary
- if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5:
- # Start in new thread to avoid blocking
- self.time_peer_numbers_updated = time.time()
- gevent.spawn(self.updatePeerNumbers)
-
- query = "SELECT * FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1"
- res = content_db.execute(query, {"site_id": site_id, "inner_path": inner_path})
- row = next(res, None)
- if row:
- row = dict(row)
- if row["size"] > 1024 * 1024:
- row["address"] = self.site.address
- self.addBigfileInfo(row)
- self.response(to, row)
- else:
- self.response(to, None)
-
- def setPin(self, inner_path, is_pinned, address=None):
- if not address:
- address = self.site.address
-
- if not self.hasSitePermission(address):
- return {"error": "Forbidden"}
-
- site = self.server.sites[address]
- site.content_manager.setPin(inner_path, is_pinned)
-
- return "ok"
-
- @flag.no_multiuser
- def actionOptionalFilePin(self, to, inner_path, address=None):
- if type(inner_path) is not list:
- inner_path = [inner_path]
- back = self.setPin(inner_path, 1, address)
- num_file = len(inner_path)
- if back == "ok":
- if num_file == 1:
- self.cmd("notification", ["done", _["Pinned %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
- else:
- self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000])
- self.response(to, back)
-
- @flag.no_multiuser
- def actionOptionalFileUnpin(self, to, inner_path, address=None):
- if type(inner_path) is not list:
- inner_path = [inner_path]
- back = self.setPin(inner_path, 0, address)
- num_file = len(inner_path)
- if back == "ok":
- if num_file == 1:
- self.cmd("notification", ["done", _["Removed pin from %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
- else:
- self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000])
- self.response(to, back)
-
- @flag.no_multiuser
- def actionOptionalFileDelete(self, to, inner_path, address=None):
- if not address:
- address = self.site.address
-
- if not self.hasSitePermission(address):
- return self.response(to, {"error": "Forbidden"})
-
- site = self.server.sites[address]
-
- content_db = site.content_manager.contents.db
- site_id = content_db.site_ids[site.address]
-
- res = content_db.execute("SELECT * FROM file_optional WHERE ? LIMIT 1", {"site_id": site_id, "inner_path": inner_path, "is_downloaded": 1})
- row = next(res, None)
-
- if not row:
- return self.response(to, {"error": "Not found in content.db"})
-
- removed = site.content_manager.optionalRemoved(inner_path, row["hash_id"], row["size"])
- # if not removed:
- # return self.response(to, {"error": "Not found in hash_id: %s" % row["hash_id"]})
-
- content_db.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"site_id": site_id, "inner_path": inner_path})
-
- try:
- site.storage.delete(inner_path)
- except Exception as err:
- return self.response(to, {"error": "File delete error: %s" % err})
- site.updateWebsocket(file_delete=inner_path)
-
- if inner_path in site.content_manager.cache_is_pinned:
- site.content_manager.cache_is_pinned = {}
-
- self.response(to, "ok")
-
- # Limit functions
-
- @flag.admin
- def actionOptionalLimitStats(self, to):
- back = {}
- back["limit"] = config.optional_limit
- back["used"] = self.site.content_manager.contents.db.getOptionalUsedBytes()
- back["free"] = helper.getFreeSpace()
-
- self.response(to, back)
-
- @flag.no_multiuser
- @flag.admin
- def actionOptionalLimitSet(self, to, limit):
- config.optional_limit = re.sub(r"\.0+$", "", limit) # Remove unnecessary digits from end
- config.saveValue("optional_limit", limit)
- self.response(to, "ok")
-
- # Distribute help functions
-
- def actionOptionalHelpList(self, to, address=None):
- if not address:
- address = self.site.address
-
- if not self.hasSitePermission(address):
- return self.response(to, {"error": "Forbidden"})
-
- site = self.server.sites[address]
-
- self.response(to, site.settings.get("optional_help", {}))
-
- @flag.no_multiuser
- def actionOptionalHelp(self, to, directory, title, address=None):
- if not address:
- address = self.site.address
-
- if not self.hasSitePermission(address):
- return self.response(to, {"error": "Forbidden"})
-
- site = self.server.sites[address]
- content_db = site.content_manager.contents.db
- site_id = content_db.site_ids[address]
-
- if "optional_help" not in site.settings:
- site.settings["optional_help"] = {}
-
- stats = content_db.execute(
- "SELECT COUNT(*) AS num, SUM(size) AS size FROM file_optional WHERE site_id = :site_id AND inner_path LIKE :inner_path",
- {"site_id": site_id, "inner_path": directory + "%"}
- ).fetchone()
- stats = dict(stats)
-
- if not stats["size"]:
- stats["size"] = 0
- if not stats["num"]:
- stats["num"] = 0
-
- self.cmd("notification", [
- "done",
- _["You started to help distribute %s .Directory: %s "] %
- (html.escape(title), html.escape(directory)),
- 10000
- ])
-
- site.settings["optional_help"][directory] = title
-
- self.response(to, dict(stats))
-
- @flag.no_multiuser
- def actionOptionalHelpRemove(self, to, directory, address=None):
- if not address:
- address = self.site.address
-
- if not self.hasSitePermission(address):
- return self.response(to, {"error": "Forbidden"})
-
- site = self.server.sites[address]
-
- try:
- del site.settings["optional_help"][directory]
- self.response(to, "ok")
- except Exception:
- self.response(to, {"error": "Not found"})
-
- def cbOptionalHelpAll(self, to, site, value):
- site.settings["autodownloadoptional"] = value
- self.response(to, value)
-
- @flag.no_multiuser
- def actionOptionalHelpAll(self, to, value, address=None):
- if not address:
- address = self.site.address
-
- if not self.hasSitePermission(address):
- return self.response(to, {"error": "Forbidden"})
-
- site = self.server.sites[address]
-
- if value:
- if "ADMIN" in self.site.settings["permissions"]:
- self.cbOptionalHelpAll(to, site, True)
- else:
- site_title = site.content_manager.contents["content.json"].get("title", address)
- self.cmd(
- "confirm",
- [
- _["Help distribute all new optional files on site %s "] % html.escape(site_title),
- _["Yes, I want to help!"]
- ],
- lambda res: self.cbOptionalHelpAll(to, site, True)
- )
- else:
- site.settings["autodownloadoptional"] = False
- self.response(to, False)
diff --git a/plugins/OptionalManager/__init__.py b/plugins/OptionalManager/__init__.py
deleted file mode 100644
index 77b8c348..00000000
--- a/plugins/OptionalManager/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from . import OptionalManagerPlugin
-from . import UiWebsocketPlugin
diff --git a/plugins/OptionalManager/languages/es.json b/plugins/OptionalManager/languages/es.json
deleted file mode 100644
index 32ae46ae..00000000
--- a/plugins/OptionalManager/languages/es.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "Pinned %s files": "Archivos %s fijados",
- "Removed pin from %s files": "Archivos %s que no estan fijados",
- "You started to help distribute %s .Directory: %s ": "Tu empezaste a ayudar a distribuir %s .Directorio: %s ",
- "Help distribute all new optional files on site %s ": "Ayude a distribuir todos los archivos opcionales en el sitio %s ",
- "Yes, I want to help!": "¡Si, yo quiero ayudar!"
-}
diff --git a/plugins/OptionalManager/languages/fr.json b/plugins/OptionalManager/languages/fr.json
deleted file mode 100644
index 47a563dc..00000000
--- a/plugins/OptionalManager/languages/fr.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "Pinned %s files": "Fichiers %s épinglés",
- "Removed pin from %s files": "Fichiers %s ne sont plus épinglés",
- "You started to help distribute %s .Directory: %s ": "Vous avez commencé à aider à distribuer %s .Dossier : %s ",
- "Help distribute all new optional files on site %s ": "Aider à distribuer tous les fichiers optionnels du site %s ",
- "Yes, I want to help!": "Oui, je veux aider !"
-}
diff --git a/plugins/OptionalManager/languages/hu.json b/plugins/OptionalManager/languages/hu.json
deleted file mode 100644
index 7a23b86c..00000000
--- a/plugins/OptionalManager/languages/hu.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "Pinned %s files": "%s fájl rögzítve",
- "Removed pin from %s files": "%s fájl rögzítés eltávolítva",
- "You started to help distribute %s .Directory: %s ": "Új segítség a terjesztésben: %s .Könyvtár: %s ",
- "Help distribute all new optional files on site %s ": "Segítség az összes új opcionális fájl terjesztésében az %s oldalon",
- "Yes, I want to help!": "Igen, segíteni akarok!"
-}
diff --git a/plugins/OptionalManager/languages/jp.json b/plugins/OptionalManager/languages/jp.json
deleted file mode 100644
index af6dc79e..00000000
--- a/plugins/OptionalManager/languages/jp.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "Pinned %s files": "%s 件のファイルを固定",
- "Removed pin from %s files": "%s 件のファイルの固定を解除",
- "You started to help distribute %s .Directory: %s ": "あなたはサイト: %s の配布の援助を開始しました。ディレクトリ: %s ",
- "Help distribute all new optional files on site %s ": "サイト: %s のすべての新しいオプションファイルの配布を援助しますか?",
- "Yes, I want to help!": "はい、やります!"
-}
diff --git a/plugins/OptionalManager/languages/pt-br.json b/plugins/OptionalManager/languages/pt-br.json
deleted file mode 100644
index 21d90cc0..00000000
--- a/plugins/OptionalManager/languages/pt-br.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "Pinned %s files": "Arquivos %s fixados",
- "Removed pin from %s files": "Arquivos %s não estão fixados",
- "You started to help distribute %s .Directory: %s ": "Você começou a ajudar a distribuir %s .Pasta: %s ",
- "Help distribute all new optional files on site %s ": "Ajude a distribuir todos os novos arquivos opcionais no site %s ",
- "Yes, I want to help!": "Sim, eu quero ajudar!"
-}
diff --git a/plugins/OptionalManager/languages/zh-tw.json b/plugins/OptionalManager/languages/zh-tw.json
deleted file mode 100644
index dfa9eaf3..00000000
--- a/plugins/OptionalManager/languages/zh-tw.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "Pinned %s files": "已固定 %s 個檔",
- "Removed pin from %s files": "已解除固定 %s 個檔",
- "You started to help distribute %s .Directory: %s ": "你已經開始幫助分發 %s 。目錄:%s ",
- "Help distribute all new optional files on site %s ": "你想要幫助分發 %s 網站的所有檔嗎?",
- "Yes, I want to help!": "是,我想要幫助!"
-}
diff --git a/plugins/OptionalManager/languages/zh.json b/plugins/OptionalManager/languages/zh.json
deleted file mode 100644
index ae18118e..00000000
--- a/plugins/OptionalManager/languages/zh.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "Pinned %s files": "已固定 %s 个文件",
- "Removed pin from %s files": "已解除固定 %s 个文件",
- "You started to help distribute %s .Directory: %s ": "您已经开始帮助分发 %s 。目录:%s ",
- "Help distribute all new optional files on site %s ": "您想要帮助分发 %s 站点的所有文件吗?",
- "Yes, I want to help!": "是,我想要帮助!"
-}
diff --git a/plugins/PeerDb/PeerDbPlugin.py b/plugins/PeerDb/PeerDbPlugin.py
deleted file mode 100644
index a66b81cf..00000000
--- a/plugins/PeerDb/PeerDbPlugin.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import time
-import sqlite3
-import random
-import atexit
-
-import gevent
-from Plugin import PluginManager
-
-
-@PluginManager.registerTo("ContentDb")
-class ContentDbPlugin(object):
- def __init__(self, *args, **kwargs):
- atexit.register(self.saveAllPeers)
- super(ContentDbPlugin, self).__init__(*args, **kwargs)
-
- def getSchema(self):
- schema = super(ContentDbPlugin, self).getSchema()
-
- schema["tables"]["peer"] = {
- "cols": [
- ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"],
- ["address", "TEXT NOT NULL"],
- ["port", "INTEGER NOT NULL"],
- ["hashfield", "BLOB"],
- ["reputation", "INTEGER NOT NULL"],
- ["time_added", "INTEGER NOT NULL"],
- ["time_found", "INTEGER NOT NULL"]
- ],
- "indexes": [
- "CREATE UNIQUE INDEX peer_key ON peer (site_id, address, port)"
- ],
- "schema_changed": 2
- }
-
- return schema
-
- def loadPeers(self, site):
- s = time.time()
- site_id = self.site_ids.get(site.address)
- res = self.execute("SELECT * FROM peer WHERE site_id = :site_id", {"site_id": site_id})
- num = 0
- num_hashfield = 0
- for row in res:
- peer = site.addPeer(str(row["address"]), row["port"])
- if not peer: # Already exist
- continue
- if row["hashfield"]:
- peer.hashfield.replaceFromBytes(row["hashfield"])
- num_hashfield += 1
- peer.time_added = row["time_added"]
- peer.time_found = row["time_found"]
- peer.reputation = row["reputation"]
- if row["address"].endswith(".onion"):
- peer.reputation = peer.reputation / 2 - 1 # Onion peers less likely working
- num += 1
- if num_hashfield:
- site.content_manager.has_optional_files = True
- site.log.debug("%s peers (%s with hashfield) loaded in %.3fs" % (num, num_hashfield, time.time() - s))
-
- def iteratePeers(self, site):
- site_id = self.site_ids.get(site.address)
- for key, peer in list(site.peers.items()):
- address, port = key.rsplit(":", 1)
- if peer.has_hashfield:
- hashfield = sqlite3.Binary(peer.hashfield.tobytes())
- else:
- hashfield = ""
- yield (site_id, address, port, hashfield, peer.reputation, int(peer.time_added), int(peer.time_found))
-
- def savePeers(self, site, spawn=False):
- if spawn:
- # Save peers every hour (+random some secs to not update very site at same time)
- site.greenlet_manager.spawnLater(60 * 60 + random.randint(0, 60), self.savePeers, site, spawn=True)
- if not site.peers:
- site.log.debug("Peers not saved: No peers found")
- return
- s = time.time()
- site_id = self.site_ids.get(site.address)
- cur = self.getCursor()
- try:
- cur.execute("DELETE FROM peer WHERE site_id = :site_id", {"site_id": site_id})
- cur.executemany(
- "INSERT INTO peer (site_id, address, port, hashfield, reputation, time_added, time_found) VALUES (?, ?, ?, ?, ?, ?, ?)",
- self.iteratePeers(site)
- )
- except Exception as err:
- site.log.error("Save peer error: %s" % err)
- site.log.debug("Peers saved in %.3fs" % (time.time() - s))
-
- def initSite(self, site):
- super(ContentDbPlugin, self).initSite(site)
- site.greenlet_manager.spawnLater(0.5, self.loadPeers, site)
- site.greenlet_manager.spawnLater(60*60, self.savePeers, site, spawn=True)
-
- def saveAllPeers(self):
- for site in list(self.sites.values()):
- try:
- self.savePeers(site)
- except Exception as err:
- site.log.error("Save peer error: %s" % err)
diff --git a/plugins/PeerDb/__init__.py b/plugins/PeerDb/__init__.py
deleted file mode 100644
index bc8c93b9..00000000
--- a/plugins/PeerDb/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from . import PeerDbPlugin
-
diff --git a/plugins/PeerDb/plugin_info.json b/plugins/PeerDb/plugin_info.json
deleted file mode 100644
index b13915ff..00000000
--- a/plugins/PeerDb/plugin_info.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": "PeerDb",
- "description": "Save/restore peer list on client restart.",
- "default": "enabled"
-}
\ No newline at end of file
diff --git a/plugins/Sidebar/ConsolePlugin.py b/plugins/Sidebar/ConsolePlugin.py
deleted file mode 100644
index 15f6a1ba..00000000
--- a/plugins/Sidebar/ConsolePlugin.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import re
-import logging
-
-from Plugin import PluginManager
-from Config import config
-from Debug import Debug
-from util import SafeRe
-from util.Flag import flag
-
-
-class WsLogStreamer(logging.StreamHandler):
- def __init__(self, stream_id, ui_websocket, filter):
- self.stream_id = stream_id
- self.ui_websocket = ui_websocket
-
- if filter:
- if not SafeRe.isSafePattern(filter):
- raise Exception("Not a safe prex pattern")
- self.filter_re = re.compile(".*" + filter)
- else:
- self.filter_re = None
- return super(WsLogStreamer, self).__init__()
-
- def emit(self, record):
- if self.ui_websocket.ws.closed:
- self.stop()
- return
- line = self.format(record)
- if self.filter_re and not self.filter_re.match(line):
- return False
-
- self.ui_websocket.cmd("logLineAdd", {"stream_id": self.stream_id, "lines": [line]})
-
- def stop(self):
- logging.getLogger('').removeHandler(self)
-
-
-@PluginManager.registerTo("UiWebsocket")
-class UiWebsocketPlugin(object):
- def __init__(self, *args, **kwargs):
- self.log_streamers = {}
- return super(UiWebsocketPlugin, self).__init__(*args, **kwargs)
-
- @flag.no_multiuser
- @flag.admin
- def actionConsoleLogRead(self, to, filter=None, read_size=32 * 1024, limit=500):
- log_file_path = "%s/debug.log" % config.log_dir
- log_file = open(log_file_path, encoding="utf-8")
- log_file.seek(0, 2)
- end_pos = log_file.tell()
- log_file.seek(max(0, end_pos - read_size))
- if log_file.tell() != 0:
- log_file.readline() # Partial line junk
-
- pos_start = log_file.tell()
- lines = []
- if filter:
- assert SafeRe.isSafePattern(filter)
- filter_re = re.compile(".*" + filter)
-
- last_match = False
- for line in log_file:
- if not line.startswith("[") and last_match: # Multi-line log entry
- lines.append(line.replace(" ", " "))
- continue
-
- if filter and not filter_re.match(line):
- last_match = False
- continue
- last_match = True
- lines.append(line)
-
- num_found = len(lines)
- lines = lines[-limit:]
-
- return {"lines": lines, "pos_end": log_file.tell(), "pos_start": pos_start, "num_found": num_found}
-
- def addLogStreamer(self, stream_id, filter=None):
- logger = WsLogStreamer(stream_id, self, filter)
- logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s'))
- logger.setLevel(logging.getLevelName("DEBUG"))
-
- logging.getLogger('').addHandler(logger)
- return logger
-
- @flag.no_multiuser
- @flag.admin
- def actionConsoleLogStream(self, to, filter=None):
- stream_id = to
- self.log_streamers[stream_id] = self.addLogStreamer(stream_id, filter)
- self.response(to, {"stream_id": stream_id})
-
- @flag.no_multiuser
- @flag.admin
- def actionConsoleLogStreamRemove(self, to, stream_id):
- try:
- self.log_streamers[stream_id].stop()
- del self.log_streamers[stream_id]
- return "ok"
- except Exception as err:
- return {"error": Debug.formatException(err)}
diff --git a/plugins/Sidebar/SidebarPlugin.py b/plugins/Sidebar/SidebarPlugin.py
deleted file mode 100644
index 4ecca75a..00000000
--- a/plugins/Sidebar/SidebarPlugin.py
+++ /dev/null
@@ -1,805 +0,0 @@
-import re
-import os
-import html
-import sys
-import math
-import time
-import json
-import io
-import urllib
-import urllib.parse
-
-import gevent
-
-import util
-from Config import config
-from Plugin import PluginManager
-from Debug import Debug
-from Translate import Translate
-from util import helper
-from util.Flag import flag
-from .ZipStream import ZipStream
-
-plugin_dir = os.path.dirname(__file__)
-media_dir = plugin_dir + "/media"
-
-loc_cache = {}
-if "_" not in locals():
- _ = Translate(plugin_dir + "/languages/")
-
-
-@PluginManager.registerTo("UiRequest")
-class UiRequestPlugin(object):
- # Inject our resources to end of original file streams
- def actionUiMedia(self, path):
- if path == "/uimedia/all.js" or path == "/uimedia/all.css":
- # First yield the original file and header
- body_generator = super(UiRequestPlugin, self).actionUiMedia(path)
- for part in body_generator:
- yield part
-
- # Append our media file to the end
- ext = re.match(".*(js|css)$", path).group(1)
- plugin_media_file = "%s/all.%s" % (media_dir, ext)
- if config.debug:
- # If debugging merge *.css to all.css and *.js to all.js
- from Debug import DebugMedia
- DebugMedia.merge(plugin_media_file)
- if ext == "js":
- yield _.translateData(open(plugin_media_file).read()).encode("utf8")
- else:
- for part in self.actionFile(plugin_media_file, send_header=False):
- yield part
- elif path.startswith("/uimedia/globe/"): # Serve WebGL globe files
- file_name = re.match(".*/(.*)", path).group(1)
- plugin_media_file = "%s_globe/%s" % (media_dir, file_name)
- if config.debug and path.endswith("all.js"):
- # If debugging merge *.css to all.css and *.js to all.js
- from Debug import DebugMedia
- DebugMedia.merge(plugin_media_file)
- for part in self.actionFile(plugin_media_file):
- yield part
- else:
- for part in super(UiRequestPlugin, self).actionUiMedia(path):
- yield part
-
- def actionZip(self):
- address = self.get["address"]
- site = self.server.site_manager.get(address)
- if not site:
- return self.error404("Site not found")
-
- title = site.content_manager.contents.get("content.json", {}).get("title", "")
- filename = "%s-backup-%s.zip" % (title, time.strftime("%Y-%m-%d_%H_%M"))
- filename_quoted = urllib.parse.quote(filename)
- self.sendHeader(content_type="application/zip", extra_headers={'Content-Disposition': 'attachment; filename="%s"' % filename_quoted})
-
- return self.streamZip(site.storage.getPath("."))
-
- def streamZip(self, dir_path):
- zs = ZipStream(dir_path)
- while 1:
- data = zs.read()
- if not data:
- break
- yield data
-
-
-@PluginManager.registerTo("UiWebsocket")
-class UiWebsocketPlugin(object):
- def sidebarRenderPeerStats(self, body, site):
- connected = len([peer for peer in list(site.peers.values()) if peer.connection and peer.connection.connected])
- connectable = len([peer_id for peer_id in list(site.peers.keys()) if not peer_id.endswith(":0")])
- onion = len([peer_id for peer_id in list(site.peers.keys()) if ".onion" in peer_id])
- local = len([peer for peer in list(site.peers.values()) if helper.isPrivateIp(peer.ip)])
- peers_total = len(site.peers)
-
- # Add myself
- if site.isServing():
- peers_total += 1
- if any(site.connection_server.port_opened.values()):
- connectable += 1
- if site.connection_server.tor_manager.start_onions:
- onion += 1
-
- if peers_total:
- percent_connected = float(connected) / peers_total
- percent_connectable = float(connectable) / peers_total
- percent_onion = float(onion) / peers_total
- else:
- percent_connectable = percent_connected = percent_onion = 0
-
- if local:
- local_html = _("{_[Local]}: {local} ")
- else:
- local_html = ""
-
- peer_ips = [peer.key for peer in site.getConnectablePeers(20, allow_private=False)]
- peer_ips.sort(key=lambda peer_ip: ".onion:" in peer_ip)
- copy_link = "http://127.0.0.1:43110/%s/?zeronet_peers=%s" % (
- site.content_manager.contents.get("content.json", {}).get("domain", site.address),
- ",".join(peer_ips)
- )
-
- body.append(_("""
-
-
- {_[Peers]}
- {_[Copy to clipboard]}
-
-
-
- {_[Connected]}: {connected}
- {_[Connectable]}: {connectable}
- {_[Onion]}: {onion}
- {local_html}
- {_[Total]}: {peers_total}
-
-
- """.replace("{local_html}", local_html)))
-
- def sidebarRenderTransferStats(self, body, site):
- recv = float(site.settings.get("bytes_recv", 0)) / 1024 / 1024
- sent = float(site.settings.get("bytes_sent", 0)) / 1024 / 1024
- transfer_total = recv + sent
- if transfer_total:
- percent_recv = recv / transfer_total
- percent_sent = sent / transfer_total
- else:
- percent_recv = 0.5
- percent_sent = 0.5
-
- body.append(_("""
-
- {_[Data transfer]}
-
-
- {_[Received]}: {recv:.2f}MB
- {_[Sent]}:{sent:.2f}MB
-
-
- """))
-
- def sidebarRenderFileStats(self, body, site):
- body.append(_("""
-
-
- {_[Files]}
- {_[Browse files]}
-
- {_[Save as .zip]}
-
-
-
- """))
-
- extensions = (
- ("html", "yellow"),
- ("css", "orange"),
- ("js", "purple"),
- ("Image", "green"),
- ("json", "darkblue"),
- ("User data", "blue"),
- ("Other", "white"),
- ("Total", "black")
- )
- # Collect stats
- size_filetypes = {}
- size_total = 0
- contents = site.content_manager.listContents() # Without user files
- for inner_path in contents:
- content = site.content_manager.contents[inner_path]
- if "files" not in content or content["files"] is None:
- continue
- for file_name, file_details in list(content["files"].items()):
- size_total += file_details["size"]
- ext = file_name.split(".")[-1]
- size_filetypes[ext] = size_filetypes.get(ext, 0) + file_details["size"]
-
- # Get user file sizes
- size_user_content = site.content_manager.contents.execute(
- "SELECT SUM(size) + SUM(size_files) AS size FROM content WHERE ?",
- {"not__inner_path": contents}
- ).fetchone()["size"]
- if not size_user_content:
- size_user_content = 0
- size_filetypes["User data"] = size_user_content
- size_total += size_user_content
-
- # The missing difference is content.json sizes
- if "json" in size_filetypes:
- size_filetypes["json"] += max(0, site.settings["size"] - size_total)
- size_total = size_other = site.settings["size"]
-
- # Bar
- for extension, color in extensions:
- if extension == "Total":
- continue
- if extension == "Other":
- size = max(0, size_other)
- elif extension == "Image":
- size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0)
- size_other -= size
- else:
- size = size_filetypes.get(extension, 0)
- size_other -= size
- if size_total == 0:
- percent = 0
- else:
- percent = 100 * (float(size) / size_total)
- percent = math.floor(percent * 100) / 100 # Floor to 2 digits
- body.append(
- """ """ %
- (percent, _[extension], color, _[extension])
- )
-
- # Legend
- body.append(" ")
- for extension, color in extensions:
- if extension == "Other":
- size = max(0, size_other)
- elif extension == "Image":
- size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0)
- elif extension == "Total":
- size = size_total
- else:
- size = size_filetypes.get(extension, 0)
-
- if extension == "js":
- title = "javascript"
- else:
- title = extension
-
- if size > 1024 * 1024 * 10: # Format as mB is more than 10mB
- size_formatted = "%.0fMB" % (size / 1024 / 1024)
- else:
- size_formatted = "%.0fkB" % (size / 1024)
-
- body.append("%s: %s " % (color, _[title], size_formatted))
-
- body.append(" ")
-
- def sidebarRenderSizeLimit(self, body, site):
- free_space = helper.getFreeSpace() / 1024 / 1024
- size = float(site.settings["size"]) / 1024 / 1024
- size_limit = site.getSizeLimit()
- percent_used = size / size_limit
-
- body.append(_("""
-
- {_[Size limit]} ({_[limit used]}: {percent_used:.0%}, {_[free space]}: {free_space:,.0f}MB)
- MB
- {_[Set]}
-
- """))
-
- def sidebarRenderOptionalFileStats(self, body, site):
- size_total = float(site.settings["size_optional"])
- size_downloaded = float(site.settings["optional_downloaded"])
-
- if not size_total:
- return False
-
- percent_downloaded = size_downloaded / size_total
-
- size_formatted_total = size_total / 1024 / 1024
- size_formatted_downloaded = size_downloaded / 1024 / 1024
-
- body.append(_("""
-
- {_[Optional files]}
-
-
- {_[Downloaded]}: {size_formatted_downloaded:.2f}MB
- {_[Total]}: {size_formatted_total:.2f}MB
-
-
- """))
-
- return True
-
- def sidebarRenderOptionalFileSettings(self, body, site):
- if self.site.settings.get("autodownloadoptional"):
- checked = "checked='checked'"
- else:
- checked = ""
-
- body.append(_("""
-
- {_[Help distribute added optional files]}
-
- """))
-
- if hasattr(config, "autodownload_bigfile_size_limit"):
- autodownload_bigfile_size_limit = int(site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit))
- body.append(_("""
-
- """))
- body.append(" ")
-
- def sidebarRenderBadFiles(self, body, site):
- body.append(_("""
-
- {_[Needs to be updated]}:
-
- """))
-
- i = 0
- for bad_file, tries in site.bad_files.items():
- i += 1
- body.append(_("""{bad_filename} """, {
- "bad_file_path": bad_file,
- "bad_filename": helper.getFilename(bad_file),
- "tries": _.pluralize(tries, "{} try", "{} tries")
- }))
- if i > 30:
- break
-
- if len(site.bad_files) > 30:
- num_bad_files = len(site.bad_files) - 30
- body.append(_("""{_[+ {num_bad_files} more]} """, nested=True))
-
- body.append("""
-
-
- """)
-
- def sidebarRenderDbOptions(self, body, site):
- if site.storage.db:
- inner_path = site.storage.getInnerPath(site.storage.db.db_path)
- size = float(site.storage.getSize(inner_path)) / 1024
- feeds = len(site.storage.db.schema.get("feeds", {}))
- else:
- inner_path = _["No database found"]
- size = 0.0
- feeds = 0
-
- body.append(_("""
-
- {_[Database]} ({size:.2f}kB, {_[search feeds]}: {_[{feeds} query]})
-
-
- """, nested=True))
-
- def sidebarRenderIdentity(self, body, site):
- auth_address = self.user.getAuthAddress(self.site.address, create=False)
- rules = self.site.content_manager.getRules("data/users/%s/content.json" % auth_address)
- if rules and rules.get("max_size"):
- quota = rules["max_size"] / 1024
- try:
- content = site.content_manager.contents["data/users/%s/content.json" % auth_address]
- used = len(json.dumps(content)) + sum([file["size"] for file in list(content["files"].values())])
- except:
- used = 0
- used = used / 1024
- else:
- quota = used = 0
-
- body.append(_("""
-
- {_[Identity address]} ({_[limit used]}: {used:.2f}kB / {quota:.2f}kB)
-
-
- """))
-
- def sidebarRenderControls(self, body, site):
- auth_address = self.user.getAuthAddress(self.site.address, create=False)
- if self.site.settings["serving"]:
- class_pause = ""
- class_resume = "hidden"
- else:
- class_pause = "hidden"
- class_resume = ""
-
- body.append(_("""
-
- {_[Site control]}
- {_[Update]}
- {_[Pause]}
- {_[Resume]}
- {_[Delete]}
-
- """))
-
- donate_key = site.content_manager.contents.get("content.json", {}).get("donate", True)
- site_address = self.site.address
- body.append(_("""
-
- {_[Site address]}
-
- {site_address}
- """))
- if donate_key == False or donate_key == "":
- pass
- elif (type(donate_key) == str or type(donate_key) == str) and len(donate_key) > 0:
- body.append(_("""
-
-
-
- {_[Donate]}
-
- {donate_key}
- """))
- else:
- body.append(_("""
-
{_[Donate]}
- """))
- body.append(_("""
-
-
- """))
-
- def sidebarRenderOwnedCheckbox(self, body, site):
- if self.site.settings["own"]:
- checked = "checked='checked'"
- else:
- checked = ""
-
- body.append(_("""
- {_[This is my site]}
-
- """))
-
- def sidebarRenderOwnSettings(self, body, site):
- title = site.content_manager.contents.get("content.json", {}).get("title", "")
- description = site.content_manager.contents.get("content.json", {}).get("description", "")
-
- body.append(_("""
-
- {_[Site title]}
-
-
-
-
- {_[Site description]}
-
-
-
-
- {_[Save site settings]}
-
- """))
-
- def sidebarRenderContents(self, body, site):
- has_privatekey = bool(self.user.getSiteData(site.address, create=False).get("privatekey"))
- if has_privatekey:
- tag_privatekey = _("{_[Private key saved.]} {_[Forget]} ")
- else:
- tag_privatekey = _("{_[Add saved private key]} ")
-
- body.append(_("""
-
- {_[Content publishing]} {tag_privatekey}
- """.replace("{tag_privatekey}", tag_privatekey)))
-
- # Choose content you want to sign
- body.append(_("""
-
- """))
-
- contents = ["content.json"]
- contents += list(site.content_manager.contents.get("content.json", {}).get("includes", {}).keys())
- body.append(_("{_[Choose]}: "))
- for content in contents:
- body.append(_("
{content} "))
- body.append("
")
- body.append(" ")
-
- @flag.admin
- def actionSidebarGetHtmlTag(self, to):
- site = self.site
-
- body = []
-
- body.append("")
- body.append("
× ")
- body.append("
%s " % html.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True))
-
- body.append("
")
-
- body.append("
")
-
- self.sidebarRenderPeerStats(body, site)
- self.sidebarRenderTransferStats(body, site)
- self.sidebarRenderFileStats(body, site)
- self.sidebarRenderSizeLimit(body, site)
- has_optional = self.sidebarRenderOptionalFileStats(body, site)
- if has_optional:
- self.sidebarRenderOptionalFileSettings(body, site)
- self.sidebarRenderDbOptions(body, site)
- self.sidebarRenderIdentity(body, site)
- self.sidebarRenderControls(body, site)
- if site.bad_files:
- self.sidebarRenderBadFiles(body, site)
-
- self.sidebarRenderOwnedCheckbox(body, site)
- body.append("")
- self.sidebarRenderOwnSettings(body, site)
- self.sidebarRenderContents(body, site)
- body.append("
")
- body.append(" ")
- body.append("
")
-
- body.append("")
-
- self.response(to, "".join(body))
-
- def downloadGeoLiteDb(self, db_path):
- import gzip
- import shutil
- from util import helper
-
- if config.offline:
- return False
-
- self.log.info("Downloading GeoLite2 City database...")
- self.cmd("progress", ["geolite-info", _["Downloading GeoLite2 City database (one time only, ~20MB)..."], 0])
- db_urls = [
- "https://raw.githubusercontent.com/aemr3/GeoLite2-Database/master/GeoLite2-City.mmdb.gz",
- "https://raw.githubusercontent.com/texnikru/GeoLite2-Database/master/GeoLite2-City.mmdb.gz"
- ]
- for db_url in db_urls:
- downloadl_err = None
- try:
- # Download
- response = helper.httpRequest(db_url)
- data_size = response.getheader('content-length')
- data_recv = 0
- data = io.BytesIO()
- while True:
- buff = response.read(1024 * 512)
- if not buff:
- break
- data.write(buff)
- data_recv += 1024 * 512
- if data_size:
- progress = int(float(data_recv) / int(data_size) * 100)
- self.cmd("progress", ["geolite-info", _["Downloading GeoLite2 City database (one time only, ~20MB)..."], progress])
- self.log.info("GeoLite2 City database downloaded (%s bytes), unpacking..." % data.tell())
- data.seek(0)
-
- # Unpack
- with gzip.GzipFile(fileobj=data) as gzip_file:
- shutil.copyfileobj(gzip_file, open(db_path, "wb"))
-
- self.cmd("progress", ["geolite-info", _["GeoLite2 City database downloaded!"], 100])
- time.sleep(2) # Wait for notify animation
- self.log.info("GeoLite2 City database is ready at: %s" % db_path)
- return True
- except Exception as err:
- download_err = err
- self.log.error("Error downloading %s: %s" % (db_url, err))
- pass
- self.cmd("progress", [
- "geolite-info",
- _["GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}"].format(download_err, db_urls[0]),
- -100
- ])
-
- def getLoc(self, geodb, ip):
- global loc_cache
-
- if ip in loc_cache:
- return loc_cache[ip]
- else:
- try:
- loc_data = geodb.get(ip)
- except:
- loc_data = None
-
- if not loc_data or "location" not in loc_data:
- loc_cache[ip] = None
- return None
-
- loc = {
- "lat": loc_data["location"]["latitude"],
- "lon": loc_data["location"]["longitude"],
- }
- if "city" in loc_data:
- loc["city"] = loc_data["city"]["names"]["en"]
-
- if "country" in loc_data:
- loc["country"] = loc_data["country"]["names"]["en"]
-
- loc_cache[ip] = loc
- return loc
-
- @util.Noparallel()
- def getGeoipDb(self):
- db_name = 'GeoLite2-City.mmdb'
-
- sys_db_paths = []
- if sys.platform == "linux":
- sys_db_paths += ['/usr/share/GeoIP/' + db_name]
-
- data_dir_db_path = os.path.join(config.data_dir, db_name)
-
- db_paths = sys_db_paths + [data_dir_db_path]
-
- for path in db_paths:
- if os.path.isfile(path) and os.path.getsize(path) > 0:
- return path
-
- self.log.info("GeoIP database not found at [%s]. Downloading to: %s",
- " ".join(db_paths), data_dir_db_path)
- if self.downloadGeoLiteDb(data_dir_db_path):
- return data_dir_db_path
- return None
-
- def getPeerLocations(self, peers):
- import maxminddb
-
- db_path = self.getGeoipDb()
- if not db_path:
- self.log.debug("Not showing peer locations: no GeoIP database")
- return False
-
- geodb = maxminddb.open_database(db_path)
-
- peers = list(peers.values())
- # Place bars
- peer_locations = []
- placed = {} # Already placed bars here
- for peer in peers:
- # Height of bar
- if peer.connection and peer.connection.last_ping_delay:
- ping = round(peer.connection.last_ping_delay * 1000)
- else:
- ping = None
- loc = self.getLoc(geodb, peer.ip)
-
- if not loc:
- continue
- # Create position array
- lat, lon = loc["lat"], loc["lon"]
- latlon = "%s,%s" % (lat, lon)
- if latlon in placed and helper.getIpType(peer.ip) == "ipv4": # Dont place more than 1 bar to same place, fake repos using ip address last two part
- lat += float(128 - int(peer.ip.split(".")[-2])) / 50
- lon += float(128 - int(peer.ip.split(".")[-1])) / 50
- latlon = "%s,%s" % (lat, lon)
- placed[latlon] = True
- peer_location = {}
- peer_location.update(loc)
- peer_location["lat"] = lat
- peer_location["lon"] = lon
- peer_location["ping"] = ping
-
- peer_locations.append(peer_location)
-
- # Append myself
- for ip in self.site.connection_server.ip_external_list:
- my_loc = self.getLoc(geodb, ip)
- if my_loc:
- my_loc["ping"] = 0
- peer_locations.append(my_loc)
-
- return peer_locations
-
- @flag.admin
- @flag.async_run
- def actionSidebarGetPeers(self, to):
- try:
- peer_locations = self.getPeerLocations(self.site.peers)
- globe_data = []
- ping_times = [
- peer_location["ping"]
- for peer_location in peer_locations
- if peer_location["ping"]
- ]
- if ping_times:
- ping_avg = sum(ping_times) / float(len(ping_times))
- else:
- ping_avg = 0
-
- for peer_location in peer_locations:
- if peer_location["ping"] == 0: # Me
- height = -0.135
- elif peer_location["ping"]:
- height = min(0.20, math.log(1 + peer_location["ping"] / ping_avg, 300))
- else:
- height = -0.03
-
- globe_data += [peer_location["lat"], peer_location["lon"], height]
-
- self.response(to, globe_data)
- except Exception as err:
- self.log.debug("sidebarGetPeers error: %s" % Debug.formatException(err))
- self.response(to, {"error": str(err)})
-
- @flag.admin
- @flag.no_multiuser
- def actionSiteSetOwned(self, to, owned):
- if self.site.address == config.updatesite:
- return {"error": "You can't change the ownership of the updater site"}
-
- self.site.settings["own"] = bool(owned)
- self.site.updateWebsocket(owned=owned)
- return "ok"
-
- @flag.admin
- @flag.no_multiuser
- def actionSiteRecoverPrivatekey(self, to):
- from Crypt import CryptBitcoin
-
- site_data = self.user.sites[self.site.address]
- if site_data.get("privatekey"):
- return {"error": "This site already has saved privated key"}
-
- address_index = self.site.content_manager.contents.get("content.json", {}).get("address_index")
- if not address_index:
- return {"error": "No address_index in content.json"}
-
- privatekey = CryptBitcoin.hdPrivatekey(self.user.master_seed, address_index)
- privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
-
- if privatekey_address == self.site.address:
- site_data["privatekey"] = privatekey
- self.user.save()
- self.site.updateWebsocket(recover_privatekey=True)
- return "ok"
- else:
- return {"error": "Unable to deliver private key for this site from current user's master_seed"}
-
- @flag.admin
- @flag.no_multiuser
- def actionUserSetSitePrivatekey(self, to, privatekey):
- site_data = self.user.sites[self.site.address]
- site_data["privatekey"] = privatekey
- self.site.updateWebsocket(set_privatekey=bool(privatekey))
- self.user.save()
-
- return "ok"
-
- @flag.admin
- @flag.no_multiuser
- def actionSiteSetAutodownloadoptional(self, to, owned):
- self.site.settings["autodownloadoptional"] = bool(owned)
- self.site.worker_manager.removeSolvedFileTasks()
-
- @flag.no_multiuser
- @flag.admin
- def actionDbReload(self, to):
- self.site.storage.closeDb()
- self.site.storage.getDb()
-
- return self.response(to, "ok")
-
- @flag.no_multiuser
- @flag.admin
- def actionDbRebuild(self, to):
- try:
- self.site.storage.rebuildDb()
- except Exception as err:
- return self.response(to, {"error": str(err)})
-
-
- return self.response(to, "ok")
diff --git a/plugins/Sidebar/ZipStream.py b/plugins/Sidebar/ZipStream.py
deleted file mode 100644
index b6e05b21..00000000
--- a/plugins/Sidebar/ZipStream.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import io
-import os
-import zipfile
-
-class ZipStream(object):
- def __init__(self, dir_path):
- self.dir_path = dir_path
- self.pos = 0
- self.buff_pos = 0
- self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
- self.buff = io.BytesIO()
- self.file_list = self.getFileList()
-
- def getFileList(self):
- for root, dirs, files in os.walk(self.dir_path):
- for file in files:
- file_path = root + "/" + file
- relative_path = os.path.join(os.path.relpath(root, self.dir_path), file)
- yield file_path, relative_path
- self.zf.close()
-
- def read(self, size=60 * 1024):
- for file_path, relative_path in self.file_list:
- self.zf.write(file_path, relative_path)
- if self.buff.tell() >= size:
- break
- self.buff.seek(0)
- back = self.buff.read()
- self.buff.truncate(0)
- self.buff.seek(0)
- self.buff_pos += len(back)
- return back
-
- def write(self, data):
- self.pos += len(data)
- self.buff.write(data)
-
- def tell(self):
- return self.pos
-
- def seek(self, pos, whence=0):
- if pos >= self.buff_pos:
- self.buff.seek(pos - self.buff_pos, whence)
- self.pos = pos
-
- def flush(self):
- pass
-
-
-if __name__ == "__main__":
- zs = ZipStream(".")
- out = open("out.zip", "wb")
- while 1:
- data = zs.read()
- print("Write %s" % len(data))
- if not data:
- break
- out.write(data)
- out.close()
diff --git a/plugins/Sidebar/__init__.py b/plugins/Sidebar/__init__.py
deleted file mode 100644
index be7f14e1..00000000
--- a/plugins/Sidebar/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from . import SidebarPlugin
-from . import ConsolePlugin
\ No newline at end of file
diff --git a/plugins/Sidebar/languages/da.json b/plugins/Sidebar/languages/da.json
deleted file mode 100644
index a421292c..00000000
--- a/plugins/Sidebar/languages/da.json
+++ /dev/null
@@ -1,81 +0,0 @@
-{
- "Peers": "Klienter",
- "Connected": "Forbundet",
- "Connectable": "Mulige",
- "Connectable peers": "Mulige klienter",
-
- "Data transfer": "Data overførsel",
- "Received": "Modtaget",
- "Received bytes": "Bytes modtaget",
- "Sent": "Sendt",
- "Sent bytes": "Bytes sendt",
-
- "Files": "Filer",
- "Total": "I alt",
- "Image": "Image",
- "Other": "Andet",
- "User data": "Bruger data",
-
- "Size limit": "Side max størrelse",
- "limit used": "brugt",
- "free space": "fri",
- "Set": "Opdater",
-
- "Optional files": "Valgfri filer",
- "Downloaded": "Downloadet",
- "Download and help distribute all files": "Download og hjælp med at dele filer",
- "Total size": "Størrelse i alt",
- "Downloaded files": "Filer downloadet",
-
- "Database": "Database",
- "search feeds": "søgninger",
- "{feeds} query": "{feeds} søgninger",
- "Reload": "Genindlæs",
- "Rebuild": "Genopbyg",
- "No database found": "Ingen database fundet",
-
- "Identity address": "Autorisations ID",
- "Change": "Skift",
-
- "Update": "Opdater",
- "Pause": "Pause",
- "Resume": "Aktiv",
- "Delete": "Slet",
- "Are you sure?": "Er du sikker?",
-
- "Site address": "Side addresse",
- "Donate": "Doner penge",
-
- "Missing files": "Manglende filer",
- "{} try": "{} forsøg",
- "{} tries": "{} forsøg",
- "+ {num_bad_files} more": "+ {num_bad_files} mere",
-
- "This is my site": "Dette er min side",
- "Site title": "Side navn",
- "Site description": "Side beskrivelse",
- "Save site settings": "Gem side opsætning",
-
- "Content publishing": "Indhold offentliggøres",
- "Choose": "Vælg",
- "Sign": "Signer",
- "Publish": "Offentliggør",
-
- "This function is disabled on this proxy": "Denne funktion er slået fra på denne ZeroNet proxyEz a funkció ki van kapcsolva ezen a proxy-n",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "GeoLite2 City database kunne ikke downloades: {}! Download venligst databasen manuelt og udpak i data folder: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 város adatbázis letöltése (csak egyszer kell, kb 20MB)...",
- "GeoLite2 City database downloaded!": "GeoLite2 City database downloadet!",
-
- "Are you sure?": "Er du sikker?",
- "Site storage limit modified!": "Side max størrelse ændret!",
- "Database schema reloaded!": "Database definition genindlæst!",
- "Database rebuilding....": "Genopbygger database...",
- "Database rebuilt!": "Database genopbygget!",
- "Site updated!": "Side opdateret!",
- "Delete this site": "Slet denne side",
- "File write error: ": "Fejl ved skrivning af fil: ",
- "Site settings saved!": "Side opsætning gemt!",
- "Enter your private key:": "Indtast din private nøgle:",
- " Signed!": " Signeret!",
- "WebGL not supported": "WebGL er ikke supporteret"
-}
\ No newline at end of file
diff --git a/plugins/Sidebar/languages/de.json b/plugins/Sidebar/languages/de.json
deleted file mode 100644
index 2f5feacd..00000000
--- a/plugins/Sidebar/languages/de.json
+++ /dev/null
@@ -1,81 +0,0 @@
-{
- "Peers": "Peers",
- "Connected": "Verbunden",
- "Connectable": "Verbindbar",
- "Connectable peers": "Verbindbare Peers",
-
- "Data transfer": "Datei Transfer",
- "Received": "Empfangen",
- "Received bytes": "Empfangene Bytes",
- "Sent": "Gesendet",
- "Sent bytes": "Gesendete Bytes",
-
- "Files": "Dateien",
- "Total": "Gesamt",
- "Image": "Bilder",
- "Other": "Sonstiges",
- "User data": "Nutzer Daten",
-
- "Size limit": "Speicher Limit",
- "limit used": "Limit benutzt",
- "free space": "freier Speicher",
- "Set": "Setzten",
-
- "Optional files": "Optionale Dateien",
- "Downloaded": "Heruntergeladen",
- "Download and help distribute all files": "Herunterladen und helfen alle Dateien zu verteilen",
- "Total size": "Gesamte Größe",
- "Downloaded files": "Heruntergeladene Dateien",
-
- "Database": "Datenbank",
- "search feeds": "Feeds durchsuchen",
- "{feeds} query": "{feeds} Abfrage",
- "Reload": "Neu laden",
- "Rebuild": "Neu bauen",
- "No database found": "Keine Datenbank gefunden",
-
- "Identity address": "Identitäts Adresse",
- "Change": "Ändern",
-
- "Update": "Aktualisieren",
- "Pause": "Pausieren",
- "Resume": "Fortsetzen",
- "Delete": "Löschen",
- "Are you sure?": "Bist du sicher?",
-
- "Site address": "Seiten Adresse",
- "Donate": "Spenden",
-
- "Missing files": "Fehlende Dateien",
- "{} try": "{} versuch",
- "{} tries": "{} versuche",
- "+ {num_bad_files} more": "+ {num_bad_files} mehr",
-
- "This is my site": "Das ist meine Seite",
- "Site title": "Seiten Titel",
- "Site description": "Seiten Beschreibung",
- "Save site settings": "Einstellungen der Seite speichern",
-
- "Content publishing": "Inhaltsveröffentlichung",
- "Choose": "Wähle",
- "Sign": "Signieren",
- "Publish": "Veröffentlichen",
-
- "This function is disabled on this proxy": "Diese Funktion ist auf dieser Proxy deaktiviert",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "GeoLite2 City Datenbank Download Fehler: {}! Bitte manuell herunterladen und die Datei in das Datei Verzeichnis extrahieren: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "Herunterladen der GeoLite2 City Datenbank (einmalig, ~20MB)...",
- "GeoLite2 City database downloaded!": "GeoLite2 City Datenbank heruntergeladen!",
-
- "Are you sure?": "Bist du sicher?",
- "Site storage limit modified!": "Speicher Limit der Seite modifiziert!",
- "Database schema reloaded!": "Datebank Schema neu geladen!",
- "Database rebuilding....": "Datenbank neu bauen...",
- "Database rebuilt!": "Datenbank neu gebaut!",
- "Site updated!": "Seite aktualisiert!",
- "Delete this site": "Diese Seite löschen",
- "File write error: ": "Datei schreib fehler:",
- "Site settings saved!": "Seiten Einstellungen gespeichert!",
- "Enter your private key:": "Gib deinen privaten Schlüssel ein:",
- " Signed!": " Signiert!",
- "WebGL not supported": "WebGL nicht unterstützt"
-}
diff --git a/plugins/Sidebar/languages/es.json b/plugins/Sidebar/languages/es.json
deleted file mode 100644
index b9e98c46..00000000
--- a/plugins/Sidebar/languages/es.json
+++ /dev/null
@@ -1,79 +0,0 @@
-{
- "Peers": "Pares",
- "Connected": "Conectados",
- "Connectable": "Conectables",
- "Connectable peers": "Pares conectables",
-
- "Data transfer": "Transferencia de datos",
- "Received": "Recibidos",
- "Received bytes": "Bytes recibidos",
- "Sent": "Enviados",
- "Sent bytes": "Bytes envidados",
-
- "Files": "Ficheros",
- "Total": "Total",
- "Image": "Imagen",
- "Other": "Otro",
- "User data": "Datos del usuario",
-
- "Size limit": "Límite de tamaño",
- "limit used": "Límite utilizado",
- "free space": "Espacio libre",
- "Set": "Establecer",
-
- "Optional files": "Ficheros opcionales",
- "Downloaded": "Descargado",
- "Download and help distribute all files": "Descargar y ayudar a distribuir todos los ficheros",
- "Total size": "Tamaño total",
- "Downloaded files": "Ficheros descargados",
-
- "Database": "Base de datos",
- "search feeds": "Fuentes de búsqueda",
- "{feeds} query": "{feeds} consulta",
- "Reload": "Recargar",
- "Rebuild": "Reconstruir",
- "No database found": "No se ha encontrado la base de datos",
-
- "Identity address": "Dirección de la identidad",
- "Change": "Cambiar",
-
- "Update": "Actualizar",
- "Pause": "Pausar",
- "Resume": "Reanudar",
- "Delete": "Borrar",
-
- "Site address": "Dirección del sitio",
- "Donate": "Donar",
-
- "Missing files": "Ficheros perdidos",
- "{} try": "{} intento",
- "{} tries": "{} intentos",
- "+ {num_bad_files} more": "+ {num_bad_files} más",
-
- "This is my site": "Este es mi sitio",
- "Site title": "Título del sitio",
- "Site description": "Descripción del sitio",
- "Save site settings": "Guardar la configuración del sitio",
-
- "Content publishing": "Publicación del contenido",
- "Choose": "Elegir",
- "Sign": "Firmar",
- "Publish": "Publicar",
- "This function is disabled on this proxy": "Esta función está deshabilitada en este proxy",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "¡Error de la base de datos GeoLite2: {}! Por favor, descárgalo manualmente y descomprime al directorio de datos: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "Descargando la base de datos de GeoLite2 (una única vez, ~20MB)...",
- "GeoLite2 City database downloaded!": "¡Base de datos de GeoLite2 descargada!",
-
- "Are you sure?": "¿Estás seguro?",
- "Site storage limit modified!": "¡Límite de almacenamiento del sitio modificado!",
- "Database schema reloaded!": "¡Esquema de la base de datos recargado!",
- "Database rebuilding....": "Reconstruyendo la base de datos...",
- "Database rebuilt!": "¡Base de datos reconstruida!",
- "Site updated!": "¡Sitio actualizado!",
- "Delete this site": "Borrar este sitio",
- "File write error: ": "Error de escritura de fichero:",
- "Site settings saved!": "¡Configuración del sitio guardada!",
- "Enter your private key:": "Introduce tu clave privada:",
- " Signed!": " ¡firmado!",
- "WebGL not supported": "WebGL no está soportado"
-}
diff --git a/plugins/Sidebar/languages/fr.json b/plugins/Sidebar/languages/fr.json
deleted file mode 100644
index 5c4b3ac7..00000000
--- a/plugins/Sidebar/languages/fr.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
- "Peers": "Pairs",
- "Connected": "Connectés",
- "Connectable": "Accessibles",
- "Connectable peers": "Pairs accessibles",
-
- "Data transfer": "Données transférées",
- "Received": "Reçues",
- "Received bytes": "Bytes reçus",
- "Sent": "Envoyées",
- "Sent bytes": "Bytes envoyés",
-
- "Files": "Fichiers",
- "Total": "Total",
- "Image": "Image",
- "Other": "Autre",
- "User data": "Utilisateurs",
-
- "Size limit": "Taille maximale",
- "limit used": "utlisé",
- "free space": "libre",
- "Set": "Modifier",
-
- "Optional files": "Fichiers optionnels",
- "Downloaded": "Téléchargé",
- "Download and help distribute all files": "Télécharger et distribuer tous les fichiers",
- "Total size": "Taille totale",
- "Downloaded files": "Fichiers téléchargés",
-
- "Database": "Base de données",
- "search feeds": "recherche",
- "{feeds} query": "{feeds} requête",
- "Reload": "Recharger",
- "Rebuild": "Reconstruire",
- "No database found": "Aucune base de données trouvée",
-
- "Identity address": "Adresse d'identité",
- "Change": "Modifier",
-
- "Site control": "Opérations",
- "Update": "Mettre à jour",
- "Pause": "Suspendre",
- "Resume": "Reprendre",
- "Delete": "Supprimer",
- "Are you sure?": "Êtes-vous certain?",
-
- "Site address": "Adresse du site",
- "Donate": "Faire un don",
-
- "Missing files": "Fichiers manquants",
- "{} try": "{} essai",
- "{} tries": "{} essais",
- "+ {num_bad_files} more": "+ {num_bad_files} manquants",
-
- "This is my site": "Ce site m'appartient",
- "Site title": "Nom du site",
- "Site description": "Description du site",
- "Save site settings": "Enregistrer les paramètres",
-
- "Content publishing": "Publication du contenu",
- "Choose": "Sélectionner",
- "Sign": "Signer",
- "Publish": "Publier",
-
- "This function is disabled on this proxy": "Cette fonction est désactivé sur ce proxy",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "Erreur au téléchargement de la base de données GeoLite2: {}! Téléchargez et décompressez dans le dossier data: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "Téléchargement de la base de données GeoLite2 (une seule fois, ~20MB)...",
- "GeoLite2 City database downloaded!": "Base de données GeoLite2 téléchargée!",
-
- "Are you sure?": "Êtes-vous certain?",
- "Site storage limit modified!": "Taille maximale modifiée!",
- "Database schema reloaded!": "Base de données rechargée!",
- "Database rebuilding....": "Reconstruction de la base de données...",
- "Database rebuilt!": "Base de données reconstruite!",
- "Site updated!": "Site mis à jour!",
- "Delete this site": "Supprimer ce site",
- "File write error: ": "Erreur à l'écriture du fichier: ",
- "Site settings saved!": "Paramètres du site enregistrés!",
- "Enter your private key:": "Entrez votre clé privée:",
- " Signed!": " Signé!",
- "WebGL not supported": "WebGL n'est pas supporté"
-}
diff --git a/plugins/Sidebar/languages/hu.json b/plugins/Sidebar/languages/hu.json
deleted file mode 100644
index 21216825..00000000
--- a/plugins/Sidebar/languages/hu.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
- "Peers": "Csatlakozási pontok",
- "Connected": "Csaltakozva",
- "Connectable": "Csatlakozható",
- "Connectable peers": "Csatlakozható peer-ek",
-
- "Data transfer": "Adatátvitel",
- "Received": "Fogadott",
- "Received bytes": "Fogadott byte-ok",
- "Sent": "Küldött",
- "Sent bytes": "Küldött byte-ok",
-
- "Files": "Fájlok",
- "Total": "Összesen",
- "Image": "Kép",
- "Other": "Egyéb",
- "User data": "Felh. adat",
-
- "Size limit": "Méret korlát",
- "limit used": "felhasznált",
- "free space": "szabad hely",
- "Set": "Beállít",
-
- "Optional files": "Opcionális fájlok",
- "Downloaded": "Letöltött",
- "Download and help distribute all files": "Minden opcionális fájl letöltése",
- "Total size": "Teljes méret",
- "Downloaded files": "Letöltve",
-
- "Database": "Adatbázis",
- "search feeds": "Keresés források",
- "{feeds} query": "{feeds} lekérdezés",
- "Reload": "Újratöltés",
- "Rebuild": "Újraépítés",
- "No database found": "Adatbázis nem található",
-
- "Identity address": "Azonosító cím",
- "Change": "Módosít",
-
- "Site control": "Oldal műveletek",
- "Update": "Frissít",
- "Pause": "Szünteltet",
- "Resume": "Folytat",
- "Delete": "Töröl",
- "Are you sure?": "Biztos vagy benne?",
-
- "Site address": "Oldal címe",
- "Donate": "Támogatás",
-
- "Missing files": "Hiányzó fájlok",
- "{} try": "{} próbálkozás",
- "{} tries": "{} próbálkozás",
- "+ {num_bad_files} more": "+ még {num_bad_files} darab",
-
- "This is my site": "Ez az én oldalam",
- "Site title": "Oldal neve",
- "Site description": "Oldal leírása",
- "Save site settings": "Oldal beállítások mentése",
-
- "Content publishing": "Tartalom publikálás",
- "Choose": "Válassz",
- "Sign": "Aláírás",
- "Publish": "Publikálás",
-
- "This function is disabled on this proxy": "Ez a funkció ki van kapcsolva ezen a proxy-n",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "GeoLite2 város adatbázis letöltési hiba: {}! A térképhez töltsd le és csomagold ki a data könyvtárba: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 város adatbázis letöltése (csak egyszer kell, kb 20MB)...",
- "GeoLite2 City database downloaded!": "GeoLite2 város adatbázis letöltve!",
-
- "Are you sure?": "Biztos vagy benne?",
- "Site storage limit modified!": "Az oldalt méret korlát módosítva!",
- "Database schema reloaded!": "Adatbázis séma újratöltve!",
- "Database rebuilding....": "Adatbázis újraépítés...",
- "Database rebuilt!": "Adatbázis újraépítve!",
- "Site updated!": "Az oldal frissítve!",
- "Delete this site": "Az oldal törlése",
- "File write error: ": "Fájl írási hiba: ",
- "Site settings saved!": "Az oldal beállításai elmentve!",
- "Enter your private key:": "Add meg a privát kulcsod:",
- " Signed!": " Aláírva!",
- "WebGL not supported": "WebGL nem támogatott"
-}
diff --git a/plugins/Sidebar/languages/it.json b/plugins/Sidebar/languages/it.json
deleted file mode 100644
index 6aa0969a..00000000
--- a/plugins/Sidebar/languages/it.json
+++ /dev/null
@@ -1,81 +0,0 @@
-{
- "Peers": "Peer",
- "Connected": "Connessi",
- "Connectable": "Collegabili",
- "Connectable peers": "Peer collegabili",
-
- "Data transfer": "Trasferimento dati",
- "Received": "Ricevuti",
- "Received bytes": "Byte ricevuti",
- "Sent": "Inviati",
- "Sent bytes": "Byte inviati",
-
- "Files": "File",
- "Total": "Totale",
- "Image": "Imagine",
- "Other": "Altro",
- "User data": "Dati utente",
-
- "Size limit": "Limite dimensione",
- "limit used": "limite usato",
- "free space": "spazio libero",
- "Set": "Imposta",
-
- "Optional files": "File facoltativi",
- "Downloaded": "Scaricati",
- "Download and help distribute all files": "Scarica e aiuta a distribuire tutti i file",
- "Total size": "Dimensione totale",
- "Downloaded files": "File scaricati",
-
- "Database": "Database",
- "search feeds": "ricerca di feed",
- "{feeds} query": "{feeds} interrogazione",
- "Reload": "Ricaricare",
- "Rebuild": "Ricostruire",
- "No database found": "Nessun database trovato",
-
- "Identity address": "Indirizzo di identità",
- "Change": "Cambia",
-
- "Update": "Aggiorna",
- "Pause": "Sospendi",
- "Resume": "Riprendi",
- "Delete": "Cancella",
- "Are you sure?": "Sei sicuro?",
-
- "Site address": "Indirizzo sito",
- "Donate": "Dona",
-
- "Missing files": "File mancanti",
- "{} try": "{} tenta",
- "{} tries": "{} prova",
- "+ {num_bad_files} more": "+ {num_bad_files} altri",
-
- "This is my site": "Questo è il mio sito",
- "Site title": "Titolo sito",
- "Site description": "Descrizione sito",
- "Save site settings": "Salva impostazioni sito",
-
- "Content publishing": "Pubblicazione contenuto",
- "Choose": "Scegli",
- "Sign": "Firma",
- "Publish": "Pubblica",
-
- "This function is disabled on this proxy": "Questa funzione è disabilitata su questo proxy",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "Errore scaricamento database GeoLite2 City: {}! Si prega di scaricarlo manualmente e spacchetarlo nella cartella dir: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "Scaricamento database GeoLite2 City (solo una volta, ~20MB)...",
- "GeoLite2 City database downloaded!": "Database GeoLite2 City scaricato!",
-
- "Are you sure?": "Sei sicuro?",
- "Site storage limit modified!": "Limite di archiviazione del sito modificato!",
- "Database schema reloaded!": "Schema database ricaricato!",
- "Database rebuilding....": "Ricostruzione database...",
- "Database rebuilt!": "Database ricostruito!",
- "Site updated!": "Sito aggiornato!",
- "Delete this site": "Cancella questo sito",
- "File write error: ": "Errore scrittura file:",
- "Site settings saved!": "Impostazioni sito salvate!",
- "Enter your private key:": "Inserisci la tua chiave privata:",
- " Signed!": " Firmato!",
- "WebGL not supported": "WebGL non supportato"
-}
diff --git a/plugins/Sidebar/languages/jp.json b/plugins/Sidebar/languages/jp.json
deleted file mode 100644
index 38bbd420..00000000
--- a/plugins/Sidebar/languages/jp.json
+++ /dev/null
@@ -1,104 +0,0 @@
-{
- "Copy to clipboard": "クリップボードにコピー",
- "Peers": "ピア",
- "Connected": "接続済み",
- "Connectable": "利用可能",
- "Connectable peers": "ピアに接続可能",
- "Onion": "Onion",
- "Local": "ローカル",
-
- "Data transfer": "データ転送",
- "Received": "受信",
- "Received bytes": "受信バイト数",
- "Sent": "送信",
- "Sent bytes": "送信バイト数",
-
- "Files": "ファイル",
- "Browse files": "ファイルを見る",
- "Save as .zip": "ZIP形式で保存",
- "Total": "合計",
- "Image": "画像",
- "Other": "その他",
- "User data": "ユーザーデータ",
-
- "Size limit": "サイズ制限",
- "limit used": "使用上限",
- "free space": "フリースペース",
- "Set": "セット",
-
- "Optional files": "オプション ファイル",
- "Downloaded": "ダウンロード済み",
- "Help distribute added optional files": "オプションファイルの配布を支援する",
- "Auto download big file size limit": "大きなファイルの自動ダウンロードのサイズ制限",
- "Download previous files": "以前のファイルのダウンロード",
- "Optional files download started": "オプションファイルのダウンロードを開始",
- "Optional files downloaded": "オプションファイルのダウンロードが完了しました",
- "Download and help distribute all files": "ダウンロードしてすべてのファイルの配布を支援する",
- "Total size": "合計サイズ",
- "Downloaded files": "ダウンロードされたファイル",
-
- "Database": "データベース",
- "search feeds": "フィードを検索する",
- "{feeds} query": "{feeds} お問い合わせ",
- "Reload": "再読込",
- "Rebuild": "再ビルド",
- "No database found": "データベースが見つかりません",
-
- "Identity address": "あなたの識別アドレス",
- "Change": "編集",
-
- "Site control": "サイト管理",
- "Update": "更新",
- "Pause": "一時停止",
- "Resume": "再開",
- "Delete": "削除",
- "Are you sure?": "本当によろしいですか?",
-
- "Site address": "サイトアドレス",
- "Donate": "寄付する",
-
- "Missing files": "ファイルがありません",
- "{} try": "{} 試す",
- "{} tries": "{} 試行",
- "+ {num_bad_files} more": "+ {num_bad_files} more",
-
- "This is my site": "これは私のサイトです",
- "Site title": "サイトタイトル",
- "Site description": "サイトの説明",
- "Save site settings": "サイトの設定を保存する",
- "Open site directory": "サイトのディレクトリを開く",
-
- "Content publishing": "コンテンツを公開する",
- "Add saved private key": "秘密鍵の追加と保存",
- "Save": "保存",
- "Private key saved.": "秘密鍵が保存されています",
- "Private key saved for site signing": "サイトに署名するための秘密鍵を保存",
- "Forgot": "わすれる",
- "Saved private key removed": "保存された秘密鍵を削除しました",
- "Choose": "選択",
- "Sign": "署名",
- "Publish": "公開する",
- "Sign and publish": "署名して公開",
-
- "This function is disabled on this proxy": "この機能はこのプロキシで無効になっています",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "GeoLite2 Cityデータベースのダウンロードエラー: {}! 手動でダウンロードして、フォルダに解凍してください。: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Cityデータベースの読み込み (これは一度だけ行われます, ~20MB)...",
- "GeoLite2 City database downloaded!": "GeoLite2 Cityデータベースがダウンロードされました!",
-
- "Are you sure?": "本当によろしいですか?",
- "Site storage limit modified!": "サイトの保存容量の制限が変更されました!",
- "Database schema reloaded!": "データベーススキーマがリロードされました!",
- "Database rebuilding....": "データベースの再構築中....",
- "Database rebuilt!": "データベースが再構築されました!",
- "Site updated!": "サイトが更新されました!",
- "Delete this site": "このサイトを削除する",
- "Blacklist": "NG",
- "Blacklist this site": "NGリストに入れる",
- "Reason": "理由",
- "Delete and Blacklist": "削除してNG",
- "File write error: ": "ファイル書き込みエラー:",
- "Site settings saved!": "サイト設定が保存されました!",
- "Enter your private key:": "秘密鍵を入力してください:",
- " Signed!": " 署名しました!",
- "WebGL not supported": "WebGLはサポートされていません"
-}
diff --git a/plugins/Sidebar/languages/pl.json b/plugins/Sidebar/languages/pl.json
deleted file mode 100644
index 93268507..00000000
--- a/plugins/Sidebar/languages/pl.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
- "Peers": "Użytkownicy równorzędni",
- "Connected": "Połączony",
- "Connectable": "Możliwy do podłączenia",
- "Connectable peers": "Połączeni użytkownicy równorzędni",
-
- "Data transfer": "Transfer danych",
- "Received": "Odebrane",
- "Received bytes": "Odebrany bajty",
- "Sent": "Wysłane",
- "Sent bytes": "Wysłane bajty",
-
- "Files": "Pliki",
- "Total": "Sumarycznie",
- "Image": "Obraz",
- "Other": "Inne",
- "User data": "Dane użytkownika",
-
- "Size limit": "Rozmiar limitu",
- "limit used": "zużyty limit",
- "free space": "wolna przestrzeń",
- "Set": "Ustaw",
-
- "Optional files": "Pliki opcjonalne",
- "Downloaded": "Ściągnięte",
- "Download and help distribute all files": "Ściągnij i pomóż rozpowszechniać wszystkie pliki",
- "Total size": "Rozmiar sumaryczny",
- "Downloaded files": "Ściągnięte pliki",
-
- "Database": "Baza danych",
- "search feeds": "przeszukaj zasoby",
- "{feeds} query": "{feeds} pytanie",
- "Reload": "Odśwież",
- "Rebuild": "Odbuduj",
- "No database found": "Nie odnaleziono bazy danych",
-
- "Identity address": "Adres identyfikacyjny",
- "Change": "Zmień",
-
- "Site control": "Kontrola strony",
- "Update": "Zaktualizuj",
- "Pause": "Wstrzymaj",
- "Resume": "Wznów",
- "Delete": "Skasuj",
- "Are you sure?": "Jesteś pewien?",
-
- "Site address": "Adres strony",
- "Donate": "Wspomóż",
-
- "Missing files": "Brakujące pliki",
- "{} try": "{} próba",
- "{} tries": "{} próby",
- "+ {num_bad_files} more": "+ {num_bad_files} więcej",
-
- "This is my site": "To moja strona",
- "Site title": "Tytuł strony",
- "Site description": "Opis strony",
- "Save site settings": "Zapisz ustawienia strony",
-
- "Content publishing": "Publikowanie treści",
- "Choose": "Wybierz",
- "Sign": "Podpisz",
- "Publish": "Opublikuj",
-
- "This function is disabled on this proxy": "Ta funkcja jest zablokowana w tym proxy",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "Błąd ściągania bazy danych GeoLite2 City: {}! Proszę ściągnąć ją recznie i wypakować do katalogu danych: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "Ściąganie bazy danych GeoLite2 City (tylko jednorazowo, ok. 20MB)...",
- "GeoLite2 City database downloaded!": "Baza danych GeoLite2 City ściagnięta!",
-
- "Are you sure?": "Jesteś pewien?",
- "Site storage limit modified!": "Limit pamięci strony zmodyfikowany!",
- "Database schema reloaded!": "Schemat bazy danych załadowany ponownie!",
- "Database rebuilding....": "Przebudowywanie bazy danych...",
- "Database rebuilt!": "Baza danych przebudowana!",
- "Site updated!": "Strona zaktualizowana!",
- "Delete this site": "Usuń tę stronę",
- "File write error: ": "Błąd zapisu pliku: ",
- "Site settings saved!": "Ustawienia strony zapisane!",
- "Enter your private key:": "Wpisz swój prywatny klucz:",
- " Signed!": " Podpisane!",
- "WebGL not supported": "WebGL nie jest obsługiwany"
-}
diff --git a/plugins/Sidebar/languages/pt-br.json b/plugins/Sidebar/languages/pt-br.json
deleted file mode 100644
index d5659171..00000000
--- a/plugins/Sidebar/languages/pt-br.json
+++ /dev/null
@@ -1,97 +0,0 @@
-{
- "Copy to clipboard": "Copiar para área de transferência (clipboard)",
- "Peers": "Peers",
- "Connected": "Ligados",
- "Connectable": "Disponíveis",
- "Onion": "Onion",
- "Local": "Locais",
- "Connectable peers": "Peers disponíveis",
-
- "Data transfer": "Transferência de dados",
- "Received": "Recebidos",
- "Received bytes": "Bytes recebidos",
- "Sent": "Enviados",
- "Sent bytes": "Bytes enviados",
-
- "Files": "Arquivos",
- "Save as .zip": "Salvar como .zip",
- "Total": "Total",
- "Image": "Imagem",
- "Other": "Outros",
- "User data": "Dados do usuário",
-
- "Size limit": "Limite de tamanho",
- "limit used": "limite utilizado",
- "free space": "espaço livre",
- "Set": "Definir",
-
- "Optional files": "Arquivos opcionais",
- "Downloaded": "Baixados",
- "Download and help distribute all files": "Baixar e ajudar a distribuir todos os arquivos",
- "Total size": "Tamanho total",
- "Downloaded files": "Arquivos baixados",
-
- "Database": "Banco de dados",
- "search feeds": "pesquisar feeds",
- "{feeds} query": "consulta de {feeds}",
- "Reload": "Recarregar",
- "Rebuild": "Reconstruir",
- "No database found": "Base de dados não encontrada",
-
- "Identity address": "Endereço de identidade",
- "Change": "Alterar",
-
- "Site control": "Controle do site",
- "Update": "Atualizar",
- "Pause": "Suspender",
- "Resume": "Continuar",
- "Delete": "Remover",
- "Are you sure?": "Tem certeza?",
-
- "Site address": "Endereço do site",
- "Donate": "Doar",
-
- "Needs to be updated": "Necessitam ser atualizados",
- "{} try": "{} tentativa",
- "{} tries": "{} tentativas",
- "+ {num_bad_files} more": "+ {num_bad_files} adicionais",
-
- "This is my site": "Este é o meu site",
- "Site title": "Título do site",
- "Site description": "Descrição do site",
- "Save site settings": "Salvar definições do site",
- "Open site directory": "Abrir diretório do site",
-
- "Content publishing": "Publicação do conteúdo",
- "Choose": "Escolher",
- "Sign": "Assinar",
- "Publish": "Publicar",
- "Sign and publish": "Assinar e publicar",
- "add saved private key": "adicionar privatekey (chave privada) para salvar",
- "Private key saved for site signing": "Privatekey foi salva para assinar o site",
- "Private key saved.": "Privatekey salva.",
- "forgot": "esquecer",
- "Saved private key removed": "Privatekey salva foi removida",
- "This function is disabled on this proxy": "Esta função encontra-se desativada neste proxy",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "Erro ao baixar a base de dados GeoLite2 City: {}! Por favor baixe manualmente e descompacte os dados para a seguinte pasta: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "Baixando a base de dados GeoLite2 City (uma única vez, ~20MB)...",
- "GeoLite2 City database downloaded!": "A base de dados GeoLite2 City foi baixada!",
-
- "Are you sure?": "Tem certeza?",
- "Site storage limit modified!": "O limite de armazenamento do site foi modificado!",
- "Database schema reloaded!": "O esquema da base de dados foi atualizado!",
- "Database rebuilding....": "Reconstruindo base de dados...",
- "Database rebuilt!": "Base de dados reconstruída!",
- "Site updated!": "Site atualizado!",
- "Delete this site": "Remover este site",
- "Blacklist": "Blacklist",
- "Blacklist this site": "Blacklistar este site",
- "Reason": "Motivo",
- "Delete and Blacklist": "Deletar e blacklistar",
- "File write error: ": "Erro de escrita de arquivo: ",
- "Site settings saved!": "Definições do site salvas!",
- "Enter your private key:": "Digite sua chave privada:",
- " Signed!": " Assinado!",
- "WebGL not supported": "WebGL não é suportado",
- "Save as .zip": "Salvar como .zip"
-}
diff --git a/plugins/Sidebar/languages/ru.json b/plugins/Sidebar/languages/ru.json
deleted file mode 100644
index f2eeca04..00000000
--- a/plugins/Sidebar/languages/ru.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
- "Peers": "Пиры",
- "Connected": "Подключенные",
- "Connectable": "Доступные",
- "Connectable peers": "Пиры доступны для подключения",
-
- "Data transfer": "Передача данных",
- "Received": "Получено",
- "Received bytes": "Получено байн",
- "Sent": "Отправлено",
- "Sent bytes": "Отправлено байт",
-
- "Files": "Файлы",
- "Total": "Всего",
- "Image": "Изображений",
- "Other": "Другое",
- "User data": "Ваш контент",
-
- "Size limit": "Ограничение по размеру",
- "limit used": "Использовано",
- "free space": "Доступно",
- "Set": "Установить",
-
- "Optional files": "Опциональные файлы",
- "Downloaded": "Загружено",
- "Download and help distribute all files": "Загрузить опциональные файлы для помощи сайту",
- "Total size": "Объём",
- "Downloaded files": "Загруженные файлы",
-
- "Database": "База данных",
- "search feeds": "поиск подписок",
- "{feeds} query": "{feeds} запрос",
- "Reload": "Перезагрузить",
- "Rebuild": "Перестроить",
- "No database found": "База данных не найдена",
-
- "Identity address": "Уникальный адрес",
- "Change": "Изменить",
-
- "Site control": "Управление сайтом",
- "Update": "Обновить",
- "Pause": "Пауза",
- "Resume": "Продолжить",
- "Delete": "Удалить",
- "Are you sure?": "Вы уверены?",
-
- "Site address": "Адрес сайта",
- "Donate": "Пожертвовать",
-
- "Missing files": "Отсутствующие файлы",
- "{} try": "{} попробовать",
- "{} tries": "{} попыток",
- "+ {num_bad_files} more": "+ {num_bad_files} ещё",
-
- "This is my site": "Это мой сайт",
- "Site title": "Название сайта",
- "Site description": "Описание сайта",
- "Save site settings": "Сохранить настройки сайта",
-
- "Content publishing": "Публикация контента",
- "Choose": "Выбрать",
- "Sign": "Подписать",
- "Publish": "Опубликовать",
-
- "This function is disabled on this proxy": "Эта функция отключена на этом прокси",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "Ошибка загрузки базы городов GeoLite2: {}! Пожалуйста, загрузите её вручную и распакуйте в папку: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "Загрузка базы городов GeoLite2 (это делается только 1 раз, ~20MB)...",
- "GeoLite2 City database downloaded!": "База GeoLite2 успешно загружена!",
-
- "Are you sure?": "Вы уверены?",
- "Site storage limit modified!": "Лимит хранилища для сайта изменен!",
- "Database schema reloaded!": "Схема базы данных перезагружена!",
- "Database rebuilding....": "Перестройка базы данных...",
- "Database rebuilt!": "База данных перестроена!",
- "Site updated!": "Сайт обновлён!",
- "Delete this site": "Удалить этот сайт",
- "File write error: ": "Ошибка записи файла:",
- "Site settings saved!": "Настройки сайта сохранены!",
- "Enter your private key:": "Введите свой приватный ключ:",
- " Signed!": " Подписано!",
- "WebGL not supported": "WebGL не поддерживается"
-}
diff --git a/plugins/Sidebar/languages/tr.json b/plugins/Sidebar/languages/tr.json
deleted file mode 100644
index 88fcd6e0..00000000
--- a/plugins/Sidebar/languages/tr.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
- "Peers": "Eşler",
- "Connected": "Bağlı",
- "Connectable": "Erişilebilir",
- "Connectable peers": "Bağlanılabilir eşler",
-
- "Data transfer": "Veri aktarımı",
- "Received": "Alınan",
- "Received bytes": "Bayt alındı",
- "Sent": "Gönderilen",
- "Sent bytes": "Bayt gönderildi",
-
- "Files": "Dosyalar",
- "Total": "Toplam",
- "Image": "Resim",
- "Other": "Diğer",
- "User data": "Kullanıcı verisi",
-
- "Size limit": "Boyut sınırı",
- "limit used": "kullanılan",
- "free space": "boş",
- "Set": "Ayarla",
-
- "Optional files": "İsteğe bağlı dosyalar",
- "Downloaded": "İndirilen",
- "Download and help distribute all files": "Tüm dosyaları indir ve yayılmalarına yardım et",
- "Total size": "Toplam boyut",
- "Downloaded files": "İndirilen dosyalar",
-
- "Database": "Veritabanı",
- "search feeds": "kaynak ara",
- "{feeds} query": "{feeds} sorgu",
- "Reload": "Yenile",
- "Rebuild": "Yapılandır",
- "No database found": "Veritabanı yok",
-
- "Identity address": "Kimlik adresi",
- "Change": "Değiştir",
-
- "Site control": "Site kontrolü",
- "Update": "Güncelle",
- "Pause": "Duraklat",
- "Resume": "Sürdür",
- "Delete": "Sil",
- "Are you sure?": "Emin misin?",
-
- "Site address": "Site adresi",
- "Donate": "Bağış yap",
-
- "Missing files": "Eksik dosyalar",
- "{} try": "{} deneme",
- "{} tries": "{} deneme",
- "+ {num_bad_files} more": "+ {num_bad_files} tane daha",
-
- "This is my site": "Bu benim sitem",
- "Site title": "Site başlığı",
- "Site description": "Site açıklaması",
- "Save site settings": "Site ayarlarını kaydet",
-
- "Content publishing": "İçerik yayımlanıyor",
- "Choose": "Seç",
- "Sign": "İmzala",
- "Publish": "Yayımla",
-
- "This function is disabled on this proxy": "Bu özellik bu vekilde kullanılamaz",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "GeoLite2 Şehir veritabanı indirme hatası: {}! Lütfen kendiniz indirip aşağıdaki konuma açınınız: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Şehir veritabanı indiriliyor (sadece bir kere, ~20MB)...",
- "GeoLite2 City database downloaded!": "GeoLite2 Şehir veritabanı indirildi!",
-
- "Are you sure?": "Emin misiniz?",
- "Site storage limit modified!": "Site saklama sınırı değiştirildi!",
- "Database schema reloaded!": "Veritabanı şeması yeniden yüklendi!",
- "Database rebuilding....": "Veritabanı yeniden inşa ediliyor...",
- "Database rebuilt!": "Veritabanı yeniden inşa edildi!",
- "Site updated!": "Site güncellendi!",
- "Delete this site": "Bu siteyi sil",
- "File write error: ": "Dosya yazma hatası: ",
- "Site settings saved!": "Site ayarları kaydedildi!",
- "Enter your private key:": "Özel anahtarınızı giriniz:",
- " Signed!": " İmzala!",
- "WebGL not supported": "WebGL desteklenmiyor"
-}
diff --git a/plugins/Sidebar/languages/zh-tw.json b/plugins/Sidebar/languages/zh-tw.json
deleted file mode 100644
index 9d4ea1be..00000000
--- a/plugins/Sidebar/languages/zh-tw.json
+++ /dev/null
@@ -1,83 +0,0 @@
-{
- "Peers": "節點數",
- "Connected": "已連線",
- "Connectable": "可連線",
- "Connectable peers": "可連線節點",
-
- "Data transfer": "數據傳輸",
- "Received": "已接收",
- "Received bytes": "已接收位元組",
- "Sent": "已傳送",
- "Sent bytes": "已傳送位元組",
-
- "Files": "檔案",
- "Total": "共計",
- "Image": "圖片",
- "Other": "其他",
- "User data": "使用者數據",
-
- "Size limit": "大小限制",
- "limit used": "已使用",
- "free space": "可用空間",
- "Set": "偏好設定",
-
- "Optional files": "可選檔案",
- "Downloaded": "已下載",
- "Download and help distribute all files": "下載並幫助分發所有檔案",
- "Total size": "總大小",
- "Downloaded files": "下載的檔案",
-
- "Database": "資料庫",
- "search feeds": "搜尋供稿",
- "{feeds} query": "{feeds} 查詢 ",
- "Reload": "重新整理",
- "Rebuild": "重建",
- "No database found": "未找到資料庫",
-
- "Identity address": "身分位址",
- "Change": "變更",
-
- "Site control": "網站控制",
- "Update": "更新",
- "Pause": "暫停",
- "Resume": "恢復",
- "Delete": "刪除",
- "Are you sure?": "你確定?",
-
- "Site address": "網站位址",
- "Donate": "捐贈",
-
- "Missing files": "缺少的檔案",
- "{} try": "{} 嘗試",
- "{} tries": "{} 已嘗試",
- "+ {num_bad_files} more": "+ {num_bad_files} 更多",
-
- "This is my site": "這是我的網站",
- "Site title": "網站標題",
- "Site description": "網站描述",
- "Save site settings": "存儲網站設定",
- "Open site directory": "打開所在資料夾",
-
- "Content publishing": "內容發布",
- "Choose": "選擇",
- "Sign": "簽署",
- "Publish": "發布",
- "Sign and publish": "簽名並發布",
- "This function is disabled on this proxy": "此代理上禁用此功能",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "GeoLite2 地理位置資料庫下載錯誤:{}! 請手動下載並解壓到數據目錄: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "正在下載 GeoLite2 地理位置資料庫 (僅一次,約 20MB )...",
- "GeoLite2 City database downloaded!": "GeoLite2 地理位置資料庫已下載!",
-
- "Are you sure?": "你確定?",
- "Site storage limit modified!": "網站存儲限制已變更!",
- "Database schema reloaded!": "資料庫架構重新加載!",
- "Database rebuilding....": "資料庫重建中...",
- "Database rebuilt!": "資料庫已重建!",
- "Site updated!": "網站已更新!",
- "Delete this site": "刪除此網站",
- "File write error: ": "檔案寫入錯誤:",
- "Site settings saved!": "網站設置已保存!",
- "Enter your private key:": "輸入您的私鑰:",
- " Signed!": " 已簽署!",
- "WebGL not supported": "不支援 WebGL"
-}
diff --git a/plugins/Sidebar/languages/zh.json b/plugins/Sidebar/languages/zh.json
deleted file mode 100644
index 639ac7f6..00000000
--- a/plugins/Sidebar/languages/zh.json
+++ /dev/null
@@ -1,101 +0,0 @@
-{
- "Copy to clipboard": "复制到剪切板",
- "Peers": "节点数",
- "Connected": "已连接",
- "Connectable": "可连接",
- "Onion": "洋葱点",
- "Local": "局域网",
- "Connectable peers": "可连接节点",
-
- "Data transfer": "数据传输",
- "Received": "已接收",
- "Received bytes": "已接收字节",
- "Sent": "已发送",
- "Sent bytes": "已发送字节",
-
- "Files": "文件",
- "Save as .zip": "打包成zip文件",
- "Total": "总计",
- "Image": "图像",
- "Other": "其他",
- "User data": "用户数据",
-
- "Size limit": "大小限制",
- "limit used": "限额",
- "free space": "剩余空间",
- "Set": "设置",
-
- "Optional files": "可选文件",
- "Downloaded": "已下载",
- "Help distribute added optional files": "帮助分发新的可选文件",
- "Auto download big file size limit": "自动下载大文件大小限制",
- "Download previous files": "下载之前的文件",
- "Optional files download started": "可选文件下载启动",
- "Optional files downloaded": "可选文件下载完成",
- "Total size": "总大小",
- "Downloaded files": "已下载文件",
-
- "Database": "数据库",
- "search feeds": "搜索数据源",
- "{feeds} query": "{feeds} 请求",
- "Reload": "重载",
- "Rebuild": "重建",
- "No database found": "没有找到数据库",
-
- "Identity address": "身份地址",
- "Change": "更改",
-
- "Site control": "站点控制",
- "Update": "更新",
- "Pause": "暂停",
- "Resume": "恢复",
- "Delete": "删除",
- "Are you sure?": "您确定吗?",
-
- "Site address": "站点地址",
- "Donate": "捐赠",
-
- "Needs to be updated": "需要更新",
- "{} try": "{} 尝试",
- "{} tries": "{} 已尝试",
- "+ {num_bad_files} more": "+ {num_bad_files} 更多",
-
- "This is my site": "这是我的站点",
- "Site title": "站点标题",
- "Site description": "站点描述",
- "Save site settings": "保存站点设置",
- "Open site directory": "打开所在文件夹",
-
- "Content publishing": "内容发布",
- "Add saved private key": "添加并保存私钥",
- "Save": "保存",
- "Private key saved.": "私钥已保存",
- "Private key saved for site signing": "已保存用于站点签名的私钥",
- "Forgot": "删除私钥",
- "Saved private key removed": "保存的私钥已删除",
- "Choose": "选择",
- "Sign": "签名",
- "Publish": "发布",
- "Sign and publish": "签名并发布",
- "This function is disabled on this proxy": "此功能在代理上被禁用",
- "GeoLite2 City database download error: {}! Please download manually and unpack to data dir: {}": "GeoLite2 地理位置数据库下载错误:{}! 请手动下载并解压在数据目录: {}",
- "Downloading GeoLite2 City database (one time only, ~20MB)...": "正在下载 GeoLite2 地理位置数据库 (仅需一次,约 20MB )...",
- "GeoLite2 City database downloaded!": "GeoLite2 地理位置数据库已下载!",
-
- "Are you sure?": "您确定吗?",
- "Site storage limit modified!": "站点存储限制已更改!",
- "Database schema reloaded!": "数据库模式已重新加载!",
- "Database rebuilding....": "数据库重建中...",
- "Database rebuilt!": "数据库已重建!",
- "Site updated!": "站点已更新!",
- "Delete this site": "删除此站点",
- "Blacklist": "黑名单",
- "Blacklist this site": "拉黑此站点",
- "Reason": "原因",
- "Delete and Blacklist": "删除并拉黑",
- "File write error: ": "文件写入错误:",
- "Site settings saved!": "站点设置已保存!",
- "Enter your private key:": "输入您的私钥:",
- " Signed!": " 已签名!",
- "WebGL not supported": "不支持 WebGL"
-}
diff --git a/plugins/Sidebar/media/Class.coffee b/plugins/Sidebar/media/Class.coffee
deleted file mode 100644
index d62ab25c..00000000
--- a/plugins/Sidebar/media/Class.coffee
+++ /dev/null
@@ -1,23 +0,0 @@
-class Class
- trace: true
-
- log: (args...) ->
- return unless @trace
- return if typeof console is 'undefined'
- args.unshift("[#{@.constructor.name}]")
- console.log(args...)
- @
-
- logStart: (name, args...) ->
- return unless @trace
- @logtimers or= {}
- @logtimers[name] = +(new Date)
- @log "#{name}", args..., "(started)" if args.length > 0
- @
-
- logEnd: (name, args...) ->
- ms = +(new Date)-@logtimers[name]
- @log "#{name}", args..., "(Done in #{ms}ms)"
- @
-
-window.Class = Class
\ No newline at end of file
diff --git a/plugins/Sidebar/media/Console.coffee b/plugins/Sidebar/media/Console.coffee
deleted file mode 100644
index d5a83346..00000000
--- a/plugins/Sidebar/media/Console.coffee
+++ /dev/null
@@ -1,201 +0,0 @@
-class Console extends Class
- constructor: (@sidebar) ->
- @tag = null
- @opened = false
- @filter = null
- @tab_types = [
- {title: "All", filter: ""},
- {title: "Info", filter: "INFO"},
- {title: "Warning", filter: "WARNING"},
- {title: "Error", filter: "ERROR"}
- ]
- @read_size = 32 * 1024
- @tab_active = ""
- #@filter = @sidebar.wrapper.site_info.address_short
- handleMessageWebsocket_original = @sidebar.wrapper.handleMessageWebsocket
- @sidebar.wrapper.handleMessageWebsocket = (message) =>
- if message.cmd == "logLineAdd" and message.params.stream_id == @stream_id
- @addLines(message.params.lines)
- else
- handleMessageWebsocket_original(message)
-
- $(window).on "hashchange", =>
- if window.top.location.hash.startsWith("#ZeroNet:Console")
- @open()
-
- if window.top.location.hash.startsWith("#ZeroNet:Console")
- setTimeout (=> @open()), 10
-
- createHtmltag: ->
- if not @container
- @container = $("""
-
-
- """)
- @text = @container.find(".console-text")
- @text_elem = @text[0]
- @tabs = @container.find(".console-tabs")
-
- @text.on "mousewheel", (e) => # Stop animation on manual scrolling
- if e.originalEvent.deltaY < 0
- @text.stop()
- RateLimit 300, @checkTextIsBottom
-
- @text.is_bottom = true
-
- @container.appendTo(document.body)
- @tag = @container.find(".console")
- for tab_type in @tab_types
- tab = $(" ", {href: "#", "data-filter": tab_type.filter, "data-title": tab_type.title}).text(tab_type.title)
- if tab_type.filter == @tab_active
- tab.addClass("active")
- tab.on("click", @handleTabClick)
- if window.top.location.hash.endsWith(tab_type.title)
- @log "Triggering click on", tab
- tab.trigger("click")
- @tabs.append(tab)
-
- @container.on "mousedown touchend touchcancel", (e) =>
- if e.target != e.currentTarget
- return true
- @log "closing"
- if $(document.body).hasClass("body-console")
- @close()
- return true
-
- @loadConsoleText()
-
- checkTextIsBottom: =>
- @text.is_bottom = Math.round(@text_elem.scrollTop + @text_elem.clientHeight) >= @text_elem.scrollHeight - 15
-
- toColor: (text, saturation=60, lightness=70) ->
- hash = 0
- for i in [0..text.length-1]
- hash += text.charCodeAt(i)*i
- hash = hash % 1777
- return "hsl(" + (hash % 360) + ",#{saturation}%,#{lightness}%)";
-
- formatLine: (line) =>
- match = line.match(/(\[.*?\])[ ]+(.*?)[ ]+(.*?)[ ]+(.*)/)
- if not match
- return line.replace(/\/g, ">")
-
- [line, added, level, module, text] = line.match(/(\[.*?\])[ ]+(.*?)[ ]+(.*?)[ ]+(.*)/)
- added = "#{added} "
- level = "#{level} "
- module = "#{module} "
-
- text = text.replace(/(Site:[A-Za-z0-9\.]+)/g, "$1 ")
- text = text.replace(/\/g, ">")
- #text = text.replace(/( [0-9\.]+(|s|ms))/g, "$1 ")
- return "#{added} #{level} #{module} #{text}"
-
-
- addLines: (lines, animate=true) =>
- html_lines = []
- @logStart "formatting"
- for line in lines
- html_lines.push @formatLine(line)
- @logEnd "formatting"
- @logStart "adding"
- @text.append(html_lines.join(" ") + " ")
- @logEnd "adding"
- if @text.is_bottom and animate
- @text.stop().animate({scrollTop: @text_elem.scrollHeight - @text_elem.clientHeight + 1}, 600, 'easeInOutCubic')
-
-
- loadConsoleText: =>
- @sidebar.wrapper.ws.cmd "consoleLogRead", {filter: @filter, read_size: @read_size}, (res) =>
- @text.html("")
- pos_diff = res["pos_end"] - res["pos_start"]
- size_read = Math.round(pos_diff/1024)
- size_total = Math.round(res['pos_end']/1024)
- @text.append(" ")
- @text.append("Displaying #{res.lines.length} of #{res.num_found} lines found in the last #{size_read}kB of the log file. (#{size_total}kB total) ")
- @addLines res.lines, false
- @text_elem.scrollTop = @text_elem.scrollHeight
- if @stream_id
- @sidebar.wrapper.ws.cmd "consoleLogStreamRemove", {stream_id: @stream_id}
- @sidebar.wrapper.ws.cmd "consoleLogStream", {filter: @filter}, (res) =>
- @stream_id = res.stream_id
-
- close: =>
- window.top.location.hash = ""
- @sidebar.move_lock = "y"
- @sidebar.startDrag()
- @sidebar.stopDrag()
-
- open: =>
- @sidebar.startDrag()
- @sidebar.moved("y")
- @sidebar.fixbutton_targety = @sidebar.page_height - @sidebar.fixbutton_inity - 50
- @sidebar.stopDrag()
-
- onOpened: =>
- @sidebar.onClosed()
- @log "onOpened"
-
- onClosed: =>
- $(document.body).removeClass("body-console")
- if @stream_id
- @sidebar.wrapper.ws.cmd "consoleLogStreamRemove", {stream_id: @stream_id}
-
- cleanup: =>
- if @container
- @container.remove()
- @container = null
-
- stopDragY: =>
- # Animate sidebar and iframe
- if @sidebar.fixbutton_targety == @sidebar.fixbutton_inity
- # Closed
- targety = 0
- @opened = false
- else
- # Opened
- targety = @sidebar.fixbutton_targety - @sidebar.fixbutton_inity
- @onOpened()
- @opened = true
-
- # Revent sidebar transitions
- if @tag
- @tag.css("transition", "0.5s ease-out")
- @tag.css("transform", "translateY(#{targety}px)").one transitionEnd, =>
- @tag.css("transition", "")
- if not @opened
- @cleanup()
- # Revert body transformations
- @log "stopDragY", "opened:", @opened, targety
- if not @opened
- @onClosed()
-
- changeFilter: (filter) =>
- @filter = filter
- if @filter == ""
- @read_size = 32 * 1024
- else
- @read_size = 5 * 1024 * 1024
- @loadConsoleText()
-
- handleTabClick: (e) =>
- elem = $(e.currentTarget)
- @tab_active = elem.data("filter")
- $("a", @tabs).removeClass("active")
- elem.addClass("active")
- @changeFilter(@tab_active)
- window.top.location.hash = "#ZeroNet:Console:" + elem.data("title")
- return false
-
-window.Console = Console
diff --git a/plugins/Sidebar/media/Console.css b/plugins/Sidebar/media/Console.css
deleted file mode 100644
index 127d15bf..00000000
--- a/plugins/Sidebar/media/Console.css
+++ /dev/null
@@ -1,31 +0,0 @@
-.console-container { width: 100%; z-index: 998; position: absolute; top: -100vh; padding-bottom: 100%; }
-.console-container .console { background-color: #212121; height: 100vh; transform: translateY(0px); padding-top: 80px; box-sizing: border-box; }
-
-.console-top { color: white; font-family: Consolas, monospace; font-size: 11px; line-height: 20px; height: 100%; box-sizing: border-box; letter-spacing: 0.5px;}
-.console-text { overflow-y: scroll; height: calc(100% - 10px); color: #DDD; padding: 5px; margin-top: -36px; overflow-wrap: break-word; }
-.console-tabs {
- background-color: #41193fad; position: relative; margin-right: 17px; /*backdrop-filter: blur(2px);*/
- box-shadow: -30px 0px 45px #7d2463; background: linear-gradient(-75deg, #591a48ed, #70305e66); border-bottom: 1px solid #792e6473;
-}
-.console-tabs a {
- margin-right: 5px; padding: 5px 15px; text-decoration: none; color: #AAA;
- font-size: 11px; font-family: "Consolas"; text-transform: uppercase; border: 1px solid #666;
- border-bottom: 0px; display: inline-block; margin: 5px; margin-bottom: 0px; background-color: rgba(0,0,0,0.5);
-}
-.console-tabs a:hover { color: #FFF }
-.console-tabs a.active { background-color: #46223c; color: #FFF }
-.console-middle {height: 0px; top: 50%; position: absolute; width: 100%; left: 50%; display: none; }
-
-.console .mynode {
- border: 0.5px solid #aaa; width: 50px; height: 50px; transform: rotateZ(45deg); margin-top: -25px; margin-left: -25px;
- opacity: 1; display: inline-block; background-color: #EEE; z-index: 9; position: absolute; outline: 5px solid #EEE;
-}
-.console .peers { width: 0px; height: 0px; position: absolute; left: -20px; top: -20px; text-align: center; }
-.console .peer { left: 0px; top: 0px; position: absolute; }
-.console .peer .icon { width: 20px; height: 20px; padding: 10px; display: inline-block; text-decoration: none; left: 200px; position: absolute; color: #666; }
-.console .peer .icon:before { content: "\25BC"; position: absolute; margin-top: 3px; margin-left: -1px; opacity: 0; transition: all 0.3s }
-.console .peer .icon:hover:before { opacity: 1; transition: none }
-.console .peer .line {
- width: 187px; border-top: 1px solid #CCC; position: absolute; top: 20px; left: 20px;
- transform: rotateZ(334deg); transform-origin: bottom left;
-}
diff --git a/plugins/Sidebar/media/Menu.coffee b/plugins/Sidebar/media/Menu.coffee
deleted file mode 100644
index 3e19fd9f..00000000
--- a/plugins/Sidebar/media/Menu.coffee
+++ /dev/null
@@ -1,49 +0,0 @@
-class Menu
- constructor: (@button) ->
- @elem = $(".menu.template").clone().removeClass("template")
- @elem.appendTo("body")
- @items = []
-
- show: ->
- if window.visible_menu and window.visible_menu.button[0] == @button[0] # Same menu visible then hide it
- window.visible_menu.hide()
- @hide()
- else
- button_pos = @button.offset()
- left = button_pos.left
- @elem.css({"top": button_pos.top+@button.outerHeight(), "left": left})
- @button.addClass("menu-active")
- @elem.addClass("visible")
- if @elem.position().left + @elem.width() + 20 > window.innerWidth
- @elem.css("left", window.innerWidth - @elem.width() - 20)
- if window.visible_menu then window.visible_menu.hide()
- window.visible_menu = @
-
-
- hide: ->
- @elem.removeClass("visible")
- @button.removeClass("menu-active")
- window.visible_menu = null
-
-
- addItem: (title, cb) ->
- item = $(".menu-item.template", @elem).clone().removeClass("template")
- item.html(title)
- item.on "click", =>
- if not cb(item)
- @hide()
- return false
- item.appendTo(@elem)
- @items.push item
- return item
-
-
- log: (args...) ->
- console.log "[Menu]", args...
-
-window.Menu = Menu
-
-# Hide menu on outside click
-$("body").on "click", (e) ->
- if window.visible_menu and e.target != window.visible_menu.button[0] and $(e.target).parent()[0] != window.visible_menu.elem[0]
- window.visible_menu.hide()
diff --git a/plugins/Sidebar/media/Menu.css b/plugins/Sidebar/media/Menu.css
deleted file mode 100644
index e2afa16e..00000000
--- a/plugins/Sidebar/media/Menu.css
+++ /dev/null
@@ -1,19 +0,0 @@
-.menu {
- background-color: white; padding: 10px 0px; position: absolute; top: 0px; left: 0px; max-height: 0px; overflow: hidden; transform: translate(0px, -30px); pointer-events: none;
- box-shadow: 0px 2px 8px rgba(0,0,0,0.3); border-radius: 2px; opacity: 0; transition: opacity 0.2s ease-out, transform 1s ease-out, max-height 0.2s ease-in-out;
-}
-.menu.visible { opacity: 1; max-height: 350px; transform: translate(0px, 0px); transition: opacity 0.1s ease-out, transform 0.3s ease-out, max-height 0.3s ease-in-out; pointer-events: all }
-
-.menu-item { display: block; text-decoration: none; color: black; padding: 6px 24px; transition: all 0.2s; border-bottom: none; font-weight: normal; padding-left: 30px; }
-.menu-item-separator { margin-top: 5px; border-top: 1px solid #eee }
-
-.menu-item:hover { background-color: #F6F6F6; transition: none; color: inherit; border: none }
-.menu-item:active, .menu-item:focus { background-color: #AF3BFF; color: white; transition: none }
-.menu-item.selected:before {
- content: "L"; display: inline-block; transform: rotateZ(45deg) scaleX(-1);
- font-weight: bold; position: absolute; margin-left: -17px; font-size: 12px; margin-top: 2px;
-}
-
-@media only screen and (max-width: 800px) {
-.menu, .menu.visible { position: absolute; left: unset !important; right: 20px; }
-}
\ No newline at end of file
diff --git a/plugins/Sidebar/media/Prototypes.coffee b/plugins/Sidebar/media/Prototypes.coffee
deleted file mode 100644
index a9edd255..00000000
--- a/plugins/Sidebar/media/Prototypes.coffee
+++ /dev/null
@@ -1,9 +0,0 @@
-String::startsWith = (s) -> @[...s.length] is s
-String::endsWith = (s) -> s is '' or @[-s.length..] is s
-String::capitalize = -> if @.length then @[0].toUpperCase() + @.slice(1) else ""
-String::repeat = (count) -> new Array( count + 1 ).join(@)
-
-window.isEmpty = (obj) ->
- for key of obj
- return false
- return true
diff --git a/plugins/Sidebar/media/RateLimit.coffee b/plugins/Sidebar/media/RateLimit.coffee
deleted file mode 100644
index 17c67433..00000000
--- a/plugins/Sidebar/media/RateLimit.coffee
+++ /dev/null
@@ -1,14 +0,0 @@
-limits = {}
-call_after_interval = {}
-window.RateLimit = (interval, fn) ->
- if not limits[fn]
- call_after_interval[fn] = false
- fn() # First call is not delayed
- limits[fn] = setTimeout (->
- if call_after_interval[fn]
- fn()
- delete limits[fn]
- delete call_after_interval[fn]
- ), interval
- else # Called within iterval, delay the call
- call_after_interval[fn] = true
diff --git a/plugins/Sidebar/media/Scrollable.js b/plugins/Sidebar/media/Scrollable.js
deleted file mode 100644
index 689a5719..00000000
--- a/plugins/Sidebar/media/Scrollable.js
+++ /dev/null
@@ -1,91 +0,0 @@
-/* via http://jsfiddle.net/elGrecode/00dgurnn/ */
-
-window.initScrollable = function () {
-
- var scrollContainer = document.querySelector('.scrollable'),
- scrollContentWrapper = document.querySelector('.scrollable .content-wrapper'),
- scrollContent = document.querySelector('.scrollable .content'),
- contentPosition = 0,
- scrollerBeingDragged = false,
- scroller,
- topPosition,
- scrollerHeight;
-
- function calculateScrollerHeight() {
- // *Calculation of how tall scroller should be
- var visibleRatio = scrollContainer.offsetHeight / scrollContentWrapper.scrollHeight;
- if (visibleRatio == 1)
- scroller.style.display = "none";
- else
- scroller.style.display = "block";
- return visibleRatio * scrollContainer.offsetHeight;
- }
-
- function moveScroller(evt) {
- // Move Scroll bar to top offset
- var scrollPercentage = evt.target.scrollTop / scrollContentWrapper.scrollHeight;
- topPosition = scrollPercentage * (scrollContainer.offsetHeight - 5); // 5px arbitrary offset so scroll bar doesn't move too far beyond content wrapper bounding box
- scroller.style.top = topPosition + 'px';
- }
-
- function startDrag(evt) {
- normalizedPosition = evt.pageY;
- contentPosition = scrollContentWrapper.scrollTop;
- scrollerBeingDragged = true;
- window.addEventListener('mousemove', scrollBarScroll);
- return false;
- }
-
- function stopDrag(evt) {
- scrollerBeingDragged = false;
- window.removeEventListener('mousemove', scrollBarScroll);
- }
-
- function scrollBarScroll(evt) {
- if (scrollerBeingDragged === true) {
- evt.preventDefault();
- var mouseDifferential = evt.pageY - normalizedPosition;
- var scrollEquivalent = mouseDifferential * (scrollContentWrapper.scrollHeight / scrollContainer.offsetHeight);
- scrollContentWrapper.scrollTop = contentPosition + scrollEquivalent;
- }
- }
-
- function updateHeight() {
- scrollerHeight = calculateScrollerHeight() - 10;
- scroller.style.height = scrollerHeight + 'px';
- }
-
- function createScroller() {
- // *Creates scroller element and appends to '.scrollable' div
- // create scroller element
- scroller = document.createElement("div");
- scroller.className = 'scroller';
-
- // determine how big scroller should be based on content
- scrollerHeight = calculateScrollerHeight() - 10;
-
- if (scrollerHeight / scrollContainer.offsetHeight < 1) {
- // *If there is a need to have scroll bar based on content size
- scroller.style.height = scrollerHeight + 'px';
-
- // append scroller to scrollContainer div
- scrollContainer.appendChild(scroller);
-
- // show scroll path divot
- scrollContainer.className += ' showScroll';
-
- // attach related draggable listeners
- scroller.addEventListener('mousedown', startDrag);
- window.addEventListener('mouseup', stopDrag);
- }
-
- }
-
- createScroller();
-
-
- // *** Listeners ***
- scrollContentWrapper.addEventListener('scroll', moveScroller);
-
- return updateHeight;
-};
\ No newline at end of file
diff --git a/plugins/Sidebar/media/Scrollbable.css b/plugins/Sidebar/media/Scrollbable.css
deleted file mode 100644
index 6e3e0b6a..00000000
--- a/plugins/Sidebar/media/Scrollbable.css
+++ /dev/null
@@ -1,44 +0,0 @@
-.scrollable {
- overflow: hidden;
-}
-
-.scrollable.showScroll::after {
- position: absolute;
- content: '';
- top: 5%;
- right: 7px;
- height: 90%;
- width: 3px;
- background: rgba(224, 224, 255, .3);
-}
-
-.scrollable .content-wrapper {
- width: 100%;
- height: 100%;
- padding-right: 50%;
- overflow-y: scroll;
-}
-.scroller {
- margin-top: 5px;
- z-index: 5;
- cursor: pointer;
- position: absolute;
- width: 7px;
- border-radius: 5px;
- background: #3A3A3A;
- top: 0px;
- left: 395px;
- -webkit-transition: top .08s;
- -moz-transition: top .08s;
- -ms-transition: top .08s;
- -o-transition: top .08s;
- transition: top .08s;
-}
-.scroller {
- -webkit-touch-callout: none;
- -webkit-user-select: none;
- -khtml-user-select: none;
- -moz-user-select: none;
- -ms-user-select: none;
- user-select: none;
-}
diff --git a/plugins/Sidebar/media/Sidebar.coffee b/plugins/Sidebar/media/Sidebar.coffee
deleted file mode 100644
index 57d36eac..00000000
--- a/plugins/Sidebar/media/Sidebar.coffee
+++ /dev/null
@@ -1,644 +0,0 @@
-class Sidebar extends Class
- constructor: (@wrapper) ->
- @tag = null
- @container = null
- @opened = false
- @width = 410
- @console = new Console(@)
- @fixbutton = $(".fixbutton")
- @fixbutton_addx = 0
- @fixbutton_addy = 0
- @fixbutton_initx = 0
- @fixbutton_inity = 15
- @fixbutton_targetx = 0
- @move_lock = null
- @page_width = $(window).width()
- @page_height = $(window).height()
- @frame = $("#inner-iframe")
- @initFixbutton()
- @dragStarted = 0
- @globe = null
- @preload_html = null
-
- @original_set_site_info = @wrapper.setSiteInfo # We going to override this, save the original
-
- # Start in opened state for debugging
- if window.top.location.hash == "#ZeroNet:OpenSidebar"
- @startDrag()
- @moved("x")
- @fixbutton_targetx = @fixbutton_initx - @width
- @stopDrag()
-
-
- initFixbutton: ->
-
- # Detect dragging
- @fixbutton.on "mousedown touchstart", (e) =>
- if e.button > 0 # Right or middle click
- return
- e.preventDefault()
-
- # Disable previous listeners
- @fixbutton.off "click touchend touchcancel"
-
- # Make sure its not a click
- @dragStarted = (+ new Date)
-
- # Fullscreen drag bg to capture mouse events over iframe
- $(".drag-bg").remove()
- $("
").appendTo(document.body)
-
- $("body").one "mousemove touchmove", (e) =>
- mousex = e.pageX
- mousey = e.pageY
- if not mousex
- mousex = e.originalEvent.touches[0].pageX
- mousey = e.originalEvent.touches[0].pageY
-
- @fixbutton_addx = @fixbutton.offset().left - mousex
- @fixbutton_addy = @fixbutton.offset().top - mousey
- @startDrag()
- @fixbutton.parent().on "click touchend touchcancel", (e) =>
- if (+ new Date) - @dragStarted < 100
- window.top.location = @fixbutton.find(".fixbutton-bg").attr("href")
- @stopDrag()
- @resized()
- $(window).on "resize", @resized
-
- resized: =>
- @page_width = $(window).width()
- @page_height = $(window).height()
- @fixbutton_initx = @page_width - 75 # Initial x position
- if @opened
- @fixbutton.css
- left: @fixbutton_initx - @width
- else
- @fixbutton.css
- left: @fixbutton_initx
-
- # Start dragging the fixbutton
- startDrag: ->
- #@move_lock = "x" # Temporary until internals not finished
- @log "startDrag", @fixbutton_initx, @fixbutton_inity
- @fixbutton_targetx = @fixbutton_initx # Fallback x position
- @fixbutton_targety = @fixbutton_inity # Fallback y position
-
- @fixbutton.addClass("dragging")
-
- # IE position wrap fix
- if navigator.userAgent.indexOf('MSIE') != -1 or navigator.appVersion.indexOf('Trident/') > 0
- @fixbutton.css("pointer-events", "none")
-
- # Don't go to homepage
- @fixbutton.one "click", (e) =>
- @stopDrag()
- @fixbutton.removeClass("dragging")
- moved_x = Math.abs(@fixbutton.offset().left - @fixbutton_initx)
- moved_y = Math.abs(@fixbutton.offset().top - @fixbutton_inity)
- if moved_x > 5 or moved_y > 10
- # If moved more than some pixel the button then don't go to homepage
- e.preventDefault()
-
- # Animate drag
- @fixbutton.parents().on "mousemove touchmove", @animDrag
- @fixbutton.parents().on "mousemove touchmove" ,@waitMove
-
- # Stop dragging listener
- @fixbutton.parents().one "mouseup touchend touchcancel", (e) =>
- e.preventDefault()
- @stopDrag()
-
-
- # Wait for moving the fixbutton
- waitMove: (e) =>
- document.body.style.perspective = "1000px"
- document.body.style.height = "100%"
- document.body.style.willChange = "perspective"
- document.documentElement.style.height = "100%"
- #$(document.body).css("backface-visibility", "hidden").css("perspective", "1000px").css("height", "900px")
- # $("iframe").css("backface-visibility", "hidden")
-
- moved_x = Math.abs(parseInt(@fixbutton[0].style.left) - @fixbutton_targetx)
- moved_y = Math.abs(parseInt(@fixbutton[0].style.top) - @fixbutton_targety)
- if moved_x > 5 and (+ new Date) - @dragStarted + moved_x > 50
- @moved("x")
- @fixbutton.stop().animate {"top": @fixbutton_inity}, 1000
- @fixbutton.parents().off "mousemove touchmove" ,@waitMove
-
- else if moved_y > 5 and (+ new Date) - @dragStarted + moved_y > 50
- @moved("y")
- @fixbutton.parents().off "mousemove touchmove" ,@waitMove
-
- moved: (direction) ->
- @log "Moved", direction
- @move_lock = direction
- if direction == "y"
- $(document.body).addClass("body-console")
- return @console.createHtmltag()
- @createHtmltag()
- $(document.body).addClass("body-sidebar")
- @container.on "mousedown touchend touchcancel", (e) =>
- if e.target != e.currentTarget
- return true
- @log "closing"
- if $(document.body).hasClass("body-sidebar")
- @close()
- return true
-
- $(window).off "resize"
- $(window).on "resize", =>
- $(document.body).css "height", $(window).height()
- @scrollable()
- @resized()
-
- # Override setsiteinfo to catch changes
- @wrapper.setSiteInfo = (site_info) =>
- @setSiteInfo(site_info)
- @original_set_site_info.apply(@wrapper, arguments)
-
- # Preload world.jpg
- img = new Image();
- img.src = "/uimedia/globe/world.jpg";
-
- setSiteInfo: (site_info) ->
- RateLimit 1500, =>
- @updateHtmlTag()
- RateLimit 30000, =>
- @displayGlobe()
-
- # Create the sidebar html tag
- createHtmltag: ->
- @when_loaded = $.Deferred()
- if not @container
- @container = $("""
-
- """)
- @container.appendTo(document.body)
- @tag = @container.find(".sidebar")
- @updateHtmlTag()
- @scrollable = window.initScrollable()
-
-
- updateHtmlTag: ->
- if @preload_html
- @setHtmlTag(@preload_html)
- @preload_html = null
- else
- @wrapper.ws.cmd "sidebarGetHtmlTag", {}, @setHtmlTag
-
- setHtmlTag: (res) =>
- if @tag.find(".content").children().length == 0 # First update
- @log "Creating content"
- @container.addClass("loaded")
- morphdom(@tag.find(".content")[0], ''+res+'
')
- # @scrollable()
- @when_loaded.resolve()
-
- else # Not first update, patch the html to keep unchanged dom elements
- morphdom @tag.find(".content")[0], ''+res+'
', {
- onBeforeMorphEl: (from_el, to_el) -> # Ignore globe loaded state
- if from_el.className == "globe" or from_el.className.indexOf("noupdate") >= 0
- return false
- else
- return true
- }
-
- # Save and forget privatekey for site signing
- @tag.find("#privatekey-add").off("click, touchend").on "click touchend", (e) =>
- @wrapper.displayPrompt "Enter your private key:", "password", "Save", "", (privatekey) =>
- @wrapper.ws.cmd "userSetSitePrivatekey", [privatekey], (res) =>
- @wrapper.notifications.add "privatekey", "done", "Private key saved for site signing", 5000
- return false
-
- @tag.find("#privatekey-forget").off("click, touchend").on "click touchend", (e) =>
- @wrapper.displayConfirm "Remove saved private key for this site?", "Forget", (res) =>
- if not res
- return false
- @wrapper.ws.cmd "userSetSitePrivatekey", [""], (res) =>
- @wrapper.notifications.add "privatekey", "done", "Saved private key removed", 5000
- return false
-
- # Use requested address for browse files urls
- @tag.find("#browse-files").attr("href", document.location.pathname.replace(/(\/.*?(\/|$)).*$/, "/list$1"))
-
-
-
- animDrag: (e) =>
- mousex = e.pageX
- mousey = e.pageY
- if not mousex and e.originalEvent.touches
- mousex = e.originalEvent.touches[0].pageX
- mousey = e.originalEvent.touches[0].pageY
-
- overdrag = @fixbutton_initx - @width - mousex
- if overdrag > 0 # Overdragged
- overdrag_percent = 1 + overdrag/300
- mousex = (mousex + (@fixbutton_initx-@width)*overdrag_percent)/(1+overdrag_percent)
- targetx = @fixbutton_initx - mousex - @fixbutton_addx
- targety = @fixbutton_inity - mousey - @fixbutton_addy
-
- if @move_lock == "x"
- targety = @fixbutton_inity
- else if @move_lock == "y"
- targetx = @fixbutton_initx
-
- if not @move_lock or @move_lock == "x"
- @fixbutton[0].style.left = (mousex + @fixbutton_addx) + "px"
- if @tag
- @tag[0].style.transform = "translateX(#{0 - targetx}px)"
-
- if not @move_lock or @move_lock == "y"
- @fixbutton[0].style.top = (mousey + @fixbutton_addy) + "px"
- if @console.tag
- @console.tag[0].style.transform = "translateY(#{0 - targety}px)"
-
- #if @move_lock == "x"
- # @fixbutton[0].style.left = "#{@fixbutton_targetx} px"
- #@fixbutton[0].style.top = "#{@fixbutton_inity}px"
- #if @move_lock == "y"
- # @fixbutton[0].style.top = "#{@fixbutton_targety} px"
-
- # Check if opened
- if (not @opened and targetx > @width/3) or (@opened and targetx > @width*0.9)
- @fixbutton_targetx = @fixbutton_initx - @width # Make it opened
- else
- @fixbutton_targetx = @fixbutton_initx
-
- if (not @console.opened and 0 - targety > @page_height/10) or (@console.opened and 0 - targety > @page_height*0.8)
- @fixbutton_targety = @page_height - @fixbutton_inity - 50
- else
- @fixbutton_targety = @fixbutton_inity
-
-
- # Stop dragging the fixbutton
- stopDrag: ->
- @fixbutton.parents().off "mousemove touchmove"
- @fixbutton.off "mousemove touchmove"
- @fixbutton.css("pointer-events", "")
- $(".drag-bg").remove()
- if not @fixbutton.hasClass("dragging")
- return
- @fixbutton.removeClass("dragging")
-
- # Move back to initial position
- if @fixbutton_targetx != @fixbutton.offset().left or @fixbutton_targety != @fixbutton.offset().top
- # Animate fixbutton
- if @move_lock == "y"
- top = @fixbutton_targety
- left = @fixbutton_initx
- if @move_lock == "x"
- top = @fixbutton_inity
- left = @fixbutton_targetx
- @fixbutton.stop().animate {"left": left, "top": top}, 500, "easeOutBack", =>
- # Switch back to auto align
- if @fixbutton_targetx == @fixbutton_initx # Closed
- @fixbutton.css("left", "auto")
- else # Opened
- @fixbutton.css("left", left)
-
- $(".fixbutton-bg").trigger "mouseout" # Switch fixbutton back to normal status
-
- @stopDragX()
- @console.stopDragY()
- @move_lock = null
-
- stopDragX: ->
- # Animate sidebar and iframe
- if @fixbutton_targetx == @fixbutton_initx or @move_lock == "y"
- # Closed
- targetx = 0
- @opened = false
- else
- # Opened
- targetx = @width
- if @opened
- @onOpened()
- else
- @when_loaded.done =>
- @onOpened()
- @opened = true
-
- # Revent sidebar transitions
- if @tag
- @tag.css("transition", "0.4s ease-out")
- @tag.css("transform", "translateX(-#{targetx}px)").one transitionEnd, =>
- @tag.css("transition", "")
- if not @opened
- @container.remove()
- @container = null
- if @tag
- @tag.remove()
- @tag = null
-
- # Revert body transformations
- @log "stopdrag", "opened:", @opened
- if not @opened
- @onClosed()
-
- sign: (inner_path, privatekey) ->
- @wrapper.displayProgress("sign", "Signing: #{inner_path}...", 0)
- @wrapper.ws.cmd "siteSign", {privatekey: privatekey, inner_path: inner_path, update_changed_files: true}, (res) =>
- if res == "ok"
- @wrapper.displayProgress("sign", "#{inner_path} signed!", 100)
- else
- @wrapper.displayProgress("sign", "Error signing #{inner_path}", -1)
-
- publish: (inner_path, privatekey) ->
- @wrapper.ws.cmd "sitePublish", {privatekey: privatekey, inner_path: inner_path, sign: true, update_changed_files: true}, (res) =>
- if res == "ok"
- @wrapper.notifications.add "sign", "done", "#{inner_path} Signed and published!", 5000
-
- handleSiteDeleteClick: ->
- if @wrapper.site_info.privatekey
- question = "Are you sure? This site has a saved private key"
- options = ["Forget private key and delete site"]
- else
- question = "Are you sure?"
- options = ["Delete this site", "Blacklist"]
- @wrapper.displayConfirm question, options, (confirmed) =>
- if confirmed == 1
- @tag.find("#button-delete").addClass("loading")
- @wrapper.ws.cmd "siteDelete", @wrapper.site_info.address, ->
- document.location = $(".fixbutton-bg").attr("href")
- else if confirmed == 2
- @wrapper.displayPrompt "Blacklist this site", "text", "Delete and Blacklist", "Reason", (reason) =>
- @tag.find("#button-delete").addClass("loading")
- @wrapper.ws.cmd "siteblockAdd", [@wrapper.site_info.address, reason]
- @wrapper.ws.cmd "siteDelete", @wrapper.site_info.address, ->
- document.location = $(".fixbutton-bg").attr("href")
-
- onOpened: ->
- @log "Opened"
- @scrollable()
-
- # Re-calculate height when site admin opened or closed
- @tag.find("#checkbox-owned, #checkbox-autodownloadoptional").off("click touchend").on "click touchend", =>
- setTimeout (=>
- @scrollable()
- ), 300
-
- # Site limit button
- @tag.find("#button-sitelimit").off("click touchend").on "click touchend", =>
- @wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), (res) =>
- if res == "ok"
- @wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000
- @updateHtmlTag()
- return false
-
- # Site autodownload limit button
- @tag.find("#button-autodownload_bigfile_size_limit").off("click touchend").on "click touchend", =>
- @wrapper.ws.cmd "siteSetAutodownloadBigfileLimit", $("#input-autodownload_bigfile_size_limit").val(), (res) =>
- if res == "ok"
- @wrapper.notifications.add "done-bigfilelimit", "done", "Site bigfile auto download limit modified!", 5000
- @updateHtmlTag()
- return false
-
- # Site start download optional files
- @tag.find("#button-autodownload_previous").off("click touchend").on "click touchend", =>
- @wrapper.ws.cmd "siteUpdate", {"address": @wrapper.site_info.address, "check_files": true}, =>
- @wrapper.notifications.add "done-download_optional", "done", "Optional files downloaded", 5000
-
- @wrapper.notifications.add "start-download_optional", "info", "Optional files download started", 5000
- return false
-
- # Database reload
- @tag.find("#button-dbreload").off("click touchend").on "click touchend", =>
- @wrapper.ws.cmd "dbReload", [], =>
- @wrapper.notifications.add "done-dbreload", "done", "Database schema reloaded!", 5000
- @updateHtmlTag()
- return false
-
- # Database rebuild
- @tag.find("#button-dbrebuild").off("click touchend").on "click touchend", =>
- @wrapper.notifications.add "done-dbrebuild", "info", "Database rebuilding...."
- @wrapper.ws.cmd "dbRebuild", [], =>
- @wrapper.notifications.add "done-dbrebuild", "done", "Database rebuilt!", 5000
- @updateHtmlTag()
- return false
-
- # Update site
- @tag.find("#button-update").off("click touchend").on "click touchend", =>
- @tag.find("#button-update").addClass("loading")
- @wrapper.ws.cmd "siteUpdate", @wrapper.site_info.address, =>
- @wrapper.notifications.add "done-updated", "done", "Site updated!", 5000
- @tag.find("#button-update").removeClass("loading")
- return false
-
- # Pause site
- @tag.find("#button-pause").off("click touchend").on "click touchend", =>
- @tag.find("#button-pause").addClass("hidden")
- @wrapper.ws.cmd "sitePause", @wrapper.site_info.address
- return false
-
- # Resume site
- @tag.find("#button-resume").off("click touchend").on "click touchend", =>
- @tag.find("#button-resume").addClass("hidden")
- @wrapper.ws.cmd "siteResume", @wrapper.site_info.address
- return false
-
- # Delete site
- @tag.find("#button-delete").off("click touchend").on "click touchend", =>
- @handleSiteDeleteClick()
- return false
-
- # Owned checkbox
- @tag.find("#checkbox-owned").off("click touchend").on "click touchend", =>
- owned = @tag.find("#checkbox-owned").is(":checked")
- @wrapper.ws.cmd "siteSetOwned", [owned], (res_set_owned) =>
- @log "Owned", owned
- if owned
- @wrapper.ws.cmd "siteRecoverPrivatekey", [], (res_recover) =>
- if res_recover == "ok"
- @wrapper.notifications.add("recover", "done", "Private key recovered from master seed", 5000)
- else
- @log "Unable to recover private key: #{res_recover.error}"
-
-
- # Owned auto download checkbox
- @tag.find("#checkbox-autodownloadoptional").off("click touchend").on "click touchend", =>
- @wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")]
-
- # Change identity button
- @tag.find("#button-identity").off("click touchend").on "click touchend", =>
- @wrapper.ws.cmd "certSelect"
- return false
-
- # Save settings
- @tag.find("#button-settings").off("click touchend").on "click touchend", =>
- @wrapper.ws.cmd "fileGet", "content.json", (res) =>
- data = JSON.parse(res)
- data["title"] = $("#settings-title").val()
- data["description"] = $("#settings-description").val()
- json_raw = unescape(encodeURIComponent(JSON.stringify(data, undefined, '\t')))
- @wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw), true], (res) =>
- if res != "ok" # fileWrite failed
- @wrapper.notifications.add "file-write", "error", "File write error: #{res}"
- else
- @wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000
- if @wrapper.site_info.privatekey
- @wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: "content.json", update_changed_files: true}
- @updateHtmlTag()
- return false
-
-
- # Open site directory
- @tag.find("#link-directory").off("click touchend").on "click touchend", =>
- @wrapper.ws.cmd "serverShowdirectory", ["site", @wrapper.site_info.address]
- return false
-
- # Copy site with peers
- @tag.find("#link-copypeers").off("click touchend").on "click touchend", (e) =>
- copy_text = e.currentTarget.href
- handler = (e) =>
- e.clipboardData.setData('text/plain', copy_text)
- e.preventDefault()
- @wrapper.notifications.add "copy", "done", "Site address with peers copied to your clipboard", 5000
- document.removeEventListener('copy', handler, true)
-
- document.addEventListener('copy', handler, true)
- document.execCommand('copy')
- return false
-
- # Sign and publish content.json
- $(document).on "click touchend", =>
- @tag?.find("#button-sign-publish-menu").removeClass("visible")
- @tag?.find(".contents + .flex").removeClass("sign-publish-flex")
-
- @tag.find(".contents-content").off("click touchend").on "click touchend", (e) =>
- $("#input-contents").val(e.currentTarget.innerText);
- return false;
-
- menu = new Menu(@tag.find("#menu-sign-publish"))
- menu.elem.css("margin-top", "-130px") # Open upwards
- menu.addItem "Sign", =>
- inner_path = @tag.find("#input-contents").val()
-
- @wrapper.ws.cmd "fileRules", {inner_path: inner_path}, (rules) =>
- if @wrapper.site_info.auth_address in rules.signers
- # ZeroID or other ID provider
- @sign(inner_path)
- else if @wrapper.site_info.privatekey
- # Privatekey stored in users.json
- @sign(inner_path, "stored")
- else
- # Ask the user for privatekey
- @wrapper.displayPrompt "Enter your private key:", "password", "Sign", "", (privatekey) => # Prompt the private key
- @sign(inner_path, privatekey)
-
- @tag.find(".contents + .flex").removeClass "active"
- menu.hide()
-
- menu.addItem "Publish", =>
- inner_path = @tag.find("#input-contents").val()
- @wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false}
-
- @tag.find(".contents + .flex").removeClass "active"
- menu.hide()
-
- @tag.find("#menu-sign-publish").off("click touchend").on "click touchend", =>
- if window.visible_menu == menu
- @tag.find(".contents + .flex").removeClass "active"
- menu.hide()
- else
- @tag.find(".contents + .flex").addClass "active"
- @tag.find(".content-wrapper").prop "scrollTop", 10000
- menu.show()
- return false
-
- $("body").on "click", =>
- if @tag
- @tag.find(".contents + .flex").removeClass "active"
-
- @tag.find("#button-sign-publish").off("click touchend").on "click touchend", =>
- inner_path = @tag.find("#input-contents").val()
-
- @wrapper.ws.cmd "fileRules", {inner_path: inner_path}, (rules) =>
- if @wrapper.site_info.auth_address in rules.signers
- # ZeroID or other ID provider
- @publish(inner_path, null)
- else if @wrapper.site_info.privatekey
- # Privatekey stored in users.json
- @publish(inner_path, "stored")
- else
- # Ask the user for privatekey
- @wrapper.displayPrompt "Enter your private key:", "password", "Sign", "", (privatekey) => # Prompt the private key
- @publish(inner_path, privatekey)
- return false
-
- # Close
- @tag.find(".close").off("click touchend").on "click touchend", (e) =>
- @close()
- return false
-
- @loadGlobe()
-
- close: ->
- @move_lock = "x"
- @startDrag()
- @stopDrag()
-
-
- onClosed: ->
- $(window).off "resize"
- $(window).on "resize", @resized
- $(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on transitionEnd, (e) =>
- if e.target == document.body and not $(document.body).hasClass("body-sidebar") and not $(document.body).hasClass("body-console")
- $(document.body).css("height", "auto").css("perspective", "").css("will-change", "").css("transition", "").off transitionEnd
- @unloadGlobe()
-
- # We dont need site info anymore
- @wrapper.setSiteInfo = @original_set_site_info
-
-
- loadGlobe: =>
- if @tag.find(".globe").hasClass("loading")
- setTimeout (=>
- if typeof(DAT) == "undefined" # Globe script not loaded, do it first
- script_tag = $("
-