Diffstat (limited to 'pyload')
-rw-r--r--pyload/Core.py16
-rw-r--r--pyload/__init__.py31
-rw-r--r--pyload/api/__init__.py2
-rw-r--r--pyload/database/File.py875
-rw-r--r--pyload/database/FileDatabase.py875
-rw-r--r--pyload/database/Storage.py (renamed from pyload/database/StorageDatabase.py)0
-rw-r--r--pyload/database/User.py (renamed from pyload/database/UserDatabase.py)0
-rw-r--r--pyload/database/__init__.py6
-rw-r--r--pyload/datatype/File.py270
-rw-r--r--pyload/datatype/Package.py64
-rw-r--r--pyload/datatype/PyFile.py270
-rw-r--r--pyload/datatype/PyPackage.py64
-rw-r--r--pyload/manager/Account.py191
-rw-r--r--pyload/manager/AccountManager.py191
-rw-r--r--pyload/manager/Addon.py304
-rw-r--r--pyload/manager/AddonManager.py304
-rw-r--r--pyload/manager/Captcha.py (renamed from pyload/manager/CaptchaManager.py)0
-rw-r--r--pyload/manager/Event.py (renamed from pyload/manager/event/PullEvents.py)0
-rw-r--r--pyload/manager/Plugin.py404
-rw-r--r--pyload/manager/PluginManager.py404
-rw-r--r--pyload/manager/Remote.py (renamed from pyload/manager/RemoteManager.py)0
-rw-r--r--pyload/manager/Thread.py302
-rw-r--r--pyload/manager/ThreadManager.py302
-rw-r--r--pyload/manager/thread/Addon.py69
-rw-r--r--pyload/manager/thread/AddonThread.py69
-rw-r--r--pyload/manager/thread/Decrypter.py101
-rw-r--r--pyload/manager/thread/DecrypterThread.py101
-rw-r--r--pyload/manager/thread/Download.py213
-rw-r--r--pyload/manager/thread/DownloadThread.py213
-rw-r--r--pyload/manager/thread/Info.py225
-rw-r--r--pyload/manager/thread/InfoThread.py225
-rw-r--r--pyload/manager/thread/Plugin.py130
-rw-r--r--pyload/manager/thread/PluginThread.py130
-rw-r--r--pyload/manager/thread/Server.py111
-rw-r--r--pyload/manager/thread/ServerThread.py111
-rw-r--r--pyload/network/HTTPDownload.py3
-rw-r--r--pyload/network/HTTPRequest.py2
-rw-r--r--pyload/network/XDCCRequest.py2
-rw-r--r--pyload/plugin/Account.py307
-rw-r--r--pyload/plugin/Addon.py185
-rw-r--r--pyload/plugin/Captcha.py51
-rw-r--r--pyload/plugin/Container.py66
-rw-r--r--pyload/plugin/Crypter.py107
-rw-r--r--pyload/plugin/Hoster.py21
-rw-r--r--pyload/plugin/OCR.py (renamed from pyload/plugins/OCR.py)0
-rw-r--r--pyload/plugin/Plugin.py (renamed from pyload/plugins/Plugin.py)0
-rw-r--r--pyload/plugin/__init__.py (renamed from pyload/plugins/__init__.py)0
-rw-r--r--pyload/plugin/account/AlldebridCom.py59
-rw-r--r--pyload/plugin/account/BayfilesCom.py37
-rw-r--r--pyload/plugin/account/BillionuploadsCom.py16
-rw-r--r--pyload/plugin/account/BitshareCom.py32
-rw-r--r--pyload/plugin/account/CatShareNet.py56
-rw-r--r--pyload/plugin/account/CramitIn.py16
-rw-r--r--pyload/plugin/account/CzshareCom.py44
-rw-r--r--pyload/plugin/account/DebridItaliaCom.py44
-rw-r--r--pyload/plugin/account/DepositfilesCom.py35
-rw-r--r--pyload/plugin/account/DropboxCom.py42
-rw-r--r--pyload/plugin/account/EasybytezCom.py19
-rw-r--r--pyload/plugin/account/EuroshareEu.py41
-rw-r--r--pyload/plugin/account/FastixRu.py38
-rw-r--r--pyload/plugin/account/FastshareCz.py53
-rw-r--r--pyload/plugin/account/File4safeCom.py18
-rw-r--r--pyload/plugin/account/FileParadoxIn.py16
-rw-r--r--pyload/plugin/account/FilecloudIo.py59
-rw-r--r--pyload/plugin/account/FilefactoryCom.py49
-rw-r--r--pyload/plugin/account/FilejungleCom.py49
-rw-r--r--pyload/plugin/account/FileomCom.py16
-rw-r--r--pyload/plugin/account/FilerNet.py50
-rw-r--r--pyload/plugin/account/FilerioCom.py16
-rw-r--r--pyload/plugin/account/FilesMailRu.py28
-rw-r--r--pyload/plugin/account/FileserveCom.py44
-rw-r--r--pyload/plugin/account/FourSharedCom.py33
-rw-r--r--pyload/plugin/account/FreakshareCom.py43
-rw-r--r--pyload/plugin/account/FreeWayMe.py55
-rw-r--r--pyload/plugin/account/FshareVn.py63
-rw-r--r--pyload/plugin/account/Ftp.py17
-rw-r--r--pyload/plugin/account/HellshareCz.py76
-rw-r--r--pyload/plugin/account/Http.py17
-rw-r--r--pyload/plugin/account/HugefilesNet.py16
-rw-r--r--pyload/plugin/account/HundredEightyUploadCom.py16
-rw-r--r--pyload/plugin/account/JunocloudMe.py16
-rw-r--r--pyload/plugin/account/Keep2shareCc.py69
-rw-r--r--pyload/plugin/account/LetitbitNet.py34
-rw-r--r--pyload/plugin/account/LinestorageCom.py16
-rw-r--r--pyload/plugin/account/LinksnappyCom.py50
-rw-r--r--pyload/plugin/account/LomafileCom.py16
-rw-r--r--pyload/plugin/account/MegaDebridEu.py39
-rw-r--r--pyload/plugin/account/MegaRapidCz.py59
-rw-r--r--pyload/plugin/account/MegasharesCom.py48
-rw-r--r--pyload/plugin/account/MovReelCom.py19
-rw-r--r--pyload/plugin/account/MultishareCz.py44
-rw-r--r--pyload/plugin/account/MyfastfileCom.py35
-rw-r--r--pyload/plugin/account/NetloadIn.py40
-rw-r--r--pyload/plugin/account/NosuploadCom.py16
-rw-r--r--pyload/plugin/account/NovafileCom.py16
-rw-r--r--pyload/plugin/account/NowVideoAt.py56
-rw-r--r--pyload/plugin/account/OboomCom.py62
-rw-r--r--pyload/plugin/account/OneFichierCom.py55
-rw-r--r--pyload/plugin/account/OverLoadMe.py36
-rw-r--r--pyload/plugin/account/PremiumTo.py34
-rw-r--r--pyload/plugin/account/PremiumizeMe.py49
-rw-r--r--pyload/plugin/account/QuickshareCz.py43
-rw-r--r--pyload/plugin/account/RPNetBiz.py51
-rw-r--r--pyload/plugin/account/RapidfileshareNet.py18
-rw-r--r--pyload/plugin/account/RapidgatorNet.py58
-rw-r--r--pyload/plugin/account/RapiduNet.py48
-rw-r--r--pyload/plugin/account/RarefileNet.py16
-rw-r--r--pyload/plugin/account/RealdebridCom.py36
-rw-r--r--pyload/plugin/account/RehostTo.py41
-rw-r--r--pyload/plugin/account/RyushareCom.py25
-rw-r--r--pyload/plugin/account/SafesharingEu.py16
-rw-r--r--pyload/plugin/account/SecureUploadEu.py16
-rw-r--r--pyload/plugin/account/SendmywayCom.py16
-rw-r--r--pyload/plugin/account/ShareonlineBiz.py45
-rw-r--r--pyload/plugin/account/SimplyPremiumCom.py46
-rw-r--r--pyload/plugin/account/SimplydebridCom.py34
-rw-r--r--pyload/plugin/account/StahnuTo.py34
-rw-r--r--pyload/plugin/account/StreamcloudEu.py16
-rw-r--r--pyload/plugin/account/TurbobitNet.py42
-rw-r--r--pyload/plugin/account/TusfilesNet.py23
-rw-r--r--pyload/plugin/account/UlozTo.py52
-rw-r--r--pyload/plugin/account/UnrestrictLi.py44
-rw-r--r--pyload/plugin/account/UploadcCom.py16
-rw-r--r--pyload/plugin/account/UploadedTo.py60
-rw-r--r--pyload/plugin/account/UploadheroCom.py41
-rw-r--r--pyload/plugin/account/UploadingCom.py63
-rw-r--r--pyload/plugin/account/UptoboxCom.py17
-rw-r--r--pyload/plugin/account/VidPlayNet.py16
-rw-r--r--pyload/plugin/account/XFileSharingPro.py30
-rw-r--r--pyload/plugin/account/YibaishiwuCom.py40
-rw-r--r--pyload/plugin/account/ZeveraCom.py56
-rw-r--r--pyload/plugin/account/__init__.py (renamed from pyload/plugins/account/__init__.py)0
-rw-r--r--pyload/plugin/addon/Checksum.py186
-rw-r--r--pyload/plugin/addon/ClickAndLoad.py74
-rw-r--r--pyload/plugin/addon/DeleteFinished.py79
-rw-r--r--pyload/plugin/addon/DownloadScheduler.py77
-rw-r--r--pyload/plugin/addon/ExternalScripts.py145
-rw-r--r--pyload/plugin/addon/ExtractArchive.py363
-rw-r--r--pyload/plugin/addon/HotFolder.py70
-rw-r--r--pyload/plugin/addon/IRCInterface.py431
-rw-r--r--pyload/plugin/addon/MergeFiles.py85
-rw-r--r--pyload/plugin/addon/MultiHome.py81
-rw-r--r--pyload/plugin/addon/RestartFailed.py45
-rw-r--r--pyload/plugin/addon/RestartSlow.py57
-rw-r--r--pyload/plugin/addon/SkipRev.py77
-rw-r--r--pyload/plugin/addon/UnSkipOnFail.py87
-rw-r--r--pyload/plugin/addon/UpdateManager.py305
-rw-r--r--pyload/plugin/addon/WindowsPhoneToastNotify.py57
-rw-r--r--pyload/plugin/addon/XMPPInterface.py252
-rw-r--r--pyload/plugin/addon/__init__.py (renamed from pyload/plugins/addon/__init__.py)0
-rw-r--r--pyload/plugin/captcha/AdYouLike.py107
-rw-r--r--pyload/plugin/captcha/AdsCaptcha.py77
-rw-r--r--pyload/plugin/captcha/ReCaptcha.py73
-rw-r--r--pyload/plugin/captcha/SolveMedia.py50
-rw-r--r--pyload/plugin/captcha/__init__.py (renamed from pyload/plugins/captcha/__init__.py)0
-rw-r--r--pyload/plugin/container/CCF.py43
-rw-r--r--pyload/plugin/container/LinkList.py71
-rw-r--r--pyload/plugin/container/RSDF.py56
-rw-r--r--pyload/plugin/container/__init__.py (renamed from pyload/plugins/container/__init__.py)0
-rw-r--r--pyload/plugin/crypter/BitshareCom.py21
-rw-r--r--pyload/plugin/crypter/C1neonCom.py19
-rw-r--r--pyload/plugin/crypter/ChipDe.py29
-rw-r--r--pyload/plugin/crypter/CrockoCom.py20
-rw-r--r--pyload/plugin/crypter/CryptItCom.py19
-rw-r--r--pyload/plugin/crypter/CzshareCom.py32
-rw-r--r--pyload/plugin/crypter/DDLMusicOrg.py51
-rw-r--r--pyload/plugin/crypter/DailymotionBatch.py106
-rw-r--r--pyload/plugin/crypter/DataHu.py40
-rw-r--r--pyload/plugin/crypter/DdlstorageCom.py20
-rw-r--r--pyload/plugin/crypter/DepositfilesCom.py20
-rw-r--r--pyload/plugin/crypter/Dereferer.py26
-rw-r--r--pyload/plugin/crypter/DevhostStFolder.py58
-rw-r--r--pyload/plugin/crypter/DlProtectCom.py65
-rw-r--r--pyload/plugin/crypter/DontKnowMe.py29
-rw-r--r--pyload/plugin/crypter/DuckCryptInfo.py59
-rw-r--r--pyload/plugin/crypter/DuploadOrg.py19
-rw-r--r--pyload/plugin/crypter/EasybytezCom.py22
-rw-r--r--pyload/plugin/crypter/EmbeduploadCom.py60
-rw-r--r--pyload/plugin/crypter/FilebeerInfo.py19
-rw-r--r--pyload/plugin/crypter/FilecloudIo.py21
-rw-r--r--pyload/plugin/crypter/FilecryptCc.py148
-rw-r--r--pyload/plugin/crypter/FilefactoryCom.py28
-rw-r--r--pyload/plugin/crypter/FilerNet.py26
-rw-r--r--pyload/plugin/crypter/FileserveCom.py38
-rw-r--r--pyload/plugin/crypter/FilesonicCom.py18
-rw-r--r--pyload/plugin/crypter/FilestubeCom.py21
-rw-r--r--pyload/plugin/crypter/FiletramCom.py22
-rw-r--r--pyload/plugin/crypter/FiredriveCom.py19
-rw-r--r--pyload/plugin/crypter/FourChanOrg.py27
-rw-r--r--pyload/plugin/crypter/FreakhareCom.py38
-rw-r--r--pyload/plugin/crypter/FreetexthostCom.py27
-rw-r--r--pyload/plugin/crypter/FshareVn.py20
-rw-r--r--pyload/plugin/crypter/Go4UpCom.py49
-rw-r--r--pyload/plugin/crypter/GooGl.py32
-rw-r--r--pyload/plugin/crypter/HoerbuchIn.py62
-rw-r--r--pyload/plugin/crypter/HotfileCom.py19
-rw-r--r--pyload/plugin/crypter/ILoadTo.py19
-rw-r--r--pyload/plugin/crypter/ImgurComAlbum.py27
-rw-r--r--pyload/plugin/crypter/JunocloudMe.py20
-rw-r--r--pyload/plugin/crypter/LetitbitNet.py33
-rw-r--r--pyload/plugin/crypter/LinkCryptWs.py327
-rw-r--r--pyload/plugin/crypter/LinkSaveIn.py246
-rw-r--r--pyload/plugin/crypter/LinkdecrypterCom.py92
-rw-r--r--pyload/plugin/crypter/LixIn.py62
-rw-r--r--pyload/plugin/crypter/LofCc.py19
-rw-r--r--pyload/plugin/crypter/MBLinkInfo.py20
-rw-r--r--pyload/plugin/crypter/MediafireCom.py58
-rw-r--r--pyload/plugin/crypter/MegaRapidCz.py20
-rw-r--r--pyload/plugin/crypter/MegauploadCom.py18
-rw-r--r--pyload/plugin/crypter/Movie2kTo.py19
-rw-r--r--pyload/plugin/crypter/MultiUpOrg.py38
-rw-r--r--pyload/plugin/crypter/MultiloadCz.py42
-rw-r--r--pyload/plugin/crypter/MultiuploadCom.py18
-rw-r--r--pyload/plugin/crypter/NCryptIn.py315
-rw-r--r--pyload/plugin/crypter/NetfolderIn.py70
-rw-r--r--pyload/plugin/crypter/NosvideoCom.py21
-rw-r--r--pyload/plugin/crypter/OneKhDe.py40
-rw-r--r--pyload/plugin/crypter/OronCom.py19
-rw-r--r--pyload/plugin/crypter/PastebinCom.py21
-rw-r--r--pyload/plugin/crypter/QuickshareCz.py31
-rw-r--r--pyload/plugin/crypter/RSLayerCom.py19
-rw-r--r--pyload/plugin/crypter/RapidfileshareNet.py20
-rw-r--r--pyload/plugin/crypter/RelinkUs.py293
-rw-r--r--pyload/plugin/crypter/SafelinkingNet.py79
-rw-r--r--pyload/plugin/crypter/SecuredIn.py19
-rw-r--r--pyload/plugin/crypter/SexuriaCom.py94
-rw-r--r--pyload/plugin/crypter/ShareLinksBiz.py286
-rw-r--r--pyload/plugin/crypter/SharingmatrixCom.py18
-rw-r--r--pyload/plugin/crypter/SpeedLoadOrg.py19
-rw-r--r--pyload/plugin/crypter/StealthTo.py19
-rw-r--r--pyload/plugin/crypter/TnyCz.py27
-rw-r--r--pyload/plugin/crypter/TrailerzoneInfo.py19
-rw-r--r--pyload/plugin/crypter/TurbobitNet.py44
-rw-r--r--pyload/plugin/crypter/TusfilesNet.py45
-rw-r--r--pyload/plugin/crypter/UlozTo.py46
-rw-r--r--pyload/plugin/crypter/UploadableCh.py24
-rw-r--r--pyload/plugin/crypter/UploadedTo.py34
-rw-r--r--pyload/plugin/crypter/WiiReloadedOrg.py19
-rw-r--r--pyload/plugin/crypter/WuploadCom.py18
-rw-r--r--pyload/plugin/crypter/XFileSharingPro.py47
-rw-r--r--pyload/plugin/crypter/XupPl.py25
-rw-r--r--pyload/plugin/crypter/YoutubeBatch.py148
-rw-r--r--pyload/plugin/crypter/__init__.py (renamed from pyload/plugins/crypter/__init__.py)0
-rw-r--r--pyload/plugin/hook/AlldebridCom.py27
-rw-r--r--pyload/plugin/hook/BypassCaptcha.py133
-rw-r--r--pyload/plugin/hook/Captcha9kw.py253
-rw-r--r--pyload/plugin/hook/CaptchaBrotherhood.py166
-rw-r--r--pyload/plugin/hook/DeathByCaptcha.py213
-rw-r--r--pyload/plugin/hook/DebridItaliaCom.py27
-rw-r--r--pyload/plugin/hook/EasybytezCom.py39
-rw-r--r--pyload/plugin/hook/ExpertDecoders.py92
-rw-r--r--pyload/plugin/hook/FastixRu.py28
-rw-r--r--pyload/plugin/hook/FreeWayMe.py25
-rw-r--r--pyload/plugin/hook/ImageTyperz.py151
-rw-r--r--pyload/plugin/hook/LinkdecrypterCom.py60
-rw-r--r--pyload/plugin/hook/LinksnappyCom.py27
-rw-r--r--pyload/plugin/hook/MegaDebridEu.py30
-rw-r--r--pyload/plugin/hook/MultishareCz.py27
-rw-r--r--pyload/plugin/hook/MyfastfileCom.py30
-rw-r--r--pyload/plugin/hook/OverLoadMe.py29
-rw-r--r--pyload/plugin/hook/PremiumTo.py38
-rw-r--r--pyload/plugin/hook/PremiumizeMe.py54
-rw-r--r--pyload/plugin/hook/RPNetBiz.py52
-rw-r--r--pyload/plugin/hook/RealdebridCom.py27
-rw-r--r--pyload/plugin/hook/RehostTo.py41
-rw-r--r--pyload/plugin/hook/SimplyPremiumCom.py29
-rw-r--r--pyload/plugin/hook/SimplydebridCom.py22
-rw-r--r--pyload/plugin/hook/UnrestrictLi.py30
-rw-r--r--pyload/plugin/hook/XFileSharingPro.py96
-rw-r--r--pyload/plugin/hook/ZeveraCom.py22
-rw-r--r--pyload/plugin/hook/__init__.py (renamed from pyload/plugins/hook/__init__.py)0
-rw-r--r--pyload/plugin/hoster/AlldebridCom.py87
-rw-r--r--pyload/plugin/hoster/BayfilesCom.py87
-rw-r--r--pyload/plugin/hoster/BezvadataCz.py94
-rw-r--r--pyload/plugin/hoster/BillionuploadsCom.py24
-rw-r--r--pyload/plugin/hoster/BitshareCom.py157
-rw-r--r--pyload/plugin/hoster/BoltsharingCom.py18
-rw-r--r--pyload/plugin/hoster/CatShareNet.py67
-rw-r--r--pyload/plugin/hoster/CloudzerNet.py20
-rw-r--r--pyload/plugin/hoster/CramitIn.py24
-rw-r--r--pyload/plugin/hoster/CrockoCom.py70
-rw-r--r--pyload/plugin/hoster/CyberlockerCh.py18
-rw-r--r--pyload/plugin/hoster/CzshareCom.py152
-rw-r--r--pyload/plugin/hoster/DailymotionCom.py125
-rw-r--r--pyload/plugin/hoster/DataHu.py42
-rw-r--r--pyload/plugin/hoster/DataportCz.py55
-rw-r--r--pyload/plugin/hoster/DateiTo.py82
-rw-r--r--pyload/plugin/hoster/DdlstorageCom.py19
-rw-r--r--pyload/plugin/hoster/DebridItaliaCom.py53
-rw-r--r--pyload/plugin/hoster/DepositfilesCom.py123
-rw-r--r--pyload/plugin/hoster/DevhostSt.py48
-rw-r--r--pyload/plugin/hoster/DlFreeFr.py136
-rw-r--r--pyload/plugin/hoster/DodanePl.py18
-rw-r--r--pyload/plugin/hoster/DuploadOrg.py18
-rw-r--r--pyload/plugin/hoster/EasybytezCom.py26
-rw-r--r--pyload/plugin/hoster/EdiskCz.py56
-rw-r--r--pyload/plugin/hoster/EgoFilesCom.py18
-rw-r--r--pyload/plugin/hoster/EnteruploadCom.py18
-rw-r--r--pyload/plugin/hoster/EpicShareNet.py18
-rw-r--r--pyload/plugin/hoster/EuroshareEu.py67
-rw-r--r--pyload/plugin/hoster/ExtabitCom.py79
-rw-r--r--pyload/plugin/hoster/FastixRu.py76
-rw-r--r--pyload/plugin/hoster/FastshareCz.py77
-rw-r--r--pyload/plugin/hoster/FileApeCom.py18
-rw-r--r--pyload/plugin/hoster/FileParadoxIn.py25
-rw-r--r--pyload/plugin/hoster/FileSharkPl.py138
-rw-r--r--pyload/plugin/hoster/FileStoreTo.py37
-rw-r--r--pyload/plugin/hoster/FilebeerInfo.py18
-rw-r--r--pyload/plugin/hoster/FilecloudIo.py125
-rw-r--r--pyload/plugin/hoster/FilefactoryCom.py90
-rw-r--r--pyload/plugin/hoster/FilejungleCom.py29
-rw-r--r--pyload/plugin/hoster/FileomCom.py35
-rw-r--r--pyload/plugin/hoster/FilepostCom.py130
-rw-r--r--pyload/plugin/hoster/FilepupNet.py51
-rw-r--r--pyload/plugin/hoster/FilerNet.py80
-rw-r--r--pyload/plugin/hoster/FilerioCom.py25
-rw-r--r--pyload/plugin/hoster/FilesMailRu.py106
-rw-r--r--pyload/plugin/hoster/FileserveCom.py217
-rw-r--r--pyload/plugin/hoster/FileshareInUa.py18
-rw-r--r--pyload/plugin/hoster/FilesonicCom.py19
-rw-r--r--pyload/plugin/hoster/FilezyNet.py18
-rw-r--r--pyload/plugin/hoster/FiredriveCom.py18
-rw-r--r--pyload/plugin/hoster/FlyFilesNet.py45
-rw-r--r--pyload/plugin/hoster/FourSharedCom.py61
-rw-r--r--pyload/plugin/hoster/FreakshareCom.py176
-rw-r--r--pyload/plugin/hoster/FreeWayMe.py36
-rw-r--r--pyload/plugin/hoster/FreevideoCz.py18
-rw-r--r--pyload/plugin/hoster/FshareVn.py125
-rw-r--r--pyload/plugin/hoster/Ftp.py79
-rw-r--r--pyload/plugin/hoster/GamefrontCom.py90
-rw-r--r--pyload/plugin/hoster/GigapetaCom.py64
-rw-r--r--pyload/plugin/hoster/GooIm.py39
-rw-r--r--pyload/plugin/hoster/HellshareCz.py48
-rw-r--r--pyload/plugin/hoster/HellspyCz.py18
-rw-r--r--pyload/plugin/hoster/HotfileCom.py21
-rw-r--r--pyload/plugin/hoster/HugefilesNet.py27
-rw-r--r--pyload/plugin/hoster/HundredEightyUploadCom.py27
-rw-r--r--pyload/plugin/hoster/IFileWs.py18
-rw-r--r--pyload/plugin/hoster/IcyFilesCom.py18
-rw-r--r--pyload/plugin/hoster/IfileIt.py67
-rw-r--r--pyload/plugin/hoster/IfolderRu.py76
-rw-r--r--pyload/plugin/hoster/JumbofilesCom.py38
-rw-r--r--pyload/plugin/hoster/JunocloudMe.py28
-rw-r--r--pyload/plugin/hoster/Keep2shareCc.py132
-rw-r--r--pyload/plugin/hoster/KickloadCom.py18
-rw-r--r--pyload/plugin/hoster/KingfilesNet.py82
-rw-r--r--pyload/plugin/hoster/LemUploadsCom.py18
-rw-r--r--pyload/plugin/hoster/LetitbitNet.py142
-rw-r--r--pyload/plugin/hoster/LinksnappyCom.py76
-rw-r--r--pyload/plugin/hoster/LoadTo.py75
-rw-r--r--pyload/plugin/hoster/LomafileCom.py30
-rw-r--r--pyload/plugin/hoster/LuckyShareNet.py73
-rw-r--r--pyload/plugin/hoster/MediafireCom.py124
-rw-r--r--pyload/plugin/hoster/MegaCoNz.py171
-rw-r--r--pyload/plugin/hoster/MegaDebridEu.py94
-rw-r--r--pyload/plugin/hoster/MegaFilesSe.py18
-rw-r--r--pyload/plugin/hoster/MegaRapidCz.py71
-rw-r--r--pyload/plugin/hoster/MegacrypterCom.py56
-rw-r--r--pyload/plugin/hoster/MegareleaseOrg.py19
-rw-r--r--pyload/plugin/hoster/MegasharesCom.py113
-rw-r--r--pyload/plugin/hoster/MegauploadCom.py18
-rw-r--r--pyload/plugin/hoster/MegavideoCom.py19
-rw-r--r--pyload/plugin/hoster/MovReelCom.py26
-rw-r--r--pyload/plugin/hoster/MultishareCz.py80
-rw-r--r--pyload/plugin/hoster/MyfastfileCom.py47
-rw-r--r--pyload/plugin/hoster/MyvideoDe.py49
-rw-r--r--pyload/plugin/hoster/NahrajCz.py18
-rw-r--r--pyload/plugin/hoster/NarodRu.py60
-rw-r--r--pyload/plugin/hoster/NetloadIn.py294
-rw-r--r--pyload/plugin/hoster/NosuploadCom.py43
-rw-r--r--pyload/plugin/hoster/NovafileCom.py31
-rw-r--r--pyload/plugin/hoster/NowDownloadSx.py64
-rw-r--r--pyload/plugin/hoster/NowVideoSx.py44
-rw-r--r--pyload/plugin/hoster/OboomCom.py145
-rw-r--r--pyload/plugin/hoster/OneFichierCom.py71
-rw-r--r--pyload/plugin/hoster/OronCom.py19
-rw-r--r--pyload/plugin/hoster/OverLoadMe.py84
-rw-r--r--pyload/plugin/hoster/PandaplaNet.py18
-rw-r--r--pyload/plugin/hoster/PornhostCom.py80
-rw-r--r--pyload/plugin/hoster/PornhubCom.py89
-rw-r--r--pyload/plugin/hoster/PotloadCom.py18
-rw-r--r--pyload/plugin/hoster/PremiumTo.py81
-rw-r--r--pyload/plugin/hoster/PremiumizeMe.py56
-rw-r--r--pyload/plugin/hoster/PromptfileCom.py45
-rw-r--r--pyload/plugin/hoster/PrzeklejPl.py18
-rw-r--r--pyload/plugin/hoster/QuickshareCz.py90
-rw-r--r--pyload/plugin/hoster/RPNetBiz.py85
-rw-r--r--pyload/plugin/hoster/RapidfileshareNet.py31
-rw-r--r--pyload/plugin/hoster/RapidgatorNet.py199
-rw-r--r--pyload/plugin/hoster/RapiduNet.py82
-rw-r--r--pyload/plugin/hoster/RarefileNet.py28
-rw-r--r--pyload/plugin/hoster/RealdebridCom.py94
-rw-r--r--pyload/plugin/hoster/RedtubeCom.py62
-rw-r--r--pyload/plugin/hoster/RehostTo.py44
-rw-r--r--pyload/plugin/hoster/RemixshareCom.py61
-rw-r--r--pyload/plugin/hoster/RgHostNet.py26
-rw-r--r--pyload/plugin/hoster/RyushareCom.py81
-rw-r--r--pyload/plugin/hoster/SafesharingEu.py25
-rw-r--r--pyload/plugin/hoster/SecureUploadEu.py23
-rw-r--r--pyload/plugin/hoster/SendmywayCom.py24
-rw-r--r--pyload/plugin/hoster/SendspaceCom.py60
-rw-r--r--pyload/plugin/hoster/Share4webCom.py22
-rw-r--r--pyload/plugin/hoster/Share76Com.py18
-rw-r--r--pyload/plugin/hoster/ShareFilesCo.py18
-rw-r--r--pyload/plugin/hoster/SharebeesCom.py18
-rw-r--r--pyload/plugin/hoster/ShareonlineBiz.py191
-rw-r--r--pyload/plugin/hoster/ShareplaceCom.py89
-rw-r--r--pyload/plugin/hoster/SharingmatrixCom.py19
-rw-r--r--pyload/plugin/hoster/ShragleCom.py19
-rw-r--r--pyload/plugin/hoster/SimplyPremiumCom.py82
-rw-r--r--pyload/plugin/hoster/SimplydebridCom.py64
-rw-r--r--pyload/plugin/hoster/SockshareCom.py20
-rw-r--r--pyload/plugin/hoster/SoundcloudCom.py57
-rw-r--r--pyload/plugin/hoster/SpeedLoadOrg.py18
-rw-r--r--pyload/plugin/hoster/SpeedfileCz.py18
-rw-r--r--pyload/plugin/hoster/SpeedyshareCom.py51
-rw-r--r--pyload/plugin/hoster/StorageTo.py18
-rw-r--r--pyload/plugin/hoster/StreamCz.py71
-rw-r--r--pyload/plugin/hoster/StreamcloudEu.py31
-rw-r--r--pyload/plugin/hoster/TurbobitNet.py173
-rw-r--r--pyload/plugin/hoster/TurbouploadCom.py18
-rw-r--r--pyload/plugin/hoster/TusfilesNet.py35
-rw-r--r--pyload/plugin/hoster/TwoSharedCom.py41
-rw-r--r--pyload/plugin/hoster/UlozTo.py164
-rw-r--r--pyload/plugin/hoster/UloziskoSk.py72
-rw-r--r--pyload/plugin/hoster/UnibytesCom.py70
-rw-r--r--pyload/plugin/hoster/UnrestrictLi.py91
-rw-r--r--pyload/plugin/hoster/UpleaCom.py60
-rw-r--r--pyload/plugin/hoster/UploadStationCom.py19
-rw-r--r--pyload/plugin/hoster/UploadableCh.py90
-rw-r--r--pyload/plugin/hoster/UploadboxCom.py18
-rw-r--r--pyload/plugin/hoster/UploadedTo.py245
-rw-r--r--pyload/plugin/hoster/UploadhereCom.py18
-rw-r--r--pyload/plugin/hoster/UploadheroCom.py81
-rw-r--r--pyload/plugin/hoster/UploadingCom.py104
-rw-r--r--pyload/plugin/hoster/UploadkingCom.py18
-rw-r--r--pyload/plugin/hoster/UpstoreNet.py73
-rw-r--r--pyload/plugin/hoster/UptoboxCom.py34
-rw-r--r--pyload/plugin/hoster/VeehdCom.py81
-rw-r--r--pyload/plugin/hoster/VeohCom.py53
-rw-r--r--pyload/plugin/hoster/VidPlayNet.py26
-rw-r--r--pyload/plugin/hoster/VimeoCom.py75
-rw-r--r--pyload/plugin/hoster/Vipleech4uCom.py18
-rw-r--r--pyload/plugin/hoster/WarserverCz.py18
-rw-r--r--pyload/plugin/hoster/WebshareCz.py62
-rw-r--r--pyload/plugin/hoster/WrzucTo.py52
-rw-r--r--pyload/plugin/hoster/WuploadCom.py19
-rw-r--r--pyload/plugin/hoster/X7To.py18
-rw-r--r--pyload/plugin/hoster/XFileSharingPro.py57
-rw-r--r--pyload/plugin/hoster/XHamsterCom.py129
-rw-r--r--pyload/plugin/hoster/XVideosCom.py28
-rw-r--r--pyload/plugin/hoster/Xdcc.py207
-rw-r--r--pyload/plugin/hoster/YibaishiwuCom.py55
-rw-r--r--pyload/plugin/hoster/YoupornCom.py60
-rw-r--r--pyload/plugin/hoster/YourfilesTo.py87
-rw-r--r--pyload/plugin/hoster/YoutubeCom.py185
-rw-r--r--pyload/plugin/hoster/ZDF.py59
-rw-r--r--pyload/plugin/hoster/ZShareNet.py19
-rw-r--r--pyload/plugin/hoster/ZeveraCom.py42
-rw-r--r--pyload/plugin/hoster/ZippyshareCom.py65
-rw-r--r--pyload/plugin/hoster/__init__.py (renamed from pyload/plugins/hoster/__init__.py)0
-rw-r--r--pyload/plugin/internal/AbstractExtractor.py (renamed from pyload/plugins/internal/AbstractExtractor.py)0
-rw-r--r--pyload/plugin/internal/BasePlugin.py106
-rw-r--r--pyload/plugin/internal/DeadCrypter.py32
-rw-r--r--pyload/plugin/internal/DeadHoster.py32
-rw-r--r--pyload/plugin/internal/MultiHoster.py202
-rw-r--r--pyload/plugin/internal/SimpleCrypter.py152
-rw-r--r--pyload/plugin/internal/SimpleHoster.py530
-rw-r--r--pyload/plugin/internal/UnRar.py221
-rw-r--r--pyload/plugin/internal/UnZip.py41
-rw-r--r--pyload/plugin/internal/UpdateManager.py300
-rw-r--r--pyload/plugin/internal/XFSAccount.py155
-rw-r--r--pyload/plugin/internal/XFSCrypter.py29
-rw-r--r--pyload/plugin/internal/XFSHoster.py339
-rw-r--r--pyload/plugin/internal/__init__.py (renamed from pyload/plugins/internal/__init__.py)0
-rw-r--r--pyload/plugin/ocr/GigasizeCom.py24
-rw-r--r--pyload/plugin/ocr/LinksaveIn.py158
-rw-r--r--pyload/plugin/ocr/NetloadIn.py29
-rw-r--r--pyload/plugin/ocr/ShareonlineBiz.py39
-rw-r--r--pyload/plugin/ocr/__init__.py (renamed from pyload/plugins/ocr/__init__.py)0
-rw-r--r--pyload/plugins/Account.py307
-rw-r--r--pyload/plugins/Addon.py185
-rw-r--r--pyload/plugins/Captcha.py51
-rw-r--r--pyload/plugins/Container.py66
-rw-r--r--pyload/plugins/Crypter.py107
-rw-r--r--pyload/plugins/Hoster.py21
-rw-r--r--pyload/plugins/account/AlldebridCom.py59
-rw-r--r--pyload/plugins/account/BayfilesCom.py37
-rw-r--r--pyload/plugins/account/BillionuploadsCom.py16
-rw-r--r--pyload/plugins/account/BitshareCom.py32
-rw-r--r--pyload/plugins/account/CatShareNet.py56
-rw-r--r--pyload/plugins/account/CramitIn.py16
-rw-r--r--pyload/plugins/account/CzshareCom.py44
-rw-r--r--pyload/plugins/account/DebridItaliaCom.py44
-rw-r--r--pyload/plugins/account/DepositfilesCom.py35
-rw-r--r--pyload/plugins/account/DropboxCom.py42
-rw-r--r--pyload/plugins/account/EasybytezCom.py19
-rw-r--r--pyload/plugins/account/EuroshareEu.py41
-rw-r--r--pyload/plugins/account/FastixRu.py38
-rw-r--r--pyload/plugins/account/FastshareCz.py53
-rw-r--r--pyload/plugins/account/File4safeCom.py18
-rw-r--r--pyload/plugins/account/FileParadoxIn.py16
-rw-r--r--pyload/plugins/account/FilecloudIo.py59
-rw-r--r--pyload/plugins/account/FilefactoryCom.py49
-rw-r--r--pyload/plugins/account/FilejungleCom.py49
-rw-r--r--pyload/plugins/account/FileomCom.py16
-rw-r--r--pyload/plugins/account/FilerNet.py50
-rw-r--r--pyload/plugins/account/FilerioCom.py16
-rw-r--r--pyload/plugins/account/FilesMailRu.py28
-rw-r--r--pyload/plugins/account/FileserveCom.py44
-rw-r--r--pyload/plugins/account/FourSharedCom.py33
-rw-r--r--pyload/plugins/account/FreakshareCom.py43
-rw-r--r--pyload/plugins/account/FreeWayMe.py55
-rw-r--r--pyload/plugins/account/FshareVn.py63
-rw-r--r--pyload/plugins/account/Ftp.py17
-rw-r--r--pyload/plugins/account/HellshareCz.py76
-rw-r--r--pyload/plugins/account/Http.py17
-rw-r--r--pyload/plugins/account/HugefilesNet.py16
-rw-r--r--pyload/plugins/account/HundredEightyUploadCom.py16
-rw-r--r--pyload/plugins/account/JunocloudMe.py16
-rw-r--r--pyload/plugins/account/Keep2shareCc.py69
-rw-r--r--pyload/plugins/account/LetitbitNet.py34
-rw-r--r--pyload/plugins/account/LinestorageCom.py16
-rw-r--r--pyload/plugins/account/LinksnappyCom.py50
-rw-r--r--pyload/plugins/account/LomafileCom.py16
-rw-r--r--pyload/plugins/account/MegaDebridEu.py39
-rw-r--r--pyload/plugins/account/MegaRapidCz.py59
-rw-r--r--pyload/plugins/account/MegasharesCom.py48
-rw-r--r--pyload/plugins/account/MovReelCom.py19
-rw-r--r--pyload/plugins/account/MultishareCz.py44
-rw-r--r--pyload/plugins/account/MyfastfileCom.py35
-rw-r--r--pyload/plugins/account/NetloadIn.py40
-rw-r--r--pyload/plugins/account/NosuploadCom.py16
-rw-r--r--pyload/plugins/account/NovafileCom.py16
-rw-r--r--pyload/plugins/account/NowVideoAt.py56
-rw-r--r--pyload/plugins/account/OboomCom.py62
-rw-r--r--pyload/plugins/account/OneFichierCom.py55
-rw-r--r--pyload/plugins/account/OverLoadMe.py36
-rw-r--r--pyload/plugins/account/PremiumTo.py34
-rw-r--r--pyload/plugins/account/PremiumizeMe.py49
-rw-r--r--pyload/plugins/account/QuickshareCz.py43
-rw-r--r--pyload/plugins/account/RPNetBiz.py51
-rw-r--r--pyload/plugins/account/RapidfileshareNet.py18
-rw-r--r--pyload/plugins/account/RapidgatorNet.py58
-rw-r--r--pyload/plugins/account/RapiduNet.py48
-rw-r--r--pyload/plugins/account/RarefileNet.py16
-rw-r--r--pyload/plugins/account/RealdebridCom.py36
-rw-r--r--pyload/plugins/account/RehostTo.py41
-rw-r--r--pyload/plugins/account/RyushareCom.py25
-rw-r--r--pyload/plugins/account/SafesharingEu.py16
-rw-r--r--pyload/plugins/account/SecureUploadEu.py16
-rw-r--r--pyload/plugins/account/SendmywayCom.py16
-rw-r--r--pyload/plugins/account/ShareonlineBiz.py45
-rw-r--r--pyload/plugins/account/SimplyPremiumCom.py46
-rw-r--r--pyload/plugins/account/SimplydebridCom.py34
-rw-r--r--pyload/plugins/account/StahnuTo.py34
-rw-r--r--pyload/plugins/account/StreamcloudEu.py16
-rw-r--r--pyload/plugins/account/TurbobitNet.py42
-rw-r--r--pyload/plugins/account/TusfilesNet.py23
-rw-r--r--pyload/plugins/account/UlozTo.py52
-rw-r--r--pyload/plugins/account/UnrestrictLi.py44
-rw-r--r--pyload/plugins/account/UploadcCom.py16
-rw-r--r--pyload/plugins/account/UploadedTo.py60
-rw-r--r--pyload/plugins/account/UploadheroCom.py41
-rw-r--r--pyload/plugins/account/UploadingCom.py63
-rw-r--r--pyload/plugins/account/UptoboxCom.py17
-rw-r--r--pyload/plugins/account/VidPlayNet.py16
-rw-r--r--pyload/plugins/account/XFileSharingPro.py30
-rw-r--r--pyload/plugins/account/YibaishiwuCom.py40
-rw-r--r--pyload/plugins/account/ZeveraCom.py56
-rw-r--r--pyload/plugins/addon/Checksum.py186
-rw-r--r--pyload/plugins/addon/ClickAndLoad.py74
-rw-r--r--pyload/plugins/addon/DeleteFinished.py79
-rw-r--r--pyload/plugins/addon/DownloadScheduler.py77
-rw-r--r--pyload/plugins/addon/ExternalScripts.py145
-rw-r--r--pyload/plugins/addon/ExtractArchive.py363
-rw-r--r--pyload/plugins/addon/HotFolder.py70
-rw-r--r--pyload/plugins/addon/IRCInterface.py431
-rw-r--r--pyload/plugins/addon/MergeFiles.py85
-rw-r--r--pyload/plugins/addon/MultiHome.py81
-rw-r--r--pyload/plugins/addon/RestartFailed.py45
-rw-r--r--pyload/plugins/addon/RestartSlow.py57
-rw-r--r--pyload/plugins/addon/SkipRev.py77
-rw-r--r--pyload/plugins/addon/UnSkipOnFail.py87
-rw-r--r--pyload/plugins/addon/UpdateManager.py305
-rw-r--r--pyload/plugins/addon/WindowsPhoneToastNotify.py57
-rw-r--r--pyload/plugins/addon/XMPPInterface.py252
-rw-r--r--pyload/plugins/captcha/AdYouLike.py107
-rw-r--r--pyload/plugins/captcha/AdsCaptcha.py77
-rw-r--r--pyload/plugins/captcha/ReCaptcha.py73
-rw-r--r--pyload/plugins/captcha/SolveMedia.py50
-rw-r--r--pyload/plugins/container/CCF.py43
-rw-r--r--pyload/plugins/container/LinkList.py71
-rw-r--r--pyload/plugins/container/RSDF.py56
-rw-r--r--pyload/plugins/crypter/BitshareCom.py21
-rw-r--r--pyload/plugins/crypter/C1neonCom.py19
-rw-r--r--pyload/plugins/crypter/ChipDe.py29
-rw-r--r--pyload/plugins/crypter/CrockoCom.py20
-rw-r--r--pyload/plugins/crypter/CryptItCom.py19
-rw-r--r--pyload/plugins/crypter/CzshareCom.py32
-rw-r--r--pyload/plugins/crypter/DDLMusicOrg.py51
-rw-r--r--pyload/plugins/crypter/DailymotionBatch.py106
-rw-r--r--pyload/plugins/crypter/DataHu.py40
-rw-r--r--pyload/plugins/crypter/DdlstorageCom.py20
-rw-r--r--pyload/plugins/crypter/DepositfilesCom.py20
-rw-r--r--pyload/plugins/crypter/Dereferer.py26
-rw-r--r--pyload/plugins/crypter/DevhostStFolder.py58
-rw-r--r--pyload/plugins/crypter/DlProtectCom.py65
-rw-r--r--pyload/plugins/crypter/DontKnowMe.py29
-rw-r--r--pyload/plugins/crypter/DuckCryptInfo.py59
-rw-r--r--pyload/plugins/crypter/DuploadOrg.py19
-rw-r--r--pyload/plugins/crypter/EasybytezCom.py22
-rw-r--r--pyload/plugins/crypter/EmbeduploadCom.py60
-rw-r--r--pyload/plugins/crypter/FilebeerInfo.py19
-rw-r--r--pyload/plugins/crypter/FilecloudIo.py21
-rw-r--r--pyload/plugins/crypter/FilecryptCc.py148
-rw-r--r--pyload/plugins/crypter/FilefactoryCom.py28
-rw-r--r--pyload/plugins/crypter/FilerNet.py26
-rw-r--r--pyload/plugins/crypter/FileserveCom.py38
-rw-r--r--pyload/plugins/crypter/FilesonicCom.py18
-rw-r--r--pyload/plugins/crypter/FilestubeCom.py21
-rw-r--r--pyload/plugins/crypter/FiletramCom.py22
-rw-r--r--pyload/plugins/crypter/FiredriveCom.py19
-rw-r--r--pyload/plugins/crypter/FourChanOrg.py27
-rw-r--r--pyload/plugins/crypter/FreakhareCom.py38
-rw-r--r--pyload/plugins/crypter/FreetexthostCom.py27
-rw-r--r--pyload/plugins/crypter/FshareVn.py20
-rw-r--r--pyload/plugins/crypter/Go4UpCom.py49
-rw-r--r--pyload/plugins/crypter/GooGl.py32
-rw-r--r--pyload/plugins/crypter/HoerbuchIn.py62
-rw-r--r--pyload/plugins/crypter/HotfileCom.py19
-rw-r--r--pyload/plugins/crypter/ILoadTo.py19
-rw-r--r--pyload/plugins/crypter/ImgurComAlbum.py27
-rw-r--r--pyload/plugins/crypter/JunocloudMe.py20
-rw-r--r--pyload/plugins/crypter/LetitbitNet.py33
-rw-r--r--pyload/plugins/crypter/LinkCryptWs.py327
-rw-r--r--pyload/plugins/crypter/LinkSaveIn.py246
-rw-r--r--pyload/plugins/crypter/LinkdecrypterCom.py92
-rw-r--r--pyload/plugins/crypter/LixIn.py62
-rw-r--r--pyload/plugins/crypter/LofCc.py19
-rw-r--r--pyload/plugins/crypter/MBLinkInfo.py20
-rw-r--r--pyload/plugins/crypter/MediafireCom.py58
-rw-r--r--pyload/plugins/crypter/MegaRapidCz.py20
-rw-r--r--pyload/plugins/crypter/MegauploadCom.py18
-rw-r--r--pyload/plugins/crypter/Movie2kTo.py19
-rw-r--r--pyload/plugins/crypter/MultiUpOrg.py38
-rw-r--r--pyload/plugins/crypter/MultiloadCz.py42
-rw-r--r--pyload/plugins/crypter/MultiuploadCom.py18
-rw-r--r--pyload/plugins/crypter/NCryptIn.py315
-rw-r--r--pyload/plugins/crypter/NetfolderIn.py70
-rw-r--r--pyload/plugins/crypter/NosvideoCom.py21
-rw-r--r--pyload/plugins/crypter/OneKhDe.py40
-rw-r--r--pyload/plugins/crypter/OronCom.py19
-rw-r--r--pyload/plugins/crypter/PastebinCom.py21
-rw-r--r--pyload/plugins/crypter/QuickshareCz.py31
-rw-r--r--pyload/plugins/crypter/RSLayerCom.py19
-rw-r--r--pyload/plugins/crypter/RapidfileshareNet.py20
-rw-r--r--pyload/plugins/crypter/RelinkUs.py293
-rw-r--r--pyload/plugins/crypter/SafelinkingNet.py79
-rw-r--r--pyload/plugins/crypter/SecuredIn.py19
-rw-r--r--pyload/plugins/crypter/SexuriaCom.py94
-rw-r--r--pyload/plugins/crypter/ShareLinksBiz.py286
-rw-r--r--pyload/plugins/crypter/SharingmatrixCom.py18
-rw-r--r--pyload/plugins/crypter/SpeedLoadOrg.py19
-rw-r--r--pyload/plugins/crypter/StealthTo.py19
-rw-r--r--pyload/plugins/crypter/TnyCz.py27
-rw-r--r--pyload/plugins/crypter/TrailerzoneInfo.py19
-rw-r--r--pyload/plugins/crypter/TurbobitNet.py44
-rw-r--r--pyload/plugins/crypter/TusfilesNet.py45
-rw-r--r--pyload/plugins/crypter/UlozTo.py46
-rw-r--r--pyload/plugins/crypter/UploadableCh.py24
-rw-r--r--pyload/plugins/crypter/UploadedTo.py34
-rw-r--r--pyload/plugins/crypter/WiiReloadedOrg.py19
-rw-r--r--pyload/plugins/crypter/WuploadCom.py18
-rw-r--r--pyload/plugins/crypter/XFileSharingPro.py47
-rw-r--r--pyload/plugins/crypter/XupPl.py25
-rw-r--r--pyload/plugins/crypter/YoutubeBatch.py148
-rw-r--r--pyload/plugins/hook/AlldebridCom.py27
-rw-r--r--pyload/plugins/hook/BypassCaptcha.py133
-rw-r--r--pyload/plugins/hook/Captcha9kw.py253
-rw-r--r--pyload/plugins/hook/CaptchaBrotherhood.py166
-rw-r--r--pyload/plugins/hook/DeathByCaptcha.py213
-rw-r--r--pyload/plugins/hook/DebridItaliaCom.py27
-rw-r--r--pyload/plugins/hook/EasybytezCom.py39
-rw-r--r--pyload/plugins/hook/ExpertDecoders.py92
-rw-r--r--pyload/plugins/hook/FastixRu.py28
-rw-r--r--pyload/plugins/hook/FreeWayMe.py25
-rw-r--r--pyload/plugins/hook/ImageTyperz.py151
-rw-r--r--pyload/plugins/hook/LinkdecrypterCom.py60
-rw-r--r--pyload/plugins/hook/LinksnappyCom.py27
-rw-r--r--pyload/plugins/hook/MegaDebridEu.py30
-rw-r--r--pyload/plugins/hook/MultishareCz.py27
-rw-r--r--pyload/plugins/hook/MyfastfileCom.py30
-rw-r--r--pyload/plugins/hook/OverLoadMe.py29
-rw-r--r--pyload/plugins/hook/PremiumTo.py38
-rw-r--r--pyload/plugins/hook/PremiumizeMe.py54
-rw-r--r--pyload/plugins/hook/RPNetBiz.py52
-rw-r--r--pyload/plugins/hook/RealdebridCom.py27
-rw-r--r--pyload/plugins/hook/RehostTo.py41
-rw-r--r--pyload/plugins/hook/SimplyPremiumCom.py29
-rw-r--r--pyload/plugins/hook/SimplydebridCom.py22
-rw-r--r--pyload/plugins/hook/UnrestrictLi.py30
-rw-r--r--pyload/plugins/hook/XFileSharingPro.py96
-rw-r--r--pyload/plugins/hook/ZeveraCom.py22
-rw-r--r--pyload/plugins/hoster/AlldebridCom.py87
-rw-r--r--pyload/plugins/hoster/BayfilesCom.py87
-rw-r--r--pyload/plugins/hoster/BezvadataCz.py94
-rw-r--r--pyload/plugins/hoster/BillionuploadsCom.py24
-rw-r--r--pyload/plugins/hoster/BitshareCom.py157
-rw-r--r--pyload/plugins/hoster/BoltsharingCom.py18
-rw-r--r--pyload/plugins/hoster/CatShareNet.py67
-rw-r--r--pyload/plugins/hoster/CloudzerNet.py20
-rw-r--r--pyload/plugins/hoster/CramitIn.py24
-rw-r--r--pyload/plugins/hoster/CrockoCom.py70
-rw-r--r--pyload/plugins/hoster/CyberlockerCh.py18
-rw-r--r--pyload/plugins/hoster/CzshareCom.py152
-rw-r--r--pyload/plugins/hoster/DailymotionCom.py125
-rw-r--r--pyload/plugins/hoster/DataHu.py42
-rw-r--r--pyload/plugins/hoster/DataportCz.py55
-rw-r--r--pyload/plugins/hoster/DateiTo.py82
-rw-r--r--pyload/plugins/hoster/DdlstorageCom.py19
-rw-r--r--pyload/plugins/hoster/DebridItaliaCom.py53
-rw-r--r--pyload/plugins/hoster/DepositfilesCom.py123
-rw-r--r--pyload/plugins/hoster/DevhostSt.py48
-rw-r--r--pyload/plugins/hoster/DlFreeFr.py136
-rw-r--r--pyload/plugins/hoster/DodanePl.py18
-rw-r--r--pyload/plugins/hoster/DuploadOrg.py18
-rw-r--r--pyload/plugins/hoster/EasybytezCom.py26
-rw-r--r--pyload/plugins/hoster/EdiskCz.py56
-rw-r--r--pyload/plugins/hoster/EgoFilesCom.py18
-rw-r--r--pyload/plugins/hoster/EnteruploadCom.py18
-rw-r--r--pyload/plugins/hoster/EpicShareNet.py18
-rw-r--r--pyload/plugins/hoster/EuroshareEu.py67
-rw-r--r--pyload/plugins/hoster/ExtabitCom.py79
-rw-r--r--pyload/plugins/hoster/FastixRu.py76
-rw-r--r--pyload/plugins/hoster/FastshareCz.py77
-rw-r--r--pyload/plugins/hoster/FileApeCom.py18
-rw-r--r--pyload/plugins/hoster/FileParadoxIn.py25
-rw-r--r--pyload/plugins/hoster/FileSharkPl.py138
-rw-r--r--pyload/plugins/hoster/FileStoreTo.py37
-rw-r--r--pyload/plugins/hoster/FilebeerInfo.py18
-rw-r--r--pyload/plugins/hoster/FilecloudIo.py125
-rw-r--r--pyload/plugins/hoster/FilefactoryCom.py90
-rw-r--r--pyload/plugins/hoster/FilejungleCom.py29
-rw-r--r--pyload/plugins/hoster/FileomCom.py35
-rw-r--r--pyload/plugins/hoster/FilepostCom.py130
-rw-r--r--pyload/plugins/hoster/FilepupNet.py51
-rw-r--r--pyload/plugins/hoster/FilerNet.py80
-rw-r--r--pyload/plugins/hoster/FilerioCom.py25
-rw-r--r--pyload/plugins/hoster/FilesMailRu.py106
-rw-r--r--pyload/plugins/hoster/FileserveCom.py217
-rw-r--r--pyload/plugins/hoster/FileshareInUa.py18
-rw-r--r--pyload/plugins/hoster/FilesonicCom.py19
-rw-r--r--pyload/plugins/hoster/FilezyNet.py18
-rw-r--r--pyload/plugins/hoster/FiredriveCom.py18
-rw-r--r--pyload/plugins/hoster/FlyFilesNet.py45
-rw-r--r--pyload/plugins/hoster/FourSharedCom.py61
-rw-r--r--pyload/plugins/hoster/FreakshareCom.py176
-rw-r--r--pyload/plugins/hoster/FreeWayMe.py36
-rw-r--r--pyload/plugins/hoster/FreevideoCz.py18
-rw-r--r--pyload/plugins/hoster/FshareVn.py125
-rw-r--r--pyload/plugins/hoster/Ftp.py79
-rw-r--r--pyload/plugins/hoster/GamefrontCom.py90
-rw-r--r--pyload/plugins/hoster/GigapetaCom.py64
-rw-r--r--pyload/plugins/hoster/GooIm.py39
-rw-r--r--pyload/plugins/hoster/HellshareCz.py48
-rw-r--r--pyload/plugins/hoster/HellspyCz.py18
-rw-r--r--pyload/plugins/hoster/HotfileCom.py21
-rw-r--r--pyload/plugins/hoster/HugefilesNet.py27
-rw-r--r--pyload/plugins/hoster/HundredEightyUploadCom.py27
-rw-r--r--pyload/plugins/hoster/IFileWs.py18
-rw-r--r--pyload/plugins/hoster/IcyFilesCom.py18
-rw-r--r--pyload/plugins/hoster/IfileIt.py67
-rw-r--r--pyload/plugins/hoster/IfolderRu.py76
-rw-r--r--pyload/plugins/hoster/JumbofilesCom.py38
-rw-r--r--pyload/plugins/hoster/JunocloudMe.py28
-rw-r--r--pyload/plugins/hoster/Keep2shareCc.py132
-rw-r--r--pyload/plugins/hoster/KickloadCom.py18
-rw-r--r--pyload/plugins/hoster/KingfilesNet.py82
-rw-r--r--pyload/plugins/hoster/LemUploadsCom.py18
-rw-r--r--pyload/plugins/hoster/LetitbitNet.py142
-rw-r--r--pyload/plugins/hoster/LinksnappyCom.py76
-rw-r--r--pyload/plugins/hoster/LoadTo.py75
-rw-r--r--pyload/plugins/hoster/LomafileCom.py30
-rw-r--r--pyload/plugins/hoster/LuckyShareNet.py73
-rw-r--r--pyload/plugins/hoster/MediafireCom.py124
-rw-r--r--pyload/plugins/hoster/MegaCoNz.py171
-rw-r--r--pyload/plugins/hoster/MegaDebridEu.py94
-rw-r--r--pyload/plugins/hoster/MegaFilesSe.py18
-rw-r--r--pyload/plugins/hoster/MegaRapidCz.py71
-rw-r--r--pyload/plugins/hoster/MegacrypterCom.py56
-rw-r--r--pyload/plugins/hoster/MegareleaseOrg.py19
-rw-r--r--pyload/plugins/hoster/MegasharesCom.py113
-rw-r--r--pyload/plugins/hoster/MegauploadCom.py18
-rw-r--r--pyload/plugins/hoster/MegavideoCom.py19
-rw-r--r--pyload/plugins/hoster/MovReelCom.py26
-rw-r--r--pyload/plugins/hoster/MultishareCz.py80
-rw-r--r--pyload/plugins/hoster/MyfastfileCom.py47
-rw-r--r--pyload/plugins/hoster/MyvideoDe.py49
-rw-r--r--pyload/plugins/hoster/NahrajCz.py18
-rw-r--r--pyload/plugins/hoster/NarodRu.py60
-rw-r--r--pyload/plugins/hoster/NetloadIn.py294
-rw-r--r--pyload/plugins/hoster/NosuploadCom.py43
-rw-r--r--pyload/plugins/hoster/NovafileCom.py31
-rw-r--r--pyload/plugins/hoster/NowDownloadSx.py64
-rw-r--r--pyload/plugins/hoster/NowVideoSx.py44
-rw-r--r--pyload/plugins/hoster/OboomCom.py145
-rw-r--r--pyload/plugins/hoster/OneFichierCom.py71
-rw-r--r--pyload/plugins/hoster/OronCom.py19
-rw-r--r--pyload/plugins/hoster/OverLoadMe.py84
-rw-r--r--pyload/plugins/hoster/PandaplaNet.py18
-rw-r--r--pyload/plugins/hoster/PornhostCom.py80
-rw-r--r--pyload/plugins/hoster/PornhubCom.py89
-rw-r--r--pyload/plugins/hoster/PotloadCom.py18
-rw-r--r--pyload/plugins/hoster/PremiumTo.py81
-rw-r--r--pyload/plugins/hoster/PremiumizeMe.py56
-rw-r--r--pyload/plugins/hoster/PromptfileCom.py45
-rw-r--r--pyload/plugins/hoster/PrzeklejPl.py18
-rw-r--r--pyload/plugins/hoster/QuickshareCz.py90
-rw-r--r--pyload/plugins/hoster/RPNetBiz.py85
-rw-r--r--pyload/plugins/hoster/RapidfileshareNet.py31
-rw-r--r--pyload/plugins/hoster/RapidgatorNet.py199
-rw-r--r--pyload/plugins/hoster/RapiduNet.py82
-rw-r--r--pyload/plugins/hoster/RarefileNet.py28
-rw-r--r--pyload/plugins/hoster/RealdebridCom.py94
-rw-r--r--pyload/plugins/hoster/RedtubeCom.py62
-rw-r--r--pyload/plugins/hoster/RehostTo.py44
-rw-r--r--pyload/plugins/hoster/RemixshareCom.py61
-rw-r--r--pyload/plugins/hoster/RgHostNet.py26
-rw-r--r--pyload/plugins/hoster/RyushareCom.py81
-rw-r--r--pyload/plugins/hoster/SafesharingEu.py25
-rw-r--r--pyload/plugins/hoster/SecureUploadEu.py23
-rw-r--r--pyload/plugins/hoster/SendmywayCom.py24
-rw-r--r--pyload/plugins/hoster/SendspaceCom.py60
-rw-r--r--pyload/plugins/hoster/Share4webCom.py22
-rw-r--r--pyload/plugins/hoster/Share76Com.py18
-rw-r--r--pyload/plugins/hoster/ShareFilesCo.py18
-rw-r--r--pyload/plugins/hoster/SharebeesCom.py18
-rw-r--r--pyload/plugins/hoster/ShareonlineBiz.py191
-rw-r--r--pyload/plugins/hoster/ShareplaceCom.py89
-rw-r--r--pyload/plugins/hoster/SharingmatrixCom.py19
-rw-r--r--pyload/plugins/hoster/ShragleCom.py19
-rw-r--r--pyload/plugins/hoster/SimplyPremiumCom.py82
-rw-r--r--pyload/plugins/hoster/SimplydebridCom.py64
-rw-r--r--pyload/plugins/hoster/SockshareCom.py20
-rw-r--r--pyload/plugins/hoster/SoundcloudCom.py57
-rw-r--r--pyload/plugins/hoster/SpeedLoadOrg.py18
-rw-r--r--pyload/plugins/hoster/SpeedfileCz.py18
-rw-r--r--pyload/plugins/hoster/SpeedyshareCom.py51
-rw-r--r--pyload/plugins/hoster/StorageTo.py18
-rw-r--r--pyload/plugins/hoster/StreamCz.py71
-rw-r--r--pyload/plugins/hoster/StreamcloudEu.py31
-rw-r--r--pyload/plugins/hoster/TurbobitNet.py173
-rw-r--r--pyload/plugins/hoster/TurbouploadCom.py18
-rw-r--r--pyload/plugins/hoster/TusfilesNet.py35
-rw-r--r--pyload/plugins/hoster/TwoSharedCom.py41
-rw-r--r--pyload/plugins/hoster/UlozTo.py164
-rw-r--r--pyload/plugins/hoster/UloziskoSk.py72
-rw-r--r--pyload/plugins/hoster/UnibytesCom.py70
-rw-r--r--pyload/plugins/hoster/UnrestrictLi.py91
-rw-r--r--pyload/plugins/hoster/UpleaCom.py60
-rw-r--r--pyload/plugins/hoster/UploadStationCom.py19
-rw-r--r--pyload/plugins/hoster/UploadableCh.py90
-rw-r--r--pyload/plugins/hoster/UploadboxCom.py18
-rw-r--r--pyload/plugins/hoster/UploadedTo.py245
-rw-r--r--pyload/plugins/hoster/UploadhereCom.py18
-rw-r--r--pyload/plugins/hoster/UploadheroCom.py81
-rw-r--r--pyload/plugins/hoster/UploadingCom.py104
-rw-r--r--pyload/plugins/hoster/UploadkingCom.py18
-rw-r--r--pyload/plugins/hoster/UpstoreNet.py73
-rw-r--r--pyload/plugins/hoster/UptoboxCom.py34
-rw-r--r--pyload/plugins/hoster/VeehdCom.py81
-rw-r--r--pyload/plugins/hoster/VeohCom.py53
-rw-r--r--pyload/plugins/hoster/VidPlayNet.py26
-rw-r--r--pyload/plugins/hoster/VimeoCom.py75
-rw-r--r--pyload/plugins/hoster/Vipleech4uCom.py18
-rw-r--r--pyload/plugins/hoster/WarserverCz.py18
-rw-r--r--pyload/plugins/hoster/WebshareCz.py62
-rw-r--r--pyload/plugins/hoster/WrzucTo.py52
-rw-r--r--pyload/plugins/hoster/WuploadCom.py19
-rw-r--r--pyload/plugins/hoster/X7To.py18
-rw-r--r--pyload/plugins/hoster/XFileSharingPro.py57
-rw-r--r--pyload/plugins/hoster/XHamsterCom.py129
-rw-r--r--pyload/plugins/hoster/XVideosCom.py28
-rw-r--r--pyload/plugins/hoster/Xdcc.py207
-rw-r--r--pyload/plugins/hoster/YibaishiwuCom.py55
-rw-r--r--pyload/plugins/hoster/YoupornCom.py60
-rw-r--r--pyload/plugins/hoster/YourfilesTo.py87
-rw-r--r--pyload/plugins/hoster/YoutubeCom.py185
-rw-r--r--pyload/plugins/hoster/ZDF.py59
-rw-r--r--pyload/plugins/hoster/ZShareNet.py19
-rw-r--r--pyload/plugins/hoster/ZeveraCom.py42
-rw-r--r--pyload/plugins/hoster/ZippyshareCom.py65
-rw-r--r--pyload/plugins/internal/BasePlugin.py106
-rw-r--r--pyload/plugins/internal/DeadCrypter.py32
-rw-r--r--pyload/plugins/internal/DeadHoster.py32
-rw-r--r--pyload/plugins/internal/MultiHoster.py202
-rw-r--r--pyload/plugins/internal/SimpleCrypter.py152
-rw-r--r--pyload/plugins/internal/SimpleHoster.py530
-rw-r--r--pyload/plugins/internal/UnRar.py221
-rw-r--r--pyload/plugins/internal/UnZip.py41
-rw-r--r--pyload/plugins/internal/UpdateManager.py300
-rw-r--r--pyload/plugins/internal/XFSAccount.py155
-rw-r--r--pyload/plugins/internal/XFSCrypter.py29
-rw-r--r--pyload/plugins/internal/XFSHoster.py339
-rw-r--r--pyload/plugins/ocr/GigasizeCom.py24
-rw-r--r--pyload/plugins/ocr/LinksaveIn.py158
-rw-r--r--pyload/plugins/ocr/NetloadIn.py29
-rw-r--r--pyload/plugins/ocr/ShareonlineBiz.py39
-rw-r--r--pyload/remote/ClickAndLoadBackend.py2
-rw-r--r--pyload/remote/SocketBackend.py2
-rw-r--r--pyload/remote/ThriftBackend.py2
-rw-r--r--pyload/remote/socketbackend/create_ttypes.py13
-rw-r--r--pyload/remote/thriftbackend/ThriftClient.py8
-rw-r--r--pyload/remote/thriftbackend/ThriftTest.py9
-rw-r--r--pyload/webui/app/pyload.py10
916 files changed, 30345 insertions, 30329 deletions
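
Most of the churn in this diff is a rename pass: every module under pyload/plugins/ gains a copy under pyload/plugin/, and the manager modules drop their "Manager"/"Thread" filename suffixes (pyload/manager/AccountManager.py becomes pyload/manager/Account.py, pyload/manager/thread/ServerThread.py becomes pyload/manager/thread/Server.py) while the class names inside stay the same. A minimal before/after sketch of what callers import, taken directly from the hunks below:

    # old module paths
    from pyload.manager.AccountManager import AccountManager
    from pyload.datatype.PyFile import PyFile

    # new module paths -- only the modules move, the class names are unchanged
    from pyload.manager.Account import AccountManager
    from pyload.datatype.File import PyFile
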
diff --git a/pyload/Core.py b/pyload/Core.py
index ec53bf7da..fe4ae566e 100644
--- a/pyload/Core.py
+++ b/pyload/Core.py
@@ -21,17 +21,17 @@ from sys import argv, executable, exit
from time import time, sleep
from traceback import print_exc
-from pyload.manager.AccountManager import AccountManager
-from pyload.manager.CaptchaManager import CaptchaManager
+from pyload.manager.Account import AccountManager
+from pyload.manager.Captcha import CaptchaManager
from pyload.config.Parser import ConfigParser
-from pyload.manager.PluginManager import PluginManager
-from pyload.manager.event.PullEvents import PullManager
+from pyload.manager.Plugin import PluginManager
+from pyload.manager.Event import PullManager
from pyload.network.RequestFactory import RequestFactory
-from pyload.manager.thread.ServerThread import WebServer
+from pyload.manager.thread.Server import WebServer
from pyload.manager.event.Scheduler import Scheduler
from pyload.network.JsEngine import JsEngine
from pyload import remote
-from pyload.manager.RemoteManager import RemoteManager
+from pyload.manager.Remote import RemoteManager
from pyload.database import DatabaseBackend, FileHandler
from pyload.utils import freeSpace, formatSize, get_console_encoding
@@ -358,8 +358,8 @@ class Core(object):
# later imported because they would trigger api import, and remote value not set correctly
from pyload import api
- from pyload.manager.AddonManager import AddonManager
- from pyload.manager.ThreadManager import ThreadManager
+ from pyload.manager.Addon import AddonManager
+ from pyload.manager.Thread import ThreadManager
if api.activated != self.remote:
self.log.warning("Import error: API remote status not correct.")
diff --git a/pyload/__init__.py b/pyload/__init__.py
index 03bab80fb..ade59b396 100644
--- a/pyload/__init__.py
+++ b/pyload/__init__.py
@@ -5,6 +5,7 @@ from __future__ import with_statement
import __builtin__
import os
+import platform
import sys
from codecs import getwriter
@@ -40,12 +41,18 @@ __authors__ = [("Marius" , "mkaay@mkaay.de" ),
################################# InitHomeDir #################################
-rootdir = os.path.abspath(os.path.join(__file__, ".."))
-homedir = os.path.expanduser("~")
-enc = get_console_encoding(sys.stdout.encoding)
+__builtin__.owd = os.path.abspath("") #: original working directory
+__builtin__.homedir = os.path.expanduser("~")
+__builtin__.rootdir = os.path.abspath(os.path.join(__file__, ".."))
+__builtin__.configdir = ""
+__builtin__.pypath = os.path.abspath(os.path.join(rootdir, ".."))
-sys.path.append(os.path.join(rootdir, "lib"))
-sys.stdout = getwriter(enc)(sys.stdout, errors="replace")
+
+if "64" in platform.machine():
+ sys.path.append(os.path.join(pypath, "lib64"))
+sys.path.append(os.path.join(pypath, "lib"))
+
+sys.stdout = getwriter(get_console_encoding(sys.stdout.encoding))(sys.stdout, errors="replace")
if homedir == "~" and os.name == "nt":
import ctypes
@@ -63,7 +70,7 @@ if homedir == "~" and os.name == "nt":
_SHGetFolderPath(0, CSIDL_APPDATA, 0, 0, path_buf)
- homedir = path_buf.value
+ __builtin__.homedir = path_buf.value
try:
p = os.path.join(rootdir, "config", "configdir")
@@ -83,13 +90,9 @@ try:
os.chdir(configdir)
-except IOError:
+except IOError, e:
+ print >> sys.stderr, "configdir init failed: %d (%s)" % (e.errno, e.strerror)
sys.exit(1)
-
-__builtin__.owd = os.path.abspath("") #: original working directory
-__builtin__.pypath = os.path.abspath(os.path.join(rootdir, ".."))
-
-__builtin__.rootdir = rootdir
-__builtin__.homedir = homedir
-__builtin__.configdir = configdir
+else:
+ __builtin__.configdir = configdir
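
The reworked bootstrap above publishes owd, homedir, rootdir, configdir and pypath through __builtin__, reports the errno/strerror of a failed configdir setup instead of exiting silently, and only records configdir once the chdir has succeeded. It also adds a lib64 entry to sys.path on 64-bit machines. A minimal sketch of that path setup, assuming a hard-coded pypath for illustration (the module itself derives it from rootdir):

    import os
    import platform
    import sys

    pypath = "/opt/pyload"  # placeholder; the real value is os.path.abspath(os.path.join(rootdir, ".."))

    # 64-bit machines get the bundled lib64 directory in addition to lib, as in the new bootstrap
    if "64" in platform.machine():
        sys.path.append(os.path.join(pypath, "lib64"))
    sys.path.append(os.path.join(pypath, "lib"))
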
diff --git a/pyload/api/__init__.py b/pyload/api/__init__.py
index 399117845..387481da2 100644
--- a/pyload/api/__init__.py
+++ b/pyload/api/__init__.py
@@ -8,7 +8,7 @@ import re
from urlparse import urlparse
-from pyload.datatype.PyFile import PyFile
+from pyload.datatype.File import PyFile
from pyload.utils.packagetools import parseNames
from pyload.network.RequestFactory import getURL
from pyload.remote import activated
diff --git a/pyload/database/File.py b/pyload/database/File.py
new file mode 100644
index 000000000..2b7c6cad9
--- /dev/null
+++ b/pyload/database/File.py
@@ -0,0 +1,875 @@
+# -*- coding: utf-8 -*-
+# @author: RaNaN, mkaay
+
+from threading import RLock
+from time import time
+
+from pyload.utils import formatSize, lock
+from pyload.manager.Event import InsertEvent, ReloadAllEvent, RemoveEvent, UpdateEvent
+from pyload.datatype.Package import PyPackage
+from pyload.datatype.File import PyFile
+from pyload.database import style, DatabaseBackend
+
+try:
+ from pysqlite2 import dbapi2 as sqlite3
+except Exception:
+ import sqlite3
+
+
+class FileHandler(object):
+ """Handles all request made to obtain information,
+ modify status or other request for links or packages"""
+
+ def __init__(self, core):
+ """Constructor"""
+ self.core = core
+
+ # translations
+ self.statusMsg = [_("finished"), _("offline"), _("online"), _("queued"), _("skipped"), _("waiting"), _("temp. offline"), _("starting"), _("failed"), _("aborted"), _("decrypting"), _("custom"), _("downloading"), _("processing"), _("unknown")]
+
+ self.cache = {} #holds instances for files
+ self.packageCache = {} # same for packages
+ #@TODO: purge the cache
+
+ self.jobCache = {}
+
+ self.lock = RLock() #@TODO should be a Lock w/o R
+ #self.lock._Verbose__verbose = True
+
+ self.filecount = -1 # if an invalid value is set, get the current value from the db
+ self.queuecount = -1 #number of packages to be loaded
+ self.unchanged = False #determines if any changes were made since the last call
+
+ self.db = self.core.db
+
+ def change(func):
+ def new(*args):
+ args[0].unchanged = False
+ args[0].filecount = -1
+ args[0].queuecount = -1
+ args[0].jobCache = {}
+ return func(*args)
+ return new
+
+ #--------------------------------------------------------------------------
+ def save(self):
+ """saves all data to backend"""
+ self.db.commit()
+
+ #--------------------------------------------------------------------------
+ def syncSave(self):
+ """saves all data to backend and waits until all data are written"""
+ pyfiles = self.cache.values()
+ for pyfile in pyfiles:
+ pyfile.sync()
+
+ pypacks = self.packageCache.values()
+ for pypack in pypacks:
+ pypack.sync()
+
+ self.db.syncSave()
+
+ @lock
+ def getCompleteData(self, queue=1):
+ """gets a complete data representation"""
+
+ data = self.db.getAllLinks(queue)
+ packs = self.db.getAllPackages(queue)
+
+ data.update([(x.id, x.toDbDict()[x.id]) for x in self.cache.values()])
+
+ for x in self.packageCache.itervalues():
+ if x.queue != queue or x.id not in packs: continue
+ packs[x.id].update(x.toDict()[x.id])
+
+ for key, value in data.iteritems():
+ if value["package"] in packs:
+ packs[value["package"]]["links"][key] = value
+
+ return packs
+
+ @lock
+ def getInfoData(self, queue=1):
+ """gets a data representation without links"""
+
+ packs = self.db.getAllPackages(queue)
+ for x in self.packageCache.itervalues():
+ if x.queue != queue or x.id not in packs: continue
+ packs[x.id].update(x.toDict()[x.id])
+
+ return packs
+
+ @lock
+ @change
+ def addLinks(self, urls, package):
+ """adds links"""
+
+ self.core.addonManager.dispatchEvent("links-added", urls, package)
+
+ data = self.core.pluginManager.parseUrls(urls)
+
+ self.db.addLinks(data, package)
+ self.core.threadManager.createInfoThread(data, package)
+
+ #@TODO change from reloadAll event to package update event
+ self.core.pullManager.addEvent(ReloadAllEvent("collector"))
+
+ #--------------------------------------------------------------------------
+ @lock
+ @change
+ def addPackage(self, name, folder, queue=0):
+ """adds a package, default to link collector"""
+ lastID = self.db.addPackage(name, folder, queue)
+ p = self.db.getPackage(lastID)
+ e = InsertEvent("pack", lastID, p.order, "collector" if not queue else "queue")
+ self.core.pullManager.addEvent(e)
+ return lastID
+
+ #--------------------------------------------------------------------------
+ @lock
+ @change
+ def deletePackage(self, id):
+ """delete package and all contained links"""
+
+ p = self.getPackage(id)
+ if not p:
+ if id in self.packageCache: del self.packageCache[id]
+ return
+
+ oldorder = p.order
+ queue = p.queue
+
+ e = RemoveEvent("pack", id, "collector" if not p.queue else "queue")
+
+ pyfiles = self.cache.values()
+
+ for pyfile in pyfiles:
+ if pyfile.packageid == id:
+ pyfile.abortDownload()
+ pyfile.release()
+
+ self.db.deletePackage(p)
+ self.core.pullManager.addEvent(e)
+ self.core.addonManager.dispatchEvent("package-deleted", id)
+
+ if id in self.packageCache:
+ del self.packageCache[id]
+
+ packs = self.packageCache.values()
+ for pack in packs:
+ if pack.queue == queue and pack.order > oldorder:
+ pack.order -= 1
+ pack.notifyChange()
+
+ #--------------------------------------------------------------------------
+ @lock
+ @change
+ def deleteLink(self, id):
+ """deletes links"""
+
+ f = self.getFile(id)
+ if not f:
+ return None
+
+ pid = f.packageid
+ e = RemoveEvent("file", id, "collector" if not f.package().queue else "queue")
+
+ oldorder = f.order
+
+ if id in self.core.threadManager.processingIds():
+ self.cache[id].abortDownload()
+
+ if id in self.cache:
+ del self.cache[id]
+
+ self.db.deleteLink(f)
+
+ self.core.pullManager.addEvent(e)
+
+ p = self.getPackage(pid)
+ if not len(p.getChildren()):
+ p.delete()
+
+ pyfiles = self.cache.values()
+ for pyfile in pyfiles:
+ if pyfile.packageid == pid and pyfile.order > oldorder:
+ pyfile.order -= 1
+ pyfile.notifyChange()
+
+ #--------------------------------------------------------------------------
+ def releaseLink(self, id):
+ """removes pyfile from cache"""
+ if id in self.cache:
+ del self.cache[id]
+
+ #--------------------------------------------------------------------------
+ def releasePackage(self, id):
+ """removes package from cache"""
+ if id in self.packageCache:
+ del self.packageCache[id]
+
+ #--------------------------------------------------------------------------
+ def updateLink(self, pyfile):
+ """updates link"""
+ self.db.updateLink(pyfile)
+
+ e = UpdateEvent("file", pyfile.id, "collector" if not pyfile.package().queue else "queue")
+ self.core.pullManager.addEvent(e)
+
+ #--------------------------------------------------------------------------
+ def updatePackage(self, pypack):
+ """updates a package"""
+ self.db.updatePackage(pypack)
+
+ e = UpdateEvent("pack", pypack.id, "collector" if not pypack.queue else "queue")
+ self.core.pullManager.addEvent(e)
+
+ #--------------------------------------------------------------------------
+ def getPackage(self, id):
+ """return package instance"""
+
+ if id in self.packageCache:
+ return self.packageCache[id]
+ else:
+ return self.db.getPackage(id)
+
+ #--------------------------------------------------------------------------
+ def getPackageData(self, id):
+ """returns dict with package information"""
+ pack = self.getPackage(id)
+
+ if not pack:
+ return None
+
+ pack = pack.toDict()[id]
+
+ data = self.db.getPackageData(id)
+
+ tmplist = []
+
+ cache = self.cache.values()
+ for x in cache:
+ if int(x.toDbDict()[x.id]["package"]) == int(id):
+ tmplist.append((x.id, x.toDbDict()[x.id]))
+ data.update(tmplist)
+
+ pack["links"] = data
+
+ return pack
+
+ #--------------------------------------------------------------------------
+ def getFileData(self, id):
+ """returns dict with file information"""
+ if id in self.cache:
+ return self.cache[id].toDbDict()
+
+ return self.db.getLinkData(id)
+
+ #--------------------------------------------------------------------------
+ def getFile(self, id):
+ """returns pyfile instance"""
+ if id in self.cache:
+ return self.cache[id]
+ else:
+ return self.db.getFile(id)
+
+ #--------------------------------------------------------------------------
+ @lock
+ def getJob(self, occ):
+ """get suitable job"""
+
+ #@TODO clean mess
+ #@TODO improve selection of valid jobs
+
+ if occ in self.jobCache:
+ if self.jobCache[occ]:
+ id = self.jobCache[occ].pop()
+ if id == "empty":
+ pyfile = None
+ self.jobCache[occ].append("empty")
+ else:
+ pyfile = self.getFile(id)
+ else:
+ jobs = self.db.getJob(occ)
+ jobs.reverse()
+ if not jobs:
+ self.jobCache[occ].append("empty")
+ pyfile = None
+ else:
+ self.jobCache[occ].extend(jobs)
+ pyfile = self.getFile(self.jobCache[occ].pop())
+
+ else:
+ self.jobCache = {} # better not to cache too much
+ jobs = self.db.getJob(occ)
+ jobs.reverse()
+ self.jobCache[occ] = jobs
+
+ if not jobs:
+ self.jobCache[occ].append("empty")
+ pyfile = None
+ else:
+ pyfile = self.getFile(self.jobCache[occ].pop())
+
+ #@TODO: maybe the new job has to be approved...
+
+
+ #pyfile = self.getFile(self.jobCache[occ].pop())
+ return pyfile
+
+ @lock
+ def getDecryptJob(self):
+ """return job for decrypting"""
+ if "decrypt" in self.jobCache:
+ return None
+
+ plugins = self.core.pluginManager.crypterPlugins.keys() + self.core.pluginManager.containerPlugins.keys()
+ plugins = str(tuple(plugins))
+
+ jobs = self.db.getPluginJob(plugins)
+ if jobs:
+ return self.getFile(jobs[0])
+ else:
+ self.jobCache["decrypt"] = "empty"
+ return None
+
+ def getFileCount(self):
+ """returns number of files"""
+
+ if self.filecount == -1:
+ self.filecount = self.db.filecount(1)
+
+ return self.filecount
+
+ def getQueueCount(self, force=False):
+ """number of files that have to be processed"""
+ if self.queuecount == -1 or force:
+ self.queuecount = self.db.queuecount(1)
+
+ return self.queuecount
+
+ def checkAllLinksFinished(self):
+ """checks if all files are finished and dispatches an event"""
+
+ if not self.getQueueCount(True):
+ self.core.addonManager.dispatchEvent("all_downloads-finished")
+ self.core.log.debug("All downloads finished")
+ return True
+
+ return False
+
+ def checkAllLinksProcessed(self, fid):
+ """checks if all files were processed and pyload would be idle now; needs fid, which will be ignored when counting"""
+
+ # reset count so statistics will update (this is called when a download was processed)
+ self.resetCount()
+
+ if not self.db.processcount(1, fid):
+ self.core.addonManager.dispatchEvent("all_downloads-processed")
+ self.core.log.debug("All downloads processed")
+ return True
+
+ return False
+
+ def resetCount(self):
+ self.queuecount = -1
+
+ @lock
+ @change
+ def restartPackage(self, id):
+ """restart package"""
+ pyfiles = self.cache.values()
+ for pyfile in pyfiles:
+ if pyfile.packageid == id:
+ self.restartFile(pyfile.id)
+
+ self.db.restartPackage(id)
+
+ if id in self.packageCache:
+ self.packageCache[id].setFinished = False
+
+ e = UpdateEvent("pack", id, "collector" if not self.getPackage(id).queue else "queue")
+ self.core.pullManager.addEvent(e)
+
+ @lock
+ @change
+ def restartFile(self, id):
+ """ restart file"""
+ if id in self.cache:
+ self.cache[id].status = 3
+ self.cache[id].name = self.cache[id].url
+ self.cache[id].error = ""
+ self.cache[id].abortDownload()
+
+
+ self.db.restartFile(id)
+
+ e = UpdateEvent("file", id, "collector" if not self.getFile(id).package().queue else "queue")
+ self.core.pullManager.addEvent(e)
+
+ @lock
+ @change
+ def setPackageLocation(self, id, queue):
+ """push package to queue"""
+
+ p = self.db.getPackage(id)
+ oldorder = p.order
+
+ e = RemoveEvent("pack", id, "collector" if not p.queue else "queue")
+ self.core.pullManager.addEvent(e)
+
+ self.db.clearPackageOrder(p)
+
+ p = self.db.getPackage(id)
+
+ p.queue = queue
+ self.db.updatePackage(p)
+
+ self.db.reorderPackage(p, -1, True)
+
+ packs = self.packageCache.values()
+ for pack in packs:
+ if pack.queue != queue and pack.order > oldorder:
+ pack.order -= 1
+ pack.notifyChange()
+
+ self.db.commit()
+ self.releasePackage(id)
+ p = self.getPackage(id)
+
+ e = InsertEvent("pack", id, p.order, "collector" if not p.queue else "queue")
+ self.core.pullManager.addEvent(e)
+
+ @lock
+ @change
+ def reorderPackage(self, id, position):
+ p = self.getPackage(id)
+
+ e = RemoveEvent("pack", id, "collector" if not p.queue else "queue")
+ self.core.pullManager.addEvent(e)
+ self.db.reorderPackage(p, position)
+
+ packs = self.packageCache.values()
+ for pack in packs:
+ if pack.queue != p.queue or pack.order < 0 or pack == p: continue
+ if p.order > position:
+ if pack.order >= position and pack.order < p.order:
+ pack.order += 1
+ pack.notifyChange()
+ elif p.order < position:
+ if pack.order <= position and pack.order > p.order:
+ pack.order -= 1
+ pack.notifyChange()
+
+ p.order = position
+ self.db.commit()
+
+ e = InsertEvent("pack", id, position, "collector" if not p.queue else "queue")
+ self.core.pullManager.addEvent(e)
+
+ @lock
+ @change
+ def reorderFile(self, id, position):
+ f = self.getFileData(id)
+ f = f[id]
+
+ e = RemoveEvent("file", id, "collector" if not self.getPackage(f["package"]).queue else "queue")
+ self.core.pullManager.addEvent(e)
+
+ self.db.reorderLink(f, position)
+
+ pyfiles = self.cache.values()
+ for pyfile in pyfiles:
+ if pyfile.packageid != f["package"] or pyfile.order < 0: continue
+ if f["order"] > position:
+ if pyfile.order >= position and pyfile.order < f["order"]:
+ pyfile.order += 1
+ pyfile.notifyChange()
+ elif f["order"] < position:
+ if pyfile.order <= position and pyfile.order > f["order"]:
+ pyfile.order -= 1
+ pyfile.notifyChange()
+
+ if id in self.cache:
+ self.cache[id].order = position
+
+ self.db.commit()
+
+ e = InsertEvent("file", id, position, "collector" if not self.getPackage(f["package"]).queue else "queue")
+ self.core.pullManager.addEvent(e)
+
+ @change
+ def updateFileInfo(self, data, pid):
+ """ updates file info (name, size, status, url)"""
+ ids = self.db.updateLinkInfo(data)
+ e = UpdateEvent("pack", pid, "collector" if not self.getPackage(pid).queue else "queue")
+ self.core.pullManager.addEvent(e)
+
+ def checkPackageFinished(self, pyfile):
+ """ checks if package is finished and calls AddonManager """
+
+ ids = self.db.getUnfinished(pyfile.packageid)
+ if not ids or (pyfile.id in ids and len(ids) == 1):
+ if not pyfile.package().setFinished:
+ self.core.log.info(_("Package finished: %s") % pyfile.package().name)
+ self.core.addonManager.packageFinished(pyfile.package())
+ pyfile.package().setFinished = True
+
+
+ def reCheckPackage(self, pid):
+ """ recheck links in package """
+ data = self.db.getPackageData(pid)
+
+ urls = []
+
+ for pyfile in data.itervalues():
+ if pyfile["status"] not in (0, 12, 13):
+ urls.append((pyfile["url"], pyfile["plugin"]))
+
+ self.core.threadManager.createInfoThread(urls, pid)
+
+ @lock
+ @change
+ def deleteFinishedLinks(self):
+ """ deletes finished links and packages, returns ids of the deleted packages """
+
+ old_packs = self.getInfoData(0)
+ old_packs.update(self.getInfoData(1))
+
+ self.db.deleteFinished()
+
+ new_packs = self.db.getAllPackages(0)
+ new_packs.update(self.db.getAllPackages(1))
+ #get new packages only from db
+
+ deleted = []
+ for id in old_packs.iterkeys():
+ if id not in new_packs:
+ deleted.append(id)
+ self.deletePackage(int(id))
+
+ return deleted
+
+ @lock
+ @change
+ def restartFailed(self):
+ """ restart all failed links """
+ self.db.restartFailed()
+
+class FileMethods(object):
+ @style.queue
+ def filecount(self, queue):
+ """returns number of files in queue"""
+ self.c.execute("SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=?", (queue,))
+ return self.c.fetchone()[0]
+
+ @style.queue
+ def queuecount(self, queue):
+ """ number of files in queue not finished yet"""
+ self.c.execute("SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? AND l.status NOT IN (0, 4)", (queue,))
+ return self.c.fetchone()[0]
+
+ @style.queue
+ def processcount(self, queue, fid):
+ """ number of files which have to be processed """
+ self.c.execute("SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? AND l.status IN (2, 3, 5, 7, 12) AND l.id != ?", (queue, str(fid)))
+ return self.c.fetchone()[0]
+
+ @style.inner
+ def _nextPackageOrder(self, queue=0):
+ self.c.execute('SELECT MAX(packageorder) FROM packages WHERE queue=?', (queue,))
+ max = self.c.fetchone()[0]
+ if max is not None:
+ return max + 1
+ else:
+ return 0
+
+ @style.inner
+ def _nextFileOrder(self, package):
+ self.c.execute('SELECT MAX(linkorder) FROM links WHERE package=?', (package,))
+ max = self.c.fetchone()[0]
+ if max is not None:
+ return max + 1
+ else:
+ return 0
+
+ @style.queue
+ def addLink(self, url, name, plugin, package):
+ order = self._nextFileOrder(package)
+ self.c.execute('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', (url, name, plugin, package, order))
+ return self.c.lastrowid
+
+ @style.queue
+ def addLinks(self, links, package):
+ """ links is a list of tuples (url, plugin-type, plugin-name)"""
+ order = self._nextFileOrder(package)
+ orders = [order + x for x in range(len(links))]
+ links = [(x[0], x[0], (x[1], x[2]), package, o) for x, o in zip(links, orders)]
+ self.c.executemany('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', links)
+
+ @style.queue
+ def addPackage(self, name, folder, queue):
+ order = self._nextPackageOrder(queue)
+ self.c.execute('INSERT INTO packages(name, folder, queue, packageorder) VALUES(?,?,?,?)', (name, folder, queue, order))
+ return self.c.lastrowid
+
+ @style.queue
+ def deletePackage(self, p):
+
+ self.c.execute('DELETE FROM links WHERE package=?', (str(p.id),))
+ self.c.execute('DELETE FROM packages WHERE id=?', (str(p.id),))
+ self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=?', (p.order, p.queue))
+
+ @style.queue
+ def deleteLink(self, f):
+
+ self.c.execute('DELETE FROM links WHERE id=?', (str(f.id),))
+ self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder > ? AND package=?', (f.order, str(f.packageid)))
+
+
+ @style.queue
+ def getAllLinks(self, q):
+ """return information about all links in queue q
+
+ q = 0: collector
+ q = 1: queue
+
+ format:
+
+ {
+ id: {'name': name, ... 'package': id }, ...
+ }
+
+ """
+ self.c.execute('SELECT l.id, l.url, l.name, l.size, l.status, l.error, l.plugin, l.package, l.linkorder FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? ORDER BY l.linkorder', (q,))
+ data = {}
+ for r in self.c:
+ data[r[0]] = {
+ 'id': r[0],
+ 'url': r[1],
+ 'name': r[2],
+ 'size': r[3],
+ 'format_size': formatSize(r[3]),
+ 'status': r[4],
+ 'statusmsg': self.manager.statusMsg[r[4]],
+ 'error': r[5],
+ 'plugin': r[6],
+ 'package': r[7],
+ 'order': r[8],
+ }
+
+ return data
+
+ @style.queue
+ def getAllPackages(self, q):
+ """return information about packages in queue q
+ (only useful when fetching all data)
+
+ q = 0: collector
+ q = 1: queue
+
+ format:
+
+ {
+ id: {'name': name ... 'links': {}}, ...
+ }
+ """
+ self.c.execute('SELECT p.id, p.name, p.folder, p.site, p.password, p.queue, p.packageorder, s.sizetotal, s.sizedone, s.linksdone, s.linkstotal \
+ FROM packages p JOIN pstats s ON p.id = s.id \
+ WHERE p.queue=? ORDER BY p.packageorder', str(q))
+
+ data = {}
+ for r in self.c:
+ data[r[0]] = {
+ 'id': r[0],
+ 'name': r[1],
+ 'folder': r[2],
+ 'site': r[3],
+ 'password': r[4],
+ 'queue': r[5],
+ 'order': r[6],
+ 'sizetotal': int(r[7]),
+ 'sizedone': r[8] if r[8] else 0, #these can be None
+ 'linksdone': r[9] if r[9] else 0,
+ 'linkstotal': r[10],
+ 'links': {}
+ }
+
+ return data
+
+ @style.queue
+ def getLinkData(self, id):
+ """get link information as dict"""
+ self.c.execute('SELECT id, url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?', (str(id),))
+ data = {}
+ r = self.c.fetchone()
+ if not r:
+ return None
+ data[r[0]] = {
+ 'id': r[0],
+ 'url': r[1],
+ 'name': r[2],
+ 'size': r[3],
+ 'format_size': formatSize(r[3]),
+ 'status': r[4],
+ 'statusmsg': self.manager.statusMsg[r[4]],
+ 'error': r[5],
+ 'plugin': r[6],
+ 'package': r[7],
+ 'order': r[8],
+ }
+
+ return data
+
+ @style.queue
+ def getPackageData(self, id):
+ """get data about links for a package"""
+ self.c.execute('SELECT id, url, name, size, status, error, plugin, package, linkorder FROM links WHERE package=? ORDER BY linkorder', (str(id),))
+
+ data = {}
+ for r in self.c:
+ data[r[0]] = {
+ 'id': r[0],
+ 'url': r[1],
+ 'name': r[2],
+ 'size': r[3],
+ 'format_size': formatSize(r[3]),
+ 'status': r[4],
+ 'statusmsg': self.manager.statusMsg[r[4]],
+ 'error': r[5],
+ 'plugin': r[6],
+ 'package': r[7],
+ 'order': r[8],
+ }
+
+ return data
+
+
+ @style.async
+ def updateLink(self, f):
+ self.c.execute('UPDATE links SET url=?, name=?, size=?, status=?, error=?, package=? WHERE id=?', (f.url, f.name, f.size, f.status, f.error, str(f.packageid), str(f.id)))
+
+ @style.queue
+ def updatePackage(self, p):
+ self.c.execute('UPDATE packages SET name=?, folder=?, site=?, password=?, queue=? WHERE id=?', (p.name, p.folder, p.site, p.password, p.queue, str(p.id)))
+
+ @style.queue
+ def updateLinkInfo(self, data):
+ """ data is a list of tuples (name, size, status, url) """
+ self.c.executemany('UPDATE links SET name=?, size=?, status=? WHERE url=? AND status IN (1, 2, 3, 14)', data)
+ ids = []
+ self.c.execute('SELECT id FROM links WHERE url IN (\'%s\')' % "','".join([x[3] for x in data]))
+ for r in self.c:
+ ids.append(int(r[0]))
+ return ids
+
+ @style.queue
+ def reorderPackage(self, p, position, noMove=False):
+ if position == -1:
+ position = self._nextPackageOrder(p.queue)
+ if not noMove:
+ if p.order > position:
+ self.c.execute('UPDATE packages SET packageorder=packageorder+1 WHERE packageorder >= ? AND packageorder < ? AND queue=? AND packageorder >= 0', (position, p.order, p.queue))
+ elif p.order < position:
+ self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder <= ? AND packageorder > ? AND queue=? AND packageorder >= 0', (position, p.order, p.queue))
+
+ self.c.execute('UPDATE packages SET packageorder=? WHERE id=?', (position, str(p.id)))
+
+ @style.queue
+ def reorderLink(self, f, position):
+ """ reorder link with f as dict for pyfile """
+ if f["order"] > position:
+ self.c.execute('UPDATE links SET linkorder=linkorder+1 WHERE linkorder >= ? AND linkorder < ? AND package=?', (position, f["order"], f["package"]))
+ elif f["order"] < position:
+ self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder <= ? AND linkorder > ? AND package=?', (position, f["order"], f["package"]))
+
+ self.c.execute('UPDATE links SET linkorder=? WHERE id=?', (position, f["id"]))
+
+ @style.queue
+ def clearPackageOrder(self, p):
+ self.c.execute('UPDATE packages SET packageorder=? WHERE id=?', (-1, str(p.id)))
+ self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=? AND id != ?', (p.order, p.queue, str(p.id)))
+
+ @style.async
+ def restartFile(self, id):
+ self.c.execute('UPDATE links SET status=3, error="" WHERE id=?', (str(id),))
+
+ @style.async
+ def restartPackage(self, id):
+ self.c.execute('UPDATE links SET status=3 WHERE package=?', (str(id),))
+
+ @style.queue
+ def getPackage(self, id):
+ """return package instance from id"""
+ self.c.execute("SELECT name, folder, site, password, queue, packageorder FROM packages WHERE id=?", (str(id),))
+ r = self.c.fetchone()
+ if not r: return None
+ return PyPackage(self.manager, id, * r)
+
+ #--------------------------------------------------------------------------
+ @style.queue
+ def getFile(self, id):
+ """return link instance from id"""
+ self.c.execute("SELECT url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?", (str(id),))
+ r = self.c.fetchone()
+ if not r: return None
+ return PyFile(self.manager, id, * r)
+
+
+ @style.queue
+ def getJob(self, occ):
+ """return pyfile ids which are suitable for download and don't use an occupied plugin"""
+
+ #@TODO improve this hardcoded method
+ pre = "('DLC', 'LinkList', 'SerienjunkiesOrg', 'CCF', 'RSDF')" #plugins which are processed in collector
+
+ cmd = "("
+ for i, item in enumerate(occ):
+ if i: cmd += ", "
+ cmd += "'%s'" % item
+
+ cmd += ")"
+
+ cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE ((p.queue=1 AND l.plugin NOT IN %s) OR l.plugin IN %s) AND l.status IN (2, 3, 14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % (cmd, pre)
+
+ self.c.execute(cmd) # very bad!
+
+ return [x[0] for x in self.c]
+
+ @style.queue
+ def getPluginJob(self, plugins):
+ """returns pyfile ids whose plugin is in the given set"""
+ cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE l.plugin IN %s AND l.status IN (2, 3, 14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % plugins
+
+ self.c.execute(cmd) # very bad!
+
+ return [x[0] for x in self.c]
+
+ @style.queue
+ def getUnfinished(self, pid):
+ """return a list of at most 3 ids of pyfiles in the package that are not finished, skipped or processing"""
+
+ self.c.execute("SELECT id FROM links WHERE package=? AND status NOT IN (0, 4, 13) LIMIT 3", (str(pid),))
+ return [r[0] for r in self.c]
+
+ @style.queue
+ def deleteFinished(self):
+ self.c.execute("DELETE FROM links WHERE status IN (0, 4)")
+ self.c.execute("DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links WHERE packages.id=links.package)")
+
+ @style.queue
+ def restartFailed(self):
+ self.c.execute("UPDATE links SET status=3, error='' WHERE status IN (6, 8, 9)")
+
+ @style.queue
+ def findDuplicates(self, id, folder, filename):
+ """ checks if the filename already exists under a different id in the same folder """
+ self.c.execute("SELECT l.plugin FROM links as l INNER JOIN packages as p ON l.package=p.id AND p.folder=? WHERE l.id!=? AND l.status=0 AND l.name=?", (folder, id, filename))
+ return self.c.fetchone()
+
+ @style.queue
+ def purgeLinks(self):
+ self.c.execute("DELETE FROM links;")
+ self.c.execute("DELETE FROM packages;")
+
+DatabaseBackend.registerSub(FileMethods)
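
A minimal usage sketch of the handler/backend pair added above (editorial note, not part of the commit). It assumes the FileHandler instance is reachable as core.files on a running pyload Core, which this diff does not show:

    # hypothetical wiring: core is a running pyload Core, core.files the FileHandler above
    pid = core.files.addPackage("example pack", "example_pack", queue=0)   # lands in the collector
    core.files.addLinks(["http://example.com/a.zip", "http://example.com/b.zip"], pid)

    info = core.files.getPackageData(pid)            # package dict with a "links" sub-dict
    for fid, link in info["links"].iteritems():
        core.log.debug("%s -> %s" % (fid, link["statusmsg"]))

    core.files.setPackageLocation(pid, 1)            # push the package from collector to queue
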
diff --git a/pyload/database/FileDatabase.py b/pyload/database/FileDatabase.py
deleted file mode 100644
index 933e06d80..000000000
--- a/pyload/database/FileDatabase.py
+++ /dev/null
@@ -1,875 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN, mkaay
-
-from threading import RLock
-from time import time
-
-from pyload.utils import formatSize, lock
-from pyload.manager.event.PullEvents import InsertEvent, ReloadAllEvent, RemoveEvent, UpdateEvent
-from pyload.datatype.PyPackage import PyPackage
-from pyload.datatype.PyFile import PyFile
-from pyload.database import style, DatabaseBackend
-
-try:
- from pysqlite2 import dbapi2 as sqlite3
-except Exception:
- import sqlite3
-
-
-class FileHandler(object):
- """Handles all request made to obtain information,
- modify status or other request for links or packages"""
-
- def __init__(self, core):
- """Constructor"""
- self.core = core
-
- # translations
- self.statusMsg = [_("finished"), _("offline"), _("online"), _("queued"), _("skipped"), _("waiting"), _("temp. offline"), _("starting"), _("failed"), _("aborted"), _("decrypting"), _("custom"), _("downloading"), _("processing"), _("unknown")]
-
- self.cache = {} #holds instances for files
- self.packageCache = {} # same for packages
- #@TODO: purge the cache
-
- self.jobCache = {}
-
- self.lock = RLock() #@TODO should be a Lock w/o R
- #self.lock._Verbose__verbose = True
-
- self.filecount = -1 # if an invalid value is set get current value from db
- self.queuecount = -1 #number of package to be loaded
- self.unchanged = False #determines if any changes was made since last call
-
- self.db = self.core.db
-
- def change(func):
- def new(*args):
- args[0].unchanged = False
- args[0].filecount = -1
- args[0].queuecount = -1
- args[0].jobCache = {}
- return func(*args)
- return new
-
- #--------------------------------------------------------------------------
- def save(self):
- """saves all data to backend"""
- self.db.commit()
-
- #--------------------------------------------------------------------------
- def syncSave(self):
- """saves all data to backend and waits until all data are written"""
- pyfiles = self.cache.values()
- for pyfile in pyfiles:
- pyfile.sync()
-
- pypacks = self.packageCache.values()
- for pypack in pypacks:
- pypack.sync()
-
- self.db.syncSave()
-
- @lock
- def getCompleteData(self, queue=1):
- """gets a complete data representation"""
-
- data = self.db.getAllLinks(queue)
- packs = self.db.getAllPackages(queue)
-
- data.update([(x.id, x.toDbDict()[x.id]) for x in self.cache.values()])
-
- for x in self.packageCache.itervalues():
- if x.queue != queue or x.id not in packs: continue
- packs[x.id].update(x.toDict()[x.id])
-
- for key, value in data.iteritems():
- if value["package"] in packs:
- packs[value["package"]]["links"][key] = value
-
- return packs
-
- @lock
- def getInfoData(self, queue=1):
- """gets a data representation without links"""
-
- packs = self.db.getAllPackages(queue)
- for x in self.packageCache.itervalues():
- if x.queue != queue or x.id not in packs: continue
- packs[x.id].update(x.toDict()[x.id])
-
- return packs
-
- @lock
- @change
- def addLinks(self, urls, package):
- """adds links"""
-
- self.core.addonManager.dispatchEvent("links-added", urls, package)
-
- data = self.core.pluginManager.parseUrls(urls)
-
- self.db.addLinks(data, package)
- self.core.threadManager.createInfoThread(data, package)
-
- #@TODO change from reloadAll event to package update event
- self.core.pullManager.addEvent(ReloadAllEvent("collector"))
-
- #--------------------------------------------------------------------------
- @lock
- @change
- def addPackage(self, name, folder, queue=0):
- """adds a package, default to link collector"""
- lastID = self.db.addPackage(name, folder, queue)
- p = self.db.getPackage(lastID)
- e = InsertEvent("pack", lastID, p.order, "collector" if not queue else "queue")
- self.core.pullManager.addEvent(e)
- return lastID
-
- #--------------------------------------------------------------------------
- @lock
- @change
- def deletePackage(self, id):
- """delete package and all contained links"""
-
- p = self.getPackage(id)
- if not p:
- if id in self.packageCache: del self.packageCache[id]
- return
-
- oldorder = p.order
- queue = p.queue
-
- e = RemoveEvent("pack", id, "collector" if not p.queue else "queue")
-
- pyfiles = self.cache.values()
-
- for pyfile in pyfiles:
- if pyfile.packageid == id:
- pyfile.abortDownload()
- pyfile.release()
-
- self.db.deletePackage(p)
- self.core.pullManager.addEvent(e)
- self.core.addonManager.dispatchEvent("package-deleted", id)
-
- if id in self.packageCache:
- del self.packageCache[id]
-
- packs = self.packageCache.values()
- for pack in packs:
- if pack.queue == queue and pack.order > oldorder:
- pack.order -= 1
- pack.notifyChange()
-
- #--------------------------------------------------------------------------
- @lock
- @change
- def deleteLink(self, id):
- """deletes links"""
-
- f = self.getFile(id)
- if not f:
- return None
-
- pid = f.packageid
- e = RemoveEvent("file", id, "collector" if not f.package().queue else "queue")
-
- oldorder = f.order
-
- if id in self.core.threadManager.processingIds():
- self.cache[id].abortDownload()
-
- if id in self.cache:
- del self.cache[id]
-
- self.db.deleteLink(f)
-
- self.core.pullManager.addEvent(e)
-
- p = self.getPackage(pid)
- if not len(p.getChildren()):
- p.delete()
-
- pyfiles = self.cache.values()
- for pyfile in pyfiles:
- if pyfile.packageid == pid and pyfile.order > oldorder:
- pyfile.order -= 1
- pyfile.notifyChange()
-
- #--------------------------------------------------------------------------
- def releaseLink(self, id):
- """removes pyfile from cache"""
- if id in self.cache:
- del self.cache[id]
-
- #--------------------------------------------------------------------------
- def releasePackage(self, id):
- """removes package from cache"""
- if id in self.packageCache:
- del self.packageCache[id]
-
- #--------------------------------------------------------------------------
- def updateLink(self, pyfile):
- """updates link"""
- self.db.updateLink(pyfile)
-
- e = UpdateEvent("file", pyfile.id, "collector" if not pyfile.package().queue else "queue")
- self.core.pullManager.addEvent(e)
-
- #--------------------------------------------------------------------------
- def updatePackage(self, pypack):
- """updates a package"""
- self.db.updatePackage(pypack)
-
- e = UpdateEvent("pack", pypack.id, "collector" if not pypack.queue else "queue")
- self.core.pullManager.addEvent(e)
-
- #--------------------------------------------------------------------------
- def getPackage(self, id):
- """return package instance"""
-
- if id in self.packageCache:
- return self.packageCache[id]
- else:
- return self.db.getPackage(id)
-
- #--------------------------------------------------------------------------
- def getPackageData(self, id):
- """returns dict with package information"""
- pack = self.getPackage(id)
-
- if not pack:
- return None
-
- pack = pack.toDict()[id]
-
- data = self.db.getPackageData(id)
-
- tmplist = []
-
- cache = self.cache.values()
- for x in cache:
- if int(x.toDbDict()[x.id]["package"]) == int(id):
- tmplist.append((x.id, x.toDbDict()[x.id]))
- data.update(tmplist)
-
- pack["links"] = data
-
- return pack
-
- #--------------------------------------------------------------------------
- def getFileData(self, id):
- """returns dict with file information"""
- if id in self.cache:
- return self.cache[id].toDbDict()
-
- return self.db.getLinkData(id)
-
- #--------------------------------------------------------------------------
- def getFile(self, id):
- """returns pyfile instance"""
- if id in self.cache:
- return self.cache[id]
- else:
- return self.db.getFile(id)
-
- #--------------------------------------------------------------------------
- @lock
- def getJob(self, occ):
- """get suitable job"""
-
- #@TODO clean mess
- #@TODO improve selection of valid jobs
-
- if occ in self.jobCache:
- if self.jobCache[occ]:
- id = self.jobCache[occ].pop()
- if id == "empty":
- pyfile = None
- self.jobCache[occ].append("empty")
- else:
- pyfile = self.getFile(id)
- else:
- jobs = self.db.getJob(occ)
- jobs.reverse()
- if not jobs:
- self.jobCache[occ].append("empty")
- pyfile = None
- else:
- self.jobCache[occ].extend(jobs)
- pyfile = self.getFile(self.jobCache[occ].pop())
-
- else:
- self.jobCache = {} #better not caching to much
- jobs = self.db.getJob(occ)
- jobs.reverse()
- self.jobCache[occ] = jobs
-
- if not jobs:
- self.jobCache[occ].append("empty")
- pyfile = None
- else:
- pyfile = self.getFile(self.jobCache[occ].pop())
-
- #@TODO: maybe the new job has to be approved...
-
-
- #pyfile = self.getFile(self.jobCache[occ].pop())
- return pyfile
-
- @lock
- def getDecryptJob(self):
- """return job for decrypting"""
- if "decrypt" in self.jobCache:
- return None
-
- plugins = self.core.pluginManager.crypterPlugins.keys() + self.core.pluginManager.containerPlugins.keys()
- plugins = str(tuple(plugins))
-
- jobs = self.db.getPluginJob(plugins)
- if jobs:
- return self.getFile(jobs[0])
- else:
- self.jobCache["decrypt"] = "empty"
- return None
-
- def getFileCount(self):
- """returns number of files"""
-
- if self.filecount == -1:
- self.filecount = self.db.filecount(1)
-
- return self.filecount
-
- def getQueueCount(self, force=False):
- """number of files that have to be processed"""
- if self.queuecount == -1 or force:
- self.queuecount = self.db.queuecount(1)
-
- return self.queuecount
-
- def checkAllLinksFinished(self):
- """checks if all files are finished and dispatch event"""
-
- if not self.getQueueCount(True):
- self.core.addonManager.dispatchEvent("all_downloads-finished")
- self.core.log.debug("All downloads finished")
- return True
-
- return False
-
- def checkAllLinksProcessed(self, fid):
- """checks if all files was processed and pyload would idle now, needs fid which will be ignored when counting"""
-
- # reset count so statistic will update (this is called when dl was processed)
- self.resetCount()
-
- if not self.db.processcount(1, fid):
- self.core.addonManager.dispatchEvent("all_downloads-processed")
- self.core.log.debug("All downloads processed")
- return True
-
- return False
-
- def resetCount(self):
- self.queuecount = -1
-
- @lock
- @change
- def restartPackage(self, id):
- """restart package"""
- pyfiles = self.cache.values()
- for pyfile in pyfiles:
- if pyfile.packageid == id:
- self.restartFile(pyfile.id)
-
- self.db.restartPackage(id)
-
- if id in self.packageCache:
- self.packageCache[id].setFinished = False
-
- e = UpdateEvent("pack", id, "collector" if not self.getPackage(id).queue else "queue")
- self.core.pullManager.addEvent(e)
-
- @lock
- @change
- def restartFile(self, id):
- """ restart file"""
- if id in self.cache:
- self.cache[id].status = 3
- self.cache[id].name = self.cache[id].url
- self.cache[id].error = ""
- self.cache[id].abortDownload()
-
-
- self.db.restartFile(id)
-
- e = UpdateEvent("file", id, "collector" if not self.getFile(id).package().queue else "queue")
- self.core.pullManager.addEvent(e)
-
- @lock
- @change
- def setPackageLocation(self, id, queue):
- """push package to queue"""
-
- p = self.db.getPackage(id)
- oldorder = p.order
-
- e = RemoveEvent("pack", id, "collector" if not p.queue else "queue")
- self.core.pullManager.addEvent(e)
-
- self.db.clearPackageOrder(p)
-
- p = self.db.getPackage(id)
-
- p.queue = queue
- self.db.updatePackage(p)
-
- self.db.reorderPackage(p, -1, True)
-
- packs = self.packageCache.values()
- for pack in packs:
- if pack.queue != queue and pack.order > oldorder:
- pack.order -= 1
- pack.notifyChange()
-
- self.db.commit()
- self.releasePackage(id)
- p = self.getPackage(id)
-
- e = InsertEvent("pack", id, p.order, "collector" if not p.queue else "queue")
- self.core.pullManager.addEvent(e)
-
- @lock
- @change
- def reorderPackage(self, id, position):
- p = self.getPackage(id)
-
- e = RemoveEvent("pack", id, "collector" if not p.queue else "queue")
- self.core.pullManager.addEvent(e)
- self.db.reorderPackage(p, position)
-
- packs = self.packageCache.values()
- for pack in packs:
- if pack.queue != p.queue or pack.order < 0 or pack == p: continue
- if p.order > position:
- if pack.order >= position and pack.order < p.order:
- pack.order += 1
- pack.notifyChange()
- elif p.order < position:
- if pack.order <= position and pack.order > p.order:
- pack.order -= 1
- pack.notifyChange()
-
- p.order = position
- self.db.commit()
-
- e = InsertEvent("pack", id, position, "collector" if not p.queue else "queue")
- self.core.pullManager.addEvent(e)
-
- @lock
- @change
- def reorderFile(self, id, position):
- f = self.getFileData(id)
- f = f[id]
-
- e = RemoveEvent("file", id, "collector" if not self.getPackage(f["package"]).queue else "queue")
- self.core.pullManager.addEvent(e)
-
- self.db.reorderLink(f, position)
-
- pyfiles = self.cache.values()
- for pyfile in pyfiles:
- if pyfile.packageid != f["package"] or pyfile.order < 0: continue
- if f["order"] > position:
- if pyfile.order >= position and pyfile.order < f["order"]:
- pyfile.order += 1
- pyfile.notifyChange()
- elif f["order"] < position:
- if pyfile.order <= position and pyfile.order > f["order"]:
- pyfile.order -= 1
- pyfile.notifyChange()
-
- if id in self.cache:
- self.cache[id].order = position
-
- self.db.commit()
-
- e = InsertEvent("file", id, position, "collector" if not self.getPackage(f["package"]).queue else "queue")
- self.core.pullManager.addEvent(e)
-
- @change
- def updateFileInfo(self, data, pid):
- """ updates file info (name, size, status, url)"""
- ids = self.db.updateLinkInfo(data)
- e = UpdateEvent("pack", pid, "collector" if not self.getPackage(pid).queue else "queue")
- self.core.pullManager.addEvent(e)
-
- def checkPackageFinished(self, pyfile):
- """ checks if package is finished and calls AddonManager """
-
- ids = self.db.getUnfinished(pyfile.packageid)
- if not ids or (pyfile.id in ids and len(ids) == 1):
- if not pyfile.package().setFinished:
- self.core.log.info(_("Package finished: %s") % pyfile.package().name)
- self.core.addonManager.packageFinished(pyfile.package())
- pyfile.package().setFinished = True
-
-
- def reCheckPackage(self, pid):
- """ recheck links in package """
- data = self.db.getPackageData(pid)
-
- urls = []
-
- for pyfile in data.itervalues():
- if pyfile["status"] not in (0, 12, 13):
- urls.append((pyfile["url"], pyfile["plugin"]))
-
- self.core.threadManager.createInfoThread(urls, pid)
-
- @lock
- @change
- def deleteFinishedLinks(self):
- """ deletes finished links and packages, return deleted packages """
-
- old_packs = self.getInfoData(0)
- old_packs.update(self.getInfoData(1))
-
- self.db.deleteFinished()
-
- new_packs = self.db.getAllPackages(0)
- new_packs.update(self.db.getAllPackages(1))
- #get new packages only from db
-
- deleted = []
- for id in old_packs.iterkeys():
- if id not in new_packs:
- deleted.append(id)
- self.deletePackage(int(id))
-
- return deleted
-
- @lock
- @change
- def restartFailed(self):
- """ restart all failed links """
- self.db.restartFailed()
-
-class FileMethods(object):
- @style.queue
- def filecount(self, queue):
- """returns number of files in queue"""
- self.c.execute("SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=?", (queue,))
- return self.c.fetchone()[0]
-
- @style.queue
- def queuecount(self, queue):
- """ number of files in queue not finished yet"""
- self.c.execute("SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? AND l.status NOT IN (0, 4)", (queue,))
- return self.c.fetchone()[0]
-
- @style.queue
- def processcount(self, queue, fid):
- """ number of files which have to be proccessed """
- self.c.execute("SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? AND l.status IN (2, 3, 5, 7, 12) AND l.id != ?", (queue, str(fid)))
- return self.c.fetchone()[0]
-
- @style.inner
- def _nextPackageOrder(self, queue=0):
- self.c.execute('SELECT MAX(packageorder) FROM packages WHERE queue=?', (queue,))
- max = self.c.fetchone()[0]
- if max is not None:
- return max + 1
- else:
- return 0
-
- @style.inner
- def _nextFileOrder(self, package):
- self.c.execute('SELECT MAX(linkorder) FROM links WHERE package=?', (package,))
- max = self.c.fetchone()[0]
- if max is not None:
- return max + 1
- else:
- return 0
-
- @style.queue
- def addLink(self, url, name, plugin, package):
- order = self._nextFileOrder(package)
- self.c.execute('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', (url, name, (plugintype, pluginname), package, order))
- return self.c.lastrowid
-
- @style.queue
- def addLinks(self, links, package):
- """ links is a list of tupels (url, plugin)"""
- order = self._nextFileOrder(package)
- orders = [order + x for x in range(len(links))]
- links = [(x[0], x[0], (x[1], x[2]), package, o) for x, o in zip(links, orders)]
- self.c.executemany('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', links)
-
- @style.queue
- def addPackage(self, name, folder, queue):
- order = self._nextPackageOrder(queue)
- self.c.execute('INSERT INTO packages(name, folder, queue, packageorder) VALUES(?,?,?,?)', (name, folder, queue, order))
- return self.c.lastrowid
-
- @style.queue
- def deletePackage(self, p):
-
- self.c.execute('DELETE FROM links WHERE package=?', (str(p.id),))
- self.c.execute('DELETE FROM packages WHERE id=?', (str(p.id),))
- self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=?', (p.order, p.queue))
-
- @style.queue
- def deleteLink(self, f):
-
- self.c.execute('DELETE FROM links WHERE id=?', (str(f.id),))
- self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder > ? AND package=?', (f.order, str(f.packageid)))
-
-
- @style.queue
- def getAllLinks(self, q):
- """return information about all links in queue q
-
- q0 queue
- q1 collector
-
- format:
-
- {
- id: {'name': name, ... 'package': id }, ...
- }
-
- """
- self.c.execute('SELECT l.id, l.url, l.name, l.size, l.status, l.error, l.plugin, l.package, l.linkorder FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? ORDER BY l.linkorder', (q,))
- data = {}
- for r in self.c:
- data[r[0]] = {
- 'id': r[0],
- 'url': r[1],
- 'name': r[2],
- 'size': r[3],
- 'format_size': formatSize(r[3]),
- 'status': r[4],
- 'statusmsg': self.manager.statusMsg[r[4]],
- 'error': r[5],
- 'plugin': r[6],
- 'package': r[7],
- 'order': r[8],
- }
-
- return data
-
- @style.queue
- def getAllPackages(self, q):
- """return information about packages in queue q
- (only useful in get all data)
-
- q0 queue
- q1 collector
-
- format:
-
- {
- id: {'name': name ... 'links': {}}, ...
- }
- """
- self.c.execute('SELECT p.id, p.name, p.folder, p.site, p.password, p.queue, p.packageorder, s.sizetotal, s.sizedone, s.linksdone, s.linkstotal \
- FROM packages p JOIN pstats s ON p.id = s.id \
- WHERE p.queue=? ORDER BY p.packageorder', str(q))
-
- data = {}
- for r in self.c:
- data[r[0]] = {
- 'id': r[0],
- 'name': r[1],
- 'folder': r[2],
- 'site': r[3],
- 'password': r[4],
- 'queue': r[5],
- 'order': r[6],
- 'sizetotal': int(r[7]),
- 'sizedone': r[8] if r[8] else 0, #these can be None
- 'linksdone': r[9] if r[9] else 0,
- 'linkstotal': r[10],
- 'links': {}
- }
-
- return data
-
- @style.queue
- def getLinkData(self, id):
- """get link information as dict"""
- self.c.execute('SELECT id, url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?', (str(id),))
- data = {}
- r = self.c.fetchone()
- if not r:
- return None
- data[r[0]] = {
- 'id': r[0],
- 'url': r[1],
- 'name': r[2],
- 'size': r[3],
- 'format_size': formatSize(r[3]),
- 'status': r[4],
- 'statusmsg': self.manager.statusMsg[r[4]],
- 'error': r[5],
- 'plugin': r[6],
- 'package': r[7],
- 'order': r[8],
- }
-
- return data
-
- @style.queue
- def getPackageData(self, id):
- """get data about links for a package"""
- self.c.execute('SELECT id, url, name, size, status, error, plugin, package, linkorder FROM links WHERE package=? ORDER BY linkorder', (str(id),))
-
- data = {}
- for r in self.c:
- data[r[0]] = {
- 'id': r[0],
- 'url': r[1],
- 'name': r[2],
- 'size': r[3],
- 'format_size': formatSize(r[3]),
- 'status': r[4],
- 'statusmsg': self.manager.statusMsg[r[4]],
- 'error': r[5],
- 'plugin': r[6],
- 'package': r[7],
- 'order': r[8],
- }
-
- return data
-
-
- @style.async
- def updateLink(self, f):
- self.c.execute('UPDATE links SET url=?, name=?, size=?, status=?, error=?, package=? WHERE id=?', (f.url, f.name, f.size, f.status, f.error, str(f.packageid), str(f.id)))
-
- @style.queue
- def updatePackage(self, p):
- self.c.execute('UPDATE packages SET name=?, folder=?, site=?, password=?, queue=? WHERE id=?', (p.name, p.folder, p.site, p.password, p.queue, str(p.id)))
-
- @style.queue
- def updateLinkInfo(self, data):
- """ data is list of tupels (name, size, status, url) """
- self.c.executemany('UPDATE links SET name=?, size=?, status=? WHERE url=? AND status IN (1, 2, 3, 14)', data)
- ids = []
- self.c.execute('SELECT id FROM links WHERE url IN (\'%s\')' % "','".join([x[3] for x in data]))
- for r in self.c:
- ids.append(int(r[0]))
- return ids
-
- @style.queue
- def reorderPackage(self, p, position, noMove=False):
- if position == -1:
- position = self._nextPackageOrder(p.queue)
- if not noMove:
- if p.order > position:
- self.c.execute('UPDATE packages SET packageorder=packageorder+1 WHERE packageorder >= ? AND packageorder < ? AND queue=? AND packageorder >= 0', (position, p.order, p.queue))
- elif p.order < position:
- self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder <= ? AND packageorder > ? AND queue=? AND packageorder >= 0', (position, p.order, p.queue))
-
- self.c.execute('UPDATE packages SET packageorder=? WHERE id=?', (position, str(p.id)))
-
- @style.queue
- def reorderLink(self, f, position):
- """ reorder link with f as dict for pyfile """
- if f["order"] > position:
- self.c.execute('UPDATE links SET linkorder=linkorder+1 WHERE linkorder >= ? AND linkorder < ? AND package=?', (position, f["order"], f["package"]))
- elif f["order"] < position:
- self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder <= ? AND linkorder > ? AND package=?', (position, f["order"], f["package"]))
-
- self.c.execute('UPDATE links SET linkorder=? WHERE id=?', (position, f["id"]))
-
- @style.queue
- def clearPackageOrder(self, p):
- self.c.execute('UPDATE packages SET packageorder=? WHERE id=?', (-1, str(p.id)))
- self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=? AND id != ?', (p.order, p.queue, str(p.id)))
-
- @style.async
- def restartFile(self, id):
- self.c.execute('UPDATE links SET status=3, error="" WHERE id=?', (str(id),))
-
- @style.async
- def restartPackage(self, id):
- self.c.execute('UPDATE links SET status=3 WHERE package=?', (str(id),))
-
- @style.queue
- def getPackage(self, id):
- """return package instance from id"""
- self.c.execute("SELECT name, folder, site, password, queue, packageorder FROM packages WHERE id=?", (str(id),))
- r = self.c.fetchone()
- if not r: return None
- return PyPackage(self.manager, id, * r)
-
- #--------------------------------------------------------------------------
- @style.queue
- def getFile(self, id):
- """return link instance from id"""
- self.c.execute("SELECT url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?", (str(id),))
- r = self.c.fetchone()
- if not r: return None
- return PyFile(self.manager, id, * r)
-
-
- @style.queue
- def getJob(self, occ):
- """return pyfile ids, which are suitable for download and dont use a occupied plugin"""
-
- #@TODO improve this hardcoded method
- pre = "('DLC', 'LinkList', 'SerienjunkiesOrg', 'CCF', 'RSDF')" #plugins which are processed in collector
-
- cmd = "("
- for i, item in enumerate(occ):
- if i: cmd += ", "
- cmd += "'%s'" % item
-
- cmd += ")"
-
- cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE ((p.queue=1 AND l.plugin NOT IN %s) OR l.plugin IN %s) AND l.status IN (2, 3, 14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % (cmd, pre)
-
- self.c.execute(cmd) # very bad!
-
- return [x[0] for x in self.c]
-
- @style.queue
- def getPluginJob(self, plugins):
- """returns pyfile ids with suited plugins"""
- cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE l.plugin IN %s AND l.status IN (2, 3, 14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % plugins
-
- self.c.execute(cmd) # very bad!
-
- return [x[0] for x in self.c]
-
- @style.queue
- def getUnfinished(self, pid):
- """return list of max length 3 ids with pyfiles in package not finished or processed"""
-
- self.c.execute("SELECT id FROM links WHERE package=? AND status NOT IN (0, 4, 13) LIMIT 3", (str(pid),))
- return [r[0] for r in self.c]
-
- @style.queue
- def deleteFinished(self):
- self.c.execute("DELETE FROM links WHERE status IN (0, 4)")
- self.c.execute("DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links WHERE packages.id=links.package)")
-
- @style.queue
- def restartFailed(self):
- self.c.execute("UPDATE links SET status=3, error='' WHERE status IN (6, 8, 9)")
-
- @style.queue
- def findDuplicates(self, id, folder, filename):
- """ checks if filename exists with different id and same package """
- self.c.execute("SELECT l.plugin FROM links as l INNER JOIN packages as p ON l.package=p.id AND p.folder=? WHERE l.id!=? AND l.status=0 AND l.name=?", (folder, id, filename))
- return self.c.fetchone()
-
- @style.queue
- def purgeLinks(self):
- self.c.execute("DELETE FROM links;")
- self.c.execute("DELETE FROM packages;")
-
-DatabaseBackend.registerSub(FileMethods)
diff --git a/pyload/database/StorageDatabase.py b/pyload/database/Storage.py
index 75e166d39..75e166d39 100644
--- a/pyload/database/StorageDatabase.py
+++ b/pyload/database/Storage.py
diff --git a/pyload/database/UserDatabase.py b/pyload/database/User.py
index 67cb62ab9..67cb62ab9 100644
--- a/pyload/database/UserDatabase.py
+++ b/pyload/database/User.py
diff --git a/pyload/database/__init__.py b/pyload/database/__init__.py
index 5c6658f01..64f049be1 100644
--- a/pyload/database/__init__.py
+++ b/pyload/database/__init__.py
@@ -2,6 +2,6 @@
from pyload.database.DatabaseBackend import DatabaseBackend, style
-from pyload.database.FileDatabase import FileHandler
-from pyload.database.UserDatabase import UserMethods
-from pyload.database.StorageDatabase import StorageMethods
+from pyload.database.File import FileHandler
+from pyload.database.User import UserMethods
+from pyload.database.Storage import StorageMethods
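
The re-exports above keep package-root imports stable; only direct module imports change with the rename. A hedged before/after sketch (module paths taken from this hunk):

    # before this commit
    from pyload.database.FileDatabase import FileHandler
    # after this commit
    from pyload.database.File import FileHandler
    # unchanged in both versions: import via the package root
    from pyload.database import FileHandler, UserMethods, StorageMethods
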
diff --git a/pyload/datatype/File.py b/pyload/datatype/File.py
new file mode 100644
index 000000000..1df0a8590
--- /dev/null
+++ b/pyload/datatype/File.py
@@ -0,0 +1,270 @@
+# -*- coding: utf-8 -*-
+# @author: RaNaN, mkaay
+
+from pyload.manager.Event import UpdateEvent
+from pyload.utils import formatSize, lock
+
+from time import sleep, time
+
+from threading import RLock
+
+statusMap = {
+ "finished": 0,
+ "offline": 1,
+ "online": 2,
+ "queued": 3,
+ "skipped": 4,
+ "waiting": 5,
+ "temp. offline": 6,
+ "starting": 7,
+ "failed": 8,
+ "aborted": 9,
+ "decrypting": 10,
+ "custom": 11,
+ "downloading": 12,
+ "processing": 13,
+ "unknown": 14,
+}
+
+
+def setSize(self, value):
+ self._size = int(value)
+
+class PyFile(object):
+ """
+ Represents a file object at runtime
+ """
+ __slots__ = ("m", "id", "url", "name", "_size", "status", "plugin", "plugintype",
+ "pluginname", "packageid", "error", "order", "lock", "waitUntil",
+ "active", "abort", "statusname", "reconnected", "progress",
+ "maxprogress", "pluginmodule", "pluginclass")
+
+ def __init__(self, manager, id, url, name, size, status, error, plugin, package, order):
+ self.m = manager
+
+ self.id = int(id)
+ self.url = url
+ self.name = name
+ self.size = size
+ self.status = status
+ self.plugin = self.plugintype, self.pluginname = plugin
+ self.packageid = package #should not be used, use package() instead
+ self.error = error
+ self.order = order
+ # database information ends here
+
+ self.lock = RLock()
+
+ self.plugin = None
+ #self.download = None
+
+ self.waitUntil = 0 # time() + time to wait
+
+ # status attributes
+ self.active = False #obsolete?
+ self.abort = False
+ self.reconnected = False
+
+ self.statusname = None
+
+ self.progress = 0
+ self.maxprogress = 100
+
+ self.m.cache[int(id)] = self
+
+
+ # will convert all sizes to ints
+ size = property(lambda self: self._size, setSize)
+
+ def __repr__(self):
+ return "PyFile %s: %s@%s" % (self.id, self.name, self.pluginname)
+
+ @lock
+ def initPlugin(self):
+ """ inits plugin instance """
+ if not self.plugin:
+ self.pluginmodule = self.m.core.pluginManager.getPlugin(self.plugintype, self.pluginname)
+ self.pluginclass = getattr(self.pluginmodule, self.m.core.pluginManager.getPluginName(self.plugintype, self.pluginname))
+ self.plugin = self.pluginclass(self)
+
+ @lock
+ def hasPlugin(self):
+ """Thread-safe way to determine whether this file has an initialized plugin attribute
+
+ :return:
+ """
+ return hasattr(self, "plugin") and self.plugin
+
+ def package(self):
+ """ return package instance"""
+ return self.m.getPackage(self.packageid)
+
+ def setStatus(self, status):
+ self.status = statusMap[status]
+ self.sync() #@TODO needed as long as no better job approving exists
+
+ def setCustomStatus(self, msg, status="processing"):
+ self.statusname = msg
+ self.setStatus(status)
+
+ def getStatusName(self):
+ if self.status not in (13, 14) or not self.statusname:
+ return self.m.statusMsg[self.status]
+ else:
+ return self.statusname
+
+ def hasStatus(self, status):
+ return statusMap[status] == self.status
+
+ def sync(self):
+ """sync PyFile instance with database"""
+ self.m.updateLink(self)
+
+ @lock
+ def release(self):
+ """sync and remove from cache"""
+ # file has valid package
+ if self.packageid > 0:
+ self.sync()
+
+ if hasattr(self, "plugin") and self.plugin:
+ self.plugin.clean()
+ del self.plugin
+
+ self.m.releaseLink(self.id)
+
+ def delete(self):
+ """delete pyfile from database"""
+ self.m.deleteLink(self.id)
+
+ def toDict(self):
+ """return dict with all information for interface"""
+ return self.toDbDict()
+
+ def toDbDict(self):
+ """return data as dict for database
+
+ format:
+
+ {
+ id: {'url': url, 'name': name ... }
+ }
+
+ """
+ return {
+ self.id: {
+ 'id': self.id,
+ 'url': self.url,
+ 'name': self.name,
+ 'plugin': self.pluginname,
+ 'size': self.getSize(),
+ 'format_size': self.formatSize(),
+ 'status': self.status,
+ 'statusmsg': self.getStatusName(),
+ 'package': self.packageid,
+ 'error': self.error,
+ 'order': self.order
+ }
+ }
+
+ def abortDownload(self):
+ """abort pyfile if possible"""
+ while self.id in self.m.core.threadManager.processingIds():
+ self.abort = True
+ if self.plugin and self.plugin.req:
+ self.plugin.req.abortDownloads()
+ sleep(0.1)
+
+ self.abort = False
+ if self.hasPlugin() and self.plugin.req:
+ self.plugin.req.abortDownloads()
+
+ self.release()
+
+ def finishIfDone(self):
+ """set status to finish and release file if every thread is finished with it"""
+
+ if self.id in self.m.core.threadManager.processingIds():
+ return False
+
+ self.setStatus("finished")
+ self.release()
+ self.m.checkAllLinksFinished()
+ return True
+
+ def checkIfProcessed(self):
+ self.m.checkAllLinksProcessed(self.id)
+
+ def formatWait(self):
+ """ formats and returns wait time in human-readable format """
+ seconds = self.waitUntil - time()
+
+ if seconds < 0: return "00:00:00"
+
+ hours, seconds = divmod(seconds, 3600)
+ minutes, seconds = divmod(seconds, 60)
+ return "%.2i:%.2i:%.2i" % (hours, minutes, seconds)
+
+ def formatSize(self):
+ """ formats size to readable format """
+ return formatSize(self.getSize())
+
+ def formatETA(self):
+ """ formats eta to readable format """
+ seconds = self.getETA()
+
+ if seconds < 0: return "00:00:00"
+
+ hours, seconds = divmod(seconds, 3600)
+ minutes, seconds = divmod(seconds, 60)
+ return "%.2i:%.2i:%.2i" % (hours, minutes, seconds)
+
+ def getSpeed(self):
+ """ calculates speed """
+ try:
+ return self.plugin.req.speed
+ except Exception:
+ return 0
+
+ def getETA(self):
+ """ gets estimated time of arrival """
+ try:
+ return self.getBytesLeft() / self.getSpeed()
+ except Exception:
+ return 0
+
+ def getBytesLeft(self):
+ """ gets bytes left """
+ try:
+ return self.getSize() - self.plugin.req.arrived
+ except Exception:
+ return 0
+
+ def getPercent(self):
+ """ get % of download """
+ if self.status == 12:
+ try:
+ return self.plugin.req.percent
+ except Exception:
+ return 0
+ else:
+ return self.progress
+
+ def getSize(self):
+ """ get size of download """
+ try:
+ if self.plugin.req.size:
+ return self.plugin.req.size
+ else:
+ return self.size
+ except Exception:
+ return self.size
+
+ def notifyChange(self):
+ e = UpdateEvent("file", self.id, "collector" if not self.package().queue else "queue")
+ self.m.core.pullManager.addEvent(e)
+
+ def setProgress(self, value):
+ if not value == self.progress:
+ self.progress = value
+ self.notifyChange()
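
A short usage sketch of the runtime helpers defined above (editorial addition; pyfile stands for any PyFile instance created through the FileHandler cache):

    pyfile.size = "4194304"                # the size property coerces values to int
    pyfile.setCustomStatus("extracting")   # shown while status is "processing"
    if pyfile.hasStatus("downloading"):
        left = pyfile.getBytesLeft()       # falls back to 0 without a request object
        eta = pyfile.formatETA()           # "HH:MM:SS"; "00:00:00" when speed is unknown
    pyfile.release()                       # sync to the database and drop from the cache
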
diff --git a/pyload/datatype/Package.py b/pyload/datatype/Package.py
new file mode 100644
index 000000000..bf3edffea
--- /dev/null
+++ b/pyload/datatype/Package.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+# @author: RaNaN, mkaay
+
+from pyload.manager.Event import UpdateEvent
+from pyload.utils import safe_filename
+
+class PyPackage(object):
+ """
+ Represents a package object at runtime
+ """
+ def __init__(self, manager, id, name, folder, site, password, queue, order):
+ self.m = manager
+ self.m.packageCache[int(id)] = self
+
+ self.id = int(id)
+ self.name = name
+ self._folder = folder
+ self.site = site
+ self.password = password
+ self.queue = queue
+ self.order = order
+ self.setFinished = False
+
+ @property
+ def folder(self):
+ return safe_filename(self._folder)
+
+ def toDict(self):
+ """ Returns a dictionary representation of the data.
+
+ :return: dict: {id: { attr: value }}
+ """
+ return {
+ self.id: {
+ 'id': self.id,
+ 'name': self.name,
+ 'folder': self.folder,
+ 'site': self.site,
+ 'password': self.password,
+ 'queue': self.queue,
+ 'order': self.order,
+ 'links': {}
+ }
+ }
+
+ def getChildren(self):
+ """get information about contained links"""
+ return self.m.getPackageData(self.id)["links"]
+
+ def sync(self):
+ """sync with db"""
+ self.m.updatePackage(self)
+
+ def release(self):
+ """sync and delete from cache"""
+ self.sync()
+ self.m.releasePackage(self.id)
+
+ def delete(self):
+ self.m.deletePackage(self.id)
+
+ def notifyChange(self):
+ e = UpdateEvent("pack", self.id, "collector" if not self.queue else "queue")
+ self.m.core.pullManager.addEvent(e)
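
Sketch of the runtime dict a PyPackage exposes (editorial addition; pypack is a hypothetical instance obtained via FileHandler.getPackage()):

    meta = pypack.toDict()[pypack.id]
    # meta["folder"] has already been passed through safe_filename(), so it is safe
    # to join with the download root; meta["links"] starts empty and is filled by
    # FileHandler.getCompleteData().
    children = pypack.getChildren()        # {link_id: {...}} from getPackageData()
    pypack.password = "secret"             # hypothetical change ...
    pypack.sync()                          # ... persisted via updatePackage()
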
diff --git a/pyload/datatype/PyFile.py b/pyload/datatype/PyFile.py
deleted file mode 100644
index 173203a8d..000000000
--- a/pyload/datatype/PyFile.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN, mkaay
-
-from pyload.manager.event.PullEvents import UpdateEvent
-from pyload.utils import formatSize, lock
-
-from time import sleep, time
-
-from threading import RLock
-
-statusMap = {
- "finished": 0,
- "offline": 1,
- "online": 2,
- "queued": 3,
- "skipped": 4,
- "waiting": 5,
- "temp. offline": 6,
- "starting": 7,
- "failed": 8,
- "aborted": 9,
- "decrypting": 10,
- "custom": 11,
- "downloading": 12,
- "processing": 13,
- "unknown": 14,
-}
-
-
-def setSize(self, value):
- self._size = int(value)
-
-class PyFile(object):
- """
- Represents a file object at runtime
- """
- __slots__ = ("m", "id", "url", "name", "size", "_size", "status", "plugin",
- "packageid", "error", "order", "lock", "plugin", "waitUntil",
- "active", "abort", "statusname", "reconnected", "progress",
- "maxprogress", "pluginmodule", "pluginclass")
-
- def __init__(self, manager, id, url, name, size, status, error, plugin, package, order):
- self.m = manager
-
- self.id = int(id)
- self.url = url
- self.name = name
- self.size = size
- self.status = status
- self.plugin = self.plugintype, self.pluginname = plugin
- self.packageid = package #should not be used, use package() instead
- self.error = error
- self.order = order
- # database information ends here
-
- self.lock = RLock()
-
- self.plugin = None
- #self.download = None
-
- self.waitUntil = 0 # time() + time to wait
-
- # status attributes
- self.active = False #obsolete?
- self.abort = False
- self.reconnected = False
-
- self.statusname = None
-
- self.progress = 0
- self.maxprogress = 100
-
- self.m.cache[int(id)] = self
-
-
- # will convert all sizes to ints
- size = property(lambda self: self._size, setSize)
-
- def __repr__(self):
- return "PyFile %s: %s@%s" % (self.id, self.name, self.pluginname)
-
- @lock
- def initPlugin(self):
- """ inits plugin instance """
- if not self.plugin:
- self.pluginmodule = self.m.core.pluginManager.getPlugin(self.plugintype, self.pluginname)
- self.pluginclass = getattr(self.pluginmodule, self.m.core.pluginManager.getPluginName(self.plugintype, self.pluginname))
- self.plugin = self.pluginclass(self)
-
- @lock
- def hasPlugin(self):
- """Thread safe way to determine this file has initialized plugin attribute
-
- :return:
- """
- return hasattr(self, "plugin") and self.plugin
-
- def package(self):
- """ return package instance"""
- return self.m.getPackage(self.packageid)
-
- def setStatus(self, status):
- self.status = statusMap[status]
- self.sync() #@TODO needed aslong no better job approving exists
-
- def setCustomStatus(self, msg, status="processing"):
- self.statusname = msg
- self.setStatus(status)
-
- def getStatusName(self):
- if self.status not in (13, 14) or not self.statusname:
- return self.m.statusMsg[self.status]
- else:
- return self.statusname
-
- def hasStatus(self, status):
- return statusMap[status] == self.status
-
- def sync(self):
- """sync PyFile instance with database"""
- self.m.updateLink(self)
-
- @lock
- def release(self):
- """sync and remove from cache"""
- # file has valid package
- if self.packageid > 0:
- self.sync()
-
- if hasattr(self, "plugin") and self.plugin:
- self.plugin.clean()
- del self.plugin
-
- self.m.releaseLink(self.id)
-
- def delete(self):
- """delete pyfile from database"""
- self.m.deleteLink(self.id)
-
- def toDict(self):
- """return dict with all information for interface"""
- return self.toDbDict()
-
- def toDbDict(self):
- """return data as dict for databse
-
- format:
-
- {
- id: {'url': url, 'name': name ... }
- }
-
- """
- return {
- self.id: {
- 'id': self.id,
- 'url': self.url,
- 'name': self.name,
- 'plugin': self.pluginname,
- 'size': self.getSize(),
- 'format_size': self.formatSize(),
- 'status': self.status,
- 'statusmsg': self.getStatusName(),
- 'package': self.packageid,
- 'error': self.error,
- 'order': self.order
- }
- }
-
- def abortDownload(self):
- """abort pyfile if possible"""
- while self.id in self.m.core.threadManager.processingIds():
- self.abort = True
- if self.plugin and self.plugin.req:
- self.plugin.req.abortDownloads()
- sleep(0.1)
-
- self.abort = False
- if self.hasPlugin() and self.plugin.req:
- self.plugin.req.abortDownloads()
-
- self.release()
-
- def finishIfDone(self):
- """set status to finish and release file if every thread is finished with it"""
-
- if self.id in self.m.core.threadManager.processingIds():
- return False
-
- self.setStatus("finished")
- self.release()
- self.m.checkAllLinksFinished()
- return True
-
- def checkIfProcessed(self):
- self.m.checkAllLinksProcessed(self.id)
-
- def formatWait(self):
- """ formats and return wait time in humanreadable format """
- seconds = self.waitUntil - time()
-
- if seconds < 0: return "00:00:00"
-
- hours, seconds = divmod(seconds, 3600)
- minutes, seconds = divmod(seconds, 60)
- return "%.2i:%.2i:%.2i" % (hours, minutes, seconds)
-
- def formatSize(self):
- """ formats size to readable format """
- return formatSize(self.getSize())
-
- def formatETA(self):
- """ formats eta to readable format """
- seconds = self.getETA()
-
- if seconds < 0: return "00:00:00"
-
- hours, seconds = divmod(seconds, 3600)
- minutes, seconds = divmod(seconds, 60)
- return "%.2i:%.2i:%.2i" % (hours, minutes, seconds)
-
- def getSpeed(self):
- """ calculates speed """
- try:
- return self.plugin.req.speed
- except Exception:
- return 0
-
- def getETA(self):
- """ gets established time of arrival"""
- try:
- return self.getBytesLeft() / self.getSpeed()
- except Exception:
- return 0
-
- def getBytesLeft(self):
- """ gets bytes left """
- try:
- return self.getSize() - self.plugin.req.arrived
- except Exception:
- return 0
-
- def getPercent(self):
- """ get % of download """
- if self.status == 12:
- try:
- return self.plugin.req.percent
- except Exception:
- return 0
- else:
- return self.progress
-
- def getSize(self):
- """ get size of download """
- try:
- if self.plugin.req.size:
- return self.plugin.req.size
- else:
- return self.size
- except Exception:
- return self.size
-
- def notifyChange(self):
- e = UpdateEvent("file", self.id, "collector" if not self.package().queue else "queue")
- self.m.core.pullManager.addEvent(e)
-
- def setProgress(self, value):
- if not value == self.progress:
- self.progress = value
- self.notifyChange()
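A minimal standalone sketch of the hh:mm:ss formatting used by formatWait() and formatETA() in the removed PyFile class above; the helper name format_seconds is invented, the divmod logic is taken directly from the hunk:

# hh:mm:ss formatting as used by PyFile.formatWait()/formatETA()
def format_seconds(seconds):
    if seconds < 0:
        return "00:00:00"
    hours, seconds = divmod(seconds, 3600)
    minutes, seconds = divmod(seconds, 60)
    return "%.2i:%.2i:%.2i" % (hours, minutes, seconds)

print(format_seconds(3723))  # -> 01:02:03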
diff --git a/pyload/datatype/PyPackage.py b/pyload/datatype/PyPackage.py
deleted file mode 100644
index dbd8c8c9b..000000000
--- a/pyload/datatype/PyPackage.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN, mkaay
-
-from pyload.manager.event.PullEvents import UpdateEvent
-from pyload.utils import safe_filename
-
-class PyPackage(object):
- """
- Represents a package object at runtime
- """
- def __init__(self, manager, id, name, folder, site, password, queue, order):
- self.m = manager
- self.m.packageCache[int(id)] = self
-
- self.id = int(id)
- self.name = name
- self._folder = folder
- self.site = site
- self.password = password
- self.queue = queue
- self.order = order
- self.setFinished = False
-
- @property
- def folder(self):
- return safe_filename(self._folder)
-
- def toDict(self):
- """ Returns a dictionary representation of the data.
-
- :return: dict: {id: { attr: value }}
- """
- return {
- self.id: {
- 'id': self.id,
- 'name': self.name,
- 'folder': self.folder,
- 'site': self.site,
- 'password': self.password,
- 'queue': self.queue,
- 'order': self.order,
- 'links': {}
- }
- }
-
- def getChildren(self):
- """get information about contained links"""
- return self.m.getPackageData(self.id)["links"]
-
- def sync(self):
- """sync with db"""
- self.m.updatePackage(self)
-
- def release(self):
- """sync and delete from cache"""
- self.sync()
- self.m.releasePackage(self.id)
-
- def delete(self):
- self.m.deletePackage(self.id)
-
- def notifyChange(self):
- e = UpdateEvent("pack", self.id, "collector" if not self.queue else "queue")
- self.m.core.pullManager.addEvent(e)
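For reference, the {id: {attr: value}} mapping produced by the removed PyPackage.toDict() above would look roughly like this; all values are invented for illustration:

# hypothetical result of PyPackage.toDict() for a package with id 42
example = {
    42: {
        'id': 42,
        'name': 'my_package',
        'folder': 'my_package',
        'site': '',
        'password': '',
        'queue': 1,
        'order': 0,
        'links': {},  # filled separately, see getChildren()
    }
}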
diff --git a/pyload/manager/Account.py b/pyload/manager/Account.py
new file mode 100644
index 000000000..2631e1c7d
--- /dev/null
+++ b/pyload/manager/Account.py
@@ -0,0 +1,191 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+from os.path import exists
+from shutil import copy
+
+from threading import Lock
+
+from pyload.manager.Event import AccountUpdateEvent
+from pyload.utils import chmod, lock
+
+ACC_VERSION = 1
+
+
+class AccountManager(object):
+ """manages all accounts"""
+
+ #----------------------------------------------------------------------
+ def __init__(self, core):
+ """Constructor"""
+
+ self.core = core
+ self.lock = Lock()
+
+ self.initPlugins()
+ self.saveAccounts() # save to add categories to conf
+
+
+ def initPlugins(self):
+ self.accounts = {} # key = ( plugin )
+ self.plugins = {}
+
+ self.initAccountPlugins()
+ self.loadAccounts()
+
+
+ def getAccountPlugin(self, plugin):
+ """get account instance for plugin or None if anonymous"""
+ try:
+ if plugin in self.accounts:
+ if plugin not in self.plugins:
+ klass = self.core.pluginManager.loadClass("accounts", plugin)
+ if klass:
+ self.plugins[plugin] = klass(self, self.accounts[plugin])
+ else: #@NOTE: The account class no longer exists (blacklisted plugin). Skipping the account to avoid crash
+ raise
+
+ return self.plugins[plugin]
+ else:
+ raise
+ except Exception:
+ return None
+
+
+ def getAccountPlugins(self):
+ """ get all account instances"""
+
+ plugins = []
+ for plugin in self.accounts.keys():
+ plugins.append(self.getAccountPlugin(plugin))
+
+ return plugins
+
+
+ #----------------------------------------------------------------------
+ def loadAccounts(self):
+ """loads all accounts available"""
+
+ try:
+ with open("accounts.conf", "a+") as f:
+ content = f.readlines()
+ version = content[0].split(":")[1].strip() if content else ""
+
+ if not version or int(version) < ACC_VERSION:
+ copy("accounts.conf", "accounts.backup")
+ f.seek(0)
+ f.write("version: " + str(ACC_VERSION))
+
+ self.core.log.warning(_("Account settings deleted, due to new config format"))
+ return
+
+ except IOError, e:
+ self.core.log.error(str(e))
+ return
+
+ plugin = ""
+ name = ""
+
+ for line in content[1:]:
+ line = line.strip()
+
+ if not line: continue
+ if line.startswith("#"): continue
+ if line.startswith("version"): continue
+
+ if line.endswith(":") and line.count(":") == 1:
+ plugin = line[:-1]
+ self.accounts[plugin] = {}
+
+ elif line.startswith("@"):
+ try:
+ option = line[1:].split()
+ self.accounts[plugin][name]['options'][option[0]] = [] if len(option) < 2 else ([option[1]] if len(option) < 3 else option[1:])
+ except Exception:
+ pass
+
+ elif ":" in line:
+ name, sep, pw = line.partition(":")
+ self.accounts[plugin][name] = {"password": pw, "options": {}, "valid": True}
+
+
+ #----------------------------------------------------------------------
+ def saveAccounts(self):
+ """save all account information"""
+
+ try:
+ with open("accounts.conf", "wb") as f:
+ f.write("version: " + str(ACC_VERSION) + "\n")
+
+ for plugin, accounts in self.accounts.iteritems():
+ f.write("\n")
+ f.write(plugin + ":\n")
+
+ for name,data in accounts.iteritems():
+ f.write("\n\t%s:%s\n" % (name,data['password']) )
+ if data['options']:
+ for option, values in data['options'].iteritems():
+ f.write("\t@%s %s\n" % (option, " ".join(values)))
+
+ chmod(f.name, 0600)
+
+ except Exception, e:
+ self.core.log.error(str(e))
+
+
+ #----------------------------------------------------------------------
+ def initAccountPlugins(self):
+ """init names"""
+ for name in self.core.pluginManager.getAccountPlugins():
+ self.accounts[name] = {}
+
+
+ @lock
+ def updateAccount(self, plugin , user, password=None, options={}):
+ """add or update account"""
+ if plugin in self.accounts:
+ p = self.getAccountPlugin(plugin)
+ updated = p.updateAccounts(user, password, options)
+            #since accounts is a ref in the plugin, self.accounts doesn't need to be updated here
+
+ self.saveAccounts()
+ if updated: p.scheduleRefresh(user, force=False)
+
+
+ @lock
+ def removeAccount(self, plugin, user):
+ """remove account"""
+
+ if plugin in self.accounts:
+ p = self.getAccountPlugin(plugin)
+ p.removeAccount(user)
+
+ self.saveAccounts()
+
+
+ @lock
+ def getAccountInfos(self, force=True, refresh=False):
+ data = {}
+
+ if refresh:
+ self.core.scheduler.addJob(0, self.core.accountManager.getAccountInfos)
+ force = False
+
+ for p in self.accounts.keys():
+ if self.accounts[p]:
+ p = self.getAccountPlugin(p)
+ if p:
+ data[p.__name] = p.getAllAccounts(force)
+ else: #@NOTE: When an account has been skipped, p is None
+ data[p] = []
+ else:
+ data[p] = []
+ e = AccountUpdateEvent()
+ self.core.pullManager.addEvent(e)
+ return data
+
+
+ def sendChange(self):
+ e = AccountUpdateEvent()
+ self.core.pullManager.addEvent(e)
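To illustrate the accounts.conf layout that saveAccounts() writes and loadAccounts() parses above, here is a small self-contained sketch; the hoster name, user, password and option are invented, and the option handling is simplified to option[1:]:

# hypothetical accounts.conf content in the format written by saveAccounts()
sample = (
    "version: 1\n"
    "\n"
    "HosterExample:\n"
    "\n"
    "\tsomeuser:somepassword\n"
    "\t@limitDL 3\n"
)

accounts = {}
plugin = name = ""
for line in sample.splitlines()[1:]:
    line = line.strip()
    if not line or line.startswith("#") or line.startswith("version"):
        continue
    if line.endswith(":") and line.count(":") == 1:   # "<Plugin>:" section header
        plugin = line[:-1]
        accounts[plugin] = {}
    elif line.startswith("@"):                        # "@<option> <values...>"
        option = line[1:].split()
        accounts[plugin][name]['options'][option[0]] = option[1:]
    elif ":" in line:                                 # "<user>:<password>"
        name, sep, pw = line.partition(":")
        accounts[plugin][name] = {"password": pw, "options": {}, "valid": True}

# accounts == {'HosterExample': {'someuser': {'password': 'somepassword',
#                                             'options': {'limitDL': ['3']},
#                                             'valid': True}}}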
diff --git a/pyload/manager/AccountManager.py b/pyload/manager/AccountManager.py
deleted file mode 100644
index 22345de8d..000000000
--- a/pyload/manager/AccountManager.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-from os.path import exists
-from shutil import copy
-
-from threading import Lock
-
-from pyload.manager.event.PullEvents import AccountUpdateEvent
-from pyload.utils import chmod, lock
-
-ACC_VERSION = 1
-
-
-class AccountManager(object):
- """manages all accounts"""
-
- #----------------------------------------------------------------------
- def __init__(self, core):
- """Constructor"""
-
- self.core = core
- self.lock = Lock()
-
- self.initPlugins()
- self.saveAccounts() # save to add categories to conf
-
-
- def initPlugins(self):
- self.accounts = {} # key = ( plugin )
- self.plugins = {}
-
- self.initAccountPlugins()
- self.loadAccounts()
-
-
- def getAccountPlugin(self, plugin):
- """get account instance for plugin or None if anonymous"""
- try:
- if plugin in self.accounts:
- if plugin not in self.plugins:
- klass = self.core.pluginManager.loadClass("accounts", plugin)
- if klass:
- self.plugins[plugin] = klass(self, self.accounts[plugin])
- else: #@NOTE: The account class no longer exists (blacklisted plugin). Skipping the account to avoid crash
- raise
-
- return self.plugins[plugin]
- else:
- raise
- except Exception:
- return None
-
-
- def getAccountPlugins(self):
- """ get all account instances"""
-
- plugins = []
- for plugin in self.accounts.keys():
- plugins.append(self.getAccountPlugin(plugin))
-
- return plugins
-
-
- #----------------------------------------------------------------------
- def loadAccounts(self):
- """loads all accounts available"""
-
- try:
- with open("accounts.conf", "a+") as f:
- content = f.readlines()
- version = content[0].split(":")[1].strip() if content else ""
-
- if not version or int(version) < ACC_VERSION:
- copy("accounts.conf", "accounts.backup")
- f.seek(0)
- f.write("version: " + str(ACC_VERSION))
-
- self.core.log.warning(_("Account settings deleted, due to new config format"))
- return
-
- except IOError, e:
- self.core.log.error(str(e))
- return
-
- plugin = ""
- name = ""
-
- for line in content[1:]:
- line = line.strip()
-
- if not line: continue
- if line.startswith("#"): continue
- if line.startswith("version"): continue
-
- if line.endswith(":") and line.count(":") == 1:
- plugin = line[:-1]
- self.accounts[plugin] = {}
-
- elif line.startswith("@"):
- try:
- option = line[1:].split()
- self.accounts[plugin][name]['options'][option[0]] = [] if len(option) < 2 else ([option[1]] if len(option) < 3 else option[1:])
- except Exception:
- pass
-
- elif ":" in line:
- name, sep, pw = line.partition(":")
- self.accounts[plugin][name] = {"password": pw, "options": {}, "valid": True}
-
-
- #----------------------------------------------------------------------
- def saveAccounts(self):
- """save all account information"""
-
- try:
- with open("accounts.conf", "wb") as f:
- f.write("version: " + str(ACC_VERSION) + "\n")
-
- for plugin, accounts in self.accounts.iteritems():
- f.write("\n")
- f.write(plugin + ":\n")
-
- for name,data in accounts.iteritems():
- f.write("\n\t%s:%s\n" % (name,data['password']) )
- if data['options']:
- for option, values in data['options'].iteritems():
- f.write("\t@%s %s\n" % (option, " ".join(values)))
-
- chmod(f.name, 0600)
-
- except Exception, e:
- self.core.log.error(str(e))
-
-
- #----------------------------------------------------------------------
- def initAccountPlugins(self):
- """init names"""
- for name in self.core.pluginManager.getAccountPlugins():
- self.accounts[name] = {}
-
-
- @lock
- def updateAccount(self, plugin , user, password=None, options={}):
- """add or update account"""
- if plugin in self.accounts:
- p = self.getAccountPlugin(plugin)
- updated = p.updateAccounts(user, password, options)
- #since accounts is a ref in plugin self.accounts doesnt need to be updated here
-
- self.saveAccounts()
- if updated: p.scheduleRefresh(user, force=False)
-
-
- @lock
- def removeAccount(self, plugin, user):
- """remove account"""
-
- if plugin in self.accounts:
- p = self.getAccountPlugin(plugin)
- p.removeAccount(user)
-
- self.saveAccounts()
-
-
- @lock
- def getAccountInfos(self, force=True, refresh=False):
- data = {}
-
- if refresh:
- self.core.scheduler.addJob(0, self.core.accountManager.getAccountInfos)
- force = False
-
- for p in self.accounts.keys():
- if self.accounts[p]:
- p = self.getAccountPlugin(p)
- if p:
- data[p.__name] = p.getAllAccounts(force)
- else: #@NOTE: When an account has been skipped, p is None
- data[p] = []
- else:
- data[p] = []
- e = AccountUpdateEvent()
- self.core.pullManager.addEvent(e)
- return data
-
-
- def sendChange(self):
- e = AccountUpdateEvent()
- self.core.pullManager.addEvent(e)
diff --git a/pyload/manager/Addon.py b/pyload/manager/Addon.py
new file mode 100644
index 000000000..164068634
--- /dev/null
+++ b/pyload/manager/Addon.py
@@ -0,0 +1,304 @@
+# -*- coding: utf-8 -*-
+# @author: RaNaN, mkaay
+# @interface-version: 0.1
+
+import __builtin__
+
+import traceback
+from threading import RLock, Thread
+
+from types import MethodType
+
+from pyload.manager.thread.Addon import AddonThread
+from pyload.manager.Plugin import literal_eval
+from pyload.utils import lock
+
+
+class AddonManager(object):
+ """Manages addons, delegates and handles Events.
+
+ Every plugin can define events, \
+    but some very useful events are called by the Core.
+    Instead of overwriting addon methods you can use event listeners,
+    which provide an additional entry point into the control flow.
+ Only do very short tasks or use threads.
+
+ **Known Events:**
+    Most addon methods exist as events. These are the additional known events.
+
+ ======================= ============== ==================================
+ Name Arguments Description
+ ======================= ============== ==================================
+ download-preparing fid A download was just queued and will be prepared now.
+    download-start          fid        A plugin will immediately start the download afterwards.
+ links-added links, pid Someone just added links, you are able to modify the links.
+ all_downloads-processed Every link was handled, pyload would idle afterwards.
+ all_downloads-finished Every download in queue is finished.
+ config-changed The config was changed via the api.
+ pluginConfigChanged The plugin config changed, due to api or internal process.
+ ======================= ============== ==================================
+
+ | Notes:
+ | all_downloads-processed is *always* called before all_downloads-finished.
+ | config-changed is *always* called before pluginConfigChanged.
+
+
+ """
+
+ def __init__(self, core):
+ self.core = core
+
+        __builtin__.addonManager = self #: needed to let addons register themselves
+
+ self.plugins = []
+ self.pluginMap = {}
+ self.methods = {} #: dict of names and list of methods usable by rpc
+
+ self.events = {} #: contains events
+
+ # registering callback for config event
+ self.core.config.pluginCB = MethodType(self.dispatchEvent, "pluginConfigChanged", basestring) #@TODO: Rename event pluginConfigChanged
+
+ self.addEvent("pluginConfigChanged", self.manageAddon)
+
+ self.lock = RLock()
+ self.createIndex()
+
+
+ def try_catch(func):
+
+ def new(*args):
+ try:
+ return func(*args)
+ except Exception, e:
+ args[0].log.error(_("Error executing addon: %s") % e)
+ if args[0].core.debug:
+ traceback.print_exc()
+
+ return new
+
+
+ def addRPC(self, plugin, func, doc):
+ plugin = plugin.rpartition(".")[2]
+ doc = doc.strip() if doc else ""
+
+ if plugin in self.methods:
+ self.methods[plugin][func] = doc
+ else:
+ self.methods[plugin] = {func: doc}
+
+
+ def callRPC(self, plugin, func, args, parse):
+ if not args:
+ args = tuple()
+ if parse:
+ args = tuple([literal_eval(x) for x in args])
+ plugin = self.pluginMap[plugin]
+ f = getattr(plugin, func)
+ return f(*args)
+
+
+ def createIndex(self):
+ plugins = []
+ active = []
+ deactive = []
+
+ for pluginname in self.core.pluginManager.addonPlugins:
+ try:
+ # hookClass = getattr(plugin, plugin.__name)
+ if self.core.config.getPlugin(pluginname, "activated"):
+ pluginClass = self.core.pluginManager.loadClass("addon", pluginname)
+ if not pluginClass:
+ continue
+
+ plugin = pluginClass(self.core, self)
+ plugins.append(plugin)
+ self.pluginMap[pluginClass.__name] = plugin
+ if plugin.isActivated():
+ active.append(pluginClass.__name)
+ else:
+ deactive.append(pluginname)
+
+ except Exception:
+ self.core.log.warning(_("Failed activating %(name)s") % {"name": pluginname})
+ if self.core.debug:
+ traceback.print_exc()
+
+ self.core.log.info(_("Activated addons: %s") % ", ".join(sorted(active)))
+ self.core.log.info(_("Deactivated addons: %s") % ", ".join(sorted(deactive)))
+
+ self.plugins = plugins
+
+
+ def manageAddon(self, plugin, name, value):
+ if name == "activated" and value:
+ self.activateAddon(plugin)
+
+ elif name == "activated" and not value:
+ self.deactivateAddon(plugin)
+
+
+ def activateAddon(self, pluginname):
+ # check if already loaded
+ for inst in self.plugins:
+ if inst.__name == pluginname:
+ return
+
+ pluginClass = self.core.pluginManager.loadClass("addon", pluginname)
+
+ if not pluginClass:
+ return
+
+ self.core.log.debug("Activate addon: %s" % pluginname)
+
+ addon = pluginClass(self.core, self)
+ self.plugins.append(addon)
+ self.pluginMap[pluginClass.__name] = addon
+
+ addon.activate()
+
+
+ def deactivateAddon(self, pluginname):
+ for plugin in self.plugins:
+ if plugin.__name == pluginname:
+ addon = plugin
+ break
+ else:
+ return
+
+ self.core.log.debug("Deactivate addon: %s" % pluginname)
+
+ addon.deactivate()
+
+ #remove periodic call
+ self.core.log.debug("Removed callback: %s" % self.core.scheduler.removeJob(addon.cb))
+
+ self.plugins.remove(addon)
+ del self.pluginMap[addon.__name]
+
+
+ @try_catch
+ def coreReady(self):
+ for plugin in self.plugins:
+ if plugin.isActivated():
+ plugin.activate()
+
+ self.dispatchEvent("addon-start")
+
+
+ @try_catch
+ def coreExiting(self):
+ for plugin in self.plugins:
+ if plugin.isActivated():
+ plugin.exit()
+
+ self.dispatchEvent("addon-exit")
+
+
+ @lock
+ def downloadPreparing(self, pyfile):
+ for plugin in self.plugins:
+ if plugin.isActivated():
+ plugin.downloadPreparing(pyfile)
+
+ self.dispatchEvent("download-preparing", pyfile)
+
+
+ @lock
+ def downloadFinished(self, pyfile):
+ for plugin in self.plugins:
+ if plugin.isActivated():
+ plugin.downloadFinished(pyfile)
+
+ self.dispatchEvent("download-finished", pyfile)
+
+
+ @lock
+ @try_catch
+ def downloadFailed(self, pyfile):
+ for plugin in self.plugins:
+ if plugin.isActivated():
+ plugin.downloadFailed(pyfile)
+
+ self.dispatchEvent("download-failed", pyfile)
+
+
+ @lock
+ def packageFinished(self, package):
+ for plugin in self.plugins:
+ if plugin.isActivated():
+ plugin.packageFinished(package)
+
+ self.dispatchEvent("package-finished", package)
+
+
+ @lock
+ def beforeReconnecting(self, ip):
+ for plugin in self.plugins:
+ plugin.beforeReconnecting(ip)
+
+ self.dispatchEvent("beforeReconnecting", ip)
+
+
+ @lock
+ def afterReconnecting(self, ip):
+ for plugin in self.plugins:
+ if plugin.isActivated():
+ plugin.afterReconnecting(ip)
+
+ self.dispatchEvent("afterReconnecting", ip)
+
+
+ def startThread(self, function, *args, **kwargs):
+ return AddonThread(self.core.threadManager, function, args, kwargs)
+
+
+ def activePlugins(self):
+ """ returns all active plugins """
+ return [x for x in self.plugins if x.isActivated()]
+
+
+ def getAllInfo(self):
+ """returns info stored by addon plugins"""
+ info = {}
+ for name, plugin in self.pluginMap.iteritems():
+ if plugin.info:
+                # copy and convert to str
+ info[name] = dict(
+ [(x, str(y) if not isinstance(y, basestring) else y) for x, y in plugin.info.iteritems()])
+ return info
+
+
+ def getInfo(self, plugin):
+ info = {}
+ if plugin in self.pluginMap and self.pluginMap[plugin].info:
+ info = dict((x, str(y) if not isinstance(y, basestring) else y)
+ for x, y in self.pluginMap[plugin].info.iteritems())
+ return info
+
+
+ def addEvent(self, event, func):
+ """Adds an event listener for event name"""
+ if event in self.events:
+ self.events[event].append(func)
+ else:
+ self.events[event] = [func]
+
+
+ def removeEvent(self, event, func):
+ """removes previously added event listener"""
+ if event in self.events:
+ self.events[event].remove(func)
+
+
+ def dispatchEvent(self, event, *args):
+ """dispatches event with args"""
+ if event in self.events:
+ for f in self.events[event]:
+ try:
+ f(*args)
+ except Exception, e:
+ self.core.log.warning("Error calling event handler %s: %s, %s, %s"
+ % (event, f, args, str(e)))
+ if self.core.debug:
+ traceback.print_exc()
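A minimal sketch of the addEvent()/dispatchEvent() listener pattern used by the new AddonManager above; no pyLoad core is involved, the dict-of-listeners mechanics are reproduced standalone and the event payload is a plain string instead of a PyFile:

events = {}

def addEvent(event, func):
    # same idea as AddonManager.addEvent(): one list of callables per event name
    events.setdefault(event, []).append(func)

def dispatchEvent(event, *args):
    # same idea as AddonManager.dispatchEvent(): call every listener with the args
    for f in events.get(event, []):
        f(*args)

def on_download_finished(pyfile):
    print("finished: %s" % pyfile)

addEvent("download-finished", on_download_finished)
dispatchEvent("download-finished", "SomeFile.zip")  # prints: finished: SomeFile.zip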
diff --git a/pyload/manager/AddonManager.py b/pyload/manager/AddonManager.py
deleted file mode 100644
index 2fd744776..000000000
--- a/pyload/manager/AddonManager.py
+++ /dev/null
@@ -1,304 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN, mkaay
-# @interface-version: 0.1
-
-import __builtin__
-
-import traceback
-from threading import RLock, Thread
-
-from types import MethodType
-
-from pyload.manager.thread.AddonThread import AddonThread
-from pyload.manager.PluginManager import literal_eval
-from pyload.utils import lock
-
-
-class AddonManager(object):
- """Manages addons, delegates and handles Events.
-
- Every plugin can define events, \
- but some very usefull events are called by the Core.
- Contrary to overwriting addon methods you can use event listener,
- which provides additional entry point in the control flow.
- Only do very short tasks or use threads.
-
- **Known Events:**
- Most addon methods exists as events. These are the additional known events.
-
- ======================= ============== ==================================
- Name Arguments Description
- ======================= ============== ==================================
- download-preparing fid A download was just queued and will be prepared now.
- download-start fid A plugin will immediately starts the download afterwards.
- links-added links, pid Someone just added links, you are able to modify the links.
- all_downloads-processed Every link was handled, pyload would idle afterwards.
- all_downloads-finished Every download in queue is finished.
- config-changed The config was changed via the api.
- pluginConfigChanged The plugin config changed, due to api or internal process.
- ======================= ============== ==================================
-
- | Notes:
- | all_downloads-processed is *always* called before all_downloads-finished.
- | config-changed is *always* called before pluginConfigChanged.
-
-
- """
-
- def __init__(self, core):
- self.core = core
-
- __builtin__.addonManager = self #: needed to let addons register themself
-
- self.plugins = []
- self.pluginMap = {}
- self.methods = {} #: dict of names and list of methods usable by rpc
-
- self.events = {} #: contains events
-
- # registering callback for config event
- self.core.config.pluginCB = MethodType(self.dispatchEvent, "pluginConfigChanged", basestring) #@TODO: Rename event pluginConfigChanged
-
- self.addEvent("pluginConfigChanged", self.manageAddon)
-
- self.lock = RLock()
- self.createIndex()
-
-
- def try_catch(func):
-
- def new(*args):
- try:
- return func(*args)
- except Exception, e:
- args[0].log.error(_("Error executing addon: %s") % e)
- if args[0].core.debug:
- traceback.print_exc()
-
- return new
-
-
- def addRPC(self, plugin, func, doc):
- plugin = plugin.rpartition(".")[2]
- doc = doc.strip() if doc else ""
-
- if plugin in self.methods:
- self.methods[plugin][func] = doc
- else:
- self.methods[plugin] = {func: doc}
-
-
- def callRPC(self, plugin, func, args, parse):
- if not args:
- args = tuple()
- if parse:
- args = tuple([literal_eval(x) for x in args])
- plugin = self.pluginMap[plugin]
- f = getattr(plugin, func)
- return f(*args)
-
-
- def createIndex(self):
- plugins = []
- active = []
- deactive = []
-
- for pluginname in self.core.pluginManager.addonPlugins:
- try:
- # hookClass = getattr(plugin, plugin.__name)
- if self.core.config.getPlugin(pluginname, "activated"):
- pluginClass = self.core.pluginManager.loadClass("addon", pluginname)
- if not pluginClass:
- continue
-
- plugin = pluginClass(self.core, self)
- plugins.append(plugin)
- self.pluginMap[pluginClass.__name] = plugin
- if plugin.isActivated():
- active.append(pluginClass.__name)
- else:
- deactive.append(pluginname)
-
- except Exception:
- self.core.log.warning(_("Failed activating %(name)s") % {"name": pluginname})
- if self.core.debug:
- traceback.print_exc()
-
- self.core.log.info(_("Activated addons: %s") % ", ".join(sorted(active)))
- self.core.log.info(_("Deactivated addons: %s") % ", ".join(sorted(deactive)))
-
- self.plugins = plugins
-
-
- def manageAddon(self, plugin, name, value):
- if name == "activated" and value:
- self.activateAddon(plugin)
-
- elif name == "activated" and not value:
- self.deactivateAddon(plugin)
-
-
- def activateAddon(self, pluginname):
- # check if already loaded
- for inst in self.plugins:
- if inst.__name == pluginname:
- return
-
- pluginClass = self.core.pluginManager.loadClass("addon", pluginname)
-
- if not pluginClass:
- return
-
- self.core.log.debug("Activate addon: %s" % pluginname)
-
- addon = pluginClass(self.core, self)
- self.plugins.append(addon)
- self.pluginMap[pluginClass.__name] = addon
-
- addon.activate()
-
-
- def deactivateAddon(self, pluginname):
- for plugin in self.plugins:
- if plugin.__name == pluginname:
- addon = plugin
- break
- else:
- return
-
- self.core.log.debug("Deactivate addon: %s" % pluginname)
-
- addon.deactivate()
-
- #remove periodic call
- self.core.log.debug("Removed callback: %s" % self.core.scheduler.removeJob(addon.cb))
-
- self.plugins.remove(addon)
- del self.pluginMap[addon.__name]
-
-
- @try_catch
- def coreReady(self):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.activate()
-
- self.dispatchEvent("addon-start")
-
-
- @try_catch
- def coreExiting(self):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.exit()
-
- self.dispatchEvent("addon-exit")
-
-
- @lock
- def downloadPreparing(self, pyfile):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.downloadPreparing(pyfile)
-
- self.dispatchEvent("download-preparing", pyfile)
-
-
- @lock
- def downloadFinished(self, pyfile):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.downloadFinished(pyfile)
-
- self.dispatchEvent("download-finished", pyfile)
-
-
- @lock
- @try_catch
- def downloadFailed(self, pyfile):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.downloadFailed(pyfile)
-
- self.dispatchEvent("download-failed", pyfile)
-
-
- @lock
- def packageFinished(self, package):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.packageFinished(package)
-
- self.dispatchEvent("package-finished", package)
-
-
- @lock
- def beforeReconnecting(self, ip):
- for plugin in self.plugins:
- plugin.beforeReconnecting(ip)
-
- self.dispatchEvent("beforeReconnecting", ip)
-
-
- @lock
- def afterReconnecting(self, ip):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.afterReconnecting(ip)
-
- self.dispatchEvent("afterReconnecting", ip)
-
-
- def startThread(self, function, *args, **kwargs):
- return AddonThread(self.core.threadManager, function, args, kwargs)
-
-
- def activePlugins(self):
- """ returns all active plugins """
- return [x for x in self.plugins if x.isActivated()]
-
-
- def getAllInfo(self):
- """returns info stored by addon plugins"""
- info = {}
- for name, plugin in self.pluginMap.iteritems():
- if plugin.info:
- # copy and convert so str
- info[name] = dict(
- [(x, str(y) if not isinstance(y, basestring) else y) for x, y in plugin.info.iteritems()])
- return info
-
-
- def getInfo(self, plugin):
- info = {}
- if plugin in self.pluginMap and self.pluginMap[plugin].info:
- info = dict((x, str(y) if not isinstance(y, basestring) else y)
- for x, y in self.pluginMap[plugin].info.iteritems())
- return info
-
-
- def addEvent(self, event, func):
- """Adds an event listener for event name"""
- if event in self.events:
- self.events[event].append(func)
- else:
- self.events[event] = [func]
-
-
- def removeEvent(self, event, func):
- """removes previously added event listener"""
- if event in self.events:
- self.events[event].remove(func)
-
-
- def dispatchEvent(self, event, *args):
- """dispatches event with args"""
- if event in self.events:
- for f in self.events[event]:
- try:
- f(*args)
- except Exception, e:
- self.core.log.warning("Error calling event handler %s: %s, %s, %s"
- % (event, f, args, str(e)))
- if self.core.debug:
- traceback.print_exc()
diff --git a/pyload/manager/CaptchaManager.py b/pyload/manager/Captcha.py
index e54eacf30..e54eacf30 100644
--- a/pyload/manager/CaptchaManager.py
+++ b/pyload/manager/Captcha.py
diff --git a/pyload/manager/event/PullEvents.py b/pyload/manager/Event.py
index 20897290e..20897290e 100644
--- a/pyload/manager/event/PullEvents.py
+++ b/pyload/manager/Event.py
diff --git a/pyload/manager/Plugin.py b/pyload/manager/Plugin.py
new file mode 100644
index 000000000..71a7131c4
--- /dev/null
+++ b/pyload/manager/Plugin.py
@@ -0,0 +1,404 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import re
+import sys
+
+from itertools import chain
+from os import listdir, makedirs
+from os.path import isdir, isfile, join, exists, abspath
+from sys import version_info
+from traceback import print_exc
+from urllib import unquote
+
+from SafeEval import const_eval as literal_eval
+
+
+class PluginManager(object):
+ ROOT = "pyload.plugin."
+ USERROOT = "userplugins."
+ TYPES = ["account", "addon", "container", "crypter", "hook", "hoster", "internal", "ocr"]
+
+ PATTERN = re.compile(r'__pattern\s*=\s*u?r("|\')([^"\']+)')
+ VERSION = re.compile(r'__version\s*=\s*("|\')([\d.]+)')
+ CONFIG = re.compile(r'__config\s*=\s*\[([^\]]+)', re.M)
+ DESC = re.compile(r'__description\s*=\s*("|"""|\')([^"\']+)')
+
+
+ def __init__(self, core):
+ self.core = core
+
+ self.plugins = {}
+ self.createIndex()
+
+ #register for import addon
+ sys.meta_path.append(self)
+
+
+ def loadTypes(self):
+ rootdir = join(pypath, "pyload", "plugins")
+ userdir = "userplugins"
+
+ types = set().union(*[[d for d in listdir(p) if isdir(join(p, d))]
+ for p in (rootdir, userdir) if exists(p)])
+
+ if not types:
+ self.log.critical(_("No plugins found!"))
+
+ self.TYPES = list(set(self.TYPES) | types)
+
+
+ def createIndex(self):
+ """create information for all plugins available"""
+
+ sys.path.append(abspath(""))
+
+ self.loadTypes()
+
+ for type in self.TYPES:
+ self.plugins[type] = self.parse(type)
+ setattr(self, "%sPlugins" % type, self.plugins[type])
+
+ self.plugins['addon'] = self.addonPlugins.update(self.hookPlugins)
+
+ self.core.log.debug("Created index of plugins")
+
+
+ def parse(self, folder, rootplugins={}):
+ """
+        returns a dict with plugin information;
+        rootplugins contains the plugins already parsed from the pyload install.
+ """
+
+ plugins = {}
+
+ if rootplugins:
+ try:
+ pfolder = join("userplugins", folder)
+ if not exists(pfolder):
+ makedirs(pfolder)
+
+ for ifile in (join("userplugins", "__init__.py"),
+ join(pfolder, "__init__.py")):
+ if not exists(ifile):
+ f = open(ifile, "wb")
+ f.close()
+
+ except IOError, e:
+ self.core.log.critical(str(e))
+ return rootplugins
+
+ else:
+ pfolder = join(pypath, "pyload", "plugins", folder)
+
+ for f in listdir(pfolder):
+ if (isfile(join(pfolder, f)) and f.endswith(".py") or f.endswith("_25.pyc") or f.endswith(
+ "_26.pyc") or f.endswith("_27.pyc")) and not f.startswith("_"):
+
+ try:
+ with open(join(pfolder, f)) as data:
+ content = data.read()
+
+ except IOError, e:
+ self.core.log.error(str(e))
+ continue
+
+ if f.endswith("_25.pyc") and version_info[0:2] != (2, 5): #@TODO: Remove in 0.4.10
+ continue
+
+ elif f.endswith("_26.pyc") and version_info[0:2] != (2, 6): #@TODO: Remove in 0.4.10
+ continue
+
+ elif f.endswith("_27.pyc") and version_info[0:2] != (2, 7): #@TODO: Remove in 0.4.10
+ continue
+
+ name = f[:-3]
+ if name[-1] == ".":
+ name = name[:-4]
+
+ version = self.VERSION.findall(content)
+ if version:
+ version = float(version[0][1])
+ else:
+ version = 0
+
+ if rootplugins and name in rootplugins:
+ if rootplugins[name]['version'] >= version:
+ continue
+
+ plugins[name] = {}
+ plugins[name]['version'] = version
+
+ module = f.replace(".pyc", "").replace(".py", "")
+
+ # the plugin is loaded from user directory
+ plugins[name]['user'] = True if rootplugins else False
+ plugins[name]['name'] = module
+
+ pattern = self.PATTERN.findall(content)
+
+ if pattern:
+ pattern = pattern[0][1]
+
+ try:
+ regexp = re.compile(pattern)
+ except Exception:
+ self.core.log.error(_("%s has a invalid pattern") % name)
+ pattern = r'^unmatchable$'
+ regexp = re.compile(pattern)
+
+ plugins[name]['pattern'] = pattern
+ plugins[name]['re'] = regexp
+
+ # internals have no config
+ if folder == "internal":
+ self.core.config.deleteConfig(name)
+ continue
+
+ config = self.CONFIG.findall(content)
+ if config:
+ try:
+ config = literal_eval(config[0].strip().replace("\n", "").replace("\r", ""))
+ desc = self.DESC.findall(content)
+ desc = desc[0][1] if desc else ""
+
+ if type(config[0]) == tuple:
+ config = [list(x) for x in config]
+ else:
+ config = [list(config)]
+
+ if folder not in ("account", "internal") and not [True for item in config if item[0] == "activated"]:
+ config.insert(0, ["activated", "bool", "Activated", False if folder in ("addon", "hook") else True])
+
+ self.core.config.addPluginConfig(name, config, desc)
+ except Exception:
+ self.core.log.error("Invalid config in %s: %s" % (name, config))
+
+ elif folder in ("addon", "hook"): #force config creation
+ desc = self.DESC.findall(content)
+ desc = desc[0][1] if desc else ""
+ config = (["activated", "bool", "Activated", False],)
+
+ try:
+ self.core.config.addPluginConfig(name, config, desc)
+ except Exception:
+ self.core.log.error("Invalid config in %s: %s" % (name, config))
+
+ if not rootplugins and plugins: #: Double check
+ plugins.update(self.parse(folder, plugins))
+
+ return plugins
+
+
+ def parseUrls(self, urls):
+ """parse plugins for given list of urls"""
+
+ last = None
+        res = [] #: tuples of (url, plugintype, pluginname)
+
+ for url in urls:
+ if type(url) not in (str, unicode, buffer):
+ continue
+
+ url = unquote(url)
+
+ if last and last[2]['re'].match(url):
+ res.append((url, last[0], last[1]))
+ continue
+
+ for type in self.TYPES:
+ for name, plugin in self.plugins[type]:
+
+ m = None
+ try:
+ if 'pattern' in plugin:
+ m = plugin['re'].match(url)
+
+ except KeyError:
+ self.core.log.error(_("Plugin [%(type)s] %(name)s skipped due broken pattern")
+ % {'name': name, 'type': type})
+
+ if m:
+ res.append((url, type, name))
+ last = (type, name, plugin)
+ break
+ else:
+ res.append((url, "internal", "BasePlugin"))
+
+ return res
+
+
+ def findPlugin(self, type, name):
+ if type not in self.plugins:
+ return None
+
+ elif name not in self.plugins[type]:
+ self.core.log.warning(_("Plugin [%(type)s] %(name)s not found | Using plugin: [internal] BasePlugin")
+ % {'name': name, 'type': type})
+ return self.internalPlugins["BasePlugin"]
+
+ else:
+ return self.plugins[type][name]
+
+
+ def getPlugin(self, type, name, original=False):
+ """return plugin module from hoster|decrypter|container"""
+ plugin = self.findPlugin(type, name)
+
+ if plugin is None:
+ return {}
+
+ if "new_module" in plugin and not original:
+ return plugin['new_module']
+ else:
+ return self.loadModule(type, name)
+
+
+ def getPluginName(self, type, name):
+ """ used to obtain new name if other plugin was injected"""
+ plugin = self.findPlugin(type, name)
+
+ if plugin is None:
+ return ""
+
+ if "new_name" in plugin:
+ return plugin['new_name']
+
+ return name
+
+
+ def loadModule(self, type, name):
+ """ Returns loaded module for plugin
+
+ :param type: plugin type, subfolder of pyload.plugins
+        :param name: plugin name
+ """
+ plugins = self.plugins[type]
+
+ if name in plugins:
+ if "module" in plugins[name]:
+ return plugins[name]['module']
+
+ try:
+ module = __import__(self.ROOT + "%s.%s" % (type, plugins[name]['name']), globals(), locals(),
+ plugins[name]['name'])
+
+ except Exception, e:
+ self.core.log.error(_("Error importing plugin: [%(type)s] %(name)s (v%(version).2f) | %(errmsg)s")
+ % {'name': name, 'type': type, 'version': plugins[name]['version'], "errmsg": str(e)})
+ if self.core.debug:
+ print_exc()
+
+ else:
+ plugins[name]['module'] = module #: cache import, maybe unneeded
+
+ self.core.log.debug(_("Loaded plugin: [%(type)s] %(name)s (v%(version).2f)")
+ % {'name': name, 'type': type, 'version': plugins[name]['version']})
+ return module
+
+
+ def loadClass(self, type, name):
+ """Returns the class of a plugin with the same name"""
+ module = self.loadModule(type, name)
+ if module:
+ return getattr(module, name)
+ else:
+ return None
+
+
+ def getAccountPlugins(self):
+ """return list of account plugin names"""
+ return self.accountPlugins.keys()
+
+
+ def find_module(self, fullname, path=None):
+        #redirecting imports if necessary
+        if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): #separate pyload plugins
+ if fullname.startswith(self.USERROOT): user = 1
+ else: user = 0 #used as bool and int
+
+ split = fullname.split(".")
+ if len(split) != 4 - user: return
+ type, name = split[2 - user:4 - user]
+
+ if type in self.plugins and name in self.plugins[type]:
+ #userplugin is a newer version
+ if not user and self.plugins[type][name]['user']:
+ return self
+                #imported from userdir, but pyload's is newer
+ if user and not self.plugins[type][name]['user']:
+ return self
+
+
+ def load_module(self, name, replace=True):
+ if name not in sys.modules: #could be already in modules
+ if replace:
+ if self.ROOT in name:
+ newname = name.replace(self.ROOT, self.USERROOT)
+ else:
+ newname = name.replace(self.USERROOT, self.ROOT)
+ else:
+ newname = name
+
+ base, plugin = newname.rsplit(".", 1)
+
+ self.core.log.debug("Redirected import %s -> %s" % (name, newname))
+
+ module = __import__(newname, globals(), locals(), [plugin])
+            #inject under new and old name
+ sys.modules[name] = module
+ sys.modules[newname] = module
+
+ return sys.modules[name]
+
+
+ def reloadPlugins(self, type_plugins):
+ """ reload and reindex plugins """
+ if not type_plugins:
+ return None
+
+ self.core.log.debug("Request reload of plugins: %s" % type_plugins)
+
+ reloaded = []
+
+ as_dict = {}
+ for t,n in type_plugins:
+ if t in as_dict:
+ as_dict[t].append(n)
+ else:
+ as_dict[t] = [n]
+
+ for type in as_dict.iterkeys():
+ if type in ("addon", "internal"): #: do not reload them because would cause to much side effects
+ self.core.log.debug("Skipping reload for plugin: [%(type)s] %(name)s" % {'name': plugin, 'type': type})
+ continue
+
+ for plugin in as_dict[type]:
+ if plugin in self.plugins[type] and "module" in self.plugins[type][plugin]:
+ self.core.log.debug(_("Reloading plugin: [%(type)s] %(name)s") % {'name': plugin, 'type': type})
+
+ try:
+ reload(self.plugins[type][plugin]['module'])
+
+ except Exception, e:
+ self.core.log.error(_("Error when reloading plugin: [%(type)s] %(name)s") % {'name': plugin, 'type': type}, e)
+ continue
+
+ else:
+ reloaded.append((type, plugin))
+
+ #index creation
+ self.plugins[type] = self.parse(type)
+ setattr(self, "%sPlugins" % type, self.plugins[type])
+
+ if "account" in as_dict: #: accounts needs to be reloaded
+ self.core.accountManager.initPlugins()
+ self.core.scheduler.addJob(0, self.core.accountManager.getAccountInfos)
+
+ return reloaded #: return a list of the plugins successfully reloaded
+
+
+ def reloadPlugin(self, type_plugin):
+ """ reload and reindex ONE plugin """
+ return True if self.reloadPlugins(type_plugin) else False
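The new PluginManager above reads plugin metadata with the module-level PATTERN/VERSION/DESC regexes. A short sketch of what they extract from a plugin source file; the plugin header below is hypothetical:

import re

PATTERN = re.compile(r'__pattern\s*=\s*u?r("|\')([^"\']+)')
VERSION = re.compile(r'__version\s*=\s*("|\')([\d.]+)')
DESC    = re.compile(r'__description\s*=\s*("|"""|\')([^"\']+)')

content = r'''
__pattern = r'https?://(?:www\.)?example\.com/\w+'
__version = "0.21"
__description = "Example.com hoster plugin"
'''

pattern = PATTERN.findall(content)[0][1]         # the hoster URL regex
version = float(VERSION.findall(content)[0][1])  # 0.21
desc    = DESC.findall(content)[0][1]            # "Example.com hoster plugin"

parse() stores these per plugin name and parseUrls() later matches incoming urls against the compiled pattern.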
diff --git a/pyload/manager/PluginManager.py b/pyload/manager/PluginManager.py
deleted file mode 100644
index c327c991a..000000000
--- a/pyload/manager/PluginManager.py
+++ /dev/null
@@ -1,404 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import re
-import sys
-
-from itertools import chain
-from os import listdir, makedirs
-from os.path import isdir, isfile, join, exists, abspath
-from sys import version_info
-from traceback import print_exc
-from urllib import unquote
-
-from SafeEval import const_eval as literal_eval
-
-
-class PluginManager(object):
- ROOT = "pyload.plugins."
- USERROOT = "userplugins."
- TYPES = ["account", "addon", "container", "crypter", "hook", "hoster", "internal", "ocr"]
-
- PATTERN = re.compile(r'__pattern\s*=\s*u?r("|\')([^"\']+)')
- VERSION = re.compile(r'__version\s*=\s*("|\')([\d.]+)')
- CONFIG = re.compile(r'__config\s*=\s*\[([^\]]+)', re.M)
- DESC = re.compile(r'__description\s*=\s*("|"""|\')([^"\']+)')
-
-
- def __init__(self, core):
- self.core = core
-
- self.plugins = {}
- self.createIndex()
-
- #register for import addon
- sys.meta_path.append(self)
-
-
- def loadTypes(self):
- rootdir = join(pypath, "pyload", "plugins")
- userdir = "userplugins"
-
- types = set().union(*[[d for d in listdir(p) if isdir(join(p, d))]
- for p in (rootdir, userdir) if exists(p)])
-
- if not types:
- self.log.critical(_("No plugins found!"))
-
- self.TYPES = list(set(self.TYPES) | types)
-
-
- def createIndex(self):
- """create information for all plugins available"""
-
- sys.path.append(abspath(""))
-
- self.loadTypes()
-
- for type in self.TYPES:
- self.plugins[type] = self.parse(type)
- setattr(self, "%sPlugins" % type, self.plugins[type])
-
- self.plugins['addon'] = self.addonPlugins.update(self.hookPlugins)
-
- self.core.log.debug("Created index of plugins")
-
-
- def parse(self, folder, rootplugins={}):
- """
- returns dict with information
- home contains parsed plugins from pyload.
- """
-
- plugins = {}
-
- if rootplugins:
- try:
- pfolder = join("userplugins", folder)
- if not exists(pfolder):
- makedirs(pfolder)
-
- for ifile in (join("userplugins", "__init__.py"),
- join(pfolder, "__init__.py")):
- if not exists(ifile):
- f = open(ifile, "wb")
- f.close()
-
- except IOError, e:
- self.core.log.critical(str(e))
- return rootplugins
-
- else:
- pfolder = join(pypath, "pyload", "plugins", folder)
-
- for f in listdir(pfolder):
- if (isfile(join(pfolder, f)) and f.endswith(".py") or f.endswith("_25.pyc") or f.endswith(
- "_26.pyc") or f.endswith("_27.pyc")) and not f.startswith("_"):
-
- try:
- with open(join(pfolder, f)) as data:
- content = data.read()
-
- except IOError, e:
- self.core.log.error(str(e))
- continue
-
- if f.endswith("_25.pyc") and version_info[0:2] != (2, 5): #@TODO: Remove in 0.4.10
- continue
-
- elif f.endswith("_26.pyc") and version_info[0:2] != (2, 6): #@TODO: Remove in 0.4.10
- continue
-
- elif f.endswith("_27.pyc") and version_info[0:2] != (2, 7): #@TODO: Remove in 0.4.10
- continue
-
- name = f[:-3]
- if name[-1] == ".":
- name = name[:-4]
-
- version = self.VERSION.findall(content)
- if version:
- version = float(version[0][1])
- else:
- version = 0
-
- if rootplugins and name in rootplugins:
- if rootplugins[name]['version'] >= version:
- continue
-
- plugins[name] = {}
- plugins[name]['version'] = version
-
- module = f.replace(".pyc", "").replace(".py", "")
-
- # the plugin is loaded from user directory
- plugins[name]['user'] = True if rootplugins else False
- plugins[name]['name'] = module
-
- pattern = self.PATTERN.findall(content)
-
- if pattern:
- pattern = pattern[0][1]
-
- try:
- regexp = re.compile(pattern)
- except Exception:
- self.core.log.error(_("%s has a invalid pattern") % name)
- pattern = r'^unmatchable$'
- regexp = re.compile(pattern)
-
- plugins[name]['pattern'] = pattern
- plugins[name]['re'] = regexp
-
- # internals have no config
- if folder == "internal":
- self.core.config.deleteConfig(name)
- continue
-
- config = self.CONFIG.findall(content)
- if config:
- try:
- config = literal_eval(config[0].strip().replace("\n", "").replace("\r", ""))
- desc = self.DESC.findall(content)
- desc = desc[0][1] if desc else ""
-
- if type(config[0]) == tuple:
- config = [list(x) for x in config]
- else:
- config = [list(config)]
-
- if folder not in ("account", "internal") and not [True for item in config if item[0] == "activated"]:
- config.insert(0, ["activated", "bool", "Activated", False if folder in ("addon", "hook") else True])
-
- self.core.config.addPluginConfig(name, config, desc)
- except Exception:
- self.core.log.error("Invalid config in %s: %s" % (name, config))
-
- elif folder in ("addon", "hook"): #force config creation
- desc = self.DESC.findall(content)
- desc = desc[0][1] if desc else ""
- config = (["activated", "bool", "Activated", False],)
-
- try:
- self.core.config.addPluginConfig(name, config, desc)
- except Exception:
- self.core.log.error("Invalid config in %s: %s" % (name, config))
-
- if not rootplugins and plugins: #: Double check
- plugins.update(self.parse(folder, plugins))
-
- return plugins
-
-
- def parseUrls(self, urls):
- """parse plugins for given list of urls"""
-
- last = None
- res = [] #: tupels of (url, plugintype, pluginname)
-
- for url in urls:
- if type(url) not in (str, unicode, buffer):
- continue
-
- url = unquote(url)
-
- if last and last[2]['re'].match(url):
- res.append((url, last[0], last[1]))
- continue
-
- for type in self.TYPES:
- for name, plugin in self.plugins[type]:
-
- m = None
- try:
- if 'pattern' in plugin:
- m = plugin['re'].match(url)
-
- except KeyError:
- self.core.log.error(_("Plugin [%(type)s] %(name)s skipped due broken pattern")
- % {'name': name, 'type': type})
-
- if m:
- res.append((url, type, name))
- last = (type, name, plugin)
- break
- else:
- res.append((url, "internal", "BasePlugin"))
-
- return res
-
-
- def findPlugin(self, type, name):
- if type not in self.plugins:
- return None
-
- elif name not in self.plugins[type]:
- self.core.log.warning(_("Plugin [%(type)s] %(name)s not found | Using plugin: [internal] BasePlugin")
- % {'name': name, 'type': type})
- return self.internalPlugins["BasePlugin"]
-
- else:
- return self.plugins[type][name]
-
-
- def getPlugin(self, type, name, original=False):
- """return plugin module from hoster|decrypter|container"""
- plugin = self.findPlugin(type, name)
-
- if plugin is None:
- return {}
-
- if "new_module" in plugin and not original:
- return plugin['new_module']
- else:
- return self.loadModule(type, name)
-
-
- def getPluginName(self, type, name):
- """ used to obtain new name if other plugin was injected"""
- plugin = self.findPlugin(type, name)
-
- if plugin is None:
- return ""
-
- if "new_name" in plugin:
- return plugin['new_name']
-
- return name
-
-
- def loadModule(self, type, name):
- """ Returns loaded module for plugin
-
- :param type: plugin type, subfolder of pyload.plugins
- :param name:
- """
- plugins = self.plugins[type]
-
- if name in plugins:
- if "module" in plugins[name]:
- return plugins[name]['module']
-
- try:
- module = __import__(self.ROOT + "%s.%s" % (type, plugins[name]['name']), globals(), locals(),
- plugins[name]['name'])
-
- except Exception, e:
- self.core.log.error(_("Error importing plugin: [%(type)s] %(name)s (v%(version).2f) | %(errmsg)s")
- % {'name': name, 'type': type, 'version': plugins[name]['version'], "errmsg": str(e)})
- if self.core.debug:
- print_exc()
-
- else:
- plugins[name]['module'] = module #: cache import, maybe unneeded
-
- self.core.log.debug(_("Loaded plugin: [%(type)s] %(name)s (v%(version).2f)")
- % {'name': name, 'type': type, 'version': plugins[name]['version']})
- return module
-
-
- def loadClass(self, type, name):
- """Returns the class of a plugin with the same name"""
- module = self.loadModule(type, name)
- if module:
- return getattr(module, name)
- else:
- return None
-
-
- def getAccountPlugins(self):
- """return list of account plugin names"""
- return self.accountPlugins.keys()
-
-
- def find_module(self, fullname, path=None):
- #redirecting imports if necesarry
- if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): #seperate pyload plugins
- if fullname.startswith(self.USERROOT): user = 1
- else: user = 0 #used as bool and int
-
- split = fullname.split(".")
- if len(split) != 4 - user: return
- type, name = split[2 - user:4 - user]
-
- if type in self.plugins and name in self.plugins[type]:
- #userplugin is a newer version
- if not user and self.plugins[type][name]['user']:
- return self
- #imported from userdir, but pyloads is newer
- if user and not self.plugins[type][name]['user']:
- return self
-
-
- def load_module(self, name, replace=True):
- if name not in sys.modules: #could be already in modules
- if replace:
- if self.ROOT in name:
- newname = name.replace(self.ROOT, self.USERROOT)
- else:
- newname = name.replace(self.USERROOT, self.ROOT)
- else:
- newname = name
-
- base, plugin = newname.rsplit(".", 1)
-
- self.core.log.debug("Redirected import %s -> %s" % (name, newname))
-
- module = __import__(newname, globals(), locals(), [plugin])
- #inject under new an old name
- sys.modules[name] = module
- sys.modules[newname] = module
-
- return sys.modules[name]
-
-
- def reloadPlugins(self, type_plugins):
- """ reload and reindex plugins """
- if not type_plugins:
- return None
-
- self.core.log.debug("Request reload of plugins: %s" % type_plugins)
-
- reloaded = []
-
- as_dict = {}
- for t,n in type_plugins:
- if t in as_dict:
- as_dict[t].append(n)
- else:
- as_dict[t] = [n]
-
- for type in as_dict.iterkeys():
- if type in ("addon", "internal"): #: do not reload them because would cause to much side effects
- self.core.log.debug("Skipping reload for plugin: [%(type)s] %(name)s" % {'name': plugin, 'type': type})
- continue
-
- for plugin in as_dict[type]:
- if plugin in self.plugins[type] and "module" in self.plugins[type][plugin]:
- self.core.log.debug(_("Reloading plugin: [%(type)s] %(name)s") % {'name': plugin, 'type': type})
-
- try:
- reload(self.plugins[type][plugin]['module'])
-
- except Exception, e:
- self.core.log.error(_("Error when reloading plugin: [%(type)s] %(name)s") % {'name': plugin, 'type': type}, e)
- continue
-
- else:
- reloaded.append((type, plugin))
-
- #index creation
- self.plugins[type] = self.parse(type)
- setattr(self, "%sPlugins" % type, self.plugins[type])
-
- if "account" in as_dict: #: accounts needs to be reloaded
- self.core.accountManager.initPlugins()
- self.core.scheduler.addJob(0, self.core.accountManager.getAccountInfos)
-
- return reloaded #: return a list of the plugins successfully reloaded
-
-
- def reloadPlugin(self, type_plugin):
- """ reload and reindex ONE plugin """
- return True if self.reloadPlugins(type_plugin) else False
diff --git a/pyload/manager/RemoteManager.py b/pyload/manager/Remote.py
index 910881164..910881164 100644
--- a/pyload/manager/RemoteManager.py
+++ b/pyload/manager/Remote.py
diff --git a/pyload/manager/Thread.py b/pyload/manager/Thread.py
new file mode 100644
index 000000000..6c9304e87
--- /dev/null
+++ b/pyload/manager/Thread.py
@@ -0,0 +1,302 @@
+# -*- coding: utf-8 -*-
+# @author: RaNaN
+
+from os.path import exists, join
+import re
+from subprocess import Popen
+from threading import Event, Lock
+from time import sleep, time
+from traceback import print_exc
+from random import choice
+
+import pycurl
+
+from pyload.manager.thread.Decrypter import DecrypterThread
+from pyload.manager.thread.Download import DownloadThread
+from pyload.manager.thread.Info import InfoThread
+from pyload.datatype.File import PyFile
+from pyload.network.RequestFactory import getURL
+from pyload.utils import freeSpace, lock
+
+
+class ThreadManager(object):
+ """manages the download threads, assign jobs, reconnect etc"""
+
+
+ def __init__(self, core):
+ """Constructor"""
+ self.core = core
+
+ self.threads = [] #: thread list
+ self.localThreads = [] #: addon+decrypter threads
+
+ self.pause = True
+
+ self.reconnecting = Event()
+ self.reconnecting.clear()
+ self.downloaded = 0 #number of files downloaded since last cleanup
+
+ self.lock = Lock()
+
+        # some operations require fetching url info from the hoster, so we cache it so it won't be done twice
+ # contains a timestamp and will be purged after timeout
+ self.infoCache = {}
+
+ # pool of ids for online check
+ self.resultIDs = 0
+
+ # threads which are fetching hoster results
+ self.infoResults = {}
+ #timeout for cache purge
+ self.timestamp = 0
+
+ pycurl.global_init(pycurl.GLOBAL_DEFAULT)
+
+ for i in range(0, self.core.config.get("download", "max_downloads")):
+ self.createThread()
+
+
+ def createThread(self):
+ """create a download thread"""
+
+ thread = DownloadThread(self)
+ self.threads.append(thread)
+
+ def createInfoThread(self, data, pid):
+ """
+        start a thread which fetches online status and other infos
+ data = [ .. () .. ]
+ """
+ self.timestamp = time() + 5 * 60
+
+ InfoThread(self, data, pid)
+
+ @lock
+ def createResultThread(self, data, add=False):
+ """ creates a thread to fetch online status, returns result id """
+ self.timestamp = time() + 5 * 60
+
+ rid = self.resultIDs
+ self.resultIDs += 1
+
+ InfoThread(self, data, rid=rid, add=add)
+
+ return rid
+
+
+ @lock
+ def getInfoResult(self, rid):
+ """returns result and clears it"""
+ self.timestamp = time() + 5 * 60
+
+ if rid in self.infoResults:
+ data = self.infoResults[rid]
+ self.infoResults[rid] = {}
+ return data
+ else:
+ return {}
+
+ @lock
+ def setInfoResults(self, rid, result):
+ self.infoResults[rid].update(result)
+
+ def getActiveFiles(self):
+ active = [x.active for x in self.threads if x.active and isinstance(x.active, PyFile)]
+
+ for t in self.localThreads:
+ active.extend(t.getActiveFiles())
+
+ return active
+
+ def processingIds(self):
+ """get a id list of all pyfiles processed"""
+ return [x.id for x in self.getActiveFiles()]
+
+
+ def work(self):
+ """run all task which have to be done (this is for repetivive call by core)"""
+ try:
+ self.tryReconnect()
+ except Exception, e:
+ self.core.log.error(_("Reconnect Failed: %s") % str(e) )
+ self.reconnecting.clear()
+ if self.core.debug:
+ print_exc()
+ self.checkThreadCount()
+
+ try:
+ self.assignJob()
+ except Exception, e:
+ self.core.log.warning("Assign job error", e)
+ if self.core.debug:
+ print_exc()
+
+ sleep(0.5)
+ self.assignJob()
+            #it may have failed non-critically so we try it again
+
+ if (self.infoCache or self.infoResults) and self.timestamp < time():
+ self.infoCache.clear()
+ self.infoResults.clear()
+ self.core.log.debug("Cleared Result cache")
+
+ #--------------------------------------------------------------------------
+ def tryReconnect(self):
+ """checks if reconnect needed"""
+
+ if not (self.core.config["reconnect"]["activated"] and self.core.api.isTimeReconnect()):
+ return False
+
+ active = [x.active.plugin.wantReconnect and x.active.plugin.waiting for x in self.threads if x.active]
+
+ if not (0 < active.count(True) == len(active)):
+ return False
+
+ if not exists(self.core.config['reconnect']['method']):
+ if exists(join(pypath, self.core.config['reconnect']['method'])):
+ self.core.config['reconnect']['method'] = join(pypath, self.core.config['reconnect']['method'])
+ else:
+ self.core.config["reconnect"]["activated"] = False
+ self.core.log.warning(_("Reconnect script not found!"))
+ return
+
+ self.reconnecting.set()
+
+ #Do reconnect
+ self.core.log.info(_("Starting reconnect"))
+
+ while [x.active.plugin.waiting for x in self.threads if x.active].count(True) != 0:
+ sleep(0.25)
+
+ ip = self.getIP()
+
+ self.core.addonManager.beforeReconnecting(ip)
+
+ self.core.log.debug("Old IP: %s" % ip)
+
+ try:
+ reconn = Popen(self.core.config['reconnect']['method'], bufsize=-1, shell=True)#, stdout=subprocess.PIPE)
+ except Exception:
+ self.core.log.warning(_("Failed executing reconnect script!"))
+ self.core.config["reconnect"]["activated"] = False
+ self.reconnecting.clear()
+ if self.core.debug:
+ print_exc()
+ return
+
+ reconn.wait()
+ sleep(1)
+ ip = self.getIP()
+ self.core.addonManager.afterReconnecting(ip)
+
+ self.core.log.info(_("Reconnected, new IP: %s") % ip)
+
+ self.reconnecting.clear()
+
+ def getIP(self):
+ """retrieve current ip"""
+ services = [("http://automation.whatismyip.com/n09230945.asp", "(\S+)"),
+ ("http://checkip.dyndns.org/",".*Current IP Address: (\S+)</body>.*")]
+
+ ip = ""
+ for i in range(10):
+ try:
+ sv = choice(services)
+ ip = getURL(sv[0])
+ ip = re.match(sv[1], ip).group(1)
+ break
+ except Exception:
+ ip = ""
+ sleep(1)
+
+ return ip
+
+ #--------------------------------------------------------------------------
+ def checkThreadCount(self):
+ """checks if there are need for increasing or reducing thread count"""
+
+ if len(self.threads) == self.core.config.get("download", "max_downloads"):
+ return True
+ elif len(self.threads) < self.core.config.get("download", "max_downloads"):
+ self.createThread()
+ else:
+ free = [x for x in self.threads if not x.active]
+ if free:
+ free[0].put("quit")
+
+
+ def cleanPycurl(self):
+ """ make a global curl cleanup (currently ununused) """
+ if self.processingIds():
+ return False
+ pycurl.global_cleanup()
+ pycurl.global_init(pycurl.GLOBAL_DEFAULT)
+ self.downloaded = 0
+ self.core.log.debug("Cleaned up pycurl")
+ return True
+
+ #--------------------------------------------------------------------------
+ def assignJob(self):
+ """assing a job to a thread if possible"""
+
+ if self.pause or not self.core.api.isTimeDownload(): return
+
+ #if self.downloaded > 20:
+ # if not self.cleanPyCurl(): return
+
+ free = [x for x in self.threads if not x.active]
+
+ inuse = set([(x.active.pluginname, self.getLimit(x)) for x in self.threads if x.active and x.active.hasPlugin() and x.active.plugin.account])
+ inuse = map(lambda x: (x[0], x[1], len([y for y in self.threads if y.active and y.active.pluginname == x[0]])) ,inuse)
+ onlimit = [x[0] for x in inuse if x[1] > 0 and x[2] >= x[1]]
+
+ occ = [x.active.pluginname for x in self.threads if x.active and x.active.hasPlugin() and not x.active.plugin.multiDL] + onlimit
+
+ occ.sort()
+ occ = tuple(set(occ))
+ job = self.core.files.getJob(occ)
+ if job:
+ try:
+ job.initPlugin()
+ except Exception, e:
+ self.core.log.critical(str(e))
+ print_exc()
+ job.setStatus("failed")
+ job.error = str(e)
+ job.release()
+ return
+
+ if job.plugin.__type == "hoster":
+ spaceLeft = freeSpace(self.core.config["general"]["download_folder"]) / 1024 / 1024
+ if spaceLeft < self.core.config["general"]["min_free_space"]:
+ self.core.log.warning(_("Not enough space left on device"))
+ self.pause = True
+
+ if free and not self.pause:
+ thread = free[0]
+ #self.downloaded += 1
+
+ thread.put(job)
+ else:
+ #put job back
+ if occ not in self.core.files.jobCache:
+ self.core.files.jobCache[occ] = []
+ self.core.files.jobCache[occ].append(job.id)
+
+ #check for decrypt jobs
+ job = self.core.files.getDecryptJob()
+ if job:
+ job.initPlugin()
+ thread = DecrypterThread(self, job)
+
+
+ else:
+ thread = DecrypterThread(self, job)
+
+ def getLimit(self, thread):
+ limit = thread.active.plugin.account.getAccountData(thread.active.plugin.user)["options"].get("limitDL", ["0"])[0]
+ return int(limit)
+
+ def cleanup(self):
+ """do global cleanup, should be called when finished with pycurl"""
+ pycurl.global_cleanup()
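
The interplay of `multiDL`, the per-account `limitDL` option and the `occ` tuple in assignJob() above decides which plugins are excluded from the next getJob() call. A standalone sketch of that occupancy computation, with a namedtuple standing in for the active pyfile/plugin objects (an illustration, not pyLoad's actual data model):

from collections import namedtuple

Active = namedtuple("Active", "pluginname multiDL account limit")

def occupied_plugins(threads):
    """threads: one Active tuple per busy slot, None for idle slots."""
    counts = {}
    for t in threads:
        if t is not None:
            counts[t.pluginname] = counts.get(t.pluginname, 0) + 1

    # plugins whose account limit is reached (limit > 0 and enough active threads)
    on_limit = set(t.pluginname for t in threads
                   if t is not None and t.account and 0 < t.limit <= counts[t.pluginname])
    # plugins that forbid parallel downloads at all
    no_multi = set(t.pluginname for t in threads
                   if t is not None and not t.multiDL)
    return tuple(sorted(on_limit | no_multi))

if __name__ == "__main__":
    slots = [Active("UploadedTo", True, True, 2),
             Active("UploadedTo", True, True, 2),
             Active("FilerNet", False, False, 0),
             None]
    print(occupied_plugins(slots))   # ('FilerNet', 'UploadedTo')
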
diff --git a/pyload/manager/ThreadManager.py b/pyload/manager/ThreadManager.py
deleted file mode 100644
index d6f3c0005..000000000
--- a/pyload/manager/ThreadManager.py
+++ /dev/null
@@ -1,302 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from os.path import exists, join
-import re
-from subprocess import Popen
-from threading import Event, Lock
-from time import sleep, time
-from traceback import print_exc
-from random import choice
-
-import pycurl
-
-from pyload.manager.thread.DecrypterThread import DecrypterThread
-from pyload.manager.thread.DownloadThread import DownloadThread
-from pyload.manager.thread.InfoThread import InfoThread
-from pyload.datatype.PyFile import PyFile
-from pyload.network.RequestFactory import getURL
-from pyload.utils import freeSpace, lock
-
-
-class ThreadManager(object):
- """manages the download threads, assign jobs, reconnect etc"""
-
-
- def __init__(self, core):
- """Constructor"""
- self.core = core
-
- self.threads = [] #: thread list
- self.localThreads = [] #: addon+decrypter threads
-
- self.pause = True
-
- self.reconnecting = Event()
- self.reconnecting.clear()
- self.downloaded = 0 #number of files downloaded since last cleanup
-
- self.lock = Lock()
-
- # some operations require to fetch url info from hoster, so we caching them so it wont be done twice
- # contains a timestamp and will be purged after timeout
- self.infoCache = {}
-
- # pool of ids for online check
- self.resultIDs = 0
-
- # threads which are fetching hoster results
- self.infoResults = {}
- #timeout for cache purge
- self.timestamp = 0
-
- pycurl.global_init(pycurl.GLOBAL_DEFAULT)
-
- for i in range(0, self.core.config.get("download", "max_downloads")):
- self.createThread()
-
-
- def createThread(self):
- """create a download thread"""
-
- thread = DownloadThread(self)
- self.threads.append(thread)
-
- def createInfoThread(self, data, pid):
- """
- start a thread whichs fetches online status and other infos
- data = [ .. () .. ]
- """
- self.timestamp = time() + 5 * 60
-
- InfoThread(self, data, pid)
-
- @lock
- def createResultThread(self, data, add=False):
- """ creates a thread to fetch online status, returns result id """
- self.timestamp = time() + 5 * 60
-
- rid = self.resultIDs
- self.resultIDs += 1
-
- InfoThread(self, data, rid=rid, add=add)
-
- return rid
-
-
- @lock
- def getInfoResult(self, rid):
- """returns result and clears it"""
- self.timestamp = time() + 5 * 60
-
- if rid in self.infoResults:
- data = self.infoResults[rid]
- self.infoResults[rid] = {}
- return data
- else:
- return {}
-
- @lock
- def setInfoResults(self, rid, result):
- self.infoResults[rid].update(result)
-
- def getActiveFiles(self):
- active = [x.active for x in self.threads if x.active and isinstance(x.active, PyFile)]
-
- for t in self.localThreads:
- active.extend(t.getActiveFiles())
-
- return active
-
- def processingIds(self):
- """get a id list of all pyfiles processed"""
- return [x.id for x in self.getActiveFiles()]
-
-
- def work(self):
- """run all task which have to be done (this is for repetivive call by core)"""
- try:
- self.tryReconnect()
- except Exception, e:
- self.core.log.error(_("Reconnect Failed: %s") % str(e) )
- self.reconnecting.clear()
- if self.core.debug:
- print_exc()
- self.checkThreadCount()
-
- try:
- self.assignJob()
- except Exception, e:
- self.core.log.warning("Assign job error", e)
- if self.core.debug:
- print_exc()
-
- sleep(0.5)
- self.assignJob()
- #it may be failed non critical so we try it again
-
- if (self.infoCache or self.infoResults) and self.timestamp < time():
- self.infoCache.clear()
- self.infoResults.clear()
- self.core.log.debug("Cleared Result cache")
-
- #--------------------------------------------------------------------------
- def tryReconnect(self):
- """checks if reconnect needed"""
-
- if not (self.core.config["reconnect"]["activated"] and self.core.api.isTimeReconnect()):
- return False
-
- active = [x.active.plugin.wantReconnect and x.active.plugin.waiting for x in self.threads if x.active]
-
- if not (0 < active.count(True) == len(active)):
- return False
-
- if not exists(self.core.config['reconnect']['method']):
- if exists(join(pypath, self.core.config['reconnect']['method'])):
- self.core.config['reconnect']['method'] = join(pypath, self.core.config['reconnect']['method'])
- else:
- self.core.config["reconnect"]["activated"] = False
- self.core.log.warning(_("Reconnect script not found!"))
- return
-
- self.reconnecting.set()
-
- #Do reconnect
- self.core.log.info(_("Starting reconnect"))
-
- while [x.active.plugin.waiting for x in self.threads if x.active].count(True) != 0:
- sleep(0.25)
-
- ip = self.getIP()
-
- self.core.addonManager.beforeReconnecting(ip)
-
- self.core.log.debug("Old IP: %s" % ip)
-
- try:
- reconn = Popen(self.core.config['reconnect']['method'], bufsize=-1, shell=True)#, stdout=subprocess.PIPE)
- except Exception:
- self.core.log.warning(_("Failed executing reconnect script!"))
- self.core.config["reconnect"]["activated"] = False
- self.reconnecting.clear()
- if self.core.debug:
- print_exc()
- return
-
- reconn.wait()
- sleep(1)
- ip = self.getIP()
- self.core.addonManager.afterReconnecting(ip)
-
- self.core.log.info(_("Reconnected, new IP: %s") % ip)
-
- self.reconnecting.clear()
-
- def getIP(self):
- """retrieve current ip"""
- services = [("http://automation.whatismyip.com/n09230945.asp", "(\S+)"),
- ("http://checkip.dyndns.org/",".*Current IP Address: (\S+)</body>.*")]
-
- ip = ""
- for i in range(10):
- try:
- sv = choice(services)
- ip = getURL(sv[0])
- ip = re.match(sv[1], ip).group(1)
- break
- except Exception:
- ip = ""
- sleep(1)
-
- return ip
-
- #--------------------------------------------------------------------------
- def checkThreadCount(self):
- """checks if there are need for increasing or reducing thread count"""
-
- if len(self.threads) == self.core.config.get("download", "max_downloads"):
- return True
- elif len(self.threads) < self.core.config.get("download", "max_downloads"):
- self.createThread()
- else:
- free = [x for x in self.threads if not x.active]
- if free:
- free[0].put("quit")
-
-
- def cleanPycurl(self):
- """ make a global curl cleanup (currently ununused) """
- if self.processingIds():
- return False
- pycurl.global_cleanup()
- pycurl.global_init(pycurl.GLOBAL_DEFAULT)
- self.downloaded = 0
- self.core.log.debug("Cleaned up pycurl")
- return True
-
- #--------------------------------------------------------------------------
- def assignJob(self):
- """assing a job to a thread if possible"""
-
- if self.pause or not self.core.api.isTimeDownload(): return
-
- #if self.downloaded > 20:
- # if not self.cleanPyCurl(): return
-
- free = [x for x in self.threads if not x.active]
-
- inuse = set([(x.active.pluginname, self.getLimit(x)) for x in self.threads if x.active and x.active.hasPlugin() and x.active.plugin.account])
- inuse = map(lambda x: (x[0], x[1], len([y for y in self.threads if y.active and y.active.pluginname == x[0]])) ,inuse)
- onlimit = [x[0] for x in inuse if x[1] > 0 and x[2] >= x[1]]
-
- occ = [x.active.pluginname for x in self.threads if x.active and x.active.hasPlugin() and not x.active.plugin.multiDL] + onlimit
-
- occ.sort()
- occ = tuple(set(occ))
- job = self.core.files.getJob(occ)
- if job:
- try:
- job.initPlugin()
- except Exception, e:
- self.core.log.critical(str(e))
- print_exc()
- job.setStatus("failed")
- job.error = str(e)
- job.release()
- return
-
- if job.plugin.__type == "hoster":
- spaceLeft = freeSpace(self.core.config["general"]["download_folder"]) / 1024 / 1024
- if spaceLeft < self.core.config["general"]["min_free_space"]:
- self.core.log.warning(_("Not enough space left on device"))
- self.pause = True
-
- if free and not self.pause:
- thread = free[0]
- #self.downloaded += 1
-
- thread.put(job)
- else:
- #put job back
- if occ not in self.core.files.jobCache:
- self.core.files.jobCache[occ] = []
- self.core.files.jobCache[occ].append(job.id)
-
- #check for decrypt jobs
- job = self.core.files.getDecryptJob()
- if job:
- job.initPlugin()
- thread = DecrypterThread(self, job)
-
-
- else:
- thread = DecrypterThread(self, job)
-
- def getLimit(self, thread):
- limit = thread.active.plugin.account.getAccountData(thread.active.plugin.user)["options"].get("limitDL", ["0"])[0]
- return int(limit)
-
- def cleanup(self):
- """do global cleanup, should be called when finished with pycurl"""
- pycurl.global_cleanup()
diff --git a/pyload/manager/thread/Addon.py b/pyload/manager/thread/Addon.py
new file mode 100644
index 000000000..7feec227e
--- /dev/null
+++ b/pyload/manager/thread/Addon.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+# @author: RaNaN
+
+from Queue import Queue
+from threading import Thread
+from os import listdir, stat
+from os.path import join
+from time import sleep, time, strftime, gmtime
+from traceback import print_exc, format_exc
+from pprint import pformat
+from sys import exc_info, exc_clear
+from copy import copy
+from types import MethodType
+
+from pycurl import error
+
+from pyload.manager.thread.Plugin import PluginThread
+
+
+class AddonThread(PluginThread):
+ """thread for addons"""
+
+ #--------------------------------------------------------------------------
+ def __init__(self, m, function, args, kwargs):
+ """Constructor"""
+ PluginThread.__init__(self, m)
+
+ self.f = function
+ self.args = args
+ self.kwargs = kwargs
+
+ self.active = []
+
+ m.localThreads.append(self)
+
+ self.start()
+
+ def getActiveFiles(self):
+ return self.active
+
+ def addActive(self, pyfile):
+ """ Adds a pyfile to active list and thus will be displayed on overview"""
+ if pyfile not in self.active:
+ self.active.append(pyfile)
+
+ def finishFile(self, pyfile):
+ if pyfile in self.active:
+ self.active.remove(pyfile)
+
+ pyfile.finishIfDone()
+
+ def run(self):
+ try:
+ try:
+ self.kwargs["thread"] = self
+ self.f(*self.args, **self.kwargs)
+ except TypeError, e:
+ #dirty method to filter out exceptions
+ if "unexpected keyword argument 'thread'" not in e.args[0]:
+ raise
+
+ del self.kwargs["thread"]
+ self.f(*self.args, **self.kwargs)
+ finally:
+ local = copy(self.active)
+ for x in local:
+ self.finishFile(x)
+
+ self.m.localThreads.remove(self)
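
The nested try/except TypeError in run() lets addon callbacks that were written without a `thread` keyword keep working. The same dispatch pattern in isolation, with two made-up callbacks for illustration:

def call_with_optional_thread(func, thread, *args, **kwargs):
    kwargs["thread"] = thread
    try:
        return func(*args, **kwargs)
    except TypeError as e:
        # only swallow the "unexpected keyword" error, re-raise everything else
        if "unexpected keyword argument 'thread'" not in str(e):
            raise
        del kwargs["thread"]
        return func(*args, **kwargs)

def legacy_callback(url):                    # knows nothing about threads
    return "handled %s" % url

def new_callback(url, thread=None):          # can track files on the thread
    return "handled %s on %r" % (url, thread)

print(call_with_optional_thread(legacy_callback, "worker-1", "http://example.com"))
print(call_with_optional_thread(new_callback, "worker-1", "http://example.com"))
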
diff --git a/pyload/manager/thread/AddonThread.py b/pyload/manager/thread/AddonThread.py
deleted file mode 100644
index a84856b0a..000000000
--- a/pyload/manager/thread/AddonThread.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from Queue import Queue
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import sleep, time, strftime, gmtime
-from traceback import print_exc, format_exc
-from pprint import pformat
-from sys import exc_info, exc_clear
-from copy import copy
-from types import MethodType
-
-from pycurl import error
-
-from pyload.manager.thread.PluginThread import PluginThread
-
-
-class AddonThread(PluginThread):
- """thread for addons"""
-
- #--------------------------------------------------------------------------
- def __init__(self, m, function, args, kwargs):
- """Constructor"""
- PluginThread.__init__(self, m)
-
- self.f = function
- self.args = args
- self.kwargs = kwargs
-
- self.active = []
-
- m.localThreads.append(self)
-
- self.start()
-
- def getActiveFiles(self):
- return self.active
-
- def addActive(self, pyfile):
- """ Adds a pyfile to active list and thus will be displayed on overview"""
- if pyfile not in self.active:
- self.active.append(pyfile)
-
- def finishFile(self, pyfile):
- if pyfile in self.active:
- self.active.remove(pyfile)
-
- pyfile.finishIfDone()
-
- def run(self):
- try:
- try:
- self.kwargs["thread"] = self
- self.f(*self.args, **self.kwargs)
- except TypeError, e:
- #dirty method to filter out exceptions
- if "unexpected keyword argument 'thread'" not in e.args[0]:
- raise
-
- del self.kwargs["thread"]
- self.f(*self.args, **self.kwargs)
- finally:
- local = copy(self.active)
- for x in local:
- self.finishFile(x)
-
- self.m.localThreads.remove(self)
diff --git a/pyload/manager/thread/Decrypter.py b/pyload/manager/thread/Decrypter.py
new file mode 100644
index 000000000..51544d1b9
--- /dev/null
+++ b/pyload/manager/thread/Decrypter.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+# @author: RaNaN
+
+from Queue import Queue
+from threading import Thread
+from os import listdir, stat
+from os.path import join
+from time import sleep, time, strftime, gmtime
+from traceback import print_exc, format_exc
+from pprint import pformat
+from sys import exc_info, exc_clear
+from copy import copy
+from types import MethodType
+
+from pycurl import error
+
+from pyload.manager.thread.Plugin import PluginThread
+from pyload.plugin.Plugin import Abort, Fail, Retry
+
+
+class DecrypterThread(PluginThread):
+ """thread for decrypting"""
+
+ def __init__(self, manager, pyfile):
+ """constructor"""
+ PluginThread.__init__(self, manager)
+
+ self.active = pyfile
+ manager.localThreads.append(self)
+
+ pyfile.setStatus("decrypting")
+
+ self.start()
+
+ def getActiveFiles(self):
+ return [self.active]
+
+ def run(self):
+ """run method"""
+
+ pyfile = self.active
+ retry = False
+
+ try:
+ self.m.log.info(_("Decrypting starts: %s") % pyfile.name)
+ pyfile.error = ""
+ pyfile.plugin.preprocessing(self)
+
+ except NotImplementedError:
+ self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
+ return
+
+ except Fail, e:
+ msg = e.args[0]
+
+ if msg == "offline":
+ pyfile.setStatus("offline")
+ self.m.log.warning(_("Download is offline: %s") % pyfile.name)
+ else:
+ pyfile.setStatus("failed")
+ self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
+ pyfile.error = msg
+
+ if self.m.core.debug:
+ print_exc()
+ return
+
+ except Abort:
+ self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ pyfile.setStatus("aborted")
+
+ if self.m.core.debug:
+ print_exc()
+ return
+
+ except Retry:
+ self.m.log.info(_("Retrying %s") % pyfile.name)
+ retry = True
+ return self.run()
+
+ except Exception, e:
+ pyfile.setStatus("failed")
+ self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
+ pyfile.error = str(e)
+
+ if self.m.core.debug:
+ print_exc()
+ self.writeDebugReport(pyfile)
+
+ return
+
+ finally:
+ if not retry:
+ pyfile.release()
+ self.active = False
+ self.m.core.files.save()
+ self.m.localThreads.remove(self)
+ exc_clear()
+
+ if not retry:
+ pyfile.delete()
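
The chain of except blocks above effectively maps plugin exceptions to a file status, with Retry re-entering run(). A condensed, self-contained sketch of that mapping; the exception classes here are local stand-ins for the ones imported from pyload.plugin.Plugin:

class Abort(Exception): pass
class Fail(Exception): pass
class Retry(Exception): pass

def decrypt_status(decrypt, max_retries=3):
    """Run decrypt() and return the resulting status string."""
    for _attempt in range(max_retries):
        try:
            decrypt()
            return "finished"
        except Fail as e:
            return "offline" if e.args and e.args[0] == "offline" else "failed"
        except Abort:
            return "aborted"
        except Retry:
            continue                 # comparable to DecrypterThread calling run() again
        except Exception:
            return "failed"
    return "failed"

def offline_container():
    raise Fail("offline")

print(decrypt_status(offline_container))     # offline
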
diff --git a/pyload/manager/thread/DecrypterThread.py b/pyload/manager/thread/DecrypterThread.py
deleted file mode 100644
index 003b5f082..000000000
--- a/pyload/manager/thread/DecrypterThread.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from Queue import Queue
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import sleep, time, strftime, gmtime
-from traceback import print_exc, format_exc
-from pprint import pformat
-from sys import exc_info, exc_clear
-from copy import copy
-from types import MethodType
-
-from pycurl import error
-
-from pyload.manager.thread.PluginThread import PluginThread
-from pyload.plugins.Plugin import Abort, Fail, Retry
-
-
-class DecrypterThread(PluginThread):
- """thread for decrypting"""
-
- def __init__(self, manager, pyfile):
- """constructor"""
- PluginThread.__init__(self, manager)
-
- self.active = pyfile
- manager.localThreads.append(self)
-
- pyfile.setStatus("decrypting")
-
- self.start()
-
- def getActiveFiles(self):
- return [self.active]
-
- def run(self):
- """run method"""
-
- pyfile = self.active
- retry = False
-
- try:
- self.m.log.info(_("Decrypting starts: %s") % pyfile.name)
- pyfile.error = ""
- pyfile.plugin.preprocessing(self)
-
- except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
- return
-
- except Fail, e:
- msg = e.args[0]
-
- if msg == "offline":
- pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
- else:
- pyfile.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
- pyfile.error = msg
-
- if self.m.core.debug:
- print_exc()
- return
-
- except Abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
- pyfile.setStatus("aborted")
-
- if self.m.core.debug:
- print_exc()
- return
-
- except Retry:
- self.m.log.info(_("Retrying %s") % pyfile.name)
- retry = True
- return self.run()
-
- except Exception, e:
- pyfile.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
- pyfile.error = str(e)
-
- if self.m.core.debug:
- print_exc()
- self.writeDebugReport(pyfile)
-
- return
-
- finally:
- if not retry:
- pyfile.release()
- self.active = False
- self.m.core.files.save()
- self.m.localThreads.remove(self)
- exc_clear()
-
- if not retry:
- pyfile.delete()
diff --git a/pyload/manager/thread/Download.py b/pyload/manager/thread/Download.py
new file mode 100644
index 000000000..c7d21a4ba
--- /dev/null
+++ b/pyload/manager/thread/Download.py
@@ -0,0 +1,213 @@
+# -*- coding: utf-8 -*-
+# @author: RaNaN
+
+from Queue import Queue
+from threading import Thread
+from os import listdir, stat
+from os.path import join
+from time import sleep, time, strftime, gmtime
+from traceback import print_exc, format_exc
+from pprint import pformat
+from sys import exc_info, exc_clear
+from copy import copy
+from types import MethodType
+
+from pycurl import error
+
+from pyload.manager.thread.Plugin import PluginThread
+from pyload.plugin.Plugin import Abort, Fail, Reconnect, Retry, SkipDownload
+
+
+class DownloadThread(PluginThread):
+ """thread for downloading files from 'real' hoster plugins"""
+
+ #--------------------------------------------------------------------------
+ def __init__(self, manager):
+ """Constructor"""
+ PluginThread.__init__(self, manager)
+
+ self.queue = Queue() #: job queue
+ self.active = False
+
+ self.start()
+
+ #--------------------------------------------------------------------------
+ def run(self):
+ """run method"""
+ pyfile = None
+
+ while True:
+ del pyfile
+ self.active = self.queue.get()
+ pyfile = self.active
+
+ if self.active == "quit":
+ self.active = False
+ self.m.threads.remove(self)
+ return True
+
+ try:
+ if not pyfile.hasPlugin():
+ continue
+ #this pyfile was deleted while queueing
+
+ pyfile.plugin.checkForSameFiles(starting=True)
+ self.m.log.info(_("Download starts: %s" % pyfile.name))
+
+ # start download
+ self.m.core.addonManager.downloadPreparing(pyfile)
+ pyfile.error = ""
+ pyfile.plugin.preprocessing(self)
+
+ self.m.log.info(_("Download finished: %s") % pyfile.name)
+ self.m.core.addonManager.downloadFinished(pyfile)
+ self.m.core.files.checkPackageFinished(pyfile)
+
+ except NotImplementedError:
+ self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
+ pyfile.setStatus("failed")
+ pyfile.error = "Plugin does not work"
+ self.clean(pyfile)
+ continue
+
+ except Abort:
+ try:
+ self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ except Exception:
+ pass
+
+ pyfile.setStatus("aborted")
+
+ if self.m.core.debug:
+ print_exc()
+
+ self.clean(pyfile)
+ continue
+
+ except Reconnect:
+ self.queue.put(pyfile)
+ #pyfile.req.clearCookies()
+
+ while self.m.reconnecting.isSet():
+ sleep(0.5)
+
+ continue
+
+ except Retry, e:
+ reason = e.args[0]
+ self.m.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
+ self.queue.put(pyfile)
+ continue
+
+ except Fail, e:
+ msg = e.args[0]
+
+ if msg == "offline":
+ pyfile.setStatus("offline")
+ self.m.log.warning(_("Download is offline: %s") % pyfile.name)
+ elif msg == "temp. offline":
+ pyfile.setStatus("temp. offline")
+ self.m.log.warning(_("Download is temporary offline: %s") % pyfile.name)
+ else:
+ pyfile.setStatus("failed")
+ self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
+ pyfile.error = msg
+
+ if self.m.core.debug:
+ print_exc()
+
+ self.m.core.addonManager.downloadFailed(pyfile)
+ self.clean(pyfile)
+ continue
+
+ except error, e:
+ if len(e.args) == 2:
+ code, msg = e.args
+ else:
+ code = 0
+ msg = e.args
+
+ self.m.log.debug("pycurl exception %s: %s" % (code, msg))
+
+ if code in (7, 18, 28, 52, 56):
+ self.m.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
+ wait = time() + 60
+
+ pyfile.waitUntil = wait
+ pyfile.setStatus("waiting")
+ while time() < wait:
+ sleep(1)
+ if pyfile.abort:
+ break
+
+ if pyfile.abort:
+ self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ pyfile.setStatus("aborted")
+
+ self.clean(pyfile)
+ else:
+ self.queue.put(pyfile)
+
+ continue
+
+ else:
+ pyfile.setStatus("failed")
+ self.m.log.error("pycurl error %s: %s" % (code, msg))
+ if self.m.core.debug:
+ print_exc()
+ self.writeDebugReport(pyfile)
+
+ self.m.core.addonManager.downloadFailed(pyfile)
+
+ self.clean(pyfile)
+ continue
+
+ except SkipDownload, e:
+ pyfile.setStatus("skipped")
+
+ self.m.log.info(
+ _("Download skipped: %(name)s due to %(plugin)s") % {"name": pyfile.name, "plugin": e.message})
+
+ self.clean(pyfile)
+
+ self.m.core.files.checkPackageFinished(pyfile)
+
+ self.active = False
+ self.m.core.files.save()
+
+ continue
+
+
+ except Exception, e:
+ pyfile.setStatus("failed")
+ self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
+ pyfile.error = str(e)
+
+ if self.m.core.debug:
+ print_exc()
+ self.writeDebugReport(pyfile)
+
+ self.m.core.addonManager.downloadFailed(pyfile)
+ self.clean(pyfile)
+ continue
+
+ finally:
+ self.m.core.files.save()
+ pyfile.checkIfProcessed()
+ exc_clear()
+
+ #pyfile.plugin.req.clean()
+
+ self.active = False
+ pyfile.finishIfDone()
+ self.m.core.files.save()
+
+
+ def put(self, job):
+ """assing job to thread"""
+ self.queue.put(job)
+
+
+ def stop(self):
+ """stops the thread"""
+ self.put("quit")
diff --git a/pyload/manager/thread/DownloadThread.py b/pyload/manager/thread/DownloadThread.py
deleted file mode 100644
index d876000da..000000000
--- a/pyload/manager/thread/DownloadThread.py
+++ /dev/null
@@ -1,213 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from Queue import Queue
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import sleep, time, strftime, gmtime
-from traceback import print_exc, format_exc
-from pprint import pformat
-from sys import exc_info, exc_clear
-from copy import copy
-from types import MethodType
-
-from pycurl import error
-
-from pyload.manager.thread.PluginThread import PluginThread
-from pyload.plugins.Plugin import Abort, Fail, Reconnect, Retry, SkipDownload
-
-
-class DownloadThread(PluginThread):
- """thread for downloading files from 'real' hoster plugins"""
-
- #--------------------------------------------------------------------------
- def __init__(self, manager):
- """Constructor"""
- PluginThread.__init__(self, manager)
-
- self.queue = Queue() #: job queue
- self.active = False
-
- self.start()
-
- #--------------------------------------------------------------------------
- def run(self):
- """run method"""
- pyfile = None
-
- while True:
- del pyfile
- self.active = self.queue.get()
- pyfile = self.active
-
- if self.active == "quit":
- self.active = False
- self.m.threads.remove(self)
- return True
-
- try:
- if not pyfile.hasPlugin():
- continue
- #this pyfile was deleted while queueing
-
- pyfile.plugin.checkForSameFiles(starting=True)
- self.m.log.info(_("Download starts: %s" % pyfile.name))
-
- # start download
- self.m.core.addonManager.downloadPreparing(pyfile)
- pyfile.error = ""
- pyfile.plugin.preprocessing(self)
-
- self.m.log.info(_("Download finished: %s") % pyfile.name)
- self.m.core.addonManager.downloadFinished(pyfile)
- self.m.core.files.checkPackageFinished(pyfile)
-
- except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
- pyfile.setStatus("failed")
- pyfile.error = "Plugin does not work"
- self.clean(pyfile)
- continue
-
- except Abort:
- try:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
- except Exception:
- pass
-
- pyfile.setStatus("aborted")
-
- if self.m.core.debug:
- print_exc()
-
- self.clean(pyfile)
- continue
-
- except Reconnect:
- self.queue.put(pyfile)
- #pyfile.req.clearCookies()
-
- while self.m.reconnecting.isSet():
- sleep(0.5)
-
- continue
-
- except Retry, e:
- reason = e.args[0]
- self.m.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
- self.queue.put(pyfile)
- continue
-
- except Fail, e:
- msg = e.args[0]
-
- if msg == "offline":
- pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
- elif msg == "temp. offline":
- pyfile.setStatus("temp. offline")
- self.m.log.warning(_("Download is temporary offline: %s") % pyfile.name)
- else:
- pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
- pyfile.error = msg
-
- if self.m.core.debug:
- print_exc()
-
- self.m.core.addonManager.downloadFailed(pyfile)
- self.clean(pyfile)
- continue
-
- except error, e:
- if len(e.args) == 2:
- code, msg = e.args
- else:
- code = 0
- msg = e.args
-
- self.m.log.debug("pycurl exception %s: %s" % (code, msg))
-
- if code in (7, 18, 28, 52, 56):
- self.m.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
- wait = time() + 60
-
- pyfile.waitUntil = wait
- pyfile.setStatus("waiting")
- while time() < wait:
- sleep(1)
- if pyfile.abort:
- break
-
- if pyfile.abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
- pyfile.setStatus("aborted")
-
- self.clean(pyfile)
- else:
- self.queue.put(pyfile)
-
- continue
-
- else:
- pyfile.setStatus("failed")
- self.m.log.error("pycurl error %s: %s" % (code, msg))
- if self.m.core.debug:
- print_exc()
- self.writeDebugReport(pyfile)
-
- self.m.core.addonManager.downloadFailed(pyfile)
-
- self.clean(pyfile)
- continue
-
- except SkipDownload, e:
- pyfile.setStatus("skipped")
-
- self.m.log.info(
- _("Download skipped: %(name)s due to %(plugin)s") % {"name": pyfile.name, "plugin": e.message})
-
- self.clean(pyfile)
-
- self.m.core.files.checkPackageFinished(pyfile)
-
- self.active = False
- self.m.core.files.save()
-
- continue
-
-
- except Exception, e:
- pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
- pyfile.error = str(e)
-
- if self.m.core.debug:
- print_exc()
- self.writeDebugReport(pyfile)
-
- self.m.core.addonManager.downloadFailed(pyfile)
- self.clean(pyfile)
- continue
-
- finally:
- self.m.core.files.save()
- pyfile.checkIfProcessed()
- exc_clear()
-
- #pyfile.plugin.req.clean()
-
- self.active = False
- pyfile.finishIfDone()
- self.m.core.files.save()
-
-
- def put(self, job):
- """assing job to thread"""
- self.queue.put(job)
-
-
- def stop(self):
- """stops the thread"""
- self.put("quit")
diff --git a/pyload/manager/thread/Info.py b/pyload/manager/thread/Info.py
new file mode 100644
index 000000000..4526a07ed
--- /dev/null
+++ b/pyload/manager/thread/Info.py
@@ -0,0 +1,225 @@
+# -*- coding: utf-8 -*-
+# @author: RaNaN
+
+from Queue import Queue
+from threading import Thread
+from os import listdir, stat
+from os.path import join
+from time import sleep, time, strftime, gmtime
+from traceback import print_exc, format_exc
+from pprint import pformat
+from sys import exc_info, exc_clear
+from copy import copy
+from types import MethodType
+
+from pycurl import error
+
+from pyload.datatype.File import PyFile
+from pyload.manager.thread.Plugin import PluginThread
+from pyload.api import OnlineStatus
+
+
+class InfoThread(PluginThread):
+
+ def __init__(self, manager, data, pid=-1, rid=-1, add=False):
+ """Constructor"""
+ PluginThread.__init__(self, manager)
+
+ self.data = data
+ self.pid = pid # package id
+ # [ .. (name, plugin) .. ]
+
+ self.rid = rid #result id
+ self.add = add #add packages instead of return result
+
+ self.cache = [] #accumulated data
+
+ self.start()
+
+ def run(self):
+ """run method"""
+
+ plugins = {}
+ container = []
+
+        for url, plugintype, pluginname in self.data:
+ try:
+ plugins[plugintype][pluginname].append(url)
+ except Exception:
+ plugins[plugintype][pluginname] = [url]
+
+ # filter out container plugins
+ for name in self.m.core.pluginManager.containerPlugins:
+ if name in plugins:
+ container.extend([(name, url) for url in plugins[name]])
+
+ del plugins[name]
+
+ #directly write to database
+ if self.pid > -1:
+ for plugintype, pluginname, urls in plugins.iteritems():
+ plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
+ if hasattr(plugin, "getInfo"):
+ self.fetchForPlugin(pluginname, plugin, urls, self.updateDB)
+ self.m.core.files.save()
+
+ elif self.add:
+ for plugintype, pluginname, urls in plugins.iteritems():
+ plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
+ if hasattr(plugin, "getInfo"):
+ self.fetchForPlugin(pluginname, plugin, urls, self.updateCache, True)
+
+ else:
+ #generate default result
+ result = [(url, 0, 3, url) for url in urls]
+
+ self.updateCache(pluginname, result)
+
+ packs = parseNames([(name, url) for name, x, y, url in self.cache])
+
+ self.m.log.debug("Fetched and generated %d packages" % len(packs))
+
+ for k, v in packs:
+ self.m.core.api.addPackage(k, v)
+
+ #empty cache
+ del self.cache[:]
+
+ else: #post the results
+
+
+ for name, url in container:
+ #attach container content
+ try:
+ data = self.decryptContainer(name, url)
+ except Exception:
+ print_exc()
+ self.m.log.error("Could not decrypt container.")
+ data = []
+
+ for url, plugintype, pluginname in data:
+ try:
+ plugins[plugintype][pluginname].append(url)
+ except Exception:
+ plugins[plugintype][pluginname] = [url]
+
+ self.m.infoResults[self.rid] = {}
+
+ for plugintype, pluginname, urls in plugins.iteritems():
+ plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
+ if hasattr(plugin, "getInfo"):
+ self.fetchForPlugin(pluginname, plugin, urls, self.updateResult, True)
+
+ #force to process cache
+ if self.cache:
+ self.updateResult(pluginname, [], True)
+
+ else:
+ #generate default result
+ result = [(url, 0, 3, url) for url in urls]
+
+ self.updateResult(pluginname, result, True)
+
+ self.m.infoResults[self.rid]["ALL_INFO_FETCHED"] = {}
+
+ self.m.timestamp = time() + 5 * 60
+
+
+ def updateDB(self, plugin, result):
+ self.m.core.files.updateFileInfo(result, self.pid)
+
+ def updateResult(self, plugin, result, force=False):
+ #parse package name and generate result
+ #accumulate results
+
+ self.cache.extend(result)
+
+ if len(self.cache) >= 20 or force:
+ #used for package generating
+ tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size))))
+ for name, size, status, url in self.cache]
+
+ data = parseNames(tmp)
+ result = {}
+ for k, v in data.iteritems():
+ for url, status in v:
+ status.packagename = k
+ result[url] = status
+
+ self.m.setInfoResults(self.rid, result)
+
+ self.cache = []
+
+ def updateCache(self, plugin, result):
+ self.cache.extend(result)
+
+ def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None):
+ try:
+ result = [] #result loaded from cache
+ process = [] #urls to process
+ for url in urls:
+ if url in self.m.infoCache:
+ result.append(self.m.infoCache[url])
+ else:
+ process.append(url)
+
+ if result:
+ self.m.log.debug("Fetched %d values from cache for %s" % (len(result), pluginname))
+ cb(pluginname, result)
+
+ if process:
+ self.m.log.debug("Run Info Fetching for %s" % pluginname)
+ for result in plugin.getInfo(process):
+ #result = [ .. (name, size, status, url) .. ]
+ if not type(result) == list:
+ result = [result]
+
+ for res in result:
+ self.m.infoCache[res[3]] = res #: why don't assign res dict directly?
+
+ cb(pluginname, result)
+
+ self.m.log.debug("Finished Info Fetching for %s" % pluginname)
+ except Exception, e:
+ self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") %
+ {"name": pluginname, "err": str(e)})
+ if self.m.core.debug:
+ print_exc()
+
+ # generate default results
+ if err:
+ result = [(url, 0, 3, url) for url in urls]
+ cb(pluginname, result)
+
+
+ def decryptContainer(self, plugin, url):
+ data = []
+ # only works on container plugins
+
+ self.m.log.debug("Pre decrypting %s with %s" % (url, plugin))
+
+ # dummy pyfile
+ pyfile = PyFile(self.m.core.files, -1, url, url, 0, 0, "", plugin, -1, -1)
+
+ pyfile.initPlugin()
+
+ # little plugin lifecycle
+ try:
+ pyfile.plugin.setup()
+ pyfile.plugin.loadToDisk()
+ pyfile.plugin.decrypt(pyfile)
+ pyfile.plugin.deleteTmp()
+
+ for pack in pyfile.plugin.packages:
+ pyfile.plugin.urls.extend(pack[1])
+
+ data = self.m.core.pluginManager.parseUrls(pyfile.plugin.urls)
+
+ self.m.log.debug("Got %d links." % len(data))
+
+ except Exception, e:
+ self.m.log.debug("Pre decrypting error: %s" % str(e))
+ finally:
+ pyfile.release()
+
+ return data
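
fetchForPlugin() above reports cache hits immediately and only sends unknown URLs to the plugin's getInfo() generator, caching each fresh result by URL. A reduced sketch of that split, where fake_get_info stands in for a real plugin:

def fetch_with_cache(get_info, urls, cache, report):
    """get_info(urls) yields (name, size, status, url) tuples; report() receives
    cached and freshly fetched batches alike."""
    cached = [cache[u] for u in urls if u in cache]
    missing = [u for u in urls if u not in cache]

    if cached:
        report(cached)

    for result in (get_info(missing) if missing else []):
        batch = result if isinstance(result, list) else [result]
        for name, size, status, url in batch:
            cache[url] = (name, size, status, url)    # keyed by url, like infoCache
        report(batch)

def fake_get_info(urls):
    for u in urls:
        yield (u.rsplit("/", 1)[-1], 1024, 2, u)

def report(batch):
    print(batch)

cache = {}
fetch_with_cache(fake_get_info, ["http://host/a", "http://host/b"], cache, report)
fetch_with_cache(fake_get_info, ["http://host/a"], cache, report)   # second call is served from the cache
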
diff --git a/pyload/manager/thread/InfoThread.py b/pyload/manager/thread/InfoThread.py
deleted file mode 100644
index 99bf97fc8..000000000
--- a/pyload/manager/thread/InfoThread.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from Queue import Queue
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import sleep, time, strftime, gmtime
-from traceback import print_exc, format_exc
-from pprint import pformat
-from sys import exc_info, exc_clear
-from copy import copy
-from types import MethodType
-
-from pycurl import error
-
-from pyload.datatype.PyFile import PyFile
-from pyload.manager.thread.PluginThread import PluginThread
-from pyload.api import OnlineStatus
-
-
-class InfoThread(PluginThread):
-
- def __init__(self, manager, data, pid=-1, rid=-1, add=False):
- """Constructor"""
- PluginThread.__init__(self, manager)
-
- self.data = data
- self.pid = pid # package id
- # [ .. (name, plugin) .. ]
-
- self.rid = rid #result id
- self.add = add #add packages instead of return result
-
- self.cache = [] #accumulated data
-
- self.start()
-
- def run(self):
- """run method"""
-
- plugins = {}
- container = []
-
- for url, plugintype, pluginname in data:
- try:
- plugins[plugintype][pluginname].append(url)
- except Exception:
- plugins[plugintype][pluginname] = [url]
-
- # filter out container plugins
- for name in self.m.core.pluginManager.containerPlugins:
- if name in plugins:
- container.extend([(name, url) for url in plugins[name]])
-
- del plugins[name]
-
- #directly write to database
- if self.pid > -1:
- for plugintype, pluginname, urls in plugins.iteritems():
- plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
- if hasattr(plugin, "getInfo"):
- self.fetchForPlugin(pluginname, plugin, urls, self.updateDB)
- self.m.core.files.save()
-
- elif self.add:
- for plugintype, pluginname, urls in plugins.iteritems():
- plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
- if hasattr(plugin, "getInfo"):
- self.fetchForPlugin(pluginname, plugin, urls, self.updateCache, True)
-
- else:
- #generate default result
- result = [(url, 0, 3, url) for url in urls]
-
- self.updateCache(pluginname, result)
-
- packs = parseNames([(name, url) for name, x, y, url in self.cache])
-
- self.m.log.debug("Fetched and generated %d packages" % len(packs))
-
- for k, v in packs:
- self.m.core.api.addPackage(k, v)
-
- #empty cache
- del self.cache[:]
-
- else: #post the results
-
-
- for name, url in container:
- #attach container content
- try:
- data = self.decryptContainer(name, url)
- except Exception:
- print_exc()
- self.m.log.error("Could not decrypt container.")
- data = []
-
- for url, plugintype, pluginname in data:
- try:
- plugins[plugintype][pluginname].append(url)
- except Exception:
- plugins[plugintype][pluginname] = [url]
-
- self.m.infoResults[self.rid] = {}
-
- for plugintype, pluginname, urls in plugins.iteritems():
- plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
- if hasattr(plugin, "getInfo"):
- self.fetchForPlugin(pluginname, plugin, urls, self.updateResult, True)
-
- #force to process cache
- if self.cache:
- self.updateResult(pluginname, [], True)
-
- else:
- #generate default result
- result = [(url, 0, 3, url) for url in urls]
-
- self.updateResult(pluginname, result, True)
-
- self.m.infoResults[self.rid]["ALL_INFO_FETCHED"] = {}
-
- self.m.timestamp = time() + 5 * 60
-
-
- def updateDB(self, plugin, result):
- self.m.core.files.updateFileInfo(result, self.pid)
-
- def updateResult(self, plugin, result, force=False):
- #parse package name and generate result
- #accumulate results
-
- self.cache.extend(result)
-
- if len(self.cache) >= 20 or force:
- #used for package generating
- tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size))))
- for name, size, status, url in self.cache]
-
- data = parseNames(tmp)
- result = {}
- for k, v in data.iteritems():
- for url, status in v:
- status.packagename = k
- result[url] = status
-
- self.m.setInfoResults(self.rid, result)
-
- self.cache = []
-
- def updateCache(self, plugin, result):
- self.cache.extend(result)
-
- def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None):
- try:
- result = [] #result loaded from cache
- process = [] #urls to process
- for url in urls:
- if url in self.m.infoCache:
- result.append(self.m.infoCache[url])
- else:
- process.append(url)
-
- if result:
- self.m.log.debug("Fetched %d values from cache for %s" % (len(result), pluginname))
- cb(pluginname, result)
-
- if process:
- self.m.log.debug("Run Info Fetching for %s" % pluginname)
- for result in plugin.getInfo(process):
- #result = [ .. (name, size, status, url) .. ]
- if not type(result) == list:
- result = [result]
-
- for res in result:
- self.m.infoCache[res[3]] = res #: why don't assign res dict directly?
-
- cb(pluginname, result)
-
- self.m.log.debug("Finished Info Fetching for %s" % pluginname)
- except Exception, e:
- self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") %
- {"name": pluginname, "err": str(e)})
- if self.m.core.debug:
- print_exc()
-
- # generate default results
- if err:
- result = [(url, 0, 3, url) for url in urls]
- cb(pluginname, result)
-
-
- def decryptContainer(self, plugin, url):
- data = []
- # only works on container plugins
-
- self.m.log.debug("Pre decrypting %s with %s" % (url, plugin))
-
- # dummy pyfile
- pyfile = PyFile(self.m.core.files, -1, url, url, 0, 0, "", plugin, -1, -1)
-
- pyfile.initPlugin()
-
- # little plugin lifecycle
- try:
- pyfile.plugin.setup()
- pyfile.plugin.loadToDisk()
- pyfile.plugin.decrypt(pyfile)
- pyfile.plugin.deleteTmp()
-
- for pack in pyfile.plugin.packages:
- pyfile.plugin.urls.extend(pack[1])
-
- data = self.m.core.pluginManager.parseUrls(pyfile.plugin.urls)
-
- self.m.log.debug("Got %d links." % len(data))
-
- except Exception, e:
- self.m.log.debug("Pre decrypting error: %s" % str(e))
- finally:
- pyfile.release()
-
- return data
diff --git a/pyload/manager/thread/Plugin.py b/pyload/manager/thread/Plugin.py
new file mode 100644
index 000000000..70ee747a8
--- /dev/null
+++ b/pyload/manager/thread/Plugin.py
@@ -0,0 +1,130 @@
+# -*- coding: utf-8 -*-
+# @author: RaNaN
+
+from Queue import Queue
+from threading import Thread
+from os import listdir, stat
+from os.path import join
+from time import sleep, time, strftime, gmtime
+from traceback import print_exc, format_exc
+from pprint import pformat
+from sys import exc_info, exc_clear
+from copy import copy
+from types import MethodType
+
+from pycurl import error
+
+from pyload.datatype.File import PyFile
+from pyload.plugin.Plugin import Abort, Fail, Reconnect, Retry, SkipDownload
+from pyload.utils.packagetools import parseNames
+from pyload.utils import safe_join
+from pyload.api import OnlineStatus
+
+class PluginThread(Thread):
+ """abstract base class for thread types"""
+
+ #--------------------------------------------------------------------------
+ def __init__(self, manager):
+ """Constructor"""
+ Thread.__init__(self)
+ self.setDaemon(True)
+ self.m = manager #thread manager
+
+
+ def writeDebugReport(self, pyfile):
+ """ writes a
+ :return:
+ """
+
+ dump_name = "debug_%s_%s.zip" % (pyfile.pluginname, strftime("%d-%m-%Y_%H-%M-%S"))
+ dump = self.getDebugDump(pyfile)
+
+ try:
+ import zipfile
+
+ zip = zipfile.ZipFile(dump_name, "w")
+
+ for f in listdir(join("tmp", pyfile.pluginname)):
+ try:
+ # avoid encoding errors
+ zip.write(join("tmp", pyfile.pluginname, f), safe_join(pyfile.pluginname, f))
+ except Exception:
+ pass
+
+ info = zipfile.ZipInfo(safe_join(pyfile.pluginname, "debug_Report.txt"), gmtime())
+ info.external_attr = 0644 << 16L # change permissions
+
+ zip.writestr(info, dump)
+ zip.close()
+
+ if not stat(dump_name).st_size:
+ raise Exception("Empty Zipfile")
+
+ except Exception, e:
+ self.m.log.debug("Error creating zip file: %s" % e)
+
+ dump_name = dump_name.replace(".zip", ".txt")
+ f = open(dump_name, "wb")
+ f.write(dump)
+ f.close()
+
+ self.m.core.log.info("Debug Report written to %s" % dump_name)
+
+ def getDebugDump(self, pyfile):
+ dump = "pyLoad %s Debug Report of %s %s \n\nTRACEBACK:\n %s \n\nFRAMESTACK:\n" % (
+ self.m.core.api.getServerVersion(), pyfile.pluginname, pyfile.plugin.__version, format_exc())
+
+ tb = exc_info()[2]
+ stack = []
+ while tb:
+ stack.append(tb.tb_frame)
+ tb = tb.tb_next
+
+ for frame in stack[1:]:
+ dump += "\nFrame %s in %s at line %s\n" % (frame.f_code.co_name,
+ frame.f_code.co_filename,
+ frame.f_lineno)
+
+ for key, value in frame.f_locals.items():
+ dump += "\t%20s = " % key
+ try:
+ dump += pformat(value) + "\n"
+ except Exception, e:
+ dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
+
+ del frame
+
+ del stack #delete it just to be sure...
+
+ dump += "\n\nPLUGIN OBJECT DUMP: \n\n"
+
+ for name in dir(pyfile.plugin):
+ attr = getattr(pyfile.plugin, name)
+ if not name.endswith("__") and type(attr) != MethodType:
+ dump += "\t%20s = " % name
+ try:
+ dump += pformat(attr) + "\n"
+ except Exception, e:
+ dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
+
+ dump += "\nPYFILE OBJECT DUMP: \n\n"
+
+ for name in dir(pyfile):
+ attr = getattr(pyfile, name)
+ if not name.endswith("__") and type(attr) != MethodType:
+ dump += "\t%20s = " % name
+ try:
+ dump += pformat(attr) + "\n"
+ except Exception, e:
+ dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
+
+ if pyfile.pluginname in self.m.core.config.plugin:
+ dump += "\n\nCONFIG: \n\n"
+ dump += pformat(self.m.core.config.plugin[pyfile.pluginname]) + "\n"
+
+ return dump
+
+ def clean(self, pyfile):
+ """ set thread unactive and release pyfile """
+ self.active = False
+ pyfile.release()
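
getDebugDump() walks the traceback of the exception being handled and pretty-prints the locals of every frame. A trimmed-down, runnable version of that frame walk without any pyfile or plugin objects:

import sys
from pprint import pformat
from traceback import format_exc

def dump_frames():
    dump = "TRACEBACK:\n%s\nFRAMESTACK:\n" % format_exc()
    tb = sys.exc_info()[2]
    stack = []
    while tb:
        stack.append(tb.tb_frame)
        tb = tb.tb_next
    for frame in stack[1:]:                    # skip the frame that caught the exception
        dump += "\nFrame %s in %s at line %s\n" % (
            frame.f_code.co_name, frame.f_code.co_filename, frame.f_lineno)
        for key, value in frame.f_locals.items():
            try:
                dump += "\t%20s = %s\n" % (key, pformat(value))
            except Exception as e:
                dump += "\t%20s = <unprintable: %s>\n" % (key, e)
    return dump

def boom():
    secret = {"answer": 42}
    raise ValueError("demo: %s" % secret)

try:
    boom()
except ValueError:
    print(dump_frames())
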
diff --git a/pyload/manager/thread/PluginThread.py b/pyload/manager/thread/PluginThread.py
deleted file mode 100644
index 3cdae122f..000000000
--- a/pyload/manager/thread/PluginThread.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from Queue import Queue
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import sleep, time, strftime, gmtime
-from traceback import print_exc, format_exc
-from pprint import pformat
-from sys import exc_info, exc_clear
-from copy import copy
-from types import MethodType
-
-from pycurl import error
-
-from pyload.datatype.PyFile import PyFile
-from pyload.plugins.Plugin import Abort, Fail, Reconnect, Retry, SkipDownload
-from pyload.utils.packagetools import parseNames
-from pyload.utils import safe_join
-from pyload.api import OnlineStatus
-
-class PluginThread(Thread):
- """abstract base class for thread types"""
-
- #--------------------------------------------------------------------------
- def __init__(self, manager):
- """Constructor"""
- Thread.__init__(self)
- self.setDaemon(True)
- self.m = manager #thread manager
-
-
- def writeDebugReport(self, pyfile):
- """ writes a
- :return:
- """
-
- dump_name = "debug_%s_%s.zip" % (pyfile.pluginname, strftime("%d-%m-%Y_%H-%M-%S"))
- dump = self.getDebugDump(pyfile)
-
- try:
- import zipfile
-
- zip = zipfile.ZipFile(dump_name, "w")
-
- for f in listdir(join("tmp", pyfile.pluginname)):
- try:
- # avoid encoding errors
- zip.write(join("tmp", pyfile.pluginname, f), safe_join(pyfile.pluginname, f))
- except Exception:
- pass
-
- info = zipfile.ZipInfo(safe_join(pyfile.pluginname, "debug_Report.txt"), gmtime())
- info.external_attr = 0644 << 16L # change permissions
-
- zip.writestr(info, dump)
- zip.close()
-
- if not stat(dump_name).st_size:
- raise Exception("Empty Zipfile")
-
- except Exception, e:
- self.m.log.debug("Error creating zip file: %s" % e)
-
- dump_name = dump_name.replace(".zip", ".txt")
- f = open(dump_name, "wb")
- f.write(dump)
- f.close()
-
- self.m.core.log.info("Debug Report written to %s" % dump_name)
-
- def getDebugDump(self, pyfile):
- dump = "pyLoad %s Debug Report of %s %s \n\nTRACEBACK:\n %s \n\nFRAMESTACK:\n" % (
- self.m.core.api.getServerVersion(), pyfile.pluginname, pyfile.plugin.__version, format_exc())
-
- tb = exc_info()[2]
- stack = []
- while tb:
- stack.append(tb.tb_frame)
- tb = tb.tb_next
-
- for frame in stack[1:]:
- dump += "\nFrame %s in %s at line %s\n" % (frame.f_code.co_name,
- frame.f_code.co_filename,
- frame.f_lineno)
-
- for key, value in frame.f_locals.items():
- dump += "\t%20s = " % key
- try:
- dump += pformat(value) + "\n"
- except Exception, e:
- dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
-
- del frame
-
- del stack #delete it just to be sure...
-
- dump += "\n\nPLUGIN OBJECT DUMP: \n\n"
-
- for name in dir(pyfile.plugin):
- attr = getattr(pyfile.plugin, name)
- if not name.endswith("__") and type(attr) != MethodType:
- dump += "\t%20s = " % name
- try:
- dump += pformat(attr) + "\n"
- except Exception, e:
- dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
-
- dump += "\nPYFILE OBJECT DUMP: \n\n"
-
- for name in dir(pyfile):
- attr = getattr(pyfile, name)
- if not name.endswith("__") and type(attr) != MethodType:
- dump += "\t%20s = " % name
- try:
- dump += pformat(attr) + "\n"
- except Exception, e:
- dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
-
- if pyfile.pluginname in self.m.core.config.plugin:
- dump += "\n\nCONFIG: \n\n"
- dump += pformat(self.m.core.config.plugin[pyfile.pluginname]) + "\n"
-
- return dump
-
- def clean(self, pyfile):
- """ set thread unactive and release pyfile """
- self.active = False
- pyfile.release()
diff --git a/pyload/manager/thread/Server.py b/pyload/manager/thread/Server.py
new file mode 100644
index 000000000..f3f174e74
--- /dev/null
+++ b/pyload/manager/thread/Server.py
@@ -0,0 +1,111 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+from os.path import exists
+
+import os
+import threading
+import logging
+
+core = None
+setup = None
+log = logging.getLogger("log")
+
+class WebServer(threading.Thread):
+ def __init__(self, pycore):
+ global core
+ threading.Thread.__init__(self)
+ self.core = pycore
+ core = pycore
+ self.running = True
+ self.server = pycore.config['webinterface']['server']
+ self.https = pycore.config['webinterface']['https']
+ self.cert = pycore.config["ssl"]["cert"]
+ self.key = pycore.config["ssl"]["key"]
+ self.host = pycore.config['webinterface']['host']
+ self.port = pycore.config['webinterface']['port']
+
+ self.setDaemon(True)
+
+ def run(self):
+ import pyload.webui as webinterface
+ global webinterface
+
+ reset = False
+
+ if self.https and (not exists(self.cert) or not exists(self.key)):
+ log.warning(_("SSL certificates not found."))
+ self.https = False
+
+ if self.server in ("lighttpd", "nginx"):
+ log.warning(_("Sorry, we dropped support for starting %s directly within pyLoad") % self.server)
+ log.warning(_("You can use the threaded server which offers good performance and ssl,"))
+ log.warning(_("of course you can still use your existing %s with pyLoads fastcgi server") % self.server)
+ log.warning(_("sample configs are located in the pyload/web/servers directory"))
+ reset = True
+ elif self.server == "fastcgi":
+ try:
+ import flup
+ except Exception:
+ log.warning(_("Can't use %(server)s, python-flup is not installed!") % {
+ "server": self.server})
+ reset = True
+
+ if reset or self.server == "lightweight":
+ if os.name != "nt":
+ try:
+ import bjoern
+ except Exception, e:
+ log.error(_("Error importing lightweight server: %s") % e)
+ log.warning(_("You need to download and compile bjoern, https://github.com/jonashaag/bjoern"))
+                    log.warning(_("Copy the bjoern.so to the lib folder or use setup.py install"))
+ log.warning(_("Of course you need to be familiar with linux and know how to compile software"))
+ self.server = "builtin"
+ else:
+ self.core.log.info(_("Server set to threaded, due to known performance problems on windows."))
+ self.core.config['webinterface']['server'] = "threaded"
+ self.server = "threaded"
+
+ if self.server == "threaded":
+ self.start_threaded()
+ elif self.server == "fastcgi":
+ self.start_fcgi()
+ elif self.server == "lightweight":
+ self.start_lightweight()
+ else:
+ self.start_builtin()
+
+ def start_builtin(self):
+
+ if self.https:
+ log.warning(_("This server offers no SSL, please consider using threaded instead"))
+
+ self.core.log.info(_("Starting builtin webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
+ webinterface.run_simple(host=self.host, port=self.port)
+
+ def start_threaded(self):
+ if self.https:
+ self.core.log.info(_("Starting threaded SSL webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
+ else:
+ self.cert = ""
+ self.key = ""
+ self.core.log.info(_("Starting threaded webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
+
+ webinterface.run_threaded(host=self.host, port=self.port, cert=self.cert, key=self.key)
+
+ def start_fcgi(self):
+
+ self.core.log.info(_("Starting fastcgi server: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
+ webinterface.run_fcgi(host=self.host, port=self.port)
+
+
+ def start_lightweight(self):
+ if self.https:
+ log.warning(_("This server offers no SSL, please consider using threaded instead"))
+
+ self.core.log.info(_("Starting lightweight webserver (bjoern): %(host)s:%(port)d") % {"host": self.host, "port": self.port})
+ webinterface.run_lightweight(host=self.host, port=self.port)
+
+ def quit(self):
+ self.running = False
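
WebServer.run() is essentially a chain of fallbacks from the configured backend to one that is actually usable. A compressed sketch of that decision order, with boolean flags standing in for the real import probes (flup, bjoern) and the os.name check; it mirrors the order of the checks rather than the full behaviour:

def pick_backend(requested, have_flup=False, have_bjoern=False, windows=False):
    reset = False
    if requested in ("lighttpd", "nginx"):
        reset = True                          # starting these directly is no longer supported
    elif requested == "fastcgi" and not have_flup:
        reset = True                          # python-flup is missing
    if reset or requested == "lightweight":
        if windows:
            return "threaded"                 # known performance problems on windows
        if not have_bjoern:
            return "builtin"                  # bjoern missing or not compiled
    if requested in ("threaded", "fastcgi", "lightweight"):
        return requested
    return "builtin"

assert pick_backend("threaded") == "threaded"
assert pick_backend("nginx", have_bjoern=True) == "builtin"
assert pick_backend("lightweight", windows=True) == "threaded"
assert pick_backend("fastcgi", have_flup=True) == "fastcgi"
print("backend fallbacks behave as expected")
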
diff --git a/pyload/manager/thread/ServerThread.py b/pyload/manager/thread/ServerThread.py
deleted file mode 100644
index a8b95cd56..000000000
--- a/pyload/manager/thread/ServerThread.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-from os.path import exists
-
-import os
-import threading
-import logging
-
-core = None
-setup = None
-log = logging.getLogger("log")
-
-class WebServer(threading.Thread):
- def __init__(self, pycore):
- global core
- threading.Thread.__init__(self)
- self.core = pycore
- core = pycore
- self.running = True
- self.server = pycore.config['webinterface']['server']
- self.https = pycore.config['webinterface']['https']
- self.cert = pycore.config["ssl"]["cert"]
- self.key = pycore.config["ssl"]["key"]
- self.host = pycore.config['webinterface']['host']
- self.port = pycore.config['webinterface']['port']
-
- self.setDaemon(True)
-
- def run(self):
- import pyload.webui as webinterface
- global webinterface
-
- reset = False
-
- if self.https and (not exists(self.cert) or not exists(self.key)):
- log.warning(_("SSL certificates not found."))
- self.https = False
-
- if self.server in ("lighttpd", "nginx"):
- log.warning(_("Sorry, we dropped support for starting %s directly within pyLoad") % self.server)
- log.warning(_("You can use the threaded server which offers good performance and ssl,"))
- log.warning(_("of course you can still use your existing %s with pyLoads fastcgi server") % self.server)
- log.warning(_("sample configs are located in the pyload/web/servers directory"))
- reset = True
- elif self.server == "fastcgi":
- try:
- import flup
- except Exception:
- log.warning(_("Can't use %(server)s, python-flup is not installed!") % {
- "server": self.server})
- reset = True
-
- if reset or self.server == "lightweight":
- if os.name != "nt":
- try:
- import bjoern
- except Exception, e:
- log.error(_("Error importing lightweight server: %s") % e)
- log.warning(_("You need to download and compile bjoern, https://github.com/jonashaag/bjoern"))
- log.warning(_("Copy the boern.so to pyload/lib folder or use setup.py install"))
- log.warning(_("Of course you need to be familiar with linux and know how to compile software"))
- self.server = "builtin"
- else:
- self.core.log.info(_("Server set to threaded, due to known performance problems on windows."))
- self.core.config['webinterface']['server'] = "threaded"
- self.server = "threaded"
-
- if self.server == "threaded":
- self.start_threaded()
- elif self.server == "fastcgi":
- self.start_fcgi()
- elif self.server == "lightweight":
- self.start_lightweight()
- else:
- self.start_builtin()
-
- def start_builtin(self):
-
- if self.https:
- log.warning(_("This server offers no SSL, please consider using threaded instead"))
-
- self.core.log.info(_("Starting builtin webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
- webinterface.run_simple(host=self.host, port=self.port)
-
- def start_threaded(self):
- if self.https:
- self.core.log.info(_("Starting threaded SSL webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
- else:
- self.cert = ""
- self.key = ""
- self.core.log.info(_("Starting threaded webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
-
- webinterface.run_threaded(host=self.host, port=self.port, cert=self.cert, key=self.key)
-
- def start_fcgi(self):
-
- self.core.log.info(_("Starting fastcgi server: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
- webinterface.run_fcgi(host=self.host, port=self.port)
-
-
- def start_lightweight(self):
- if self.https:
- log.warning(_("This server offers no SSL, please consider using threaded instead"))
-
- self.core.log.info(_("Starting lightweight webserver (bjoern): %(host)s:%(port)d") % {"host": self.host, "port": self.port})
- webinterface.run_lightweight(host=self.host, port=self.port)
-
- def quit(self):
- self.running = False
diff --git a/pyload/network/HTTPDownload.py b/pyload/network/HTTPDownload.py
index 65c893ad7..3b2bf26ca 100644
--- a/pyload/network/HTTPDownload.py
+++ b/pyload/network/HTTPDownload.py
@@ -12,9 +12,10 @@ import pycurl
from pyload.network.HTTPChunk import ChunkInfo, HTTPChunk
from pyload.network.HTTPRequest import BadHeader
-from pyload.plugins.Plugin import Abort
+from pyload.plugin.Plugin import Abort
from pyload.utils import safe_join, fs_encode
+
class HTTPDownload(object):
""" loads a url http + ftp """
diff --git a/pyload/network/HTTPRequest.py b/pyload/network/HTTPRequest.py
index 2f49fbe91..eac03a365 100644
--- a/pyload/network/HTTPRequest.py
+++ b/pyload/network/HTTPRequest.py
@@ -11,7 +11,7 @@ from httplib import responses
from logging import getLogger
from cStringIO import StringIO
-from pyload.plugins.Plugin import Abort, Fail
+from pyload.plugin.Plugin import Abort, Fail
from pyload.utils import encode
diff --git a/pyload/network/XDCCRequest.py b/pyload/network/XDCCRequest.py
index 7ac8165db..c49f418c4 100644
--- a/pyload/network/XDCCRequest.py
+++ b/pyload/network/XDCCRequest.py
@@ -12,7 +12,7 @@ from time import time
import struct
from select import select
-from pyload.plugins.Plugin import Abort
+from pyload.plugin.Plugin import Abort
class XDCCRequest(object):
diff --git a/pyload/plugin/Account.py b/pyload/plugin/Account.py
new file mode 100644
index 000000000..b14615d3a
--- /dev/null
+++ b/pyload/plugin/Account.py
@@ -0,0 +1,307 @@
+# -*- coding: utf-8 -*-
+
+from random import choice
+from time import time
+from traceback import print_exc
+from threading import RLock
+
+from pyload.plugin.Plugin import Base
+from pyload.utils import compare_time, parseFileSize, lock
+
+
+class WrongPassword(Exception):
+ pass
+
+
+class Account(Base):
+ """
+ Base class for every Account plugin.
+ Just overwrite `login` and the cookies will be stored, making the account accessible in the\
+ associated hoster plugin. The plugin should also provide `loadAccountInfo`.
+ """
+ __name = "Account"
+ __type = "account"
+ __version = "0.03"
+
+ __description = """Base account plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+ #: after that time (in minutes) pyload will relogin the account
+ login_timeout = 10 * 60
+ #: after that time (in minutes) account data will be reloaded
+ info_threshold = 10 * 60
+
+
+ def __init__(self, manager, accounts):
+ Base.__init__(self, manager.core)
+
+ self.manager = manager
+ self.accounts = {}
+ self.infos = {} #: cache for account information
+ self.lock = RLock()
+ self.timestamps = {}
+
+ self.init()
+
+ self.setAccounts(accounts)
+
+
+ def init(self):
+ pass
+
+
+ def login(self, user, data, req):
+ """login into account, the cookies will be saved so user can be recognized
+
+ :param user: loginname
+ :param data: data dictionary
+ :param req: `Request` instance
+ """
+ pass
+
+
+ @lock
+ def _login(self, user, data):
+ # set timestamp for login
+ self.timestamps[user] = time()
+
+ req = self.getAccountRequest(user)
+ try:
+ self.login(user, data, req)
+ except WrongPassword:
+ self.logWarning(
+ _("Could not login with account %(user)s | %(msg)s") % {"user": user,
+ "msg": _("Wrong Password")})
+ success = data['valid'] = False
+ except Exception, e:
+ self.logWarning(
+ _("Could not login with account %(user)s | %(msg)s") % {"user": user,
+ "msg": e})
+ success = data['valid'] = False
+ if self.core.debug:
+ print_exc()
+ else:
+ success = True
+ finally:
+ if req:
+ req.close()
+ return success
+
+
+ def relogin(self, user):
+ req = self.getAccountRequest(user)
+ if req:
+ req.cj.clear()
+ req.close()
+ if user in self.infos:
+ del self.infos[user] #delete old information
+
+ return self._login(user, self.accounts[user])
+
+
+ def setAccounts(self, accounts):
+ self.accounts = accounts
+ for user, data in self.accounts.iteritems():
+ self._login(user, data)
+ self.infos[user] = {}
+
+
+ def updateAccounts(self, user, password=None, options={}):
+ """ updates account and return true if anything changed """
+
+ if user in self.accounts:
+ self.accounts[user]['valid'] = True #do not remove or accounts will not login
+ if password:
+ self.accounts[user]['password'] = password
+ self.relogin(user)
+ return True
+ if options:
+ before = self.accounts[user]['options']
+ self.accounts[user]['options'].update(options)
+ return self.accounts[user]['options'] != before
+ else:
+ self.accounts[user] = {"password": password, "options": options, "valid": True}
+ self._login(user, self.accounts[user])
+ return True
+
+
+ def removeAccount(self, user):
+ if user in self.accounts:
+ del self.accounts[user]
+ if user in self.infos:
+ del self.infos[user]
+ if user in self.timestamps:
+ del self.timestamps[user]
+
+
+ @lock
+ def getAccountInfo(self, name, force=False):
+ """retrieve account infos for an user, do **not** overwrite this method!\\
+ just use it to retrieve infos in hoster plugins. see `loadAccountInfo`
+
+ :param name: username
+ :param force: reloads cached account information
+ :return: dictionary with information
+ """
+ data = Account.loadAccountInfo(self, name)
+
+ if force or name not in self.infos:
+ self.logDebug("Get Account Info for %s" % name)
+ req = self.getAccountRequest(name)
+
+ try:
+ infos = self.loadAccountInfo(name, req)
+ if not type(infos) == dict:
+ raise Exception("Wrong return format")
+ except Exception, e:
+ infos = {"error": str(e)}
+ print_exc()
+
+ if req:
+ req.close()
+
+ self.logDebug("Account Info: %s" % infos)
+
+ infos['timestamp'] = time()
+ self.infos[name] = infos
+ elif "timestamp" in self.infos[name] and self.infos[name][
+ "timestamp"] + self.info_threshold * 60 < time():
+ self.logDebug("Reached timeout for account data")
+ self.scheduleRefresh(name)
+
+ data.update(self.infos[name])
+ return data
+
+
+ def isPremium(self, user):
+ info = self.getAccountInfo(user)
+ return info['premium']
+
+
+ def loadAccountInfo(self, name, req=None):
+ """this should be overwritten in account plugin,\
+ and retrieving account information for user
+
+ :param name:
+ :param req: `Request` instance
+ :return:
+ """
+ return {"validuntil" : None, #: -1 for unlimited
+ "login" : name,
+ # "password" : self.accounts[name]['password'], #: commented due security reason
+ "options" : self.accounts[name]['options'],
+ "valid" : self.accounts[name]['valid'],
+ "trafficleft": None, #: in bytes, -1 for unlimited
+ "maxtraffic" : None,
+ "premium" : None,
+ "timestamp" : 0, #: time this info was retrieved
+ "type" : self.__name}
+
+
+ def getAllAccounts(self, force=False):
+ return [self.getAccountInfo(user, force) for user, data in self.accounts.iteritems()]
+
+
+ def getAccountRequest(self, user=None):
+ if not user:
+ user, data = self.selectAccount()
+ if not user:
+ return None
+
+ req = self.core.requestFactory.getRequest(self.__name, user)
+ return req
+
+
+ def getAccountCookies(self, user=None):
+ if not user:
+ user, data = self.selectAccount()
+ if not user:
+ return None
+
+ cj = self.core.requestFactory.getCookieJar(self.__name, user)
+ return cj
+
+
+ def getAccountData(self, user):
+ return self.accounts[user]
+
+
+ def selectAccount(self):
+ """ returns an valid account name and data"""
+ usable = []
+ for user, data in self.accounts.iteritems():
+ if not data['valid']: continue
+
+ if "time" in data['options'] and data['options']['time']:
+ time_data = ""
+ try:
+ time_data = data['options']['time'][0]
+ start, end = time_data.split("-")
+ if not compare_time(start.split(":"), end.split(":")):
+ continue
+ except Exception:
+ self.logWarning(_("Your Time %s has wrong format, use: 1:22-3:44") % time_data)
+
+ if user in self.infos:
+ if "validuntil" in self.infos[user]:
+ if self.infos[user]['validuntil'] > 0 and time() > self.infos[user]['validuntil']:
+ continue
+ if "trafficleft" in self.infos[user]:
+ if self.infos[user]['trafficleft'] == 0:
+ continue
+
+ usable.append((user, data))
+
+ if not usable: return None, None
+ return choice(usable)
+
+
+ def canUse(self):
+ return False if self.selectAccount() == (None, None) else True
+
+
+ def parseTraffic(self, value, unit=None): #: return bytes
+ if not unit and not isinstance(value, basestring):
+ unit = "KB"
+ return parseFileSize(value, unit)
+
+
+ def wrongPassword(self):
+ raise WrongPassword
+
+
+ def empty(self, user):
+ if user in self.infos:
+ self.logWarning(_("Account %s has not enough traffic, checking again in 30min") % user)
+
+ self.infos[user].update({"trafficleft": 0})
+ self.scheduleRefresh(user, 30 * 60)
+
+
+ def expired(self, user):
+ if user in self.infos:
+ self.logWarning(_("Account %s is expired, checking again in 1h") % user)
+
+ self.infos[user].update({"validuntil": time() - 1})
+ self.scheduleRefresh(user, 60 * 60)
+
+
+ def scheduleRefresh(self, user, time=0, force=True):
+ """ add task to refresh account info to sheduler """
+ self.logDebug("Scheduled Account refresh for %s in %s seconds." % (user, time))
+ self.core.scheduler.addJob(time, self.getAccountInfo, [user, force])
+
+
+ @lock
+ def checkLogin(self, user):
+ """ checks if user is still logged in """
+ if user in self.timestamps:
+ if self.login_timeout > 0 and self.timestamps[user] + self.login_timeout * 60 < time():
+ self.logDebug("Reached login timeout for %s" % user)
+ return self.relogin(user)
+ else:
+ return True
+ else:
+ return False
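
As the class docstring above says, a concrete account plugin only has to override `login` (and ideally `loadAccountInfo`). A minimal sketch of such a subclass — the host name, URLs and page markers are invented for illustration; `req.load`, `self.parseTraffic` and `self.wrongPassword` are the helpers used or defined in this file:

# -*- coding: utf-8 -*-

import re

from pyload.plugin.Account import Account


class ExampleHostCom(Account):
    __name = "ExampleHostCom"
    __type = "account"
    __version = "0.01"

    __description = """Example-host.com account plugin (illustrative sketch)"""
    __license = "GPLv3"
    __authors = [("example", "example@example.org")]


    def loadAccountInfo(self, user, req):
        # hypothetical profile page and markup
        html = req.load("http://example-host.com/profile")

        m = re.search(r'Traffic left: ([\d.]+ [KMG]B)', html)
        trafficleft = self.parseTraffic(m.group(1)) if m else None
        premium = "Premium member" in html

        return {"validuntil": -1, "trafficleft": trafficleft, "premium": premium}


    def login(self, user, data, req):
        # hypothetical login form
        html = req.load("http://example-host.com/login",
                        post={'user': user, 'password': data['password']})

        if "Invalid credentials" in html:
            self.wrongPassword()
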
diff --git a/pyload/plugin/Addon.py b/pyload/plugin/Addon.py
new file mode 100644
index 000000000..bf8151027
--- /dev/null
+++ b/pyload/plugin/Addon.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+
+from traceback import print_exc
+
+from pyload.plugin.Plugin import Base
+from pyload.utils import has_method
+
+
+class Expose(object):
+ """ used for decoration to declare rpc services """
+
+ def __new__(cls, f, *args, **kwargs):
+ addonManager.addRPC(f.__module__, f.func_name, f.func_doc)
+ return f
+
+
+def threaded(fn):
+
+ def run(*args,**kwargs):
+ addonManager.startThread(fn, *args, **kwargs)
+
+ return run
+
+
+class Addon(Base):
+ """
+ Base class for addon plugins.
+ """
+ __name = "Addon"
+ __type = "addon"
+ __version = "0.03"
+
+ __config = [] #: [("name", "type", "desc", "default")]
+
+ __description = """Base addon/hook plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de"),
+ ("RaNaN", "RaNaN@pyload.org")]
+
+
+ #: automatically registers event listeners for functions; the attribute will be deleted, don't use it yourself
+ event_map = {}
+
+ # Deprecated alternative to event_map
+ #: List of events the plugin can handle; name the functions exactly like the event name.
+ event_list = [] #@NOTE: don't make duplicate entries in event_map
+
+
+ def __init__(self, core, manager):
+ Base.__init__(self, core)
+
+ #: Provide information in dict here, usable by API `getInfo`
+ self.info = {}
+
+ #: Callback of periodical job task, used by AddonManager
+ self.cb = None
+ self.interval = 60
+
+ #: `AddonManager`
+ self.manager = manager
+
+ #register events
+ if self.event_map:
+ for event, funcs in self.event_map.iteritems():
+ if type(funcs) in (list, tuple):
+ for f in funcs:
+ self.manager.addEvent(event, getattr(self,f))
+ else:
+ self.manager.addEvent(event, getattr(self,funcs))
+
+ #delete for various reasons
+ self.event_map = None
+
+ if self.event_list:
+ for f in self.event_list:
+ self.manager.addEvent(f, getattr(self,f))
+
+ self.event_list = None
+
+ self.setup()
+
+ # self.initPeriodical()
+
+
+ def initPeriodical(self, delay=0, threaded=False):
+ self.cb = self.core.scheduler.addJob(delay, self._periodical, args=[threaded], threaded=threaded)
+
+
+ def _periodical(self, threaded):
+ if self.interval < 0:
+ self.cb = None
+ return
+
+ try:
+ self.periodical()
+
+ except Exception, e:
+ self.logError(_("Error executing addon: %s") % e)
+ if self.core.debug:
+ print_exc()
+
+ self.cb = self.core.scheduler.addJob(self.interval, self._periodical, threaded=threaded)
+
+
+ def __repr__(self):
+ return "<Addon %s>" % self.__name
+
+
+ def setup(self):
+ """ more init stuff if needed """
+ pass
+
+
+ def deactivate(self):
+ """ called when addon was deactivated """
+ if has_method(self.__class__, "unload"):
+ self.unload()
+
+ def unload(self): # Deprecated, use method deactivate() instead
+ pass
+
+
+ def isActivated(self):
+ """ checks if addon is activated"""
+ return self.core.config.getPlugin(self.__name, "activated")
+
+
+ # Event methods - overwrite these if needed
+ def activate(self):
+ """ called when addon was activated """
+ if has_method(self.__class__, "coreReady"):
+ self.coreReady()
+
+ def coreReady(self): # Deprecated, use method activate() instead
+ pass
+
+
+ def exit(self):
+ """ called by core.shutdown just before pyLoad exit """
+ if has_method(self.__class__, "coreExiting"):
+ self.coreExiting()
+
+ def coreExiting(self): # Deprecated, use method exit() instead
+ pass
+
+
+ def downloadPreparing(self, pyfile):
+ pass
+
+
+ def downloadFinished(self, pyfile):
+ pass
+
+
+ def downloadFailed(self, pyfile):
+ pass
+
+
+ def packageFinished(self, pypack):
+ pass
+
+
+ def beforeReconnecting(self, ip):
+ pass
+
+
+ def afterReconnecting(self, ip):
+ pass
+
+
+ def periodical(self):
+ pass
+
+
+ def captchaTask(self, task):
+ """ new captcha task for the plugin, it MUST set the handler and timeout or will be ignored """
+ pass
+
+
+ def captchaCorrect(self, task):
+ pass
+
+
+ def captchaInvalid(self, task):
+ pass
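
To illustrate the `event_map` and `initPeriodical` machinery above, a minimal addon sketch (the addon and the event name are invented; the "allDownloadsFinished" event is an assumption, and `logDebug` comes from the plugin Base class):

# -*- coding: utf-8 -*-

from pyload.plugin.Addon import Addon


class HelloAddon(Addon):
    __name = "HelloAddon"
    __type = "addon"
    __version = "0.01"

    __config = [("activated", "bool", "Activated", False)]

    __description = """Illustrative addon sketch"""
    __license = "GPLv3"
    __authors = [("example", "example@example.org")]

    #: registered automatically by Addon.__init__ via manager.addEvent
    event_map = {"allDownloadsFinished": "onAllFinished"}  # event name assumed


    def setup(self):
        self.interval = 5 * 60  # run periodical() every 5 minutes
        self.initPeriodical()


    def periodical(self):
        self.logDebug("HelloAddon is still alive")


    def onAllFinished(self):
        self.logDebug("All downloads finished")
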
diff --git a/pyload/plugin/Captcha.py b/pyload/plugin/Captcha.py
new file mode 100644
index 000000000..1b3f34e33
--- /dev/null
+++ b/pyload/plugin/Captcha.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Plugin import Plugin
+
+
+class Captcha(Plugin):
+ __name = "Captcha"
+ __type = "captcha"
+ __version = "0.14"
+
+ __description = """Base captcha service plugin"""
+ __license = "GPLv3"
+ __authors = [("pyLoad Team", "admin@pyload.org")]
+
+
+ KEY_PATTERN = None
+
+ key = None #: last key detected
+
+
+ def __init__(self, plugin):
+ self.plugin = plugin
+
+
+ def detect_key(self, html=None):
+ if not html:
+ if hasattr(self.plugin, "html") and self.plugin.html:
+ html = self.plugin.html
+ else:
+ errmsg = _("%s html not found") % self.__name
+ self.plugin.error(errmsg)
+ raise TypeError(errmsg)
+
+ m = re.search(self.KEY_PATTERN, html)
+ if m:
+ self.key = m.group("KEY")
+ self.plugin.logDebug("%s key: %s" % (self.__name, self.key))
+ return self.key
+ else:
+ self.plugin.logDebug("%s key not found" % self.__name)
+ return None
+
+
+ def challenge(self, key=None):
+ raise NotImplementedError
+
+
+ def result(self, server, challenge):
+ raise NotImplementedError
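
A hedged sketch of a concrete captcha service built on `detect_key` — the sitekey markup, the service URLs and the use of `self.plugin.load` are assumptions for illustration, not part of this commit:

# -*- coding: utf-8 -*-

from pyload.plugin.Captcha import Captcha


class ExampleCaptchaService(Captcha):
    __name = "ExampleCaptchaService"
    __type = "captcha"
    __version = "0.01"

    __description = """Illustrative captcha service sketch"""
    __license = "GPLv3"
    __authors = [("example", "example@example.org")]

    #: detect_key() expects a named group called KEY
    KEY_PATTERN = r'data-sitekey="(?P<KEY>[\w-]+)"'


    def challenge(self, key=None):
        key = key or self.detect_key()
        # hypothetical challenge endpoint, fetched through the owning hoster plugin
        return self.plugin.load("http://captcha.example.com/challenge", get={'k': key})


    def result(self, server, challenge):
        # hypothetical answer endpoint
        return self.plugin.load(server, get={'c': challenge})
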
diff --git a/pyload/plugin/Container.py b/pyload/plugin/Container.py
new file mode 100644
index 000000000..e2e0f2248
--- /dev/null
+++ b/pyload/plugin/Container.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import re
+
+from os import remove
+from os.path import basename, exists
+
+from pyload.plugin.internal.Crypter import Crypter
+from pyload.utils import safe_join
+
+
+class Container(Crypter):
+ __name = "Container"
+ __type = "container"
+ __version = "0.01"
+
+ __pattern = r'^unmatchable$'
+ __config = [] #: [("name", "type", "desc", "default")]
+
+ __description = """Base container decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+ def preprocessing(self, thread):
+ """prepare"""
+
+ self.setup()
+ self.thread = thread
+
+ self.loadToDisk()
+
+ self.decrypt(self.pyfile)
+ self.deleteTmp()
+
+ self.createPackages()
+
+
+ def loadToDisk(self):
+ """loads container to disk if its stored remotely and overwrite url,
+ or check existent on several places at disk"""
+
+ if self.pyfile.url.startswith("http"):
+ self.pyfile.name = re.findall("([^\/=]+)", self.pyfile.url)[-1]
+ content = self.load(self.pyfile.url)
+ self.pyfile.url = safe_join(self.core.config['general']['download_folder'], self.pyfile.name)
+ try:
+ with open(self.pyfile.url, "wb") as f:
+ f.write(content)
+ except IOError, e:
+ self.fail(str(e))
+
+ else:
+ self.pyfile.name = basename(self.pyfile.url)
+ if not exists(self.pyfile.url):
+ if exists(safe_join(pypath, self.pyfile.url)):
+ self.pyfile.url = safe_join(pypath, self.pyfile.url)
+ else:
+ self.fail(_("File not exists"))
+
+
+ def deleteTmp(self):
+ if self.pyfile.name.startswith("tmp_"):
+ remove(self.pyfile.url)
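
Because `loadToDisk` rewrites `pyfile.url` to a local path before `decrypt` runs, a container plugin can simply read that file. A minimal sketch for a plain-text link container (the plugin is invented; `self.packages` and `self.fail` come from the Crypter/Plugin base classes, and the package `name` attribute is assumed analogous to the `folder`/`password` attributes used in Crypter below):

# -*- coding: utf-8 -*-

from __future__ import with_statement

from pyload.plugin.Container import Container


class TxtContainer(Container):
    __name = "TxtContainer"
    __type = "container"
    __version = "0.01"

    __pattern = r'.+\.txt$'

    __description = """Plain-text link container (illustrative sketch)"""
    __license = "GPLv3"
    __authors = [("example", "example@example.org")]


    def decrypt(self, pyfile):
        # after loadToDisk() pyfile.url points to a local file
        with open(pyfile.url) as f:
            links = [line.strip() for line in f if line.strip()]

        if not links:
            self.fail(_("No links found"))

        # reuse the current package name and folder for the extracted links
        self.packages.append((pyfile.package().name, links, pyfile.package().folder))
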
diff --git a/pyload/plugin/Crypter.py b/pyload/plugin/Crypter.py
new file mode 100644
index 000000000..aa9966ab4
--- /dev/null
+++ b/pyload/plugin/Crypter.py
@@ -0,0 +1,107 @@
+# -*- coding: utf-8 -*-
+
+from urlparse import urlparse
+
+from pyload.plugin.Plugin import Plugin
+from pyload.utils import decode, safe_filename
+
+
+class Crypter(Plugin):
+ __name = "Crypter"
+ __type = "crypter"
+ __version = "0.05"
+
+ __pattern = r'^unmatchable$'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True), #: Overrides core.config['general']['folder_per_package']
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Base decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ html = None #: last html loaded
+
+
+ def __init__(self, pyfile):
+ #: Put all packages here. It's a list of tuples like: ( name, [list of links], folder )
+ self.packages = []
+
+ #: List of urls; pyLoad will generate the package names
+ self.urls = []
+
+ Plugin.__init__(self, pyfile)
+
+
+ def process(self, pyfile):
+ """ main method """
+
+ self.decrypt(pyfile)
+
+ if self.urls:
+ self.generatePackages()
+
+ elif not self.packages:
+ self.error(_("No link extracted"), "decrypt")
+
+ self.createPackages()
+
+
+ def decrypt(self, pyfile):
+ raise NotImplementedError
+
+
+ def generatePackages(self):
+ """ generate new packages from self.urls """
+
+ packages = [(name, links, None) for name, links in self.core.api.generatePackages(self.urls).iteritems()]
+ self.packages.extend(packages)
+
+
+ def createPackages(self):
+ """ create new packages from self.packages """
+
+ package_folder = self.pyfile.package().folder
+ package_password = self.pyfile.package().password
+ package_queue = self.pyfile.package().queue
+
+ folder_per_package = self.core.config['general']['folder_per_package']
+ try:
+ use_subfolder = self.getConfig('use_subfolder')
+ except Exception:
+ use_subfolder = folder_per_package
+ try:
+ subfolder_per_package = self.getConfig('subfolder_per_package')
+ except Exception:
+ subfolder_per_package = True
+
+ for pack in self.packages:
+ name, links, folder = pack
+
+ self.logDebug("Parsed package: %s" % name,
+ "%d links" % len(links),
+ "Saved to folder: %s" % folder if folder else "Saved to download folder")
+
+ links = map(decode, links)
+
+ pid = self.core.api.addPackage(name, links, package_queue)
+
+ if package_password:
+ self.core.api.setPackageData(pid, {"password": package_password})
+
+ setFolder = lambda x: self.core.api.setPackageData(pid, {"folder": x or ""}) #: Workaround so as not to break the addPackage API method
+
+ if use_subfolder:
+ if not subfolder_per_package:
+ setFolder(package_folder)
+ self.logDebug("Set package %(name)s folder to: %(folder)s" % {"name": name, "folder": folder})
+
+ elif not folder_per_package or name != folder:
+ if not folder:
+ folder = urlparse(name).path.split("/")[-1]
+
+ setFolder(safe_filename(folder))
+ self.logDebug("Set package %(name)s folder to: %(folder)s" % {"name": name, "folder": folder})
+
+ elif folder_per_package:
+ setFolder(None)
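
A minimal decrypter sketch built on the `process` / `generatePackages` flow above (the host, pattern and markup are invented; `self.load` is the standard Plugin helper):

# -*- coding: utf-8 -*-

import re

from pyload.plugin.Crypter import Crypter


class ExampleFolderCom(Crypter):
    __name = "ExampleFolderCom"
    __type = "crypter"
    __version = "0.01"

    __pattern = r'http://(?:www\.)?example-folder\.com/folder/\w+'

    __description = """Example-folder.com decrypter (illustrative sketch)"""
    __license = "GPLv3"
    __authors = [("example", "example@example.org")]


    LINK_PATTERN = r'<a class="file" href="(.+?)"'  # made-up markup


    def decrypt(self, pyfile):
        html = self.load(pyfile.url)

        # process() will turn self.urls into packages via generatePackages()
        self.urls = re.findall(self.LINK_PATTERN, html)
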
diff --git a/pyload/plugin/Hoster.py b/pyload/plugin/Hoster.py
new file mode 100644
index 000000000..df778c72f
--- /dev/null
+++ b/pyload/plugin/Hoster.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Plugin import Plugin
+
+
+def getInfo(self):
+ #result = [ .. (name, size, status, url) .. ]
+ return
+
+
+class Hoster(Plugin):
+ __name = "Hoster"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'^unmatchable$'
+ __config = [] #: [("name", "type", "desc", "default")]
+
+ __description = """Base hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
diff --git a/pyload/plugins/OCR.py b/pyload/plugin/OCR.py
index 9e8d49d70..9e8d49d70 100644
--- a/pyload/plugins/OCR.py
+++ b/pyload/plugin/OCR.py
diff --git a/pyload/plugins/Plugin.py b/pyload/plugin/Plugin.py
index 07797e8c6..07797e8c6 100644
--- a/pyload/plugins/Plugin.py
+++ b/pyload/plugin/Plugin.py
diff --git a/pyload/plugins/__init__.py b/pyload/plugin/__init__.py
index 40a96afc6..40a96afc6 100644
--- a/pyload/plugins/__init__.py
+++ b/pyload/plugin/__init__.py
diff --git a/pyload/plugin/account/AlldebridCom.py b/pyload/plugin/account/AlldebridCom.py
new file mode 100644
index 000000000..14b1755ca
--- /dev/null
+++ b/pyload/plugin/account/AlldebridCom.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+
+import re
+import xml.dom.minidom as dom
+
+from time import time
+from urllib import urlencode
+
+from BeautifulSoup import BeautifulSoup
+
+from pyload.plugin.Account import Account
+
+
+class AlldebridCom(Account):
+ __name = "AlldebridCom"
+ __type = "account"
+ __version = "0.22"
+
+ __description = """AllDebrid.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Andy Voigt", "spamsales@online.de")]
+
+
+ def loadAccountInfo(self, user, req):
+ data = self.getAccountData(user)
+ page = req.load("http://www.alldebrid.com/account/")
+ soup = BeautifulSoup(page)
+ #Try to parse expiration date directly from the control panel page (better accuracy)
+ try:
+ time_text = soup.find('div', attrs={'class': 'remaining_time_text'}).strong.string
+ self.logDebug("Account expires in: %s" % time_text)
+ p = re.compile('\d+')
+ exp_data = p.findall(time_text)
+ exp_time = time() + int(exp_data[0]) * 24 * 60 * 60 + int(
+ exp_data[1]) * 60 * 60 + (int(exp_data[2]) - 1) * 60
+ #Get expiration date from API
+ except Exception:
+ data = self.getAccountData(user)
+ page = req.load("http://www.alldebrid.com/api.php",
+ get={'action': "info_user", 'login': user, 'pw': data['password']})
+ self.logDebug(page)
+ xml = dom.parseString(page)
+ exp_time = time() + int(xml.getElementsByTagName("date")[0].childNodes[0].nodeValue) * 24 * 60 * 60
+ account_info = {"validuntil": exp_time, "trafficleft": -1}
+ return account_info
+
+
+ def login(self, user, data, req):
+ urlparams = urlencode({'action': 'login', 'login_login': user, 'login_password': data['password']})
+ page = req.load("http://www.alldebrid.com/register/?%s" % urlparams)
+
+ if "This login doesn't exist" in page:
+ self.wrongPassword()
+
+ if "The password is not valid" in page:
+ self.wrongPassword()
+
+ if "Invalid captcha" in page:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/BayfilesCom.py b/pyload/plugin/account/BayfilesCom.py
new file mode 100644
index 000000000..5ca04c86b
--- /dev/null
+++ b/pyload/plugin/account/BayfilesCom.py
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+
+from time import time
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class BayfilesCom(Account):
+ __name = "BayfilesCom"
+ __type = "account"
+ __version = "0.03"
+
+ __description = """Bayfiles.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def loadAccountInfo(self, user, req):
+ for _i in xrange(2):
+ res = json_loads(req.load("http://api.bayfiles.com/v1/account/info"))
+ self.logDebug(res)
+ if not res['error']:
+ break
+ self.logWarning(res['error'])
+ self.relogin(user)
+
+ return {"premium": bool(res['premium']), "trafficleft": -1,
+ "validuntil": res['expires'] if res['expires'] >= int(time()) else -1}
+
+
+ def login(self, user, data, req):
+ res = json_loads(req.load("http://api.bayfiles.com/v1/account/login/%s/%s" % (user, data['password'])))
+ self.logDebug(res)
+ if res['error']:
+ self.logError(res['error'])
+ self.wrongPassword()
diff --git a/pyload/plugin/account/BillionuploadsCom.py b/pyload/plugin/account/BillionuploadsCom.py
new file mode 100644
index 000000000..982a2cc34
--- /dev/null
+++ b/pyload/plugin/account/BillionuploadsCom.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class BillionuploadsCom(XFSAccount):
+ __name = "BillionuploadsCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Billionuploads.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "billionuploads.com"
diff --git a/pyload/plugin/account/BitshareCom.py b/pyload/plugin/account/BitshareCom.py
new file mode 100644
index 000000000..591fe7f89
--- /dev/null
+++ b/pyload/plugin/account/BitshareCom.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+
+
+class BitshareCom(Account):
+ __name = "BitshareCom"
+ __type = "account"
+ __version = "0.12"
+
+ __description = """Bitshare account plugin"""
+ __license = "GPLv3"
+ __authors = [("Paul King", "")]
+
+
+ def loadAccountInfo(self, user, req):
+ page = req.load("http://bitshare.com/mysettings.html")
+
+ if "\"http://bitshare.com/myupgrade.html\">Free" in page:
+ return {"validuntil": -1, "trafficleft": -1, "premium": False}
+
+ if not '<input type="checkbox" name="directdownload" checked="checked" />' in page:
+ self.logWarning(_("Activate direct Download in your Bitshare Account"))
+
+ return {"validuntil": -1, "trafficleft": -1, "premium": True}
+
+
+ def login(self, user, data, req):
+ page = req.load("http://bitshare.com/login.html",
+ post={"user": user, "password": data['password'], "submit": "Login"}, cookies=True)
+ if "login" in req.lastEffectiveURL:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/CatShareNet.py b/pyload/plugin/account/CatShareNet.py
new file mode 100644
index 000000000..8fe3d05cf
--- /dev/null
+++ b/pyload/plugin/account/CatShareNet.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import mktime, strptime
+
+from pyload.plugin.Account import Account
+
+
+class CatShareNet(Account):
+ __name = "CatShareNet"
+ __type = "account"
+ __version = "0.01"
+
+ __description = """CatShareNet account plugin"""
+ __license = "GPLv3"
+ __authors = [("prOq", "")]
+
+
+ PREMIUM_PATTERN = r'class="nav-collapse collapse pull-right">[\s\w<>=-."/:]*\sz.</a></li>\s*<li><a href="/premium">.*\s*<span style="color: red">(.*?)</span>[\s\w<>/]*href="/logout"'
+ VALID_UNTIL_PATTERN = r'<div class="span6 pull-right">[\s\w<>=-":;]*<span style="font-size:13px;">.*?<strong>(.*?)</strong></span>'
+
+
+ def loadAccountInfo(self, user, req):
+ premium = False
+ validuntil = -1
+
+ html = req.load("http://catshare.net/", decode=True)
+
+ try:
+ m = re.search(self.PREMIUM_PATTERN, html)
+ if "Premium" in m.group(1):
+ premium = True
+ except Exception:
+ pass
+
+ try:
+ m = re.search(self.VALID_UNTIL_PATTERN, html)
+ expiredate = m.group(1)
+ if "-" not in expiredate:
+ validuntil = mktime(strptime(expiredate, "%d.%m.%Y"))
+ except Exception:
+ pass
+
+ return {'premium': premium, 'trafficleft': -1, 'validuntil': validuntil}
+
+
+ def login(self, user, data, req):
+ html = req.load("http://catshare.net/login",
+ post={'user_email': user,
+ 'user_password': data['password'],
+ 'remindPassword': 0,
+ 'user[submit]': "Login"})
+
+ if not '<a href="/logout">Wyloguj</a>' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/CramitIn.py b/pyload/plugin/account/CramitIn.py
new file mode 100644
index 000000000..ccd291776
--- /dev/null
+++ b/pyload/plugin/account/CramitIn.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class CramitIn(XFSAccount):
+ __name = "CramitIn"
+ __type = "account"
+ __version = "0.03"
+
+ __description = """Cramit.in account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "cramit.in"
diff --git a/pyload/plugin/account/CzshareCom.py b/pyload/plugin/account/CzshareCom.py
new file mode 100644
index 000000000..f11d4efd4
--- /dev/null
+++ b/pyload/plugin/account/CzshareCom.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+
+from time import mktime, strptime
+import re
+
+from pyload.plugin.Account import Account
+
+
+class CzshareCom(Account):
+ __name = "CzshareCom"
+ __type = "account"
+ __version = "0.14"
+
+ __description = """Czshare.com account plugin, now Sdilej.cz"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ CREDIT_LEFT_PATTERN = r'<tr class="active">\s*<td>([\d ,]+) (KiB|MiB|GiB)</td>\s*<td>([^<]*)</td>\s*</tr>'
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load("http://sdilej.cz/prehled_kreditu/")
+
+ m = re.search(self.CREDIT_LEFT_PATTERN, html)
+ if m is None:
+ return {"validuntil": 0, "trafficleft": 0}
+ else:
+ credits = float(m.group(1).replace(' ', '').replace(',', '.'))
+ credits = credits * 1024 ** {'KiB': 0, 'MiB': 1, 'GiB': 2}[m.group(2)]
+ validuntil = mktime(strptime(m.group(3), '%d.%m.%y %H:%M'))
+ return {"validuntil": validuntil, "trafficleft": credits}
+
+
+ def login(self, user, data, req):
+ html = req.load('https://sdilej.cz/index.php', post={
+ "Prihlasit": "Prihlasit",
+ "login-password": data['password'],
+ "login-name": user
+ })
+
+ if '<div class="login' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/DebridItaliaCom.py b/pyload/plugin/account/DebridItaliaCom.py
new file mode 100644
index 000000000..c895842f0
--- /dev/null
+++ b/pyload/plugin/account/DebridItaliaCom.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import mktime, strptime
+
+from pyload.plugin.Account import Account
+
+
+class DebridItaliaCom(Account):
+ __name = "DebridItaliaCom"
+ __type = "account"
+ __version = "0.11"
+
+ __description = """Debriditalia.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ WALID_UNTIL_PATTERN = r'Premium valid till: (.+?) \|'
+
+
+ def loadAccountInfo(self, user, req):
+ info = {'premium': False, 'validuntil': None, 'trafficleft': None}
+ html = req.load("http://debriditalia.com/")
+
+ if 'Account premium not activated' not in html:
+ m = re.search(self.WALID_UNTIL_PATTERN, html)
+ if m:
+ validuntil = int(mktime(strptime(m.group(1), "%d/%m/%Y %H:%M")))
+ info = {'premium': True, 'validuntil': validuntil, 'trafficleft': -1}
+ else:
+ self.logError(_("Unable to retrieve account information"))
+
+ return info
+
+
+ def login(self, user, data, req):
+ html = req.load("http://debriditalia.com/login.php",
+ get={'u': user, 'p': data['password']})
+
+ if 'NO' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/DepositfilesCom.py b/pyload/plugin/account/DepositfilesCom.py
new file mode 100644
index 000000000..ac1da7b0c
--- /dev/null
+++ b/pyload/plugin/account/DepositfilesCom.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import strptime, mktime
+
+from pyload.plugin.Account import Account
+
+
+class DepositfilesCom(Account):
+ __name = "DepositfilesCom"
+ __type = "account"
+ __version = "0.30"
+
+ __description = """Depositfiles.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de"),
+ ("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load("https://dfiles.eu/de/gold/")
+ validuntil = re.search(r"Sie haben Gold Zugang bis: <b>(.*?)</b></div>", html).group(1)
+
+ validuntil = int(mktime(strptime(validuntil, "%Y-%m-%d %H:%M:%S")))
+
+ return {"validuntil": validuntil, "trafficleft": -1}
+
+
+ def login(self, user, data, req):
+ html = req.load("https://dfiles.eu/de/login.php", get={"return": "/de/gold/payment.php"},
+ post={"login": user, "password": data['password']})
+ if r'<div class="error_message">Sie haben eine falsche Benutzername-Passwort-Kombination verwendet.</div>' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/DropboxCom.py b/pyload/plugin/account/DropboxCom.py
new file mode 100644
index 000000000..2c4c36c4a
--- /dev/null
+++ b/pyload/plugin/account/DropboxCom.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class DropboxCom(SimpleHoster):
+ __name = "DropboxCom"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'https?://(?:www\.)?dropbox\.com/.+'
+
+ __description = """Dropbox.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
+
+
+ NAME_PATTERN = r'<title>Dropbox - (?P<N>.+?)<'
+ SIZE_PATTERN = r'&nbsp;&middot;&nbsp; (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ OFFLINE_PATTERN = r'<title>Dropbox - (404|Shared link error)<'
+
+ COOKIES = [("dropbox.com", "lang", "en")]
+
+
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = 1
+ self.resumeDownload = True
+
+
+ def handleFree(self):
+ self.download(self.pyfile.url, get={'dl': "1"})
+
+ check = self.checkDownload({'html': re.compile("html")})
+ if check == "html":
+ self.error(_("Downloaded file is an html page"))
+
+
+getInfo = create_getInfo(DropboxCom)
diff --git a/pyload/plugin/account/EasybytezCom.py b/pyload/plugin/account/EasybytezCom.py
new file mode 100644
index 000000000..c1f641669
--- /dev/null
+++ b/pyload/plugin/account/EasybytezCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class EasybytezCom(XFSAccount):
+ __name = "EasybytezCom"
+ __type = "account"
+ __version = "0.12"
+
+ __description = """EasyBytez.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "easybytez.com"
diff --git a/pyload/plugin/account/EuroshareEu.py b/pyload/plugin/account/EuroshareEu.py
new file mode 100644
index 000000000..9e948816c
--- /dev/null
+++ b/pyload/plugin/account/EuroshareEu.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+
+from time import mktime, strptime
+import re
+
+from pyload.plugin.Account import Account
+
+
+class EuroshareEu(Account):
+ __name = "EuroshareEu"
+ __type = "account"
+ __version = "0.01"
+
+ __description = """Euroshare.eu account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def loadAccountInfo(self, user, req):
+ self.relogin(user)
+ html = req.load("http://euroshare.eu/customer-zone/settings/")
+
+ m = re.search('id="input_expire_date" value="(\d+\.\d+\.\d+ \d+:\d+)"', html)
+ if m is None:
+ premium, validuntil = False, -1
+ else:
+ premium = True
+ validuntil = mktime(strptime(m.group(1), "%d.%m.%Y %H:%M"))
+
+ return {"validuntil": validuntil, "trafficleft": -1, "premium": premium}
+
+
+ def login(self, user, data, req):
+ html = req.load('http://euroshare.eu/customer-zone/login/', post={
+ "trvale": "1",
+ "login": user,
+ "password": data['password']
+ }, decode=True)
+
+ if u">Nesprávne prihlasovacie meno alebo heslo" in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/FastixRu.py b/pyload/plugin/account/FastixRu.py
new file mode 100644
index 000000000..b2ef7f685
--- /dev/null
+++ b/pyload/plugin/account/FastixRu.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class FastixRu(Account):
+ __name = "FastixRu"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Fastix account plugin"""
+ __license = "GPLv3"
+ __authors = [("Massimo Rosamilia", "max@spiritix.eu")]
+
+
+ def loadAccountInfo(self, user, req):
+ data = self.getAccountData(user)
+ page = json_loads(req.load("http://fastix.ru/api_v2/", get={'apikey': data['api'], 'sub': "getaccountdetails"}))
+
+ points = page['points']
+ kb = float(points) * 1024 ** 2 / 1000
+
+ if points > 0:
+ account_info = {"validuntil": -1, "trafficleft": kb}
+ else:
+ account_info = {"validuntil": None, "trafficleft": None, "premium": False}
+ return account_info
+
+
+ def login(self, user, data, req):
+ page = req.load("http://fastix.ru/api_v2/",
+ get={'sub': "get_apikey", 'email': user, 'password': data['password']})
+ api = json_loads(page)
+ api = api['apikey']
+ data['api'] = api
+ if "error_code" in page:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/FastshareCz.py b/pyload/plugin/account/FastshareCz.py
new file mode 100644
index 000000000..4ce94be5e
--- /dev/null
+++ b/pyload/plugin/account/FastshareCz.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Account import Account
+from pyload.utils import parseFileSize
+
+
+class FastshareCz(Account):
+ __name = "FastshareCz"
+ __type = "account"
+ __version = "0.05"
+
+ __description = """Fastshare.cz account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ CREDIT_PATTERN = r'My account\s*\((.+?)\)'
+
+
+ def loadAccountInfo(self, user, req):
+ validuntil = None
+ trafficleft = None
+ premium = None
+
+ html = req.load("http://www.fastshare.cz/user", decode=True)
+
+ m = re.search(self.CREDIT_PATTERN, html)
+ if m:
+ trafficleft = self.parseTraffic(m.group(1))
+
+ if trafficleft:
+ premium = True
+ validuntil = -1
+ else:
+ premium = False
+
+ return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
+
+
+ def login(self, user, data, req):
+ req.cj.setCookie("fastshare.cz", "lang", "en")
+
+ req.load('http://www.fastshare.cz/login') # Do not remove or it will not login
+
+ html = req.load("http://www.fastshare.cz/sql.php",
+ post={'login': user, 'heslo': data['password']},
+ decode=True)
+
+ if ">Wrong username or password" in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/File4safeCom.py b/pyload/plugin/account/File4safeCom.py
new file mode 100644
index 000000000..d7004b463
--- /dev/null
+++ b/pyload/plugin/account/File4safeCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class File4safeCom(XFSAccount):
+ __name = "File4safeCom"
+ __type = "account"
+ __version = "0.04"
+
+ __description = """File4safe.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ HOSTER_DOMAIN = "file4safe.com"
+
+ LOGIN_FAIL_PATTERN = r'input_login'
diff --git a/pyload/plugin/account/FileParadoxIn.py b/pyload/plugin/account/FileParadoxIn.py
new file mode 100644
index 000000000..0e103c4e7
--- /dev/null
+++ b/pyload/plugin/account/FileParadoxIn.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class FileParadoxIn(XFSAccount):
+ __name = "FileParadoxIn"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """FileParadox.in account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "fileparadox.in"
diff --git a/pyload/plugin/account/FilecloudIo.py b/pyload/plugin/account/FilecloudIo.py
new file mode 100644
index 000000000..c6638bb1f
--- /dev/null
+++ b/pyload/plugin/account/FilecloudIo.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class FilecloudIo(Account):
+ __name = "FilecloudIo"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """FilecloudIo account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ def loadAccountInfo(self, user, req):
+ # It looks like the first API request always fails, so we retry up to 5 times; it should work on the second try
+ for _i in xrange(5):
+ rep = req.load("https://secure.filecloud.io/api-fetch_apikey.api",
+ post={"username": user, "password": self.accounts[user]['password']})
+ rep = json_loads(rep)
+ if rep['status'] == 'ok':
+ break
+ elif rep['status'] == 'error' and rep['message'] == 'no such user or wrong password':
+ self.logError(_("Wrong username or password"))
+ return {"valid": False, "premium": False}
+ else:
+ return {"premium": False}
+
+ akey = rep['akey']
+ self.accounts[user]['akey'] = akey # Saved for hoster plugin
+ rep = req.load("http://api.filecloud.io/api-fetch_account_details.api",
+ post={"akey": akey})
+ rep = json_loads(rep)
+
+ if rep['is_premium'] == 1:
+ return {"validuntil": int(rep['premium_until']), "trafficleft": -1}
+ else:
+ return {"premium": False}
+
+
+ def login(self, user, data, req):
+ req.cj.setCookie("secure.filecloud.io", "lang", "en")
+ html = req.load('https://secure.filecloud.io/user-login.html')
+
+ if not hasattr(self, "form_data"):
+ self.form_data = {}
+
+ self.form_data['username'] = user
+ self.form_data['password'] = data['password']
+
+ html = req.load('https://secure.filecloud.io/user-login_p.html',
+ post=self.form_data,
+ multipart=True)
+
+ self.logged_in = True if "you have successfully logged in - filecloud.io" in html else False
+ self.form_data = {}
diff --git a/pyload/plugin/account/FilefactoryCom.py b/pyload/plugin/account/FilefactoryCom.py
new file mode 100644
index 000000000..d79875423
--- /dev/null
+++ b/pyload/plugin/account/FilefactoryCom.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+
+import re
+from time import mktime, strptime
+
+from pycurl import REFERER
+
+from pyload.plugin.Account import Account
+
+
+class FilefactoryCom(Account):
+ __name = "FilefactoryCom"
+ __type = "account"
+ __version = "0.14"
+
+ __description = """Filefactory.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ VALID_UNTIL_PATTERN = r'Premium valid until: <strong>(?P<d>\d{1,2})\w{1,2} (?P<m>\w{3}), (?P<y>\d{4})</strong>'
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load("http://www.filefactory.com/account/")
+
+ m = re.search(self.VALID_UNTIL_PATTERN, html)
+ if m:
+ premium = True
+ validuntil = re.sub(self.VALID_UNTIL_PATTERN, '\g<d> \g<m> \g<y>', m.group(0))
+ validuntil = mktime(strptime(validuntil, "%d %b %Y"))
+ else:
+ premium = False
+ validuntil = -1
+
+ return {"premium": premium, "trafficleft": -1, "validuntil": validuntil}
+
+
+ def login(self, user, data, req):
+ req.http.c.setopt(REFERER, "http://www.filefactory.com/member/login.php")
+
+ html = req.load("http://www.filefactory.com/member/signin.php", post={
+ "loginEmail": user,
+ "loginPassword": data['password'],
+ "Submit": "Sign In"})
+
+ if req.lastEffectiveURL != "http://www.filefactory.com/account/":
+ self.wrongPassword()
diff --git a/pyload/plugin/account/FilejungleCom.py b/pyload/plugin/account/FilejungleCom.py
new file mode 100644
index 000000000..5de4a244d
--- /dev/null
+++ b/pyload/plugin/account/FilejungleCom.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+
+import re
+from time import mktime, strptime
+
+from pyload.plugin.Account import Account
+
+
+class FilejungleCom(Account):
+ __name = "FilejungleCom"
+ __type = "account"
+ __version = "0.11"
+
+ __description = """Filejungle.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ login_timeout = 60
+
+ URL = "http://filejungle.com/"
+ TRAFFIC_LEFT_PATTERN = r'"/extend_premium\.php">Until (\d+ \w+ \d+)<br'
+ LOGIN_FAILED_PATTERN = r'<span htmlfor="loginUser(Name|Password)" generated="true" class="fail_info">'
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load(self.URL + "dashboard.php")
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ if m:
+ premium = True
+ validuntil = mktime(strptime(m.group(1), "%d %b %Y"))
+ else:
+ premium = False
+ validuntil = -1
+
+ return {"premium": premium, "trafficleft": -1, "validuntil": validuntil}
+
+
+ def login(self, user, data, req):
+ html = req.load(self.URL + "login.php", post={
+ "loginUserName": user,
+ "loginUserPassword": data['password'],
+ "loginFormSubmit": "Login",
+ "recaptcha_challenge_field": "",
+ "recaptcha_response_field": "",
+ "recaptcha_shortencode_field": ""})
+
+ if re.search(self.LOGIN_FAILED_PATTERN, html):
+ self.wrongPassword()
diff --git a/pyload/plugin/account/FileomCom.py b/pyload/plugin/account/FileomCom.py
new file mode 100644
index 000000000..2868e49e6
--- /dev/null
+++ b/pyload/plugin/account/FileomCom.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class FileomCom(XFSAccount):
+ __name = "FileomCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Fileom.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "fileom.com"
diff --git a/pyload/plugin/account/FilerNet.py b/pyload/plugin/account/FilerNet.py
new file mode 100644
index 000000000..9420a6e8e
--- /dev/null
+++ b/pyload/plugin/account/FilerNet.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+
+import re
+import time
+
+from pyload.plugin.Account import Account
+
+
+class FilerNet(Account):
+ __name = "FilerNet"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Filer.net account plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ TOKEN_PATTERN = r'_csrf_token" value="([^"]+)" />'
+ WALID_UNTIL_PATTERN = r'Der Premium-Zugang ist gültig bis (.+)\.\s*</td>'
+ TRAFFIC_PATTERN = r'Traffic</th>\s*<td>([^<]+)</td>'
+ FREE_PATTERN = r'Account Status</th>\s*<td>\s*Free'
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load("https://filer.net/profile")
+
+ # Free user
+ if re.search(self.FREE_PATTERN, html):
+ return {"premium": False, "validuntil": None, "trafficleft": None}
+
+ until = re.search(self.WALID_UNTIL_PATTERN, html)
+ traffic = re.search(self.TRAFFIC_PATTERN, html)
+ if until and traffic:
+ validuntil = int(time.mktime(time.strptime(until.group(1), "%d.%m.%Y %H:%M:%S")))
+ trafficleft = self.parseTraffic(traffic.group(1))
+ return {"premium": True, "validuntil": validuntil, "trafficleft": trafficleft}
+ else:
+ self.logError(_("Unable to retrieve account information"))
+ return {"premium": False, "validuntil": None, "trafficleft": None}
+
+
+ def login(self, user, data, req):
+ html = req.load("https://filer.net/login")
+ token = re.search(self.TOKEN_PATTERN, html).group(1)
+ html = req.load("https://filer.net/login_check",
+ post={"_username": user, "_password": data['password'],
+ "_remember_me": "on", "_csrf_token": token, "_target_path": "https://filer.net/"})
+ if 'Logout' not in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/FilerioCom.py b/pyload/plugin/account/FilerioCom.py
new file mode 100644
index 000000000..d222fa78b
--- /dev/null
+++ b/pyload/plugin/account/FilerioCom.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class FilerioCom(XFSAccount):
+ __name = "FilerioCom"
+ __type = "account"
+ __version = "0.03"
+
+ __description = """FileRio.in account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "filerio.in"
diff --git a/pyload/plugin/account/FilesMailRu.py b/pyload/plugin/account/FilesMailRu.py
new file mode 100644
index 000000000..41433687f
--- /dev/null
+++ b/pyload/plugin/account/FilesMailRu.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+
+
+class FilesMailRu(Account):
+ __name = "FilesMailRu"
+ __type = "account"
+ __version = "0.10"
+
+ __description = """Filesmail.ru account plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org")]
+
+
+ def loadAccountInfo(self, user, req):
+ return {"validuntil": None, "trafficleft": None}
+
+
+ def login(self, user, data, req):
+ user, domain = user.split("@")
+
+ page = req.load("http://swa.mail.ru/cgi-bin/auth", None,
+ {"Domain": domain, "Login": user, "Password": data['password'],
+ "Page": "http://files.mail.ru/"}, cookies=True)
+
+ if "НеверМПе ОЌя пПльзПвателя ОлО парПль" in page: # @TODO seems not to work
+ self.wrongPassword()
diff --git a/pyload/plugin/account/FileserveCom.py b/pyload/plugin/account/FileserveCom.py
new file mode 100644
index 000000000..d0ef0cd62
--- /dev/null
+++ b/pyload/plugin/account/FileserveCom.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+
+from time import mktime, strptime
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class FileserveCom(Account):
+ __name = "FileserveCom"
+ __type = "account"
+ __version = "0.20"
+
+ __description = """Fileserve.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+ def loadAccountInfo(self, user, req):
+ data = self.getAccountData(user)
+
+ page = req.load("http://app.fileserve.com/api/login/", post={"username": user, "password": data['password'],
+ "submit": "Submit+Query"})
+ res = json_loads(page)
+
+ if res['type'] == "premium":
+ validuntil = mktime(strptime(res['expireTime'], "%Y-%m-%d %H:%M:%S"))
+ return {"trafficleft": res['traffic'], "validuntil": validuntil}
+ else:
+ return {"premium": False, "trafficleft": None, "validuntil": None}
+
+
+ def login(self, user, data, req):
+ page = req.load("http://app.fileserve.com/api/login/", post={"username": user, "password": data['password'],
+ "submit": "Submit+Query"})
+ res = json_loads(page)
+
+ if not res['type']:
+ self.wrongPassword()
+
+ # login at the fileserve page
+ req.load("http://www.fileserve.com/login.php",
+ post={"loginUserName": user, "loginUserPassword": data['password'], "autoLogin": "checked",
+ "loginFormSubmit": "Login"})
diff --git a/pyload/plugin/account/FourSharedCom.py b/pyload/plugin/account/FourSharedCom.py
new file mode 100644
index 000000000..84c062e84
--- /dev/null
+++ b/pyload/plugin/account/FourSharedCom.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class FourSharedCom(Account):
+ __name = "FourSharedCom"
+ __type = "account"
+ __version = "0.03"
+
+ __description = """FourShared.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ def loadAccountInfo(self, user, req):
+ # Free mode only for now
+ return {"premium": False}
+
+
+ def login(self, user, data, req):
+ req.cj.setCookie("4shared.com", "4langcookie", "en")
+ res = req.load('http://www.4shared.com/web/login',
+ post={'login': user,
+ 'password': data['password'],
+ 'remember': "on",
+ '_remember': "on",
+ 'returnTo': "http://www.4shared.com/account/home.jsp"})
+
+ if 'Please log in to access your 4shared account' in res:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/FreakshareCom.py b/pyload/plugin/account/FreakshareCom.py
new file mode 100644
index 000000000..76d094a8b
--- /dev/null
+++ b/pyload/plugin/account/FreakshareCom.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import strptime, mktime
+
+from pyload.plugin.Account import Account
+
+
+class FreakshareCom(Account):
+ __name = "FreakshareCom"
+ __type = "account"
+ __version = "0.11"
+
+ __description = """Freakshare.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org")]
+
+
+ def loadAccountInfo(self, user, req):
+ page = req.load("http://freakshare.com/")
+
+ validuntil = r'ltig bis:</td>\s*<td><b>([\d.:-]+)</b></td>'
+ validuntil = re.search(validuntil, page, re.M)
+ validuntil = validuntil.group(1).strip()
+ validuntil = mktime(strptime(validuntil, "%d.%m.%Y - %H:%M"))
+
+ traffic = r'Traffic verbleibend:</td>\s*<td>([^<]+)'
+ traffic = re.search(traffic, page, re.M)
+ traffic = traffic.group(1).strip()
+ traffic = self.parseTraffic(traffic)
+
+ return {"validuntil": validuntil, "trafficleft": traffic}
+
+
+ def login(self, user, data, req):
+ req.load("http://freakshare.com/index.php?language=EN")
+
+ page = req.load("http://freakshare.com/login.html", None,
+ {"submit": "Login", "user": user, "pass": data['password']}, cookies=True)
+
+ if ">Wrong Username or Password" in page:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/FreeWayMe.py b/pyload/plugin/account/FreeWayMe.py
new file mode 100644
index 000000000..90c504c70
--- /dev/null
+++ b/pyload/plugin/account/FreeWayMe.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class FreeWayMe(Account):
+ __name = "FreeWayMe"
+ __type = "account"
+ __version = "0.11"
+
+ __description = """FreeWayMe account plugin"""
+ __license = "GPLv3"
+ __authors = [("Nicolas Giese", "james@free-way.me")]
+
+
+ def loadAccountInfo(self, user, req):
+ status = self.getAccountStatus(user, req)
+ if not status:
+ return False
+ self.logDebug(status)
+
+ account_info = {"validuntil": -1, "premium": False}
+ if status['premium'] == "Free":
+ account_info['trafficleft'] = int(status['guthaben']) * 1024
+ elif status['premium'] == "Spender":
+ account_info['trafficleft'] = -1
+ elif status['premium'] == "Flatrate":
+ account_info = {"validuntil": int(status['Flatrate']),
+ "trafficleft": -1,
+ "premium": True}
+
+ return account_info
+
+
+ def getpw(self, user):
+ return self.accounts[user]['password']
+
+
+ def login(self, user, data, req):
+ status = self.getAccountStatus(user, req)
+
+ # Check if user and password are valid
+ if not status:
+ self.wrongPassword()
+
+
+ def getAccountStatus(self, user, req):
+ answer = req.load("https://www.free-way.me/ajax/jd.php",
+ get={"id": 4, "user": user, "pass": self.accounts[user]['password']})
+ self.logDebug("Login: %s" % answer)
+ if answer == "Invalid login":
+ self.wrongPassword()
+ return False
+ return json_loads(answer)
diff --git a/pyload/plugin/account/FshareVn.py b/pyload/plugin/account/FshareVn.py
new file mode 100644
index 000000000..130892490
--- /dev/null
+++ b/pyload/plugin/account/FshareVn.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+
+from time import mktime, strptime
+from pycurl import REFERER
+import re
+
+from pyload.plugin.Account import Account
+
+
+class FshareVn(Account):
+ __name = "FshareVn"
+ __type = "account"
+ __version = "0.07"
+
+ __description = """Fshare.vn account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ VALID_UNTIL_PATTERN = ur'<dt>Thời hạn dùng:</dt>\s*<dd>([^<]+)</dd>'
+ LIFETIME_PATTERN = ur'<dt>Lần đăng nhập trước:</dt>\s*<dd>[^<]+</dd>'
+ TRAFFIC_LEFT_PATTERN = ur'<dt>Tổng Dung Lượng Tài Khoản</dt>\s*<dd[^>]*>([\d.]+) ([kKMG])B</dd>'
+ DIRECT_DOWNLOAD_PATTERN = ur'<input type="checkbox"\s*([^=>]*)[^>]*/>Kích hoạt download trực tiếp</dt>'
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load("http://www.fshare.vn/account_info.php", decode=True)
+
+ if re.search(self.LIFETIME_PATTERN, html):
+ self.logDebug("Lifetime membership detected")
+ trafficleft = self.getTrafficLeft(html)
+ return {"validuntil": -1, "trafficleft": trafficleft, "premium": True}
+
+ m = re.search(self.VALID_UNTIL_PATTERN, html)
+ if m:
+ premium = True
+ validuntil = mktime(strptime(m.group(1), '%I:%M:%S %p %d-%m-%Y'))
+ trafficleft = self.getTrafficLeft(html)
+ else:
+ premium = False
+ validuntil = None
+ trafficleft = None
+
+ return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
+
+
+ def login(self, user, data, req):
+ req.http.c.setopt(REFERER, "https://www.fshare.vn/login.php")
+
+ html = req.load('https://www.fshare.vn/login.php', post={
+ "login_password": data['password'],
+ "login_useremail": user,
+ "url_refe": "http://www.fshare.vn/index.php"
+ }, referer=True, decode=True)
+
+ if not re.search(r'<img\s+alt="VIP"', html):
+ self.wrongPassword()
+
+
+ def getTrafficLeft(self, html):
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ return float(m.group(1)) * 1024 ** {'k': 0, 'K': 0, 'M': 1, 'G': 2}[m.group(2)] if m else 0
diff --git a/pyload/plugin/account/Ftp.py b/pyload/plugin/account/Ftp.py
new file mode 100644
index 000000000..c7983b0c2
--- /dev/null
+++ b/pyload/plugin/account/Ftp.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+
+
+class Ftp(Account):
+ __name = "Ftp"
+ __type = "account"
+ __version = "0.01"
+
+ __description = """Ftp dummy account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ login_timeout = -1 #: Unlimited
+ info_threshold = -1 #: Unlimited
diff --git a/pyload/plugin/account/HellshareCz.py b/pyload/plugin/account/HellshareCz.py
new file mode 100644
index 000000000..08d45bdda
--- /dev/null
+++ b/pyload/plugin/account/HellshareCz.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+
+import re
+import time
+
+from pyload.plugin.Account import Account
+
+
+class HellshareCz(Account):
+ __name = "HellshareCz"
+ __type = "account"
+ __version = "0.14"
+
+ __description = """Hellshare.cz account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ CREDIT_LEFT_PATTERN = r'<div class="credit-link">\s*<table>\s*<tr>\s*<th>(\d+|\d\d\.\d\d\.)</th>'
+
+
+ def loadAccountInfo(self, user, req):
+ self.relogin(user)
+ html = req.load("http://www.hellshare.com/")
+
+ m = re.search(self.CREDIT_LEFT_PATTERN, html)
+ if m is None:
+ trafficleft = None
+ validuntil = None
+ premium = False
+ else:
+ credit = m.group(1)
+ premium = True
+ try:
+ if "." in credit:
+ #Time-based account
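+                    # Date given as "dd.mm." without a year: assume the current year,
+                    # or the next one if that day has already passed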
+ vt = [int(x) for x in credit.split('.')[:2]]
+ lt = time.localtime()
+ year = lt.tm_year + int(vt[1] < lt.tm_mon or (vt[1] == lt.tm_mon and vt[0] < lt.tm_mday))
+ validuntil = time.mktime(time.strptime("%s%d 23:59:59" % (credit, year), "%d.%m.%Y %H:%M:%S"))
+ trafficleft = -1
+ else:
+ #Traffic-based account
+ trafficleft = int(credit) * 1024
+ validuntil = -1
+ except Exception, e:
+ self.logError(_("Unable to parse credit info"), e)
+ validuntil = -1
+ trafficleft = -1
+
+ return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
+
+
+ def login(self, user, data, req):
+ html = req.load('http://www.hellshare.com/')
+ if req.lastEffectiveURL != 'http://www.hellshare.com/':
+ #Switch to English
+ self.logDebug("Switch lang - URL: %s" % req.lastEffectiveURL)
+ json = req.load("%s?do=locRouter-show" % req.lastEffectiveURL)
+ hash = re.search(r"(\-\-[0-9a-f]+\-)", json).group(1)
+ self.logDebug("Switch lang - HASH: %s" % hash)
+ html = req.load('http://www.hellshare.com/%s/' % hash)
+
+ if re.search(self.CREDIT_LEFT_PATTERN, html):
+ self.logDebug("Already logged in")
+ return
+
+ html = req.load('http://www.hellshare.com/login?do=loginForm-submit', post={
+ "login": "Log in",
+ "password": data['password'],
+ "username": user,
+ "perm_login": "on"
+ })
+
+ if "<p>You input a wrong user name or wrong password</p>" in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/Http.py b/pyload/plugin/account/Http.py
new file mode 100644
index 000000000..aacdbf89f
--- /dev/null
+++ b/pyload/plugin/account/Http.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+
+
+class Http(Account):
+ __name = "Http"
+ __type = "account"
+ __version = "0.01"
+
+ __description = """Http dummy account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ login_timeout = -1 #: Unlimited
+ info_threshold = -1 #: Unlimited
diff --git a/pyload/plugin/account/HugefilesNet.py b/pyload/plugin/account/HugefilesNet.py
new file mode 100644
index 000000000..b4cd6f8c4
--- /dev/null
+++ b/pyload/plugin/account/HugefilesNet.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class HugefilesNet(XFSAccount):
+ __name = "HugefilesNet"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Hugefiles.net account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "hugefiles.net"
diff --git a/pyload/plugin/account/HundredEightyUploadCom.py b/pyload/plugin/account/HundredEightyUploadCom.py
new file mode 100644
index 000000000..79af089ca
--- /dev/null
+++ b/pyload/plugin/account/HundredEightyUploadCom.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class HundredEightyUploadCom(XFSAccount):
+ __name = "HundredEightyUploadCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """180upload.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "180upload.com"
diff --git a/pyload/plugin/account/JunocloudMe.py b/pyload/plugin/account/JunocloudMe.py
new file mode 100644
index 000000000..75307c6dd
--- /dev/null
+++ b/pyload/plugin/account/JunocloudMe.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class JunocloudMe(XFSAccount):
+ __name = "JunocloudMe"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Junocloud.me account plugin"""
+ __license = "GPLv3"
+ __authors = [("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "junocloud.me"
diff --git a/pyload/plugin/account/Keep2shareCc.py b/pyload/plugin/account/Keep2shareCc.py
new file mode 100644
index 000000000..1086d893f
--- /dev/null
+++ b/pyload/plugin/account/Keep2shareCc.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import gmtime, mktime, strptime
+
+from pyload.plugin.Account import Account
+
+
+class Keep2shareCc(Account):
+ __name = "Keep2shareCc"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Keep2share.cc account plugin"""
+ __license = "GPLv3"
+ __authors = [("aeronaut", "aeronaut@pianoguy.de")]
+
+
+ VALID_UNTIL_PATTERN = r'Premium expires: <b>(.+?)</b>'
+ TRAFFIC_LEFT_PATTERN = r'Available traffic \(today\):<b><a href="/user/statistic.html">(.+?)</a>'
+
+ LOGIN_FAIL_PATTERN = r'Please fix the following input errors'
+
+
+ def loadAccountInfo(self, user, req):
+ validuntil = None
+ trafficleft = None
+ premium = None
+
+ html = req.load("http://keep2share.cc/site/profile.html", decode=True)
+
+ m = re.search(self.VALID_UNTIL_PATTERN, html)
+ if m:
+ expiredate = m.group(1).strip()
+ self.logDebug("Expire date: " + expiredate)
+
+ try:
+ validuntil = mktime(strptime(expiredate, "%Y.%m.%d"))
+
+ except Exception, e:
+ self.logError(e)
+
+ else:
+ if validuntil > mktime(gmtime()):
+ premium = True
+ else:
+ premium = False
+ validuntil = None
+
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ if m:
+ try:
+ trafficleft = self.parseTraffic(m.group(1))
+
+ except Exception, e:
+ self.logError(e)
+
+ return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium}
+
+
+ def login(self, user, data, req):
+ req.cj.setCookie("keep2share.cc", "lang", "en")
+
+ html = req.load("http://keep2share.cc/login.html",
+ post={'LoginForm[username]': user, 'LoginForm[password]': data['password']})
+
+ if re.search(self.LOGIN_FAIL_PATTERN, html):
+ self.wrongPassword()
diff --git a/pyload/plugin/account/LetitbitNet.py b/pyload/plugin/account/LetitbitNet.py
new file mode 100644
index 000000000..b014268ae
--- /dev/null
+++ b/pyload/plugin/account/LetitbitNet.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+# from pyload.utils import json_loads, json_dumps
+
+
+class LetitbitNet(Account):
+ __name = "LetitbitNet"
+ __type = "account"
+ __version = "0.01"
+
+ __description = """Letitbit.net account plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ def loadAccountInfo(self, user, req):
+ ## DISABLED BECAUSE IT GET 'key exausted' EVEN IF VALID ##
+ # api_key = self.accounts[user]['password']
+ # json_data = [api_key, ['key/info']]
+ # api_rep = req.load('http://api.letitbit.net/json', post={'r': json_dumps(json_data)})
+ # self.logDebug("API Key Info: " + api_rep)
+ # api_rep = json_loads(api_rep)
+ #
+ # if api_rep['status'] == 'FAIL':
+ # self.logWarning(api_rep['data'])
+ # return {'valid': False, 'premium': False}
+
+ return {"premium": True}
+
+
+ def login(self, user, data, req):
+ # API_KEY is the username and the PREMIUM_KEY is the password
+ self.logInfo(_("You must use your API KEY as username and the PREMIUM KEY as password"))
diff --git a/pyload/plugin/account/LinestorageCom.py b/pyload/plugin/account/LinestorageCom.py
new file mode 100644
index 000000000..353c1e811
--- /dev/null
+++ b/pyload/plugin/account/LinestorageCom.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class LinestorageCom(XFSAccount):
+ __name = "LinestorageCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Linestorage.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "linestorage.com"
diff --git a/pyload/plugin/account/LinksnappyCom.py b/pyload/plugin/account/LinksnappyCom.py
new file mode 100644
index 000000000..a510a59ea
--- /dev/null
+++ b/pyload/plugin/account/LinksnappyCom.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+
+from hashlib import md5
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class LinksnappyCom(Account):
+ __name = "LinksnappyCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Linksnappy.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ def loadAccountInfo(self, user, req):
+ data = self.getAccountData(user)
+ r = req.load('http://gen.linksnappy.com/lseAPI.php',
+ get={'act': 'USERDETAILS', 'username': user, 'password': md5(data['password']).hexdigest()})
+ self.logDebug("JSON data: " + r)
+ j = json_loads(r)
+
+ if j['error']:
+ return {"premium": False}
+
+ validuntil = j['return']['expire']
+ if validuntil == 'lifetime':
+ validuntil = -1
+ elif validuntil == 'expired':
+ return {"premium": False}
+ else:
+ validuntil = float(validuntil)
+
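+        # A missing or non-numeric trafficleft field apparently means unlimited traffic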
+ if 'trafficleft' not in j['return'] or isinstance(j['return']['trafficleft'], str):
+ trafficleft = -1
+ else:
+ trafficleft = int(j['return']['trafficleft']) * 1024
+
+ return {"premium": True, "validuntil": validuntil, "trafficleft": trafficleft}
+
+
+ def login(self, user, data, req):
+ r = req.load('http://gen.linksnappy.com/lseAPI.php',
+ get={'act': 'USERDETAILS', 'username': user, 'password': md5(data['password']).hexdigest()})
+
+ if 'Invalid Account Details' in r:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/LomafileCom.py b/pyload/plugin/account/LomafileCom.py
new file mode 100644
index 000000000..c55c9538d
--- /dev/null
+++ b/pyload/plugin/account/LomafileCom.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class LomafileCom(XFSAccount):
+ __name = "LomafileCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Lomafile.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "lomafile.com"
diff --git a/pyload/plugin/account/MegaDebridEu.py b/pyload/plugin/account/MegaDebridEu.py
new file mode 100644
index 000000000..67af94541
--- /dev/null
+++ b/pyload/plugin/account/MegaDebridEu.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class MegaDebridEu(Account):
+ __name = "MegaDebridEu"
+ __type = "account"
+ __version = "0.20"
+
+ __description = """mega-debrid.eu account plugin"""
+ __license = "GPLv3"
+ __authors = [("D.Ducatel", "dducatel@je-geek.fr")]
+
+
+ # Define the base URL of MegaDebrid api
+ API_URL = "https://www.mega-debrid.eu/api.php"
+
+
+ def loadAccountInfo(self, user, req):
+ data = self.getAccountData(user)
+ jsonResponse = req.load(self.API_URL,
+ get={'action': 'connectUser', 'login': user, 'password': data['password']})
+ res = json_loads(jsonResponse)
+
+ if res['response_code'] == "ok":
+ return {"premium": True, "validuntil": float(res['vip_end']), "status": True}
+ else:
+ self.logError(res)
+ return {"status": False, "premium": False}
+
+
+ def login(self, user, data, req):
+ jsonResponse = req.load(self.API_URL,
+ get={'action': 'connectUser', 'login': user, 'password': data['password']})
+ res = json_loads(jsonResponse)
+ if res['response_code'] != "ok":
+ self.wrongPassword()
diff --git a/pyload/plugin/account/MegaRapidCz.py b/pyload/plugin/account/MegaRapidCz.py
new file mode 100644
index 000000000..9e9f5cb02
--- /dev/null
+++ b/pyload/plugin/account/MegaRapidCz.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import mktime, strptime
+from pyload.plugin.Account import Account
+
+
+class MegaRapidCz(Account):
+ __name = "MegaRapidCz"
+ __type = "account"
+ __version = "0.34"
+
+ __description = """MegaRapid.cz account plugin"""
+ __license = "GPLv3"
+ __authors = [("MikyWoW", "mikywow@seznam.cz"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ login_timeout = 60
+
+ LIMITDL_PATTERN = ur'<td>Max. počet paralelních stahování: </td><td>(\d+)'
+ VALID_UNTIL_PATTERN = ur'<td>Paušální stahování aktivní. Vyprší </td><td><strong>(.*?)</strong>'
+ TRAFFIC_LEFT_PATTERN = r'<tr><td>Kredit</td><td>(.*?) GiB'
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load("http://megarapid.cz/mujucet/", decode=True)
+
+ m = re.search(self.LIMITDL_PATTERN, html)
+ if m:
+ data = self.getAccountData(user)
+ data['options']['limitDL'] = [int(m.group(1))]
+
+ m = re.search(self.VALID_UNTIL_PATTERN, html)
+ if m:
+ validuntil = mktime(strptime(m.group(1), "%d.%m.%Y - %H:%M"))
+ return {"premium": True, "trafficleft": -1, "validuntil": validuntil}
+
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ if m:
+ trafficleft = float(m.group(1)) * (1 << 20)
+ return {"premium": True, "trafficleft": trafficleft, "validuntil": -1}
+
+ return {"premium": False, "trafficleft": None, "validuntil": None}
+
+
+ def login(self, user, data, req):
+ htm = req.load("http://megarapid.cz/prihlaseni/")
+ if "Heslo:" in htm:
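+            # Cut the 32-character value of the hidden "hash" input out of the login form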
+ start = htm.index('id="inp_hash" name="hash" value="')
+ htm = htm[start + 33:]
+ hashes = htm[0:32]
+ htm = req.load("http://megarapid.cz/prihlaseni/",
+ post={"hash": hashes,
+ "login": user,
+ "pass1": data['password'],
+ "remember": 0,
+ "sbmt": u"Přihlásit"})
diff --git a/pyload/plugin/account/MegasharesCom.py b/pyload/plugin/account/MegasharesCom.py
new file mode 100644
index 000000000..95df5ad2d
--- /dev/null
+++ b/pyload/plugin/account/MegasharesCom.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+
+import re
+from time import mktime, strptime
+
+from pyload.plugin.Account import Account
+
+
+class MegasharesCom(Account):
+ __name = "MegasharesCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Megashares.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ VALID_UNTIL_PATTERN = r'<p class="premium_info_box">Period Ends: (\w{3} \d{1,2}, \d{4})</p>'
+
+
+ def loadAccountInfo(self, user, req):
+ #self.relogin(user)
+ html = req.load("http://d01.megashares.com/myms.php", decode=True)
+
+ premium = False if '>Premium Upgrade<' in html else True
+
+ validuntil = trafficleft = -1
+ try:
+ timestr = re.search(self.VALID_UNTIL_PATTERN, html).group(1)
+ self.logDebug(timestr)
+ validuntil = mktime(strptime(timestr, "%b %d, %Y"))
+ except Exception, e:
+ self.logError(e)
+
+ return {"validuntil": validuntil, "trafficleft": -1, "premium": premium}
+
+
+ def login(self, user, data, req):
+ html = req.load('http://d01.megashares.com/myms_login.php', post={
+ "httpref": "",
+ "myms_login": "Login",
+ "mymslogin_name": user,
+ "mymspassword": data['password']
+ }, decode=True)
+
+ if not '<span class="b ml">%s</span>' % user in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/MovReelCom.py b/pyload/plugin/account/MovReelCom.py
new file mode 100644
index 000000000..9eabd0a6d
--- /dev/null
+++ b/pyload/plugin/account/MovReelCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class MovReelCom(XFSAccount):
+ __name = "MovReelCom"
+ __type = "account"
+ __version = "0.03"
+
+ __description = """Movreel.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
+
+
+ login_timeout = 60
+ info_threshold = 30
+
+ HOSTER_DOMAIN = "movreel.com"
diff --git a/pyload/plugin/account/MultishareCz.py b/pyload/plugin/account/MultishareCz.py
new file mode 100644
index 000000000..2f9b285b2
--- /dev/null
+++ b/pyload/plugin/account/MultishareCz.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Account import Account
+
+
+class MultishareCz(Account):
+ __name = "MultishareCz"
+ __type = "account"
+ __version = "0.03"
+
+ __description = """Multishare.cz account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ TRAFFIC_LEFT_PATTERN = r'<span class="profil-zvyrazneni">Kredit:</span>\s*<strong>(?P<S>[\d.,]+)&nbsp;(?P<U>[\w^_]+)</strong>'
+ ACCOUNT_INFO_PATTERN = r'<input type="hidden" id="(u_ID|u_hash)" name="[^"]*" value="([^"]+)">'
+
+
+ def loadAccountInfo(self, user, req):
+ #self.relogin(user)
+ html = req.load("http://www.multishare.cz/profil/", decode=True)
+
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ trafficleft = self.parseTraffic(m.group('S'), m.group('U')) if m else 0
+ self.premium = True if trafficleft else False
+
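+        # The homepage carries hidden u_ID / u_hash inputs; merge them into the returned account info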
+ html = req.load("http://www.multishare.cz/", decode=True)
+ mms_info = dict(re.findall(self.ACCOUNT_INFO_PATTERN, html))
+
+ return dict(mms_info, **{"validuntil": -1, "trafficleft": trafficleft})
+
+
+ def login(self, user, data, req):
+ html = req.load('http://www.multishare.cz/html/prihlaseni_process.php', post={
+ "akce": "Přihlásit",
+ "heslo": data['password'],
+ "jmeno": user
+ }, decode=True)
+
+ if '<div class="akce-chyba akce">' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/MyfastfileCom.py b/pyload/plugin/account/MyfastfileCom.py
new file mode 100644
index 000000000..4e85c1a34
--- /dev/null
+++ b/pyload/plugin/account/MyfastfileCom.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+
+from time import time
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class MyfastfileCom(Account):
+ __name = "MyfastfileCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Myfastfile.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ def loadAccountInfo(self, user, req):
+ if 'days_left' in self.json_data:
+ validuntil = int(time() + self.json_data['days_left'] * 24 * 60 * 60)
+ return {"premium": True, "validuntil": validuntil, "trafficleft": -1}
+ else:
+ self.logError(_("Unable to get account information"))
+
+
+ def login(self, user, data, req):
+ # Password to use is the API-Password written in http://myfastfile.com/myaccount
+ html = req.load("http://myfastfile.com/api.php",
+ get={"user": user, "pass": data['password']})
+ self.logDebug("JSON data: " + html)
+ self.json_data = json_loads(html)
+ if self.json_data['status'] != 'ok':
+ self.logError(_('Invalid login. The password to use is the API-Password you find in your "My Account" page'))
+ self.wrongPassword()
diff --git a/pyload/plugin/account/NetloadIn.py b/pyload/plugin/account/NetloadIn.py
new file mode 100644
index 000000000..4cfc205ca
--- /dev/null
+++ b/pyload/plugin/account/NetloadIn.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+
+import re
+from time import time
+
+from pyload.plugin.Account import Account
+
+
+class NetloadIn(Account):
+ __name = "NetloadIn"
+ __type = "account"
+ __version = "0.22"
+
+ __description = """Netload.in account plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("CryNickSystems", "webmaster@pcProfil.de")]
+
+
+ def loadAccountInfo(self, user, req):
+ page = req.load("http://netload.in/index.php", get={'id': 2, 'lang': "de"})
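+        # Remaining premium time is shown on the German page as "<n> Tag(e), <n> Stunden"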
+ left = r'>(\d+) (Tag|Tage), (\d+) Stunden<'
+ left = re.search(left, page)
+ if left:
+ validuntil = time() + int(left.group(1)) * 24 * 60 * 60 + int(left.group(3)) * 60 * 60
+ trafficleft = -1
+ premium = True
+ else:
+ validuntil = None
+ premium = False
+ trafficleft = None
+ return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
+
+
+ def login(self, user, data, req):
+ page = req.load("http://netload.in/index.php", None,
+ {"txtuser": user, "txtpass": data['password'], "txtcheck": "login", "txtlogin": "Login"},
+ cookies=True)
+ if "password or it might be invalid!" in page:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/NosuploadCom.py b/pyload/plugin/account/NosuploadCom.py
new file mode 100644
index 000000000..10f9007a6
--- /dev/null
+++ b/pyload/plugin/account/NosuploadCom.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class NosuploadCom(XFSAccount):
+ __name = "NosuploadCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Nosupload.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "nosupload.com"
diff --git a/pyload/plugin/account/NovafileCom.py b/pyload/plugin/account/NovafileCom.py
new file mode 100644
index 000000000..8400cc267
--- /dev/null
+++ b/pyload/plugin/account/NovafileCom.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class NovafileCom(XFSAccount):
+ __name = "NovafileCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Novafile.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "novafile.com"
diff --git a/pyload/plugin/account/NowVideoAt.py b/pyload/plugin/account/NowVideoAt.py
new file mode 100644
index 000000000..84cec8974
--- /dev/null
+++ b/pyload/plugin/account/NowVideoAt.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import gmtime, mktime, strptime
+
+from pyload.plugin.Account import Account
+
+
+class NowVideoAt(Account):
+ __name = "NowVideoAt"
+ __type = "account"
+ __version = "0.01"
+
+ __description = """NowVideo.at account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ VALID_UNTIL_PATTERN = r'>Your premium membership expires on: (.+?)<'
+
+
+ def loadAccountInfo(self, user, req):
+ validuntil = None
+ trafficleft = -1
+ premium = None
+
+ html = req.load("http://www.nowvideo.at/premium.php")
+
+ m = re.search(self.VALID_UNTIL_PATTERN, html)
+ if m:
+ expiredate = m.group(1).strip()
+ self.logDebug("Expire date: " + expiredate)
+
+ try:
+ validuntil = mktime(strptime(expiredate, "%Y-%b-%d"))
+
+ except Exception, e:
+ self.logError(e)
+
+ else:
+ if validuntil > mktime(gmtime()):
+ premium = True
+ else:
+ premium = False
+ validuntil = -1
+
+ return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
+
+
+ def login(self, user, data, req):
+ html = req.load("http://www.nowvideo.at/login.php",
+ post={'user': user, 'pass': data['password']})
+
+        if ">Invalid login details" in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/OboomCom.py b/pyload/plugin/account/OboomCom.py
new file mode 100644
index 000000000..cfb4e3e7e
--- /dev/null
+++ b/pyload/plugin/account/OboomCom.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+
+import time
+
+from beaker.crypto.pbkdf2 import PBKDF2
+
+from pyload.utils import json_loads
+from pyload.plugin.Account import Account
+
+
+class OboomCom(Account):
+ __name = "OboomCom"
+ __type = "account"
+ __version = "0.21"
+
+ __description = """Oboom.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("stanley", "stanley.foerster@gmail.com")]
+
+
+ def loadAccountData(self, user, req):
+ passwd = self.getAccountData(user)['password']
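+        # oboom.com expects the password as a PBKDF2 hash (1000 rounds, 16 bytes hex), salted with the reversed password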
+ salt = passwd[::-1]
+ pbkdf2 = PBKDF2(passwd, salt, 1000).hexread(16)
+ result = json_loads(req.load("https://www.oboom.com/1/login", get={"auth": user, "pass": pbkdf2}))
+ if not result[0] == 200:
+ self.logWarning(_("Failed to log in: %s") % result[1])
+ self.wrongPassword()
+ return result[1]
+
+
+ def loadAccountInfo(self, name, req):
+ accountData = self.loadAccountData(name, req)
+
+ userData = accountData['user']
+
+ if userData['premium'] == "null":
+ premium = False
+ else:
+ premium = True
+
+ if userData['premium_unix'] == "null":
+ validUntil = -1
+ else:
+ validUntil = int(userData['premium_unix'])
+
+ traffic = userData['traffic']
+
+ trafficLeft = traffic['current']
+ maxTraffic = traffic['max']
+
+ session = accountData['session']
+
+ return {'premium' : premium,
+ 'validuntil' : validUntil,
+ 'trafficleft': trafficLeft / 1024, #@TODO: Remove / 1024 in 0.4.10
+ 'maxtraffic' : maxTraffic / 1024, #@TODO: Remove / 1024 in 0.4.10
+ 'session' : session}
+
+
+ def login(self, user, data, req):
+ self.loadAccountData(user, req)
diff --git a/pyload/plugin/account/OneFichierCom.py b/pyload/plugin/account/OneFichierCom.py
new file mode 100644
index 000000000..7ecd5769e
--- /dev/null
+++ b/pyload/plugin/account/OneFichierCom.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import strptime, mktime
+
+from pycurl import REFERER
+
+from pyload.plugin.Account import Account
+
+
+class OneFichierCom(Account):
+ __name = "OneFichierCom"
+ __type = "account"
+ __version = "0.11"
+
+ __description = """1fichier.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Elrick69", "elrick69[AT]rocketmail[DOT]com"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ VALID_UNTIL_PATTERN = r'Your Premium Status will end the (\d+/\d+/\d+)'
+
+
+ def loadAccountInfo(self, user, req):
+ validuntil = None
+ trafficleft = -1
+ premium = None
+
+ html = req.load("https://1fichier.com/console/abo.pl")
+
+ m = re.search(self.VALID_UNTIL_PATTERN, html)
+ if m:
+ expiredate = m.group(1)
+ self.logDebug("Expire date: " + expiredate)
+
+ try:
+ validuntil = mktime(strptime(expiredate, "%d/%m/%Y"))
+ except Exception, e:
+ self.logError(e)
+ else:
+ premium = True
+
+ return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium or False}
+
+
+ def login(self, user, data, req):
+ req.http.c.setopt(REFERER, "https://1fichier.com/login.pl?lg=en")
+
+ html = req.load("https://1fichier.com/login.pl?lg=en",
+ post={'mail': user, 'pass': data['password'], 'It': "on", 'purge': "off", 'valider': "Send"})
+
+ if '>Invalid email address' in html or '>Invalid password' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/OverLoadMe.py b/pyload/plugin/account/OverLoadMe.py
new file mode 100644
index 000000000..55dbfd1bb
--- /dev/null
+++ b/pyload/plugin/account/OverLoadMe.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class OverLoadMe(Account):
+ __name = "OverLoadMe"
+ __type = "account"
+ __version = "0.01"
+
+ __description = """Over-Load.me account plugin"""
+ __license = "GPLv3"
+ __authors = [("marley", "marley@over-load.me")]
+
+
+ def loadAccountInfo(self, user, req):
+ data = self.getAccountData(user)
+ page = req.load("https://api.over-load.me/account.php", get={"user": user, "auth": data['password']}).strip()
+ data = json_loads(page)
+
+ # Check for premium
+ if data['membership'] == "Free":
+ return {"premium": False}
+
+ account_info = {"validuntil": data['expirationunix'], "trafficleft": -1}
+ return account_info
+
+
+ def login(self, user, data, req):
+ jsondata = req.load("https://api.over-load.me/account.php",
+ get={"user": user, "auth": data['password']}).strip()
+ data = json_loads(jsondata)
+
+ if data['err'] == 1:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/PremiumTo.py b/pyload/plugin/account/PremiumTo.py
new file mode 100644
index 000000000..2fe95b135
--- /dev/null
+++ b/pyload/plugin/account/PremiumTo.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+
+
+class PremiumTo(Account):
+ __name = "PremiumTo"
+ __type = "account"
+ __version = "0.04"
+
+ __description = """Premium.to account plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+
+ def loadAccountInfo(self, user, req):
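+        # straffic.php returns the remaining traffic as ";"-separated counters; sum them for the total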
+ api_r = req.load("http://premium.to/api/straffic.php",
+ get={'username': self.username, 'password': self.password})
+ traffic = sum(map(int, api_r.split(';')))
+
+ return {"trafficleft": int(traffic) / 1024, "validuntil": -1} #@TODO: Remove / 1024 in 0.4.10
+
+
+ def login(self, user, data, req):
+ self.username = user
+ self.password = data['password']
+ authcode = req.load("http://premium.to/api/getauthcode.php",
+ get={'username': user, 'password': self.password}).strip()
+
+ if "wrong username" in authcode:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/PremiumizeMe.py b/pyload/plugin/account/PremiumizeMe.py
new file mode 100644
index 000000000..9ad728bf7
--- /dev/null
+++ b/pyload/plugin/account/PremiumizeMe.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+
+from pyload.utils import json_loads
+
+
+class PremiumizeMe(Account):
+ __name = "PremiumizeMe"
+ __type = "account"
+ __version = "0.11"
+
+ __description = """Premiumize.me account plugin"""
+ __license = "GPLv3"
+ __authors = [("Florian Franzen", "FlorianFranzen@gmail.com")]
+
+
+ def loadAccountInfo(self, user, req):
+ # Get user data from premiumize.me
+ status = self.getAccountStatus(user, req)
+ self.logDebug(status)
+
+ # Parse account info
+ account_info = {"validuntil": float(status['result']['expires']),
+ "trafficleft": max(0, status['result']['trafficleft_bytes'])}
+
+ if status['result']['type'] == 'free':
+ account_info['premium'] = False
+
+ return account_info
+
+
+ def login(self, user, data, req):
+ # Get user data from premiumize.me
+ status = self.getAccountStatus(user, req)
+
+ # Check if user and password are valid
+ if status['status'] != 200:
+ self.wrongPassword()
+
+
+ def getAccountStatus(self, user, req):
+ # Use premiumize.me API v1 (see https://secure.premiumize.me/?show=api)
+ # to retrieve account info and return the parsed json answer
+ answer = req.load("https://api.premiumize.me/pm-api/v1.php",
+ get={'method' : "accountstatus",
+ 'params[login]': user,
+ 'params[pass]' : self.accounts[user]['password']})
+ return json_loads(answer)
diff --git a/pyload/plugin/account/QuickshareCz.py b/pyload/plugin/account/QuickshareCz.py
new file mode 100644
index 000000000..216bf62f2
--- /dev/null
+++ b/pyload/plugin/account/QuickshareCz.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Account import Account
+
+
+class QuickshareCz(Account):
+ __name = "QuickshareCz"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Quickshare.cz account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ TRAFFIC_LEFT_PATTERN = r'Stav kreditu: <strong>(.+?)</strong>'
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load("http://www.quickshare.cz/premium", decode=True)
+
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ if m:
+ trafficleft = self.parseTraffic(m.group(1))
+ premium = True if trafficleft else False
+ else:
+ trafficleft = None
+ premium = False
+
+ return {"validuntil": -1, "trafficleft": trafficleft, "premium": premium}
+
+
+ def login(self, user, data, req):
+ html = req.load('http://www.quickshare.cz/html/prihlaseni_process.php', post={
+ "akce": u'Přihlásit',
+ "heslo": data['password'],
+ "jmeno": user
+ }, decode=True)
+
+        if u'>Takový uživatel neexistuje.<' in html or u'>Špatné heslo.<' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/RPNetBiz.py b/pyload/plugin/account/RPNetBiz.py
new file mode 100644
index 000000000..fe18e06c9
--- /dev/null
+++ b/pyload/plugin/account/RPNetBiz.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class RPNetBiz(Account):
+ __name = "RPNetBiz"
+ __type = "account"
+ __version = "0.10"
+
+ __description = """RPNet.biz account plugin"""
+ __license = "GPLv3"
+ __authors = [("Dman", "dmanugm@gmail.com")]
+
+
+ def loadAccountInfo(self, user, req):
+ # Get account information from rpnet.biz
+ res = self.getAccountStatus(user, req)
+ try:
+ if res['accountInfo']['isPremium']:
+ # Parse account info. Change the trafficleft later to support per host info.
+ account_info = {"validuntil": int(res['accountInfo']['premiumExpiry']),
+ "trafficleft": -1, "premium": True}
+ else:
+ account_info = {"validuntil": None, "trafficleft": None, "premium": False}
+
+ except KeyError:
+ #handle wrong password exception
+ account_info = {"validuntil": None, "trafficleft": None, "premium": False}
+
+ return account_info
+
+
+ def login(self, user, data, req):
+ # Get account information from rpnet.biz
+ res = self.getAccountStatus(user, req)
+
+ # If we have an error in the res, we have wrong login information
+ if 'error' in res:
+ self.wrongPassword()
+
+
+ def getAccountStatus(self, user, req):
+ # Using the rpnet API, check if valid premium account
+ res = req.load("https://premium.rpnet.biz/client_api.php",
+ get={"username": user, "password": self.accounts[user]['password'],
+ "action": "showAccountInformation"})
+ self.logDebug("JSON data: %s" % res)
+
+ return json_loads(res)
diff --git a/pyload/plugin/account/RapidfileshareNet.py b/pyload/plugin/account/RapidfileshareNet.py
new file mode 100644
index 000000000..0b0ed210c
--- /dev/null
+++ b/pyload/plugin/account/RapidfileshareNet.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class RapidfileshareNet(XFSAccount):
+ __name = "RapidfileshareNet"
+ __type = "account"
+ __version = "0.05"
+
+ __description = """Rapidfileshare.net account plugin"""
+ __license = "GPLv3"
+ __authors = [("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "rapidfileshare.net"
+
+ TRAFFIC_LEFT_PATTERN = r'>Traffic available today:</TD><TD><label for="name">\s*(?P<S>[\d.,]+)\s*(?:(?P<U>[\w^_]+))?'
diff --git a/pyload/plugin/account/RapidgatorNet.py b/pyload/plugin/account/RapidgatorNet.py
new file mode 100644
index 000000000..66fe13f90
--- /dev/null
+++ b/pyload/plugin/account/RapidgatorNet.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class RapidgatorNet(Account):
+ __name = "RapidgatorNet"
+ __type = "account"
+ __version = "0.04"
+
+ __description = """Rapidgator.net account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ API_URL = 'http://rapidgator.net/api/user'
+
+
+ def loadAccountInfo(self, user, req):
+ try:
+ sid = self.getAccountData(user).get('SID')
+ assert sid
+
+ json = req.load("%s/info?sid=%s" % (self.API_URL, sid))
+ self.logDebug("API:USERINFO", json)
+ json = json_loads(json)
+
+ if json['response_status'] == 200:
+ if "reset_in" in json['response']:
+ self.scheduleRefresh(user, json['response']['reset_in'])
+
+ return {"validuntil": json['response']['expire_date'],
+ "trafficleft": int(json['response']['traffic_left']),
+ "premium": True}
+ else:
+ self.logError(json['response_details'])
+ except Exception, e:
+ self.logError(e)
+
+ return {"validuntil": None, "trafficleft": None, "premium": False}
+
+
+ def login(self, user, data, req):
+ try:
+ json = req.load('%s/login' % self.API_URL, post={"username": user, "password": data['password']})
+ self.logDebug("API:LOGIN", json)
+ json = json_loads(json)
+
+ if json['response_status'] == 200:
+ data['SID'] = str(json['response']['session_id'])
+ return
+ else:
+ self.logError(json['response_details'])
+ except Exception, e:
+ self.logError(e)
+
+ self.wrongPassword()
diff --git a/pyload/plugin/account/RapiduNet.py b/pyload/plugin/account/RapiduNet.py
new file mode 100644
index 000000000..dfb18d2c7
--- /dev/null
+++ b/pyload/plugin/account/RapiduNet.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class RapiduNet(Account):
+ __name = "RapiduNet"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Rapidu.net account plugin"""
+ __license = "GPLv3"
+ __authors = [("prOq", "")]
+
+
+ PREMIUM_PATTERN = r'<a href="premium/" style="padding-left: 0px;">Account: <b>Premium</b></a>'
+
+
+ def loadAccountInfo(self, user, req):
+ info = {'validuntil': None, 'trafficleft': None, 'premium': False}
+
+ req.load("https://rapidu.net/ajax.php", get={'a': "getChangeLang"}, post={"_go": "", "lang": "en"})
+ html = req.load("https://rapidu.net/", decode=True)
+
+ if re.search(self.PREMIUM_PATTERN, html):
+ info['premium'] = True
+
+ return info
+
+
+ def login(self, user, data, req):
+ try:
+ json = json_loads(req.load("https://rapidu.net/ajax.php?a=getUserLogin",
+ post={'_go': "",
+ 'login': user,
+ 'pass': data['password'],
+ 'member': "1"}))
+
+ self.logDebug(json)
+
+ if not json['message'] == "success":
+ self.wrongPassword()
+
+ except Exception, e:
+ self.logError(e)
diff --git a/pyload/plugin/account/RarefileNet.py b/pyload/plugin/account/RarefileNet.py
new file mode 100644
index 000000000..fc736bafc
--- /dev/null
+++ b/pyload/plugin/account/RarefileNet.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class RarefileNet(XFSAccount):
+ __name = "RarefileNet"
+ __type = "account"
+ __version = "0.04"
+
+ __description = """RareFile.net account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "rarefile.net"
diff --git a/pyload/plugin/account/RealdebridCom.py b/pyload/plugin/account/RealdebridCom.py
new file mode 100644
index 000000000..c604fb108
--- /dev/null
+++ b/pyload/plugin/account/RealdebridCom.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+
+import xml.dom.minidom as dom
+
+from pyload.plugin.Account import Account
+
+
+class RealdebridCom(Account):
+ __name = "RealdebridCom"
+ __type = "account"
+ __version = "0.43"
+
+ __description = """Real-Debrid.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Devirex Hazzard", "naibaf_11@yahoo.de")]
+
+
+ def loadAccountInfo(self, user, req):
+ if self.pin_code:
+ return {"premium": False}
+ page = req.load("https://real-debrid.com/api/account.php")
+ xml = dom.parseString(page)
+ account_info = {"validuntil": int(xml.getElementsByTagName("expiration")[0].childNodes[0].nodeValue),
+ "trafficleft": -1}
+
+ return account_info
+
+
+ def login(self, user, data, req):
+ self.pin_code = False
+ page = req.load("https://real-debrid.com/ajax/login.php", get={"user": user, "pass": data['password']})
+ if "Your login informations are incorrect" in page:
+ self.wrongPassword()
+ elif "PIN Code required" in page:
+ self.logWarning(_("PIN code required. Please login to https://real-debrid.com using the PIN or disable the double authentication in your control panel on https://real-debrid.com"))
+ self.pin_code = True
diff --git a/pyload/plugin/account/RehostTo.py b/pyload/plugin/account/RehostTo.py
new file mode 100644
index 000000000..956982186
--- /dev/null
+++ b/pyload/plugin/account/RehostTo.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+
+
+class RehostTo(Account):
+ __name = "RehostTo"
+ __type = "account"
+ __version = "0.10"
+
+ __description = """Rehost.to account plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org")]
+
+
+ def loadAccountInfo(self, user, req):
+ data = self.getAccountData(user)
+ page = req.load("http://rehost.to/api.php",
+ get={'cmd': "login", 'user': user, 'pass': data['password']})
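+        # The API answers with comma-separated key=value pairs: session id first, then the long session token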
+ data = [x.split("=") for x in page.split(",")]
+ ses = data[0][1]
+ long_ses = data[1][1]
+
+ page = req.load("http://rehost.to/api.php",
+ get={'cmd': "get_premium_credits", 'long_ses': long_ses})
+ traffic, valid = page.split(",")
+
+ account_info = {"trafficleft": int(traffic) * 1024,
+ "validuntil": int(valid),
+ "long_ses": long_ses,
+ "ses": ses}
+
+ return account_info
+
+
+ def login(self, user, data, req):
+ page = req.load("http://rehost.to/api.php",
+ get={'cmd': "login", 'user': user, 'pass': data['password']})
+
+ if "Login failed." in page:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/RyushareCom.py b/pyload/plugin/account/RyushareCom.py
new file mode 100644
index 000000000..d908e6442
--- /dev/null
+++ b/pyload/plugin/account/RyushareCom.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class RyushareCom(XFSAccount):
+ __name = "RyushareCom"
+ __type = "account"
+ __version = "0.05"
+
+ __description = """Ryushare.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("trance4us", "")]
+
+
+ HOSTER_DOMAIN = "ryushare.com"
+
+
+ def login(self, user, data, req):
+ req.lastURL = "http://ryushare.com/login.python"
+ html = req.load("http://ryushare.com/login.python",
+ post={"login": user, "password": data['password'], "op": "login"})
+ if 'Incorrect Login or Password' in html or '>Error<' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/SafesharingEu.py b/pyload/plugin/account/SafesharingEu.py
new file mode 100644
index 000000000..eae8140fb
--- /dev/null
+++ b/pyload/plugin/account/SafesharingEu.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class SafesharingEu(XFSAccount):
+ __name = "SafesharingEu"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Safesharing.eu account plugin"""
+ __license = "GPLv3"
+ __authors = [("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "safesharing.eu"
diff --git a/pyload/plugin/account/SecureUploadEu.py b/pyload/plugin/account/SecureUploadEu.py
new file mode 100644
index 000000000..b349e893f
--- /dev/null
+++ b/pyload/plugin/account/SecureUploadEu.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class SecureUploadEu(XFSAccount):
+ __name = "SecureUploadEu"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """SecureUpload.eu account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "secureupload.eu"
diff --git a/pyload/plugin/account/SendmywayCom.py b/pyload/plugin/account/SendmywayCom.py
new file mode 100644
index 000000000..add0b2183
--- /dev/null
+++ b/pyload/plugin/account/SendmywayCom.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class SendmywayCom(XFSAccount):
+ __name = "SendmywayCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Sendmyway.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "sendmyway.com"
diff --git a/pyload/plugin/account/ShareonlineBiz.py b/pyload/plugin/account/ShareonlineBiz.py
new file mode 100644
index 000000000..1b167a568
--- /dev/null
+++ b/pyload/plugin/account/ShareonlineBiz.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+
+
+class ShareonlineBiz(Account):
+ __name = "ShareonlineBiz"
+ __type = "account"
+ __version = "0.24"
+
+ __description = """Share-online.biz account plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def getUserAPI(self, user, req):
+ return req.load("http://api.share-online.biz/account.php",
+ {"username": user, "password": self.accounts[user]['password'], "act": "userDetails"})
+
+
+ def loadAccountInfo(self, user, req):
+ html = self.getUserAPI(user, req)
+
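+        # The account API returns one key=value pair per line; collect them into a dict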
+ info = {}
+ for line in html.splitlines():
+ if "=" in line:
+ key, value = line.split("=")
+ info[key] = value
+ self.logDebug(info)
+
+ if "dl" in info and info['dl'].lower() != "not_available":
+ req.cj.setCookie("share-online.biz", "dl", info['dl'])
+ if "a" in info and info['a'].lower() != "not_available":
+ req.cj.setCookie("share-online.biz", "a", info['a'])
+
+ return {"validuntil": int(info['expire_date']) if "expire_date" in info else -1,
+ "trafficleft": -1,
+ "premium": True if ("dl" in info or "a" in info) and (info['group'] != "Sammler") else False}
+
+
+ def login(self, user, data, req):
+ html = self.getUserAPI(user, req)
+ if "EXCEPTION" in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/SimplyPremiumCom.py b/pyload/plugin/account/SimplyPremiumCom.py
new file mode 100644
index 000000000..af8fc0730
--- /dev/null
+++ b/pyload/plugin/account/SimplyPremiumCom.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+
+from pyload.utils import json_loads
+from pyload.plugin.Account import Account
+
+
+class SimplyPremiumCom(Account):
+ __name = "SimplyPremiumCom"
+ __type = "account"
+ __version = "0.01"
+
+ __description = """Simply-Premium.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("EvolutionClip", "evolutionclip@live.de")]
+
+
+ def loadAccountInfo(self, user, req):
+ json_data = req.load('http://www.simply-premium.com/api/user.php?format=json')
+ self.logDebug("JSON data: " + json_data)
+ json_data = json_loads(json_data)
+
+ if 'vip' in json_data['result'] and json_data['result']['vip'] == 0:
+ return {"premium": False}
+
+ #Time package
+ validuntil = float(json_data['result']['timeend'])
+ #Traffic package
+ # {"trafficleft": int(traffic), "validuntil": -1}
+ #trafficleft = int(json_data['result']['traffic'])
+
+ #return {"premium": True, "validuntil": validuntil, "trafficleft": trafficleft}
+ return {"premium": True, "validuntil": validuntil}
+
+
+ def login(self, user, data, req):
+ req.cj.setCookie("simply-premium.com", "lang", "EN")
+
+ if data['password'] == '' or data['password'] == '0':
+ post_data = {"key": user}
+ else:
+ post_data = {"login_name": user, "login_pass": data['password']}
+
+ html = req.load("http://www.simply-premium.com/login.php", post=post_data)
+
+ if 'logout' not in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/SimplydebridCom.py b/pyload/plugin/account/SimplydebridCom.py
new file mode 100644
index 000000000..24d0cb6bf
--- /dev/null
+++ b/pyload/plugin/account/SimplydebridCom.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+from time import mktime, strptime
+
+from pyload.plugin.Account import Account
+
+
+class SimplydebridCom(Account):
+ __name = "SimplydebridCom"
+ __type = "account"
+ __version = "0.10"
+
+ __description = """Simply-Debrid.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Kagenoshin", "kagenoshin@gmx.ch")]
+
+
+ def loadAccountInfo(self, user, req):
+ get_data = {'login': 2, 'u': self.loginname, 'p': self.password}
+ res = req.load("http://simply-debrid.com/api.php", get=get_data, decode=True)
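+        # API reply is ";"-separated: the first field is the status flag, the third the expiry date (dd/mm/YYYY)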
+ data = [x.strip() for x in res.split(";")]
+ if str(data[0]) != "1":
+ return {"premium": False}
+ else:
+ return {"trafficleft": -1, "validuntil": mktime(strptime(str(data[2]), "%d/%m/%Y"))}
+
+
+ def login(self, user, data, req):
+ self.loginname = user
+ self.password = data['password']
+ get_data = {'login': 1, 'u': self.loginname, 'p': self.password}
+ res = req.load("http://simply-debrid.com/api.php", get=get_data, decode=True)
+ if res != "02: loggin success":
+ self.wrongPassword()
diff --git a/pyload/plugin/account/StahnuTo.py b/pyload/plugin/account/StahnuTo.py
new file mode 100644
index 000000000..b3ca3891a
--- /dev/null
+++ b/pyload/plugin/account/StahnuTo.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Account import Account
+
+
+class StahnuTo(Account):
+ __name = "StahnuTo"
+ __type = "account"
+ __version = "0.03"
+
+ __description = """StahnuTo account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load("http://www.stahnu.to/")
+
+ m = re.search(r'>VIP: (\d+.*)<', html)
+ trafficleft = self.parseTraffic(m.group(1)) * 1024 if m else 0
+
+ return {"premium": trafficleft > (512 * 1024), "trafficleft": trafficleft, "validuntil": -1}
+
+
+ def login(self, user, data, req):
+ html = req.load("http://www.stahnu.to/login.php", post={
+ "username": user,
+ "password": data['password'],
+ "submit": "Login"})
+
+ if not '<a href="logout.php">' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/StreamcloudEu.py b/pyload/plugin/account/StreamcloudEu.py
new file mode 100644
index 000000000..f3eb6cce9
--- /dev/null
+++ b/pyload/plugin/account/StreamcloudEu.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class StreamcloudEu(XFSAccount):
+ __name = "StreamcloudEu"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Streamcloud.eu account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "streamcloud.eu"
diff --git a/pyload/plugin/account/TurbobitNet.py b/pyload/plugin/account/TurbobitNet.py
new file mode 100644
index 000000000..8237ed34e
--- /dev/null
+++ b/pyload/plugin/account/TurbobitNet.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+import re
+from time import mktime, strptime
+
+from pyload.plugin.Account import Account
+
+
+class TurbobitNet(Account):
+ __name = "TurbobitNet"
+ __type = "account"
+ __version = "0.01"
+
+ __description = """TurbobitNet account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load("http://turbobit.net")
+
+ m = re.search(r'<u>Turbo Access</u> to ([\d.]+)', html)
+ if m:
+ premium = True
+ validuntil = mktime(strptime(m.group(1), "%d.%m.%Y"))
+ else:
+ premium = False
+ validuntil = -1
+
+ return {"premium": premium, "trafficleft": -1, "validuntil": validuntil}
+
+
+ def login(self, user, data, req):
+ req.cj.setCookie("turbobit.net", "user_lang", "en")
+
+ html = req.load("http://turbobit.net/user/login", post={
+ "user[login]": user,
+ "user[pass]": data['password'],
+ "user[submit]": "Login"})
+
+ if not '<div class="menu-item user-name">' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/TusfilesNet.py b/pyload/plugin/account/TusfilesNet.py
new file mode 100644
index 000000000..b951f8add
--- /dev/null
+++ b/pyload/plugin/account/TusfilesNet.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import mktime, strptime, gmtime
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class TusfilesNet(XFSAccount):
+ __name = "TusfilesNet"
+ __type = "account"
+ __version = "0.06"
+
+ __description = """Tusfile.net account plugin"""
+ __license = "GPLv3"
+ __authors = [("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "tusfiles.net"
+
+ VALID_UNTIL_PATTERN = r'<span class="label label-default">([^<]+)</span>'
+ TRAFFIC_LEFT_PATTERN = r'<td><img src="//www\.tusfiles\.net/i/icon/meter\.png" alt=""/></td>\n<td>&nbsp;(?P<S>[\d.,]+)'
diff --git a/pyload/plugin/account/UlozTo.py b/pyload/plugin/account/UlozTo.py
new file mode 100644
index 000000000..bba346874
--- /dev/null
+++ b/pyload/plugin/account/UlozTo.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from pyload.plugin.Account import Account
+
+
+class UlozTo(Account):
+ __name = "UlozTo"
+ __type = "account"
+ __version = "0.07"
+
+ __description = """Uloz.to account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("pulpe", "")]
+
+
+ TRAFFIC_LEFT_PATTERN = r'<li class="menu-kredit"><a href="/kredit" title="[^"]*?GB = ([\d.]+) MB"'
+
+
+ def loadAccountInfo(self, user, req):
+ self.phpsessid = req.cj.getCookie("ULOSESSID") #@NOTE: this cookie gets lost somehow after each request
+
+ html = req.load("http://www.ulozto.net/", decode=True)
+
+ req.cj.setCookie("ulozto.net", "ULOSESSID", self.phpsessid)
+
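+        # Normalize the displayed credit (MB, possibly with spaces and a decimal comma) and convert it to pyload's traffic unit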
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ trafficleft = int(float(m.group(1).replace(' ', '').replace(',', '.')) * 1000 * 1.048) if m else 0
+ self.premium = True if trafficleft else False
+
+ return {"validuntil": -1, "trafficleft": trafficleft}
+
+
+ def login(self, user, data, req):
+ login_page = req.load('http://www.ulozto.net/?do=web-login', decode=True)
+ action = re.findall('<form action="(.+?)"', login_page)[1].replace('&amp;', '&')
+ token = re.search('_token_" value="(.+?)"', login_page).group(1)
+
+ html = req.load(urljoin("http://www.ulozto.net/", action),
+ post={'_token_' : token,
+ 'do' : "loginForm-submit",
+ 'login' : u"Přihlásit",
+ 'password': data['password'],
+ 'username': user},
+ decode=True)
+
+ if '<div class="flash error">' in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/UnrestrictLi.py b/pyload/plugin/account/UnrestrictLi.py
new file mode 100644
index 000000000..943943cac
--- /dev/null
+++ b/pyload/plugin/account/UnrestrictLi.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Account import Account
+from pyload.utils import json_loads
+
+
+class UnrestrictLi(Account):
+ __name = "UnrestrictLi"
+ __type = "account"
+ __version = "0.03"
+
+ __description = """Unrestrict.li account plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ def loadAccountInfo(self, user, req):
+ json_data = req.load('http://unrestrict.li/api/jdownloader/user.php?format=json')
+ self.logDebug("JSON data: " + json_data)
+ json_data = json_loads(json_data)
+
+ if 'vip' in json_data['result'] and json_data['result']['vip'] == 0:
+ return {"premium": False}
+
+ validuntil = json_data['result']['expires']
+ trafficleft = int(json_data['result']['traffic'])
+
+ return {"premium": True, "validuntil": validuntil, "trafficleft": trafficleft}
+
+
+ def login(self, user, data, req):
+ req.cj.setCookie("unrestrict.li", "lang", "EN")
+ html = req.load("https://unrestrict.li/sign_in")
+
+ if 'solvemedia' in html:
+ self.logError(_("A Captcha is required. Go to http://unrestrict.li/sign_in and login, then retry"))
+ return
+
+ post_data = {"username": user, "password": data['password'],
+ "remember_me": "remember", "signin": "Sign in"}
+ html = req.load("https://unrestrict.li/sign_in", post=post_data)
+
+ if 'sign_out' not in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/UploadcCom.py b/pyload/plugin/account/UploadcCom.py
new file mode 100644
index 000000000..01102168c
--- /dev/null
+++ b/pyload/plugin/account/UploadcCom.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class UploadcCom(XFSAccount):
+ __name = "UploadcCom"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """Uploadc.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "uploadc.com"
diff --git a/pyload/plugin/account/UploadedTo.py b/pyload/plugin/account/UploadedTo.py
new file mode 100644
index 000000000..f25da8995
--- /dev/null
+++ b/pyload/plugin/account/UploadedTo.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+import re
+from time import time
+
+from pyload.plugin.Account import Account
+
+
+class UploadedTo(Account):
+ __name = "UploadedTo"
+ __type = "account"
+ __version = "0.27"
+
+ __description = """Uploaded.to account plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+ PREMIUM_PATTERN = r'<em>Premium</em>'
+ VALID_UNTIL_PATTERN = r'<td>Duration:</td>\s*<th>([^<]+)'
+ TRAFFIC_LEFT_PATTERN = r'<th colspan="2"><b class="cB">([^<]+)'
+
+
+ def loadAccountInfo(self, user, req):
+ validuntil = None
+ trafficleft = None
+ premium = None
+
+ html = req.load("http://uploaded.net/me")
+
+ premium = True if re.search(self.PREMIUM_PATTERN, html) else False
+
+ m = re.search(self.VALID_UNTIL_PATTERN, html, re.M)
+ if m:
+ expiredate = m.group(1).strip()
+
+ if expiredate == "unlimited":
+ validuntil = -1
+ else:
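+                # Remaining time is given as e.g. "2 weeks 3 days"; convert it into an absolute timestamp
+                # by adding the equivalent number of hours to the current time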
+ m = re.findall(r'(\d+) (Week|weeks|day|hour)', expiredate)
+ if m:
+ validuntil = time()
+ for n, u in m:
+ validuntil += int(n) * 60 * 60 * {'Week': 168, 'weeks': 168, 'day': 24, 'hour': 1}[u]
+
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ if m:
+ trafficleft = self.parseTraffic(m.group(1).replace('.', ''))
+
+ return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium}
+
+
+ def login(self, user, data, req):
+ req.cj.setCookie("uploaded.net", "lang", "en")
+
+ page = req.load("http://uploaded.net/io/login",
+ post={'id': user, 'pw': data['password'], '_': ""})
+
+ if "User and password do not match" in page:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/UploadheroCom.py b/pyload/plugin/account/UploadheroCom.py
new file mode 100644
index 000000000..fa7af288e
--- /dev/null
+++ b/pyload/plugin/account/UploadheroCom.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+
+import re
+import datetime
+import time
+
+from pyload.plugin.Account import Account
+
+
+class UploadheroCom(Account):
+ __name = "UploadheroCom"
+ __type = "account"
+ __version = "0.20"
+
+ __description = """Uploadhero.co account plugin"""
+ __license = "GPLv3"
+ __authors = [("mcmyst", "mcmyst@hotmail.fr")]
+
+
+ def loadAccountInfo(self, user, req):
+ premium_pattern = re.compile('Il vous reste <span class="bleu">(\d+)</span> jours premium')
+
+ data = self.getAccountData(user)
+ page = req.load("http://uploadhero.co/my-account")
+
+ if premium_pattern.search(page):
+ end_date = datetime.date.today() + datetime.timedelta(days=int(premium_pattern.search(page).group(1)))
+ end_date = time.mktime(end_date.timetuple())
+ account_info = {"validuntil": end_date, "trafficleft": -1, "premium": True}
+ else:
+ account_info = {"validuntil": -1, "trafficleft": -1, "premium": False}
+
+ return account_info
+
+
+ def login(self, user, data, req):
+ page = req.load("http://uploadhero.co/lib/connexion.php",
+ post={"pseudo_login": user, "password_login": data['password']})
+
+ if "mot de passe invalide" in page:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/UploadingCom.py b/pyload/plugin/account/UploadingCom.py
new file mode 100644
index 000000000..7ac1e52ff
--- /dev/null
+++ b/pyload/plugin/account/UploadingCom.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import gmtime, mktime, strptime, time
+
+from pyload.plugin.Account import Account
+from pyload.plugin.internal.SimpleHoster import set_cookies
+
+
+class UploadingCom(Account):
+ __name = "UploadingCom"
+ __type = "account"
+ __version = "0.11"
+
+ __description = """Uploading.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+ PREMIUM_PATTERN = r'UPGRADE TO PREMIUM'
+ VALID_UNTIL_PATTERN = r'Valid Until:(.+?)<'
+
+
+ def loadAccountInfo(self, user, req):
+ validuntil = None
+ trafficleft = None
+ premium = None
+
+ html = req.load("http://uploading.com/")
+
+ premium = False if re.search(self.PREMIUM_PATTERN, html) else True
+
+ m = re.search(self.VALID_UNTIL_PATTERN, html)
+ if m:
+ expiredate = m.group(1).strip()
+ self.logDebug("Expire date: " + expiredate)
+
+ try:
+ validuntil = mktime(strptime(expiredate, "%b %d, %Y"))
+
+ except Exception, e:
+ self.logError(e)
+
+ else:
+ if validuntil > mktime(gmtime()):
+ premium = True
+ else:
+ premium = False
+ validuntil = None
+
+ return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium}
+
+
+ def login(self, user, data, req):
+ set_cookies([("uploading.com", "lang", "1"),
+ ("uploading.com", "language", "1"),
+ ("uploading.com", "setlang", "en"),
+ ("uploading.com", "_lang", "en")]
+
+ req.load("http://uploading.com/")
+ req.load("http://uploading.com/general/login_form/?JsHttpRequest=%s-xml" % long(time() * 1000),
+ post={'email': user, 'password': data['password'], 'remember': "on"})
diff --git a/pyload/plugin/account/UptoboxCom.py b/pyload/plugin/account/UptoboxCom.py
new file mode 100644
index 000000000..116721ebe
--- /dev/null
+++ b/pyload/plugin/account/UptoboxCom.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class UptoboxCom(XFSAccount):
+ __name = "UptoboxCom"
+ __type = "account"
+ __version = "0.07"
+
+ __description = """DDLStorage.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "uptobox.com"
+ HOSTER_URL = "https://uptobox.com/"
diff --git a/pyload/plugin/account/VidPlayNet.py b/pyload/plugin/account/VidPlayNet.py
new file mode 100644
index 000000000..c5d4e0b5a
--- /dev/null
+++ b/pyload/plugin/account/VidPlayNet.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class VidPlayNet(XFSAccount):
+ __name = "VidPlayNet"
+ __type = "account"
+ __version = "0.02"
+
+ __description = """VidPlay.net account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "vidplay.net"
diff --git a/pyload/plugin/account/XFileSharingPro.py b/pyload/plugin/account/XFileSharingPro.py
new file mode 100644
index 000000000..67679f2cc
--- /dev/null
+++ b/pyload/plugin/account/XFileSharingPro.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSAccount import XFSAccount
+
+
+class XFileSharingPro(XFSAccount):
+ __name = "XFileSharingPro"
+ __type = "account"
+ __version = "0.05"
+
+ __description = """XFileSharingPro multi-purpose account plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = None
+
+
+ def init(self):
+ if self.HOSTER_DOMAIN:
+ return super(XFileSharingPro, self).init()
+
+
+ def loadAccountInfo(self, user, req):
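+ # with a HOSTER_DOMAIN set, reuse the XFSAccount logic; otherwise skip it and fall back to the plain Account implementation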
+ return super(XFileSharingPro if self.HOSTER_DOMAIN else XFSAccount, self).loadAccountInfo(user, req)
+
+
+ def login(self, user, data, req):
+ if self.HOSTER_DOMAIN:
+ return super(XFileSharingPro, self).login(user, data, req)
diff --git a/pyload/plugin/account/YibaishiwuCom.py b/pyload/plugin/account/YibaishiwuCom.py
new file mode 100644
index 000000000..6f149478e
--- /dev/null
+++ b/pyload/plugin/account/YibaishiwuCom.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Account import Account
+
+
+class YibaishiwuCom(Account):
+ __name = "YibaishiwuCom"
+ __type = "account"
+ __version = "0.01"
+
+ __description = """115.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ ACCOUNT_INFO_PATTERN = r'var USER_PERMISSION = {(.*?)}'
+
+
+ def loadAccountInfo(self, user, req):
+ #self.relogin(user)
+ html = req.load("http://115.com/", decode=True)
+
+ m = re.search(self.ACCOUNT_INFO_PATTERN, html, re.S)
+ premium = True if (m and 'is_vip: 1' in m.group(1)) else False
+ validuntil = trafficleft = (-1 if m else 0)
+ return dict({"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium})
+
+
+ def login(self, user, data, req):
+ html = req.load('http://passport.115.com/?ac=login', post={
+ "back": "http://www.115.com/",
+ "goto": "http://115.com/",
+ "login[account]": user,
+ "login[passwd]": data['password']
+ }, decode=True)
+
+ if 'var USER_PERMISSION = {' not in html:
+ self.wrongPassword()
diff --git a/pyload/plugin/account/ZeveraCom.py b/pyload/plugin/account/ZeveraCom.py
new file mode 100644
index 000000000..aab795e55
--- /dev/null
+++ b/pyload/plugin/account/ZeveraCom.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+from time import mktime, strptime
+
+from pyload.plugin.Account import Account
+
+
+class ZeveraCom(Account):
+ __name = "ZeveraCom"
+ __type = "account"
+ __version = "0.21"
+
+ __description = """Zevera.com account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def loadAccountInfo(self, user, req):
+ data = self.getAPIData(req)
+ if data == "No traffic":
+ account_info = {"trafficleft": 0, "validuntil": 0, "premium": False}
+ else:
+ account_info = {
+ "trafficleft": int(data['availabletodaytraffic']) * 1024,
+ "validuntil": mktime(strptime(data['endsubscriptiondate'], "%Y/%m/%d %H:%M:%S")),
+ "premium": True
+ }
+ return account_info
+
+
+ def login(self, user, data, req):
+ self.loginname = user
+ self.password = data['password']
+ if self.getAPIData(req) == "No traffic":
+ self.wrongPassword()
+
+
+ def getAPIData(self, req, just_header=False, **kwargs):
+ get_data = {
+ 'cmd': 'accountinfo',
+ 'login': self.loginname,
+ 'pass': self.password
+ }
+ get_data.update(kwargs)
+
+ res = req.load("http://www.zevera.com/jDownloader.ashx", get=get_data,
+ decode=True, just_header=just_header)
+ self.logDebug(res)
+
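+ # the API answers with comma-separated "Key: Value" pairs, e.g. "EndSubscriptionDate: 2014/01/01 00:00:00, AvailableTodayTraffic: 1024"; parse them into a lowercase-keyed dict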
+ if ':' in res:
+ if not just_header:
+ res = res.replace(',', '\n')
+ return dict((y.strip().lower(), z.strip()) for (y, z) in
+ [x.split(':', 1) for x in res.splitlines() if ':' in x])
+ else:
+ return res
diff --git a/pyload/plugins/account/__init__.py b/pyload/plugin/account/__init__.py
index 40a96afc6..40a96afc6 100644
--- a/pyload/plugins/account/__init__.py
+++ b/pyload/plugin/account/__init__.py
diff --git a/pyload/plugin/addon/Checksum.py b/pyload/plugin/addon/Checksum.py
new file mode 100644
index 000000000..f3f98d0f8
--- /dev/null
+++ b/pyload/plugin/addon/Checksum.py
@@ -0,0 +1,186 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import hashlib
+import re
+import zlib
+
+from os import remove
+from os.path import getsize, isfile, splitext
+
+from pyload.plugin.Addon import Addon
+from pyload.utils import safe_join, fs_encode
+
+
+def computeChecksum(local_file, algorithm):
+ if algorithm in getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")):
+ h = getattr(hashlib, algorithm)()
+
+ with open(local_file, 'rb') as f:
+ for chunk in iter(lambda: f.read(128 * h.block_size), ''):
+ h.update(chunk)
+
+ return h.hexdigest()
+
+ elif algorithm in ("adler32", "crc32"):
+ hf = getattr(zlib, algorithm)
+ last = 0
+
+ with open(local_file, 'rb') as f:
+ for chunk in iter(lambda: f.read(8192), ''):
+ last = hf(chunk, last)
+
+ return "%x" % last
+
+ else:
+ return None
+
+
+class Checksum(Addon):
+ __name = "Checksum"
+ __type = "addon"
+ __version = "0.15"
+
+ __config = [("activated" , "bool" , "Activated" , True ),
+ ("check_checksum", "bool" , "Check checksum? (If False only size will be verified)", True ),
+ ("check_action" , "fail;retry;nothing", "What to do if check fails?" , "retry"),
+ ("max_tries" , "int" , "Number of retries" , 2 ),
+ ("retry_action" , "fail;nothing" , "What to do if all retries fail?" , "fail" ),
+ ("wait_time" , "int" , "Time to wait before each retry (seconds)" , 1 )]
+
+ __description = """Verify downloaded file size and checksum"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("Walter Purcaro", "vuolter@gmail.com"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ methods = {'sfv': 'crc32', 'crc': 'crc32', 'hash': 'md5'}
+ regexps = {'sfv': r'^(?P<name>[^;].+)\s+(?P<hash>[0-9A-Fa-f]{8})$',
+ 'md5': r'^(?P<name>[0-9A-Fa-f]{32}) (?P<file>.+)$',
+ 'crc': r'filename=(?P<name>.+)\nsize=(?P<size>\d+)\ncrc32=(?P<hash>[0-9A-Fa-f]{8})$',
+ 'default': r'^(?P<hash>[0-9A-Fa-f]+)\s+\*?(?P<name>.+)$'}
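+ # e.g. an .sfv line such as "archive.part1.rar 1A2B3C4D" matches the 'sfv' regexp with name="archive.part1.rar" and hash="1A2B3C4D"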
+
+
+ def activate(self):
+ if not self.getConfig("check_checksum"):
+ self.logInfo(_("Checksum validation is disabled in plugin configuration"))
+
+
+ def setup(self):
+ self.algorithms = sorted(
+ getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")), reverse=True)
+ self.algorithms.extend(["crc32", "adler32"])
+ self.formats = self.algorithms + ["sfv", "crc", "hash"]
+
+
+ def downloadFinished(self, pyfile):
+ """
+ Compute checksum for the downloaded file and compare it with the hash provided by the hoster.
+ pyfile.plugin.check_data should be a dictionary which can contain:
+ a) if known, the exact filesize in bytes (e.g. "size": 123456789)
+ b) hexadecimal hash string with algorithm name as key (e.g. "md5": "d76505d0869f9f928a17d42d66326307")
+ """
+ if hasattr(pyfile.plugin, "check_data") and isinstance(pyfile.plugin.check_data, dict):
+ data = pyfile.plugin.check_data.copy()
+
+ elif hasattr(pyfile.plugin, "api_data") and isinstance(pyfile.plugin.api_data, dict):
+ data = pyfile.plugin.api_data.copy()
+
+ # elif hasattr(pyfile.plugin, "info") and isinstance(pyfile.plugin.info, dict):
+ # data = pyfile.plugin.info.copy()
+
+ else:
+ return
+
+ self.logDebug(data)
+
+ if not pyfile.plugin.lastDownload:
+ self.checkFailed(pyfile, None, "No file downloaded")
+
+ local_file = fs_encode(pyfile.plugin.lastDownload)
+ #download_folder = self.config['general']['download_folder']
+ #local_file = fs_encode(safe_join(download_folder, pyfile.package().folder, pyfile.name))
+
+ if not isfile(local_file):
+ self.checkFailed(pyfile, None, "File does not exist")
+
+ # validate file size
+ if "size" in data:
+ api_size = int(data['size'])
+ file_size = getsize(local_file)
+ if api_size != file_size:
+ self.logWarning(_("File %s has incorrect size: %d B (%d expected)") % (pyfile.name, file_size, api_size))
+ self.checkFailed(pyfile, local_file, "Incorrect file size")
+ del data['size']
+
+ # validate checksum
+ if data and self.getConfig("check_checksum"):
+ if "checksum" in data:
+ data['md5'] = data['checksum']
+
+ for key in self.algorithms:
+ if key in data:
+ checksum = computeChecksum(local_file, key.replace("-", "").lower())
+ if checksum:
+ if checksum == data[key].lower():
+ self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
+ (pyfile.name, key.upper(), checksum))
+ break
+ else:
+ self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
+ (key.upper(), pyfile.name, checksum, data[key]))
+ self.checkFailed(pyfile, local_file, "Checksums do not match")
+ else:
+ self.logWarning(_("Unsupported hashing algorithm"), key.upper())
+ else:
+ self.logWarning(_("Unable to validate checksum for file: ") + pyfile.name)
+
+
+ def checkFailed(self, pyfile, local_file, msg):
+ check_action = self.getConfig("check_action")
+ if check_action == "retry":
+ max_tries = self.getConfig("max_tries")
+ retry_action = self.getConfig("retry_action")
+ if pyfile.plugin.retries < max_tries:
+ if local_file:
+ remove(local_file)
+ pyfile.plugin.retry(max_tries, self.getConfig("wait_time"), msg)
+ elif retry_action == "nothing":
+ return
+ elif check_action == "nothing":
+ return
+ pyfile.plugin.fail(reason=msg)
+
+
+ def packageFinished(self, pypack):
+ download_folder = safe_join(self.config['general']['download_folder'], pypack.folder, "")
+
+ for link in pypack.getChildren().itervalues():
+ file_type = splitext(link['name'])[1][1:].lower()
+
+ if file_type not in self.formats:
+ continue
+
+ hash_file = fs_encode(safe_join(download_folder, link['name']))
+ if not isfile(hash_file):
+ self.logWarning(_("File not found"), link['name'])
+ continue
+
+ with open(hash_file) as f:
+ text = f.read()
+
+ for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
+ data = m.groupdict()
+ self.logDebug(link['name'], data)
+
+ local_file = fs_encode(safe_join(download_folder, data['name']))
+ algorithm = self.methods.get(file_type, file_type)
+ checksum = computeChecksum(local_file, algorithm)
+ if checksum == data['hash']:
+ self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
+ (data['name'], algorithm, checksum))
+ else:
+ self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
+ (algorithm, data['name'], checksum, data['hash']))
diff --git a/pyload/plugin/addon/ClickAndLoad.py b/pyload/plugin/addon/ClickAndLoad.py
new file mode 100644
index 000000000..8a05ca85e
--- /dev/null
+++ b/pyload/plugin/addon/ClickAndLoad.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+
+from socket import SHUT_WR, error, socket
+from threading import Thread
+
+from pyload.plugin.Addon import Addon
+
+
+def forward(source, destination):
+ string = ' '
+ while string:
+ string = source.recv(1024)
+ if string:
+ destination.sendall(string)
+ else:
+ #source.shutdown(socket.SHUT_RD)
+ destination.shutdown(SHUT_WR)
+
+
+class ClickAndLoad(Addon):
+ __name = "ClickAndLoad"
+ __type = "addon"
+ __version = "0.23"
+
+ __config = [("activated", "bool", "Activated" , True ),
+ ("port" , "int" , "Port" , 9666 ),
+ ("extern" , "bool", "Allow external link adding", False)]
+
+ __description = """Click'N'Load hook plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.de"),
+ ("mkaay", "mkaay@mkaay.de"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def setup(self):
+ self.interval = 300
+
+
+ def activate(self):
+ self.initPeriodical()
+
+
+ def periodical(self):
+ webip = "0.0.0.0" if self.getConfig("extern") else "127.0.0.1"
+ webport = self.config['webinterface']['port']
+ cnlport = self.getConfig("port"))
+
+ try:
+ s = socket()
+ s.bind((webip, cnlport))
+ s.listen(5)
+
+ client = s.accept()[0]
+ server = socket()
+
+ server.connect(("127.0.0.1", webport))
+
+ except error, e:
+ if hasattr(e, "errno"):
+ errno = e.errno
+ else:
+ errno = e.args[0]
+
+ if errno == 98:
+ self.logWarning(_("Port %d already in use") % cnlport)
+ else:
+ self.logDebug(e)
+
+ else:
+ self.core.scheduler.removeJob(self.cb)
+ t = Thread(target=forward, args=[client, server])
+ t.setDaemon(True)
+ t.start()
diff --git a/pyload/plugin/addon/DeleteFinished.py b/pyload/plugin/addon/DeleteFinished.py
new file mode 100644
index 000000000..045f58d61
--- /dev/null
+++ b/pyload/plugin/addon/DeleteFinished.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+from pyload.database import style
+from pyload.plugin.Addon import Addon
+
+
+class DeleteFinished(Addon):
+ __name = "DeleteFinished"
+ __type = "addon"
+ __version = "1.11"
+
+ __config = [('interval' , 'int' , 'Delete every (hours)' , '72' ),
+ ('deloffline', 'bool', 'Delete packages with offline links', 'False')]
+
+ __description = """Automatically delete all finished packages from queue"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ # event_list = ["pluginConfigChanged"]
+
+
+ ## overwritten methods ##
+ def periodical(self):
+ if not self.info['sleep']:
+ deloffline = self.getConfig('deloffline')
+ mode = '0,1,4' if deloffline else '0,4'
+ msg = _('delete all finished packages in queue list (%s packages with offline links)')
+ self.logInfo(msg % (_('including') if deloffline else _('excluding')))
+ self.deleteFinished(mode)
+ self.info['sleep'] = True
+ self.addEvent('packageFinished', self.wakeup)
+
+
+ def pluginConfigChanged(self, plugin, name, value):
+ if name == "interval" and value != self.interval:
+ self.interval = value * 3600
+ self.initPeriodical()
+
+
+ def deactivate(self):
+ self.removeEvent('packageFinished', self.wakeup)
+
+
+ def activate(self):
+ self.info = {'sleep': True}
+ interval = self.getConfig('interval')
+ self.pluginConfigChanged(self.__name, 'interval', interval)
+ self.addEvent('packageFinished', self.wakeup)
+
+
+ ## own methods ##
+ @style.queue
+ def deleteFinished(self, mode):
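+ # mode lists the link status codes treated as done (0 = finished, 4 = skipped, plus 1 = offline when deloffline is enabled)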
+ self.c.execute('DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links WHERE package=packages.id AND status NOT IN (%s))' % mode)
+ self.c.execute('DELETE FROM links WHERE NOT EXISTS(SELECT 1 FROM packages WHERE id=links.package)')
+
+
+ def wakeup(self, pypack):
+ self.removeEvent('packageFinished', self.wakeup)
+ self.info['sleep'] = False
+
+
+ ## event managing ##
+ def addEvent(self, event, func):
+ """Adds an event listener for event name"""
+ if event in self.m.events:
+ if func in self.m.events[event]:
+ self.logDebug("Function already registered", func)
+ else:
+ self.m.events[event].append(func)
+ else:
+ self.m.events[event] = [func]
+
+
+ def setup(self):
+ self.interval = 0
+ self.m = self.manager
+ self.removeEvent = self.m.removeEvent
diff --git a/pyload/plugin/addon/DownloadScheduler.py b/pyload/plugin/addon/DownloadScheduler.py
new file mode 100644
index 000000000..7f05d89f4
--- /dev/null
+++ b/pyload/plugin/addon/DownloadScheduler.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import localtime
+
+from pyload.plugin.Addon import Addon
+
+
+class DownloadScheduler(Addon):
+ __name = "DownloadScheduler"
+ __type = "addon"
+ __version = "0.22"
+
+ __config = [("timetable", "str" , "List time periods as hh:mm full or number(kB/s)" , "0:00 full, 7:00 250, 10:00 0, 17:00 150"),
+ ("abort" , "bool", "Abort active downloads when start period with speed 0", False )]
+
+ __description = """Download Scheduler"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ def setup(self):
+ self.cb = None #: callback to scheduler job; will be removed by AddonManager when the addon is unloaded
+
+
+ def activate(self):
+ self.updateSchedule()
+
+
+ def updateSchedule(self, schedule=None):
+ if schedule is None:
+ schedule = self.getConfig("timetable")
+
+ schedule = re.findall("(\d{1,2}):(\d{2})[\s]*(-?\d+)",
+ schedule.lower().replace("full", "-1").replace("none", "0"))
+ if not schedule:
+ self.logError(_("Invalid schedule"))
+ return
+
+ t0 = localtime()
+ now = (t0.tm_hour, t0.tm_min, t0.tm_sec, "X")
+ schedule = sorted([(int(x[0]), int(x[1]), 0, int(x[2])) for x in schedule] + [now])
+
+ self.logDebug("Schedule", schedule)
+
+ for i, v in enumerate(schedule):
+ if v[3] == "X":
+ last, next = schedule[i - 1], schedule[(i + 1) % len(schedule)]
+ self.logDebug("Now/Last/Next", now, last, next)
+
+ self.setDownloadSpeed(last[3])
+
+ next_time = (((24 + next[0] - now[0]) * 60 + next[1] - now[1]) * 60 + next[2] - now[2]) % 86400
+ self.core.scheduler.removeJob(self.cb)
+ self.cb = self.core.scheduler.addJob(next_time, self.updateSchedule, threaded=False)
+
+
+ def setDownloadSpeed(self, speed):
+ if speed == 0:
+ abort = self.getConfig("abort")
+ self.logInfo(_("Stopping download server. (Running downloads will %sbe aborted.)") % '' if abort else _('not '))
+ self.core.api.pauseServer()
+ if abort:
+ self.core.api.stopAllDownloads()
+ else:
+ self.core.api.unpauseServer()
+
+ if speed > 0:
+ self.logInfo(_("Setting download speed to %d kB/s") % speed)
+ self.core.api.setConfigValue("download", "limit_speed", 1)
+ self.core.api.setConfigValue("download", "max_speed", speed)
+ else:
+ self.logInfo(_("Setting download speed to FULL"))
+ self.core.api.setConfigValue("download", "limit_speed", 0)
+ self.core.api.setConfigValue("download", "max_speed", -1)
diff --git a/pyload/plugin/addon/ExternalScripts.py b/pyload/plugin/addon/ExternalScripts.py
new file mode 100644
index 000000000..030b44ae1
--- /dev/null
+++ b/pyload/plugin/addon/ExternalScripts.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+
+import subprocess
+
+from itertools import chain
+from os import listdir, access, X_OK, makedirs
+from os.path import join, exists, basename, abspath
+
+from pyload.plugin.Addon import Addon
+from pyload.utils import safe_join
+
+
+class ExternalScripts(Addon):
+ __name = "ExternalScripts"
+ __type = "addon"
+ __version = "0.25"
+
+ __config = [("activated", "bool", "Activated", True)]
+
+ __description = """Run external scripts"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de"),
+ ("RaNaN", "ranan@pyload.org"),
+ ("spoob", "spoob@pyload.org"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ event_map = {'archive-extracted' : "archive_extracted",
+ 'package-extracted' : "package_extracted",
+ 'all_archives-extracted' : "all_archives_extracted",
+ 'all_archives-processed' : "all_archives_processed",
+ 'all_downloads-finished' : "allDownloadsFinished",
+ 'all_downloads-processed': "allDownloadsProcessed"}
+
+
+ def setup(self):
+ self.scripts = {}
+
+ folders = ["download_preparing", "download_finished", "all_downloads_finished", "all_downloads_processed",
+ "before_reconnect", "after_reconnect",
+ "package_finished", "package_extracted",
+ "archive_extracted", "all_archives_extracted", "all_archives_processed",
+ # deprecated folders
+ "unrar_finished", "all_dls_finished", "all_dls_processed"]
+
+ for folder in folders:
+ self.scripts[folder] = []
+
+ self.initPluginType(folder, join(pypath, 'scripts', folder))
+ self.initPluginType(folder, join('scripts', folder))
+
+ for script_type, names in self.scripts.iteritems():
+ if names:
+ self.logInfo(_("Installed scripts for"), script_type, ", ".join([basename(x) for x in names]))
+
+
+ def initPluginType(self, folder, path):
+ if not exists(path):
+ try:
+ makedirs(path)
+ except Exception:
+ self.logDebug("Script folder %s not created" % folder)
+ return
+
+ for f in listdir(path):
+ if f.startswith("#") or f.startswith(".") or f.startswith("_") or f.endswith("~") or f.endswith(".swp"):
+ continue
+
+ if not access(join(path, f), X_OK):
+ self.logWarning(_("Script not executable:") + " %s/%s" % (folder, f))
+
+ self.scripts[folder].append(join(path, f))
+
+
+ def callScript(self, script, *args):
+ try:
+ cmd = [script] + [str(x) if not isinstance(x, basestring) else x for x in args]
+ self.logDebug("Executing", abspath(script), " ".join(cmd))
+ #output goes to pyload
+ subprocess.Popen(cmd, bufsize=-1)
+ except Exception, e:
+ self.logError(_("Error in %(script)s: %(error)s") % {"script": basename(script), "error": e})
+
+
+ def downloadPreparing(self, pyfile):
+ for script in self.scripts['download_preparing']:
+ self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.id)
+
+
+ def downloadFinished(self, pyfile):
+ download_folder = self.config['general']['download_folder']
+ for script in self.scripts['download_finished']:
+ filename = safe_join(download_folder, pyfile.package().folder, pyfile.name)
+ self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.name, filename, pyfile.id)
+
+
+ def packageFinished(self, pypack):
+ download_folder = self.config['general']['download_folder']
+ for script in self.scripts['package_finished']:
+ folder = safe_join(download_folder, pypack.folder)
+ self.callScript(script, pypack.name, folder, pypack.password, pypack.id)
+
+
+ def beforeReconnecting(self, ip):
+ for script in self.scripts['before_reconnect']:
+ self.callScript(script, ip)
+
+
+ def afterReconnecting(self, ip):
+ for script in self.scripts['after_reconnect']:
+ self.callScript(script, ip)
+
+
+ def archive_extracted(self, pyfile, folder, filename, files):
+ for script in self.scripts['archive_extracted']:
+ self.callScript(script, folder, filename, files)
+ for script in self.scripts['unrar_finished']: #: deprecated
+ self.callScript(script, folder, filename)
+
+
+ def package_extracted(self, pypack):
+ download_folder = self.config['general']['download_folder']
+ for script in self.scripts['package_extracted']:
+ folder = safe_join(download_folder, pypack.folder)
+ self.callScript(script, pypack.name, folder, pypack.password, pypack.id)
+
+
+ def all_archives_extracted(self):
+ for script in self.scripts['all_archives_extracted']:
+ self.callScript(script)
+
+
+ def all_archives_processed(self):
+ for script in self.scripts['all_archives_processed']:
+ self.callScript(script)
+
+
+ def allDownloadsFinished(self):
+ for script in chain(self.scripts['all_downloads_finished'], self.scripts['all_dls_finished']):
+ self.callScript(script)
+
+
+ def allDownloadsProcessed(self):
+ for script in chain(self.scripts['all_downloads_processed'], self.scripts['all_dls_processed']):
+ self.callScript(script)
diff --git a/pyload/plugin/addon/ExtractArchive.py b/pyload/plugin/addon/ExtractArchive.py
new file mode 100644
index 000000000..478fb954e
--- /dev/null
+++ b/pyload/plugin/addon/ExtractArchive.py
@@ -0,0 +1,363 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import os
+import sys
+
+from copy import copy
+from os import remove, chmod, makedirs
+from os.path import exists, basename, isfile, isdir
+from traceback import print_exc
+
+# monkey patch bug in python 2.6 and lower
+# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717
+if sys.version_info < (2, 7) and os.name != "nt":
+ import errno
+ from subprocess import Popen
+
+
+ def _eintr_retry_call(func, *args):
+ while True:
+ try:
+ return func(*args)
+ except OSError, e:
+ if e.errno == errno.EINTR:
+ continue
+ raise
+
+
+ # unused timeout option for older python versions
+ def wait(self, timeout=0):
+ """Wait for child process to terminate. Returns returncode
+ attribute."""
+ if self.returncode is None:
+ try:
+ pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
+ except OSError, e:
+ if e.errno != errno.ECHILD:
+ raise
+ # This happens if SIGCLD is set to be ignored or waiting
+ # for child processes has otherwise been disabled for our
+ # process. This child is dead, we can't get the status.
+ sts = 0
+ self._handle_exitstatus(sts)
+ return self.returncode
+
+ Popen.wait = wait
+
+if os.name != "nt":
+ from grp import getgrnam
+ from os import chown
+ from pwd import getpwnam
+
+from pyload.plugin.Addon import Addon, threaded, Expose
+from pyload.plugin.internal.AbstractExtractor import ArchiveError, CRCError, WrongPassword
+from pyload.utils import safe_join, fs_encode
+
+
+class ExtractArchive(Addon):
+ __name = "ExtractArchive"
+ __type = "addon"
+ __version = "0.19"
+
+ __config = [("activated" , "bool" , "Activated" , True ),
+ ("fullpath" , "bool" , "Extract full path" , True ),
+ ("overwrite" , "bool" , "Overwrite files" , True ),
+ ("passwordfile" , "file" , "password file" , "archive_password.txt"),
+ ("deletearchive", "bool" , "Delete archives when done" , False ),
+ ("subfolder" , "bool" , "Create subfolder for each package" , False ),
+ ("destination" , "folder", "Extract files to" , "" ),
+ ("excludefiles" , "str" , "Exclude files from unpacking (seperated by ;)", "" ),
+ ("recursive" , "bool" , "Extract archives in archvies" , True ),
+ ("queue" , "bool" , "Wait for all downloads to be finished" , True ),
+ ("renice" , "int" , "CPU Priority" , 0 )]
+
+ __description = """Extract different kind of archives"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "ranan@pyload.org"),
+ ("AndroKev", ""),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ event_map = {'all_downloads-processed': "allDownloadsProcessed"}
+
+
+ def setup(self):
+ self.plugins = []
+ self.passwords = []
+ names = []
+
+ for p in ("UnRar", "UnZip"):
+ try:
+ module = self.core.pluginManager.loadModule("internal", p)
+ klass = getattr(module, p)
+ if klass.checkDeps():
+ names.append(p)
+ self.plugins.append(klass)
+
+ except OSError, e:
+ if e.errno == 2:
+ self.logInfo(_("No %s installed") % p)
+ else:
+ self.logWarning(_("Could not activate %s") % p, e)
+ if self.core.debug:
+ print_exc()
+
+ except Exception, e:
+ self.logWarning(_("Could not activate %s") % p, e)
+ if self.core.debug:
+ print_exc()
+
+ if names:
+ self.logInfo(_("Activated") + " " + " ".join(names))
+ else:
+ self.logInfo(_("No Extract plugins activated"))
+
+ # queue with package ids
+ self.queue = []
+
+
+ @Expose
+ def extractPackage(self, id):
+ """ Extract package with given id"""
+ self.manager.startThread(self.extract, [id])
+
+
+ def packageFinished(self, pypack):
+ pid = pypack.id
+ if self.getConfig("queue"):
+ self.logInfo(_("Package %s queued for later extracting") % pypack.name)
+ self.queue.append(pid)
+ else:
+ self.manager.startThread(self.extract, [pid])
+
+
+ @threaded
+ def allDownloadsProcessed(self, thread):
+ local = copy(self.queue)
+ del self.queue[:]
+ if self.extract(local, thread): #: check only if all gone fine, no failed reporting for now
+ self.manager.dispatchEvent("all_archives-extracted")
+ self.manager.dispatchEvent("all_archives-processed")
+
+
+ def extract(self, ids, thread=None):
+ processed = []
+ extracted = []
+ failed = []
+
+ destination = self.getConfig("destination")
+ subfolder = self.getConfig("subfolder")
+ fullpath = self.getConfig("fullpath")
+ overwrite = self.getConfig("overwrite")
+ excludefiles = self.getConfig("excludefiles")
+ renice = self.getConfig("renice")
+ recursive = self.getConfig("recursive")
+
+ # reload from txt file
+ self.reloadPasswords()
+
+ # dl folder
+ dl = self.config['general']['download_folder']
+
+ #iterate packages -> plugins -> targets
+ for pid in ids:
+ p = self.core.files.getPackage(pid)
+ self.logInfo(_("Check package %s") % p.name)
+ if not p:
+ continue
+
+ # determine output folder
+ out = safe_join(dl, p.folder, destination, "")
+ if subfolder:
+ out = safe_join(out, fs_encode(p.folder))
+
+ if not exists(out):
+ makedirs(out)
+
+ files_ids = [(safe_join(dl, p.folder, x['name']), x['id']) for x in p.getChildren().itervalues()]
+ matched = False
+ success = True
+
+ # check as long there are unseen files
+ while files_ids:
+ new_files_ids = []
+
+ for plugin in self.plugins:
+ targets = plugin.getTargets(files_ids)
+ if targets:
+ self.logDebug("Targets for %s: %s" % (plugin.__name, targets))
+ matched = True
+ for target, fid in targets:
+ if target in processed:
+ self.logDebug(basename(target), "skipped")
+ continue
+
+ processed.append(target) # prevent extracting same file twice
+
+ self.logInfo(basename(target), _("Extract to %s") % out)
+ try:
+ klass = plugin(self, target, out, fullpath, overwrite, excludefiles, renice)
+ klass.init()
+ password = p.password.strip().splitlines()
+ new_files = self._extract(klass, fid, password, thread)
+ except Exception, e:
+ self.logError(basename(target), e)
+ success = False
+ continue
+
+ self.logDebug("Extracted", new_files)
+ self.setPermissions(new_files)
+
+ for file in new_files:
+ if not exists(file):
+ self.logDebug("New file %s does not exists" % file)
+ continue
+ if recursive and isfile(file):
+ new_files_ids.append((file, fid)) # append as new target
+
+ files_ids = new_files_ids # also check extracted files
+
+ if matched:
+ if success:
+ extracted.append(pid)
+ self.manager.dispatchEvent("package-extracted", p)
+ else:
+ failed.append(pid)
+ self.manager.dispatchEvent("package-extract_failed", p)
+ else:
+ self.logInfo(_("No files found to extract"))
+
+ return True if not failed else False
+
+
+ def _extract(self, plugin, fid, passwords, thread):
+ pyfile = self.core.files.getFile(fid)
+ deletearchive = self.getConfig("deletearchive")
+
+ pyfile.setCustomStatus(_("extracting"))
+ thread.addActive(pyfile) # keep this file until everything is done
+
+ try:
+ progress = lambda x: pyfile.setProgress(x)
+ success = False
+
+ if not plugin.checkArchive():
+ plugin.extract(progress)
+ success = True
+ else:
+ self.logInfo(basename(plugin.file), _("Password protected"))
+ self.logDebug("Passwords", passwords)
+
+ pwlist = copy(self.getPasswords())
+ # remove already supplied pws from list (only local)
+ for pw in passwords:
+ if pw in pwlist:
+ pwlist.remove(pw)
+
+ for pw in passwords + pwlist:
+ try:
+ self.logDebug("Try password", pw)
+ if plugin.checkPassword(pw):
+ plugin.extract(progress, pw)
+ self.addPassword(pw)
+ success = True
+ break
+ except WrongPassword:
+ self.logDebug("Password was wrong")
+
+ if not success:
+ raise Exception(_("Wrong password"))
+
+ if self.core.debug:
+ self.logDebug("Would delete", ", ".join(plugin.getDeleteFiles()))
+
+ if deletearchive:
+ files = plugin.getDeleteFiles()
+ self.logInfo(_("Deleting %s files") % len(files))
+ for f in files:
+ if exists(f):
+ remove(f)
+ else:
+ self.logDebug("%s does not exists" % f)
+
+ self.logInfo(basename(plugin.file), _("Extracting finished"))
+
+ extracted_files = plugin.getExtractedFiles()
+ self.manager.dispatchEvent("archive-extracted", pyfile, plugin.out, plugin.file, extracted_files)
+
+ return extracted_files
+
+ except ArchiveError, e:
+ self.logError(basename(plugin.file), _("Archive Error"), e)
+ except CRCError:
+ self.logError(basename(plugin.file), _("CRC Mismatch"))
+ except Exception, e:
+ if self.core.debug:
+ print_exc()
+ self.logError(basename(plugin.file), _("Unknown Error"), e)
+
+ self.manager.dispatchEvent("archive-extract_failed", pyfile)
+ raise Exception(_("Extract failed"))
+
+
+ @Expose
+ def getPasswords(self):
+ """ List of saved passwords """
+ return self.passwords
+
+
+ def reloadPasswords(self):
+ passwordfile = self.getConfig("passwordfile")
+
+ try:
+ passwords = []
+ with open(passwordfile, "a+") as f:
+ for pw in f.read().splitlines():
+ passwords.append(pw)
+
+ except IOError, e:
+ self.logError(e)
+
+ else:
+ self.passwords = passwords
+
+
+ @Expose
+ def addPassword(self, pw):
+ """ Adds a password to saved list"""
+ passwordfile = self.getConfig("passwordfile")
+
+ if pw in self.passwords:
+ self.passwords.remove(pw)
+
+ self.passwords.insert(0, pw)
+
+ try:
+ with open(passwordfile, "wb") as f:
+ for pw in self.passwords:
+ f.write(pw + "\n")
+ except IOError, e:
+ self.logError(e)
+
+
+ def setPermissions(self, files):
+ for f in files:
+ if not exists(f):
+ continue
+ try:
+ if self.config['permission']['change_file']:
+ if isfile(f):
+ chmod(f, int(self.config['permission']['file'], 8))
+ elif isdir(f):
+ chmod(f, int(self.config['permission']['folder'], 8))
+
+ if self.config['permission']['change_dl'] and os.name != "nt":
+ uid = getpwnam(self.config['permission']['user'])[2]
+ gid = getgrnam(self.config['permission']['group'])[2]
+ chown(f, uid, gid)
+ except Exception, e:
+ self.logWarning(_("Setting User and Group failed"), e)
diff --git a/pyload/plugin/addon/HotFolder.py b/pyload/plugin/addon/HotFolder.py
new file mode 100644
index 000000000..5eb15ab4f
--- /dev/null
+++ b/pyload/plugin/addon/HotFolder.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import time
+
+from os import listdir, makedirs
+from os.path import exists, isfile, join
+from shutil import move
+
+from pyload.plugin.Addon import Addon
+from pyload.utils import fs_encode, safe_join
+
+
+class HotFolder(Addon):
+ __name = "HotFolder"
+ __type = "addon"
+ __version = "0.12"
+
+ __config = [("folder" , "str" , "Folder to observe" , "container"),
+ ("watch_file", "bool", "Observe link file" , False ),
+ ("keep" , "bool", "Keep added containers", True ),
+ ("file" , "str" , "Link file" , "links.txt")]
+
+ __description = """Observe folder and file for changes and add container and links"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.de")]
+
+
+ def setup(self):
+ self.interval = 10
+
+
+ def activate(self):
+ self.initPeriodical()
+
+
+ def periodical(self):
+ folder = fs_encode(self.getConfig("folder"))
+
+ try:
+ if not exists(join(folder, "finished")):
+ makedirs(join(folder, "finished"))
+
+ if self.getConfig("watch_file"):
+ with open(fs_encode(self.getConfig("file")), "a+") as f:
+ content = f.read().strip()
+
+ if content:
+ name = "%s_%s.txt" % (self.getConfig("file"), time.strftime("%H-%M-%S_%d%b%Y"))
+
+ with open(safe_join(folder, "finished", name), "wb") as f:
+ f.write(content)
+
+ self.core.api.addPackage(f.name, [f.name], 1)
+
+ for f in listdir(folder):
+ path = join(folder, f)
+
+ if not isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."):
+ continue
+
+ newpath = join(folder, "finished", f if self.getConfig("keep") else "tmp_" + f)
+ move(path, newpath)
+
+ self.logInfo(_("Added %s from HotFolder") % f)
+ self.core.api.addPackage(f, [newpath], 1)
+
+ except IOError, e:
+ self.logError(e)
diff --git a/pyload/plugin/addon/IRCInterface.py b/pyload/plugin/addon/IRCInterface.py
new file mode 100644
index 000000000..3596b72ab
--- /dev/null
+++ b/pyload/plugin/addon/IRCInterface.py
@@ -0,0 +1,431 @@
+# -*- coding: utf-8 -*-
+
+import re
+import socket
+import ssl
+import time
+
+from pycurl import FORM_FILE
+from select import select
+from threading import Thread
+from time import sleep
+from traceback import print_exc
+
+from pyload.api import PackageDoesNotExists, FileDoesNotExists
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Addon import Addon
+from pyload.utils import formatSize
+
+
+class IRCInterface(Thread, Addon):
+ __name = "IRCInterface"
+ __type = "addon"
+ __version = "0.13"
+
+ __config = [("host" , "str" , "IRC-Server Address" , "Enter your server here!"),
+ ("port" , "int" , "IRC-Server Port" , 6667 ),
+ ("ident" , "str" , "Clients ident" , "pyload-irc" ),
+ ("realname" , "str" , "Realname" , "pyload-irc" ),
+ ("ssl" , "bool", "Use SSL" , False ),
+ ("nick" , "str" , "Nickname the Client will take" , "pyLoad-IRC" ),
+ ("owner" , "str" , "Nickname the Client will accept commands from", "Enter your nick here!" ),
+ ("info_file", "bool", "Inform about every file finished" , False ),
+ ("info_pack", "bool", "Inform about every package finished" , True ),
+ ("captcha" , "bool", "Send captcha requests" , True )]
+
+ __description = """Connect to irc and let owner perform different tasks"""
+ __license = "GPLv3"
+ __authors = [("Jeix", "Jeix@hasnomail.com")]
+
+
+ def __init__(self, core, manager):
+ Thread.__init__(self)
+ Addon.__init__(self, core, manager)
+ self.setDaemon(True)
+
+
+ def activate(self):
+ self.abort = False
+ self.more = []
+ self.new_package = {}
+
+ self.start()
+
+
+ def packageFinished(self, pypack):
+ try:
+ if self.getConfig("info_pack"):
+ self.response(_("Package finished: %s") % pypack.name)
+ except Exception:
+ pass
+
+
+ def downloadFinished(self, pyfile):
+ try:
+ if self.getConfig("info_file"):
+ self.response(
+ _("Download finished: %(name)s @ %(plugin)s ") % {"name": pyfile.name, "plugin": pyfile.pluginname})
+ except Exception:
+ pass
+
+
+ def captchaTask(self, task):
+ if self.getConfig("captcha") and task.isTextual():
+ task.handler.append(self)
+ task.setWaiting(60)
+
+ page = getURL("http://www.freeimagehosting.net/upload.php",
+ post={"attached": (FORM_FILE, task.captchaFile)}, multipart=True)
+
+ url = re.search(r"\[img\]([^\[]+)\[/img\]\[/url\]", page).group(1)
+ self.response(_("New Captcha Request: %s") % url)
+ self.response(_("Answer with 'c %s text on the captcha'") % task.id)
+
+
+ def run(self):
+ # connect to IRC etc.
+ self.sock = socket.socket()
+ host = self.getConfig("host")
+ self.sock.connect((host, self.getConfig("port")))
+
+ if self.getConfig("ssl"):
+ self.sock = ssl.wrap_socket(self.sock, cert_reqs=ssl.CERT_NONE) #@TODO: support custom certificate
+
+ nick = self.getConfig("nick")
+ self.sock.send("NICK %s\r\n" % nick)
+ self.sock.send("USER %s %s bla :%s\r\n" % (nick, host, nick))
+ for t in self.getConfig("owner").split():
+ if t.strip().startswith("#"):
+ self.sock.send("JOIN %s\r\n" % t.strip())
+ self.logInfo(_("Connected to"), host)
+ self.logInfo(_("Switching to listening mode!"))
+ try:
+ self.main_loop()
+
+ except IRCError, ex:
+ self.sock.send("QUIT :byebye\r\n")
+ print_exc()
+ self.sock.close()
+
+
+ def main_loop(self):
+ readbuffer = ""
+ while True:
+ sleep(1)
+ fdset = select([self.sock], [], [], 0)
+ if self.sock not in fdset[0]:
+ continue
+
+ if self.abort:
+ raise IRCError("quit")
+
+ readbuffer += self.sock.recv(1024)
+ temp = readbuffer.split("\n")
+ readbuffer = temp.pop()
+
+ for line in temp:
+ line = line.rstrip()
+ first = line.split()
+
+ if first[0] == "PING":
+ self.sock.send("PONG %s\r\n" % first[1])
+
+ if first[0] == "ERROR":
+ raise IRCError(line)
+
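+ # a PRIVMSG line looks like ":owner!user@host PRIVMSG pyLoad-IRC :status"; split it into origin, action, target and text below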
+ msg = line.split(None, 3)
+ if len(msg) < 4:
+ continue
+
+ msg = {
+ "origin": msg[0][1:],
+ "action": msg[1],
+ "target": msg[2],
+ "text": msg[3][1:]
+ }
+
+ self.handle_events(msg)
+
+
+ def handle_events(self, msg):
+ if not msg['origin'].split("!", 1)[0] in self.getConfig("owner").split():
+ return
+
+ if msg['target'].split("!", 1)[0] != self.getConfig("nick"):
+ return
+
+ if msg['action'] != "PRIVMSG":
+ return
+
+ # HANDLE CTCP ANTI FLOOD/BOT PROTECTION
+ if msg['text'] == "\x01VERSION\x01":
+ self.logDebug("Sending CTCP VERSION")
+ self.sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
+ return
+ elif msg['text'] == "\x01TIME\x01":
+ self.logDebug("Sending CTCP TIME")
+ self.sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
+ return
+ elif msg['text'] == "\x01LAG\x01":
+ self.logDebug("Received CTCP LAG") #: don't know how to answer
+ return
+
+ trigger = "pass"
+ args = None
+
+ try:
+ temp = msg['text'].split()
+ trigger = temp[0]
+ if len(temp) > 1:
+ args = temp[1:]
+ except Exception:
+ pass
+
+ handler = getattr(self, "event_%s" % trigger, self.event_pass)
+ try:
+ res = handler(args)
+ for line in res:
+ self.response(line, msg['origin'])
+ except Exception, e:
+ self.logError(e)
+
+
+ def response(self, msg, origin=""):
+ if origin == "":
+ for t in self.getConfig("owner").split():
+ self.sock.send("PRIVMSG %s :%s\r\n" % (t.strip(), msg))
+ else:
+ self.sock.send("PRIVMSG %s :%s\r\n" % (origin.split("!", 1)[0], msg))
+
+
+ #### Events
+
+ def event_pass(self, args):
+ return []
+
+
+ def event_status(self, args):
+ downloads = self.core.api.statusDownloads()
+ if not downloads:
+ return ["INFO: There are no active downloads currently."]
+
+ temp_progress = ""
+ lines = ["ID - Name - Status - Speed - ETA - Progress"]
+ for data in downloads:
+
+ if data.status == 5:
+ temp_progress = data.format_wait
+ else:
+ temp_progress = "%d%% (%s)" % (data.percent, data.format_size)
+
+ lines.append("#%d - %s - %s - %s - %s - %s" %
+ (
+ data.fid,
+ data.name,
+ data.statusmsg,
+ "%s/s" % formatSize(data.speed),
+ "%s" % data.format_eta,
+ temp_progress
+ ))
+ return lines
+
+
+ def event_queue(self, args):
+ ps = self.core.api.getQueueData()
+
+ if not ps:
+ return ["INFO: There are no packages in queue."]
+
+ lines = []
+ for pack in ps:
+ lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links)))
+
+ return lines
+
+
+ def event_collector(self, args):
+ ps = self.core.api.getCollectorData()
+ if not ps:
+ return ["INFO: No packages in collector!"]
+
+ lines = []
+ for pack in ps:
+ lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links)))
+
+ return lines
+
+
+ def event_info(self, args):
+ if not args:
+ return ["ERROR: Use info like this: info <id>"]
+
+ info = None
+ try:
+ info = self.core.api.getFileData(int(args[0]))
+
+ except FileDoesNotExists:
+ return ["ERROR: Link doesn't exists."]
+
+ return ['LINK #%s: %s (%s) [%s][%s]' % (info.fid, info.name, info.format_size, info.statusmsg, info.plugin)]
+
+
+ def event_packinfo(self, args):
+ if not args:
+ return ["ERROR: Use packinfo like this: packinfo <id>"]
+
+ lines = []
+ pack = None
+ try:
+ pack = self.core.api.getPackageData(int(args[0]))
+
+ except PackageDoesNotExists:
+ return ["ERROR: Package doesn't exists."]
+
+ id = args[0]
+
+ self.more = []
+
+ lines.append('PACKAGE #%s: "%s" with %d links' % (id, pack.name, len(pack.links)))
+ for pyfile in pack.links:
+ self.more.append('LINK #%s: %s (%s) [%s][%s]' % (pyfile.fid, pyfile.name, pyfile.format_size,
+ pyfile.statusmsg, pyfile.plugin))
+
+ if len(self.more) < 6:
+ lines.extend(self.more)
+ self.more = []
+ else:
+ lines.extend(self.more[:6])
+ self.more = self.more[6:]
+ lines.append("%d more links do display." % len(self.more))
+
+ return lines
+
+
+ def event_more(self, args):
+ if not self.more:
+ return ["No more information to display."]
+
+ lines = self.more[:6]
+ self.more = self.more[6:]
+ lines.append("%d more links do display." % len(self.more))
+
+ return lines
+
+
+ def event_start(self, args):
+ self.core.api.unpauseServer()
+ return ["INFO: Starting downloads."]
+
+
+ def event_stop(self, args):
+ self.core.api.pauseServer()
+ return ["INFO: No new downloads will be started."]
+
+
+ def event_add(self, args):
+ if len(args) < 2:
+ return ['ERROR: Add links like this: "add <packagename|id> links". ',
+ "This will add the link <link> to to the package <package> / the package with id <id>!"]
+
+ pack = args[0].strip()
+ links = [x.strip() for x in args[1:]]
+
+ count_added = 0
+ count_failed = 0
+ try:
+ id = int(pack)
+ pack = self.core.api.getPackageData(id)
+ if not pack:
+ return ["ERROR: Package doesn't exists."]
+
+ #TODO add links
+
+ return ["INFO: Added %d links to Package %s [#%d]" % (len(links), pack['name'], id)]
+
+ except Exception:
+ # create new package
+ id = self.core.api.addPackage(pack, links, 1)
+ return ["INFO: Created new Package %s [#%d] with %d links." % (pack, id, len(links))]
+
+
+ def event_del(self, args):
+ if len(args) < 2:
+ return ["ERROR: Use del command like this: del -p|-l <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"]
+
+ if args[0] == "-p":
+ ret = self.core.api.deletePackages(map(int, args[1:]))
+ return ["INFO: Deleted %d packages!" % len(args[1:])]
+
+ elif args[0] == "-l":
+ ret = self.core.api.delLinks(map(int, args[1:]))
+ return ["INFO: Deleted %d links!" % len(args[1:])]
+
+ else:
+ return ["ERROR: Use del command like this: del <-p|-l> <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"]
+
+
+ def event_push(self, args):
+ if not args:
+ return ["ERROR: Push package to queue like this: push <package id>"]
+
+ id = int(args[0])
+ try:
+ info = self.core.api.getPackageInfo(id)
+ except PackageDoesNotExists:
+ return ["ERROR: Package #%d does not exist." % id]
+
+ self.core.api.pushToQueue(id)
+ return ["INFO: Pushed package #%d to queue." % id]
+
+
+ def event_pull(self, args):
+ if not args:
+ return ["ERROR: Pull package from queue like this: pull <package id>."]
+
+ id = int(args[0])
+ if not self.core.api.getPackageData(id):
+ return ["ERROR: Package #%d does not exist." % id]
+
+ self.core.api.pullFromQueue(id)
+ return ["INFO: Pulled package #%d from queue to collector." % id]
+
+
+ def event_c(self, args):
+ """ captcha answer """
+ if not args:
+ return ["ERROR: Captcha ID missing."]
+
+ task = self.core.captchaManager.getTaskByID(args[0])
+ if not task:
+ return ["ERROR: Captcha Task with ID %s does not exists." % args[0]]
+
+ task.setResult(" ".join(args[1:]))
+ return ["INFO: Result %s saved." % " ".join(args[1:])]
+
+
+ def event_help(self, args):
+ lines = ["The following commands are available:",
+ "add <package|packid> <links> [...] Adds link to package. (creates new package if it does not exist)",
+ "queue Shows all packages in the queue",
+ "collector Shows all packages in collector",
+ "del -p|-l <id> [...] Deletes all packages|links with the ids specified",
+ "info <id> Shows info of the link with id <id>",
+ "packinfo <id> Shows info of the package with id <id>",
+ "more Shows more info when the result was truncated",
+ "start Starts all downloads",
+ "stop Stops the download (but not abort active downloads)",
+ "push <id> Push package to queue",
+ "pull <id> Pull package from queue",
+ "status Show general download status",
+ "help Shows this help message"]
+ return lines
+
+
+class IRCError(Exception):
+
+ def __init__(self, value):
+ self.value = value
+
+
+ def __str__(self):
+ return repr(self.value)
diff --git a/pyload/plugin/addon/MergeFiles.py b/pyload/plugin/addon/MergeFiles.py
new file mode 100644
index 000000000..72dfd583d
--- /dev/null
+++ b/pyload/plugin/addon/MergeFiles.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import os
+import re
+
+from traceback import print_exc
+
+from pyload.plugin.Addon import Addon, threaded
+from pyload.utils import safe_join, fs_encode
+
+
+class MergeFiles(Addon):
+ __name = "MergeFiles"
+ __type = "addon"
+ __version = "0.13"
+
+ __config = [("activated", "bool", "Activated", True)]
+
+ __description = """Merges parts splitted with hjsplit"""
+ __license = "GPLv3"
+ __authors = [("and9000", "me@has-no-mail.com")]
+
+
+ BUFFER_SIZE = 4096
+
+
+ def setup(self):
+ pass
+
+
+ @threaded
+ def packageFinished(self, pack):
+ files = {}
+ fid_dict = {}
+ for fid, data in pack.getChildren().iteritems():
+ if re.search("\.\d{3}$", data['name']):
+ if data['name'][:-4] not in files:
+ files[data['name'][:-4]] = []
+ files[data['name'][:-4]].append(data['name'])
+ files[data['name'][:-4]].sort()
+ fid_dict[data['name']] = fid
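+ # e.g. "movie.avi.001" and "movie.avi.002" are grouped under the key "movie.avi" and merged in part order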
+
+ download_folder = self.config['general']['download_folder']
+
+ if self.config['general']['folder_per_package']:
+ download_folder = safe_join(download_folder, pack.folder)
+
+ for name, file_list in files.iteritems():
+ self.logInfo(_("Starting merging of"), name)
+
+ final_file = open(safe_join(download_folder, name), "wb")
+ for splitted_file in file_list:
+ self.logDebug("Merging part", splitted_file)
+
+ pyfile = self.core.files.getFile(fid_dict[splitted_file])
+
+ pyfile.setStatus("processing")
+
+ try:
+ with open(os.path.join(download_folder, splitted_file), "rb") as s_file:
+ size_written = 0
+ s_file_size = int(os.path.getsize(os.path.join(download_folder, splitted_file)))
+
+ while True:
+ f_buffer = s_file.read(self.BUFFER_SIZE)
+ if f_buffer:
+ final_file.write(f_buffer)
+ size_written += self.BUFFER_SIZE
+ pyfile.setProgress((size_written * 100) / s_file_size)
+ else:
+ break
+
+ self.logDebug("Finished merging part", splitted_file)
+
+ except Exception, e:
+ print_exc()
+
+ finally:
+ pyfile.setProgress(100)
+ pyfile.setStatus("finished")
+ pyfile.release()
+
+ self.logInfo(_("Finished merging of"), name)
diff --git a/pyload/plugin/addon/MultiHome.py b/pyload/plugin/addon/MultiHome.py
new file mode 100644
index 000000000..1bf78d2db
--- /dev/null
+++ b/pyload/plugin/addon/MultiHome.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+
+from time import time
+
+from pyload.plugin.Addon import Addon
+
+
+class MultiHome(Addon):
+ __name = "MultiHome"
+ __type = "addon"
+ __version = "0.12"
+
+ __config = [("interfaces", "str", "Interfaces", "None")]
+
+ __description = """Ip address changer"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+ def setup(self):
+ self.register = {}
+ self.interfaces = []
+ self.parseInterfaces(self.getConfig("interfaces").split(";"))
+ if not self.interfaces:
+ self.parseInterfaces([self.config['download']['interface']])
+ self.setConfig("interfaces", self.toConfig())
+
+
+ def toConfig(self):
+ return ";".join([i.adress for i in self.interfaces])
+
+
+ def parseInterfaces(self, interfaces):
+ for interface in interfaces:
+ if not interface or str(interface).lower() == "none":
+ continue
+ self.interfaces.append(Interface(interface))
+
+
+ def activate(self):
+ requestFactory = self.core.requestFactory
+ oldGetRequest = requestFactory.getRequest
+
+ def getRequest(pluginName, account=None):
+ iface = self.bestInterface(pluginName, account)
+ if iface:
+ iface.useFor(pluginName, account)
+ requestFactory.iface = lambda: iface.adress
+ self.logDebug("Using address", iface.adress)
+ return oldGetRequest(pluginName, account)
+
+ requestFactory.getRequest = getRequest
+
+
+ def bestInterface(self, pluginName, account):
+ best = None
+ for interface in self.interfaces:
+ if not best or interface.lastPluginAccess(pluginName, account) < best.lastPluginAccess(pluginName, account):
+ best = interface
+ return best
+
+
+class Interface(object):
+
+ def __init__(self, adress):
+ self.adress = adress
+ self.history = {}
+
+
+ def lastPluginAccess(self, pluginName, account):
+ if (pluginName, account) in self.history:
+ return self.history[(pluginName, account)]
+ return 0
+
+
+ def useFor(self, pluginName, account):
+ self.history[(pluginName, account)] = time()
+
+
+ def __repr__(self):
+ return "<Interface - %s>" % self.adress
diff --git a/pyload/plugin/addon/RestartFailed.py b/pyload/plugin/addon/RestartFailed.py
new file mode 100644
index 000000000..5611cc791
--- /dev/null
+++ b/pyload/plugin/addon/RestartFailed.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Addon import Addon
+
+
+class RestartFailed(Addon):
+ __name = "RestartFailed"
+ __type = "addon"
+ __version = "1.57"
+
+ __config = [("activated", "bool", "Activated" , True),
+ ("interval" , "int" , "Check interval in minutes", 90 )]
+
+ __description = """Periodically restart all failed downloads in queue"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ # event_list = ["pluginConfigChanged"]
+
+ MIN_INTERVAL = 15 * 60 #: 15m minimum check interval (value is in seconds)
+
+
+ def pluginConfigChanged(self, plugin, name, value):
+ if name == "interval":
+ interval = value * 60
+ if self.MIN_INTERVAL <= interval != self.interval:
+ self.core.scheduler.removeJob(self.cb)
+ self.interval = interval
+ self.initPeriodical()
+ else:
+ self.logDebug("Invalid interval value, kept current")
+
+
+ def periodical(self):
+ self.logDebug(_("Restart failed downloads"))
+ self.core.api.restartFailed()
+
+
+ def setup(self):
+ self.interval = 0
+
+
+ def activate(self):
+ self.pluginConfigChanged(self.__name, "interval", self.getConfig("interval"))
diff --git a/pyload/plugin/addon/RestartSlow.py b/pyload/plugin/addon/RestartSlow.py
new file mode 100644
index 000000000..8005abaea
--- /dev/null
+++ b/pyload/plugin/addon/RestartSlow.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+import pycurl
+
+from pyload.plugin.Addon import Addon
+
+
+class RestartSlow(Addon):
+ __name = "RestartSlow"
+ __type = "addon"
+ __version = "0.02"
+
+ __config = [("free_limit" , "int" , "Transfer speed threshold in kilobytes" , 100 ),
+ ("free_time" , "int" , "Sample interval in minutes" , 5 ),
+ ("premium_limit", "int" , "Transfer speed threshold for premium download in kilobytes", 300 ),
+ ("premium_time" , "int" , "Sample interval for premium download in minutes" , 2 ),
+ ("safe_mode" , "bool", "Don't restart if download is not resumable" , True)]
+
+ __description = """Restart slow downloads"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ event_map = {'download-start': "downloadStarts"}
+
+
+ def setup(self):
+ self.info = {'chunk': {}}
+
+
+ def periodical(self):
+ if not self.pyfile.req.dl:
+ return
+
+ if self.getConfig("safe_mode") and not self.pyfile.plugin.resumeDownload:
+ time = 30
+ limit = 5
+ else:
+ type = "premium" if self.pyfile.plugin.premium else "free"
+ time = max(30, self.getConfig("%s_time" % type) * 60)
+ limit = max(5, self.getConfig("%s_limit" % type) * 1024)
+
+ chunks = [chunk for chunk in self.pyfile.req.dl.chunks \
+                          if chunk.id not in self.info['chunk'] or self.info['chunk'][chunk.id] != (time, limit)]
+
+ for chunk in chunks:
+ chunk.c.setopt(pycurl.LOW_SPEED_TIME , time)
+ chunk.c.setopt(pycurl.LOW_SPEED_LIMIT, limit)
+
+ self.info['chunk'][chunk.id] = (time, limit)
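+        # Effect (illustrative): with the default config, a free download slower than
+        # 100 KiB/s for 5 minutes (or a premium one slower than 300 KiB/s for 2 minutes)
+        # makes pycurl abort the transfer via LOW_SPEED_LIMIT/LOW_SPEED_TIME, so the
+        # download fails and can be restarted.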
+
+
+ def downloadStarts(self, pyfile, url, filename):
+ if self.cb or (self.getConfig("safe_mode") and not pyfile.plugin.resumeDownload):
+ return
+
+ self.initPeriodical()
diff --git a/pyload/plugin/addon/SkipRev.py b/pyload/plugin/addon/SkipRev.py
new file mode 100644
index 000000000..6ff590792
--- /dev/null
+++ b/pyload/plugin/addon/SkipRev.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+
+from types import MethodType
+from urllib import unquote
+from urlparse import urlparse
+
+from pyload.plugin.Addon import Addon
+from pyload.plugin.Plugin import SkipDownload
+
+
+class SkipRev(Addon):
+ __name = "SkipRev"
+ __type = "addon"
+ __version = "0.15"
+
+ __config = [("tokeep", "int", "Number of rev files to keep for package (-1 to auto)", -1)]
+
+ __description = """Skip files ending with extension rev"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def _setup(self):
+        super(self.pyfile.plugin.__class__, self).setup()
+ if self.pyfile.hasStatus("skipped"):
+ raise SkipDownload(self.pyfile.getStatusName() or self.pyfile.pluginname)
+
+
+ def pyname(self, pyfile):
+ url = pyfile.url
+ plugin = pyfile.plugin
+
+ if hasattr(plugin, "info") and 'name' in plugin.info and plugin.info['name']:
+ name = plugin.info['name']
+
+ elif hasattr(plugin, "parseInfos"):
+ name = next(plugin.parseInfos([url]))['name']
+
+ elif hasattr(plugin, "getInfo"): #@NOTE: if parseInfos was not found, getInfo should be missing too
+ name = plugin.getInfo(url)['name']
+
+ else:
+ self.logWarning("Unable to grab file name")
+            name = urlparse(unquote(url)).path.split('/')[-1]
+
+ return name
+
+
+ def downloadPreparing(self, pyfile):
+ if pyfile.getStatusName() is "unskipped" or not pyname(pyfile).endswith(".rev"):
+ return
+
+ tokeep = self.getConfig("tokeep")
+
+ if tokeep:
+ saved = [True for link in pyfile.package().getChildren() \
+ if link.name.endswith(".rev") and (link.hasStatus("finished") or link.hasStatus("downloading"))].count(True)
+
+ if not saved or saved < tokeep: #: keep one rev at least in auto mode
+ return
+
+ pyfile.setCustomStatus("SkipRev", "skipped")
+        pyfile.plugin.setup = MethodType(self._setup.__func__, pyfile.plugin) #: work-around: inject status checker inside the preprocessing routine of the plugin
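+        # Behaviour sketch (illustrative): with tokeep=2 and five *.rev parts in a
+        # package, the first two that reach "finished"/"downloading" are allowed and
+        # every further rev file gets skipped; with tokeep=-1 (auto) at least one rev
+        # file is always kept.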
+
+
+ def downloadFailed(self, pyfile):
+ tokeep = self.getConfig("tokeep")
+
+ if not tokeep:
+ return
+
+ for link in pyfile.package().getChildren():
+ if link.hasStatus("skipped") and link.name.endswith(".rev"):
+ if tokeep > -1 or pyfile.name.endswith(".rev"):
+ link.setStatus("queued")
+ else:
+ link.setCustomStatus("unskipped", "queued")
+ return
diff --git a/pyload/plugin/addon/UnSkipOnFail.py b/pyload/plugin/addon/UnSkipOnFail.py
new file mode 100644
index 000000000..b640c7daa
--- /dev/null
+++ b/pyload/plugin/addon/UnSkipOnFail.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+
+from os.path import basename
+
+from pyload.datatype.File import PyFile
+from pyload.plugin.Addon import Addon
+from pyload.utils import fs_encode
+
+
+class UnSkipOnFail(Addon):
+ __name = "UnSkipOnFail"
+ __type = "addon"
+ __version = "0.02"
+
+ __config = [("activated", "bool", "Activated", True)]
+
+ __description = """When a download fails, restart skipped duplicates"""
+ __license = "GPLv3"
+ __authors = [("hagg", "")]
+
+
+ def downloadFailed(self, pyfile):
+ pyfile_name = basename(pyfile.name)
+ pid = pyfile.package().id
+ msg = _('look for skipped duplicates for %s (pid:%s)')
+ self.logInfo(msg % (pyfile_name, pid))
+ dups = self.findDuplicates(pyfile)
+ for link in dups:
+ # check if link is "skipped"(=4)
+ if link.status == 4:
+ lpid = link.packageID
+ self.logInfo(_('restart "%s" (pid:%s)') % (pyfile_name, lpid))
+ self.setLinkStatus(link, "queued")
+
+
+ def findDuplicates(self, pyfile):
+ """ Search all packages for duplicate links to "pyfile".
+ Duplicates are links that would overwrite "pyfile".
+ To test on duplicity the package-folder and link-name
+ of twolinks are compared (basename(link.name)).
+ So this method returns a list of all links with equal
+ package-folders and filenames as "pyfile", but except
+ the data for "pyfile" iotselöf.
+ It does MOT check the link's status.
+ """
+ dups = []
+ pyfile_name = fs_encode(basename(pyfile.name))
+ # get packages (w/o files, as most file data is useless here)
+ queue = self.core.api.getQueue()
+ for package in queue:
+ # check if package-folder equals pyfile's package folder
+ if fs_encode(package.folder) == fs_encode(pyfile.package().folder):
+ # now get packaged data w/ files/links
+ pdata = self.core.api.getPackageData(package.pid)
+ if pdata.links:
+ for link in pdata.links:
+ link_name = fs_encode(basename(link.name))
+ # check if link name collides with pdata's name
+ if link_name == pyfile_name:
+ # at last check if it is not pyfile itself
+ if link.fid != pyfile.id:
+ dups.append(link)
+ return dups
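+        # Example (illustrative): if "pyfile" is "movie.part1.rar" in a package with
+        # folder "Movies", every link named "movie.part1.rar" inside another package
+        # whose folder is also "Movies" is returned as a duplicate, whatever its
+        # status, as long as its fid differs from pyfile.id.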
+
+
+ def setLinkStatus(self, link, new_status):
+ """ Change status of "link" to "new_status".
+ "link" has to be a valid FileData object,
+ "new_status" has to be a valid status name
+ (i.e. "queued" for this Plugin)
+ It creates a temporary PyFile object using
+ "link" data, changes its status, and tells
+ the core.files-manager to save its data.
+ """
+ pyfile = PyFile(self.core.files,
+ link.fid,
+ link.url,
+ link.name,
+ link.size,
+ link.status,
+ link.error,
+ link.plugin,
+ link.packageID,
+ link.order)
+ pyfile.setStatus(new_status)
+ self.core.files.save()
+ pyfile.release()
diff --git a/pyload/plugin/addon/UpdateManager.py b/pyload/plugin/addon/UpdateManager.py
new file mode 100644
index 000000000..c5c34d1c8
--- /dev/null
+++ b/pyload/plugin/addon/UpdateManager.py
@@ -0,0 +1,305 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import re
+import sys
+
+from operator import itemgetter
+from os import path, remove, stat
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Addon import Expose, Addon, threaded
+from pyload.utils import safe_join
+
+
+class UpdateManager(Addon):
+ __name = "UpdateManager"
+ __type = "addon"
+ __version = "0.42"
+
+ __config = [("activated" , "bool" , "Activated" , True ),
+ ("mode" , "pyLoad + plugins;plugins only", "Check updates for" , "pyLoad + plugins"),
+ ("interval" , "int" , "Check interval in hours" , 8 ),
+ ("autorestart" , "bool" , "Automatically restart pyLoad when required" , True ),
+ ("reloadplugins", "bool" , "Monitor plugins for code changes in debug mode", True ),
+ ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , True )]
+
+ __description = """Check for updates"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ # event_list = ["pluginConfigChanged"]
+
+ SERVER_URL = "http://updatemanager.pyload.org"
+ VERSION = re.compile(r'__version.*=.*("|\')([\d.]+)')
+ MIN_INTERVAL = 3 * 60 * 60 #: 3h minimum check interval (value is in seconds)
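+    # VERSION example (illustrative): for a plugin source line such as
+    #     __version = "0.42"
+    # VERSION.search(content).group(2) yields "0.42", which is compared below against
+    # the version announced by the update server.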
+
+
+ def pluginConfigChanged(self, plugin, name, value):
+ if name == "interval":
+ interval = value * 60 * 60
+ if self.MIN_INTERVAL <= interval != self.interval:
+ self.core.scheduler.removeJob(self.cb)
+ self.interval = interval
+ self.initPeriodical()
+ else:
+ self.logDebug("Invalid interval value, kept current")
+
+ elif name == "reloadplugins":
+ if self.cb2:
+ self.core.scheduler.removeJob(self.cb2)
+ if value is True and self.core.debug:
+ self.periodical2()
+
+
+ def activate(self):
+ self.pluginConfigChanged(self.__name, "interval", self.getConfig("interval"))
+ x = lambda: self.pluginConfigChanged(self.__name, "reloadplugins", self.getConfig("reloadplugins"))
+ self.core.scheduler.addJob(10, x, threaded=False)
+
+
+ def deactivate(self):
+ self.pluginConfigChanged(self.__name, "reloadplugins", False)
+
+
+ def setup(self):
+ self.cb2 = None
+ self.interval = 0
+ self.updating = False
+ self.info = {'pyload': False, 'version': None, 'plugins': False}
+ self.mtimes = {} #: store modification time for each plugin
+
+
+ def periodical2(self):
+ if not self.updating:
+ self.autoreloadPlugins()
+
+ self.cb2 = self.core.scheduler.addJob(4, self.periodical2, threaded=False)
+
+
+ @Expose
+ def autoreloadPlugins(self):
+ """ reload and reindex all modified plugins """
+ modules = filter(
+            lambda m: m and (m.__name__.startswith("pyload.plugin.") or
+                             m.__name__.startswith("userplugins.")) and
+                      m.__name__.count(".") >= 2, sys.modules.itervalues()
+ )
+
+ reloads = []
+
+ for m in modules:
+            root, type, name = m.__name__.rsplit(".", 2)
+ id = (type, name)
+ if type in self.core.pluginManager.plugins:
+ f = m.__file__.replace(".pyc", ".py")
+ if not path.isfile(f):
+ continue
+
+ mtime = stat(f).st_mtime
+
+ if id not in self.mtimes:
+ self.mtimes[id] = mtime
+ elif self.mtimes[id] < mtime:
+ reloads.append(id)
+ self.mtimes[id] = mtime
+
+ return True if self.core.pluginManager.reloadPlugins(reloads) else False
+
+
+ def periodical(self):
+ if self.info['pyload'] or self.getConfig("nodebugupdate") and self.core.debug:
+ return
+
+ self.updateThread()
+
+
+ def server_request(self):
+ try:
+ return getURL(self.SERVER_URL, get={'v': self.core.api.getServerVersion()}).splitlines()
+ except Exception:
+ self.logWarning(_("Unable to contact server to get updates"))
+
+
+ @threaded
+ def updateThread(self):
+ self.updating = True
+
+ status = self.update(onlyplugin=self.getConfig("mode") == "plugins only")
+
+        if status == 2 and self.getConfig("autorestart"):
+ self.core.api.restart()
+ else:
+ self.updating = False
+
+
+ @Expose
+ def updatePlugins(self):
+ """ simple wrapper for calling plugin update quickly """
+ return self.update(onlyplugin=True)
+
+
+ @Expose
+ def update(self, onlyplugin=False):
+ """ check for updates """
+ data = self.server_request()
+
+ if not data:
+ exitcode = 0
+
+ elif data[0] == "None":
+ self.logInfo(_("No new pyLoad version available"))
+ updates = data[1:]
+ exitcode = self._updatePlugins(updates)
+
+ elif onlyplugin:
+ exitcode = 0
+
+ else:
+ newversion = data[0]
+ self.logInfo(_("*** New pyLoad Version %s available ***") % newversion)
+ self.logInfo(_("*** Get it here: https://github.com/pyload/pyload/releases ***"))
+ exitcode = 3
+ self.info['pyload'] = True
+ self.info['version'] = newversion
+
+ return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required; 3 = No plugins updated, new pyLoad version available
+
+
+ def _updatePlugins(self, updates):
+ """ check for plugin updates """
+
+ if self.info['plugins']:
+ return False #: plugins were already updated
+
+ exitcode = 0
+ updated = []
+
+ url = updates[0]
+ schema = updates[1].split('|')
+
+ if "BLACKLIST" in updates:
+ blacklist = updates[updates.index('BLACKLIST') + 1:]
+ updates = updates[2:updates.index('BLACKLIST')]
+ else:
+ blacklist = None
+ updates = updates[2:]
+
+ upgradable = [dict(zip(schema, x.split('|'))) for x in updates]
+ blacklisted = [(x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]) for x in blacklist] if blacklist else []
+
+ if blacklist:
+ # Protect internal plugins against removing
+            for i, (t, n) in enumerate(blacklisted):
+ if t == "internal":
+ blacklisted.pop(i)
+ continue
+
+ for idx, plugin in enumerate(upgradable):
+ if n == plugin['name'] and t == plugin['type']:
+ upgradable.pop(idx)
+ break
+
+ for t, n in self.removePlugins(sorted(blacklisted)):
+ self.logInfo(_("Removed blacklisted plugin [%(type)s] %(name)s") % {
+ 'type': t,
+ 'name': n,
+ })
+
+ for plugin in sorted(upgradable, key=itemgetter("type", "name")):
+ filename = plugin['name']
+ type = plugin['type']
+ version = plugin['version']
+
+ if filename.endswith(".pyc"):
+ name = filename[:filename.find("_")]
+ else:
+ name = filename.replace(".py", "")
+
+ plugins = getattr(self.core.pluginManager, "%sPlugins" % type)
+
+ oldver = float(plugins[name]['version']) if name in plugins else None
+ newver = float(version)
+
+ if not oldver:
+ msg = "New plugin: [%(type)s] %(name)s (v%(newver).2f)"
+ elif newver > oldver:
+ msg = "New version of plugin: [%(type)s] %(name)s (v%(oldver).2f -> v%(newver).2f)"
+ else:
+ continue
+
+ self.logInfo(_(msg) % {'type' : type,
+ 'name' : name,
+ 'oldver': oldver,
+ 'newver': newver})
+ try:
+ content = getURL(url % plugin)
+ m = self.VERSION.search(content)
+
+ if m and m.group(2) == version:
+ with open(safe_join("userplugins", prefix, filename), "wb") as f:
+ f.write(content)
+
+                    updated.append((type, name))
+ else:
+ raise Exception, _("Version mismatch")
+
+ except Exception, e:
+ self.logError(_("Error updating plugin: %s") % filename, str(e))
+
+ if updated:
+ reloaded = self.core.pluginManager.reloadPlugins(updated)
+ if reloaded:
+ self.logInfo(_("Plugins updated and reloaded"))
+ exitcode = 1
+ else:
+ self.logInfo(_("*** Plugins have been updated, but need a pyLoad restart to be reloaded ***"))
+ self.info['plugins'] = True
+ exitcode = 2
+ else:
+ self.logInfo(_("No plugin updates available"))
+
+ return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required
+
+
+ @Expose
+ def removePlugins(self, type_plugins):
+ """ delete plugins from disk """
+
+ if not type_plugins:
+ return
+
+ self.logDebug("Requested deletion of plugins: %s" % type_plugins)
+
+ removed = []
+
+ for type, name in type_plugins:
+ err = False
+ file = name + ".py"
+
+ for root in ("userplugins", path.join(pypath, "pyload", "plugins")):
+
+ filename = safe_join(root, type, file)
+ try:
+ remove(filename)
+ except Exception, e:
+ self.logDebug("Error deleting: %s" % path.basename(filename), e)
+ err = True
+
+ filename += "c"
+ if path.isfile(filename):
+ try:
+ if type == "addon":
+ self.manager.deactivateAddon(name)
+ remove(filename)
+ except Exception, e:
+ self.logDebug("Error deleting: %s" % path.basename(filename), e)
+ err = True
+
+ if not err:
+ id = (type, name)
+ removed.append(id)
+
+ return removed #: return a list of the plugins successfully removed
diff --git a/pyload/plugin/addon/WindowsPhoneToastNotify.py b/pyload/plugin/addon/WindowsPhoneToastNotify.py
new file mode 100644
index 000000000..fadc17d04
--- /dev/null
+++ b/pyload/plugin/addon/WindowsPhoneToastNotify.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+import httplib
+import time
+
+from pyload.plugin.Addon import Addon
+
+
+class WindowsPhoneToastNotify(Addon):
+ __name = "WindowsPhoneToastNotify"
+ __type = "addon"
+ __version = "0.03"
+
+ __config = [("force" , "bool", "Force even if client is connected" , False),
+ ("pushId" , "str" , "pushId" , "" ),
+ ("pushUrl" , "str" , "pushUrl" , "" ),
+ ("pushTimeout", "int" , "Timeout between notifications in seconds", 0 )]
+
+ __description = """Send push notifications to Windows Phone"""
+ __license = "GPLv3"
+ __authors = [("Andy Voigt", "phone-support@hotmail.de")]
+
+
+ def getXmlData(self):
+ myxml = ("<?xml version='1.0' encoding='utf-8'?> <wp:Notification xmlns:wp='WPNotification'> "
+ "<wp:Toast> <wp:Text1>Pyload Mobile</wp:Text1> <wp:Text2>Captcha waiting!</wp:Text2> "
+ "</wp:Toast> </wp:Notification>")
+ return myxml
+
+
+ def doRequest(self):
+ URL = self.getConfig("pushUrl")
+ request = self.getXmlData()
+ webservice = httplib.HTTP(URL)
+ webservice.putrequest("POST", self.getConfig("pushId"))
+ webservice.putheader("Host", URL)
+ webservice.putheader("Content-type", "text/xml")
+ webservice.putheader("X-NotificationClass", "2")
+ webservice.putheader("X-WindowsPhone-Target", "toast")
+ webservice.putheader("Content-length", "%d" % len(request))
+ webservice.endheaders()
+ webservice.send(request)
+ webservice.close()
+ self.setStorage("LAST_NOTIFY", time.time())
+
+
+ def captchaTask(self, task):
+ if not self.getConfig("pushId") or not self.getConfig("pushUrl"):
+ return False
+
+ if self.core.isClientConnected() and not self.getConfig("force"):
+ return False
+
+        if (time.time() - float(self.getStorage("LAST_NOTIFY", 0))) < self.getConfig("pushTimeout"):
+ return False
+
+ self.doRequest()
diff --git a/pyload/plugin/addon/XMPPInterface.py b/pyload/plugin/addon/XMPPInterface.py
new file mode 100644
index 000000000..8baffe284
--- /dev/null
+++ b/pyload/plugin/addon/XMPPInterface.py
@@ -0,0 +1,252 @@
+# -*- coding: utf-8 -*-
+
+from pyxmpp import streamtls
+from pyxmpp.all import JID, Message
+from pyxmpp.interface import implements
+from pyxmpp.interfaces import *
+from pyxmpp.jabber.client import JabberClient
+
+from pyload.plugin.addon.IRCInterface import IRCInterface
+
+
+class XMPPInterface(IRCInterface, JabberClient):
+ __name = "XMPPInterface"
+ __type = "addon"
+ __version = "0.11"
+
+ __config = [("jid" , "str" , "Jabber ID" , "user@exmaple-jabber-server.org" ),
+ ("pw" , "str" , "Password" , "" ),
+ ("tls" , "bool", "Use TLS" , False ),
+ ("owners" , "str" , "List of JIDs accepting commands from", "me@icq-gateway.org;some@msn-gateway.org"),
+ ("info_file", "bool", "Inform about every file finished" , False ),
+ ("info_pack", "bool", "Inform about every package finished" , True ),
+ ("captcha" , "bool", "Send captcha requests" , True )]
+
+ __description = """Connect to jabber and let owner perform different tasks"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org")]
+
+
+ implements(IMessageHandlersProvider)
+
+
+ def __init__(self, core, manager):
+ IRCInterface.__init__(self, core, manager)
+
+ self.jid = JID(self.getConfig("jid"))
+ password = self.getConfig("pw")
+
+ # if bare JID is provided add a resource -- it is required
+ if not self.jid.resource:
+ self.jid = JID(self.jid.node, self.jid.domain, "pyLoad")
+
+ if self.getConfig("tls"):
+ tls_settings = streamtls.TLSSettings(require=True, verify_peer=False)
+ auth = ("sasl:PLAIN", "sasl:DIGEST-MD5")
+ else:
+ tls_settings = None
+ auth = ("sasl:DIGEST-MD5", "digest")
+
+ # setup client with provided connection information
+ # and identity data
+ JabberClient.__init__(self, self.jid, password,
+ disco_name="pyLoad XMPP Client", disco_type="bot",
+ tls_settings=tls_settings, auth_methods=auth)
+
+ self.interface_providers = [
+ VersionHandler(self),
+ self,
+ ]
+
+
+ def activate(self):
+ self.new_package = {}
+
+ self.start()
+
+
+ def packageFinished(self, pypack):
+ try:
+ if self.getConfig("info_pack"):
+ self.announce(_("Package finished: %s") % pypack.name)
+ except Exception:
+ pass
+
+
+ def downloadFinished(self, pyfile):
+ try:
+ if self.getConfig("info_file"):
+ self.announce(
+ _("Download finished: %(name)s @ %(plugin)s") % {"name": pyfile.name, "plugin": pyfile.pluginname})
+ except Exception:
+ pass
+
+
+ def run(self):
+ # connect to IRC etc.
+ self.connect()
+ try:
+ self.loop()
+ except Exception, ex:
+ self.logError(ex)
+
+
+ def stream_state_changed(self, state, arg):
+ """This one is called when the state of stream connecting the component
+ to a server changes. This will usually be used to let the user
+ know what is going on."""
+ self.logDebug("*** State changed: %s %r ***" % (state, arg))
+
+
+ def disconnected(self):
+ self.logDebug("Client was disconnected")
+
+
+ def stream_closed(self, stream):
+ self.logDebug("Stream was closed", stream)
+
+
+ def stream_error(self, err):
+ self.logDebug("Stream Error", err)
+
+
+ def get_message_handlers(self):
+ """Return list of (message_type, message_handler) tuples.
+
+ The handlers returned will be called when matching message is received
+ in a client session."""
+ return [("normal", self.message)]
+
+
+ def message(self, stanza):
+ """Message handler for the component."""
+ subject = stanza.get_subject()
+ body = stanza.get_body()
+ t = stanza.get_type()
+ self.logDebug("Message from %s received." % unicode(stanza.get_from()))
+ self.logDebug("Body: %s Subject: %s Type: %s" % (body, subject, t))
+
+ if t == "headline":
+ # 'headline' messages should never be replied to
+ return True
+ if subject:
+ subject = u"Re: " + subject
+
+ to_jid = stanza.get_from()
+ from_jid = stanza.get_to()
+
+ #j = JID()
+ to_name = to_jid.as_utf8()
+ from_name = from_jid.as_utf8()
+
+ names = self.getConfig("owners").split(";")
+
+ if to_name in names or to_jid.node + "@" + to_jid.domain in names:
+ messages = []
+
+ trigger = "pass"
+ args = None
+
+ try:
+ temp = body.split()
+ trigger = temp[0]
+ if len(temp) > 1:
+ args = temp[1:]
+ except Exception:
+ pass
+
+ handler = getattr(self, "event_%s" % trigger, self.event_pass)
+ try:
+ res = handler(args)
+ for line in res:
+ m = Message(
+ to_jid=to_jid,
+ from_jid=from_jid,
+ stanza_type=stanza.get_type(),
+ subject=subject,
+ body=line)
+
+ messages.append(m)
+ except Exception, e:
+ self.logError(e)
+
+ return messages
+
+ else:
+ return True
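+    # Dispatch sketch (assumes the event_* command handlers inherited from IRCInterface):
+    # a body like "status" is split into the trigger "status" plus optional arguments
+    # and routed to self.event_status(args) if such a handler exists, falling back to
+    # self.event_pass; every returned line is sent back as its own Message stanza.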
+
+
+ def response(self, msg, origin=""):
+ return self.announce(msg)
+
+
+ def announce(self, message):
+ """ send message to all owners"""
+ for user in self.getConfig("owners").split(";"):
+ self.logDebug("Send message to", user)
+
+ to_jid = JID(user)
+
+ m = Message(from_jid=self.jid,
+ to_jid=to_jid,
+ stanza_type="chat",
+ body=message)
+
+ stream = self.get_stream()
+ if not stream:
+ self.connect()
+ stream = self.get_stream()
+
+ stream.send(m)
+
+
+ def beforeReconnecting(self, ip):
+ self.disconnect()
+
+
+ def afterReconnecting(self, ip):
+ self.connect()
+
+
+class VersionHandler(object):
+ """Provides handler for a version query.
+
+ This class will answer version query and announce 'jabber:iq:version' namespace
+ in the client's disco#info results."""
+
+ implements(IIqHandlersProvider, IFeaturesProvider)
+
+
+ def __init__(self, client):
+ """Just remember who created this."""
+ self.client = client
+
+
+ def get_features(self):
+ """Return namespace which should the client include in its reply to a
+ disco#info query."""
+ return ["jabber:iq:version"]
+
+
+ def get_iq_get_handlers(self):
+ """Return list of tuples (element_name, namespace, handler) describing
+ handlers of <iq type='get'/> stanzas"""
+ return [("query", "jabber:iq:version", self.get_version)]
+
+
+ def get_iq_set_handlers(self):
+ """Return empty list, as this class provides no <iq type='set'/> stanza handler."""
+ return []
+
+
+ def get_version(self, iq):
+ """Handler for jabber:iq:version queries.
+
+ jabber:iq:version queries are not supported directly by PyXMPP, so the
+ XML node is accessed directly through the libxml2 API. This should be
+ used very carefully!"""
+ iq = iq.make_result_response()
+ q = iq.new_query("jabber:iq:version")
+ q.newTextChild(q.ns(), "name", "Echo component")
+ q.newTextChild(q.ns(), "version", "1.0")
+ return iq
diff --git a/pyload/plugins/addon/__init__.py b/pyload/plugin/addon/__init__.py
index 40a96afc6..40a96afc6 100644
--- a/pyload/plugins/addon/__init__.py
+++ b/pyload/plugin/addon/__init__.py
diff --git a/pyload/plugin/captcha/AdYouLike.py b/pyload/plugin/captcha/AdYouLike.py
new file mode 100644
index 000000000..9c32d0569
--- /dev/null
+++ b/pyload/plugin/captcha/AdYouLike.py
@@ -0,0 +1,107 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Captcha import Captcha
+from pyload.utils import json_loads
+
+
+class AdYouLike(Captcha):
+ __name = "AdYouLike"
+ __type = "captcha"
+ __version = "0.02"
+
+ __description = """AdYouLike captcha service plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ AYL_PATTERN = r'Adyoulike\.create\s*\((.+?)\)'
+ CALLBACK_PATTERN = r'(Adyoulike\.g\._jsonp_\d+)'
+
+
+ def detect_key(self, html=None):
+ if not html:
+ if hasattr(self.plugin, "html") and self.plugin.html:
+ html = self.plugin.html
+ else:
+ errmsg = _("AdYouLike html not found")
+ self.plugin.fail(errmsg)
+ raise TypeError(errmsg)
+
+ m = re.search(self.AYL_PATTERN, html)
+ n = re.search(self.CALLBACK_PATTERN, html)
+ if m and n:
+ self.key = (m.group(1).strip(), n.group(1).strip())
+ self.plugin.logDebug("AdYouLike ayl|callback: %s | %s" % self.key)
+ return self.key #: key is the tuple(ayl, callback)
+ else:
+ self.plugin.logDebug("AdYouLike ayl or callback not found")
+ return None
+
+
+ def challenge(self, key=None):
+ if not key:
+ if self.detect_key():
+ key = self.key
+ else:
+ errmsg = _("AdYouLike key not found")
+ self.plugin.fail(errmsg)
+ raise TypeError(errmsg)
+
+ ayl, callback = key
+
+ # {"adyoulike":{"key":"P~zQ~O0zV0WTiAzC-iw0navWQpCLoYEP"},
+ # "all":{"element_id":"ayl_private_cap_92300","lang":"fr","env":"prod"}}
+ ayl = json_loads(ayl)
+
+ html = self.plugin.req.load("http://api-ayl.appspot.com/challenge",
+ get={'key' : ayl['adyoulike']['key'],
+ 'env' : ayl['all']['env'],
+ 'callback': callback})
+ try:
+ challenge = json_loads(re.search(callback + r'\s*\((.+?)\)', html).group(1))
+ except Exception:
+ errmsg = _("AdYouLike challenge pattern not found")
+ self.plugin.error(errmsg)
+ raise ValueError(errmsg)
+
+ self.plugin.logDebug("AdYouLike challenge: %s" % challenge)
+
+ return self.result(ayl, challenge)
+
+
+ def result(self, server, challenge):
+ # Adyoulike.g._jsonp_5579316662423138
+ # ({"translations":{"fr":{"instructions_visual":"Recopiez « Soonnight » ci-dessous :"}},
+ # "site_under":true,"clickable":true,"pixels":{"VIDEO_050":[],"DISPLAY":[],"VIDEO_000":[],"VIDEO_100":[],
+ # "VIDEO_025":[],"VIDEO_075":[]},"medium_type":"image/adyoulike",
+ # "iframes":{"big":"<iframe src=\"http://www.soonnight.com/campagn.html\" scrolling=\"no\"
+ # height=\"250\" width=\"300\" frameborder=\"0\"></iframe>"},"shares":{},"id":256,
+ # "token":"e6QuI4aRSnbIZJg02IsV6cp4JQ9~MjA1","formats":{"small":{"y":300,"x":0,"w":300,"h":60},
+ # "big":{"y":0,"x":0,"w":300,"h":250},"hover":{"y":440,"x":0,"w":300,"h":60}},
+ # "tid":"SqwuAdxT1EZoi4B5q0T63LN2AkiCJBg5"})
+
+ if isinstance(server, basestring):
+ server = json_loads(server)
+
+ if isinstance(challenge, basestring):
+ challenge = json_loads(challenge)
+
+ try:
+ instructions_visual = challenge['translations'][server['all']['lang']]['instructions_visual']
+            response = re.search(u'«(.+?)»', instructions_visual).group(1).strip()
+ except Exception:
+ errmsg = _("AdYouLike result not found")
+ self.plugin.error(errmsg)
+ raise ValueError(errmsg)
+
+ result = {'_ayl_captcha_engine' : "adyoulike",
+ '_ayl_env' : server['all']['env'],
+ '_ayl_tid' : challenge['tid'],
+ '_ayl_token_challenge': challenge['token'],
+ '_ayl_response' : response}
+
+ self.plugin.logDebug("AdYouLike result: %s" % result)
+
+ return result
diff --git a/pyload/plugin/captcha/AdsCaptcha.py b/pyload/plugin/captcha/AdsCaptcha.py
new file mode 100644
index 000000000..f879151ff
--- /dev/null
+++ b/pyload/plugin/captcha/AdsCaptcha.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from random import random
+
+from pyload.plugin.Captcha import Captcha
+
+
+class AdsCaptcha(Captcha):
+ __name = "AdsCaptcha"
+ __type = "captcha"
+ __version = "0.06"
+
+ __description = """AdsCaptcha captcha service plugin"""
+ __license = "GPLv3"
+ __authors = [("pyLoad Team", "admin@pyload.org")]
+
+
+ CAPTCHAID_PATTERN = r'api\.adscaptcha\.com/Get\.aspx\?[^"\']*CaptchaId=(\d+)'
+ PUBLICKEY_PATTERN = r'api\.adscaptcha\.com/Get\.aspx\?[^"\']*PublicKey=([\w-]+)'
+
+
+ def detect_key(self, html=None):
+ if not html:
+ if hasattr(self.plugin, "html") and self.plugin.html:
+ html = self.plugin.html
+ else:
+ errmsg = _("AdsCaptcha html not found")
+ self.plugin.fail(errmsg)
+ raise TypeError(errmsg)
+
+ m = re.search(self.PUBLICKEY_PATTERN, html)
+ n = re.search(self.CAPTCHAID_PATTERN, html)
+ if m and n:
+ self.key = (m.group(1).strip(), n.group(1).strip()) #: key is the tuple(PublicKey, CaptchaId)
+ self.plugin.logDebug("AdsCaptcha key|id: %s | %s" % self.key)
+ return self.key
+ else:
+ self.plugin.logDebug("AdsCaptcha key or id not found")
+ return None
+
+
+ def challenge(self, key=None):
+ if not key:
+ if self.detect_key():
+ key = self.key
+ else:
+ errmsg = _("AdsCaptcha key not found")
+ self.plugin.fail(errmsg)
+ raise TypeError(errmsg)
+
+ PublicKey, CaptchaId = key
+
+ html = self.plugin.req.load("http://api.adscaptcha.com/Get.aspx", get={'CaptchaId': CaptchaId, 'PublicKey': PublicKey})
+ try:
+ challenge = re.search("challenge: '(.+?)',", html).group(1)
+ server = re.search("server: '(.+?)',", html).group(1)
+ except Exception:
+ errmsg = _("AdsCaptcha challenge pattern not found")
+ self.plugin.error(errmsg)
+ raise ValueError(errmsg)
+
+ self.plugin.logDebug("AdsCaptcha challenge: %s" % challenge)
+
+ return challenge, self.result(server, challenge)
+
+
+ def result(self, server, challenge):
+ result = self.plugin.decryptCaptcha("%sChallenge.aspx" % server,
+ get={'cid': challenge, 'dummy': random()},
+ cookies=True,
+ imgtype="jpg")
+
+ self.plugin.logDebug("AdsCaptcha result: %s" % result)
+
+ return result
diff --git a/pyload/plugin/captcha/ReCaptcha.py b/pyload/plugin/captcha/ReCaptcha.py
new file mode 100644
index 000000000..076a30214
--- /dev/null
+++ b/pyload/plugin/captcha/ReCaptcha.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Captcha import Captcha
+
+
+class ReCaptcha(Captcha):
+ __name = "ReCaptcha"
+ __type = "captcha"
+ __version = "0.08"
+
+ __description = """ReCaptcha captcha service plugin"""
+ __license = "GPLv3"
+ __authors = [("pyLoad Team", "admin@pyload.org")]
+
+
+ KEY_PATTERN = r'recaptcha(?:/api|\.net)/(?:challenge|noscript)\?k=([\w-]+)'
+ KEY_AJAX_PATTERN = r'Recaptcha\.create\s*\(\s*["\']([\w-]+)'
+
+
+ def detect_key(self, html=None):
+ if not html:
+ if hasattr(self.plugin, "html") and self.plugin.html:
+ html = self.plugin.html
+ else:
+ errmsg = _("ReCaptcha html not found")
+ self.plugin.fail(errmsg)
+ raise TypeError(errmsg)
+
+ m = re.search(self.KEY_PATTERN, html) or re.search(self.KEY_AJAX_PATTERN, html)
+ if m:
+ self.key = m.group(1).strip()
+ self.plugin.logDebug("ReCaptcha key: %s" % self.key)
+ return self.key
+ else:
+ self.plugin.logDebug("ReCaptcha key not found")
+ return None
+
+
+ def challenge(self, key=None):
+ if not key:
+ if self.detect_key():
+ key = self.key
+ else:
+ errmsg = _("ReCaptcha key not found")
+ self.plugin.fail(errmsg)
+ raise TypeError(errmsg)
+
+ html = self.plugin.req.load("http://www.google.com/recaptcha/api/challenge", get={'k': key})
+ try:
+ challenge = re.search("challenge : '(.+?)',", html).group(1)
+ server = re.search("server : '(.+?)',", html).group(1)
+ except Exception:
+ errmsg = _("ReCaptcha challenge pattern not found")
+ self.plugin.error(errmsg)
+ raise ValueError(errmsg)
+
+ self.plugin.logDebug("ReCaptcha challenge: %s" % challenge)
+
+ return challenge, self.result(server, challenge)
+
+
+ def result(self, server, challenge):
+ result = self.plugin.decryptCaptcha("%simage" % server,
+ get={'c': challenge},
+ cookies=True,
+ forceUser=True,
+ imgtype="jpg")
+
+ self.plugin.logDebug("ReCaptcha result: %s" % result)
+
+ return result
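+# Usage sketch (illustrative, constructor signature assumed from the self.plugin usage
+# above): inside a hoster plugin one would typically write
+#     recaptcha = ReCaptcha(self)
+#     challenge, response = recaptcha.challenge()
+# and submit both values with the hoster's download form.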
diff --git a/pyload/plugin/captcha/SolveMedia.py b/pyload/plugin/captcha/SolveMedia.py
new file mode 100644
index 000000000..c2b1ba7eb
--- /dev/null
+++ b/pyload/plugin/captcha/SolveMedia.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Captcha import Captcha
+
+
+class SolveMedia(Captcha):
+ __name = "SolveMedia"
+ __type = "captcha"
+ __version = "0.06"
+
+ __description = """SolveMedia captcha service plugin"""
+ __license = "GPLv3"
+ __authors = [("pyLoad Team", "admin@pyload.org")]
+
+
+ KEY_PATTERN = r'api\.solvemedia\.com/papi/challenge\.(?:no)?script\?k=(.+?)["\']'
+
+
+ def challenge(self, key=None):
+ if not key:
+ if self.detect_key():
+ key = self.key
+ else:
+ errmsg = _("SolveMedia key not found")
+ self.plugin.fail(errmsg)
+ raise TypeError(errmsg)
+
+ html = self.plugin.req.load("http://api.solvemedia.com/papi/challenge.noscript", get={'k': key})
+ try:
+ challenge = re.search(r'<input type=hidden name="adcopy_challenge" id="adcopy_challenge" value="([^"]+)">',
+ html).group(1)
+ server = "http://api.solvemedia.com/papi/media"
+ except Exception:
+ errmsg = _("SolveMedia challenge pattern not found")
+ self.plugin.error(errmsg)
+ raise ValueError(errmsg)
+
+ self.plugin.logDebug("SolveMedia challenge: %s" % challenge)
+
+ return challenge, self.result(server, challenge)
+
+
+ def result(self, server, challenge):
+ result = self.plugin.decryptCaptcha(server, get={'c': challenge}, imgtype="gif")
+
+ self.plugin.logDebug("SolveMedia result: %s" % result)
+
+ return result
diff --git a/pyload/plugins/captcha/__init__.py b/pyload/plugin/captcha/__init__.py
index 40a96afc6..40a96afc6 100644
--- a/pyload/plugins/captcha/__init__.py
+++ b/pyload/plugin/captcha/__init__.py
diff --git a/pyload/plugin/container/CCF.py b/pyload/plugin/container/CCF.py
new file mode 100644
index 000000000..943c114c8
--- /dev/null
+++ b/pyload/plugin/container/CCF.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import re
+
+from os import makedirs
+from os.path import exists
+from urllib2 import build_opener
+
+from MultipartPostHandler import MultipartPostHandler
+
+from pyload.plugin.Container import Container
+from pyload.utils import safe_join
+
+
+class CCF(Container):
+ __name = "CCF"
+ __version = "0.20"
+
+ __pattern = r'.+\.ccf'
+
+ __description = """CCF container decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Willnix", "Willnix@pyload.org")]
+
+
+ def decrypt(self, pyfile):
+ infile = pyfile.url.replace("\n", "")
+
+ opener = build_opener(MultipartPostHandler)
+ params = {"src": "ccf",
+ "filename": "test.ccf",
+ "upload": open(infile, "rb")}
+ tempdlc_content = opener.open('http://service.jdownloader.net/dlcrypt/getDLC.php', params).read()
+
+ download_folder = self.config['general']['download_folder']
+
+ tempdlc_name = safe_join(download_folder, "tmp_%s.dlc" % pyfile.name)
+ with open(tempdlc_name, "w") as tempdlc:
+ tempdlc.write(re.search(r'<dlc>(.*)</dlc>', tempdlc_content, re.S).group(1))
+
+ self.urls = [tempdlc_name]
diff --git a/pyload/plugin/container/LinkList.py b/pyload/plugin/container/LinkList.py
new file mode 100644
index 000000000..f8134f03d
--- /dev/null
+++ b/pyload/plugin/container/LinkList.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+import codecs
+
+from pyload.plugin.Container import Container
+from pyload.utils import fs_encode
+
+
+class LinkList(Container):
+ __name = "LinkList"
+ __version = "0.12"
+
+ __pattern = r'.+\.txt'
+ __config = [("clear", "bool", "Clear Linklist after adding", False),
+ ("encoding", "string", "File encoding (default utf-8)", "")]
+
+ __description = """Read link lists in txt format"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org"),
+ ("jeix", "jeix@hasnomail.com")]
+
+
+ def decrypt(self, pyfile):
+ try:
+ file_enc = codecs.lookup(self.getConfig("encoding")).name
+ except Exception:
+ file_enc = "utf-8"
+
+ file_name = fs_encode(pyfile.url)
+
+ txt = codecs.open(file_name, 'r', file_enc)
+ links = txt.readlines()
+ curPack = "Parsed links from %s" % pyfile.name
+
+ packages = {curPack:[],}
+
+ for link in links:
+ link = link.strip()
+ if not link:
+ continue
+
+ if link.startswith(";"):
+ continue
+ if link.startswith("[") and link.endswith("]"):
+ # new package
+ curPack = link[1:-1]
+ packages[curPack] = []
+ continue
+ packages[curPack].append(link)
+ txt.close()
+
+ # empty packages fix
+
+ delete = []
+
+ for key,value in packages.iteritems():
+ if not value:
+ delete.append(key)
+
+ for key in delete:
+ del packages[key]
+
+ if self.getConfig("clear"):
+ try:
+ txt = open(file_name, 'wb')
+ txt.close()
+ except Exception:
+ self.logWarning(_("LinkList could not be cleared"))
+
+ for name, links in packages.iteritems():
+ self.packages.append((name, links, name))
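+        # Example input file (illustrative):
+        #     ; lines starting with ";" are ignored
+        #     [My Package]
+        #     http://example.com/file1.zip
+        #     http://example.com/file2.zip
+        # Links listed before any "[...]" header go into the default package
+        # "Parsed links from <filename>".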
diff --git a/pyload/plugin/container/RSDF.py b/pyload/plugin/container/RSDF.py
new file mode 100644
index 000000000..22670ce4f
--- /dev/null
+++ b/pyload/plugin/container/RSDF.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import base64
+import binascii
+import re
+
+from pyload.plugin.Container import Container
+from pyload.utils import fs_encode
+
+
+class RSDF(Container):
+ __name = "RSDF"
+ __version = "0.24"
+
+ __pattern = r'.+\.rsdf'
+
+ __description = """RSDF container decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("spoob", "spoob@pyload.org")]
+
+
+ def decrypt(self, pyfile):
+
+ from Crypto.Cipher import AES
+
+ infile = fs_encode(pyfile.url.replace("\n", ""))
+ Key = binascii.unhexlify('8C35192D964DC3182C6F84F3252239EB4A320D2500000000')
+
+ IV = binascii.unhexlify('FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF')
+ IV_Cipher = AES.new(Key, AES.MODE_ECB)
+ IV = IV_Cipher.encrypt(IV)
+
+ obj = AES.new(Key, AES.MODE_CFB, IV)
+
+ try:
+ with open(infile, 'r') as rsdf:
+ data = rsdf.read()
+ except IOError, e:
+ self.fail(str(e))
+
+ if re.search(r"<title>404 - Not Found</title>", data) is None:
+ data = binascii.unhexlify(''.join(data.split()))
+ data = data.splitlines()
+
+ for link in data:
+ if not link:
+ continue
+ link = base64.b64decode(link)
+ link = obj.decrypt(link)
+ decryptedUrl = link.replace('CCF: ', '')
+ self.urls.append(decryptedUrl)
+
+ self.logDebug("Adding package %s with %d links" % (pyfile.package().name, len(self.urls)))
diff --git a/pyload/plugins/container/__init__.py b/pyload/plugin/container/__init__.py
index 40a96afc6..40a96afc6 100644
--- a/pyload/plugins/container/__init__.py
+++ b/pyload/plugin/container/__init__.py
diff --git a/pyload/plugin/crypter/BitshareCom.py b/pyload/plugin/crypter/BitshareCom.py
new file mode 100644
index 000000000..dfa7b71df
--- /dev/null
+++ b/pyload/plugin/crypter/BitshareCom.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class BitshareCom(SimpleCrypter):
+ __name = "BitshareCom"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?bitshare\.com/\?d=\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Bitshare.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ LINK_PATTERN = r'<a href="(http://bitshare\.com/files/.+)">.+</a></td>'
+ NAME_PATTERN = r'View public folder "(?P<N>.+)"</h1>'
diff --git a/pyload/plugin/crypter/C1neonCom.py b/pyload/plugin/crypter/C1neonCom.py
new file mode 100644
index 000000000..ad428f6dd
--- /dev/null
+++ b/pyload/plugin/crypter/C1neonCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class C1neonCom(DeadCrypter):
+ __name = "C1neonCom"
+ __type = "crypter"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?c1neon\.com/.*?'
+ __config = []
+
+ __description = """C1neon.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("godofdream", "soilfiction@gmail.com")]
+
+
+getInfo = create_getInfo(C1neonCom)
diff --git a/pyload/plugin/crypter/ChipDe.py b/pyload/plugin/crypter/ChipDe.py
new file mode 100644
index 000000000..36735fd13
--- /dev/null
+++ b/pyload/plugin/crypter/ChipDe.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+
+import re
+from pyload.plugin.Crypter import Crypter
+
+
+class ChipDe(Crypter):
+ __name = "ChipDe"
+ __type = "crypter"
+ __version = "0.10"
+
+ __pattern = r'http://(?:www\.)?chip\.de/video/.*\.html'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Chip.de decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("4Christopher", "4Christopher@gmx.de")]
+
+
+ def decrypt(self, pyfile):
+ self.html = self.load(pyfile.url)
+ try:
+ f = re.search(r'"(http://video\.chip\.de/.+)"', self.html)
+ except Exception:
+ self.fail(_("Failed to find the URL"))
+ else:
+ self.urls = [f.group(1)]
+ self.logDebug("The file URL is %s" % self.urls[0])
diff --git a/pyload/plugin/crypter/CrockoCom.py b/pyload/plugin/crypter/CrockoCom.py
new file mode 100644
index 000000000..8782c86e6
--- /dev/null
+++ b/pyload/plugin/crypter/CrockoCom.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class CrockoCom(SimpleCrypter):
+ __name = "CrockoCom"
+ __type = "crypter"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?crocko\.com/f/.*'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Crocko.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ LINK_PATTERN = r'<td class="last"><a href="([^"]+)">download</a>'
diff --git a/pyload/plugin/crypter/CryptItCom.py b/pyload/plugin/crypter/CryptItCom.py
new file mode 100644
index 000000000..dfb6be954
--- /dev/null
+++ b/pyload/plugin/crypter/CryptItCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class CryptItCom(DeadCrypter):
+ __name = "CryptItCom"
+ __type = "crypter"
+ __version = "0.11"
+
+ __pattern = r'http://(?:www\.)?crypt-it\.com/(s|e|d|c)/\w+'
+ __config = []
+
+ __description = """Crypt-it.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de")]
+
+
+getInfo = create_getInfo(CryptItCom)
diff --git a/pyload/plugin/crypter/CzshareCom.py b/pyload/plugin/crypter/CzshareCom.py
new file mode 100644
index 000000000..e36394426
--- /dev/null
+++ b/pyload/plugin/crypter/CzshareCom.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+
+import re
+from pyload.plugin.Crypter import Crypter
+
+
+class CzshareCom(Crypter):
+ __name = "CzshareCom"
+ __type = "crypter"
+ __version = "0.20"
+
+ __pattern = r'http://(?:www\.)?(czshare|sdilej)\.(com|cz)/folders/.*'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Czshare.com folder decrypter plugin, now Sdilej.cz"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ FOLDER_PATTERN = r'<tr class="subdirectory">\s*<td>\s*<table>(.*?)</table>'
+ LINK_PATTERN = r'<td class="col2"><a href="([^"]+)">info</a></td>'
+
+
+ def decrypt(self, pyfile):
+ html = self.load(pyfile.url)
+
+ m = re.search(self.FOLDER_PATTERN, html, re.S)
+ if m is None:
+ self.error(_("FOLDER_PATTERN not found"))
+
+ self.urls.extend(re.findall(self.LINK_PATTERN, m.group(1)))
diff --git a/pyload/plugin/crypter/DDLMusicOrg.py b/pyload/plugin/crypter/DDLMusicOrg.py
new file mode 100644
index 000000000..a24cac22d
--- /dev/null
+++ b/pyload/plugin/crypter/DDLMusicOrg.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import sleep
+
+from pyload.plugin.Crypter import Crypter
+
+
+class DDLMusicOrg(Crypter):
+ __name = "DDLMusicOrg"
+ __type = "crypter"
+ __version = "0.30"
+
+ __pattern = r'http://(?:www\.)?ddl-music\.org/captcha/ddlm_cr\d\.php\?\d+\?\d+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Ddl-music.org decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+ def setup(self):
+ self.multiDL = False
+
+
+ def decrypt(self, pyfile):
+ html = self.load(pyfile.url, cookies=True)
+
+ if re.search(r"Wer dies nicht rechnen kann", html) is not None:
+ self.offline()
+
+ math = re.search(r"(\d+) ([+-]) (\d+) =\s+<inp", self.html)
+ id = re.search(r"name=\"id\" value=\"(\d+)\"", self.html).group(1)
+ linknr = re.search(r"name=\"linknr\" value=\"(\d+)\"", self.html).group(1)
+
+ solve = ""
+ if math.group(2) == "+":
+ solve = int(math.group(1)) + int(math.group(3))
+ else:
+ solve = int(math.group(1)) - int(math.group(3))
+ sleep(3)
+ htmlwithlink = self.load(pyfile.url, cookies=True,
+ post={"calc%s" % linknr: solve, "send%s" % linknr: "Send", "id": id,
+ "linknr": linknr})
+ m = re.search(r"<form id=\"ff\" action=\"(.*?)\" method=\"post\">", htmlwithlink)
+ if m:
+ self.urls = [m.group(1)]
+ else:
+ self.retry()
diff --git a/pyload/plugin/crypter/DailymotionBatch.py b/pyload/plugin/crypter/DailymotionBatch.py
new file mode 100644
index 000000000..4b5b7106c
--- /dev/null
+++ b/pyload/plugin/crypter/DailymotionBatch.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from pyload.utils import json_loads
+from pyload.plugin.Crypter import Crypter
+from pyload.utils import safe_join
+
+
+class DailymotionBatch(Crypter):
+ __name = "DailymotionBatch"
+ __type = "crypter"
+ __version = "0.01"
+
+ __pattern = r'https?://(?:www\.)?dailymotion\.com/((playlists/)?(?P<TYPE>playlist|user)/)?(?P<ID>[\w^_]+)(?(TYPE)|#)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Dailymotion.com channel & playlist decrypter"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def api_response(self, ref, req=None):
+ url = urljoin("https://api.dailymotion.com/", ref)
+ page = self.load(url, get=req)
+ return json_loads(page)
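+    # Example (illustrative, the playlist id is a made-up placeholder):
+    # api_response("playlist/xk2jd2", {"fields": "name"}) loads
+    # https://api.dailymotion.com/playlist/xk2jd2?fields=name and returns the decoded
+    # JSON as a dict.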
+
+
+ def getPlaylistInfo(self, id):
+ ref = "playlist/" + id
+ req = {"fields": "name,owner.screenname"}
+ playlist = self.api_response(ref, req)
+
+ if "error" in playlist:
+ return
+
+ name = playlist['name']
+ owner = playlist['owner.screenname']
+ return name, owner
+
+
+ def _getPlaylists(self, user_id, page=1):
+ ref = "user/%s/playlists" % user_id
+ req = {"fields": "id", "page": page, "limit": 100}
+ user = self.api_response(ref, req)
+
+ if "error" in user:
+ return
+
+ for playlist in user['list']:
+ yield playlist['id']
+
+ if user['has_more']:
+ for item in self._getPlaylists(user_id, page + 1):
+ yield item
+
+
+ def getPlaylists(self, user_id):
+ return [(id,) + self.getPlaylistInfo(id) for id in self._getPlaylists(user_id)]
+
+
+ def _getVideos(self, id, page=1):
+ ref = "playlist/%s/videos" % id
+ req = {"fields": "url", "page": page, "limit": 100}
+ playlist = self.api_response(ref, req)
+
+ if "error" in playlist:
+ return
+
+ for video in playlist['list']:
+ yield video['url']
+
+ if playlist['has_more']:
+ for item in self._getVideos(id, page + 1):
+ yield item
+
+
+ def getVideos(self, playlist_id):
+ return list(self._getVideos(playlist_id))[::-1]
+
+
+ def decrypt(self, pyfile):
+ m = re.match(self.__pattern, pyfile.url)
+ m_id = m.group("ID")
+ m_type = m.group("TYPE")
+
+ if m_type == "playlist":
+ self.logDebug("Url recognized as Playlist")
+ p_info = self.getPlaylistInfo(m_id)
+ playlists = [(m_id,) + p_info] if p_info else None
+ else:
+ self.logDebug("Url recognized as Channel")
+ playlists = self.getPlaylists(m_id)
+ self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), m_id))
+
+ if not playlists:
+ self.fail(_("No playlist available"))
+
+ for p_id, p_name, p_owner in playlists:
+ p_videos = self.getVideos(p_id)
+ p_folder = safe_join(self.config['general']['download_folder'], p_owner, p_name)
+ self.logDebug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))
+ self.packages.append((p_name, p_videos, p_folder)) #: folder is NOT recognized by pyload 0.4.9!
diff --git a/pyload/plugin/crypter/DataHu.py b/pyload/plugin/crypter/DataHu.py
new file mode 100644
index 000000000..bc677253a
--- /dev/null
+++ b/pyload/plugin/crypter/DataHu.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class DataHu(SimpleCrypter):
+ __name = "DataHu"
+ __type = "crypter"
+ __version = "0.06"
+
+ __pattern = r'http://(?:www\.)?data\.hu/dir/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Data.hu folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("crash", ""),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ LINK_PATTERN = r'<a href=\'(http://data\.hu/get/.+)\' target=\'_blank\'>\1</a>'
+ NAME_PATTERN = ur'<title>(?P<N>.+) Let\xf6lt\xe9se</title>'
+
+
+ def prepare(self):
+ super(DataHu, self).prepare()
+
+ if u'K\xe9rlek add meg a jelsz\xf3t' in self.html: # Password protected
+ password = self.getPassword()
+ if not password:
+ self.fail(_("Password required"))
+
+ self.logDebug("The folder is password protected', 'Using password: " + password)
+
+ self.html = self.load(self.pyfile.url, post={'mappa_pass': password}, decode=True)
+
+ if u'Hib\xe1s jelsz\xf3' in self.html: # Wrong password
+ self.fail(_("Wrong password"))
diff --git a/pyload/plugin/crypter/DdlstorageCom.py b/pyload/plugin/crypter/DdlstorageCom.py
new file mode 100644
index 000000000..c6f423bb6
--- /dev/null
+++ b/pyload/plugin/crypter/DdlstorageCom.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class DdlstorageCom(DeadCrypter):
+ __name = "DdlstorageCom"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'https?://(?:www\.)?ddlstorage\.com/folder/\w+'
+ __config = []
+
+ __description = """DDLStorage.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("godofdream", "soilfiction@gmail.com"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(DdlstorageCom)
diff --git a/pyload/plugin/crypter/DepositfilesCom.py b/pyload/plugin/crypter/DepositfilesCom.py
new file mode 100644
index 000000000..8ecbb6f8d
--- /dev/null
+++ b/pyload/plugin/crypter/DepositfilesCom.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class DepositfilesCom(SimpleCrypter):
+ __name = "DepositfilesCom"
+ __type = "crypter"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?depositfiles\.com/folders/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Depositfiles.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ LINK_PATTERN = r'<div class="progressName"[^>]*>\s*<a href="([^"]+)" title="[^"]*" target="_blank">'
diff --git a/pyload/plugin/crypter/Dereferer.py b/pyload/plugin/crypter/Dereferer.py
new file mode 100644
index 000000000..ec7f48a52
--- /dev/null
+++ b/pyload/plugin/crypter/Dereferer.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urllib import unquote
+
+from pyload.plugin.Crypter import Crypter
+
+
+class Dereferer(Crypter):
+ __name = "Dereferer"
+ __type = "crypter"
+ __version = "0.10"
+
+ __pattern = r'https?://([^/]+)/.*?(?P<url>(ht|f)tps?(://|%3A%2F%2F).*)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Crypter for dereferers"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def decrypt(self, pyfile):
+ link = re.match(self.__pattern, pyfile.url).group('url')
+ self.urls = [unquote(link).rstrip('+')]
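+        # Example (illustrative): for a link such as
+        #     http://anonym.to/?http://example.com/file.zip
+        # the named group "url" captures "http://example.com/file.zip", which is
+        # unquoted and added as the decrypted target.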
diff --git a/pyload/plugin/crypter/DevhostStFolder.py b/pyload/plugin/crypter/DevhostStFolder.py
new file mode 100644
index 000000000..aa00fe60e
--- /dev/null
+++ b/pyload/plugin/crypter/DevhostStFolder.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://d-h.st/users/shine/?fld_id=37263#files
+
+import re
+
+from urlparse import urljoin
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class DevhostStFolder(SimpleCrypter):
+ __name = "DevhostStFolder"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?d-h\.st/users/(?P<USER>\w+)(/\?fld_id=(?P<ID>\d+))?'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """d-h.st folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ LINK_PATTERN = r'(?:/> |;">)<a href="(.+?)"(?!>Back to \w+<)'
+ OFFLINE_PATTERN = r'"/cHP">test\.png<'
+
+
+ def getFileInfo(self):
+ if re.search(self.OFFLINE_PATTERN, self.html):
+ self.offline()
+
+ try:
+ id = re.match(self.__pattern, self.pyfile.url).group('ID')
+ if id == "0":
+ raise
+
+ p = r'href="(.+?)">Back to \w+<'
+ m = re.search(p, self.html)
+ html = self.load(urljoin("http://d-h.st", m.group(1)),
+ cookies=False)
+
+ p = '\?fld_id=%s.*?">(.+?)<' % id
+ m = re.search(p, html)
+ name = folder = m.group(1)
+
+ except Exception, e:
+ self.logDebug(e)
+ name = folder = re.match(self.__pattern, self.pyfile.url).group('USER')
+
+ return {'name': name, 'folder': folder}
+
+
+ def getLinks(self):
+ return [urljoin("http://d-h.st", link) for link in re.findall(self.LINK_PATTERN, self.html)]
diff --git a/pyload/plugin/crypter/DlProtectCom.py b/pyload/plugin/crypter/DlProtectCom.py
new file mode 100644
index 000000000..0a9f00cc4
--- /dev/null
+++ b/pyload/plugin/crypter/DlProtectCom.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from base64 import urlsafe_b64encode
+from time import time
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class DlProtectCom(SimpleCrypter):
+ __name = "DlProtectCom"
+ __type = "crypter"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?dl-protect\.com/((en|fr)/)?(?P<ID>\w+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Dl-protect.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ OFFLINE_PATTERN = r'>Unfortunately, the link you are looking for is not found'
+
+
+ def getLinks(self):
+ # Direct link with redirect
+ if not re.match(r"http://(?:www\.)?dl-protect\.com", self.req.http.lastEffectiveURL):
+ return [self.req.http.lastEffectiveURL]
+
+ #id = re.match(self.__pattern, self.pyfile.url).group("ID")
+ key = re.search(r'name="id_key" value="(.+?)"', self.html).group(1)
+
+ post_req = {"id_key": key, "submitform": ""}
+
+ if self.OFFLINE_PATTERN in self.html:
+ self.offline()
+ elif ">Please click on continue to see the content" in self.html:
+ post_req.update({"submitform": "Continue"})
+ else:
+ mstime = int(round(time() * 1000))
+ b64time = "_" + urlsafe_b64encode(str(mstime)).replace("=", "%3D")
+
+ post_req.update({"i": b64time, "submitform": "Decrypt+link"})
+
+ if ">Password :" in self.html:
+ post_req['pwd'] = self.getPassword()
+
+ if ">Security Code" in self.html:
+ captcha_id = re.search(r'/captcha\.php\?uid=(.+?)"', self.html).group(1)
+ captcha_url = "http://www.dl-protect.com/captcha.php?uid=" + captcha_id
+ captcha_code = self.decryptCaptcha(captcha_url, imgtype="gif")
+
+ post_req['secure'] = captcha_code
+
+ self.html = self.load(self.pyfile.url, post=post_req)
+
+ for errmsg in (">The password is incorrect", ">The security code is incorrect"):
+ if errmsg in self.html:
+ self.fail(_(errmsg[1:]))
+
+ pattern = r'<a href="([^/].+?)" target="_blank">'
+ return re.findall(pattern, self.html)
diff --git a/pyload/plugin/crypter/DontKnowMe.py b/pyload/plugin/crypter/DontKnowMe.py
new file mode 100644
index 000000000..7fc1c87e4
--- /dev/null
+++ b/pyload/plugin/crypter/DontKnowMe.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urllib import unquote
+
+from pyload.plugin.Crypter import Crypter
+
+
+class DontKnowMe(Crypter):
+ __name = "DontKnowMe"
+ __type = "crypter"
+ __version = "0.10"
+
+ __pattern = r'http://(?:www\.)?dontknow\.me/at/\?.+$'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """DontKnow.me decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("selaux", "")]
+
+
+ LINK_PATTERN = r'http://dontknow\.me/at/\?(.+)$'
+
+
+ def decrypt(self, pyfile):
+ link = re.findall(self.LINK_PATTERN, pyfile.url)[0]
+ self.urls = [unquote(link)]
diff --git a/pyload/plugin/crypter/DuckCryptInfo.py b/pyload/plugin/crypter/DuckCryptInfo.py
new file mode 100644
index 000000000..28f9a1505
--- /dev/null
+++ b/pyload/plugin/crypter/DuckCryptInfo.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from BeautifulSoup import BeautifulSoup
+
+from pyload.plugin.Crypter import Crypter
+
+
+class DuckCryptInfo(Crypter):
+ __name = "DuckCryptInfo"
+ __type = "crypter"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?duckcrypt\.info/(folder|wait|link)/(\w+)/?(\w*)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """DuckCrypt.info decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("godofdream", "soilfiction@gmail.com")]
+
+
+ TIMER_PATTERN = r'<span id="timer">(.*)</span>'
+
+
+ def decrypt(self, pyfile):
+ url = pyfile.url
+
+ m = re.match(self.__pattern, url)
+ if m is None:
+ self.fail(_("Weird error in link"))
+ if str(m.group(1)) == "link":
+ self.handleLink(url)
+ else:
+ self.handleFolder(m)
+
+
+ def handleFolder(self, m):
+ html = self.load("http://duckcrypt.info/ajax/auth.php?hash=" + str(m.group(2)))
+ m = re.match(self.__pattern, html)
+ self.logDebug("Redirectet to " + str(m.group(0)))
+ html = self.load(str(m.group(0)))
+ soup = BeautifulSoup(html)
+ cryptlinks = soup.findAll("div", attrs={"class": "folderbox"})
+ self.logDebug("Redirectet to " + str(cryptlinks))
+ if not cryptlinks:
+ self.error(_("No link found"))
+ for clink in cryptlinks:
+ if clink.find("a"):
+ self.handleLink(clink.find("a")['href'])
+
+
+ def handleLink(self, url):
+ html = self.load(url)
+ soup = BeautifulSoup(html)
+ self.urls = [soup.find("iframe")['src']]
+ if not self.urls:
+ self.logInfo(_("No link found"))
diff --git a/pyload/plugin/crypter/DuploadOrg.py b/pyload/plugin/crypter/DuploadOrg.py
new file mode 100644
index 000000000..d5839bce0
--- /dev/null
+++ b/pyload/plugin/crypter/DuploadOrg.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class DuploadOrg(DeadCrypter):
+ __name = "DuploadOrg"
+ __type = "crypter"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?dupload\.org/folder/\d+'
+ __config = []
+
+ __description = """Dupload.org folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(DuploadOrg)
diff --git a/pyload/plugin/crypter/EasybytezCom.py b/pyload/plugin/crypter/EasybytezCom.py
new file mode 100644
index 000000000..2e7e37537
--- /dev/null
+++ b/pyload/plugin/crypter/EasybytezCom.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSCrypter import XFSCrypter
+
+
+class EasybytezCom(XFSCrypter):
+ __name = "EasybytezCom"
+ __type = "crypter"
+ __version = "0.10"
+
+ __pattern = r'http://(?:www\.)?easybytez\.com/users/\d+/\d+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Easybytez.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ HOSTER_DOMAIN = "easybytez.com"
+
+ LOGIN_ACCOUNT = True
diff --git a/pyload/plugin/crypter/EmbeduploadCom.py b/pyload/plugin/crypter/EmbeduploadCom.py
new file mode 100644
index 000000000..88f6db50b
--- /dev/null
+++ b/pyload/plugin/crypter/EmbeduploadCom.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+import re
+from pyload.plugin.Crypter import Crypter
+from pyload.network.HTTPRequest import BadHeader
+
+
+class EmbeduploadCom(Crypter):
+ __name = "EmbeduploadCom"
+ __type = "crypter"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?embedupload\.com/\?d=.*'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True),
+ ("preferedHoster", "str", "Prefered hoster list (bar-separated)", "embedupload"),
+ ("ignoredHoster", "str", "Ignored hoster list (bar-separated)", "")]
+
+ __description = """EmbedUpload.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ LINK_PATTERN = r'<div id="([^"]+)"[^>]*>\s*<a href="([^"]+)" target="_blank" (?:class="DownloadNow"|style="color:red")>'
+
+
+ def decrypt(self, pyfile):
+ self.html = self.load(pyfile.url, decode=True)
+ tmp_links = []
+
+ m = re.findall(self.LINK_PATTERN, self.html)
+ if m:
+ prefered_set = set(self.getConfig("preferedHoster").split('|'))
+ prefered_set = map(lambda s: s.lower().split('.')[0], prefered_set)
+
+ self.logDebug("PF: %s" % prefered_set)
+
+ tmp_links.extend([x[1] for x in m if x[0] in prefered_set])
+ self.urls = self.getLocation(tmp_links)
+
+ if not self.urls:
+ ignored_set = set(self.getConfig("ignoredHoster").split('|'))
+ ignored_set = map(lambda s: s.lower().split('.')[0], ignored_set)
+
+ self.logDebug("IG: %s" % ignored_set)
+
+ tmp_links.extend([x[1] for x in m if x[0] not in ignored_set])
+ self.urls = self.getLocation(tmp_links)
+
+
+ def getLocation(self, tmp_links):
+ new_links = []
+ for link in tmp_links:
+ try:
+ header = self.load(link, just_header=True)
+ if 'location' in header:
+ new_links.append(header['location'])
+ except BadHeader:
+ pass
+ return new_links
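+
+ # Link selection above is two-pass: first only hosters listed in
+ # "preferedHoster" (bar-separated, matched on the lowercased first domain
+ # label) are resolved through their HTTP Location header; only if that
+ # yields nothing are all links except those in "ignoredHoster" tried.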
diff --git a/pyload/plugin/crypter/FilebeerInfo.py b/pyload/plugin/crypter/FilebeerInfo.py
new file mode 100644
index 000000000..294f57451
--- /dev/null
+++ b/pyload/plugin/crypter/FilebeerInfo.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class FilebeerInfo(DeadCrypter):
+ __name = "FilebeerInfo"
+ __type = "crypter"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?filebeer\.info/(\d+~f).*'
+ __config = []
+
+ __description = """Filebeer.info folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(FilebeerInfo)
diff --git a/pyload/plugin/crypter/FilecloudIo.py b/pyload/plugin/crypter/FilecloudIo.py
new file mode 100644
index 000000000..1926d04bb
--- /dev/null
+++ b/pyload/plugin/crypter/FilecloudIo.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class FilecloudIo(SimpleCrypter):
+ __name = "FilecloudIo"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'https?://(?:www\.)?(filecloud\.io|ifile\.it)/_\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Filecloud.io folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ LINK_PATTERN = r'href="(http://filecloud\.io/\w+)" title'
+ NAME_PATTERN = r'>(?P<N>.+?) - filecloud\.io<'
diff --git a/pyload/plugin/crypter/FilecryptCc.py b/pyload/plugin/crypter/FilecryptCc.py
new file mode 100644
index 000000000..7db82f24c
--- /dev/null
+++ b/pyload/plugin/crypter/FilecryptCc.py
@@ -0,0 +1,148 @@
+# -*- coding: utf-8 -*-
+
+import base64
+import binascii
+import re
+
+from Crypto.Cipher import AES
+
+from pyload.plugin.Crypter import Crypter
+
+
+class FilecryptCc(Crypter):
+ __name = "FilecryptCc"
+ __type = "crypter"
+ __version = "0.05"
+
+ __pattern = r'https?://(?:www\.)?filecrypt\.cc/Container/\w+'
+
+ __description = """Filecrypt.cc decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
+
+
+ # URL_REPLACEMENTS = [(r'.html$', ""), (r'$', ".html")] #@TODO: Extend SimpleCrypter
+
+ DLC_LINK_PATTERN = r'<button class="dlcdownload" type="button" title="Download \*.dlc" onclick="DownloadDLC\(\'(.+)\'\);"><i></i><span>dlc<'
+ WEBLINK_PATTERN = r"openLink.?'([\w_-]*)',"
+
+ CAPTCHA_PATTERN = r'<img id="nc" src="(.+?)"'
+
+ MIRROR_PAGE_PATTERN = r'"[\w]*" href="(http://filecrypt.cc/Container/\w+\.html\?mirror=\d+)">'
+
+
+ def setup(self):
+ self.links = []
+
+
+ def decrypt(self, pyfile):
+ self.html = self.load(pyfile.url, cookies=True)
+
+ if "content not found" in self.html:
+ self.offline()
+
+ self.handlePasswordProtection()
+ self.handleCaptcha()
+ self.handleMirrorPages()
+
+ for handle in (self.handleCNL, self.handleWeblinks, self.handleDlcContainer):
+ handle()
+ if self.links:
+ self.packages = [(pyfile.package().name, self.links, pyfile.package().name)]
+ return
+
+
+ def handleMirrorPages(self):
+ if "mirror=" not in self.siteWithLinks:
+ return
+
+ mirror = re.findall(self.MIRROR_PAGE_PATTERN, self.siteWithLinks)
+
+ self.logInfo(_("Found %d mirrors") % len(mirror))
+
+ for i in mirror[1:]:
+ self.siteWithLinks = self.siteWithLinks + self.load(i, cookies=True).decode("utf-8", "replace")
+
+
+ def handlePasswordProtection(self):
+ if '<input type="text" name="password"' not in self.html:
+ return
+
+ self.logInfo(_("Folder is password protected"))
+
+ if not self.pyfile.package().password:
+ self.fail(_("Please enter the password in package section and try again"))
+
+ self.html = self.load(self.pyfile.url, post={"password": self.password}, cookies=True)
+
+
+ def handleCaptcha(self):
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+
+ if m:
+ self.logDebug("Captcha-URL: %s" % m.group(1))
+ captcha_code = self.decryptCaptcha("http://filecrypt.cc" + m.group(1), forceUser=True, imgtype="gif")
+ self.siteWithLinks = self.load(self.pyfile.url, post={"recaptcha_response_field":captcha_code}, decode=True, cookies=True)
+ else:
+ self.logDebug("No captcha found")
+ self.siteWithLinks = self.html
+
+ if "recaptcha_response_field" in self.siteWithLinks:
+ self.invalidCaptcha()
+ self.retry()
+
+
+ def handleDlcContainer(self):
+ dlc = re.findall(self.DLC_LINK_PATTERN, self.siteWithLinks)
+
+ if not dlc:
+ return
+
+ for i in dlc:
+ self.links.append("http://filecrypt.cc/DLC/%s.dlc" % i)
+
+
+ def handleWeblinks(self):
+ try:
+ weblinks = re.findall(self.WEBLINK_PATTERN, self.siteWithLinks)
+
+ for link in weblinks:
+ res = self.load("http://filecrypt.cc/Link/%s.html" % link, cookies=True)
+ link2 = re.search('<iframe noresize src="(.*)"></iframe>', res)
+ res2 = self.load(link2.group(1), just_header=True, cookies=True)
+ self.links.append(res2['location'])
+
+ except Exception, e:
+ self.logDebug("Error decrypting weblinks: %s" % e)
+
+
+ def handleCNL(self):
+ try:
+ vjk = re.findall('<input type="hidden" name="jk" value="function f\(\){ return \'(.*)\';}">', self.siteWithLinks)
+ vcrypted = re.findall('<input type="hidden" name="crypted" value="(.*)">', self.siteWithLinks)
+
+ for i in xrange(len(vcrypted)):
+ self.links.extend(self._getLinks(vcrypted[i], vjk[i]))
+
+ except Exception, e:
+ self.logDebug("Error decrypting CNL: %s" % e)
+
+
+ def _getLinks(self, crypted, jk):
+ # Get key
+ key = binascii.unhexlify(str(jk))
+
+ # Decode crypted
+ crypted = base64.standard_b64decode(crypted)
+
+ # Decrypt
+ Key = key
+ IV = key
+ obj = AES.new(Key, AES.MODE_CBC, IV)
+ text = obj.decrypt(crypted)
+
+ # Extract links
+ links = filter(lambda x: x != "",
+ text.replace("\x00", "").replace("\r", "").split("\n"))
+
+ return links
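+
+ # The CNL ("Click'n'Load") block above is plain AES-CBC: the hidden "jk"
+ # field already carries the key as a hex string, so it is unhexlified
+ # directly and the same bytes double as the IV; "crypted" is base64.
+ # A minimal sketch of the same steps on a hypothetical crypted/jk pair:
+ #
+ #   key = binascii.unhexlify(jk)
+ #   plain = AES.new(key, AES.MODE_CBC, key).decrypt(base64.standard_b64decode(crypted))
+ #   links = [l for l in plain.replace("\x00", "").splitlines() if l]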
diff --git a/pyload/plugin/crypter/FilefactoryCom.py b/pyload/plugin/crypter/FilefactoryCom.py
new file mode 100644
index 000000000..5b2d242cb
--- /dev/null
+++ b/pyload/plugin/crypter/FilefactoryCom.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class FilefactoryCom(SimpleCrypter):
+ __name = "FilefactoryCom"
+ __type = "crypter"
+ __version = "0.31"
+
+ __pattern = r'https?://(?:www\.)?filefactory\.com/(?:f|folder)/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Filefactory.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ LINK_PATTERN = r'<td><a href="([^"]+)">'
+ NAME_PATTERN = r'<h1>Files in <span>(?P<N>.+)</span></h1>'
+ PAGES_PATTERN = r'data-paginator-totalPages="(\d+)"'
+
+ COOKIES = [("filefactory.com", "locale", "en_US.utf8")]
+
+
+ def loadPage(self, page_n):
+ return self.load(self.pyfile.url, get={'page': page_n})
diff --git a/pyload/plugin/crypter/FilerNet.py b/pyload/plugin/crypter/FilerNet.py
new file mode 100644
index 000000000..950012b46
--- /dev/null
+++ b/pyload/plugin/crypter/FilerNet.py
@@ -0,0 +1,26 @@
+import re
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class FilerNet(SimpleCrypter):
+ __name = "FilerNet"
+ __type = "crypter"
+ __version = "0.41"
+
+ __pattern = r'https?://filer\.net/folder/\w{16}'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Filer.net decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("nath_schwarz", "nathan.notwhite@gmail.com"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ LINK_PATTERN = r'href="(/get/\w{16})">(?!<)'
+ NAME_PATTERN = r'<h3>(?P<N>.+?) - <small'
+
+
+ def getLinks(self):
+ return ['http://filer.net%s' % link for link in re.findall(self.LINK_PATTERN, self.html)]
diff --git a/pyload/plugin/crypter/FileserveCom.py b/pyload/plugin/crypter/FileserveCom.py
new file mode 100644
index 000000000..a9f766b81
--- /dev/null
+++ b/pyload/plugin/crypter/FileserveCom.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Crypter import Crypter
+
+
+class FileserveCom(Crypter):
+ __name = "FileserveCom"
+ __type = "crypter"
+ __version = "0.11"
+
+ __pattern = r'http://(?:www\.)?fileserve\.com/list/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """FileServe.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("fionnc", "fionnc@gmail.com")]
+
+
+ FOLDER_PATTERN = r'<table class="file_list">(.*?)</table>'
+ LINK_PATTERN = r'<a href="([^"]+)" class="sheet_icon wbold">'
+
+
+ def decrypt(self, pyfile):
+ html = self.load(pyfile.url)
+
+ new_links = []
+
+ folder = re.search(self.FOLDER_PATTERN, html, re.S)
+ if folder is None:
+ self.error(_("FOLDER_PATTERN not found"))
+
+ new_links.extend(re.findall(self.LINK_PATTERN, folder.group(1)))
+
+ if new_links:
+ self.urls = map(lambda s: "http://fileserve.com%s" % s, new_links)
diff --git a/pyload/plugin/crypter/FilesonicCom.py b/pyload/plugin/crypter/FilesonicCom.py
new file mode 100644
index 000000000..6dded6c04
--- /dev/null
+++ b/pyload/plugin/crypter/FilesonicCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class FilesonicCom(DeadCrypter):
+ __name = "FilesonicCom"
+ __type = "crypter"
+ __version = "0.12"
+
+ __pattern = r'http://(?:www\.)?filesonic\.com/folder/\w+'
+
+ __description = """Filesonic.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(FilesonicCom)
diff --git a/pyload/plugin/crypter/FilestubeCom.py b/pyload/plugin/crypter/FilestubeCom.py
new file mode 100644
index 000000000..5238899b0
--- /dev/null
+++ b/pyload/plugin/crypter/FilestubeCom.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class FilestubeCom(SimpleCrypter):
+ __name = "FilestubeCom"
+ __type = "crypter"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?filestube\.(?:com|to)/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Filestube.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ LINK_PATTERN = r'<a class=\"file-link-main(?: noref)?\" [^>]* href=\"(http://[^\"]+)'
+ NAME_PATTERN = r'<h1\s*> (?P<N>.+) download\s*</h1>'
diff --git a/pyload/plugin/crypter/FiletramCom.py b/pyload/plugin/crypter/FiletramCom.py
new file mode 100644
index 000000000..28e38e30e
--- /dev/null
+++ b/pyload/plugin/crypter/FiletramCom.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class FiletramCom(SimpleCrypter):
+ __name = "FiletramCom"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?filetram\.com/[^/]+/.+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Filetram.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("igel", "igelkun@myopera.com"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ LINK_PATTERN = r'\s+(http://.+)'
+ NAME_PATTERN = r'<title>(?P<N>.+?) - Free Download'
diff --git a/pyload/plugin/crypter/FiredriveCom.py b/pyload/plugin/crypter/FiredriveCom.py
new file mode 100644
index 000000000..27b81c006
--- /dev/null
+++ b/pyload/plugin/crypter/FiredriveCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class FiredriveCom(DeadCrypter):
+ __name = "FiredriveCom"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'https?://(?:www\.)?(firedrive|putlocker)\.com/share/.+'
+ __config = []
+
+ __description = """Firedrive.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+getInfo = create_getInfo(FiredriveCom)
diff --git a/pyload/plugin/crypter/FourChanOrg.py b/pyload/plugin/crypter/FourChanOrg.py
new file mode 100644
index 000000000..97d21ddeb
--- /dev/null
+++ b/pyload/plugin/crypter/FourChanOrg.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+#
+# Based on 4chandl by Roland Beermann (https://gist.github.com/enkore/3492599)
+
+import re
+
+from pyload.plugin.Crypter import Crypter
+
+
+class FourChanOrg(Crypter):
+ __name = "FourChanOrg"
+ __type = "crypter"
+ __version = "0.30"
+
+ __pattern = r'http://(?:www\.)?boards\.4chan\.org/\w+/res/(\d+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """4chan.org folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = []
+
+
+ def decrypt(self, pyfile):
+ pagehtml = self.load(pyfile.url)
+ images = set(re.findall(r'(images\.4chan\.org/[^/]*/src/[^"<]*)', pagehtml))
+ self.urls = ["http://" + image for image in images]
diff --git a/pyload/plugin/crypter/FreakhareCom.py b/pyload/plugin/crypter/FreakhareCom.py
new file mode 100644
index 000000000..9df8ef200
--- /dev/null
+++ b/pyload/plugin/crypter/FreakhareCom.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class FreakhareCom(SimpleCrypter):
+ __name = "FreakhareCom"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?freakshare\.com/folder/.+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Freakhare.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ LINK_PATTERN = r'<a href="(http://freakshare\.com/files/[^"]+)" target="_blank">'
+ NAME_PATTERN = r'Folder:</b> (?P<N>.+)'
+ PAGES_PATTERN = r'Pages: +(\d+)'
+
+
+ def loadPage(self, page_n):
+ if not hasattr(self, 'f_id') and not hasattr(self, 'f_md5'):
+ m = re.search(r'http://freakshare.com/\?x=folder&f_id=(\d+)&f_md5=(\w+)', self.html)
+ if m:
+ self.f_id = m.group(1)
+ self.f_md5 = m.group(2)
+ return self.load('http://freakshare.com/', get={'x': 'folder',
+ 'f_id': self.f_id,
+ 'f_md5': self.f_md5,
+ 'entrys': '20',
+ 'page': page_n - 1,
+ 'order': ''}, decode=True)
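+
+ # loadPage() gets a 1-based page number from SimpleCrypter while the folder
+ # listing apparently counts pages from zero, hence "page_n - 1"; f_id and
+ # f_md5 are scraped once from the first page and cached on the instance.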
diff --git a/pyload/plugin/crypter/FreetexthostCom.py b/pyload/plugin/crypter/FreetexthostCom.py
new file mode 100644
index 000000000..db97a8003
--- /dev/null
+++ b/pyload/plugin/crypter/FreetexthostCom.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class FreetexthostCom(SimpleCrypter):
+ __name = "FreetexthostCom"
+ __type = "crypter"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?freetexthost\.com/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Freetexthost.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ def getLinks(self):
+ m = re.search(r'<div id="contentsinner">\s*(.+)<div class="viewcount">', self.html, re.S)
+ if m is None:
+ self.error(_("Unable to extract links"))
+ links = m.group(1)
+ return links.strip().split("<br />\r\n")
diff --git a/pyload/plugin/crypter/FshareVn.py b/pyload/plugin/crypter/FshareVn.py
new file mode 100644
index 000000000..895a4d258
--- /dev/null
+++ b/pyload/plugin/crypter/FshareVn.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class FshareVn(SimpleCrypter):
+ __name = "FshareVn"
+ __type = "crypter"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?fshare\.vn/folder/.*'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Fshare.vn folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ LINK_PATTERN = r'<li class="w_80pc"><a href="([^"]+)" target="_blank">'
diff --git a/pyload/plugin/crypter/Go4UpCom.py b/pyload/plugin/crypter/Go4UpCom.py
new file mode 100644
index 000000000..8652c3825
--- /dev/null
+++ b/pyload/plugin/crypter/Go4UpCom.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter, create_getInfo
+
+
+class Go4UpCom(SimpleCrypter):
+ __name = "Go4UpCom"
+ __type = "crypter"
+ __version = "0.11"
+
+ __pattern = r'http://go4up\.com/(dl/\w{12}|rd/\w{12}/\d+)'
+
+ __description = """Go4Up.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("rlindner81", "rlindner81@gmail.com"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ LINK_PATTERN = r'(http://go4up\.com/rd/.+?)<'
+
+ NAME_PATTERN = r'<title>Download (.+?)<'
+
+ OFFLINE_PATTERN = r'>\s*(404 Page Not Found|File not Found|Mirror does not exist)'
+
+
+ def getLinks(self):
+ links = []
+
+ m = re.search(r'(/download/gethosts/.+?)"', self.html)
+ if m:
+ self.html = self.load(urljoin("http://go4up.com/", m.group(1)))
+ pages = [self.load(url) for url in re.findall(self.LINK_PATTERN, self.html)]
+ else:
+ pages = [self.html]
+
+ for html in pages:
+ try:
+ links.append(re.search(r'<b><a href="(.+?)"', html).group(1))
+ except Exception:
+ continue
+
+ return links
+
+
+getInfo = create_getInfo(Go4UpCom)
diff --git a/pyload/plugin/crypter/GooGl.py b/pyload/plugin/crypter/GooGl.py
new file mode 100644
index 000000000..d1c47935d
--- /dev/null
+++ b/pyload/plugin/crypter/GooGl.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Crypter import Crypter
+from pyload.utils import json_loads
+
+
+class GooGl(Crypter):
+ __name = "GooGl"
+ __type = "crypter"
+ __version = "0.01"
+
+ __pattern = r'https?://(?:www\.)?goo\.gl/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Goo.gl decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ API_URL = "https://www.googleapis.com/urlshortener/v1/url"
+
+
+ def decrypt(self, pyfile):
+ rep = self.load(self.API_URL, get={'shortUrl': pyfile.url})
+ self.logDebug("JSON data: " + rep)
+ rep = json_loads(rep)
+
+ if 'longUrl' in rep:
+ self.urls = [rep['longUrl']]
+ else:
+ self.fail(_("Unable to expand shortened link"))
diff --git a/pyload/plugin/crypter/HoerbuchIn.py b/pyload/plugin/crypter/HoerbuchIn.py
new file mode 100644
index 000000000..a809e756c
--- /dev/null
+++ b/pyload/plugin/crypter/HoerbuchIn.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from BeautifulSoup import BeautifulSoup, BeautifulStoneSoup
+
+from pyload.plugin.Crypter import Crypter
+
+
+class HoerbuchIn(Crypter):
+ __name = "HoerbuchIn"
+ __type = "crypter"
+ __version = "0.60"
+
+ __pattern = r'http://(?:www\.)?hoerbuch\.in/(wp/horbucher/\d+/.+/|tp/out\.php\?.+|protection/folder_\d+\.html)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Hoerbuch.in decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org"),
+ ("mkaay", "mkaay@mkaay.de")]
+
+
+ article = re.compile("http://(?:www\.)?hoerbuch\.in/wp/horbucher/\d+/.+/")
+ protection = re.compile("http://(?:www\.)?hoerbuch\.in/protection/folder_\d+.html")
+
+
+ def decrypt(self, pyfile):
+ self.pyfile = pyfile
+
+ if self.article.match(pyfile.url):
+ html = self.load(pyfile.url)
+ soup = BeautifulSoup(html, convertEntities=BeautifulStoneSoup.HTML_ENTITIES)
+
+ abookname = soup.find("a", attrs={"rel": "bookmark"}).text
+ for a in soup.findAll("a", attrs={"href": self.protection}):
+ package = "%s (%s)" % (abookname, a.previousSibling.previousSibling.text[:-1])
+ links = self.decryptFolder(a['href'])
+
+ self.packages.append((package, links, package))
+ else:
+ self.urls = self.decryptFolder(pyfile.url)
+
+
+ def decryptFolder(self, url):
+ m = self.protection.search(url)
+ if m is None:
+ self.fail(_("Bad URL"))
+ url = m.group(0)
+
+ self.pyfile.url = url
+ html = self.load(url, post={"viewed": "adpg"})
+
+ links = []
+ pattern = re.compile("http://www\.hoerbuch\.in/protection/(\w+)/(.*?)\"")
+ for hoster, lid in pattern.findall(html):
+ self.req.lastURL = url
+ self.load("http://www.hoerbuch.in/protection/%s/%s" % (hoster, lid))
+ links.append(self.req.lastEffectiveURL)
+
+ return links
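+
+ # Each protection/<hoster>/<id> URL redirects straight to the real hoster
+ # link, so the loop above only loads it and records req.lastEffectiveURL
+ # instead of parsing any HTML.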
diff --git a/pyload/plugin/crypter/HotfileCom.py b/pyload/plugin/crypter/HotfileCom.py
new file mode 100644
index 000000000..11e0008c5
--- /dev/null
+++ b/pyload/plugin/crypter/HotfileCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class HotfileCom(DeadCrypter):
+ __name = "HotfileCom"
+ __type = "crypter"
+ __version = "0.30"
+
+ __pattern = r'https?://(?:www\.)?hotfile\.com/list/\w+/\w+'
+ __config = []
+
+ __description = """Hotfile.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org")]
+
+
+getInfo = create_getInfo(HotfileCom)
diff --git a/pyload/plugin/crypter/ILoadTo.py b/pyload/plugin/crypter/ILoadTo.py
new file mode 100644
index 000000000..d012c9bfd
--- /dev/null
+++ b/pyload/plugin/crypter/ILoadTo.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class ILoadTo(DeadCrypter):
+ __name = "ILoadTo"
+ __type = "crypter"
+ __version = "0.11"
+
+ __pattern = r'http://(?:www\.)?iload\.to/go/\d+-[\w.-]+/'
+ __config = []
+
+ __description = """Iload.to decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("hzpz", "")]
+
+
+getInfo = create_getInfo(ILoadTo)
diff --git a/pyload/plugin/crypter/ImgurComAlbum.py b/pyload/plugin/crypter/ImgurComAlbum.py
new file mode 100644
index 000000000..e7df57cb6
--- /dev/null
+++ b/pyload/plugin/crypter/ImgurComAlbum.py
@@ -0,0 +1,27 @@
+import re
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+from pyload.utils import uniqify
+
+
+class ImgurComAlbum(SimpleCrypter):
+ __name = "ImgurComAlbum"
+ __type = "crypter"
+ __version = "0.51"
+
+ __pattern = r'https?://(?:www\.|m\.)?imgur\.com/(a|gallery|)/?\w{5,7}'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Imgur.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("nath_schwarz", "nathan.notwhite@gmail.com")]
+
+
+ NAME_PATTERN = r'(?P<N>.+?) - Imgur'
+ LINK_PATTERN = r'i\.imgur\.com/\w{7}s?\.(?:jpeg|jpg|png|gif|apng)'
+
+
+ def getLinks(self):
+ f = lambda url: "http://" + re.sub(r'(\w{7})s\.', r'\1.', url)
+ return uniqify(map(f, re.findall(self.LINK_PATTERN, self.html)))
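+
+ # The lambda above strips imgur's thumbnail marker: a trailing "s" after the
+ # 7-character image id appears to denote the small preview, so a hypothetical
+ # "i.imgur.com/abc1234s.jpg" becomes "http://i.imgur.com/abc1234.jpg" before
+ # the list is deduplicated with uniqify().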
diff --git a/pyload/plugin/crypter/JunocloudMe.py b/pyload/plugin/crypter/JunocloudMe.py
new file mode 100644
index 000000000..b47a5fc58
--- /dev/null
+++ b/pyload/plugin/crypter/JunocloudMe.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSCrypter import XFSCrypter
+
+
+class JunocloudMe(XFSCrypter):
+ __name = "JunocloudMe"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?junocloud\.me/folders/(?P<ID>\d+/\w+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Junocloud.me folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "junocloud.me"
diff --git a/pyload/plugin/crypter/LetitbitNet.py b/pyload/plugin/crypter/LetitbitNet.py
new file mode 100644
index 000000000..ca787e31a
--- /dev/null
+++ b/pyload/plugin/crypter/LetitbitNet.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+
+import re
+from pyload.plugin.Crypter import Crypter
+
+
+class LetitbitNet(Crypter):
+ __name = "LetitbitNet"
+ __type = "crypter"
+ __version = "0.10"
+
+ __pattern = r'http://(?:www\.)?letitbit\.net/folder/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Letitbit.net folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("DHMH", "webmaster@pcProfil.de"),
+ ("z00nx", "z00nx0@gmail.com")]
+
+
+ FOLDER_PATTERN = r'<table>(.*)</table>'
+ LINK_PATTERN = r'<a href="([^"]+)" target="_blank">'
+
+
+ def decrypt(self, pyfile):
+ html = self.load(pyfile.url)
+
+ folder = re.search(self.FOLDER_PATTERN, html, re.S)
+ if folder is None:
+ self.error(_("FOLDER_PATTERN not found"))
+
+ self.urls.extend(re.findall(self.LINK_PATTERN, folder.group(0)))
diff --git a/pyload/plugin/crypter/LinkCryptWs.py b/pyload/plugin/crypter/LinkCryptWs.py
new file mode 100644
index 000000000..2bd7b4c17
--- /dev/null
+++ b/pyload/plugin/crypter/LinkCryptWs.py
@@ -0,0 +1,327 @@
+# -*- coding: utf-8 -*-
+
+import base64
+import binascii
+import re
+
+import pycurl
+
+from Crypto.Cipher import AES
+
+from pyload.plugin.Crypter import Crypter
+from pyload.utils import html_unescape
+
+
+class LinkCryptWs(Crypter):
+ __name = "LinkCryptWs"
+ __type = "crypter"
+ __version = "0.07"
+
+ __pattern = r'http://(?:www\.)?linkcrypt\.ws/(dir|container)/(?P<ID>\w+)'
+
+ __description = """LinkCrypt.ws decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("kagenoshin", "kagenoshin[AT]gmx[DOT]ch"),
+ ("glukgluk", ""),
+ ("Gummibaer", "")]
+
+
+ CRYPTED_KEY = "crypted"
+ JK_KEY = "jk"
+
+
+ def setup(self):
+ self.captcha = False
+ self.links = []
+ self.sources = ['cnl', 'web', 'dlc', 'rsdf', 'ccf']
+
+
+ def prepare(self):
+ # Init
+ self.fileid = re.match(self.__pattern, self.pyfile.url).group('ID')
+
+ self.req.cj.setCookie("linkcrypt.ws", "language", "en")
+
+ # Request package
+ self.req.http.c.setopt(pycurl.USERAGENT, "Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko") #: better chance of not getting key-captchas
+ self.html = self.load(self.pyfile.url)
+
+
+ def decrypt(self, pyfile):
+ if not self.js:
+ self.fail(_("Missing JS Engine"))
+
+ self.prepare()
+
+ if not self.isOnline():
+ self.offline()
+
+ if self.isKeyCaptchaProtected():
+ self.retry(4, 30, _("Can't handle Key-Captcha"))
+
+ if self.isCaptchaProtected():
+ self.captcha = True
+ self.unlockCaptchaProtection()
+ self.handleCaptchaErrors()
+
+ # Check for protection
+ if self.isPasswordProtected():
+ self.unlockPasswordProtection()
+ self.handleErrors()
+
+ # get unrar password
+ self.getunrarpw()
+
+ # Get package name and folder
+ package_name, folder_name = self.getPackageInfo()
+
+ #get the container definitions from script section
+ self.get_container_html()
+
+ # Extract package links
+ for type in self.sources:
+ links = self.handleLinkSource(type)
+
+ if links:
+ self.links.extend(links)
+ break
+
+ if self.links:
+ self.packages = [(package_name, self.links, folder_name)]
+
+
+ def isOnline(self):
+ if "<title>Linkcrypt.ws // Error 404</title>" in self.html:
+ self.logDebug("folder doesen't exist anymore")
+ return False
+ else:
+ return True
+
+
+ def isPasswordProtected(self):
+ if "Authorizing" in self.html:
+ self.logDebug("Links are password protected")
+ return True
+ else:
+ return False
+
+
+ def isCaptchaProtected(self):
+ if 'id="captcha">' in self.html:
+ self.logDebug("Links are captcha protected")
+ return True
+ else:
+ return False
+
+
+ def isKeyCaptchaProtected(self):
+ if re.search(r'Key[ -]', self.html, re.I):
+ return True
+ else:
+ return False
+
+
+ def unlockPasswordProtection(self):
+ password = self.getPassword()
+
+ if password:
+ self.logDebug("Submitting password [%s] for protected links" % password)
+ self.html = self.load(self.pyfile.url, post={"password": password, 'x': "0", 'y': "0"})
+ else:
+ self.fail(_("Folder is password protected"))
+
+
+ def unlockCaptchaProtection(self):
+ captcha_url = re.search(r'<form.*?id\s*?=\s*?"captcha"[^>]*?>.*?<\s*?input.*?src="([^"]*?)"', self.html, re.I | re.S).group(1)
+ captcha_code = self.decryptCaptcha(captcha_url, forceUser=True, imgtype="gif", result_type='positional')
+
+ self.html = self.load(self.pyfile.url, post={"x": captcha_code[0], "y": captcha_code[1]})
+
+
+ def getPackageInfo(self):
+ name = self.pyfile.package().name
+ folder = self.pyfile.package().folder
+
+ self.logDebug("Defaulting to pyfile name [%s] and folder [%s] for package" % (name, folder))
+
+ return name, folder
+
+
+ def getunrarpw(self):
+ sitein = self.html
+ indexi = sitein.find("|source|") + 8
+ indexe = sitein.find("|",indexi)
+
+ unrarpw = sitein[indexi:indexe]
+
+ if unrarpw not in ("Password", "Dateipasswort"):
+ self.logDebug("File password set to: [%s]" % unrarpw)
+ self.pyfile.package().password = unrarpw
+
+
+ def handleErrors(self):
+ if self.isPasswordProtected():
+ self.fail(_("Incorrect password"))
+
+
+ def handleCaptchaErrors(self):
+ if self.captcha:
+ if "Your choice was wrong!" in self.html:
+ self.invalidCaptcha()
+ self.retry()
+ else:
+ self.correctCaptcha()
+
+
+ def handleLinkSource(self, type):
+ if type == 'cnl':
+ return self.handleCNL2()
+
+ elif type == 'web':
+ return self.handleWebLinks()
+
+ elif type in ('rsdf', 'ccf', 'dlc'):
+ return self.handleContainer(type)
+
+ else:
+ self.fail(_("Unknown source type: %s") % type) #@TODO: Replace with self.error in 0.4.10
+
+
+ def handleWebLinks(self):
+ self.logDebug("Search for Web links ")
+
+ package_links = []
+ pattern = r'<form action="http://linkcrypt.ws/out.html"[^>]*?>.*?<input[^>]*?value="([^"]*?)"[^>]*?name="file"'
+ ids = re.findall(pattern, self.html, re.I | re.S)
+
+ self.logDebug("Decrypting %d Web links" % len(ids))
+
+ for idx, weblink_id in enumerate(ids):
+ try:
+ self.logDebug("Decrypting Web link %d, %s" % (idx + 1, weblink_id))
+
+ res = self.load("http://linkcrypt.ws/out.html", post = {'file':weblink_id})
+
+ indexs = res.find("window.location =") + 19
+ indexe = res.find('"', indexs)
+
+ link2 = res[indexs:indexe]
+
+ self.logDebug(link2)
+
+ link2 = html_unescape(link2)
+ package_links.append(link2)
+
+ except Exception, detail:
+ self.logDebug("Error decrypting Web link %s, %s" % (weblink_id, detail))
+
+ return package_links
+
+
+ def get_container_html(self):
+ self.container_html = []
+
+ script = re.search(r'<div.*?id="ad_cont".*?<script.*?javascrip[^>]*?>(.*?)</script', self.html, re.I | re.S)
+
+ if script:
+ container_html_text = script.group(1)
+ container_html_text = container_html_text.strip()
+ self.container_html = container_html_text.splitlines()
+
+
+ def handle_javascript(self, line):
+ return self.js.eval(line.replace('{}))',"{}).replace('document.open();document.write','').replace(';document.close();',''))"))
+
+
+ def handleContainer(self, type):
+ package_links = []
+ type = type.lower()
+
+ self.logDebug('Search for %s Container links' % type.upper())
+
+ if not type.isalnum(): # check to prevent broken re-pattern (cnl2,rsdf,ccf,dlc,web are all alpha-numeric)
+ self.fail(_("Unknown container type: %s") % type) #@TODO: Replace with self.error in 0.4.10
+
+ for line in self.container_html:
+ if type in line:
+ jseval = self.handle_javascript(line)
+ clink = re.search(r'href=["\']([^"\']*?)["\']',jseval,re.I)
+
+ if not clink:
+ continue
+
+ self.logDebug("clink avaible")
+
+ package_name, folder_name = self.getPackageInfo()
+ self.logDebug("Added package with name %s.%s and container link %s" %( package_name, type, clink.group(1)))
+ self.core.api.uploadContainer( "%s.%s" %(package_name, type), self.load(clink.group(1)))
+ return "Found it"
+
+ return package_links
+
+
+ def handleCNL2(self):
+ self.logDebug("Search for CNL links")
+
+ package_links = []
+ cnl_line = None
+
+ for line in self.container_html:
+ if "cnl" in line:
+ cnl_line = line
+ break
+
+ if cnl_line:
+ self.logDebug("cnl_line gefunden")
+
+ try:
+ cnl_section = self.handle_javascript(cnl_line)
+ (vcrypted, vjk) = self._getCipherParams(cnl_section)
+ for (crypted, jk) in zip(vcrypted, vjk):
+ package_links.extend(self._getLinks(crypted, jk))
+ except Exception:
+ self.logError(_("Unable to decrypt CNL links (JS Error) try to get over links"))
+ return self.handleWebLinks()
+
+ return package_links
+
+
+ def _getCipherParams(self, cnl_section):
+ # Get jk
+ jk_re = r'<INPUT.*?NAME="%s".*?VALUE="(.*?)"' % LinkCryptWs.JK_KEY
+ vjk = re.findall(jk_re, cnl_section)
+
+ # Get crypted
+ crypted_re = r'<INPUT.*?NAME="%s".*?VALUE="(.*?)"' % LinkCryptWs.CRYPTED_KEY
+ vcrypted = re.findall(crypted_re, cnl_section)
+
+ # Log and return
+ self.logDebug("Detected %d crypted blocks" % len(vcrypted))
+ return vcrypted, vjk
+
+
+ def _getLinks(self, crypted, jk):
+ # Get key
+ jreturn = self.js.eval("%s f()" % jk)
+ key = binascii.unhexlify(jreturn)
+
+ self.logDebug("JsEngine returns value [%s]" % jreturn)
+
+ # Decode crypted
+ crypted = base64.standard_b64decode(crypted)
+
+ # Decrypt
+ Key = key
+ IV = key
+ obj = AES.new(Key, AES.MODE_CBC, IV)
+ text = obj.decrypt(crypted)
+
+ # Extract links
+ text = text.replace("\x00", "").replace("\r", "")
+ links = text.split("\n")
+ links = filter(lambda x: x != "", links)
+
+ # Log and return
+ self.logDebug("Package has %d links" % len(links))
+
+ return links
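+
+ # Unlike FilecryptCc, where "jk" is already a hex key, here "jk" is a small
+ # JavaScript function: it is run through the configured JS engine ("%s f()")
+ # and the hex string it returns is unhexlified into the AES key, which again
+ # doubles as the CBC IV. That is why decrypt() bails out early with
+ # "Missing JS Engine" when no JS engine is available.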
diff --git a/pyload/plugin/crypter/LinkSaveIn.py b/pyload/plugin/crypter/LinkSaveIn.py
new file mode 100644
index 000000000..3d5a989eb
--- /dev/null
+++ b/pyload/plugin/crypter/LinkSaveIn.py
@@ -0,0 +1,246 @@
+# -*- coding: utf-8 -*-
+#
+# * cnl2 and web links are skipped if JS is not available (instead of failing the package)
+# * only best available link source is used (priority: cnl2 > rsdf > ccf > dlc > web)
+
+import base64
+import binascii
+import re
+
+from Crypto.Cipher import AES
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+from pyload.utils import html_unescape
+
+
+class LinkSaveIn(SimpleCrypter):
+ __name = "LinkSaveIn"
+ __type = "crypter"
+ __version = "2.02"
+
+ __pattern = r'http://(?:www\.)?linksave\.in/(?P<id>\w+)$'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """LinkSave.in decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es")]
+
+
+ COOKIES = [("linksave.in", "Linksave_Language", "english")]
+
+ # Constants
+ _JK_KEY_ = "jk"
+ _CRYPTED_KEY_ = "crypted"
+
+
+ def setup(self):
+ self.fileid = None
+ self.captcha = False
+ self.package = None
+ self.preferred_sources = ["cnl2", "rsdf", "ccf", "dlc", "web"]
+
+
+ def decrypt(self, pyfile):
+ # Init
+ self.package = pyfile.package()
+ self.fileid = re.match(self.__pattern, pyfile.url).group('id')
+
+ # Request package
+ self.html = self.load(pyfile.url)
+ if not self.isOnline():
+ self.offline()
+
+ # Check for protection
+ if self.isPasswordProtected():
+ self.unlockPasswordProtection()
+ self.handleErrors()
+
+ if self.isCaptchaProtected():
+ self.captcha = True
+ self.unlockCaptchaProtection()
+ self.handleErrors()
+
+ # Get package name and folder
+ (package_name, folder_name) = self.getPackageInfo()
+
+ # Extract package links
+ package_links = []
+ for type_ in self.preferred_sources:
+ package_links.extend(self.handleLinkSource(type_))
+ if package_links: # use only first source which provides links
+ break
+ package_links = set(package_links)
+
+ # Pack
+ if package_links:
+ self.packages = [(package_name, package_links, folder_name)]
+
+
+ def isOnline(self):
+ if "<big>Error 404 - Folder not found!</big>" in self.html:
+ self.logDebug("File not found")
+ return False
+ return True
+
+
+ def isPasswordProtected(self):
+ if re.search(r'''<input.*?type="password"''', self.html):
+ self.logDebug("Links are password protected")
+ return True
+
+
+ def isCaptchaProtected(self):
+ if "<b>Captcha:</b>" in self.html:
+ self.logDebug("Links are captcha protected")
+ return True
+ return False
+
+
+ def unlockPasswordProtection(self):
+ password = self.getPassword()
+ self.logDebug("Submitting password [%s] for protected links" % password)
+ post = {"id": self.fileid, "besucherpasswort": password, 'login': 'submit'}
+ self.html = self.load(self.pyfile.url, post=post)
+
+
+ def unlockCaptchaProtection(self):
+ captcha_hash = re.search(r'name="hash" value="([^"]+)', self.html).group(1)
+ captcha_url = re.search(r'src=".(/captcha/cap.php\?hsh=[^"]+)', self.html).group(1)
+ captcha_code = self.decryptCaptcha("http://linksave.in" + captcha_url, forceUser=True)
+ self.html = self.load(self.pyfile.url, post={"id": self.fileid, "hash": captcha_hash, "code": captcha_code})
+
+
+ def getPackageInfo(self):
+ name = self.pyfile.package().name
+ folder = self.pyfile.package().folder
+ self.logDebug("Defaulting to pyfile name [%s] and folder [%s] for package" % (name, folder))
+ return name, folder
+
+
+ def handleErrors(self):
+ if "The visitorpassword you have entered is wrong" in self.html:
+ self.logDebug("Incorrect password, please set right password on 'Edit package' form and retry")
+ self.fail(_("Incorrect password, please set right password on 'Edit package' form and retry"))
+
+ if self.captcha:
+ if "Wrong code. Please retry" in self.html:
+ self.invalidCaptcha()
+ self.retry()
+ else:
+ self.correctCaptcha()
+
+
+ def handleLinkSource(self, type_):
+ if type_ == "cnl2":
+ return self.handleCNL2()
+ elif type_ in ("rsdf", "ccf", "dlc"):
+ return self.handleContainer(type_)
+ elif type_ == "web":
+ return self.handleWebLinks()
+ else:
+ self.error('Unknown source type "%s" (this is probably a bug)' % type_)
+
+
+ def handleWebLinks(self):
+ package_links = []
+ self.logDebug("Search for Web links")
+ if not self.js:
+ self.logDebug("No JS -> skip Web links")
+ else:
+ #@TODO: Gather paginated web links
+ pattern = r'<a href="http://linksave\.in/(\w{43})"'
+ ids = re.findall(pattern, self.html)
+ self.logDebug("Decrypting %d Web links" % len(ids))
+ for i, weblink_id in enumerate(ids):
+ try:
+ webLink = "http://linksave.in/%s" % weblink_id
+
+ self.logDebug("Decrypting Web link %d, %s" % (i + 1, webLink))
+
+ fwLink = "http://linksave.in/fw-%s" % weblink_id
+ res = self.load(fwLink)
+
+ jscode = re.findall(r'<script type="text/javascript">(.*)</script>', res)[-1]
+ jseval = self.js.eval("document = { write: function(e) { return e; } }; %s" % jscode)
+ dlLink = re.search(r'http://linksave\.in/dl-\w+', jseval).group(0)
+ self.logDebug("JsEngine returns value [%s] for redirection link" % dlLink)
+
+ res = self.load(dlLink)
+ link = html_unescape(re.search(r'<iframe src="(.+?)"', res).group(1))
+
+ package_links.append(link)
+
+ except Exception, detail:
+ self.logDebug("Error decrypting Web link %s, %s" % (webLink, detail))
+
+ return package_links
+
+
+ def handleContainer(self, type_):
+ package_links = []
+ type_ = type_.lower()
+ self.logDebug("Seach for %s Container links" % type_.upper())
+ if not type_.isalnum(): # check to prevent broken re-pattern (cnl2,rsdf,ccf,dlc,web are all alpha-numeric)
+ self.error('Unknown container type "%s" (this is probably a bug)' % type_)
+ pattern = r'\(\'%s_link\'\).href=unescape\(\'(.*?\.%s)\'\)' % (type_, type_)
+ containersLinks = re.findall(pattern, self.html)
+ self.logDebug("Found %d %s Container links" % (len(containersLinks), type_.upper()))
+ for containerLink in containersLinks:
+ link = "http://linksave.in/%s" % html_unescape(containerLink)
+ package_links.append(link)
+ return package_links
+
+
+ def handleCNL2(self):
+ package_links = []
+ self.logDebug("Search for CNL2 links")
+ if not self.js:
+ self.logDebug("No JS -> skip CNL2 links")
+ elif 'cnl2_load' in self.html:
+ try:
+ (vcrypted, vjk) = self._getCipherParams()
+ for (crypted, jk) in zip(vcrypted, vjk):
+ package_links.extend(self._getLinks(crypted, jk))
+ except Exception:
+ self.fail(_("Unable to decrypt CNL2 links"))
+ return package_links
+
+
+ def _getCipherParams(self):
+ # Get jk
+ jk_re = r'<INPUT.*?NAME="%s".*?VALUE="(.*?)"' % LinkSaveIn._JK_KEY_
+ vjk = re.findall(jk_re, self.html)
+
+ # Get crypted
+ crypted_re = r'<INPUT.*?NAME="%s".*?VALUE="(.*?)"' % LinkSaveIn._CRYPTED_KEY_
+ vcrypted = re.findall(crypted_re, self.html)
+
+ # Log and return
+ self.logDebug("Detected %d crypted blocks" % len(vcrypted))
+ return vcrypted, vjk
+
+
+ def _getLinks(self, crypted, jk):
+ # Get key
+ jreturn = self.js.eval("%s f()" % jk)
+ self.logDebug("JsEngine returns value [%s]" % jreturn)
+ key = binascii.unhexlify(jreturn)
+
+ # Decode crypted
+ crypted = base64.standard_b64decode(crypted)
+
+ # Decrypt
+ Key = key
+ IV = key
+ obj = AES.new(Key, AES.MODE_CBC, IV)
+ text = obj.decrypt(crypted)
+
+ # Extract links
+ text = text.replace("\x00", "").replace("\r", "")
+ links = text.split("\n")
+ links = filter(lambda x: x != "", links)
+
+ # Log and return
+ self.logDebug("Package has %d links" % len(links))
+ return links
diff --git a/pyload/plugin/crypter/LinkdecrypterCom.py b/pyload/plugin/crypter/LinkdecrypterCom.py
new file mode 100644
index 000000000..349e8b2d3
--- /dev/null
+++ b/pyload/plugin/crypter/LinkdecrypterCom.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+
+import re
+from pyload.plugin.Crypter import Crypter
+
+
+class LinkdecrypterCom(Crypter):
+ __name = "LinkdecrypterCom"
+ __type = "crypter"
+ __version = "0.27"
+
+ __pattern = r'^unmatchable$'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Linkdecrypter.com"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("flowlee", "")]
+
+
+ TEXTAREA_PATTERN = r'<textarea name="links" wrap="off" readonly="1" class="caja_des">(.+)</textarea>'
+ PASSWORD_PATTERN = r'<input type="text" name="password"'
+ CAPTCHA_PATTERN = r'<img class="captcha" src="(.+?)"(.*?)>'
+ REDIR_PATTERN = r'<i>(Click <a href="./">here</a> if your browser does not redirect you).</i>'
+
+
+ def decrypt(self, pyfile):
+ self.passwords = self.getPassword().splitlines()
+
+ # API not working anymore
+ self.urls = self.decryptHTML()
+
+
+ def decryptAPI(self):
+ get_dict = {"t": "link", "url": self.pyfile.url, "lcache": "1"}
+ self.html = self.load('http://linkdecrypter.com/api', get=get_dict)
+ if self.html.startswith('http://'):
+ return self.html.splitlines()
+
+ if self.html == 'INTERRUPTION(PASSWORD)':
+ for get_dict['pass'] in self.passwords:
+ self.html = self.load('http://linkdecrypter.com/api', get=get_dict)
+ if self.html.startswith('http://'):
+ return self.html.splitlines()
+
+ self.logError("API", self.html)
+ if self.html == 'INTERRUPTION(PASSWORD)':
+ self.fail(_("No or incorrect password"))
+
+ return None
+
+
+ def decryptHTML(self):
+ retries = 5
+
+ post_dict = {"link_cache": "on", "pro_links": self.pyfile.url, "modo_links": "text"}
+ self.html = self.load('http://linkdecrypter.com/', post=post_dict, cookies=True, decode=True)
+
+ while self.passwords or retries:
+ m = re.search(self.TEXTAREA_PATTERN, self.html, flags=re.S)
+ if m:
+ return [x for x in m.group(1).splitlines() if '[LINK-ERROR]' not in x]
+
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m:
+ captcha_url = 'http://linkdecrypter.com/' + m.group(1)
+ result_type = "positional" if "getPos" in m.group(2) else "textual"
+
+ m = re.search(r"<p><i><b>([^<]+)</b></i></p>", self.html)
+ msg = m.group(1) if m else ""
+ self.logInfo(_("Captcha protected link"), result_type, msg)
+
+ captcha = self.decryptCaptcha(captcha_url, result_type=result_type)
+ if result_type == "positional":
+ captcha = "%d|%d" % captcha
+ self.html = self.load('http://linkdecrypter.com/', post={"captcha": captcha}, decode=True)
+ retries -= 1
+
+ elif self.PASSWORD_PATTERN in self.html:
+ if self.passwords:
+ password = self.passwords.pop(0)
+ self.logInfo(_("Password protected link, trying ") + password)
+ self.html = self.load('http://linkdecrypter.com/', post={'password': password}, decode=True)
+ else:
+ self.fail(_("No or incorrect password"))
+
+ else:
+ retries -= 1
+ self.html = self.load('http://linkdecrypter.com/', cookies=True, decode=True)
+
+ return None
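+
+ # decryptHTML() keeps resubmitting the form until the links textarea shows
+ # up, spending at most 5 retries plus one attempt per stored password.
+ # Positional captchas are answered as "x|y" coordinates (the "%d|%d"
+ # formatting above), textual ones as a plain string.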
diff --git a/pyload/plugin/crypter/LixIn.py b/pyload/plugin/crypter/LixIn.py
new file mode 100644
index 000000000..f992d38fd
--- /dev/null
+++ b/pyload/plugin/crypter/LixIn.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Crypter import Crypter
+
+
+class LixIn(Crypter):
+ __name = "LixIn"
+ __type = "crypter"
+ __version = "0.22"
+
+ __pattern = r'http://(?:www\.)?lix\.in/(?P<ID>.+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Lix.in decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org")]
+
+
+ CAPTCHA_PATTERN = r'<img src="(?P<image>captcha_img\.php\?.*?)"'
+ SUBMIT_PATTERN = r'value=\'continue.*?\''
+ LINK_PATTERN = r'name="ifram" src="(?P<link>.*?)"'
+
+
+ def decrypt(self, pyfile):
+ url = pyfile.url
+
+ m = re.match(self.__pattern, url)
+ if m is None:
+ self.error(_("Unable to identify file ID"))
+
+ id = m.group("ID")
+ self.logDebug("File id is %s" % id)
+
+ self.html = self.load(url, decode=True)
+
+ m = re.search(self.SUBMIT_PATTERN, self.html)
+ if m is None:
+ self.error(_("Link doesn't seem valid"))
+
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m:
+ for _i in xrange(5):
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m:
+ self.logDebug("Trying captcha")
+ captcharesult = self.decryptCaptcha("http://lix.in/" + m.group("image"))
+ self.html = self.load(url, decode=True,
+ post={"capt": captcharesult, "submit": "submit", "tiny": id})
+ else:
+ self.logDebug("No captcha/captcha solved")
+ else:
+ self.html = self.load(url, decode=True, post={"submit": "submit", "tiny": id})
+
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("Unable to find destination url"))
+ else:
+ self.urls = [m.group("link")]
+ self.logDebug("Found link %s, adding to package" % self.urls[0])
diff --git a/pyload/plugin/crypter/LofCc.py b/pyload/plugin/crypter/LofCc.py
new file mode 100644
index 000000000..f1f4d1680
--- /dev/null
+++ b/pyload/plugin/crypter/LofCc.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class LofCc(DeadCrypter):
+ __name = "LofCc"
+ __type = "crypter"
+ __version = "0.21"
+
+ __pattern = r'http://(?:www\.)?lof\.cc/(.*)'
+ __config = []
+
+ __description = """Lof.cc decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+getInfo = create_getInfo(LofCc)
diff --git a/pyload/plugin/crypter/MBLinkInfo.py b/pyload/plugin/crypter/MBLinkInfo.py
new file mode 100644
index 000000000..64eb57167
--- /dev/null
+++ b/pyload/plugin/crypter/MBLinkInfo.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class MBLinkInfo(DeadCrypter):
+ __name = "MBLinkInfo"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?mblink\.info/?\?id=(\d+)'
+ __config = []
+
+ __description = """MBLink.info decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Gummibaer", "Gummibaer@wiki-bierkiste.de"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(MBLinkInfo)
diff --git a/pyload/plugin/crypter/MediafireCom.py b/pyload/plugin/crypter/MediafireCom.py
new file mode 100644
index 000000000..036c41233
--- /dev/null
+++ b/pyload/plugin/crypter/MediafireCom.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+
+import re
+from pyload.plugin.Crypter import Crypter
+from pyload.plugin.hoster.MediafireCom import checkHTMLHeader
+from pyload.utils import json_loads
+
+
+class MediafireCom(Crypter):
+ __name = "MediafireCom"
+ __type = "crypter"
+ __version = "0.14"
+
+ __pattern = r'http://(?:www\.)?mediafire\.com/(folder/|\?sharekey=|\?\w{13}($|[/#]))'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Mediafire.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ FOLDER_KEY_PATTERN = r'var afI= \'(\w+)'
+ LINK_PATTERN = r'<meta property="og:url" content="http://www\.mediafire\.com/\?(\w+)"/>'
+
+
+ def decrypt(self, pyfile):
+ url, result = checkHTMLHeader(pyfile.url)
+ self.logDebug("Location (%d): %s" % (result, url))
+
+ if result == 0:
+ # load and parse html
+ html = self.load(pyfile.url)
+ m = re.search(self.LINK_PATTERN, html)
+ if m:
+ # file page
+ self.urls.append("http://www.mediafire.com/file/%s" % m.group(1))
+ else:
+ # folder page
+ m = re.search(self.FOLDER_KEY_PATTERN, html)
+ if m:
+ folder_key = m.group(1)
+ self.logDebug("FOLDER KEY: %s" % folder_key)
+
+ json_resp = json_loads(self.load("http://www.mediafire.com/api/folder/get_info.php",
+ get={'folder_key' : folder_key,
+ 'response_format': "json",
+ 'version' : 1}))
+ #self.logInfo(json_resp)
+ if json_resp['response']['result'] == "Success":
+ for link in json_resp['response']['folder_info']['files']:
+ self.urls.append("http://www.mediafire.com/file/%s" % link['quickkey'])
+ else:
+ self.fail(json_resp['response']['message'])
+ elif result == 1:
+ self.offline()
+ else:
+ self.urls.append(url)
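+
+ # Judging only from the fields accessed above, the folder API is expected to
+ # answer roughly like this (shape inferred from this code, not from any docs):
+ #
+ #   {"response": {"result": "Success",
+ #                 "folder_info": {"files": [{"quickkey": "..."}]},
+ #                 "message": "..."}}
+ #
+ # Each quickkey is then turned back into a regular file URL for the hoster.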
diff --git a/pyload/plugin/crypter/MegaRapidCz.py b/pyload/plugin/crypter/MegaRapidCz.py
new file mode 100644
index 000000000..2cedfec09
--- /dev/null
+++ b/pyload/plugin/crypter/MegaRapidCz.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class MegaRapidCz(SimpleCrypter):
+ __name = "MegaRapidCz"
+ __type = "crypter"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?(share|mega)rapid\.cz/slozka/\d+/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Share-Rapid.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ LINK_PATTERN = r'<td class="soubor"[^>]*><a href="([^"]+)">'
diff --git a/pyload/plugin/crypter/MegauploadCom.py b/pyload/plugin/crypter/MegauploadCom.py
new file mode 100644
index 000000000..5a4b2349e
--- /dev/null
+++ b/pyload/plugin/crypter/MegauploadCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class MegauploadCom(DeadCrypter):
+ __name = "MegauploadCom"
+ __type = "crypter"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?megaupload\.com/(\?f|xml/folderfiles\.php\?.*&?folderid)=\w+'
+
+ __description = """Megaupload.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(MegauploadCom)
diff --git a/pyload/plugin/crypter/Movie2kTo.py b/pyload/plugin/crypter/Movie2kTo.py
new file mode 100644
index 000000000..b481ead73
--- /dev/null
+++ b/pyload/plugin/crypter/Movie2kTo.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class Movie2kTo(DeadCrypter):
+ __name = "Movie2kTo"
+ __type = "crypter"
+ __version = "0.51"
+
+ __pattern = r'http://(?:www\.)?movie2k\.to/(.*)\.html'
+ __config = []
+
+ __description = """Movie2k.to decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("4Christopher", "4Christopher@gmx.de")]
+
+
+getInfo = create_getInfo(Movie2kTo)
diff --git a/pyload/plugin/crypter/MultiUpOrg.py b/pyload/plugin/crypter/MultiUpOrg.py
new file mode 100644
index 000000000..1b3116ad5
--- /dev/null
+++ b/pyload/plugin/crypter/MultiUpOrg.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+
+import re
+from urlparse import urljoin
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class MultiUpOrg(SimpleCrypter):
+ __name = "MultiUpOrg"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?multiup\.org/(en|fr)/(?P<TYPE>project|download|miror)/\w+(/\w+)?'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """MultiUp.org decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'<title>.*(?:Project|Projet|ownload|élécharger) (?P<N>.+?) (\(|- )'
+
+
+ def getLinks(self):
+ m_type = re.match(self.__pattern, self.pyfile.url).group("TYPE")
+
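+ # "project" pages list their download pages directly; "download"/"miror" pages need a hop to the mirror page before the hoster links appear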
+ if m_type == "project":
+ pattern = r'\n(http://www\.multiup\.org/(?:en|fr)/download/.*)'
+ else:
+ pattern = r'style="width:97%;text-align:left".*\n.*href="(.*)"'
+ if m_type == "download":
+ dl_pattern = r'href="(.*)">.*\n.*<h5>DOWNLOAD</h5>'
+ miror_page = urljoin("http://www.multiup.org", re.search(dl_pattern, self.html).group(1))
+ self.html = self.load(miror_page)
+
+ return re.findall(pattern, self.html)
diff --git a/pyload/plugin/crypter/MultiloadCz.py b/pyload/plugin/crypter/MultiloadCz.py
new file mode 100644
index 000000000..a94c53d42
--- /dev/null
+++ b/pyload/plugin/crypter/MultiloadCz.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+import re
+from pyload.plugin.Crypter import Crypter
+
+
+class MultiloadCz(Crypter):
+ __name = "MultiloadCz"
+ __type = "crypter"
+ __version = "0.40"
+
+ __pattern = r'http://(?:[^/]*\.)?multiload\.cz/(stahnout|slozka)/.*'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True),
+ ("usedHoster", "str", "Prefered hoster list (bar-separated)", ""),
+ ("ignoredHoster", "str", "Ignored hoster list (bar-separated)", "")]
+
+ __description = """Multiload.cz decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ FOLDER_PATTERN = r'<form action="" method="get"><textarea[^>]*>([^>]*)</textarea></form>'
+ LINK_PATTERN = r'<p class="manager-server"><strong>([^<]+)</strong></p><p class="manager-linky"><a href="([^"]+)">'
+
+
+ def decrypt(self, pyfile):
+ self.html = self.load(pyfile.url, decode=True)
+
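+ # "slozka" (folder) pages carry all links in a textarea; single-file pages list one mirror per hoster, filtered by the preferred-hoster config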
+ if re.match(self.__pattern, pyfile.url).group(1) == "slozka":
+ m = re.search(self.FOLDER_PATTERN, self.html)
+ if m:
+ self.urls.extend(m.group(1).split())
+ else:
+ m = re.findall(self.LINK_PATTERN, self.html)
+ if m:
+ prefered_set = set(self.getConfig("usedHoster").split('|'))
+ self.urls.extend([x[1] for x in m if x[0] in prefered_set])
+
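+ # no preferred mirror matched: fall back to every hoster that is not explicitly ignored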
+ if not self.urls:
+ ignored_set = set(self.getConfig("ignoredHoster").split('|'))
+ self.urls.extend([x[1] for x in m if x[0] not in ignored_set])
diff --git a/pyload/plugin/crypter/MultiuploadCom.py b/pyload/plugin/crypter/MultiuploadCom.py
new file mode 100644
index 000000000..44c449d72
--- /dev/null
+++ b/pyload/plugin/crypter/MultiuploadCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class MultiuploadCom(DeadCrypter):
+ __name = "MultiuploadCom"
+ __type = "crypter"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?multiupload\.(com|nl)/\w+'
+
+ __description = """MultiUpload.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(MultiuploadCom)
diff --git a/pyload/plugin/crypter/NCryptIn.py b/pyload/plugin/crypter/NCryptIn.py
new file mode 100644
index 000000000..22d8020f2
--- /dev/null
+++ b/pyload/plugin/crypter/NCryptIn.py
@@ -0,0 +1,315 @@
+# -*- coding: utf-8 -*-
+
+import base64
+import binascii
+import re
+
+from Crypto.Cipher import AES
+
+from pyload.plugin.Crypter import Crypter
+from pyload.plugin.internal.captcha import ReCaptcha
+
+
+class NCryptIn(Crypter):
+ __name = "NCryptIn"
+ __type = "crypter"
+ __version = "1.33"
+
+ __pattern = r'http://(?:www\.)?ncrypt\.in/(?P<type>folder|link|frame)-([^/\?]+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """NCrypt.in decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ JK_KEY = "jk"
+ CRYPTED_KEY = "crypted"
+
+ NAME_PATTERN = r'<meta name="description" content="(?P<N>[^"]+)"'
+
+
+ def setup(self):
+ self.package = None
+ self.cleanedHtml = None
+ self.links_source_order = ["cnl2", "rsdf", "ccf", "dlc", "web"]
+ self.protection_type = None
+
+
+ def decrypt(self, pyfile):
+ # Init
+ self.package = pyfile.package()
+ package_links = []
+ package_name = self.package.name
+ folder_name = self.package.folder
+
+ # Deal with single links
+ if self.isSingleLink():
+ package_links.extend(self.handleSingleLink())
+
+ # Deal with folders
+ else:
+
+ # Request folder home
+ self.html = self.requestFolderHome()
+ self.cleanedHtml = self.removeHtmlCrap(self.html)
+ if not self.isOnline():
+ self.offline()
+
+ # Check for folder protection
+ if self.isProtected():
+ self.html = self.unlockProtection()
+ self.cleanedHtml = self.removeHtmlCrap(self.html)
+ self.handleErrors()
+
+ # Prepare package name and folder
+ (package_name, folder_name) = self.getPackageInfo()
+
+ # Extract package links
+ for link_source_type in self.links_source_order:
+ package_links.extend(self.handleLinkSource(link_source_type))
+ if package_links: # use only first source which provides links
+ break
+ package_links = set(package_links)
+
+ # Pack and return links
+ if package_links:
+ self.packages = [(package_name, package_links, folder_name)]
+
+
+ def isSingleLink(self):
+ link_type = re.match(self.__pattern, self.pyfile.url).group('type')
+ return link_type in ("link", "frame")
+
+
+ def requestFolderHome(self):
+ return self.load(self.pyfile.url, decode=True)
+
+
+ def removeHtmlCrap(self, content):
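+ # drop hidden inputs, invisible divs/spans/iframes, the jdownloader block and the global table so later checks only see the visible content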
+ patterns = (r'(type="hidden".*?(name=".*?")?.*?value=".*?")',
+ r'display:none;">(.*?)</(div|span)>',
+ r'<div\s+class="jdownloader"(.*?)</div>',
+ r'<table class="global">(.*?)</table>',
+ r'<iframe\s+style="display:none(.*?)</iframe>')
+ for pattern in patterns:
+ rexpr = re.compile(pattern, re.S)
+ content = re.sub(rexpr, "", content)
+ return content
+
+
+ def isOnline(self):
+ if "Your folder does not exist" in self.cleanedHtml:
+ self.logDebug("File not m")
+ return False
+ return True
+
+
+ def isProtected(self):
+ form = re.search(r'<form.*?name.*?protected.*?>(.*?)</form>', self.cleanedHtml, re.S)
+ if form is not None:
+ content = form.group(1)
+ for keyword in ("password", "captcha"):
+ if keyword in content:
+ self.protection_type = keyword
+ self.logDebug("Links are %s protected" % self.protection_type)
+ return True
+ return False
+
+
+ def getPackageInfo(self):
+ m = re.search(self.NAME_PATTERN, self.html)
+ if m:
+ name = folder = m.group('N').strip()
+ self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder))
+ else:
+ name = self.package.name
+ folder = self.package.folder
+ self.logDebug("Package info not m, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
+ return name, folder
+
+
+ def unlockProtection(self):
+ postData = {}
+
+ form = re.search(r'<form name="protected"(.*?)</form>', self.cleanedHtml, re.S).group(1)
+
+ # Submit package password
+ if "password" in form:
+ password = self.getPassword()
+ self.logDebug("Submitting password [%s] for protected links" % password)
+ postData['password'] = password
+
+ # Resolve anicaptcha
+ if "anicaptcha" in form:
+ self.logDebug("Captcha protected")
+ captchaUri = re.search(r'src="(/temp/anicaptcha/[^"]+)', form).group(1)
+ captcha = self.decryptCaptcha("http://ncrypt.in" + captchaUri)
+ self.logDebug("Captcha resolved [%s]" % captcha)
+ postData['captcha'] = captcha
+
+ # Resolve recaptcha
+ if "recaptcha" in form:
+ self.logDebug("ReCaptcha protected")
+ captcha_key = re.search(r'\?k=(.*?)"', form).group(1)
+ self.logDebug("Resolving ReCaptcha with key [%s]" % captcha_key)
+ recaptcha = ReCaptcha(self)
+ challenge, response = recaptcha.challenge(captcha_key)
+ postData['recaptcha_challenge_field'] = challenge
+ postData['recaptcha_response_field'] = response
+
+ # Resolve circlecaptcha
+ if "circlecaptcha" in form:
+ self.logDebug("CircleCaptcha protected")
+ captcha_img_url = "http://ncrypt.in/classes/captcha/circlecaptcha.php"
+ coords = self.decryptCaptcha(captcha_img_url, forceUser=True, imgtype="png", result_type='positional')
+ self.logDebug("Captcha resolved, coords [%s]" % str(coords))
+ postData['circle.x'] = coords[0]
+ postData['circle.y'] = coords[1]
+
+ # Unlock protection
+ postData['submit_protected'] = 'Continue to folder'
+ return self.load(self.pyfile.url, post=postData, decode=True)
+
+
+ def handleErrors(self):
+ if self.protection_type == "password":
+ if "This password is invalid!" in self.cleanedHtml:
+ self.logDebug("Incorrect password, please set right password on 'Edit package' form and retry")
+ self.fail(_("Incorrect password, please set right password on 'Edit package' form and retry"))
+
+ if self.protection_type == "captcha":
+ if "The securitycheck was wrong!" in self.cleanedHtml:
+ self.invalidCaptcha()
+ self.retry()
+ else:
+ self.correctCaptcha()
+
+
+ def handleLinkSource(self, link_source_type):
+ # Check for JS engine
+ require_js_engine = link_source_type in ("cnl2", "rsdf", "ccf", "dlc")
+ if require_js_engine and not self.js:
+ self.logDebug("No JS engine available, skip %s links" % link_source_type)
+ return []
+
+ # Select suitable handler
+ if link_source_type == 'single':
+ return self.handleSingleLink()
+ if link_source_type == 'cnl2':
+ return self.handleCNL2()
+ elif link_source_type in ("rsdf", "ccf", "dlc"):
+ return self.handleContainers()
+ elif link_source_type == "web":
+ return self.handleWebLinks()
+ else:
+ self.error('Unknown source type "%s" (this is probably a bug)' % link_source_type)
+
+
+ def handleSingleLink(self):
+ self.logDebug("Handling Single link")
+ package_links = []
+
+ # Decrypt single link
+ decrypted_link = self.decryptLink(self.pyfile.url)
+ if decrypted_link:
+ package_links.append(decrypted_link)
+
+ return package_links
+
+
+ def handleCNL2(self):
+ self.logDebug("Handling CNL2 links")
+ package_links = []
+
+ if 'cnl2_output' in self.cleanedHtml:
+ try:
+ (vcrypted, vjk) = self._getCipherParams()
+ for (crypted, jk) in zip(vcrypted, vjk):
+ package_links.extend(self._getLinks(crypted, jk))
+ except Exception:
+ self.fail(_("Unable to decrypt CNL2 links"))
+
+ return package_links
+
+
+ def handleContainers(self):
+ self.logDebug("Handling Container links")
+ package_links = []
+
+ pattern = r'/container/(rsdf|dlc|ccf)/(\w+)'
+ containersLinks = re.findall(pattern, self.html)
+ self.logDebug("Decrypting %d Container links" % len(containersLinks))
+ for containerLink in containersLinks:
+ link = "http://ncrypt.in/container/%s/%s.%s" % (containerLink[0], containerLink[1], containerLink[0])
+ package_links.append(link)
+
+ return package_links
+
+
+ def handleWebLinks(self):
+ self.logDebug("Handling Web links")
+ pattern = r'(http://ncrypt\.in/link-.*?=)'
+ links = re.findall(pattern, self.html)
+
+ package_links = []
+ self.logDebug("Decrypting %d Web links" % len(links))
+ for i, link in enumerate(links):
+ self.logDebug("Decrypting Web link %d, %s" % (i + 1, link))
+ decrypted_link = self.decryptLink(link)
+ if decrypted_link:
+ package_links.append(decrypted_link)
+
+ return package_links
+
+
+ def decryptLink(self, link):
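+ # the "frame-" variant of a link redirects straight to the target, so the Location header is the decrypted URL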
+ try:
+ url = link.replace("link-", "frame-")
+ link = self.load(url, just_header=True)['location']
+ return link
+ except Exception, detail:
+ self.logDebug("Error decrypting link %s, %s" % (link, detail))
+
+
+ def _getCipherParams(self):
+ pattern = r'<input.*?name="%s".*?value="(.*?)"'
+
+ # Get jk
+ jk_re = pattern % NCryptIn.JK_KEY
+ vjk = re.findall(jk_re, self.html)
+
+ # Get crypted
+ crypted_re = pattern % NCryptIn.CRYPTED_KEY
+ vcrypted = re.findall(crypted_re, self.html)
+
+ # Log and return
+ self.logDebug("Detected %d crypted blocks" % len(vcrypted))
+ return vcrypted, vjk
+
+
+ def _getLinks(self, crypted, jk):
+ # Get key
+ jreturn = self.js.eval("%s f()" % jk)
+ self.logDebug("JsEngine returns value [%s]" % jreturn)
+ key = binascii.unhexlify(jreturn)
+
+ # Decode crypted
+ crypted = base64.standard_b64decode(crypted)
+
+ # Decrypt
+ Key = key
+ IV = key
+ obj = AES.new(Key, AES.MODE_CBC, IV)
+ text = obj.decrypt(crypted)
+
+ # Extract links
+ text = text.replace("\x00", "").replace("\r", "")
+ links = text.split("\n")
+ links = filter(lambda x: x != "", links)
+
+ # Log and return
+ self.logDebug("Block has %d links" % len(links))
+ return links
diff --git a/pyload/plugin/crypter/NetfolderIn.py b/pyload/plugin/crypter/NetfolderIn.py
new file mode 100644
index 000000000..279c99d94
--- /dev/null
+++ b/pyload/plugin/crypter/NetfolderIn.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class NetfolderIn(SimpleCrypter):
+ __name = "NetfolderIn"
+ __type = "crypter"
+ __version = "0.72"
+
+ __pattern = r'http://(?:www\.)?netfolder\.in/((?P<id1>\w+)/\w+|folder\.php\?folder_id=(?P<id2>\w+))'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """NetFolder.in decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("fragonib", "fragonib[AT]yahoo[DOT]es")]
+
+
+ NAME_PATTERN = r'<div class="Text">Inhalt des Ordners <span.*>(?P<N>.+)</span></div>'
+
+
+ def prepare(self):
+ super(NetfolderIn, self).prepare()
+
+ # Check for password protection
+ if self.isPasswordProtected():
+ self.html = self.submitPassword()
+ if not self.html:
+ self.fail(_("Incorrect password, please set right password on Add package form and retry"))
+
+
+ def isPasswordProtected(self):
+ if '<input type="password" name="password"' in self.html:
+ self.logDebug("Links are password protected")
+ return True
+ return False
+
+
+ def submitPassword(self):
+ # Gather data
+ try:
+ m = re.match(self.__pattern, self.pyfile.url)
+ id = max(m.group('id1'), m.group('id2'))
+ except AttributeError:
+ self.logDebug("Unable to get package id from url [%s]" % self.pyfile.url)
+ return
+ url = "http://netfolder.in/folder.php?folder_id=" + id
+ password = self.getPassword()
+
+ # Submit package password
+ post = {'password': password, 'save': 'Absenden'}
+ self.logDebug("Submitting password [%s] for protected links with id [%s]" % (password, id))
+ html = self.load(url, {}, post)
+
+ # Check for invalid password
+ if '<div class="InPage_Error">' in html:
+ self.logDebug("Incorrect password, please set right password on Edit package form and retry")
+ return None
+
+ return html
+
+
+ def getLinks(self):
+ links = re.search(r'name="list" value="(.*?)"', self.html).group(1).split(",")
+ self.logDebug("Package has %d links" % len(links))
+ return links
diff --git a/pyload/plugin/crypter/NosvideoCom.py b/pyload/plugin/crypter/NosvideoCom.py
new file mode 100644
index 000000000..a9f29cf74
--- /dev/null
+++ b/pyload/plugin/crypter/NosvideoCom.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class NosvideoCom(SimpleCrypter):
+ __name = "NosvideoCom"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?nosvideo\.com/\?v=\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Nosvideo.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("igel", "igelkun@myopera.com")]
+
+
+ LINK_PATTERN = r'href="(http://(?:w{3}\.)?nosupload\.com/\?d=\w+)"'
+ NAME_PATTERN = r'<[tT]itle>Watch (?P<N>.+?)<'
diff --git a/pyload/plugin/crypter/OneKhDe.py b/pyload/plugin/crypter/OneKhDe.py
new file mode 100644
index 000000000..c3e5a6a3b
--- /dev/null
+++ b/pyload/plugin/crypter/OneKhDe.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.utils import html_unescape
+from pyload.plugin.Crypter import Crypter
+
+
+class OneKhDe(Crypter):
+ __name = "OneKhDe"
+ __type = "crypter"
+ __version = "0.10"
+
+ __pattern = r'http://(?:www\.)?1kh\.de/f/'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """1kh.de decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org")]
+
+
+ def __init__(self, parent):
+ Crypter.__init__(self, parent)
+ self.parent = parent
+
+
+ def file_exists(self):
+ """ returns True or False
+ """
+ return True
+
+
+ def proceed(self, url, location):
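+ # each DownloadLink_<id> anchor leads to a frame page /l/<id> whose iframe source is the real download URL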
+ url = self.parent.url
+ self.html = self.load(url)
+ link_ids = re.findall(r"<a id=\"DownloadLink_(\d*)\" href=\"http://1kh.de/", self.html)
+ for id in link_ids:
+ new_link = html_unescape(re.search("width=\"100%\" src=\"(.*)\"></iframe>", self.load("http://1kh.de/l/" + id)).group(1))
+ self.urls.append(new_link)
diff --git a/pyload/plugin/crypter/OronCom.py b/pyload/plugin/crypter/OronCom.py
new file mode 100644
index 000000000..24c941f19
--- /dev/null
+++ b/pyload/plugin/crypter/OronCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class OronCom(DeadCrypter):
+ __name = "OronCom"
+ __type = "crypter"
+ __version = "0.11"
+
+ __pattern = r'http://(?:www\.)?oron\.com/folder/\w+'
+ __config = []
+
+ __description = """Oron.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("DHMH", "webmaster@pcProfil.de")]
+
+
+getInfo = create_getInfo(OronCom)
diff --git a/pyload/plugin/crypter/PastebinCom.py b/pyload/plugin/crypter/PastebinCom.py
new file mode 100644
index 000000000..95c925733
--- /dev/null
+++ b/pyload/plugin/crypter/PastebinCom.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class PastebinCom(SimpleCrypter):
+ __name = "PastebinCom"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?pastebin\.com/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Pastebin.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ LINK_PATTERN = r'<div class="de\d+">(https?://[^ <]+)(?:[^<]*)</div>'
+ NAME_PATTERN = r'<div class="paste_box_line1" title="(?P<N>[^"]+)">'
diff --git a/pyload/plugin/crypter/QuickshareCz.py b/pyload/plugin/crypter/QuickshareCz.py
new file mode 100644
index 000000000..5bb68c960
--- /dev/null
+++ b/pyload/plugin/crypter/QuickshareCz.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+
+import re
+from pyload.plugin.Crypter import Crypter
+
+
+class QuickshareCz(Crypter):
+ __name = "QuickshareCz"
+ __type = "crypter"
+ __version = "0.10"
+
+ __pattern = r'http://(?:www\.)?quickshare\.cz/slozka-\d+.*'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Quickshare.cz folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ FOLDER_PATTERN = r'<textarea[^>]*>(.*?)</textarea>'
+ LINK_PATTERN = r'(http://www\.quickshare\.cz/\S+)'
+
+
+ def decrypt(self, pyfile):
+ html = self.load(pyfile.url)
+
+ m = re.search(self.FOLDER_PATTERN, html, re.S)
+ if m is None:
+ self.error(_("FOLDER_PATTERN not found"))
+ self.urls.extend(re.findall(self.LINK_PATTERN, m.group(1)))
diff --git a/pyload/plugin/crypter/RSLayerCom.py b/pyload/plugin/crypter/RSLayerCom.py
new file mode 100644
index 000000000..934b3cac8
--- /dev/null
+++ b/pyload/plugin/crypter/RSLayerCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class RSLayerCom(DeadCrypter):
+ __name = "RSLayerCom"
+ __type = "crypter"
+ __version = "0.21"
+
+ __pattern = r'http://(?:www\.)?rs-layer\.com/directory-'
+ __config = []
+
+ __description = """RS-Layer.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("hzpz", "")]
+
+
+getInfo = create_getInfo(RSLayerCom)
diff --git a/pyload/plugin/crypter/RapidfileshareNet.py b/pyload/plugin/crypter/RapidfileshareNet.py
new file mode 100644
index 000000000..7003f017e
--- /dev/null
+++ b/pyload/plugin/crypter/RapidfileshareNet.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSCrypter import XFSCrypter
+
+
+class RapidfileshareNet(XFSCrypter):
+ __name = "RapidfileshareNet"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?rapidfileshare\.net/users/\w+/\d+/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Rapidfileshare.net folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "rapidfileshare.net"
diff --git a/pyload/plugin/crypter/RelinkUs.py b/pyload/plugin/crypter/RelinkUs.py
new file mode 100644
index 000000000..1773f6a22
--- /dev/null
+++ b/pyload/plugin/crypter/RelinkUs.py
@@ -0,0 +1,293 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import base64
+import binascii
+import re
+import os
+
+from Crypto.Cipher import AES
+from pyload.plugin.Crypter import Crypter
+
+
+class RelinkUs(Crypter):
+ __name = "RelinkUs"
+ __type = "crypter"
+ __version = "3.11"
+
+ __pattern = r'http://(?:www\.)?relink\.us/(f/|((view|go)\.php\?id=))(?P<id>.+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Relink.us decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es"),
+ ("AndroKev", "neureither.kevin@gmail.com")]
+
+
+ PREFERRED_LINK_SOURCES = ["cnl2", "dlc", "web"]
+
+ OFFLINE_TOKEN = r'<title>Tattooside'
+
+ PASSWORD_TOKEN = r'container_password.php'
+ PASSWORD_ERROR_TOKEN = r'You have entered an incorrect password'
+ PASSWORD_SUBMIT_URL = r'http://www.relink.us/container_password.php'
+
+ CAPTCHA_TOKEN = r'container_captcha.php'
+ CAPTCHA_ERROR_TOKEN = r'You have solved the captcha wrong'
+ CAPTCHA_IMG_URL = r'http://www.relink.us/core/captcha/circlecaptcha.php'
+ CAPTCHA_SUBMIT_URL = r'http://www.relink.us/container_captcha.php'
+
+ FILE_TITLE_REGEX = r'<th>Title</th><td>(.*)</td></tr>'
+ FILE_NOTITLE = r'No title'
+
+ CNL2_FORM_REGEX = r'<form id="cnl_form-(.*?)</form>'
+ CNL2_FORMINPUT_REGEX = r'<input.*?name="%s".*?value="(.*?)"'
+ CNL2_JK_KEY = "jk"
+ CNL2_CRYPTED_KEY = "crypted"
+
+ DLC_LINK_REGEX = r'<a href=".*?" class="dlc_button" target="_blank">'
+ DLC_DOWNLOAD_URL = r'http://www.relink.us/download.php'
+
+ WEB_FORWARD_REGEX = r'getFile\(\'(?P<link>.+)\'\)'
+ WEB_FORWARD_URL = r'http://www.relink.us/frame.php'
+ WEB_LINK_REGEX = r'<iframe name="Container" height="100%" frameborder="no" width="100%" src="(?P<link>.+)"></iframe>'
+
+
+ def setup(self):
+ self.fileid = None
+ self.package = None
+ self.password = None
+ self.captcha = False
+
+
+ def decrypt(self, pyfile):
+ # Init
+ self.initPackage(pyfile)
+
+ # Request package
+ self.requestPackage()
+
+ # Check for online
+ if not self.isOnline():
+ self.offline()
+
+ # Check for protection
+ if self.isPasswordProtected():
+ self.unlockPasswordProtection()
+ self.handleErrors()
+
+ if self.isCaptchaProtected():
+ self.captcha = True
+ self.unlockCaptchaProtection()
+ self.handleErrors()
+
+ # Get package name and folder
+ (package_name, folder_name) = self.getPackageInfo()
+
+ # Extract package links
+ package_links = []
+ for sources in self.PREFERRED_LINK_SOURCES:
+ package_links.extend(self.handleLinkSource(sources))
+ if package_links: # use only first source which provides links
+ break
+ package_links = set(package_links)
+
+ # Pack
+ if package_links:
+ self.packages = [(package_name, package_links, folder_name)]
+
+
+ def initPackage(self, pyfile):
+ self.fileid = re.match(self.__pattern, pyfile.url).group('id')
+ self.package = pyfile.package()
+ self.password = self.getPassword()
+
+
+ def requestPackage(self):
+ self.html = self.load(self.pyfile.url, decode=True)
+
+
+ def isOnline(self):
+ if self.OFFLINE_TOKEN in self.html:
+ self.logDebug("File not found")
+ return False
+ return True
+
+
+ def isPasswordProtected(self):
+ if self.PASSWORD_TOKEN in self.html:
+ self.logDebug("Links are password protected")
+ return True
+ return False
+
+
+ def isCaptchaProtected(self):
+ if self.CAPTCHA_TOKEN in self.html:
+ self.logDebug("Links are captcha protected")
+ return True
+ return False
+
+
+ def unlockPasswordProtection(self):
+ self.logDebug("Submitting password [%s] for protected links" % self.password)
+ passwd_url = self.PASSWORD_SUBMIT_URL + "?id=%s" % self.fileid
+ passwd_data = {'id': self.fileid, 'password': self.password, 'pw': 'submit'}
+ self.html = self.load(passwd_url, post=passwd_data, decode=True)
+
+
+ def unlockCaptchaProtection(self):
+ self.logDebug("Request user positional captcha resolving")
+ captcha_img_url = self.CAPTCHA_IMG_URL + "?id=%s" % self.fileid
+ coords = self.decryptCaptcha(captcha_img_url, forceUser=True, imgtype="png", result_type='positional')
+ self.logDebug("Captcha resolved, coords [%s]" % str(coords))
+ captcha_post_url = self.CAPTCHA_SUBMIT_URL + "?id=%s" % self.fileid
+ captcha_post_data = {'button.x': coords[0], 'button.y': coords[1], 'captcha': 'submit'}
+ self.html = self.load(captcha_post_url, post=captcha_post_data, decode=True)
+
+
+ def getPackageInfo(self):
+ name = folder = None
+
+ # Try to get info from web
+ m = re.search(self.FILE_TITLE_REGEX, self.html)
+ if m is not None:
+ title = m.group(1).strip()
+ if not self.FILE_NOTITLE in title:
+ name = folder = title
+ self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder))
+
+ # Fallback to defaults
+ if not name or not folder:
+ name = self.package.name
+ folder = self.package.folder
+ self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
+
+ # Return package info
+ return name, folder
+
+
+ def handleErrors(self):
+ if self.PASSWORD_ERROR_TOKEN in self.html:
+ msg = "Incorrect password, please set right password on 'Edit package' form and retry"
+ self.logDebug(msg)
+ self.fail(_(msg))
+
+ if self.captcha:
+ if self.CAPTCHA_ERROR_TOKEN in self.html:
+ self.invalidCaptcha()
+ self.retry()
+ else:
+ self.correctCaptcha()
+
+
+ def handleLinkSource(self, source):
+ if source == 'cnl2':
+ return self.handleCNL2Links()
+ elif source == 'dlc':
+ return self.handleDLCLinks()
+ elif source == 'web':
+ return self.handleWEBLinks()
+ else:
+ self.error('Unknown source type "%s" (this is probably a bug)' % source)
+
+
+ def handleCNL2Links(self):
+ self.logDebug("Search for CNL2 links")
+ package_links = []
+ m = re.search(self.CNL2_FORM_REGEX, self.html, re.S)
+ if m is not None:
+ cnl2_form = m.group(1)
+ try:
+ (vcrypted, vjk) = self._getCipherParams(cnl2_form)
+ for (crypted, jk) in zip(vcrypted, vjk):
+ package_links.extend(self._getLinks(crypted, jk))
+ except Exception:
+ self.logDebug("Unable to decrypt CNL2 links")
+ return package_links
+
+
+ def handleDLCLinks(self):
+ self.logDebug("Search for DLC links")
+ package_links = []
+ m = re.search(self.DLC_LINK_REGEX, self.html)
+ if m is not None:
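+ # download the DLC container to the configured download folder and append the local path as a package link (pyLoad handles .dlc files as containers)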
+ container_url = self.DLC_DOWNLOAD_URL + "?id=%s&dlc=1" % self.fileid
+ self.logDebug("Downloading DLC container link [%s]" % container_url)
+ try:
+ dlc = self.load(container_url)
+ dlc_filename = self.fileid + ".dlc"
+ dlc_filepath = os.path.join(self.config['general']['download_folder'], dlc_filename)
+ with open(dlc_filepath, "wb") as f:
+ f.write(dlc)
+ package_links.append(dlc_filepath)
+ except Exception:
+ self.fail("Unable to download DLC container")
+ return package_links
+
+
+ def handleWEBLinks(self):
+ self.logDebug("Search for WEB links")
+
+ package_links = []
+ params = re.findall(self.WEB_FORWARD_REGEX, self.html)
+
+ self.logDebug("Decrypting %d Web links" % len(params))
+
+ for index, param in enumerate(params):
+ try:
+ url = self.WEB_FORWARD_URL + "?%s" % param
+
+ self.logDebug("Decrypting Web link %d, %s" % (index + 1, url))
+
+ res = self.load(url, decode=True)
+ link = re.search(self.WEB_LINK_REGEX, res).group('link')
+
+ package_links.append(link)
+
+ except Exception, detail:
+ self.logDebug("Error decrypting Web link %s, %s" % (index, detail))
+
+ self.setWait(4)
+ self.wait()
+
+ return package_links
+
+
+ def _getCipherParams(self, cnl2_form):
+ # Get jk
+ jk_re = self.CNL2_FORMINPUT_REGEX % self.CNL2_JK_KEY
+ vjk = re.findall(jk_re, cnl2_form, re.I)
+
+ # Get crypted
+ crypted_re = self.CNL2_FORMINPUT_REGEX % RelinkUs.CNL2_CRYPTED_KEY
+ vcrypted = re.findall(crypted_re, cnl2_form, re.I)
+
+ # Log and return
+ self.logDebug("Detected %d crypted blocks" % len(vcrypted))
+ return vcrypted, vjk
+
+
+ def _getLinks(self, crypted, jk):
+ # Get key
+ jreturn = self.js.eval("%s f()" % jk)
+ self.logDebug("JsEngine returns value [%s]" % jreturn)
+ key = binascii.unhexlify(jreturn)
+
+ # Decode crypted
+ crypted = base64.standard_b64decode(crypted)
+
+ # Decrypt
+ Key = key
+ IV = key
+ obj = AES.new(Key, AES.MODE_CBC, IV)
+ text = obj.decrypt(crypted)
+
+ # Extract links
+ text = text.replace("\x00", "").replace("\r", "")
+ links = text.split("\n")
+ links = filter(lambda x: x != "", links)
+
+ # Log and return
+ self.logDebug("Package has %d links" % len(links))
+ return links
diff --git a/pyload/plugin/crypter/SafelinkingNet.py b/pyload/plugin/crypter/SafelinkingNet.py
new file mode 100644
index 000000000..1b5eeeb63
--- /dev/null
+++ b/pyload/plugin/crypter/SafelinkingNet.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from BeautifulSoup import BeautifulSoup
+
+from pyload.utils import json_loads
+from pyload.plugin.Crypter import Crypter
+from pyload.plugin.internal.captcha import SolveMedia
+
+
+class SafelinkingNet(Crypter):
+ __name = "SafelinkingNet"
+ __type = "crypter"
+ __version = "0.11"
+
+ __pattern = r'https?://(?:www\.)?safelinking\.net/([pd])/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Safelinking.net decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("quareevo", "quareevo@arcor.de")]
+
+
+ SOLVEMEDIA_PATTERN = "solvemediaApiKey = '([\w.-]+)';"
+
+
+ def decrypt(self, pyfile):
+ url = pyfile.url
+
+ if re.match(self.__pattern, url).group(1) == "d":
+
+ header = self.load(url, just_header=True)
+ if 'location' in header:
+ self.urls = [header['location']]
+ else:
+ self.error(_("Couldn't find forwarded Link"))
+
+ else:
+ postData = {"post-protect": "1"}
+
+ self.html = self.load(url)
+
+ if "link-password" in self.html:
+ postData['link-password'] = self.getPassword()
+
+ if "altcaptcha" in self.html:
+ for _i in xrange(5):
+ m = re.search(self.SOLVEMEDIA_PATTERN, self.html)
+ if m:
+ captchaKey = m.group(1)
+ captcha = SolveMedia(self)
+ captchaProvider = "Solvemedia"
+ else:
+ self.fail(_("Error parsing captcha"))
+
+ challenge, response = captcha.challenge(captchaKey)
+ postData['adcopy_challenge'] = challenge
+ postData['adcopy_response'] = response
+
+ self.html = self.load(url, post=postData)
+ if "The password you entered was incorrect" in self.html:
+ self.fail(_("Incorrect Password"))
+ if not "The CAPTCHA code you entered was wrong" in self.html:
+ break
+
+ pyfile.package().password = ""
+ soup = BeautifulSoup(self.html)
+ scripts = soup.findAll("script")
+ for s in scripts:
+ if "d_links" in s.text:
+ break
+ m = re.search('d_links":(\[.*?\])', s.text)
+ if m:
+ linkDict = json_loads(m.group(1))
+ for link in linkDict:
+ if not "http://" in link['full']:
+ self.urls.append("https://safelinking.net/d/" + link['full'])
+ else:
+ self.urls.append(link['full'])
diff --git a/pyload/plugin/crypter/SecuredIn.py b/pyload/plugin/crypter/SecuredIn.py
new file mode 100644
index 000000000..a5e12a352
--- /dev/null
+++ b/pyload/plugin/crypter/SecuredIn.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class SecuredIn(DeadCrypter):
+ __name = "SecuredIn"
+ __type = "crypter"
+ __version = "0.21"
+
+ __pattern = r'http://(?:www\.)?secured\.in/download-[\d]+-\w{8}\.html'
+ __config = []
+
+ __description = """Secured.in decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+getInfo = create_getInfo(SecuredIn)
diff --git a/pyload/plugin/crypter/SexuriaCom.py b/pyload/plugin/crypter/SexuriaCom.py
new file mode 100644
index 000000000..08f289541
--- /dev/null
+++ b/pyload/plugin/crypter/SexuriaCom.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Crypter import Crypter
+
+
+class SexuriaCom(Crypter):
+ __name = "SexuriaCom"
+ __type = "crypter"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?sexuria\.com/(v1/)?(Pornos_Kostenlos_.+?_(\d+)\.html|dl_links_\d+_\d+\.html|id=\d+\&part=\d+\&link=\d+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Sexuria.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("NETHead", "NETHead.AT.gmx.DOT.net")]
+
+
+ PATTERN_SUPPORTED_MAIN = re.compile(r'http://(www\.)?sexuria\.com/(v1/)?Pornos_Kostenlos_.+?_(\d+)\.html', flags=re.I)
+ PATTERN_SUPPORTED_CRYPT = re.compile(r'http://(www\.)?sexuria\.com/(v1/)?dl_links_\d+_(?P<ID>\d+)\.html', flags=re.I)
+ PATTERN_SUPPORTED_REDIRECT = re.compile(r'http://(www\.)?sexuria\.com/out\.php\?id=(?P<ID>\d+)\&part=\d+\&link=\d+', flags=re.I)
+ PATTERN_TITLE = re.compile(r'<title> - (?P<TITLE>.*) Sexuria - Kostenlose Pornos - Rapidshare XXX Porn</title>', flags=re.I)
+ PATTERN_PASSWORD = re.compile(r'<strong>Passwort: </strong></div></td>.*?bgcolor="#EFEFEF">(?P<PWD>.*?)</td>', flags=re.I | re.S)
+ PATTERN_DL_LINK_PAGE = re.compile(r'"(dl_links_\d+_\d+\.html)"', flags=re.I)
+ PATTERN_REDIRECT_LINKS = re.compile(r'value="(http://sexuria\.com/out\.php\?id=\d+\&part=\d+\&link=\d+)" readonly', flags=re.I)
+
+
+ def decrypt(self, pyfile):
+ # Init
+ self.pyfile = pyfile
+ self.package = pyfile.package()
+
+ # Get package links
+ package_name, self.links, folder_name, package_pwd = self.decryptLinks(self.pyfile.url)
+ self.packages = [(package_name, self.links, folder_name)]
+
+
+ def decryptLinks(self, url):
+ linklist = []
+ name = self.package.name
+ folder = self.package.folder
+ password = None
+
+ if re.match(self.PATTERN_SUPPORTED_MAIN, url):
+ # Processing main page
+ html = self.load(url)
+ links = re.findall(self.PATTERN_DL_LINK_PAGE, html)
+ for link in links:
+ linklist.append("http://sexuria.com/v1/" + link)
+
+ elif re.match(self.PATTERN_SUPPORTED_REDIRECT, url):
+ # Processing direct redirect link (out.php), redirecting to main page
+ id = re.search(self.PATTERN_SUPPORTED_REDIRECT, url).group('ID')
+ if id:
+ linklist.append("http://sexuria.com/v1/Pornos_Kostenlos_liebe_%s.html" % id)
+
+ elif re.match(self.PATTERN_SUPPORTED_CRYPT, url):
+ # Extract info from main file
+ id = re.search(self.PATTERN_SUPPORTED_CRYPT, url).group('ID')
+ html = self.load("http://sexuria.com/v1/Pornos_Kostenlos_info_%s.html" % id, decode=True)
+
+ title = re.search(self.PATTERN_TITLE, html).group('TITLE').strip()
+ if title:
+ name = folder = title
+ self.logDebug("Package info found, name [%s] and folder [%s]" % (name, folder))
+
+ pwd = re.search(self.PATTERN_PASSWORD, html).group('PWD')
+ if pwd:
+ password = pwd.strip()
+ self.logDebug("Password info [%s] found" % password)
+
+ # Process link (dl_link)
+ html = self.load(url)
+ links = re.findall(self.PATTERN_REDIRECT_LINKS, html)
+ if len(links) == 0:
+ self.logError("Broken for link %s" % url)
+ else:
+ for link in links:
+ link = link.replace("http://sexuria.com/", "http://www.sexuria.com/")
+ finallink = self.load(link, just_header=True)['location']
+ if not finallink or "sexuria.com/" in finallink:
+ self.LogError("Broken for link %s" % link)
+ else:
+ linklist.append(finallink)
+
+ # Debug log
+ self.logDebug("%d supported links" % len(linklist))
+ for i, link in enumerate(linklist):
+ self.logDebug("Supported link %d, %s" % (i + 1, link))
+
+ return name, linklist, folder, password
diff --git a/pyload/plugin/crypter/ShareLinksBiz.py b/pyload/plugin/crypter/ShareLinksBiz.py
new file mode 100644
index 000000000..b3f8f8b75
--- /dev/null
+++ b/pyload/plugin/crypter/ShareLinksBiz.py
@@ -0,0 +1,286 @@
+# -*- coding: utf-8 -*-
+
+import base64
+import binascii
+import re
+
+from Crypto.Cipher import AES
+from pyload.plugin.Crypter import Crypter
+
+
+class ShareLinksBiz(Crypter):
+ __name = "ShareLinksBiz"
+ __type = "crypter"
+ __version = "1.14"
+
+ __pattern = r'http://(?:www\.)?(share-links|s2l)\.biz/(?P<ID>_?\w+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Share-Links.biz decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es")]
+
+
+ def setup(self):
+ self.baseUrl = None
+ self.fileId = None
+ self.package = None
+ self.captcha = False
+
+
+ def decrypt(self, pyfile):
+ # Init
+ self.initFile(pyfile)
+
+ # Request package
+ url = self.baseUrl + '/' + self.fileId
+ self.html = self.load(url, decode=True)
+
+ # Unblock server (load all images)
+ self.unblockServer()
+
+ # Check for protection
+ if self.isPasswordProtected():
+ self.unlockPasswordProtection()
+ self.handleErrors()
+
+ if self.isCaptchaProtected():
+ self.captcha = True
+ self.unlockCaptchaProtection()
+ self.handleErrors()
+
+ # Extract package links
+ package_links = []
+ package_links.extend(self.handleWebLinks())
+ package_links.extend(self.handleContainers())
+ package_links.extend(self.handleCNL2())
+ package_links = set(package_links)
+
+ # Get package info
+ package_name, package_folder = self.getPackageInfo()
+
+ # Pack
+ self.packages = [(package_name, package_links, package_folder)]
+
+
+ def initFile(self, pyfile):
+ url = pyfile.url
+ if 's2l.biz' in url:
+ url = self.load(url, just_header=True)['location']
+ self.baseUrl = "http://www.%s.biz" % re.match(self.__pattern, url).group(1)
+ self.fileId = re.match(self.__pattern, url).group('ID')
+ self.package = pyfile.package()
+
+
+ def isOnline(self):
+ if "No usable content was found" in self.html:
+ self.logDebug("File not found")
+ return False
+ return True
+
+
+ def isPasswordProtected(self):
+ if re.search(r'''<form.*?id="passwordForm".*?>''', self.html):
+ self.logDebug("Links are protected")
+ return True
+ return False
+
+
+ def isCaptchaProtected(self):
+ if '<map id="captchamap"' in self.html:
+ self.logDebug("Links are captcha protected")
+ return True
+ return False
+
+
+ def unblockServer(self):
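+ # load every template image referenced on the page (the site seems to require this before it serves the folder content)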
+ imgs = re.findall(r"(/template/images/.*?\.gif)", self.html)
+ for img in imgs:
+ self.load(self.baseUrl + img)
+
+
+ def unlockPasswordProtection(self):
+ password = self.getPassword()
+ self.logDebug("Submitting password [%s] for protected links" % password)
+ post = {"password": password, 'login': 'Submit form'}
+ url = self.baseUrl + '/' + self.fileId
+ self.html = self.load(url, post=post, decode=True)
+
+
+ def unlockCaptchaProtection(self):
+ # Get captcha map
+ captchaMap = self._getCaptchaMap()
+ self.logDebug("Captcha map with [%d] positions" % len(captchaMap.keys()))
+
+ # Request user for captcha coords
+ m = re.search(r'<img src="/captcha.gif\?d=(.*?)&amp;PHPSESSID=(.*?)&amp;legend=1"', self.html)
+ captchaUrl = self.baseUrl + '/captcha.gif?d=%s&PHPSESSID=%s' % (m.group(1), m.group(2))
+ self.logDebug("Waiting user for correct position")
+ coords = self.decryptCaptcha(captchaUrl, forceUser=True, imgtype="gif", result_type='positional')
+ self.logDebug("Captcha resolved, coords [%s]" % str(coords))
+
+ # Resolve captcha
+ href = self._resolveCoords(coords, captchaMap)
+ if href is None:
+ self.invalidCaptcha()
+ self.retry(wait_time=5)
+ url = self.baseUrl + href
+ self.html = self.load(url, decode=True)
+
+
+ def _getCaptchaMap(self):
+ mapp = {}
+ for m in re.finditer(r'<area shape="rect" coords="(.*?)" href="(.*?)"', self.html):
+ rect = eval('(' + m.group(1) + ')')
+ href = m.group(2)
+ mapp[rect] = href
+ return mapp
+
+
+ def _resolveCoords(self, coords, captchaMap):
+ x, y = coords
+ for rect, href in captchaMap.iteritems():
+ x1, y1, x2, y2 = rect
+ if (x >= x1 and x <= x2) and (y >= y1 and y <= y2):
+ return href
+
+
+ def handleErrors(self):
+ if "The inserted password was wrong" in self.html:
+ self.logDebug("Incorrect password, please set right password on 'Edit package' form and retry")
+ self.fail(_("Incorrect password, please set right password on 'Edit package' form and retry"))
+
+ if self.captcha:
+ if "Your choice was wrong" in self.html:
+ self.invalidCaptcha()
+ self.retry(wait_time=5)
+ else:
+ self.correctCaptcha()
+
+
+ def getPackageInfo(self):
+ name = folder = None
+
+ # Extract from web package header
+ title_re = r'<h2><img.*?/>(.*)</h2>'
+ m = re.search(title_re, self.html, re.S)
+ if m is not None:
+ title = m.group(1).strip()
+ if 'unnamed' not in title:
+ name = folder = title
+ self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder))
+
+ # Fallback to defaults
+ if not name or not folder:
+ name = self.package.name
+ folder = self.package.folder
+ self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
+
+ # Return package info
+ return name, folder
+
+
+ def handleWebLinks(self):
+ package_links = []
+ self.logDebug("Handling Web links")
+
+ #@TODO: Gather paginated web links
+ pattern = r'javascript:_get\(\'(.*?)\', \d+, \'\'\)'
+ ids = re.findall(pattern, self.html)
+ self.logDebug("Decrypting %d Web links" % len(ids))
+ for i, ID in enumerate(ids):
+ try:
+ self.logDebug("Decrypting Web link %d, [%s]" % (i + 1, ID))
+
+ dwLink = self.baseUrl + "/get/lnk/" + ID
+ res = self.load(dwLink)
+
+ code = re.search(r'frm/(\d+)', res).group(1)
+ fwLink = self.baseUrl + "/get/frm/" + code
+ res = self.load(fwLink)
+
+ jscode = re.search(r'<script language="javascript">\s*eval\((.*)\)\s*</script>', res, re.S).group(1)
+ jscode = self.js.eval("f = %s" % jscode)
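+ # run the obfuscated redirect script inside a stub browser environment (fake window/parent objects) and read the target it writes to parent.frames.Main.location.href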
+ jslauncher = "window=''; parent={frames:{Main:{location:{href:''}}},location:''}; %s; parent.frames.Main.location.href"
+
+ dlLink = self.js.eval(jslauncher % jscode)
+
+ self.logDebug("JsEngine returns value [%s] for redirection link" % dlLink)
+
+ package_links.append(dlLink)
+ except Exception, detail:
+ self.logDebug("Error decrypting Web link [%s], %s" % (ID, detail))
+ return package_links
+
+
+ def handleContainers(self):
+ package_links = []
+ self.logDebug("Handling Container links")
+
+ pattern = r'javascript:_get\(\'(.*?)\', 0, \'(rsdf|ccf|dlc)\'\)'
+ containersLinks = re.findall(pattern, self.html)
+ self.logDebug("Decrypting %d Container links" % len(containersLinks))
+ for containerLink in containersLinks:
+ link = "%s/get/%s/%s" % (self.baseUrl, containerLink[1], containerLink[0])
+ package_links.append(link)
+ return package_links
+
+
+ def handleCNL2(self):
+ package_links = []
+ self.logDebug("Handling CNL2 links")
+
+ if '/lib/cnl2/ClicknLoad.swf' in self.html:
+ try:
+ (crypted, jk) = self._getCipherParams()
+ package_links.extend(self._getLinks(crypted, jk))
+ except Exception:
+ self.fail(_("Unable to decrypt CNL2 links"))
+ return package_links
+
+
+ def _getCipherParams(self):
+ # Request CNL2
+ code = re.search(r'ClicknLoad.swf\?code=(.*?)"', self.html).group(1)
+ url = "%s/get/cnl2/%s" % (self.baseUrl, code)
+ res = self.load(url)
+ params = res.split(";;")
+
+ # Get jk
+ strlist = list(base64.standard_b64decode(params[1]))
+ strlist.reverse()
+ jk = ''.join(strlist)
+
+ # Get crypted
+ strlist = list(base64.standard_b64decode(params[2]))
+ strlist.reverse()
+ crypted = ''.join(strlist)
+
+ # Log and return
+ return crypted, jk
+
+
+ def _getLinks(self, crypted, jk):
+ # Get key
+ jreturn = self.js.eval("%s f()" % jk)
+ self.logDebug("JsEngine returns value [%s]" % jreturn)
+ key = binascii.unhexlify(jreturn)
+
+ # Decode crypted
+ crypted = base64.standard_b64decode(crypted)
+
+ # Decrypt
+ Key = key
+ IV = key
+ obj = AES.new(Key, AES.MODE_CBC, IV)
+ text = obj.decrypt(crypted)
+
+ # Extract links
+ text = text.replace("\x00", "").replace("\r", "")
+ links = text.split("\n")
+ links = filter(lambda x: x != "", links)
+
+ # Log and return
+ self.logDebug("Block has %d links" % len(links))
+ return links
diff --git a/pyload/plugin/crypter/SharingmatrixCom.py b/pyload/plugin/crypter/SharingmatrixCom.py
new file mode 100644
index 000000000..53f1afa32
--- /dev/null
+++ b/pyload/plugin/crypter/SharingmatrixCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class SharingmatrixCom(DeadCrypter):
+ __name = "SharingmatrixCom"
+ __type = "crypter"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?sharingmatrix\.com/folder/\w+'
+
+ __description = """Sharingmatrix.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(SharingmatrixCom)
diff --git a/pyload/plugin/crypter/SpeedLoadOrg.py b/pyload/plugin/crypter/SpeedLoadOrg.py
new file mode 100644
index 000000000..e40d321be
--- /dev/null
+++ b/pyload/plugin/crypter/SpeedLoadOrg.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class SpeedLoadOrg(DeadCrypter):
+ __name = "SpeedLoadOrg"
+ __type = "crypter"
+ __version = "0.30"
+
+ __pattern = r'http://(?:www\.)?speedload\.org/(\d+~f$|folder/\d+/)'
+ __config = []
+
+ __description = """Speedload decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(SpeedLoadOrg)
diff --git a/pyload/plugin/crypter/StealthTo.py b/pyload/plugin/crypter/StealthTo.py
new file mode 100644
index 000000000..0d05c3c64
--- /dev/null
+++ b/pyload/plugin/crypter/StealthTo.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class StealthTo(DeadCrypter):
+ __name = "StealthTo"
+ __type = "crypter"
+ __version = "0.20"
+
+ __pattern = r'http://(?:www\.)?stealth\.to/folder/.+'
+ __config = []
+
+ __description = """Stealth.to decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org")]
+
+
+getInfo = create_getInfo(StealthTo)
diff --git a/pyload/plugin/crypter/TnyCz.py b/pyload/plugin/crypter/TnyCz.py
new file mode 100644
index 000000000..19427b845
--- /dev/null
+++ b/pyload/plugin/crypter/TnyCz.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+import re
+
+
+class TnyCz(SimpleCrypter):
+ __name = "TnyCz"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?tny\.cz/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Tny.cz decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'<title>(?P<N>.+) - .+</title>'
+
+
+ def getLinks(self):
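+ # follow the paste's save/export link and treat every non-empty line of the returned text as a download link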
+ m = re.search(r'<a id=\'save_paste\' href="(.+save\.php\?hash=.+)">', self.html)
+ return re.findall(".+", self.load(m.group(1), decode=True)) if m else None
diff --git a/pyload/plugin/crypter/TrailerzoneInfo.py b/pyload/plugin/crypter/TrailerzoneInfo.py
new file mode 100644
index 000000000..106524f1c
--- /dev/null
+++ b/pyload/plugin/crypter/TrailerzoneInfo.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class TrailerzoneInfo(DeadCrypter):
+ __name = "TrailerzoneInfo"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?trailerzone\.info/.*?'
+ __config = []
+
+ __description = """TrailerZone.info decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("godofdream", "soilfiction@gmail.com")]
+
+
+getInfo = create_getInfo(TrailerzoneInfo)
diff --git a/pyload/plugin/crypter/TurbobitNet.py b/pyload/plugin/crypter/TurbobitNet.py
new file mode 100644
index 000000000..0ea49c694
--- /dev/null
+++ b/pyload/plugin/crypter/TurbobitNet.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+from pyload.utils import json_loads
+
+
+class TurbobitNet(SimpleCrypter):
+ __name = "TurbobitNet"
+ __type = "crypter"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?turbobit\.net/download/folder/(?P<ID>\w+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Turbobit.net folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'src=\'/js/lib/grid/icon/folder.png\'> <span>(?P<N>.+?)</span>'
+
+
+ def _getLinks(self, id, page=1):
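+ # the gridFile endpoint returns the folder content as JSON, 200 rows per page; recurse until an empty page is returned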
+ gridFile = self.load("http://turbobit.net/downloadfolder/gridFile",
+ get={"rootId": id, "rows": 200, "page": page}, decode=True)
+ grid = json_loads(gridFile)
+
+ if grid['rows']:
+ for i in grid['rows']:
+ yield i['id']
+ for id in self._getLinks(id, page + 1):
+ yield id
+ else:
+ return
+
+
+ def getLinks(self):
+ id = re.match(self.__pattern, self.pyfile.url).group("ID")
+ fixurl = lambda id: "http://turbobit.net/%s.html" % id
+ return map(fixurl, self._getLinks(id))
diff --git a/pyload/plugin/crypter/TusfilesNet.py b/pyload/plugin/crypter/TusfilesNet.py
new file mode 100644
index 000000000..55b9c5deb
--- /dev/null
+++ b/pyload/plugin/crypter/TusfilesNet.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+import math
+import re
+from urlparse import urljoin
+
+from pyload.plugin.internal.XFSCrypter import XFSCrypter
+
+
+class TusfilesNet(XFSCrypter):
+ __name = "TusfilesNet"
+ __type = "crypter"
+ __version = "0.07"
+
+ __pattern = r'https?://(?:www\.)?tusfiles\.net/go/(?P<ID>\w+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Tusfiles.net folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ HOSTER_DOMAIN = "tusfiles.net"
+
+ PAGES_PATTERN = r'>\((\d+) \w+\)<'
+
+ URL_REPLACEMENTS = [(__pattern + ".*", r'https://www.tusfiles.net/go/\g<ID>/')]
+
+
+ def loadPage(self, page_n):
+ return self.load(urljoin(self.pyfile.url, str(page_n)), decode=True)
+
+
+ def handleMultiPages(self):
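+ # derive the page count from the total file count shown on the page (25 links per page) and walk the remaining pages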
+ pages = re.search(self.PAGES_PATTERN, self.html)
+ if pages:
+ pages = int(math.ceil(int(pages.group(1)) / 25.0))
+ else:
+ return
+
+ for p in xrange(2, pages + 1):
+ self.html = self.loadPage(p)
+ self.links += self.getLinks()
diff --git a/pyload/plugin/crypter/UlozTo.py b/pyload/plugin/crypter/UlozTo.py
new file mode 100644
index 000000000..d886c9d08
--- /dev/null
+++ b/pyload/plugin/crypter/UlozTo.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+
+import re
+from pyload.plugin.Crypter import Crypter
+
+
+class UlozTo(Crypter):
+ __name = "UlozTo"
+ __type = "crypter"
+ __version = "0.20"
+
+ __pattern = r'http://(?:www\.)?(uloz\.to|ulozto\.(cz|sk|net)|bagruj\.cz|zachowajto\.pl)/(m|soubory)/.*'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Uloz.to folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ FOLDER_PATTERN = r'<ul class="profile_files">(.*?)</ul>'
+ LINK_PATTERN = r'<br /><a href="/([^"]+)">[^<]+</a>'
+ NEXT_PAGE_PATTERN = r'<a class="next " href="/([^"]+)">&nbsp;</a>'
+
+
+ def decrypt(self, pyfile):
+ html = self.load(pyfile.url)
+
+ new_links = []
+ for i in xrange(1, 100):
+ self.logInfo(_("Fetching links from page %i") % i)
+ m = re.search(self.FOLDER_PATTERN, html, re.S)
+ if m is None:
+ self.error(_("FOLDER_PATTERN not found"))
+
+ new_links.extend(re.findall(self.LINK_PATTERN, m.group(1)))
+ m = re.search(self.NEXT_PAGE_PATTERN, html)
+ if m:
+ html = self.load("http://ulozto.net/" + m.group(1))
+ else:
+ break
+ else:
+ self.logInfo(_("Limit of 99 pages reached, aborting"))
+
+ if new_links:
+ self.urls = ["http://ulozto.net/%s" % s for s in new_links]
diff --git a/pyload/plugin/crypter/UploadableCh.py b/pyload/plugin/crypter/UploadableCh.py
new file mode 100644
index 000000000..bf6bd09eb
--- /dev/null
+++ b/pyload/plugin/crypter/UploadableCh.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class UploadableCh(SimpleCrypter):
+ __name = "UploadableCh"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?uploadable\.ch/list/\w+'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Uploadable.ch folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("guidobelix", "guidobelix@hotmail.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ LINK_PATTERN = r'"(.+?)" class="icon_zipfile">'
+ NAME_PATTERN = r'<div class="folder"><span>&nbsp;</span>(?P<N>.+?)</div>'
+ OFFLINE_PATTERN = r'We are sorry... The URL you entered cannot be found on the server.'
+ TEMP_OFFLINE_PATTERN = r'<div class="icon_err">'
diff --git a/pyload/plugin/crypter/UploadedTo.py b/pyload/plugin/crypter/UploadedTo.py
new file mode 100644
index 000000000..fbae7e7d6
--- /dev/null
+++ b/pyload/plugin/crypter/UploadedTo.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class UploadedTo(SimpleCrypter):
+ __name = "UploadedTo"
+ __type = "crypter"
+ __version = "0.42"
+
+ __pattern = r'http://(?:www\.)?(uploaded|ul)\.(to|net)/(f|folder|list)/(?P<id>\w+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """UploadedTo decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ PLAIN_PATTERN = r'<small class="date"><a href="(?P<plain>[\w/]+)" onclick='
+ NAME_PATTERN = r'<title>(?P<N>.+?)<'
+
+
+ def getLinks(self):
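+ # the folder page links to a plain-text listing of all contained file URLs; fetch it and drop the trailing empty line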
+ m = re.search(self.PLAIN_PATTERN, self.html)
+ if m is None:
+ self.error(_("PLAIN_PATTERN not found"))
+
+ plain_link = urljoin("http://uploaded.net/", m.group('plain'))
+ return self.load(plain_link).split('\n')[:-1]
diff --git a/pyload/plugin/crypter/WiiReloadedOrg.py b/pyload/plugin/crypter/WiiReloadedOrg.py
new file mode 100644
index 000000000..66ef764e4
--- /dev/null
+++ b/pyload/plugin/crypter/WiiReloadedOrg.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class WiiReloadedOrg(DeadCrypter):
+ __name = "WiiReloadedOrg"
+ __type = "crypter"
+ __version = "0.11"
+
+ __pattern = r'http://(?:www\.)?wii-reloaded\.org/protect/get\.php\?i=.+'
+ __config = []
+
+ __description = """Wii-Reloaded.org decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("hzpz", "")]
+
+
+getInfo = create_getInfo(WiiReloadedOrg)
diff --git a/pyload/plugin/crypter/WuploadCom.py b/pyload/plugin/crypter/WuploadCom.py
new file mode 100644
index 000000000..ad1a789a0
--- /dev/null
+++ b/pyload/plugin/crypter/WuploadCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadCrypter import DeadCrypter, create_getInfo
+
+
+class WuploadCom(DeadCrypter):
+ __name = "WuploadCom"
+ __type = "crypter"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?wupload\.com/folder/\w+'
+
+ __description = """Wupload.com folder decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(WuploadCom)
diff --git a/pyload/plugin/crypter/XFileSharingPro.py b/pyload/plugin/crypter/XFileSharingPro.py
new file mode 100644
index 000000000..eedf7ab26
--- /dev/null
+++ b/pyload/plugin/crypter/XFileSharingPro.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.XFSCrypter import XFSCrypter
+
+
+class XFileSharingPro(XFSCrypter):
+ __name = "XFileSharingPro"
+ __type = "crypter"
+ __version = "0.03"
+
+ __pattern = r'^unmatchable$'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """XFileSharingPro dummy folder decrypter plugin for hook"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
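+ # Prefix every log message with the plugin and resolved hoster name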
+ def _log(self, type, args):
+ msg = " | ".join([str(a).strip() for a in args if a])
+ logger = getattr(self.log, type)
+ logger("%s: %s: %s" % (self.__name, self.HOSTER_NAME, msg or _("%s MARK" % type.upper())))
+
+
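+ # The URL pattern is injected at runtime by the XFileSharingPro hook; derive hoster domain/name from the matched URL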
+ def init(self):
+ super(XFileSharingPro, self).init()
+
+ self.__pattern = self.core.pluginManager.crypterPlugins[self.__name]['pattern']
+
+ self.HOSTER_DOMAIN = re.match(self.__pattern, self.pyfile.url).group(1).lower()
+ self.HOSTER_NAME = "".join(part.capitalize() for part in self.HOSTER_DOMAIN.split('.'))
+
+ account = self.core.accountManager.getAccountPlugin(self.HOSTER_NAME)
+
+ if account and account.canUse():
+ self.account = account
+ elif self.account:
+ self.account.HOSTER_DOMAIN = self.HOSTER_DOMAIN
+ else:
+ return
+
+ self.user, data = self.account.selectAccount()
+ self.req = self.account.getAccountRequest(self.user)
+ self.premium = self.account.isPremium(self.user)
diff --git a/pyload/plugin/crypter/XupPl.py b/pyload/plugin/crypter/XupPl.py
new file mode 100644
index 000000000..233d96534
--- /dev/null
+++ b/pyload/plugin/crypter/XupPl.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Crypter import Crypter
+
+
+class XupPl(Crypter):
+ __name = "XupPl"
+ __type = "crypter"
+ __version = "0.10"
+
+ __pattern = r'https?://(?:[^/]*\.)?xup\.pl/.*'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Xup.pl decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("z00nx", "z00nx0@gmail.com")]
+
+
+ def decrypt(self, pyfile):
+ header = self.load(pyfile.url, just_header=True)
+ if 'location' in header:
+ self.urls = [header['location']]
+ else:
+ self.fail(_("Unable to find link"))
diff --git a/pyload/plugin/crypter/YoutubeBatch.py b/pyload/plugin/crypter/YoutubeBatch.py
new file mode 100644
index 000000000..8f23b0149
--- /dev/null
+++ b/pyload/plugin/crypter/YoutubeBatch.py
@@ -0,0 +1,148 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from pyload.utils import json_loads
+from pyload.plugin.Crypter import Crypter
+from pyload.utils import safe_join
+
+
+class YoutubeBatch(Crypter):
+ __name = "YoutubeBatch"
+ __type = "crypter"
+ __version = "1.01"
+
+ __pattern = r'https?://(?:www\.|m\.)?youtube\.com/(?P<TYPE>user|playlist|view_play_list)(/|.*?[?&](?:list|p)=)(?P<ID>[\w-]+)'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True),
+ ("likes", "bool", "Grab user (channel) liked videos", False),
+ ("favorites", "bool", "Grab user (channel) favorite videos", False),
+ ("uploads", "bool", "Grab channel unplaylisted videos", True)]
+
+ __description = """Youtube.com channel & playlist decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ API_KEY = "AIzaSyCKnWLNlkX-L4oD1aEzqqhRw1zczeD6_k0"
+
+
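+ # Query the YouTube Data API v3; 'ref' is the resource path and 'req' the GET parameters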
+ def api_response(self, ref, req):
+ req.update({"key": self.API_KEY})
+ url = urljoin("https://www.googleapis.com/youtube/v3/", ref)
+ page = self.load(url, get=req)
+ return json_loads(page)
+
+
+ def getChannel(self, user):
+ channels = self.api_response("channels", {"part": "id,snippet,contentDetails", "forUsername": user, "maxResults": "50"})
+ if channels['items']:
+ channel = channels['items'][0]
+ return {"id": channel['id'],
+ "title": channel['snippet']['title'],
+ "relatedPlaylists": channel['contentDetails']['relatedPlaylists'],
+ "user": user} # One lone channel for user?
+
+
+ def getPlaylist(self, p_id):
+ playlists = self.api_response("playlists", {"part": "snippet", "id": p_id})
+ if playlists['items']:
+ playlist = playlists['items'][0]
+ return {"id": p_id,
+ "title": playlist['snippet']['title'],
+ "channelId": playlist['snippet']['channelId'],
+ "channelTitle": playlist['snippet']['channelTitle']}
+
+
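+ # Yield all playlist ids of a channel, following nextPageToken pagination recursively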
+ def _getPlaylists(self, id, token=None):
+ req = {"part": "id", "maxResults": "50", "channelId": id}
+ if token:
+ req.update({"pageToken": token})
+
+ playlists = self.api_response("playlists", req)
+
+ for playlist in playlists['items']:
+ yield playlist['id']
+
+ if "nextPageToken" in playlists:
+ for item in self._getPlaylists(id, playlists['nextPageToken']):
+ yield item
+
+
+ def getPlaylists(self, ch_id):
+ return map(self.getPlaylist, self._getPlaylists(ch_id))
+
+
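+ # Yield all video ids of a playlist, following nextPageToken pagination recursively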
+ def _getVideosId(self, id, token=None):
+ req = {"part": "contentDetails", "maxResults": "50", "playlistId": id}
+ if token:
+ req.update({"pageToken": token})
+
+ playlist = self.api_response("playlistItems", req)
+
+ for item in playlist['items']:
+ yield item['contentDetails']['videoId']
+
+ if "nextPageToken" in playlist:
+ for item in self._getVideosId(id, playlist['nextPageToken']):
+ yield item
+
+
+ def getVideosId(self, p_id):
+ return list(self._getVideosId(p_id))
+
+
+ def decrypt(self, pyfile):
+ m = re.match(self.__pattern, pyfile.url)
+ m_id = m.group("ID")
+ m_type = m.group("TYPE")
+
+ if m_type == "user":
+ self.logDebug("Url recognized as Channel")
+ user = m_id
+ channel = self.getChannel(user)
+
+ if channel:
+ playlists = self.getPlaylists(channel['id'])
+ self.logDebug("%s playlists found on channel \"%s\"" % (len(playlists), channel['title']))
+
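+ # Resolve the channel's related playlists (likes, favorites, uploads) to full playlist info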
+ relatedplaylist = {p_name: self.getPlaylist(p_id) for p_name, p_id in channel['relatedPlaylists'].iteritems()}
+ self.logDebug("Channel's related playlists found = %s" % relatedplaylist.keys())
+
+ relatedplaylist['uploads']['title'] = "Unplaylisted videos"
+ relatedplaylist['uploads']['checkDups'] = True #: checkDups flag
+
+ for p_name, p_data in relatedplaylist.iteritems():
+ if self.getConfig(p_name):
+ p_data['title'] += " of " + user
+ playlists.append(p_data)
+ else:
+ playlists = []
+ else:
+ self.logDebug("Url recognized as Playlist")
+ playlists = [self.getPlaylist(m_id)]
+
+ if not playlists:
+ self.fail(_("No playlist available"))
+
+ addedvideos = []
+ urlize = lambda x: "https://www.youtube.com/watch?v=" + x
+ for p in playlists:
+ p_name = p['title']
+ p_videos = self.getVideosId(p['id'])
+ p_folder = safe_join(self.config['general']['download_folder'], p['channelTitle'], p_name)
+ self.logDebug("%s videos found on playlist \"%s\"" % (len(p_videos), p_name))
+
+ if not p_videos:
+ continue
+ elif "checkDups" in p:
+ p_urls = [urlize(v_id) for v_id in p_videos if v_id not in addedvideos]
+ self.logDebug("%s videos available on playlist \"%s\" after duplicates cleanup" % (len(p_urls), p_name))
+ else:
+ p_urls = map(urlize, p_videos)
+
+ self.packages.append((p_name, p_urls, p_folder)) #: folder is NOT recognized by pyload 0.4.9!
+
+ addedvideos.extend(p_videos)
diff --git a/pyload/plugins/crypter/__init__.py b/pyload/plugin/crypter/__init__.py
index 40a96afc6..40a96afc6 100644
--- a/pyload/plugins/crypter/__init__.py
+++ b/pyload/plugin/crypter/__init__.py
diff --git a/pyload/plugin/hook/AlldebridCom.py b/pyload/plugin/hook/AlldebridCom.py
new file mode 100644
index 000000000..3297c12e3
--- /dev/null
+++ b/pyload/plugin/hook/AlldebridCom.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class AlldebridCom(MultiHoster):
+ __name = "AlldebridCom"
+ __type = "hook"
+ __version = "0.13"
+
+ __config = [("https", "bool", "Enable HTTPS", False),
+ ("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """Alldebrid.com hook plugin"""
+ __license = "GPLv3"
+ __authors = [("Andy Voigt", "spamsales@online.de")]
+
+
+ def getHoster(self):
+ https = "https" if self.getConfig("https") else "http"
+ page = getURL(https + "://www.alldebrid.com/api.php", get={'action': "get_host"}).replace("\"", "").strip()
+
+ return [x.strip() for x in page.split(",") if x.strip()]
diff --git a/pyload/plugin/hook/BypassCaptcha.py b/pyload/plugin/hook/BypassCaptcha.py
new file mode 100644
index 000000000..20a1582cc
--- /dev/null
+++ b/pyload/plugin/hook/BypassCaptcha.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+
+from pycurl import FORM_FILE, LOW_SPEED_TIME
+
+from pyload.network.HTTPRequest import BadHeader
+from pyload.network.RequestFactory import getURL, getRequest
+from pyload.plugin.Addon import Addon
+
+
+class BypassCaptchaException(Exception):
+
+ def __init__(self, err):
+ self.err = err
+
+
+ def getCode(self):
+ return self.err
+
+
+ def __str__(self):
+ return "<BypassCaptchaException %s>" % self.err
+
+
+ def __repr__(self):
+ return "<BypassCaptchaException %s>" % self.err
+
+
+class BypassCaptcha(Addon):
+ __name = "BypassCaptcha"
+ __type = "hook"
+ __version = "0.05"
+
+ __config = [("force", "bool", "Force BC even if client is connected", False),
+ ("passkey", "password", "Passkey", "")]
+
+ __description = """Send captchas to BypassCaptcha.com"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("Godofdream", "soilfcition@gmail.com"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ PYLOAD_KEY = "4f771155b640970d5607f919a615bdefc67e7d32"
+
+ SUBMIT_URL = "http://bypasscaptcha.com/upload.php"
+ RESPOND_URL = "http://bypasscaptcha.com/check_value.php"
+ GETCREDITS_URL = "http://bypasscaptcha.com/ex_left.php"
+
+
+ def getCredits(self):
+ res = getURL(self.GETCREDITS_URL, post={"key": self.getConfig("passkey")})
+
+ data = dict(x.split(' ', 1) for x in res.splitlines())
+ return int(data['Left'])
+
+
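+ # Upload the captcha image via multipart POST; the response is parsed as "Key Value" lines (TaskId, Value)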
+ def submit(self, captcha, captchaType="file", match=None):
+ req = getRequest()
+
+ #raise timeout threshold
+ req.c.setopt(LOW_SPEED_TIME, 80)
+
+ try:
+ res = req.load(self.SUBMIT_URL,
+ post={'vendor_key': self.PYLOAD_KEY,
+ 'key': self.getConfig("passkey"),
+ 'gen_task_id': "1",
+ 'file': (FORM_FILE, captcha)},
+ multipart=True)
+ finally:
+ req.close()
+
+ data = dict(x.split(' ', 1) for x in res.splitlines())
+ if not data or "Value" not in data:
+ raise BypassCaptchaException(res)
+
+ result = data['Value']
+ ticket = data['TaskId']
+ self.logDebug("Result %s : %s" % (ticket, result))
+
+ return ticket, result
+
+
+ def respond(self, ticket, success):
+ try:
+ res = getURL(self.RESPOND_URL, post={"task_id": ticket, "key": self.getConfig("passkey"),
+ "cv": 1 if success else 0})
+ except BadHeader, e:
+ self.logError(_("Could not send response"), e)
+
+
+ def captchaTask(self, task):
+ if "service" in task.data:
+ return False
+
+ if not task.isTextual():
+ return False
+
+ if not self.getConfig("passkey"):
+ return False
+
+ if self.core.isClientConnected() and not self.getConfig("force"):
+ return False
+
+ if self.getCredits() > 0:
+ task.handler.append(self)
+ task.data['service'] = self.__name
+ task.setWaiting(100)
+ self.processCaptcha(task)
+ else:
+ self.logInfo(_("Your %s account does not have enough credits") % self.__name)
+
+
+ def captchaCorrect(self, task):
+ if task.data['service'] == self.__name and "ticket" in task.data:
+ self.respond(task.data['ticket'], True)
+
+
+ def captchaInvalid(self, task):
+ if task.data['service'] == self.__name and "ticket" in task.data:
+ self.respond(task.data['ticket'], False)
+
+
+ def processCaptcha(self, task):
+ c = task.captchaFile
+ try:
+ ticket, result = self.submit(c)
+ except BypassCaptchaException, e:
+ task.error = e.getCode()
+ return
+
+ task.data['ticket'] = ticket
+ task.setResult(result)
diff --git a/pyload/plugin/hook/Captcha9kw.py b/pyload/plugin/hook/Captcha9kw.py
new file mode 100644
index 000000000..dafbf72b3
--- /dev/null
+++ b/pyload/plugin/hook/Captcha9kw.py
@@ -0,0 +1,253 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import re
+
+from base64 import b64encode
+from time import sleep
+
+from pyload.network.HTTPRequest import BadHeader
+from pyload.network.RequestFactory import getURL
+
+from pyload.plugin.Addon import Addon
+
+
+class Captcha9kw(Addon):
+ __name = "Captcha9kw"
+ __type = "hook"
+ __version = "0.26"
+
+ __config = [("ssl" , "bool" , "Use HTTPS" , True ),
+ ("force" , "bool" , "Force captcha resolving even if client is connected" , True ),
+ ("confirm" , "bool" , "Confirm Captcha (cost +6 credits)" , False ),
+ ("captchaperhour", "int" , "Captcha per hour" , "9999" ),
+ ("captchapermin" , "int" , "Captcha per minute" , "9999" ),
+ ("prio" , "int" , "Priority (max 10)(cost +0 -> +10 credits)" , "0" ),
+ ("queue" , "int" , "Max. Queue (max 999)" , "50" ),
+ ("hoster_options", "string" , "Hoster options (format: pluginname:prio=1:selfsolve=1:confirm=1:timeout=900|...)", "ShareonlineBiz:prio=0:timeout=999 | UploadedTo:prio=0:timeout=999"),
+ ("selfsolve" , "bool" , "Selfsolve (manually solve your captcha in your 9kw client if active)" , "0" ),
+ ("passkey" , "password", "API key" , "" ),
+ ("timeout" , "int" , "Timeout in seconds (min 60, max 3999)" , "900" )]
+
+ __description = """Send captchas to 9kw.eu"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ API_URL = "http://www.9kw.eu/index.cgi"
+
+
+ def activate(self):
+ if self.getConfig("ssl"):
+ self.API_URL = self.API_URL.replace("http://", "https://")
+
+
+ def getCredits(self):
+ res = getURL(self.API_URL,
+ get={'apikey': self.getConfig("passkey"),
+ 'pyload': "1",
+ 'source': "pyload",
+ 'action': "usercaptchaguthaben"})
+
+ if res.isdigit():
+ self.logInfo(_("%s credits left") % res)
+ credits = self.info['credits'] = int(res)
+ return credits
+ else:
+ self.logError(res)
+ return 0
+
+
+ def _processCaptcha(self, task):
+ try:
+ with open(task.captchaFile, 'rb') as f:
+ data = f.read()
+
+ except IOError, e:
+ self.logError(e)
+ return
+
+ data = b64encode(data)
+ mouse = 1 if task.isPositional() else 0
+ pluginname = re.search(r'_([^_]*)_\d+.\w+', task.captchaFile).group(1)
+
+ option = {'min' : 2,
+ 'max' : 50,
+ 'phrase' : 0,
+ 'numeric' : 0,
+ 'case_sensitive': 0,
+ 'math' : 0,
+ 'prio' : min(max(self.getConfig("prio"), 0), 10),
+ 'confirm' : self.getConfig("confirm"),
+ 'timeout' : min(max(self.getConfig("timeout"), 300), 3999),
+ 'selfsolve' : self.getConfig("selfsolve"),
+ 'cph' : self.getConfig("captchaperhour"),
+ 'cpm' : self.getConfig("captchapermin")}
+
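+ # Apply per-hoster overrides from the hoster_options config (e.g. prio, timeout) on top of the defaults above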
+ for opt in self.getConfig("hoster_options").split('|'):
+
+ details = map(str.strip, opt.split(':'))
+
+ if not details or details[0].lower() != pluginname.lower():
+ continue
+
+ for d in details:
+ hosteroption = d.split("=")
+
+ if len(hosteroption) < 2 or not hosteroption[1].isdigit():
+ continue
+
+ o = hosteroption[0].lower()
+ if o in option:
+ option[o] = hosteroption[1]
+
+ break
+
+ post_data = {'apikey' : self.getConfig("passkey"),
+ 'prio' : option['prio'],
+ 'confirm' : option['confirm'],
+ 'maxtimeout' : option['timeout'],
+ 'selfsolve' : option['selfsolve'],
+ 'captchaperhour': option['cph'],
+ 'captchapermin' : option['cpm'],
+ 'case-sensitive': option['case_sensitive'],
+ 'min_len' : option['min'],
+ 'max_len' : option['max'],
+ 'phrase' : option['phrase'],
+ 'numeric' : option['numeric'],
+ 'math' : option['math'],
+ 'oldsource' : pluginname,
+ 'pyload' : "1",
+ 'source' : "pyload",
+ 'base64' : "1",
+ 'mouse' : mouse,
+ 'file-upload-01': data,
+ 'action' : "usercaptchaupload"}
+
+ for _i in xrange(5):
+ try:
+ res = getURL(self.API_URL, post=post_data)
+ except BadHeader, e:
+ sleep(3)
+ else:
+ if res and res.isdigit():
+ break
+ else:
+ self.logError(_("Bad upload: %s") % res)
+ return
+
+ self.logDebug(_("NewCaptchaID ticket: %s") % res, task.captchaFile)
+
+ task.data["ticket"] = res
+
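+ # Poll the API every 5 seconds until a solution arrives or the configured timeout expires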
+ for _i in xrange(int(self.getConfig("timeout") / 5)):
+ result = getURL(self.API_URL,
+ get={'apikey': self.getConfig("passkey"),
+ 'id' : res,
+ 'pyload': "1",
+ 'info' : "1",
+ 'source': "pyload",
+ 'action': "usercaptchacorrectdata"})
+
+ if not result or result == "NO DATA":
+ sleep(5)
+ else:
+ break
+ else:
+ self.logDebug("Could not send request: %s" % res)
+ result = None
+
+ self.logInfo(_("Captcha result for ticket %s: %s") % (res, result))
+
+ task.setResult(result)
+
+
+ def captchaTask(self, task):
+ if not task.isTextual() and not task.isPositional():
+ return
+
+ if not self.getConfig("passkey"):
+ return
+
+ if self.core.isClientConnected() and not self.getConfig("force"):
+ return
+
+ credits = self.getCredits()
+
+ if not credits:
+ self.logError(_("Your captcha 9kw.eu account does not have enough credits"))
+ return
+
+ queue = min(self.getConfig("queue"), 999)
+ timeout = min(max(self.getConfig("timeout"), 300), 3999)
+ pluginname = re.search(r'_([^_]*)_\d+.\w+', task.captchaFile).group(1)
+
+ for _i in xrange(5):
+ servercheck = getURL("http://www.9kw.eu/grafik/servercheck.txt")
+ if queue < int(re.search(r'queue=(\d+)', servercheck).group(1)):
+ break
+
+ sleep(10)
+ else:
+ self.fail(_("Too many captchas in queue"))
+
+ for opt in self.getConfig("hoster_options").split('|'):
+ details = map(str.strip, opt.split(':'))
+
+ if not details or details[0].lower() != pluginname.lower():
+ continue
+
+ for d in details:
+ hosteroption = d.split("=")
+
+ if (len(hosteroption) > 1
+ and hosteroption[0].lower() == 'timeout'
+ and hosteroption[1].isdigit()):
+ timeout = int(hosteroption[1])
+
+ break
+
+ task.handler.append(self)
+
+ task.setWaiting(timeout)
+
+ self._processCaptcha(task)
+
+
+ def _captchaResponse(self, task, correct):
+ type = "correct" if correct else "refund"
+
+ if 'ticket' not in task.data:
+ self.logDebug("No CaptchaID for %s request (task: %s)" % (type, task))
+ return
+
+ passkey = self.getConfig("passkey")
+
+ for _i in xrange(3):
+ res = getURL(self.API_URL,
+ get={'action' : "usercaptchacorrectback",
+ 'apikey' : passkey,
+ 'api_key': passkey,
+ 'correct': "1" if correct else "2",
+ 'pyload' : "1",
+ 'source' : "pyload",
+ 'id' : task.data["ticket"]})
+
+ self.logDebug("Request %s: %s" % (type, res))
+
+ if res == "OK":
+ break
+
+ sleep(5)
+ else:
+ self.logDebug("Could not send %s request: %s" % (type, res))
+
+
+ def captchaCorrect(self, task):
+ self._captchaResponse(task, True)
+
+
+ def captchaInvalid(self, task):
+ self._captchaResponse(task, False)
diff --git a/pyload/plugin/hook/CaptchaBrotherhood.py b/pyload/plugin/hook/CaptchaBrotherhood.py
new file mode 100644
index 000000000..d01d262f2
--- /dev/null
+++ b/pyload/plugin/hook/CaptchaBrotherhood.py
@@ -0,0 +1,166 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import StringIO
+import pycurl
+
+try:
+ from PIL import Image
+except ImportError:
+ import Image
+
+from time import sleep
+from urllib import urlencode
+
+from pyload.network.RequestFactory import getURL, getRequest
+from pyload.plugin.Addon import Addon
+
+
+class CaptchaBrotherhoodException(Exception):
+
+ def __init__(self, err):
+ self.err = err
+
+
+ def getCode(self):
+ return self.err
+
+
+ def __str__(self):
+ return "<CaptchaBrotherhoodException %s>" % self.err
+
+
+ def __repr__(self):
+ return "<CaptchaBrotherhoodException %s>" % self.err
+
+
+class CaptchaBrotherhood(Addon):
+ __name = "CaptchaBrotherhood"
+ __type = "hook"
+ __version = "0.06"
+
+ __config = [("username", "str", "Username", ""),
+ ("force", "bool", "Force CT even if client is connected", False),
+ ("passkey", "password", "Password", "")]
+
+ __description = """Send captchas to CaptchaBrotherhood.com"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ API_URL = "http://www.captchabrotherhood.com/"
+
+
+ def getCredits(self):
+ res = getURL(self.API_URL + "askCredits.aspx",
+ get={"username": self.getConfig("username"), "password": self.getConfig("passkey")})
+ if not res.startswith("OK"):
+ raise CaptchaBrotherhoodException(res)
+ else:
+ credits = int(res[3:])
+ self.logInfo(_("%d credits left") % credits)
+ self.info['credits'] = credits
+ return credits
+
+
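+ # Convert the captcha to GIF/JPEG, upload it, then poll askCaptchaResult for the answer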
+ def submit(self, captcha, captchaType="file", match=None):
+ try:
+ img = Image.open(captcha)
+ output = StringIO.StringIO()
+ self.logDebug("CAPTCHA IMAGE", img, img.format, img.mode)
+ if img.format in ("GIF", "JPEG"):
+ img.save(output, img.format)
+ else:
+ if img.mode != "RGB":
+ img = img.convert("RGB")
+ img.save(output, "JPEG")
+ data = output.getvalue()
+ output.close()
+ except Exception, e:
+ raise CaptchaBrotherhoodException("Reading or converting captcha image failed: %s" % e)
+
+ req = getRequest()
+
+ url = "%ssendNewCaptcha.aspx?%s" % (self.API_URL,
+ urlencode({"username": self.getConfig("username"),
+ "password": self.getConfig("passkey"),
+ "captchaSource": "pyLoad",
+ "timeout": "80"}))
+
+ req.c.setopt(pycurl.URL, url)
+ req.c.setopt(pycurl.POST, 1)
+ req.c.setopt(pycurl.POSTFIELDS, data)
+ req.c.setopt(pycurl.HTTPHEADER, ["Content-Type: text/html"])
+
+ try:
+ req.c.perform()
+ res = req.getResponse()
+ except Exception, e:
+ raise CaptchaBrotherhoodException("Submit captcha image failed")
+
+ req.close()
+
+ if not res.startswith("OK"):
+ raise CaptchaBrotherhoodException(res[1])
+
+ ticket = res[3:]
+
+ for _i in xrange(15):
+ sleep(5)
+ res = self.get_api("askCaptchaResult", ticket)
+ if res.startswith("OK-answered"):
+ return ticket, res[12:]
+
+ raise CaptchaBrotherhoodException("No solution received in time")
+
+
+ def get_api(self, api, ticket):
+ res = getURL("%s%s.aspx" % (self.API_URL, api),
+ get={"username": self.getConfig("username"),
+ "password": self.getConfig("passkey"),
+ "captchaID": ticket})
+ if not res.startswith("OK"):
+ raise CaptchaBrotherhoodException("Unknown response: %s" % res)
+
+ return res
+
+
+ def captchaTask(self, task):
+ if "service" in task.data:
+ return False
+
+ if not task.isTextual():
+ return False
+
+ if not self.getConfig("username") or not self.getConfig("passkey"):
+ return False
+
+ if self.core.isClientConnected() and not self.getConfig("force"):
+ return False
+
+ if self.getCredits() > 10:
+ task.handler.append(self)
+ task.data['service'] = self.__name
+ task.setWaiting(100)
+ self.processCaptcha(task)
+ else:
+ self.logInfo(_("Your CaptchaBrotherhood account does not have enough credits"))
+
+
+ def captchaInvalid(self, task):
+ if task.data['service'] == self.__name and "ticket" in task.data:
+ res = self.get_api("complainCaptcha", task.data['ticket'])
+
+
+ def processCaptcha(self, task):
+ c = task.captchaFile
+ try:
+ ticket, result = self.submit(c)
+ except CaptchaBrotherhoodException, e:
+ task.error = e.getCode()
+ return
+
+ task.data['ticket'] = ticket
+ task.setResult(result)
diff --git a/pyload/plugin/hook/DeathByCaptcha.py b/pyload/plugin/hook/DeathByCaptcha.py
new file mode 100644
index 000000000..92bfa1e70
--- /dev/null
+++ b/pyload/plugin/hook/DeathByCaptcha.py
@@ -0,0 +1,213 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import re
+
+from base64 import b64encode
+from pycurl import FORM_FILE, HTTPHEADER
+from time import sleep
+
+from pyload.utils import json_loads
+from pyload.network.HTTPRequest import BadHeader
+from pyload.network.RequestFactory import getRequest
+from pyload.plugin.Addon import Addon
+
+
+class DeathByCaptchaException(Exception):
+ DBC_ERRORS = {'not-logged-in': 'Access denied, check your credentials',
+ 'invalid-credentials': 'Access denied, check your credentials',
+ 'banned': 'Access denied, account is suspended',
+ 'insufficient-funds': 'Insufficient account balance to decrypt CAPTCHA',
+ 'invalid-captcha': 'CAPTCHA is not a valid image',
+ 'service-overload': 'CAPTCHA was rejected due to service overload, try again later',
+ 'invalid-request': 'Invalid request',
+ 'timed-out': 'No CAPTCHA solution received in time'}
+
+
+ def __init__(self, err):
+ self.err = err
+
+
+ def getCode(self):
+ return self.err
+
+
+ def getDesc(self):
+ if self.err in self.DBC_ERRORS.keys():
+ return self.DBC_ERRORS[self.err]
+ else:
+ return self.err
+
+
+ def __str__(self):
+ return "<DeathByCaptchaException %s>" % self.err
+
+
+ def __repr__(self):
+ return "<DeathByCaptchaException %s>" % self.err
+
+
+class DeathByCaptcha(Addon):
+ __name = "DeathByCaptcha"
+ __type = "hook"
+ __version = "0.04"
+
+ __config = [("username", "str", "Username", ""),
+ ("passkey", "password", "Password", ""),
+ ("force", "bool", "Force DBC even if client is connected", False)]
+
+ __description = """Send captchas to DeathByCaptcha.com"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ API_URL = "http://api.dbcapi.me/api/"
+
+
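+ # Generic JSON API call; account credentials are added automatically whenever 'post' is used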
+ def call_api(self, api="captcha", post=False, multipart=False):
+ req = getRequest()
+ req.c.setopt(HTTPHEADER, ["Accept: application/json", "User-Agent: pyLoad %s" % self.core.version])
+
+ if post:
+ if not isinstance(post, dict):
+ post = {}
+ post.update({"username": self.getConfig("username"),
+ "password": self.getConfig("passkey")})
+
+ res = None
+ try:
+ json = req.load("%s%s" % (self.API_URL, api),
+ post=post,
+ multipart=multipart)
+ self.logDebug(json)
+ res = json_loads(json)
+
+ if "error" in res:
+ raise DeathByCaptchaException(res['error'])
+ elif "status" not in res:
+ raise DeathByCaptchaException(str(res))
+
+ except BadHeader, e:
+ if 403 == e.code:
+ raise DeathByCaptchaException('not-logged-in')
+ elif 413 == e.code:
+ raise DeathByCaptchaException('invalid-captcha')
+ elif 503 == e.code:
+ raise DeathByCaptchaException('service-overload')
+ elif e.code in (400, 405):
+ raise DeathByCaptchaException('invalid-request')
+ else:
+ raise
+
+ finally:
+ req.close()
+
+ return res
+
+
+ def getCredits(self):
+ res = self.call_api("user", True)
+
+ if 'is_banned' in res and res['is_banned']:
+ raise DeathByCaptchaException('banned')
+ elif 'balance' in res and 'rate' in res:
+ self.info.update(res)
+ else:
+ raise DeathByCaptchaException(res)
+
+
+ def getStatus(self):
+ res = self.call_api("status", False)
+
+ if 'is_service_overloaded' in res and res['is_service_overloaded']:
+ raise DeathByCaptchaException('service-overload')
+
+
+ def submit(self, captcha, captchaType="file", match=None):
+ #workaround multipart-post bug in HTTPRequest.py
+ if re.match("^\w*$", self.getConfig("passkey")):
+ multipart = True
+ data = (FORM_FILE, captcha)
+ else:
+ multipart = False
+ with open(captcha, 'rb') as f:
+ data = f.read()
+ data = "base64:" + b64encode(data)
+
+ res = self.call_api("captcha", {"captchafile": data}, multipart)
+
+ if "captcha" not in res:
+ raise DeathByCaptchaException(res)
+ ticket = res['captcha']
+
+ for _i in xrange(24):
+ sleep(5)
+ res = self.call_api("captcha/%d" % ticket, False)
+ if res['text'] and res['is_correct']:
+ break
+ else:
+ raise DeathByCaptchaException('timed-out')
+
+ result = res['text']
+ self.logDebug("Result %s : %s" % (ticket, result))
+
+ return ticket, result
+
+
+ def captchaTask(self, task):
+ if "service" in task.data:
+ return False
+
+ if not task.isTextual():
+ return False
+
+ if not self.getConfig("username") or not self.getConfig("passkey"):
+ return False
+
+ if self.core.isClientConnected() and not self.getConfig("force"):
+ return False
+
+ try:
+ self.getStatus()
+ self.getCredits()
+ except DeathByCaptchaException, e:
+ self.logError(e.getDesc())
+ return False
+
+ balance, rate = self.info['balance'], self.info['rate']
+ self.logInfo(_("Account balance"),
+ _("US$%.3f (%d captchas left at %.2f cents each)") % (balance / 100.0,
+ balance // rate, rate))
+
+ if balance > rate:
+ task.handler.append(self)
+ task.data['service'] = self.__name
+ task.setWaiting(180)
+ self.processCaptcha(task)
+
+
+ def captchaInvalid(self, task):
+ if task.data['service'] == self.__name and "ticket" in task.data:
+ try:
+ res = self.call_api("captcha/%d/report" % task.data['ticket'], True)
+
+ except DeathByCaptchaException, e:
+ self.logError(e.getDesc())
+
+ except Exception, e:
+ self.logError(e)
+
+
+ def processCaptcha(self, task):
+ c = task.captchaFile
+ try:
+ ticket, result = self.submit(c)
+ except DeathByCaptchaException, e:
+ task.error = e.getCode()
+ self.logError(e.getDesc())
+ return
+
+ task.data['ticket'] = ticket
+ task.setResult(result)
diff --git a/pyload/plugin/hook/DebridItaliaCom.py b/pyload/plugin/hook/DebridItaliaCom.py
new file mode 100644
index 000000000..bce99abb2
--- /dev/null
+++ b/pyload/plugin/hook/DebridItaliaCom.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class DebridItaliaCom(MultiHoster):
+ __name = "DebridItaliaCom"
+ __type = "hook"
+ __version = "0.08"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """Debriditalia.com hook plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def getHoster(self):
+ html = getURL("http://www.debriditalia.com/status.php")
+ return re.findall(r'title="(.+?)"> \1</td><td><img src="/images/(?:attivo|testing)', html)
diff --git a/pyload/plugin/hook/EasybytezCom.py b/pyload/plugin/hook/EasybytezCom.py
new file mode 100644
index 000000000..b401809fb
--- /dev/null
+++ b/pyload/plugin/hook/EasybytezCom.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class EasybytezCom(MultiHoster):
+ __name = "EasybytezCom"
+ __type = "hook"
+ __version = "0.03"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", "")]
+
+ __description = """EasyBytez.com hook plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
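+ # Read the supported hoster list from the account page; fall back to a static list on any failure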
+ def getHoster(self):
+ self.account = self.core.accountManager.getAccountPlugin(self.__name)
+ user = self.account.selectAccount()[0]
+
+ try:
+ req = self.account.getAccountRequest(user)
+ page = req.load("http://www.easybytez.com")
+
+ hosters = re.search(r'</textarea>\s*Supported sites:(.*)', page).group(1).split(',')
+
+ except Exception, e:
+ self.logWarning(_("Unable to load supported hoster list, using last known"))
+ self.logDebug(e)
+
+ hosters = ["bitshare.com", "crocko.com", "ddlstorage.com", "depositfiles.com", "extabit.com", "hotfile.com",
+ "mediafire.com", "netload.in", "rapidgator.net", "rapidshare.com", "uploading.com", "uload.to",
+ "uploaded.to"]
+ finally:
+ return hosters
diff --git a/pyload/plugin/hook/ExpertDecoders.py b/pyload/plugin/hook/ExpertDecoders.py
new file mode 100644
index 000000000..ed8edbf69
--- /dev/null
+++ b/pyload/plugin/hook/ExpertDecoders.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+from base64 import b64encode
+from pycurl import LOW_SPEED_TIME
+from uuid import uuid4
+
+from pyload.network.HTTPRequest import BadHeader
+from pyload.network.RequestFactory import getURL, getRequest
+from pyload.plugin.Addon import Addon
+
+
+class ExpertDecoders(Addon):
+ __name = "ExpertDecoders"
+ __type = "hook"
+ __version = "0.02"
+
+ __config = [("force", "bool", "Force CT even if client is connected", False),
+ ("passkey", "password", "Access key", "")]
+
+ __description = """Send captchas to expertdecoders.com"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ API_URL = "http://www.fasttypers.org/imagepost.ashx"
+
+
+ def getCredits(self):
+ res = getURL(self.API_URL, post={"key": self.getConfig("passkey"), "action": "balance"})
+
+ if res.isdigit():
+ self.logInfo(_("%s credits left") % res)
+ self.info['credits'] = credits = int(res)
+ return credits
+ else:
+ self.logError(res)
+ return 0
+
+
+ def processCaptcha(self, task):
+ task.data['ticket'] = ticket = uuid4()
+ result = None
+
+ with open(task.captchaFile, 'rb') as f:
+ data = f.read()
+ data = b64encode(data)
+
+ req = getRequest()
+ #raise timeout threshold
+ req.c.setopt(LOW_SPEED_TIME, 80)
+
+ try:
+ result = req.load(self.API_URL, post={"action": "upload", "key": self.getConfig("passkey"),
+ "file": data, "gen_task_id": ticket})
+ finally:
+ req.close()
+
+ self.logDebug("Result %s : %s" % (ticket, result))
+ task.setResult(result)
+
+
+ def captchaTask(self, task):
+ if not task.isTextual():
+ return False
+
+ if not self.getConfig("passkey"):
+ return False
+
+ if self.core.isClientConnected() and not self.getConfig("force"):
+ return False
+
+ if self.getCredits() > 0:
+ task.handler.append(self)
+ task.setWaiting(100)
+ self.processCaptcha(task)
+ else:
+ self.logInfo(_("Your ExpertDecoders account does not have enough credits"))
+
+
+ def captchaInvalid(self, task):
+ if "ticket" in task.data:
+
+ try:
+ res = getURL(self.API_URL,
+ post={'action': "refund", 'key': self.getConfig("passkey"), 'gen_task_id': task.data['ticket']})
+ self.logInfo(_("Request refund"), res)
+
+ except BadHeader, e:
+ self.logError(_("Could not send refund request"), e)
diff --git a/pyload/plugin/hook/FastixRu.py b/pyload/plugin/hook/FastixRu.py
new file mode 100644
index 000000000..4bfec052e
--- /dev/null
+++ b/pyload/plugin/hook/FastixRu.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+
+from pyload.utils import json_loads
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class FastixRu(MultiHoster):
+ __name = "FastixRu"
+ __type = "hook"
+ __version = "0.02"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """Fastix.ru hook plugin"""
+ __license = "GPLv3"
+ __authors = [("Massimo Rosamilia", "max@spiritix.eu")]
+
+
+ def getHoster(self):
+ page = getURL("http://fastix.ru/api_v2",
+ get={'apikey': "5182964c3f8f9a7f0b00000a_kelmFB4n1IrnCDYuIFn2y",
+ 'sub' : "allowed_sources"})
+ host_list = json_loads(page)
+ host_list = host_list['allow']
+ return host_list
diff --git a/pyload/plugin/hook/FreeWayMe.py b/pyload/plugin/hook/FreeWayMe.py
new file mode 100644
index 000000000..910c9d640
--- /dev/null
+++ b/pyload/plugin/hook/FreeWayMe.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class FreeWayMe(MultiHoster):
+ __name = "FreeWayMe"
+ __type = "hook"
+ __version = "0.11"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported):", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """FreeWay.me hook plugin"""
+ __license = "GPLv3"
+ __authors = [("Nicolas Giese", "james@free-way.me")]
+
+
+ def getHoster(self):
+ hostis = getURL("https://www.free-way.me/ajax/jd.php", get={'id': 3}).replace("\"", "").strip()
+ self.logDebug("Hosters", hostis)
+ return [x.strip() for x in hostis.split(",") if x.strip()]
diff --git a/pyload/plugin/hook/ImageTyperz.py b/pyload/plugin/hook/ImageTyperz.py
new file mode 100644
index 000000000..4ad37eb89
--- /dev/null
+++ b/pyload/plugin/hook/ImageTyperz.py
@@ -0,0 +1,151 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import re
+
+from base64 import b64encode
+from pycurl import FORM_FILE, LOW_SPEED_TIME
+
+from pyload.network.RequestFactory import getURL, getRequest
+from pyload.plugin.Addon import Addon
+
+
+class ImageTyperzException(Exception):
+
+ def __init__(self, err):
+ self.err = err
+
+
+ def getCode(self):
+ return self.err
+
+
+ def __str__(self):
+ return "<ImageTyperzException %s>" % self.err
+
+
+ def __repr__(self):
+ return "<ImageTyperzException %s>" % self.err
+
+
+class ImageTyperz(Addon):
+ __name = "ImageTyperz"
+ __type = "hook"
+ __version = "0.05"
+
+ __config = [("username", "str", "Username", ""),
+ ("passkey", "password", "Password", ""),
+ ("force", "bool", "Force IT even if client is connected", False)]
+
+ __description = """Send captchas to ImageTyperz.com"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ SUBMIT_URL = "http://captchatypers.com/Forms/UploadFileAndGetTextNEW.ashx"
+ RESPOND_URL = "http://captchatypers.com/Forms/SetBadImage.ashx"
+ GETCREDITS_URL = "http://captchatypers.com/Forms/RequestBalance.ashx"
+
+
+ def getCredits(self):
+ res = getURL(self.GETCREDITS_URL,
+ post={'action': "REQUESTBALANCE",
+ 'username': self.getConfig("username"),
+ 'password': self.getConfig("passkey")})
+
+ if res.startswith('ERROR'):
+ raise ImageTyperzException(res)
+
+ try:
+ balance = float(res)
+ except Exception:
+ raise ImageTyperzException("Invalid response")
+
+ self.logInfo(_("Account balance: $%s left") % res)
+ return balance
+
+
+ def submit(self, captcha, captchaType="file", match=None):
+ req = getRequest()
+ #raise timeout threshold
+ req.c.setopt(LOW_SPEED_TIME, 80)
+
+ try:
+ #workaround multipart-post bug in HTTPRequest.py
+ if re.match("^\w*$", self.getConfig("passkey")):
+ multipart = True
+ data = (FORM_FILE, captcha)
+ else:
+ multipart = False
+ with open(captcha, 'rb') as f:
+ data = f.read()
+ data = b64encode(data)
+
+ res = req.load(self.SUBMIT_URL,
+ post={'action': "UPLOADCAPTCHA",
+ 'username': self.getConfig("username"),
+ 'password': self.getConfig("passkey"), "file": data},
+ multipart=multipart)
+ finally:
+ req.close()
+
+ if res.startswith("ERROR"):
+ raise ImageTyperzException(res)
+ else:
+ data = res.split('|')
+ if len(data) == 2:
+ ticket, result = data
+ else:
+ raise ImageTyperzException("Unknown response: %s" % res)
+
+ return ticket, result
+
+
+ def captchaTask(self, task):
+ if "service" in task.data:
+ return False
+
+ if not task.isTextual():
+ return False
+
+ if not self.getConfig("username") or not self.getConfig("passkey"):
+ return False
+
+ if self.core.isClientConnected() and not self.getConfig("force"):
+ return False
+
+ if self.getCredits() > 0:
+ task.handler.append(self)
+ task.data['service'] = self.__name
+ task.setWaiting(100)
+ self.processCaptcha(task)
+ else:
+ self.logInfo(_("Your %s account does not have enough credits") % self.__name)
+
+
+ def captchaInvalid(self, task):
+ if task.data['service'] == self.__name and "ticket" in task.data:
+ res = getURL(self.RESPOND_URL,
+ post={'action': "SETBADIMAGE",
+ 'username': self.getConfig("username"),
+ 'password': self.getConfig("passkey"),
+ 'imageid': task.data['ticket']})
+
+ if res == "SUCCESS":
+ self.logInfo(_("Bad captcha solution received, requested refund"))
+ else:
+ self.logError(_("Bad captcha solution received, refund request failed"), res)
+
+
+ def processCaptcha(self, task):
+ c = task.captchaFile
+ try:
+ ticket, result = self.submit(c)
+ except ImageTyperzException, e:
+ task.error = e.getCode()
+ return
+
+ task.data['ticket'] = ticket
+ task.setResult(result)
diff --git a/pyload/plugin/hook/LinkdecrypterCom.py b/pyload/plugin/hook/LinkdecrypterCom.py
new file mode 100644
index 000000000..6465d95e0
--- /dev/null
+++ b/pyload/plugin/hook/LinkdecrypterCom.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Addon import Addon
+from pyload.utils import remove_chars
+
+
+class LinkdecrypterCom(Addon):
+ __name = "LinkdecrypterCom"
+ __type = "hook"
+ __version = "0.21"
+
+ __description = """Linkdecrypter.com hook plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def activate(self):
+ try:
+ self.loadPatterns()
+ except Exception, e:
+ self.logError(e)
+
+
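+ # Scrape the supported crypter list from linkdecrypter.com and build a catch-all URL pattern for this plugin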
+ def loadPatterns(self):
+ html = getURL("http://linkdecrypter.com/")
+
+ m = re.search(r'<title>', html)
+ if m is None:
+ self.logError(_("Linkdecrypter site is down"))
+ return
+
+ m = re.search(r'<b>Supported\(\d+\)</b>: <i>([^+<]*)', html)
+ if m is None:
+ self.logError(_("Crypter list not found"))
+ return
+
+ builtin = [name.lower() for name in self.core.pluginManager.crypterPlugins.keys()]
+ builtin.append("downloadserienjunkiesorg")
+
+ crypter_pattern = re.compile("(\w[\w.-]+)")
+ online = []
+ for crypter in m.group(1).split(', '):
+ m = re.match(crypter_pattern, crypter)
+ if m and remove_chars(m.group(1), "-.") not in builtin:
+ online.append(m.group(1).replace(".", "\\."))
+
+ if not online:
+ self.logError(_("Crypter list is empty"))
+ return
+
+ regexp = r'https?://([^.]+\.)*?(%s)/.*' % '|'.join(online)
+
+ dict = self.core.pluginManager.crypterPlugins[self.__name]
+ dict['pattern'] = regexp
+ dict['re'] = re.compile(regexp)
+
+ self.logDebug("Loaded pattern: %s" % regexp)
diff --git a/pyload/plugin/hook/LinksnappyCom.py b/pyload/plugin/hook/LinksnappyCom.py
new file mode 100644
index 000000000..487927724
--- /dev/null
+++ b/pyload/plugin/hook/LinksnappyCom.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+from pyload.utils import json_loads
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class LinksnappyCom(MultiHoster):
+ __name = "LinksnappyCom"
+ __type = "hook"
+ __version = "0.01"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """Linksnappy.com hook plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ def getHoster(self):
+ json_data = getURL("http://gen.linksnappy.com/lseAPI.php", get={'act': "FILEHOSTS"})
+ json_data = json_loads(json_data)
+
+ return json_data['return'].keys()
diff --git a/pyload/plugin/hook/MegaDebridEu.py b/pyload/plugin/hook/MegaDebridEu.py
new file mode 100644
index 000000000..88a3bbe49
--- /dev/null
+++ b/pyload/plugin/hook/MegaDebridEu.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+
+from pyload.utils import json_loads
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class MegaDebridEu(MultiHoster):
+ __name = "MegaDebridEu"
+ __type = "hook"
+ __version = "0.02"
+
+ __config = [("unloadFailing", "bool", "Revert to standard download if download fails", False)]
+
+ __description = """mega-debrid.eu hook plugin"""
+ __license = "GPLv3"
+ __authors = [("D.Ducatel", "dducatel@je-geek.fr")]
+
+
+ def getHoster(self):
+ reponse = getURL("http://www.mega-debrid.eu/api.php", get={'action': "getHosters"})
+ json_data = json_loads(reponse)
+
+ if json_data['response_code'] == "ok":
+ host_list = [element[0] for element in json_data['hosters']]
+ else:
+ self.logError(_("Unable to retrieve hoster list"))
+ host_list = list()
+
+ return host_list
diff --git a/pyload/plugin/hook/MultishareCz.py b/pyload/plugin/hook/MultishareCz.py
new file mode 100644
index 000000000..58f721df2
--- /dev/null
+++ b/pyload/plugin/hook/MultishareCz.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class MultishareCz(MultiHoster):
+ __name = "MultishareCz"
+ __type = "hook"
+ __version = "0.04"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", "uloz.to")]
+
+ __description = """MultiShare.cz hook plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_PATTERN = r'<img class="logo-shareserveru"[^>]*?alt="([^"]+)"></td>\s*<td class="stav">[^>]*?alt="OK"'
+
+
+ def getHoster(self):
+ page = getURL("http://www.multishare.cz/monitoring/")
+ return re.findall(self.HOSTER_PATTERN, page)
diff --git a/pyload/plugin/hook/MyfastfileCom.py b/pyload/plugin/hook/MyfastfileCom.py
new file mode 100644
index 000000000..c9b3b2638
--- /dev/null
+++ b/pyload/plugin/hook/MyfastfileCom.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+from pyload.utils import json_loads
+
+
+class MyfastfileCom(MultiHoster):
+ __name = "MyfastfileCom"
+ __type = "hook"
+ __version = "0.02"
+
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """Myfastfile.com hook plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+
+ def getHoster(self):
+ json_data = getURL("http://myfastfile.com/api.php", get={'hosts': ""}, decode=True)
+ self.logDebug("JSON data", json_data)
+ json_data = json_loads(json_data)
+
+ return json_data['hosts']
diff --git a/pyload/plugin/hook/OverLoadMe.py b/pyload/plugin/hook/OverLoadMe.py
new file mode 100644
index 000000000..51d0b1dc1
--- /dev/null
+++ b/pyload/plugin/hook/OverLoadMe.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class OverLoadMe(MultiHoster):
+ __name = "OverLoadMe"
+ __type = "hook"
+ __version = "0.01"
+
+ __config = [("https", "bool", "Enable HTTPS", True),
+ ("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported):", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 12)]
+
+ __description = """Over-Load.me hook plugin"""
+ __license = "GPLv3"
+ __authors = [("marley", "marley@over-load.me")]
+
+
+ def getHoster(self):
+ https = "https" if self.getConfig("https") else "http"
+ page = getURL(https + "://api.over-load.me/hoster.php",
+ get={'auth': "0001-cb1f24dadb3aa487bda5afd3b76298935329be7700cd7-5329be77-00cf-1ca0135f"}).replace("\"", "").strip()
+ self.logDebug("Hosterlist", page)
+
+ return [x.strip() for x in page.split(",") if x.strip()]
diff --git a/pyload/plugin/hook/PremiumTo.py b/pyload/plugin/hook/PremiumTo.py
new file mode 100644
index 000000000..25fb70e8d
--- /dev/null
+++ b/pyload/plugin/hook/PremiumTo.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class PremiumTo(MultiHoster):
+ __name = "PremiumTo"
+ __type = "hook"
+ __version = "0.04"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for downloads from supported hosters:", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", "")]
+
+ __description = """Premium.to hook plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+
+ def getHoster(self):
+ page = getURL("http://premium.to/api/hosters.php",
+ get={'username': self.account.username, 'password': self.account.password})
+ return [x.strip() for x in page.replace("\"", "").split(";")]
+
+
+ def activate(self):
+ self.account = self.core.accountManager.getAccountPlugin("PremiumTo")
+
+ user = self.account.selectAccount()[0]
+
+ if not user:
+ self.logError(_("Please add your premium.to account first and restart pyLoad"))
+ return
+
+ return MultiHoster.activate(self)
diff --git a/pyload/plugin/hook/PremiumizeMe.py b/pyload/plugin/hook/PremiumizeMe.py
new file mode 100644
index 000000000..981c671b9
--- /dev/null
+++ b/pyload/plugin/hook/PremiumizeMe.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+from pyload.utils import json_loads
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class PremiumizeMe(MultiHoster):
+ __name = "PremiumizeMe"
+ __type = "hook"
+ __version = "0.12"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported):", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """Premiumize.me hook plugin"""
+ __license = "GPLv3"
+ __authors = [("Florian Franzen", "FlorianFranzen@gmail.com")]
+
+
+ def getHoster(self):
+ # If no accounts are available there will be no hosters available
+ if not self.account or not self.account.canUse():
+ return []
+
+ # Get account data
+ (user, data) = self.account.selectAccount()
+
+ # Get supported hosters list from premiumize.me using the
+ # json API v1 (see https://secure.premiumize.me/?show=api)
+ answer = getURL("https://api.premiumize.me/pm-api/v1.php",
+ get={'method': "hosterlist", 'params[login]': user, 'params[pass]': data['password']})
+ data = json_loads(answer)
+
+ # If the account is not valid there are no hosters available
+ if data['status'] != 200:
+ return []
+
+ # Extract hosters from json file
+ return data['result']['hosterlist']
+
+
+ def activate(self):
+ # Get account plugin and check if there is a valid account available
+ self.account = self.core.accountManager.getAccountPlugin("PremiumizeMe")
+ if not self.account.canUse():
+ self.account = None
+ self.logError(_("Please add a valid premiumize.me account first and restart pyLoad"))
+ return
+
+ # Run the overwritten core-ready hook, which actually enables the multihoster hook
+ return MultiHoster.activate(self)
diff --git a/pyload/plugin/hook/RPNetBiz.py b/pyload/plugin/hook/RPNetBiz.py
new file mode 100644
index 000000000..b976aa262
--- /dev/null
+++ b/pyload/plugin/hook/RPNetBiz.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+
+from pyload.utils import json_loads
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class RPNetBiz(MultiHoster):
+ __name = "RPNetBiz"
+ __type = "hook"
+ __version = "0.10"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported):", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """RPNet.biz hook plugin"""
+ __license = "GPLv3"
+ __authors = [("Dman", "dmanugm@gmail.com")]
+
+
+ def getHoster(self):
+ # No hosts supported if no account
+ if not self.account or not self.account.canUse():
+ return []
+
+ # Get account data
+ (user, data) = self.account.selectAccount()
+
+ res = getURL("https://premium.rpnet.biz/client_api.php",
+ get={'username': user, 'password': data['password'], 'action': "showHosterList"})
+ hoster_list = json_loads(res)
+
+ # If the account is not valid there are no hosters available
+ if 'error' in hoster_list:
+ return []
+
+ # Extract hosters from json file
+ return hoster_list['hosters']
+
+
+ def activate(self):
+ # Get account plugin and check if there is a valid account available
+ self.account = self.core.accountManager.getAccountPlugin("RPNetBiz")
+ if not self.account.canUse():
+ self.account = None
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "rpnet")
+ return
+
+ # Run the overwritten core-ready hook, which actually enables the multihoster hook
+ return MultiHoster.activate(self)
diff --git a/pyload/plugin/hook/RealdebridCom.py b/pyload/plugin/hook/RealdebridCom.py
new file mode 100644
index 000000000..2584132b8
--- /dev/null
+++ b/pyload/plugin/hook/RealdebridCom.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class RealdebridCom(MultiHoster):
+ __name = "RealdebridCom"
+ __type = "hook"
+ __version = "0.43"
+
+ __config = [("https", "bool", "Enable HTTPS", False),
+ ("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported):", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """Real-Debrid.com hook plugin"""
+ __license = "GPLv3"
+ __authors = [("Devirex Hazzard", "naibaf_11@yahoo.de")]
+
+
+ def getHoster(self):
+ https = "https" if self.getConfig("https") else "http"
+ page = getURL(https + "://real-debrid.com/api/hosters.php").replace("\"", "").strip()
+
+ return [x.strip() for x in page.split(",") if x.strip()]
diff --git a/pyload/plugin/hook/RehostTo.py b/pyload/plugin/hook/RehostTo.py
new file mode 100644
index 000000000..fd51bad12
--- /dev/null
+++ b/pyload/plugin/hook/RehostTo.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class RehostTo(MultiHoster):
+ __name = "RehostTo"
+ __type = "hook"
+ __version = "0.43"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """Rehost.to hook plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org")]
+
+
+ def getHoster(self):
+ page = getURL("http://rehost.to/api.php",
+ get={'cmd': "get_supported_och_dl", 'long_ses': self.long_ses})
+ return [x.strip() for x in page.replace("\"", "").split(",")]
+
+
+ def activate(self):
+ self.account = self.core.accountManager.getAccountPlugin("RehostTo")
+
+ user = self.account.selectAccount()[0]
+
+ if not user:
+ self.logError(_("Please add your rehost.to account first and restart pyLoad"))
+ return
+
+ data = self.account.getAccountInfo(user)
+ self.ses = data['ses']
+ self.long_ses = data['long_ses']
+
+ return MultiHoster.activate(self)
diff --git a/pyload/plugin/hook/SimplyPremiumCom.py b/pyload/plugin/hook/SimplyPremiumCom.py
new file mode 100644
index 000000000..e71a7a1c8
--- /dev/null
+++ b/pyload/plugin/hook/SimplyPremiumCom.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+
+from pyload.utils import json_loads
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class SimplyPremiumCom(MultiHoster):
+ __name = "SimplyPremiumCom"
+ __type = "hook"
+ __version = "0.02"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
+
+ __description = """Simply-Premium.com hook plugin"""
+ __license = "GPLv3"
+ __authors = [("EvolutionClip", "evolutionclip@live.de")]
+
+
+ def getHoster(self):
+ json_data = getURL("http://www.simply-premium.com/api/hosts.php", get={'format': "json", 'online': 1})
+ json_data = json_loads(json_data)
+
+ host_list = [element['regex'] for element in json_data['result']]
+
+ return host_list
diff --git a/pyload/plugin/hook/SimplydebridCom.py b/pyload/plugin/hook/SimplydebridCom.py
new file mode 100644
index 000000000..6283032e5
--- /dev/null
+++ b/pyload/plugin/hook/SimplydebridCom.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class SimplydebridCom(MultiHoster):
+ __name = "SimplydebridCom"
+ __type = "hook"
+ __version = "0.01"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", "")]
+
+ __description = """Simply-Debrid.com hook plugin"""
+ __license = "GPLv3"
+ __authors = [("Kagenoshin", "kagenoshin@gmx.ch")]
+
+
+ def getHoster(self):
+ page = getURL("http://simply-debrid.com/api.php", get={'list': 1})
+ return [x.strip() for x in page.rstrip(';').replace("\"", "").split(";")]
diff --git a/pyload/plugin/hook/UnrestrictLi.py b/pyload/plugin/hook/UnrestrictLi.py
new file mode 100644
index 000000000..a478545d6
--- /dev/null
+++ b/pyload/plugin/hook/UnrestrictLi.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+
+from pyload.utils import json_loads
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class UnrestrictLi(MultiHoster):
+ __name = "UnrestrictLi"
+ __type = "hook"
+ __version = "0.02"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", ""),
+ ("unloadFailing", "bool", "Revert to standard download if download fails", False),
+ ("interval", "int", "Reload interval in hours (0 to disable)", 24),
+ ("history", "bool", "Delete History", False)]
+
+ __description = """Unrestrict.li hook plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ def getHoster(self):
+ json_data = getURL("http://unrestrict.li/api/jdownloader/hosts.php", get={'format': "json"})
+ json_data = json_loads(json_data)
+
+ host_list = [element['host'] for element in json_data['result']]
+
+ return host_list
diff --git a/pyload/plugin/hook/XFileSharingPro.py b/pyload/plugin/hook/XFileSharingPro.py
new file mode 100644
index 000000000..b478245dc
--- /dev/null
+++ b/pyload/plugin/hook/XFileSharingPro.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Addon import Addon
+
+
+class XFileSharingPro(Addon):
+ __name = "XFileSharingPro"
+ __type = "hook"
+ __version = "0.26"
+
+ __config = [("activated" , "bool", "Activated" , True ),
+ ("use_hoster_list" , "bool", "Load listed hosters only" , True ),
+ ("use_crypter_list", "bool", "Load listed crypters only" , False),
+ ("use_builtin_list", "bool", "Load built-in plugin list" , True ),
+ ("hoster_list" , "str" , "Hoster list (comma separated)" , "" ),
+ ("crypter_list" , "str" , "Crypter list (comma separated)", "" )]
+
+ __description = """Load XFileSharingPro based hosters and crypter which don't need a own plugin to run"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ # event_list = ["pluginConfigChanged"]
+ regexp = {'hoster' : (r'https?://(?:www\.)?([\w.^_]+(?:\.[a-zA-Z]{2,})(?:\:\d+)?)/(?:embed-)?\w{12}(?:\W|$)',
+ r'https?://(?:[^/]+\.)?(%s)/(?:embed-)?\w+'),
+ 'crypter': (r'https?://(?:www\.)?([\w.^_]+(?:\.[a-zA-Z]{2,})(?:\:\d+)?)/(?:user|folder)s?/\w+',
+ r'https?://(?:[^/]+\.)?(%s)/(?:user|folder)s?/\w+')}
+
+ HOSTER_LIST = [#WORKING HOSTERS:
+ "eyesfile.ca", "file4safe.com", "fileband.com", "filedwon.com", "filevice.com", "hostingbulk.com",
+ "linestorage.com", "ravishare.com", "sharesix.com", "thefile.me", "verzend.be", "xvidstage.com",
+ #NOT TESTED:
+ "101shared.com", "4upfiles.com", "filemaze.ws", "filenuke.com", "linkzhost.com", "mightyupload.com",
+ "rockdizfile.com", "sharebeast.com", "sharerepo.com", "shareswift.com", "uploadbaz.com", "uploadc.com",
+ "vidbull.com", "zalaa.com", "zomgupload.com",
+ #NOT WORKING:
+ "amonshare.com", "banicrazy.info", "boosterking.com", "host4desi.com", "laoupload.com", "rd-fs.com"]
+ CRYPTER_LIST = []
+
+
+ # def pluginConfigChanged(self.__name, plugin, name, value):
+ # self.loadPattern()
+
+
+ def activate(self):
+ self.loadPattern()
+
+
+ def loadPattern(self):
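+ # Build the url regex for the generic XFileSharingPro hoster/crypter plugins from the configured lists and install it into the plugin manager.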
+ use_builtin_list = self.getConfig('use_builtin_list')
+
+ for type in ("hoster", "crypter"):
+ every_plugin = not self.getConfig("use_%s_list" % type)
+
+ if every_plugin:
+ self.logInfo(_("Handling any %s I can!") % type)
+ pattern = self.regexp[type][0]
+ else:
+ s = self.getConfig('%s_list' % type).replace('\\', '').replace('|', ',').replace(';', ',').lower()
+ plugin_list = set([x.strip() for x in s.split(',')])
+
+ if use_builtin_list:
+ plugin_list |= set([x.lower() for x in getattr(self, "%s_LIST" % type.upper())])
+
+ plugin_list -= set(('', u''))
+
+ if not plugin_list:
+ self.logInfo(_("No %s to handle") % type)
+ self._unload(type)
+ return
+
+ match_list = '|'.join(sorted(plugin_list))
+
+ len_match_list = len(plugin_list)
+ self.logInfo(_("Handling %d %s%s: %s") % (len_match_list, type, "" if len_match_list is 1 else "s", match_list.replace('|', ', ')))
+
+ pattern = self.regexp[type][1] % match_list.replace('.', '\.')
+
+ dict = self.core.pluginManager.plugins[type]["XFileSharingPro"]
+ dict['pattern'] = pattern
+ dict['re'] = re.compile(pattern)
+
+ self.logDebug("Loaded %s pattern: %s" % (type, pattern))
+
+
+ def _unload(self, type):
+ dict = self.core.pluginManager.plugins[type]["XFileSharingPro"]
+ dict['pattern'] = r'^unmatchable$'
+ dict['re'] = re.compile(dict['pattern'])
+
+
+ def deactivate(self):
+ for type in ("hoster", "crypter"):
+ self._unload(type)
diff --git a/pyload/plugin/hook/ZeveraCom.py b/pyload/plugin/hook/ZeveraCom.py
new file mode 100644
index 000000000..af93f1a7f
--- /dev/null
+++ b/pyload/plugin/hook/ZeveraCom.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.MultiHoster import MultiHoster
+
+
+class ZeveraCom(MultiHoster):
+ __name = "ZeveraCom"
+ __type = "hook"
+ __version = "0.02"
+
+ __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
+ ("hosterList", "str", "Hoster list (comma separated)", "")]
+
+ __description = """Real-Debrid.com hook plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def getHoster(self):
+ page = getURL("http://www.zevera.com/jDownloader.ashx", get={'cmd': "gethosters"})
+ return [x.strip() for x in page.replace("\"", "").split(",")]
diff --git a/pyload/plugins/hook/__init__.py b/pyload/plugin/hook/__init__.py
index 40a96afc6..40a96afc6 100644
--- a/pyload/plugins/hook/__init__.py
+++ b/pyload/plugin/hook/__init__.py
diff --git a/pyload/plugin/hoster/AlldebridCom.py b/pyload/plugin/hoster/AlldebridCom.py
new file mode 100644
index 000000000..03efde803
--- /dev/null
+++ b/pyload/plugin/hoster/AlldebridCom.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from random import randrange
+from urllib import unquote
+
+from pyload.utils import json_loads
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import parseFileSize
+
+
+class AlldebridCom(Hoster):
+ __name = "AlldebridCom"
+ __type = "hoster"
+ __version = "0.34"
+
+ __pattern = r'https?://(?:[^/]*\.)?alldebrid\..*'
+
+ __description = """Alldebrid.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Andy Voigt", "spamsales@online.de")]
+
+
+ def getFilename(self, url):
+ try:
+ name = unquote(url.rsplit("/", 1)[1])
+ except IndexError:
+ name = "Unknown_Filename..."
+ if name.endswith("..."): # incomplete filename, append random stuff
+ name += "%s.tmp" % randrange(100, 999)
+ return name
+
+
+ def setup(self):
+ self.chunkLimit = 16
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ if re.match(self.__pattern, pyfile.url):
+ new_url = pyfile.url
+ elif not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "AllDebrid")
+ self.fail(_("No AllDebrid account provided"))
+ else:
+ self.logDebug("Old URL: %s" % pyfile.url)
+ password = self.getPassword().splitlines()[0] or ""
+
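+ # Ask the AllDebrid API to unrestrict the original hoster link; the JSON response carries either the generated direct link or an error message.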
+ data = json_loads(self.load("http://www.alldebrid.com/service.php",
+ get={'link': pyfile.url, 'json': "true", 'pw': password}))
+
+ self.logDebug("Json data", data)
+
+ if data['error']:
+ if data['error'] == "This link isn't available on the hoster website.":
+ self.offline()
+ else:
+ self.logWarning(data['error'])
+ self.tempOffline()
+ else:
+ if pyfile.name and not pyfile.name.endswith('.tmp'):
+ pyfile.name = data['filename']
+ pyfile.size = parseFileSize(data['filesize'])
+ new_url = data['link']
+
+ if self.getConfig("https"):
+ new_url = new_url.replace("http://", "https://")
+ else:
+ new_url = new_url.replace("https://", "http://")
+
+ if new_url != pyfile.url:
+ self.logDebug("New URL: %s" % new_url)
+
+ if pyfile.name.startswith("http") or pyfile.name.startswith("Unknown"):
+ # only use when name wasn't already set
+ pyfile.name = self.getFilename(new_url)
+
+ self.download(new_url, disposition=True)
+
+ check = self.checkDownload({'error': "<title>An error occured while processing your request</title>",
+ 'empty': re.compile(r"^$")})
+
+ if check == "error":
+ self.retry(wait_time=60, reason=_("An error occured while generating link"))
+ elif check == "empty":
+ self.retry(wait_time=60, reason=_("Downloaded File was empty"))
diff --git a/pyload/plugin/hoster/BayfilesCom.py b/pyload/plugin/hoster/BayfilesCom.py
new file mode 100644
index 000000000..3a139a796
--- /dev/null
+++ b/pyload/plugin/hoster/BayfilesCom.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import time
+
+from pyload.utils import json_loads
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class BayfilesCom(SimpleHoster):
+ __name = "BayfilesCom"
+ __type = "hoster"
+ __version = "0.08"
+
+ __pattern = r'https?://(?:www\.)?bayfiles\.(com|net)/file/(?P<ID>\w+/\w+/[^/]+)'
+
+ __description = """Bayfiles.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ INFO_PATTERN = r'<p title="(?P<N>[^"]+)">[^<]*<strong>(?P<S>[\d .,]+)(?P<U>[\w^_]+)</strong></p>'
+ OFFLINE_PATTERN = r'(<p>The requested file could not be found.</p>|<title>404 Not Found</title>)'
+
+ WAIT_PATTERN = r'>Your IP [\d.]* has recently downloaded a file\. Upgrade to premium or wait (\d+) minutes\.<'
+ VARS_PATTERN = r'var vfid = (\d+);\s*var delay = (\d+);'
+ FREE_LINK_PATTERN = r'javascript:window\.location\.href = \'(.+?)\';'
+ PREMIUM_LINK_PATTERN = r'(?:<a class="highlighted-btn" href="|(?=http://s\d+\.baycdn\.com/dl/))(.*?)"'
+
+
+ def handleFree(self):
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ self.retry(wait_time=int(m.group(1)) * 60)
+
+ # Get download token
+ m = re.search(self.VARS_PATTERN, self.html)
+ if m is None:
+ self.error(_("VARS_PATTERN not found"))
+ vfid, delay = m.groups()
+
+ res = json_loads(self.load('http://bayfiles.com/ajax_download',
+ get={"_": time() * 1000,
+ "action": "startTimer",
+ "vfid": vfid}, decode=True))
+
+ if not "token" in res or not res['token']:
+ self.fail(_("No token"))
+
+ self.wait(int(delay))
+
+ self.html = self.load('http://bayfiles.com/ajax_download', get={
+ "token": res['token'],
+ "action": "getLink",
+ "vfid": vfid})
+
+ # Get final link and download
+ m = re.search(self.FREE_LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("Free link"))
+ self.startDownload(m.group(1))
+
+
+ def handlePremium(self):
+ m = re.search(self.PREMIUM_LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("Premium link"))
+ self.startDownload(m.group(1))
+
+
+ def startDownload(self, url):
+ self.logDebug("%s URL: %s" % ("Premium" if self.premium else "Free", url))
+ self.download(url)
+ # check download
+ check = self.checkDownload({
+ "waitforfreeslots": re.compile(r"<title>BayFiles</title>"),
+ "notfound": re.compile(r"<title>404 Not Found</title>")
+ })
+ if check == "waitforfreeslots":
+ self.retry(30, 5 * 60, "Wait for free slot")
+ elif check == "notfound":
+ self.retry(30, 5 * 60, "404 Not found")
+
+
+getInfo = create_getInfo(BayfilesCom)
diff --git a/pyload/plugin/hoster/BezvadataCz.py b/pyload/plugin/hoster/BezvadataCz.py
new file mode 100644
index 000000000..2f28aebf5
--- /dev/null
+++ b/pyload/plugin/hoster/BezvadataCz.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class BezvadataCz(SimpleHoster):
+ __name = "BezvadataCz"
+ __type = "hoster"
+ __version = "0.25"
+
+ __pattern = r'http://(?:www\.)?bezvadata\.cz/stahnout/.*'
+
+ __description = """BezvaData.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<p><b>Soubor: (?P<N>[^<]+)</b></p>'
+ SIZE_PATTERN = r'<li><strong>Velikost:</strong> (?P<S>[^<]+)</li>'
+ OFFLINE_PATTERN = r'<title>BezvaData \| Soubor nenalezen</title>'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def handleFree(self):
+ #download button
+ m = re.search(r'<a class="stahnoutSoubor".*?href="(.*?)"', self.html)
+ if m is None:
+ self.error(_("Page 1 URL not found"))
+ url = "http://bezvadata.cz%s" % m.group(1)
+
+ #captcha form
+ self.html = self.load(url)
+ self.checkErrors()
+ for _i in xrange(5):
+ action, inputs = self.parseHtmlForm('frm-stahnoutFreeForm')
+ if not inputs:
+ self.error(_("FreeForm"))
+
+ m = re.search(r'<img src="data:image/png;base64,(.*?)"', self.html)
+ if m is None:
+ self.error(_("Wrong captcha image"))
+
+ #captcha image is contained in html page as base64encoded data but decryptCaptcha() expects image url
+ self.load, proper_load = self.loadcaptcha, self.load
+ try:
+ inputs['captcha'] = self.decryptCaptcha(m.group(1), imgtype='png')
+ finally:
+ self.load = proper_load
+
+ if '<img src="data:image/png;base64' in self.html:
+ self.invalidCaptcha()
+ else:
+ self.correctCaptcha()
+ break
+ else:
+ self.fail(_("No valid captcha code entered"))
+
+ #download url
+ self.html = self.load("http://bezvadata.cz%s" % action, post=inputs)
+ self.checkErrors()
+ m = re.search(r'<a class="stahnoutSoubor2" href="(.*?)">', self.html)
+ if m is None:
+ self.error(_("Page 2 URL not found"))
+ url = "http://bezvadata.cz%s" % m.group(1)
+ self.logDebug("DL URL %s" % url)
+
+ #countdown
+ m = re.search(r'id="countdown">(\d\d):(\d\d)<', self.html)
+ wait_time = (int(m.group(1)) * 60 + int(m.group(2))) if m else 120
+ self.wait(wait_time, False)
+
+ self.download(url)
+
+
+ def checkErrors(self):
+ if 'images/button-download-disable.png' in self.html:
+ self.longWait(5 * 60, 24) #: parallel dl limit
+ elif '<div class="infobox' in self.html:
+ self.tempOffline()
+
+ self.info.pop('error', None)
+
+
+ def loadcaptcha(self, data, *args, **kwargs):
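+ # Temporary stand-in for load() while decryptCaptcha() runs: the "url" is already base64-encoded image data, so just decode it.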
+ return data.decode("base64")
+
+
+getInfo = create_getInfo(BezvadataCz)
diff --git a/pyload/plugin/hoster/BillionuploadsCom.py b/pyload/plugin/hoster/BillionuploadsCom.py
new file mode 100644
index 000000000..aa987a9bb
--- /dev/null
+++ b/pyload/plugin/hoster/BillionuploadsCom.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class BillionuploadsCom(XFSHoster):
+ __name = "BillionuploadsCom"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'http://(?:www\.)?billionuploads\.com/\w{12}'
+
+ __description = """Billionuploads.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "billionuploads.com"
+
+ NAME_PATTERN = r'<td class="dofir" title="(?P<N>.+?)"'
+ SIZE_PATTERN = r'<td class="dofir">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+
+getInfo = create_getInfo(BillionuploadsCom)
diff --git a/pyload/plugin/hoster/BitshareCom.py b/pyload/plugin/hoster/BitshareCom.py
new file mode 100644
index 000000000..3081451f7
--- /dev/null
+++ b/pyload/plugin/hoster/BitshareCom.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import re
+
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class BitshareCom(SimpleHoster):
+ __name = "BitshareCom"
+ __type = "hoster"
+ __version = "0.51"
+
+ __pattern = r'http://(?:www\.)?bitshare\.com/(files/(?P<id1>\w+)(/(?P<name>.*?)\.html)?|\?f=(?P<id2>\w+))'
+
+ __description = """Bitshare.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Paul King", ""),
+ ("fragonib", "fragonib[AT]yahoo[DOT]es")]
+
+
+ INFO_PATTERN = r'Downloading (?P<N>.+) - (?P<S>[\d.,]+) (?P<U>[\w^_]+)</h1>'
+ OFFLINE_PATTERN = r'(>We are sorry, but the requested file was not found in our database|>Error - File not available<|The file was deleted either by the uploader, inactivity or due to copyright claim)'
+
+ COOKIES = [("bitshare.com", "language_selection", "EN")]
+
+ AJAXID_PATTERN = r'var ajaxdl = "(.*?)";'
+ TRAFFIC_USED_UP = r'Your Traffic is used up for today. Upgrade to premium to continue!'
+
+
+ def setup(self):
+ self.multiDL = self.premium
+ self.chunkLimit = 1
+
+
+ def process(self, pyfile):
+ if self.premium:
+ self.account.relogin(self.user)
+
+ self.pyfile = pyfile
+
+ # File id
+ m = re.match(self.__pattern, pyfile.url)
+ self.file_id = max(m.group('id1'), m.group('id2'))
+ self.logDebug("File id is [%s]" % self.file_id)
+
+ # Load main page
+ self.html = self.load(pyfile.url, ref=False, decode=True)
+
+ # Check offline
+ if re.search(self.OFFLINE_PATTERN, self.html):
+ self.offline()
+
+ # Check Traffic used up
+ if re.search(self.TRAFFIC_USED_UP, self.html):
+ self.logInfo(_("Your Traffic is used up for today"))
+ self.wait(30 * 60, True)
+ self.retry()
+
+ # File name
+ m = re.match(self.__pattern, pyfile.url)
+ name1 = m.group('name') if m else None
+ m = re.search(self.INFO_PATTERN, self.html)
+ name2 = m.group('N') if m else None
+ pyfile.name = max(name1, name2)
+
+ # Ajax file id
+ self.ajaxid = re.search(self.AJAXID_PATTERN, self.html).group(1)
+ self.logDebug("File ajax id is [%s]" % self.ajaxid)
+
+ # This may either download our file or forward us to an error page
+ url = self.getDownloadUrl()
+ self.download(url)
+
+ check = self.checkDownload({"404": ">404 Not Found<", "Error": ">Error occured<"})
+ if check == "404":
+ self.retry(3, 60, 'Error 404')
+ elif check == "error":
+ self.retry(5, 5 * 60, "Bitshare host : Error occured")
+
+
+ def getDownloadUrl(self):
+ # Return location if direct download is active
+ if self.premium:
+ header = self.load(self.pyfile.url, cookies=True, just_header=True)
+ if 'location' in header:
+ return header['location']
+
+ # Get download info
+ self.logDebug("Getting download info")
+ res = self.load("http://bitshare.com/files-ajax/" + self.file_id + "/request.html",
+ post={"request": "generateID", "ajaxid": self.ajaxid})
+ self.handleErrors(res, ':')
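+ # The ajax response is colon-separated: "<filetype>:<wait seconds>:<captcha flag>"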
+ parts = res.split(":")
+ filetype = parts[0]
+ wait = int(parts[1])
+ captcha = int(parts[2])
+ self.logDebug("Download info [type: '%s', waiting: %d, captcha: %d]" % (filetype, wait, captcha))
+
+ # Waiting
+ if wait > 0:
+ self.logDebug("Waiting %d seconds." % wait)
+ if wait < 120:
+ self.wait(wait, False)
+ else:
+ self.wait(wait - 55, True)
+ self.retry()
+
+ # Resolve captcha
+ if captcha == 1:
+ self.logDebug("File is captcha protected")
+ recaptcha = ReCaptcha(self)
+
+ # Try up to 3 times
+ for i in xrange(3):
+ challenge, response = recaptcha.challenge()
+ res = self.load("http://bitshare.com/files-ajax/" + self.file_id + "/request.html",
+ post={"request" : "validateCaptcha",
+ "ajaxid" : self.ajaxid,
+ "recaptcha_challenge_field": challenge,
+ "recaptcha_response_field" : response})
+ if self.handleCaptchaErrors(res):
+ break
+
+ # Get download URL
+ self.logDebug("Getting download url")
+ res = self.load("http://bitshare.com/files-ajax/" + self.file_id + "/request.html",
+ post={"request": "getDownloadURL", "ajaxid": self.ajaxid})
+ self.handleErrors(res, '#')
+ url = res.split("#")[-1]
+
+ return url
+
+
+ def handleErrors(self, res, separator):
+ self.logDebug("Checking response [%s]" % res)
+ if "ERROR:Session timed out" in res:
+ self.retry()
+ elif "ERROR" in res:
+ msg = res.split(separator)[-1]
+ self.fail(msg)
+
+
+ def handleCaptchaErrors(self, res):
+ self.logDebug("Result of captcha resolving [%s]" % res)
+ if "SUCCESS" in res:
+ self.correctCaptcha()
+ return True
+ elif "ERROR:SESSION ERROR" in res:
+ self.retry()
+
+ self.invalidCaptcha()
+
+
+getInfo = create_getInfo(BitshareCom)
diff --git a/pyload/plugin/hoster/BoltsharingCom.py b/pyload/plugin/hoster/BoltsharingCom.py
new file mode 100644
index 000000000..a84a60aeb
--- /dev/null
+++ b/pyload/plugin/hoster/BoltsharingCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class BoltsharingCom(DeadHoster):
+ __name = "BoltsharingCom"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?boltsharing\.com/\w{12}'
+
+ __description = """Boltsharing.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(BoltsharingCom)
diff --git a/pyload/plugin/hoster/CatShareNet.py b/pyload/plugin/hoster/CatShareNet.py
new file mode 100644
index 000000000..e3d12c10e
--- /dev/null
+++ b/pyload/plugin/hoster/CatShareNet.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class CatShareNet(SimpleHoster):
+ __name = "CatShareNet"
+ __type = "hoster"
+ __version = "0.08"
+
+ __pattern = r'http://(?:www\.)?catshare\.net/\w{16}'
+
+ __description = """CatShare.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("z00nx", "z00nx0@gmail.com"),
+ ("prOq", ""),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ TEXT_ENCODING = True
+
+ INFO_PATTERN = r'<title>(?P<N>.+) \((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)<'
+ OFFLINE_PATTERN = ur'Podany plik został usunięty\s*</div>'
+
+ IP_BLOCKED_PATTERN = ur'>Nasz serwis wykrył że Twój adres IP nie pochodzi z Polski.<'
+ SECONDS_PATTERN = r'var\scount\s=\s(\d+);'
+ LINK_PATTERN = r'<form action="(.+?)" method="GET">'
+
+
+ def setup(self):
+ self.multiDL = self.premium
+ self.resumeDownload = True
+
+
+ def getFileInfo(self):
+ m = re.search(self.IP_BLOCKED_PATTERN, self.html)
+ if m:
+ self.fail(_("Only connections from Polish IP address are allowed"))
+ return super(CatShareNet, self).getFileInfo()
+
+
+ def handleFree(self):
+ m = re.search(self.SECONDS_PATTERN, self.html)
+ if m:
+ wait_time = int(m.group(1))
+ self.wait(wait_time, True)
+
+ recaptcha = ReCaptcha(self)
+
+ challenge, response = recaptcha.challenge()
+ self.html = self.load(self.pyfile.url,
+ post={'recaptcha_challenge_field': challenge,
+ 'recaptcha_response_field' : response})
+
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.invalidCaptcha()
+ self.retry(reason=_("Wrong captcha entered"))
+
+ dl_link = m.group(1)
+ self.download(dl_link, disposition=True)
+
+
+getInfo = create_getInfo(CatShareNet)
diff --git a/pyload/plugin/hoster/CloudzerNet.py b/pyload/plugin/hoster/CloudzerNet.py
new file mode 100644
index 000000000..d499b46b4
--- /dev/null
+++ b/pyload/plugin/hoster/CloudzerNet.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class CloudzerNet(DeadHoster):
+ __name = "CloudzerNet"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'https?://(?:www\.)?(cloudzer\.net/file/|clz\.to/(file/)?)\w+'
+
+ __description = """Cloudzer.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("gs", "I-_-I-_-I@web.de"),
+ ("z00nx", "z00nx0@gmail.com"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(CloudzerNet)
diff --git a/pyload/plugin/hoster/CramitIn.py b/pyload/plugin/hoster/CramitIn.py
new file mode 100644
index 000000000..d4c80c0d4
--- /dev/null
+++ b/pyload/plugin/hoster/CramitIn.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class CramitIn(XFSHoster):
+ __name = "CramitIn"
+ __type = "hoster"
+ __version = "0.07"
+
+ __pattern = r'http://(?:www\.)?cramit\.in/\w{12}'
+
+ __description = """Cramit.in hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "cramit.in"
+
+ INFO_PATTERN = r'<span class=t2>\s*(?P<N>.*?)</span>.*?<small>\s*\((?P<S>.*?)\)'
+ LINK_PATTERN = r'href="(http://cramit\.in/file_download/.*?)"'
+
+
+getInfo = create_getInfo(CramitIn)
diff --git a/pyload/plugin/hoster/CrockoCom.py b/pyload/plugin/hoster/CrockoCom.py
new file mode 100644
index 000000000..892f5a354
--- /dev/null
+++ b/pyload/plugin/hoster/CrockoCom.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class CrockoCom(SimpleHoster):
+ __name = "CrockoCom"
+ __type = "hoster"
+ __version = "0.17"
+
+ __pattern = r'http://(?:www\.)?(crocko|easy-share)\.com/\w+'
+
+ __description = """Crocko hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<span class="fz24">Download:\s*<strong>(?P<N>.*)'
+ SIZE_PATTERN = r'<span class="tip1"><span class="inner">(?P<S>[^<]+)</span></span>'
+ OFFLINE_PATTERN = r'<h1>Sorry,<br />the page you\'re looking for <br />isn\'t here.</h1>|File not found'
+
+ CAPTCHA_PATTERN = re.compile(r"u='(/file_contents/captcha/\w+)';\s*w='(\d+)';")
+
+ FORM_PATTERN = r'<form method="post" action="([^"]+)">(.*?)</form>'
+ FORM_INPUT_PATTERN = r'<input[^>]* name="?([^" ]+)"? value="?([^" ]+)"?[^>]*>'
+
+ NAME_REPLACEMENTS = [(r'<[^>]*>', '')]
+
+
+ def handleFree(self):
+ if "You need Premium membership to download this file." in self.html:
+ self.fail(_("You need Premium membership to download this file"))
+
+ for _i in xrange(5):
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m:
+ url, wait_time = 'http://crocko.com' + m.group(1), int(m.group(2))
+ self.wait(wait_time)
+ self.html = self.load(url)
+ else:
+ break
+
+ m = re.search(self.FORM_PATTERN, self.html, re.S)
+ if m is None:
+ self.error(_("FORM_PATTERN not found"))
+
+ action, form = m.groups()
+ inputs = dict(re.findall(self.FORM_INPUT_PATTERN, form))
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(5):
+ inputs['recaptcha_challenge_field'], inputs['recaptcha_response_field'] = recaptcha.challenge()
+ self.download(action, post=inputs)
+
+ check = self.checkDownload({
+ "captcha_err": recaptcha.KEY_AJAX_PATTERN
+ })
+
+ if check == "captcha_err":
+ self.invalidCaptcha()
+ else:
+ break
+ else:
+ self.fail(_("No valid captcha solution received"))
+
+
+getInfo = create_getInfo(CrockoCom)
diff --git a/pyload/plugin/hoster/CyberlockerCh.py b/pyload/plugin/hoster/CyberlockerCh.py
new file mode 100644
index 000000000..75262a805
--- /dev/null
+++ b/pyload/plugin/hoster/CyberlockerCh.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class CyberlockerCh(DeadHoster):
+ __name = "CyberlockerCh"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?cyberlocker\.ch/\w+'
+
+ __description = """Cyberlocker.ch hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(CyberlockerCh)
diff --git a/pyload/plugin/hoster/CzshareCom.py b/pyload/plugin/hoster/CzshareCom.py
new file mode 100644
index 000000000..2297450e4
--- /dev/null
+++ b/pyload/plugin/hoster/CzshareCom.py
@@ -0,0 +1,152 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://czshare.com/5278880/random.bin
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+from pyload.utils import parseFileSize
+
+
+class CzshareCom(SimpleHoster):
+ __name = "CzshareCom"
+ __type = "hoster"
+ __version = "0.95"
+
+ __pattern = r'http://(?:www\.)?(czshare|sdilej)\.(com|cz)/(\d+/|download\.php\?).*'
+
+ __description = """CZshare.com hoster plugin, now Sdilej.cz"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<div class="tab" id="parameters">\s*<p>\s*Cel. n.zev: <a href=[^>]*>(?P<N>[^<]+)</a>'
+ SIZE_PATTERN = r'<div class="tab" id="category">(?:\s*<p>[^\n]*</p>)*\s*Velikost:\s*(?P<S>[\d .,]+)(?P<U>[\w^_]+)\s*</div>'
+ OFFLINE_PATTERN = r'<div class="header clearfix">\s*<h2 class="red">'
+
+ SIZE_REPLACEMENTS = [(' ', '')]
+ URL_REPLACEMENTS = [(r'http://[^/]*/download.php\?.*?id=(\w+).*', r'http://sdilej.cz/\1/x/')]
+
+ FORCE_CHECK_TRAFFIC = True
+
+ FREE_URL_PATTERN = r'<a href="([^"]+)" class="page-download">[^>]*alt="([^"]+)" /></a>'
+ FREE_FORM_PATTERN = r'<form action="download\.php" method="post">\s*<img src="captcha\.php" id="captcha" />(.*?)</form>'
+ PREMIUM_FORM_PATTERN = r'<form action="/profi_down\.php" method="post">(.*?)</form>'
+ FORM_INPUT_PATTERN = r'<input[^>]* name="([^"]+)" value="([^"]+)"[^>]*/>'
+ MULTIDL_PATTERN = r'<p><font color=\'red\'>Z[^<]*PROFI.</font></p>'
+ USER_CREDIT_PATTERN = r'<div class="credit">\s*kredit: <strong>([\d .,]+)(\w+)</strong>\s*</div><!-- .credit -->'
+
+
+ def checkTrafficLeft(self):
+ # check if user logged in
+ m = re.search(self.USER_CREDIT_PATTERN, self.html)
+ if m is None:
+ self.account.relogin(self.user)
+ self.html = self.load(self.pyfile.url, cookies=True, decode=True)
+ m = re.search(self.USER_CREDIT_PATTERN, self.html)
+ if m is None:
+ return False
+
+ # check user credit
+ try:
+ credit = parseFileSize(m.group(1).replace(' ', ''), m.group(2))
+ self.logInfo(_("Premium download for %i KiB of Credit") % (self.pyfile.size / 1024))
+ self.logInfo(_("User %s has %i KiB left") % (self.user, credit / 1024))
+ if credit < self.pyfile.size:
+ self.logInfo(_("Not enough credit to download file: %s") % self.pyfile.name)
+ return False
+ except Exception, e:
+ # let's continue and see what happens...
+ self.logError(e)
+
+ return True
+
+
+ def handlePremium(self):
+ # parse download link
+ try:
+ form = re.search(self.PREMIUM_FORM_PATTERN, self.html, re.S).group(1)
+ inputs = dict(re.findall(self.FORM_INPUT_PATTERN, form))
+ except Exception, e:
+ self.logError(e)
+ self.resetAccount()
+
+ # download the file, destination is determined by pyLoad
+ self.download("http://sdilej.cz/profi_down.php", post=inputs, disposition=True)
+ self.checkDownloadedFile()
+
+
+ def handleFree(self):
+ # get free url
+ m = re.search(self.FREE_URL_PATTERN, self.html)
+ if m is None:
+ self.error(_("FREE_URL_PATTERN not found"))
+ parsed_url = "http://sdilej.cz" + m.group(1)
+ self.logDebug("PARSED_URL:" + parsed_url)
+
+ # get download ticket and parse html
+ self.html = self.load(parsed_url, cookies=True, decode=True)
+ if re.search(self.MULTIDL_PATTERN, self.html):
+ self.longWait(5 * 60, 12)
+
+ try:
+ form = re.search(self.FREE_FORM_PATTERN, self.html, re.S).group(1)
+ inputs = dict(re.findall(self.FORM_INPUT_PATTERN, form))
+ self.pyfile.size = int(inputs['size'])
+ except Exception, e:
+ self.logError(e)
+ self.error(_("Form"))
+
+ # get and decrypt captcha
+ captcha_url = 'http://sdilej.cz/captcha.php'
+ for _i in xrange(5):
+ inputs['captchastring2'] = self.decryptCaptcha(captcha_url)
+ self.html = self.load(parsed_url, cookies=True, post=inputs, decode=True)
+ if u"<li>ZadanÃœ ověřovací kód nesouhlasí!</li>" in self.html:
+ self.invalidCaptcha()
+ elif re.search(self.MULTIDL_PATTERN, self.html):
+ self.longWait(5 * 60, 12)
+ else:
+ self.correctCaptcha()
+ break
+ else:
+ self.fail(_("No valid captcha code entered"))
+
+ m = re.search("countdown_number = (\d+);", self.html)
+ self.setWait(int(m.group(1)) if m else 50)
+
+ # download the file, destination is determined by pyLoad
+ self.logDebug("WAIT URL", self.req.lastEffectiveURL)
+ m = re.search("free_wait.php\?server=(.*?)&(.*)", self.req.lastEffectiveURL)
+ if m is None:
+ self.error(_("Download URL not found"))
+
+ url = "http://%s/download.php?%s" % (m.group(1), m.group(2))
+
+ self.wait()
+ self.download(url)
+ self.checkDownloadedFile()
+
+
+ def checkDownloadedFile(self):
+ # check download
+ check = self.checkDownload({
+ "temp_offline": re.compile(r"^Soubor je do.*asn.* nedostupn.*$"),
+ "credit": re.compile(r"^Nem.*te dostate.*n.* kredit.$"),
+ "multi_dl": re.compile(self.MULTIDL_PATTERN),
+ "captcha_err": "<li>ZadanÃœ ověřovací kód nesouhlasí!</li>"
+ })
+
+ if check == "temp_offline":
+ self.fail(_("File not available - try later"))
+ if check == "credit":
+ self.resetAccount()
+ elif check == "multi_dl":
+ self.longWait(5 * 60, 12)
+ elif check == "captcha_err":
+ self.invalidCaptcha()
+ self.retry()
+
+
+getInfo = create_getInfo(CzshareCom)
diff --git a/pyload/plugin/hoster/DailymotionCom.py b/pyload/plugin/hoster/DailymotionCom.py
new file mode 100644
index 000000000..d05f2f74c
--- /dev/null
+++ b/pyload/plugin/hoster/DailymotionCom.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.datatype.File import statusMap
+from pyload.utils import json_loads
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Hoster import Hoster
+
+
+def getInfo(urls):
+ result = []
+ regex = re.compile(DailymotionCom.__pattern)
+ apiurl = "https://api.dailymotion.com/video/%s"
+ request = {"fields": "access_error,status,title"}
+
+ for url in urls:
+ id = regex.match(url).group("ID")
+ page = getURL(apiurl % id, get=request)
+ info = json_loads(page)
+
+ name = info['title'] + ".mp4" if "title" in info else url
+
+ if "error" in info or info['access_error']:
+ status = "offline"
+ else:
+ status = info['status']
+ if status in ("ready", "published"):
+ status = "online"
+ elif status in ("waiting", "processing"):
+ status = "temp. offline"
+ else:
+ status = "offline"
+
+ result.append((name, 0, statusMap[status], url))
+
+ return result
+
+
+class DailymotionCom(Hoster):
+ __name = "DailymotionCom"
+ __type = "hoster"
+ __version = "0.20"
+
+ __pattern = r'https?://(?:www\.)?dailymotion\.com/.*video/(?P<ID>[\w^_]+)'
+ __config = [("quality", "Lowest;LD 144p;LD 240p;SD 384p;HQ 480p;HD 720p;HD 1080p;Highest", "Quality", "Highest")]
+
+ __description = """Dailymotion.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def getStreams(self):
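+ # Collect the available H264 stream links with their (width, height) from the embed page, sorted by height, then width.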
+ streams = []
+
+ for result in re.finditer(r"\"(?P<URL>http:\\/\\/www.dailymotion.com\\/cdn\\/H264-(?P<QF>.*?)\\.*?)\"",
+ self.html):
+ url = result.group("URL")
+ qf = result.group("QF")
+
+ link = url.replace("\\", "")
+ quality = tuple(int(x) for x in qf.split("x"))
+
+ streams.append((quality, link))
+
+ return sorted(streams, key=lambda x: x[0][::-1])
+
+
+ def getQuality(self):
+ q = self.getConfig("quality")
+
+ if q == "Lowest":
+ quality = 0
+ elif q == "Highest":
+ quality = -1
+ else:
+ quality = int(q.rsplit(" ")[1][:-1])
+
+ return quality
+
+
+ def getLink(self, streams, quality):
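+ # Pick the highest-resolution stream whose height does not exceed the requested quality; 0 and -1 select the lowest and highest stream respectively.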
+ if quality > 0:
+ for x, s in reversed([item for item in enumerate(streams)]):
+ qf = s[0][1]
+ if qf <= quality:
+ idx = x
+ break
+ else:
+ idx = 0
+ else:
+ idx = quality
+
+ s = streams[idx]
+
+ self.logInfo(_("Download video quality %sx%s") % s[0])
+
+ return s[1]
+
+
+ def checkInfo(self, pyfile):
+ pyfile.name, pyfile.size, pyfile.status, pyfile.url = getInfo([pyfile.url])[0]
+
+ if pyfile.status == 1:
+ self.offline()
+
+ elif pyfile.status == 6:
+ self.tempOffline()
+
+
+ def process(self, pyfile):
+ self.checkInfo(pyfile)
+
+ id = re.match(self.__pattern, pyfile.url).group("ID")
+ self.html = self.load("http://www.dailymotion.com/embed/video/" + id, decode=True)
+
+ streams = self.getStreams()
+ quality = self.getQuality()
+
+ self.download(self.getLink(streams, quality))
diff --git a/pyload/plugin/hoster/DataHu.py b/pyload/plugin/hoster/DataHu.py
new file mode 100644
index 000000000..61456de61
--- /dev/null
+++ b/pyload/plugin/hoster/DataHu.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://data.hu/get/6381232/random.bin
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class DataHu(SimpleHoster):
+ __name = "DataHu"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?data\.hu/get/\w+'
+
+ __description = """Data.hu hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("crash", ""),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ INFO_PATTERN = ur'<title>(?P<N>.*) \((?P<S>[^)]+)\) let\xf6lt\xe9se</title>'
+ OFFLINE_PATTERN = ur'Az adott f\xe1jl nem l\xe9tezik'
+ LINK_PATTERN = r'<div class="download_box_button"><a href="([^"]+)">'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = self.premium
+
+
+ def handleFree(self):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("LINK_PATTERN not found"))
+
+ self.download(m.group(1), disposition=True)
+
+
+getInfo = create_getInfo(DataHu)
diff --git a/pyload/plugin/hoster/DataportCz.py b/pyload/plugin/hoster/DataportCz.py
new file mode 100644
index 000000000..a5b03afc3
--- /dev/null
+++ b/pyload/plugin/hoster/DataportCz.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class DataportCz(SimpleHoster):
+ __name = "DataportCz"
+ __type = "hoster"
+ __version = "0.40"
+
+ __pattern = r'http://(?:www\.)?dataport\.cz/file/(.*)'
+
+ __description = """Dataport.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<span itemprop="name">(?P<N>[^<]+)</span>'
+ SIZE_PATTERN = r'<td class="fil">Velikost</td>\s*<td>(?P<S>[^<]+)</td>'
+ OFFLINE_PATTERN = r'<h2>Soubor nebyl nalezen</h2>'
+
+ CAPTCHA_PATTERN = r'<section id="captcha_bg">\s*<img src="(.*?)"'
+ FREE_SLOTS_PATTERN = ur'Počet volných slotů: <span class="darkblue">(\d+)</span><br />'
+
+
+ def handleFree(self):
+ captchas = {"1": "jkeG", "2": "hMJQ", "3": "vmEK", "4": "ePQM", "5": "blBd"}
+
+ for _i in xrange(60):
+ action, inputs = self.parseHtmlForm('free_download_form')
+ self.logDebug(action, inputs)
+ if not action or not inputs:
+ self.error(_("free_download_form"))
+
+ if "captchaId" in inputs and inputs['captchaId'] in captchas:
+ inputs['captchaCode'] = captchas[inputs['captchaId']]
+ else:
+ self.error(_("captcha"))
+
+ self.html = self.download("http://www.dataport.cz%s" % action, post=inputs)
+
+ check = self.checkDownload({"captcha": 'alert("\u0160patn\u011b opsan\u00fd k\u00f3d z obr\u00e1zu");',
+ "slot": 'alert("Je n\u00e1m l\u00edto, ale moment\u00e1ln\u011b nejsou'})
+ if check == "captcha":
+ self.error(_("invalid captcha"))
+ elif check == "slot":
+ self.logDebug("No free slots - wait 60s and retry")
+ self.wait(60, False)
+ self.html = self.load(self.pyfile.url, decode=True)
+ continue
+ else:
+ break
+
+
+getInfo = create_getInfo(DataportCz)
diff --git a/pyload/plugin/hoster/DateiTo.py b/pyload/plugin/hoster/DateiTo.py
new file mode 100644
index 000000000..75fca8829
--- /dev/null
+++ b/pyload/plugin/hoster/DateiTo.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class DateiTo(SimpleHoster):
+ __name = "DateiTo"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?datei\.to/datei/(?P<ID>\w+)\.html'
+
+ __description = """Datei.to hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'Dateiname:</td>\s*<td colspan="2"><strong>(?P<N>.*?)</'
+ SIZE_PATTERN = r'Dateigr&ouml;&szlig;e:</td>\s*<td colspan="2">(?P<S>.*?)</'
+ OFFLINE_PATTERN = r'>Datei wurde nicht gefunden<|>Bitte wähle deine Datei aus... <'
+
+ WAIT_PATTERN = r'countdown\({seconds: (\d+)'
+ MULTIDL_PATTERN = r'>Du lädst bereits eine Datei herunter<'
+
+ DATA_PATTERN = r'url: "(.*?)", data: "(.*?)",'
+
+
+ def handleFree(self):
+ url = 'http://datei.to/ajax/download.php'
+ data = {'P': 'I', 'ID': self.info['pattern']['ID']}
+ recaptcha = ReCaptcha(self)
+
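+ # Walk the multi-step ajax chain: each response yields the next url/data pair (solving ReCaptcha when required) until step "IV" returns the final download link.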
+ for _i in xrange(10):
+ self.logDebug("URL", url, "POST", data)
+ self.html = self.load(url, post=data)
+ self.checkErrors()
+
+ if url.endswith('download.php') and 'P' in data:
+ if data['P'] == 'I':
+ self.doWait()
+
+ elif data['P'] == 'IV':
+ break
+
+ m = re.search(self.DATA_PATTERN, self.html)
+ if m is None:
+ self.error(_("data"))
+ url = 'http://datei.to/' + m.group(1)
+ data = dict(x.split('=') for x in m.group(2).split('&'))
+
+ if url.endswith('recaptcha.php'):
+ data['recaptcha_challenge_field'], data['recaptcha_response_field'] = recaptcha.challenge()
+ else:
+ self.fail(_("Too bad..."))
+
+ self.download(self.html)
+
+
+ def checkErrors(self):
+ m = re.search(self.MULTIDL_PATTERN, self.html)
+ if m:
+ m = re.search(self.WAIT_PATTERN, self.html)
+ wait_time = int(m.group(1)) if m else 30
+
+ errmsg = self.info['error'] = _("Parallel downloads")
+ self.retry(wait_time=wait_time, reason=errmsg)
+
+ self.info.pop('error', None)
+
+
+ def doWait(self):
+ m = re.search(self.WAIT_PATTERN, self.html)
+ wait_time = int(m.group(1)) if m else 30
+
+ self.load('http://datei.to/ajax/download.php', post={'P': 'Ads'})
+ self.wait(wait_time, False)
+
+
+getInfo = create_getInfo(DateiTo)
diff --git a/pyload/plugin/hoster/DdlstorageCom.py b/pyload/plugin/hoster/DdlstorageCom.py
new file mode 100644
index 000000000..616a9595b
--- /dev/null
+++ b/pyload/plugin/hoster/DdlstorageCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class DdlstorageCom(DeadHoster):
+ __name = "DdlstorageCom"
+ __type = "hoster"
+ __version = "1.02"
+
+ __pattern = r'https?://(?:www\.)?ddlstorage\.com/\w+'
+
+ __description = """DDLStorage.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(DdlstorageCom)
diff --git a/pyload/plugin/hoster/DebridItaliaCom.py b/pyload/plugin/hoster/DebridItaliaCom.py
new file mode 100644
index 000000000..134309408
--- /dev/null
+++ b/pyload/plugin/hoster/DebridItaliaCom.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+from pyload.plugin.internal.SimpleHoster import replace_patterns
+
+
+class DebridItaliaCom(Hoster):
+ __name = "DebridItaliaCom"
+ __type = "hoster"
+ __version = "0.07"
+
+ __pattern = r'http://s\d+\.debriditalia\.com/dl/\d+'
+
+ __description = """Debriditalia.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ URL_REPLACEMENTS = [(r'(/dl/\d+)$', r'\1/')]
+
+
+ def setup(self):
+ self.chunkLimit = -1
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ pyfile.url = replace_patterns(pyfile.url, self.URL_REPLACEMENTS)
+
+ if re.match(self.__pattern, pyfile.url):
+ link = pyfile.url
+
+ elif not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "DebridItalia")
+ self.fail(_("No DebridItalia account provided"))
+
+ else:
+ html = self.load("http://www.debriditalia.com/api.php", get={'generate': "", 'link': pyfile.url})
+
+ if "ERROR" in html:
+ self.fail(re.search(r'ERROR:(.*)', html).group(1).strip())
+
+ link = html.strip()
+
+ self.download(link, disposition=True)
+
+ check = self.checkDownload({'empty': re.compile(r'^$')})
+
+ if check == "empty":
+ self.retry(5, 2 * 60, "Empty file downloaded")
diff --git a/pyload/plugin/hoster/DepositfilesCom.py b/pyload/plugin/hoster/DepositfilesCom.py
new file mode 100644
index 000000000..8dd485e92
--- /dev/null
+++ b/pyload/plugin/hoster/DepositfilesCom.py
@@ -0,0 +1,123 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urllib import unquote
+
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class DepositfilesCom(SimpleHoster):
+ __name = "DepositfilesCom"
+ __type = "hoster"
+ __version = "0.51"
+
+ __pattern = r'https?://(?:www\.)?(depositfiles\.com|dfiles\.(eu|ru))(/\w{1,3})?/files/(?P<ID>\w+)'
+
+ __description = """Depositfiles.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org"),
+ ("zoidberg", "zoidberg@mujmail.cz"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'<script type="text/javascript">eval\( unescape\(\'(?P<N>.*?)\''
+ SIZE_PATTERN = r': <b>(?P<S>[\d.,]+)&nbsp;(?P<U>[\w^_]+)</b>'
+ OFFLINE_PATTERN = r'<span class="html_download_api-not_exists"></span>'
+
+ NAME_REPLACEMENTS = [(r'\%u([0-9A-Fa-f]{4})', lambda m: unichr(int(m.group(1), 16))),
+ (r'.*<b title="(?P<N>[^"]+).*', "\g<N>")]
+ URL_REPLACEMENTS = [(__pattern + ".*", "https://dfiles.eu/files/\g<ID>")]
+
+ COOKIES = [("dfiles.eu", "lang_current", "en")]
+
+ FREE_LINK_PATTERN = r'<form id="downloader_file_form" action="(http://.+?\.(dfiles\.eu|depositfiles\.com)/.+?)" method="post"'
+ PREMIUM_LINK_PATTERN = r'class="repeat"><a href="(.+?)"'
+ PREMIUM_MIRROR_PATTERN = r'class="repeat_mirror"><a href="(.+?)"'
+
+
+ def handleFree(self):
+ self.html = self.load(self.pyfile.url, post={"gateway_result": "1"}, cookies=True)
+
+ if re.search(r'File is checked, please try again in a minute.', self.html) is not None:
+ self.logInfo(_("The file is being checked. Waiting 1 minute"))
+ self.retry(wait_time=60)
+
+ wait = re.search(r'html_download_api-limit_interval\">(\d+)</span>', self.html)
+ if wait:
+ wait_time = int(wait.group(1))
+ self.logInfo(_("Traffic used up. Waiting %d seconds") % wait_time)
+ self.wait(wait_time, True)
+ self.retry()
+
+ wait = re.search(r'>Try in (\d+) minutes or use GOLD account', self.html)
+ if wait:
+ wait_time = int(wait.group(1))
+ self.logInfo(_("All free slots occupied. Waiting %d minutes") % wait_time)
+ self.setWait(wait_time * 60, False)
+
+ wait = re.search(r'Please wait (\d+) sec', self.html)
+ if wait:
+ self.setWait(int(wait.group(1)))
+
+ m = re.search(r"var fid = '(\w+)';", self.html)
+ if m is None:
+ self.retry(wait_time=5)
+ params = {'fid': m.group(1)}
+ self.logDebug("FID: %s" % params['fid'])
+
+ self.wait()
+ recaptcha = ReCaptcha(self)
+ captcha_key = recaptcha.detect_key()
+ if captcha_key is None:
+ self.error(_("ReCaptcha key not found"))
+
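+ # Up to five attempts: request get_file.php, solve the ReCaptcha when prompted, then extract the free download link from the form.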
+ for _i in xrange(5):
+ self.html = self.load("https://dfiles.eu/get_file.php", get=params)
+
+ if '<input type=button value="Continue" onclick="check_recaptcha' in self.html:
+ if 'response' in params:
+ self.invalidCaptcha()
+ params['challenge'], params['response'] = recaptcha.challenge(captcha_key)
+ self.logDebug(params)
+ continue
+
+ m = re.search(self.FREE_LINK_PATTERN, self.html)
+ if m:
+ if 'response' in params:
+ self.correctCaptcha()
+ link = unquote(m.group(1))
+ self.logDebug("LINK: %s" % link)
+ break
+ else:
+ self.error(_("Download link"))
+ else:
+ self.fail(_("No valid captcha response received"))
+
+ try:
+ self.download(link, disposition=True)
+ except Exception:
+ self.retry(wait_time=60)
+
+
+ def handlePremium(self):
+ if '<span class="html_download_api-gold_traffic_limit">' in self.html:
+ self.logWarning(_("Download limit reached"))
+ self.retry(25, 60 * 60, "Download limit reached")
+ elif 'onClick="show_gold_offer' in self.html:
+ self.account.relogin(self.user)
+ self.retry()
+ else:
+ link = re.search(self.PREMIUM_LINK_PATTERN, self.html)
+ mirror = re.search(self.PREMIUM_MIRROR_PATTERN, self.html)
+ if link:
+ dlink = link.group(1)
+ elif mirror:
+ dlink = mirror.group(1)
+ else:
+ self.error(_("No direct download link or mirror found"))
+ self.download(dlink, disposition=True)
+
+
+getInfo = create_getInfo(DepositfilesCom)
diff --git a/pyload/plugin/hoster/DevhostSt.py b/pyload/plugin/hoster/DevhostSt.py
new file mode 100644
index 000000000..42a6e27f1
--- /dev/null
+++ b/pyload/plugin/hoster/DevhostSt.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://d-h.st/mM8
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class DevhostSt(SimpleHoster):
+ __name = "DevhostSt"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?d-h\.st/(?!users/)\w{3}'
+
+ __description = """d-h.st hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
+
+
+ NAME_PATTERN = r'>Filename:</span> <div title="(?P<N>.+?)"'
+ SIZE_PATTERN = r'>Size:</span> (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ OFFLINE_PATTERN = r'>File Not Found<'
+ LINK_PATTERN = r'id="downloadfile" href="(.+?)"'
+
+
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = 1
+
+
+ def handleFree(self):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("Download link not found"))
+
+ dl_url = m.group(1)
+ self.download(dl_url, disposition=True)
+
+ check = self.checkDownload({'html': re.compile("html")})
+ if check == "html":
+ self.error(_("Downloaded file is an html page"))
+
+
+getInfo = create_getInfo(DevhostSt)
diff --git a/pyload/plugin/hoster/DlFreeFr.py b/pyload/plugin/hoster/DlFreeFr.py
new file mode 100644
index 000000000..3ebaa6c22
--- /dev/null
+++ b/pyload/plugin/hoster/DlFreeFr.py
@@ -0,0 +1,136 @@
+# -*- coding: utf-8 -*-
+
+import pycurl
+import re
+
+from pyload.network.Browser import Browser
+from pyload.network.CookieJar import CookieJar
+from pyload.plugin.internal.captcha import AdYouLike
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo, replace_patterns
+from pyload.utils import json_loads
+
+
+class CustomBrowser(Browser):
+
+ def __init__(self, bucket=None, options={}):
+ Browser.__init__(self, bucket, options)
+
+
+ def load(self, *args, **kwargs):
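+ # Do not follow redirects on POST so the caller can read the 302 response headers (cookie and location); GET requests follow redirects as usual.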
+ post = kwargs.get("post")
+
+ if post is None and len(args) > 2:
+ post = args[2]
+
+ if post:
+ self.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
+ self.http.c.setopt(pycurl.POST, 1)
+ self.http.c.setopt(pycurl.CUSTOMREQUEST, "POST")
+ else:
+ self.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
+ self.http.c.setopt(pycurl.POST, 0)
+ self.http.c.setopt(pycurl.CUSTOMREQUEST, "GET")
+
+ return Browser.load(self, *args, **kwargs)
+
+
+class DlFreeFr(SimpleHoster):
+ __name = "DlFreeFr"
+ __type = "hoster"
+ __version = "0.26"
+
+ __pattern = r'http://(?:www\.)?dl\.free\.fr/(\w+|getfile\.pl\?file=/\w+)'
+
+ __description = """Dl.free.fr hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("the-razer", "daniel_ AT gmx DOT net"),
+ ("zoidberg", "zoidberg@mujmail.cz"),
+ ("Toilal", "toilal.dev@gmail.com")]
+
+
+ NAME_PATTERN = r'Fichier:</td>\s*<td[^>]*>(?P<N>[^>]*)</td>'
+ SIZE_PATTERN = r'Taille:</td>\s*<td[^>]*>(?P<S>[\d.,]+\w)o'
+ OFFLINE_PATTERN = r'Erreur 404 - Document non trouv|Fichier inexistant|Le fichier demand&eacute; n\'a pas &eacute;t&eacute; trouv&eacute;'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.limitDL = 5
+ self.chunkLimit = 1
+
+
+ def init(self):
+ factory = self.core.requestFactory
+ self.req = CustomBrowser(factory.bucket, factory.getOptions())
+
+
+ def process(self, pyfile):
+ pyfile.url = replace_patterns(pyfile.url, self.URL_REPLACEMENTS)
+ valid_url = pyfile.url
+ headers = self.load(valid_url, just_header=True)
+
+ if headers.get('code') == 302:
+ valid_url = headers.get('location')
+ headers = self.load(valid_url, just_header=True)
+
+ if headers.get('code') == 200:
+ content_type = headers.get('content-type')
+ if content_type and content_type.startswith("text/html"):
+ # Indirect access to the requested file, via a web page providing it (captcha)
+ self.html = self.load(valid_url)
+ self.handleFree()
+ else:
+ # Direct access to requested file for users using free.fr as Internet Service Provider.
+ self.download(valid_url, disposition=True)
+ elif headers.get('code') == 404:
+ self.offline()
+ else:
+ self.fail(_("Invalid return code: ") + str(headers.get('code')))
+
+
+ def handleFree(self):
+ action, inputs = self.parseHtmlForm('action="getfile.pl"')
+
+ adyoulike = AdYouLike(self)
+ inputs.update(adyoulike.challenge())
+
+ self.load("http://dl.free.fr/getfile.pl", post=inputs)
+ headers = self.getLastHeaders()
+ if headers.get("code") == 302 and "set-cookie" in headers and "location" in headers:
+ m = re.search("(.*?)=(.*?); path=(.*?); domain=(.*?)", headers.get("set-cookie"))
+ cj = CookieJar(self.__name)
+ if m:
+ cj.setCookie(m.group(4), m.group(1), m.group(2), m.group(3))
+ else:
+ self.fail(_("Cookie error"))
+ location = headers.get("location")
+ self.req.setCookieJar(cj)
+ self.download(location, disposition=True)
+ else:
+ self.fail(_("Invalid response"))
+
+
+ def getLastHeaders(self):
+ #parse header
+ header = {"code": self.req.code}
+ for line in self.req.http.header.splitlines():
+ line = line.strip()
+ if not line or ":" not in line:
+ continue
+
+ key, none, value = line.partition(":")
+ key = key.lower().strip()
+ value = value.strip()
+
+ if key in header:
+ if type(header[key]) == list:
+ header[key].append(value)
+ else:
+ header[key] = [header[key], value]
+ else:
+ header[key] = value
+ return header
+
+
+getInfo = create_getInfo(DlFreeFr)
diff --git a/pyload/plugin/hoster/DodanePl.py b/pyload/plugin/hoster/DodanePl.py
new file mode 100644
index 000000000..8e543d823
--- /dev/null
+++ b/pyload/plugin/hoster/DodanePl.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class DodanePl(DeadHoster):
+ __name = "DodanePl"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?dodane\.pl/file/\d+'
+
+ __description = """Dodane.pl hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("z00nx", "z00nx0@gmail.com")]
+
+
+getInfo = create_getInfo(DodanePl)
diff --git a/pyload/plugin/hoster/DuploadOrg.py b/pyload/plugin/hoster/DuploadOrg.py
new file mode 100644
index 000000000..f5b68d3ef
--- /dev/null
+++ b/pyload/plugin/hoster/DuploadOrg.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class DuploadOrg(DeadHoster):
+ __name = "DuploadOrg"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?dupload\.org/\w{12}'
+
+ __description = """Dupload.grg hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(DuploadOrg)
diff --git a/pyload/plugin/hoster/EasybytezCom.py b/pyload/plugin/hoster/EasybytezCom.py
new file mode 100644
index 000000000..ee6c1621b
--- /dev/null
+++ b/pyload/plugin/hoster/EasybytezCom.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class EasybytezCom(XFSHoster):
+ __name = "EasybytezCom"
+ __type = "hoster"
+ __version = "0.23"
+
+ __pattern = r'http://(?:www\.)?easybytez\.com/\w{12}'
+
+ __description = """Easybytez.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ HOSTER_DOMAIN = "easybytez.com"
+
+ OFFLINE_PATTERN = r'>File not available'
+
+ LINK_PATTERN = r'(http://(\w+\.(easybytez|easyload|ezbytez|zingload)\.(com|to)|\d+\.\d+\.\d+\.\d+)/files/\d+/\w+/.+?)["\'<]'
+
+
+getInfo = create_getInfo(EasybytezCom)
diff --git a/pyload/plugin/hoster/EdiskCz.py b/pyload/plugin/hoster/EdiskCz.py
new file mode 100644
index 000000000..9e9cafa9d
--- /dev/null
+++ b/pyload/plugin/hoster/EdiskCz.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class EdiskCz(SimpleHoster):
+ __name = "EdiskCz"
+ __type = "hoster"
+ __version = "0.22"
+
+ __pattern = r'http://(?:www\.)?edisk\.(cz|sk|eu)/(stahni|sk/stahni|en/download)/.*'
+
+ __description = """Edisk.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ INFO_PATTERN = r'<span class="fl" title="(?P<N>[^"]+)">\s*.*?\((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)</h1></span>'
+ OFFLINE_PATTERN = r'<h3>This file does not exist due to one of the following:</h3><ul><li>'
+
+ ACTION_PATTERN = r'/en/download/(\d+/.*\.html)'
+ LINK_PATTERN = r'http://.*edisk\.cz.*\.html'
+
+
+ def setup(self):
+ self.multiDL = False
+
+
+ def process(self, pyfile):
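+ # Switch to the English download page, open the slow-download page, then POST the action value to /x-download/ to obtain the direct link.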
+ url = re.sub("/(stahni|sk/stahni)/", "/en/download/", pyfile.url)
+
+ self.logDebug("URL:" + url)
+
+ m = re.search(self.ACTION_PATTERN, url)
+ if m is None:
+ self.error(_("ACTION_PATTERN not found"))
+ action = m.group(1)
+
+ self.html = self.load(url, decode=True)
+ self.getFileInfo()
+
+ self.html = self.load(re.sub("/en/download/", "/en/download-slow/", url))
+
+ url = self.load(re.sub("/en/download/", "/x-download/", url), post={
+ "action": action
+ })
+
+ if not re.match(self.LINK_PATTERN, url):
+ self.fail(_("Unexpected server response"))
+
+ self.download(url)
+
+
+getInfo = create_getInfo(EdiskCz)
diff --git a/pyload/plugin/hoster/EgoFilesCom.py b/pyload/plugin/hoster/EgoFilesCom.py
new file mode 100644
index 000000000..d087f1c71
--- /dev/null
+++ b/pyload/plugin/hoster/EgoFilesCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class EgoFilesCom(DeadHoster):
+ __name = "EgoFilesCom"
+ __type = "hoster"
+ __version = "0.16"
+
+ __pattern = r'https?://(?:www\.)?egofiles\.com/\w+'
+
+ __description = """Egofiles.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(EgoFilesCom)
diff --git a/pyload/plugin/hoster/EnteruploadCom.py b/pyload/plugin/hoster/EnteruploadCom.py
new file mode 100644
index 000000000..992eeec25
--- /dev/null
+++ b/pyload/plugin/hoster/EnteruploadCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class EnteruploadCom(DeadHoster):
+ __name = "EnteruploadCom"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?enterupload\.com/\w+'
+
+ __description = """EnterUpload.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(EnteruploadCom)
diff --git a/pyload/plugin/hoster/EpicShareNet.py b/pyload/plugin/hoster/EpicShareNet.py
new file mode 100644
index 000000000..93620a384
--- /dev/null
+++ b/pyload/plugin/hoster/EpicShareNet.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class EpicShareNet(DeadHoster):
+ __name = "EpicShareNet"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'https?://(?:www\.)?epicshare\.net/\w{12}'
+
+ __description = """EpicShare.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
+
+
+getInfo = create_getInfo(EpicShareNet)
diff --git a/pyload/plugin/hoster/EuroshareEu.py b/pyload/plugin/hoster/EuroshareEu.py
new file mode 100644
index 000000000..7048d3553
--- /dev/null
+++ b/pyload/plugin/hoster/EuroshareEu.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class EuroshareEu(SimpleHoster):
+ __name = "EuroshareEu"
+ __type = "hoster"
+ __version = "0.26"
+
+ __pattern = r'http://(?:www\.)?euroshare\.(eu|sk|cz|hu|pl)/file/.*'
+
+ __description = """Euroshare.eu hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ INFO_PATTERN = r'<span style="float: left;"><strong>(?P<N>.+?)</strong> \((?P<S>.+?)\)</span>'
+ OFFLINE_PATTERN = ur'<h2>S.bor sa nena.iel</h2>|Požadovaná stránka neexistuje!'
+
+ FREE_URL_PATTERN = r'<a href="(/file/\d+/[^/]*/download/)"><div class="downloadButton"'
+ ERR_PARDL_PATTERN = r'<h2>Prebieha s.ahovanie</h2>|<p>Naraz je z jednej IP adresy mo.n. s.ahova. iba jeden s.bor'
+ ERR_NOT_LOGGED_IN_PATTERN = r'href="/customer-zone/login/"'
+
+ URL_REPLACEMENTS = [(r"(http://[^/]*\.)(sk|cz|hu|pl)/", r"\1eu/")]
+
+
+ def setup(self):
+ self.multiDL = self.resumeDownload = self.premium
+ self.req.setOption("timeout", 120)
+
+
+ def handlePremium(self):
+ if self.ERR_NOT_LOGGED_IN_PATTERN in self.html:
+ self.account.relogin(self.user)
+ self.retry(reason=_("User not logged in"))
+
+ self.download(self.pyfile.url.rstrip('/') + "/download/")
+
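+ # if the "download" turns out to be a JSON error body, the session or access token has most likely expired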
+ check = self.checkDownload({"login": re.compile(self.ERR_NOT_LOGGED_IN_PATTERN),
+ "json": re.compile(r'\{"status":"error".*?"message":"(.*?)"')})
+ if check == "login" or (check == "json" and self.lastCheck.group(1) == "Access token expired"):
+ self.account.relogin(self.user)
+ self.retry(reason=_("Access token expired"))
+ elif check == "json":
+ self.fail(self.lastCheck.group(1))
+
+
+ def handleFree(self):
+ if re.search(self.ERR_PARDL_PATTERN, self.html) is not None:
+ self.longWait(5 * 60, 12)
+
+ m = re.search(self.FREE_URL_PATTERN, self.html)
+ if m is None:
+ self.error(_("FREE_URL_PATTERN not found"))
+ parsed_url = "http://euroshare.eu%s" % m.group(1)
+ self.logDebug("URL", parsed_url)
+ self.download(parsed_url, disposition=True)
+
+ check = self.checkDownload({"multi_dl": re.compile(self.ERR_PARDL_PATTERN)})
+ if check == "multi_dl":
+ self.longWait(5 * 60, 12)
+
+
+getInfo = create_getInfo(EuroshareEu)
diff --git a/pyload/plugin/hoster/ExtabitCom.py b/pyload/plugin/hoster/ExtabitCom.py
new file mode 100644
index 000000000..fc99ddf31
--- /dev/null
+++ b/pyload/plugin/hoster/ExtabitCom.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.utils import json_loads
+
+from pyload.plugin.hoster.UnrestrictLi import secondsToMidnight
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class ExtabitCom(SimpleHoster):
+ __name = "ExtabitCom"
+ __type = "hoster"
+ __version = "0.62"
+
+ __pattern = r'http://(?:www\.)?extabit\.com/(file|go|fid)/(?P<ID>\w+)'
+
+ __description = """Extabit.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<th>File:</th>\s*<td class="col-fileinfo">\s*<div title="(?P<N>[^"]+)">'
+ SIZE_PATTERN = r'<th>Size:</th>\s*<td class="col-fileinfo">(?P<S>[^<]+)</td>'
+ OFFLINE_PATTERN = r'>File not found<'
+ TEMP_OFFLINE_PATTERN = r'>(File is temporary unavailable|No download mirror)<'
+
+ LINK_PATTERN = r'[\'"](http://guest\d+\.extabit\.com/\w+/.*?)[\'"]'
+
+
+ def handleFree(self):
+ if r">Only premium users can download this file" in self.html:
+ self.fail(_("Only premium users can download this file"))
+
+ m = re.search(r"Next free download from your ip will be available in <b>(\d+)\s*minutes", self.html)
+ if m:
+ self.wait(int(m.group(1)) * 60, True)
+ elif "The daily downloads limit from your IP is exceeded" in self.html:
+ self.logWarning(_("You have reached your daily downloads limit for today"))
+ self.wait(secondsToMidnight(gmt=2), True)
+
+ self.logDebug("URL: " + self.req.http.lastEffectiveURL)
+ m = re.match(self.__pattern, self.req.http.lastEffectiveURL)
+ fileID = m.group('ID') if m else self.info('ID')
+
+ m = re.search(r'recaptcha/api/challenge\?k=(\w+)', self.html)
+ if m:
+ recaptcha = ReCaptcha(self)
+ captcha_key = m.group(1)
+
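+ # up to five captcha attempts; the JSON reply contains "ok" on success and an "href" suffix for the download page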
+ for _i in xrange(5):
+ get_data = {"type": "recaptcha"}
+ get_data['challenge'], get_data['capture'] = recaptcha.challenge(captcha_key)
+ res = json_loads(self.load("http://extabit.com/file/%s/" % fileID, get=get_data))
+ if "ok" in res:
+ self.correctCaptcha()
+ break
+ else:
+ self.invalidCaptcha()
+ else:
+ self.fail(_("Invalid captcha"))
+ else:
+ self.error(_("Captcha"))
+
+ if not "href" in res:
+ self.error(_("Bad JSON response"))
+
+ self.html = self.load("http://extabit.com/file/%s%s" % (fileID, res['href']))
+
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("LINK_PATTERN not found"))
+
+ url = m.group(1)
+ self.download(url)
+
+
+getInfo = create_getInfo(ExtabitCom)
diff --git a/pyload/plugin/hoster/FastixRu.py b/pyload/plugin/hoster/FastixRu.py
new file mode 100644
index 000000000..7e167e245
--- /dev/null
+++ b/pyload/plugin/hoster/FastixRu.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from random import randrange
+from urllib import unquote
+
+from pyload.utils import json_loads
+from pyload.plugin.Hoster import Hoster
+
+
+class FastixRu(Hoster):
+ __name = "FastixRu"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'http://(?:www\.)?fastix\.(ru|it)/file/(?P<ID>\w{24})'
+
+ __description = """Fastix hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Massimo Rosamilia", "max@spiritix.eu")]
+
+
+ def getFilename(self, url):
+ try:
+ name = unquote(url.rsplit("/", 1)[1])
+ except IndexError:
+ name = "Unknown_Filename..."
+ if name.endswith("..."): # incomplete filename, append random stuff
+ name += "%s.tmp" % randrange(100, 999)
+ return name
+
+
+ def setup(self):
+ self.chunkLimit = 3
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ if re.match(self.__pattern, pyfile.url):
+ new_url = pyfile.url
+ elif not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "Fastix")
+ self.fail(_("No Fastix account provided"))
+ else:
+ self.logDebug("Old URL: %s" % pyfile.url)
+ api_key = self.account.getAccountData(self.user)
+ api_key = api_key['api']
+
+ page = self.load("http://fastix.ru/api_v2/",
+ get={'apikey': api_key, 'sub': "getdirectlink", 'link': pyfile.url})
+ data = json_loads(page)
+
+ self.logDebug("Json data", data)
+
+ if "error\":true" in page:
+ self.offline()
+ else:
+ new_url = data['downloadlink']
+
+ if new_url != pyfile.url:
+ self.logDebug("New URL: %s" % new_url)
+
+ if pyfile.name.startswith("http") or pyfile.name.startswith("Unknown"):
+ # only use when the name wasn't already set
+ pyfile.name = self.getFilename(new_url)
+
+ self.download(new_url, disposition=True)
+
+ check = self.checkDownload({"error": "<title>An error occurred while processing your request</title>",
+ "empty": re.compile(r"^$")})
+
+ if check == "error":
+ self.retry(wait_time=60, reason=_("An error occurred while generating link"))
+ elif check == "empty":
+ self.retry(wait_time=60, reason=_("Downloaded File was empty"))
diff --git a/pyload/plugin/hoster/FastshareCz.py b/pyload/plugin/hoster/FastshareCz.py
new file mode 100644
index 000000000..4c69e0e81
--- /dev/null
+++ b/pyload/plugin/hoster/FastshareCz.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class FastshareCz(SimpleHoster):
+ __name = "FastshareCz"
+ __type = "hoster"
+ __version = "0.25"
+
+ __pattern = r'http://(?:www\.)?fastshare\.cz/\d+/.+'
+
+ __description = """FastShare.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+ URL_REPLACEMENTS = [("#.*", "")]
+
+ COOKIES = [("fastshare.cz", "lang", "en")]
+
+ INFO_PATTERN = r'<h1 class="dwp">(?P<N>[^<]+)</h1>\s*<div class="fileinfo">\s*Size\s*: (?P<S>\d+) (?P<U>[\w^_]+),'
+ OFFLINE_PATTERN = r'>(The file has been deleted|Requested page not found)'
+
+ LINK_FREE_PATTERN = r'action=(/free/.*?)>\s*<img src="([^"]*)"><br'
+ LINK_PREMIUM_PATTERN = r'(http://data\d+\.fastshare\.cz/download\.php\?id=\d+&)'
+
+ SLOT_ERROR = "> 100% of FREE slots are full"
+ CREDIT_ERROR = " credit for "
+
+
+ def checkErrors(self):
+ if self.SLOT_ERROR in self.html:
+ errmsg = self.info['error'] = _("No free slots")
+ self.retry(12, 60, errmsg)
+
+ if self.CREDIT_ERROR in self.html:
+ errmsg = self.info['error'] = _("Not enough traffic left")
+ self.logWarning(errmsg)
+ self.resetAccount()
+
+ self.info.pop('error', None)
+
+
+ def handleFree(self):
+ m = re.search(self.LINK_FREE_PATTERN, self.html)
+ if m:
+ action, captcha_src = m.groups()
+ else:
+ self.error(_("LINK_FREE_PATTERN not found"))
+
+ baseurl = "http://www.fastshare.cz"
+ captcha = self.decryptCaptcha(urljoin(baseurl, captcha_src))
+ self.download(urljoin(baseurl, action), post={'code': captcha, 'btn.x': 77, 'btn.y': 18})
+
+
+ def checkFile(self):
+ check = self.checkDownload({
+ 'paralell_dl' : re.compile(r"<title>FastShare.cz</title>|<script>alert\('Pres FREE muzete stahovat jen jeden soubor najednou.'\)"),
+ 'wrong_captcha': re.compile(r'Download for FREE'),
+ 'credit' : re.compile(self.CREDIT_ERROR)
+ })
+
+ if check == "paralell_dl":
+ self.retry(6, 10 * 60, _("Paralell download"))
+
+ elif check == "wrong_captcha":
+ self.retry(max_tries=5, reason=_("Wrong captcha"))
+
+ elif check == "credit":
+ self.resetAccount()
+
+
+getInfo = create_getInfo(FastshareCz)
diff --git a/pyload/plugin/hoster/FileApeCom.py b/pyload/plugin/hoster/FileApeCom.py
new file mode 100644
index 000000000..c89272f66
--- /dev/null
+++ b/pyload/plugin/hoster/FileApeCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class FileApeCom(DeadHoster):
+ __name = "FileApeCom"
+ __type = "hoster"
+ __version = "0.12"
+
+ __pattern = r'http://(?:www\.)?fileape\.com/(index\.php\?act=download\&id=|dl/)\w+'
+
+ __description = """FileApe.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("espes", "")]
+
+
+getInfo = create_getInfo(FileApeCom)
diff --git a/pyload/plugin/hoster/FileParadoxIn.py b/pyload/plugin/hoster/FileParadoxIn.py
new file mode 100644
index 000000000..51046a3c2
--- /dev/null
+++ b/pyload/plugin/hoster/FileParadoxIn.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class FileParadoxIn(XFSHoster):
+ __name = "FileParadoxIn"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'https?://(?:www\.)?fileparadox\.in/\w{12}'
+
+ __description = """FileParadox.in hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("RazorWing", "muppetuk1@hotmail.com")]
+
+
+ HOSTER_DOMAIN = "fileparadox.in"
+
+ SIZE_PATTERN = r'</font>\s*\(\s*(?P<S>[^)]+)\s*\)</font>'
+
+
+getInfo = create_getInfo(FileParadoxIn)
diff --git a/pyload/plugin/hoster/FileSharkPl.py b/pyload/plugin/hoster/FileSharkPl.py
new file mode 100644
index 000000000..165c993bd
--- /dev/null
+++ b/pyload/plugin/hoster/FileSharkPl.py
@@ -0,0 +1,138 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class FileSharkPl(SimpleHoster):
+ __name = "FileSharkPl"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'http://(?:www\.)?fileshark\.pl/pobierz/\d{6}/\w{5}'
+
+ __description = """FileShark.pl hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("prOq", ""),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'<h2 class="name-file">(?P<N>.+)</h2>'
+ SIZE_PATTERN = r'<p class="size-file">(.*?)<strong>(?P<S>\d+\.?\d*)\s(?P<U>\w+)</strong></p>'
+
+ OFFLINE_PATTERN = '(P|p)lik zosta. (usuni.ty|przeniesiony)'
+
+ LINK_FREE_PATTERN = r'<a href="(.*?)" class="btn-upload-free">'
+ LINK_PREMIUM_PATTERN = r'<a href="(.*?)" class="btn-upload-premium">'
+
+ WAIT_PATTERN = r'var timeToDownload = (\d+);'
+ ERROR_PATTERN = r'<p class="lead text-center alert alert-warning">(.*?)</p>'
+ IP_ERROR_PATTERN = r'Strona jest dost.pna wy..cznie dla u.ytkownik.w znajduj.cych si. na terenie Polski'
+ SLOT_ERROR_PATTERN = r'Osi.gni.to maksymaln. liczb. .ci.ganych jednocze.nie plik.w\.'
+
+ CAPTCHA_PATTERN = '<img src="data:image/jpeg;base64,(.*?)" title="captcha"'
+ TOKEN_PATTERN = r'name="form\[_token\]" value="(.*?)" />'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ if self.premium:
+ self.multiDL = True
+ self.limitDL = 20
+ else:
+ self.multiDL = False
+
+
+ def checkErrors(self):
+ # check if file is now available for download (-> file name can be found in html body)
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ errmsg = self.info['error'] = _("Another download already run")
+ self.retry(15, int(m.group(1)), errmsg)
+
+ m = re.search(self.ERROR_PATTERN, self.html)
+ if m:
+ alert = m.group(1)
+
+ if re.match(self.IP_ERROR_PATTERN, alert):
+ self.fail(_("Only connections from Polish IP are allowed"))
+
+ elif re.match(self.SLOT_ERROR_PATTERN, alert):
+ errmsg = self.info['error'] = _("No free download slots available")
+ self.logWarning(errmsg)
+ self.retry(10, 30 * 60, _("Still no free download slots available"))
+
+ else:
+ self.info['error'] = alert
+ self.retry(10, 10 * 60, _("Try again later"))
+
+ self.info.pop('error', None)
+
+
+ #@NOTE: the handlePremium method has never been tested
+ def handlePremium(self):
+ super(FileSharkPl, self).handlePremium()
+ if self.link:
+ self.link = urljoin("http://fileshark.pl/", self.link)
+
+
+ def handleFree(self):
+ m = re.search(self.LINK_FREE_PATTERN, self.html)
+ if m is None:
+ self.error(_("Download url not found"))
+
+ link = urljoin("http://fileshark.pl", m.group(1))
+
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ seconds = int(m.group(1))
+ self.logDebug("Wait %s seconds" % seconds)
+ self.wait(seconds)
+
+ action, inputs = self.parseHtmlForm('action=""')
+
+ m = re.search(self.TOKEN_PATTERN, self.html)
+ if m is None:
+ self.retry(reason=_("Captcha form not found"))
+
+ inputs['form[_token]'] = m.group(1)
+
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m is None:
+ self.retry(reason=_("Captcha image not found"))
+
+ tmp_load = self.load
+ self.load = self._decode64 #: work-around: injects decode64 inside decryptCaptcha
+
+ inputs['form[captcha]'] = self.decryptCaptcha(m.group(1), imgtype='jpeg')
+ inputs['form[start]'] = ""
+
+ self.load = tmp_load
+
+ self.download(link, post=inputs, cookies=True, disposition=True)
+
+
+ def checkFile(self):
+ check = self.checkDownload({'wrong_captcha': re.compile(r'<label for="form_captcha" generated="true" class="error">(.*?)</label>'),
+ 'wait_pattern' : re.compile(self.WAIT_PATTERN),
+ 'DL-found' : re.compile('<a href="(.*)">')})
+
+ if check == "DL-found":
+ self.correctCaptcha()
+
+ elif check == "wrong_captcha":
+ self.invalidCaptcha()
+ self.retry(10, 1, _("Wrong captcha solution"))
+
+ elif check == "wait_pattern":
+ self.retry()
+
+
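+ # decryptCaptcha normally fetches its argument via self.load; here the "URL" is already the base64-encoded image data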
+ def _decode64(self, data, *args, **kwargs):
+ return data.decode("base64")
+
+
+getInfo = create_getInfo(FileSharkPl)
diff --git a/pyload/plugin/hoster/FileStoreTo.py b/pyload/plugin/hoster/FileStoreTo.py
new file mode 100644
index 000000000..6a916dab2
--- /dev/null
+++ b/pyload/plugin/hoster/FileStoreTo.py
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class FileStoreTo(SimpleHoster):
+ __name = "FileStoreTo"
+ __type = "hoster"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?filestore\.to/\?d=(?P<ID>\w+)'
+
+ __description = """FileStore.to hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ INFO_PATTERN = r'File: <span[^>]*>(?P<N>.+)</span><br />Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+ OFFLINE_PATTERN = r'>Download-Datei wurde nicht gefunden<'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def handleFree(self):
+ self.wait(10)
+ ldc = re.search(r'wert="(\w+)"', self.html).group(1)
+ link = self.load("http://filestore.to/ajax/download.php", get={"LDC": ldc})
+ self.download(link)
+
+
+getInfo = create_getInfo(FileStoreTo)
diff --git a/pyload/plugin/hoster/FilebeerInfo.py b/pyload/plugin/hoster/FilebeerInfo.py
new file mode 100644
index 000000000..004613cab
--- /dev/null
+++ b/pyload/plugin/hoster/FilebeerInfo.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class FilebeerInfo(DeadHoster):
+ __name = "FilebeerInfo"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?filebeer\.info/(?!\d*~f)(?P<ID>\w+).*'
+
+ __description = """Filebeer.info plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(FilebeerInfo)
diff --git a/pyload/plugin/hoster/FilecloudIo.py b/pyload/plugin/hoster/FilecloudIo.py
new file mode 100644
index 000000000..792f563d6
--- /dev/null
+++ b/pyload/plugin/hoster/FilecloudIo.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.utils import json_loads
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class FilecloudIo(SimpleHoster):
+ __name = "FilecloudIo"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?(?:filecloud\.io|ifile\.it|mihd\.net)/(?P<ID>\w+).*'
+
+ __description = """Filecloud.io hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ SIZE_PATTERN = r'{var __ab1 = (?P<S>\d+);}'
+ NAME_PATTERN = r'id="aliasSpan">(?P<N>.*?)&nbsp;&nbsp;<'
+ OFFLINE_PATTERN = r'l10n\.(FILES__DOESNT_EXIST|REMOVED)'
+ TEMP_OFFLINE_PATTERN = r'l10n\.FILES__WARNING'
+
+ UKEY_PATTERN = r'\'ukey\'\s*:\'(\w+)'
+ AB1_PATTERN = r'if\( __ab1 == \'(\w+)\' \)'
+ ERROR_MSG_PATTERN = r'var __error_msg\s*=\s*l10n\.(.*?);'
+ RECAPTCHA_PATTERN = r'var __recaptcha_public\s*=\s*\'(.+?)\';'
+
+ LINK_PATTERN = r'"(http://s\d+\.filecloud\.io/%s/\d+/.*?)"'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = 1
+
+
+ def handleFree(self):
+ data = {"ukey": self.info['pattern']['ID']}
+
+ m = re.search(self.AB1_PATTERN, self.html)
+ if m is None:
+ self.error(_("__AB1"))
+ data['__ab1'] = m.group(1)
+
+ recaptcha = ReCaptcha(self)
+
+ m = re.search(self.RECAPTCHA_PATTERN, self.html)
+ captcha_key = m.group(1) if m else recaptcha.detect_key()
+
+ if captcha_key is None:
+ self.error(_("ReCaptcha key not found"))
+
+ if not self.account:
+ self.fail(_("User not logged in"))
+ elif not self.account.logged_in:
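+ # the login form apparently expects a solved recaptcha, so pass it to the account plugin before re-login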
+ challenge, response = recaptcha.challenge(captcha_key)
+ self.account.form_data = {"recaptcha_challenge_field": challenge,
+ "recaptcha_response_field" : response}
+ self.account.relogin(self.user)
+ self.retry(2)
+
+ json_url = "http://filecloud.io/download-request.json"
+ res = self.load(json_url, post=data)
+ self.logDebug(res)
+ res = json_loads(res)
+
+ if "error" in res and res['error']:
+ self.fail(res)
+
+ self.logDebug(res)
+ if res['captcha']:
+ data['ctype'] = "recaptcha"
+
+ for _i in xrange(5):
+ data['recaptcha_challenge'], data['recaptcha_response'] = recaptcha.challenge(captcha_key)
+
+ json_url = "http://filecloud.io/download-request.json"
+ res = self.load(json_url, post=data)
+ self.logDebug(res)
+ res = json_loads(res)
+
+ if "retry" in res and res['retry']:
+ self.invalidCaptcha()
+ else:
+ self.correctCaptcha()
+ break
+ else:
+ self.fail(_("Incorrect captcha"))
+
+ if res['dl']:
+ self.html = self.load('http://filecloud.io/download.html')
+
+ m = re.search(self.LINK_PATTERN % self.info['pattern']['ID'], self.html)
+ if m is None:
+ self.error(_("LINK_PATTERN not found"))
+
+ if "size" in self.info and self.info['size']:
+ self.check_data = {"size": int(self.info['size'])}
+
+ download_url = m.group(1)
+ self.download(download_url)
+ else:
+ self.fail(_("Unexpected server response"))
+
+
+ def handlePremium(self):
+ akey = self.account.getAccountData(self.user)['akey']
+ ukey = self.info['pattern']['ID']
+ self.logDebug("Akey: %s | Ukey: %s" % (akey, ukey))
+ rep = self.load("http://api.filecloud.io/api-fetch_download_url.api",
+ post={"akey": akey, "ukey": ukey})
+ self.logDebug("FetchDownloadUrl: " + rep)
+ rep = json_loads(rep)
+ if rep['status'] == 'ok':
+ self.download(rep['download_url'], disposition=True)
+ else:
+ self.fail(rep['message'])
+
+
+getInfo = create_getInfo(FilecloudIo)
diff --git a/pyload/plugin/hoster/FilefactoryCom.py b/pyload/plugin/hoster/FilefactoryCom.py
new file mode 100644
index 000000000..c46cac679
--- /dev/null
+++ b/pyload/plugin/hoster/FilefactoryCom.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, parseFileInfo
+
+
+def getInfo(urls):
+ for url in urls:
+ h = getURL(url, just_header=True)
+ m = re.search(r'Location: (.+)\r\n', h)
+ if m and not re.match(FilefactoryCom.__pattern, m.group(1)): #: It's a direct link! Skipping
+ yield (url, 0, 3, url)
+ else: #: It's a standard html page
+ yield parseFileInfo(FilefactoryCom, url, getURL(url))
+
+
+class FilefactoryCom(SimpleHoster):
+ __name = "FilefactoryCom"
+ __type = "hoster"
+ __version = "0.52"
+
+ __pattern = r'https?://(?:www\.)?filefactory\.com/(file|trafficshare/\w+)/\w+'
+
+ __description = """Filefactory.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ INFO_PATTERN = r'<div id="file_name"[^>]*>\s*<h2>(?P<N>[^<]+)</h2>\s*<div id="file_info">\s*(?P<S>[\d.,]+) (?P<U>[\w^_]+) uploaded'
+ OFFLINE_PATTERN = r'<h2>File Removed</h2>|This file is no longer available'
+
+ LINK_PATTERN = r'"([^"]+filefactory\.com/get.+?)"'
+
+ WAIT_PATTERN = r'<div id="countdown_clock" data-delay="(\d+)">'
+ PREMIUM_ONLY_PATTERN = r'>Premium Account Required'
+
+ COOKIES = [("filefactory.com", "locale", "en_US.utf8")]
+
+
+ def handleFree(self):
+ if "Currently only Premium Members can download files larger than" in self.html:
+ self.fail(_("File too large for free download"))
+ elif "All free download slots on this server are currently in use" in self.html:
+ self.retry(50, 15 * 60, _("All free slots are busy"))
+
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("Free download link not found"))
+
+ dl_link = m.group(1)
+
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ self.wait(int(m.group(1)))
+
+ self.download(dl_link, disposition=True)
+
+ check = self.checkDownload({'multiple': "You are currently downloading too many files at once.",
+ 'error': '<div id="errorMessage">'})
+
+ if check == "multiple":
+ self.logDebug("Parallel downloads detected; waiting 15 minutes")
+ self.retry(wait_time=15 * 60, reason=_("Parallel downloads"))
+ elif check == "error":
+ self.error(_("Unknown error"))
+
+
+ def handlePremium(self):
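+ # premium requests may redirect straight to the file: follow the Location header, accept a direct content-disposition reply, or fall back to parsing the page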
+ header = self.load(self.pyfile.url, just_header=True)
+
+ if 'location' in header:
+ url = header['location'].strip()
+ if not url.startswith("http://"):
+ url = urljoin("http://www.filefactory.com", url)
+ elif 'content-disposition' in header:
+ url = self.pyfile.url
+ else:
+ html = self.load(self.pyfile.url)
+ m = re.search(self.LINK_PATTERN, html)
+ if m:
+ url = m.group(1)
+ else:
+ self.error(_("Premium download link not found"))
+
+ self.download(url, disposition=True)
diff --git a/pyload/plugin/hoster/FilejungleCom.py b/pyload/plugin/hoster/FilejungleCom.py
new file mode 100644
index 000000000..3fa567a6b
--- /dev/null
+++ b/pyload/plugin/hoster/FilejungleCom.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.hoster.FileserveCom import FileserveCom, checkFile
+from pyload.plugin.Plugin import chunks
+
+
+class FilejungleCom(FileserveCom):
+ __name = "FilejungleCom"
+ __type = "hoster"
+ __version = "0.51"
+
+ __pattern = r'http://(?:www\.)?filejungle\.com/f/(?P<id>[^/]+).*'
+
+ __description = """Filejungle.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ URLS = ["http://www.filejungle.com/f/", "http://www.filejungle.com/check_links.php",
+ "http://www.filejungle.com/checkReCaptcha.php"]
+ LINKCHECK_TR = r'<li>\s*(<div class="col1">.*?)</li>'
+ LINKCHECK_TD = r'<div class="(?:col )?col\d">(?:<[^>]*>|&nbsp;)*([^<]*)'
+
+ LONG_WAIT_PATTERN = r'<h1>Please wait for (\d+) (\w+)\s*to download the next file\.</h1>'
+
+
+def getInfo(urls):
+ for chunk in chunks(urls, 100):
+ yield checkFile(FilejungleCom, chunk)
diff --git a/pyload/plugin/hoster/FileomCom.py b/pyload/plugin/hoster/FileomCom.py
new file mode 100644
index 000000000..06b5921fe
--- /dev/null
+++ b/pyload/plugin/hoster/FileomCom.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://fileom.com/gycaytyzdw3g/random.bin.html
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class FileomCom(XFSHoster):
+ __name = "FileomCom"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'https?://(?:www\.)?fileom\.com/\w{12}'
+
+ __description = """Fileom.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "fileom.com"
+
+ NAME_PATTERN = r'Filename: <span>(?P<N>.+?)<'
+ SIZE_PATTERN = r'File Size: <span class="size">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ LINK_PATTERN = r'var url2 = \'(.+?)\';'
+
+
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = 1
+ self.resumeDownload = self.premium
+
+
+getInfo = create_getInfo(FileomCom)
diff --git a/pyload/plugin/hoster/FilepostCom.py b/pyload/plugin/hoster/FilepostCom.py
new file mode 100644
index 000000000..f7ed38a5c
--- /dev/null
+++ b/pyload/plugin/hoster/FilepostCom.py
@@ -0,0 +1,130 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import time
+
+from pyload.utils import json_loads
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class FilepostCom(SimpleHoster):
+ __name = "FilepostCom"
+ __type = "hoster"
+ __version = "0.30"
+
+ __pattern = r'https?://(?:www\.)?(?:filepost\.com/files|fp\.io)/(?P<ID>[^/]+)'
+
+ __description = """Filepost.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ INFO_PATTERN = r'<input type="text" id="url" value=\'<a href[^>]*>(?P<N>[^>]+?) - (?P<S>[\d.,]+) (?P<U>[\w^_]+)</a>\' class="inp_text"/>'
+ OFFLINE_PATTERN = r'class="error_msg_title"> Invalid or Deleted File. </div>|<div class="file_info file_info_deleted">'
+
+ PREMIUM_ONLY_PATTERN = r'members only. Please upgrade to premium|a premium membership is required to download this file'
+ RECAPTCHA_PATTERN = r'Captcha.init\({\s*key:\s*\'(.+?)\''
+ FLP_TOKEN_PATTERN = r'set_store_options\({token: \'(.+?)\''
+
+
+ def handleFree(self):
+ m = re.search(self.FLP_TOKEN_PATTERN, self.html)
+ if m is None:
+ self.error(_("Token"))
+ flp_token = m.group(1)
+
+ m = re.search(self.RECAPTCHA_PATTERN, self.html)
+ if m is None:
+ self.error(_("Captcha key"))
+ captcha_key = m.group(1)
+
+ # Get wait time
+ get_dict = {'SID': self.req.cj.getCookie('SID'), 'JsHttpRequest': str(int(time() * 10000)) + '-xml'}
+ post_dict = {'action': 'set_download', 'token': flp_token, 'code': self.info['pattern']['ID']}
+ wait_time = int(self.getJsonResponse(get_dict, post_dict, 'wait_time'))
+
+ if wait_time > 0:
+ self.wait(wait_time)
+
+ post_dict = {"token": flp_token, "code": self.info['pattern']['ID'], "file_pass": ''}
+
+ if 'var is_pass_exists = true;' in self.html:
+ # Solve password
+ for file_pass in self.getPassword().splitlines():
+ get_dict['JsHttpRequest'] = str(int(time() * 10000)) + '-xml'
+ post_dict['file_pass'] = file_pass
+ self.logInfo(_("Password protected link, trying ") + file_pass)
+
+ download_url = self.getJsonResponse(get_dict, post_dict, 'link')
+ if download_url:
+ break
+
+ else:
+ self.fail(_("No or incorrect password"))
+
+ else:
+ # Solve recaptcha
+ recaptcha = ReCaptcha(self)
+
+ for i in xrange(5):
+ get_dict['JsHttpRequest'] = str(int(time() * 10000)) + '-xml'
+ if i:
+ post_dict['recaptcha_challenge_field'], post_dict['recaptcha_response_field'] = recaptcha.challenge(
+ captcha_key)
+ self.logDebug(u"RECAPTCHA: %s : %s : %s" % (
+ captcha_key, post_dict['recaptcha_challenge_field'], post_dict['recaptcha_response_field']))
+
+ download_url = self.getJsonResponse(get_dict, post_dict, 'link')
+ if download_url:
+ if i:
+ self.correctCaptcha()
+ break
+ elif i:
+ self.invalidCaptcha()
+
+ else:
+ self.fail(_("Invalid captcha"))
+
+ # Download
+ self.download(download_url)
+
+
+ def getJsonResponse(self, get_dict, post_dict, field):
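+ # every AJAX call seems to answer with {"js": {...}}: "error" carries the failure reason, "answer" the requested field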
+ res = json_loads(self.load('https://filepost.com/files/get/', get=get_dict, post=post_dict))
+
+ self.logDebug(res)
+
+ if 'js' not in res:
+ self.error(_("JSON %s 1") % field)
+
+ # I changed js_answer to res['js'] since js_answer is set nowhere.
+ # I don't know the JSON-HTTP specs in detail, but the previous author
+ # accessed res['js']['error'] as well as js_answer['error'];
+ # see the line commented out with "# ~?" below.
+ if 'error' in res['js']:
+
+ if res['js']['error'] == 'download_delay':
+ self.retry(wait_time=res['js']['params']['next_download'])
+ # ~? self.retry(wait_time=js_answer['params']['next_download'])
+
+ elif ('Wrong file password' in res['js']['error']
+ or 'You entered a wrong CAPTCHA code' in res['js']['error']
+ or 'CAPTCHA Code nicht korrekt' in res['js']['error']):
+ return None
+
+ elif 'CAPTCHA' in res['js']['error']:
+ self.logDebug("Error response is unknown, but mentions CAPTCHA")
+ return None
+
+ else:
+ self.fail(res['js']['error'])
+
+ if 'answer' not in res['js'] or field not in res['js']['answer']:
+ self.error(_("JSON %s 2") % field)
+
+ return res['js']['answer'][field]
+
+
+getInfo = create_getInfo(FilepostCom)
diff --git a/pyload/plugin/hoster/FilepupNet.py b/pyload/plugin/hoster/FilepupNet.py
new file mode 100644
index 000000000..71ad44ecd
--- /dev/null
+++ b/pyload/plugin/hoster/FilepupNet.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://www.filepup.net/files/k5w4ZVoF1410184283.html
+# http://www.filepup.net/files/R4GBq9XH1410186553.html
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class FilepupNet(SimpleHoster):
+ __name = "FilepupNet"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?filepup\.net/files/\w+'
+
+ __description = """Filepup.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'>(?P<N>.+?)</h1>'
+ SIZE_PATTERN = r'class="fa fa-archive"></i> \((?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ OFFLINE_PATTERN = r'>This file has been deleted'
+
+ LINK_PATTERN = r'(http://www\.filepup\.net/get/.+?)\''
+
+
+ def setup(self):
+ self.multiDL = False
+ self.chunkLimit = 1
+
+
+ def handleFree(self):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("Download link not found"))
+
+ dl_link = m.group(1)
+ self.download(dl_link, post={'task': "download"})
+
+ check = self.checkDownload({'html': re.compile("html")})
+ if check == "html":
+ self.error(_("Downloaded file is an html page"))
+
+
+getInfo = create_getInfo(FilepupNet)
diff --git a/pyload/plugin/hoster/FilerNet.py b/pyload/plugin/hoster/FilerNet.py
new file mode 100644
index 000000000..4b355f91e
--- /dev/null
+++ b/pyload/plugin/hoster/FilerNet.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://filer.net/get/ivgf5ztw53et3ogd
+# http://filer.net/get/hgo14gzcng3scbvv
+
+import re
+
+from urlparse import urljoin
+
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class FilerNet(SimpleHoster):
+ __name = "FilerNet"
+ __type = "hoster"
+ __version = "0.10"
+
+ __pattern = r'https?://(?:www\.)?filer\.net/get/\w+'
+
+ __description = """Filer.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ INFO_PATTERN = r'<h1 class="page-header">Free Download (?P<N>\S+) <small>(?P<S>[\w.]+) (?P<U>[\w^_]+)</small></h1>'
+ OFFLINE_PATTERN = r'Nicht gefunden'
+
+ LINK_FREE_PATTERN = LINK_PREMIUM_PATTERN = r'href="([^"]+)">Get download</a>'
+
+
+ def checkErrors(self):
+ # Wait between downloads
+ m = re.search(r'musst du <span id="time">(\d+)</span> Sekunden warten', self.html)
+ if m:
+ errmsg = self.info['error'] = _("Wait between free downloads")
+ self.retry(wait_time=int(m.group(1)), reason=errmsg)
+
+ self.info.pop('error', None)
+
+
+ def handleFree(self):
+ inputs = self.parseHtmlForm(input_names={'token': re.compile(r'.+')})[1]
+ if 'token' not in inputs:
+ self.error(_("Unable to detect token"))
+
+ self.html = self.load(self.pyfile.url, post={'token': inputs['token']}, decode=True)
+
+ inputs = self.parseHtmlForm(input_names={'hash': re.compile(r'.+')})[1]
+ if 'hash' not in inputs:
+ self.error(_("Unable to detect hash"))
+
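+ # the free download needs the form token, the file hash and a solved recaptcha posted back to the same URL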
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(5):
+ challenge, response = recaptcha.challenge()
+
+ header = self.load(self.pyfile.url,
+ post={'recaptcha_challenge_field': challenge,
+ 'recaptcha_response_field' : response,
+ 'hash' : inputs['hash']},
+ just_header=True)
+
+ if 'location' in header and header['location']:
+ self.correctCaptcha()
+ self.link = header['location']
+ return
+ else:
+ self.invalidCaptcha()
+
+
+ def downloadLink(self, link):
+ if not link:
+ return
+
+ self.download(urljoin("http://filer.net/", link), disposition=True)
+
+
+getInfo = create_getInfo(FilerNet)
diff --git a/pyload/plugin/hoster/FilerioCom.py b/pyload/plugin/hoster/FilerioCom.py
new file mode 100644
index 000000000..19be7ef3a
--- /dev/null
+++ b/pyload/plugin/hoster/FilerioCom.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class FilerioCom(XFSHoster):
+ __name = "FilerioCom"
+ __type = "hoster"
+ __version = "0.07"
+
+ __pattern = r'http://(?:www\.)?(filerio\.(in|com)|filekeen\.com)/\w{12}'
+
+ __description = """FileRio.in hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "filerio.in"
+
+ URL_REPLACEMENTS = [(r'filekeen\.com', "filerio.in")]
+
+ OFFLINE_PATTERN = r'>&quot;File Not Found|File has been removed'
+
+
+getInfo = create_getInfo(FilerioCom)
diff --git a/pyload/plugin/hoster/FilesMailRu.py b/pyload/plugin/hoster/FilesMailRu.py
new file mode 100644
index 000000000..47d1aa8e0
--- /dev/null
+++ b/pyload/plugin/hoster/FilesMailRu.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Hoster import Hoster
+from pyload.plugin.Plugin import chunks
+
+
+def getInfo(urls):
+ result = []
+ for chunk in chunks(urls, 10):
+ for url in chunk:
+ html = getURL(url)
+ if r'<div class="errorMessage mb10">' in html:
+ result.append((url, 0, 1, url))
+ elif r'Page cannot be displayed' in html:
+ result.append((url, 0, 1, url))
+ else:
+ try:
+ url_pattern = '<a href="(.+?)" onclick="return Act\(this\, \'dlink\'\, event\)">(.+?)</a>'
+ file_name = re.search(url_pattern, html).group(0).split(', event)">')[1].split('</a>')[0]
+ result.append((file_name, 0, 2, url))
+ except Exception:
+ pass
+
+ # status 1=OFFLINE, 2=OK, 3=UNKNOWN
+ # result.append((#name,#size,#status,#url))
+ yield result
+
+
+class FilesMailRu(Hoster):
+ __name = "FilesMailRu"
+ __type = "hoster"
+ __version = "0.31"
+
+ __pattern = r'http://(?:www\.)?files\.mail\.ru/.*'
+
+ __description = """Files.mail.ru hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("oZiRiz", "ich@oziriz.de")]
+
+
+ def setup(self):
+ if not self.account:
+ self.multiDL = False
+
+
+ def process(self, pyfile):
+ self.html = self.load(pyfile.url)
+ self.url_pattern = '<a href="(.+?)" onclick="return Act\(this\, \'dlink\'\, event\)">(.+?)</a>'
+
+ #marks the file as "offline" when the pattern was found on the html-page'''
+ if r'<div class="errorMessage mb10">' in self.html:
+ self.offline()
+
+ elif r'Page cannot be displayed' in self.html:
+ self.offline()
+
+ # the filename that will be shown in the list (e.g. test.part1.rar)
+ pyfile.name = self.getFileName()
+
+ # prepare and download
+ if not self.account:
+ self.prepare()
+ self.download(self.getFileUrl())
+ self.myPostProcess()
+ else:
+ self.download(self.getFileUrl())
+ self.myPostProcess()
+
+
+ def prepare(self):
+ """You have to wait some seconds. Otherwise you will get a 40Byte HTML Page instead of the file you expected"""
+ self.setWait(10)
+ self.wait()
+ return True
+
+
+ def getFileUrl(self):
+ """gives you the URL to the file. Extracted from the Files.mail.ru HTML-page stored in self.html"""
+ return re.search(self.url_pattern, self.html).group(0).split('<a href="')[1].split('" onclick="return Act')[0]
+
+
+ def getFileName(self):
+ """gives you the Name for each file. Also extracted from the HTML-Page"""
+ return re.search(self.url_pattern, self.html).group(0).split(', event)">')[1].split('</a>')[0]
+
+
+ def myPostProcess(self):
+ # searches the file for HTML code. Sometimes the redirect
+ # doesn't work (maybe a curl problem) and you get only a small
+ # HTML file and the download is marked as "finished";
+ # in that case the download will be restarted. It's only bad for those
+ # who want to download an HTML file (it's one in a million ;-) )
+ #
+ # The maximum upload size allowed on files.mail.ru at the moment is 100MB,
+ # so I set it to check every download, because sometimes there are downloads
+ # that contain the HTML text and 60MB of zeros after that in a xyzfile.part1.rar file
+ # (loading 100MB into RAM is not an option)
+ check = self.checkDownload({"html": "<meta name="}, read_size=50000)
+ if check == "html":
+ self.logInfo(_(
+ "There was HTML code in the downloaded file (%s)... redirect error? The download will be restarted." %
+ self.pyfile.name))
+ self.retry()
diff --git a/pyload/plugin/hoster/FileserveCom.py b/pyload/plugin/hoster/FileserveCom.py
new file mode 100644
index 000000000..27f1a95f8
--- /dev/null
+++ b/pyload/plugin/hoster/FileserveCom.py
@@ -0,0 +1,217 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.utils import json_loads
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Hoster import Hoster
+from pyload.plugin.Plugin import chunks
+from pyload.plugin.hoster.UnrestrictLi import secondsToMidnight
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.utils import parseFileSize
+
+
+def checkFile(plugin, urls):
+ html = getURL(plugin.URLS[1], post={"urls": "\n".join(urls)}, decode=True)
+
+ file_info = []
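+ # each row of the link-checker table yields (name, size, status, url); status 2 = available, 1 = not available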
+ for li in re.finditer(plugin.LINKCHECK_TR, html, re.S):
+ try:
+ cols = re.findall(plugin.LINKCHECK_TD, li.group(1))
+ if cols:
+ file_info.append((
+ cols[1] if cols[1] != '--' else cols[0],
+ parseFileSize(cols[2]) if cols[2] != '--' else 0,
+ 2 if cols[3].startswith('Available') else 1,
+ cols[0]))
+ except Exception, e:
+ continue
+
+ return file_info
+
+
+class FileserveCom(Hoster):
+ __name = "FileserveCom"
+ __type = "hoster"
+ __version = "0.52"
+
+ __pattern = r'http://(?:www\.)?fileserve\.com/file/(?P<id>[^/]+).*'
+
+ __description = """Fileserve.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de"),
+ ("mkaay", "mkaay@mkaay.de"),
+ ("Paul King", ""),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ URLS = ["http://www.fileserve.com/file/", "http://www.fileserve.com/link-checker.php",
+ "http://www.fileserve.com/checkReCaptcha.php"]
+ LINKCHECK_TR = r'<tr>\s*(<td>http://www\.fileserve\.com/file/.*?)</tr>'
+ LINKCHECK_TD = r'<td>(?:<[^>]*>|&nbsp;)*([^<]*)'
+
+ CAPTCHA_KEY_PATTERN = r'var reCAPTCHA_publickey=\'(?P<key>.+?)\''
+ LONG_WAIT_PATTERN = r'<li class="title">You need to wait (\d+) (\w+) to start another download\.</li>'
+ LINK_EXPIRED_PATTERN = r'Your download link has expired'
+ DAILY_LIMIT_PATTERN = r'Your daily download limit has been reached'
+ NOT_LOGGED_IN_PATTERN = r'<form (name="loginDialogBoxForm"|id="login_form")|<li><a href="/login\.php">Login</a></li>'
+
+
+ def setup(self):
+ self.resumeDownload = self.multiDL = self.premium
+ self.file_id = re.match(self.__pattern, self.pyfile.url).group('id')
+ self.url = "%s%s" % (self.URLS[0], self.file_id)
+
+ self.logDebug("File ID: %s URL: %s" % (self.file_id, self.url))
+
+
+ def process(self, pyfile):
+ pyfile.name, pyfile.size, status, self.url = checkFile(self, [self.url])[0]
+ if status != 2:
+ self.offline()
+ self.logDebug("File Name: %s Size: %d" % (pyfile.name, pyfile.size))
+
+ if self.premium:
+ self.handlePremium()
+ else:
+ self.handleFree()
+
+
+ def handleFree(self):
+ self.html = self.load(self.url)
+ action = self.load(self.url, post={"checkDownload": "check"}, decode=True)
+ action = json_loads(action)
+ self.logDebug(action)
+
+ if "fail" in action:
+ if action['fail'] == "timeLimit":
+ self.html = self.load(self.url, post={"checkDownload": "showError", "errorType": "timeLimit"},
+ decode=True)
+
+ self.doLongWait(re.search(self.LONG_WAIT_PATTERN, self.html))
+
+ elif action['fail'] == "parallelDownload":
+ self.logWarning(_("Parallel download error, now waiting 60s"))
+ self.retry(wait_time=60, reason=_("parallelDownload"))
+
+ else:
+ self.fail(_("Download check returned: %s") % action['fail'])
+
+ elif "success" in action:
+ if action['success'] == "showCaptcha":
+ self.doCaptcha()
+ self.doTimmer()
+ elif action['success'] == "showTimmer":
+ self.doTimmer()
+
+ else:
+ self.error(_("Unknown server response"))
+
+ # show download link
+ res = self.load(self.url, post={"downloadLink": "show"}, decode=True)
+ self.logDebug("Show downloadLink response: %s" % res)
+ if "fail" in res:
+ self.error(_("Couldn't retrieve download url"))
+
+ # this may either download our file or forward us to an error page
+ self.download(self.url, post={"download": "normal"})
+ self.logDebug(self.req.http.lastEffectiveURL)
+
+ check = self.checkDownload({"expired": self.LINK_EXPIRED_PATTERN,
+ "wait": re.compile(self.LONG_WAIT_PATTERN),
+ "limit": self.DAILY_LIMIT_PATTERN})
+
+ if check == "expired":
+ self.logDebug("Download link was expired")
+ self.retry()
+ elif check == "wait":
+ self.doLongWait(self.lastCheck)
+ elif check == "limit":
+ self.logWarning(_("Download limited reached for today"))
+ self.setWait(secondsToMidnight(gmt=2), True)
+ self.wait()
+ self.retry()
+
+ self.thread.m.reconnecting.wait(3) # Ease issue with later downloads appearing to be in parallel
+
+
+ def doTimmer(self):
+ res = self.load(self.url, post={"downloadLink": "wait"}, decode=True)
+ self.logDebug("Wait response: %s" % res[:80])
+
+ if "fail" in res:
+ self.fail(_("Failed getting wait time"))
+
+ if self.__name == "FilejungleCom":
+ m = re.search(r'"waitTime":(\d+)', res)
+ if m is None:
+ self.fail(_("Cannot get wait time"))
+ wait_time = int(m.group(1))
+ else:
+ wait_time = int(res) + 3
+
+ self.setWait(wait_time)
+ self.wait()
+
+
+ def doCaptcha(self):
+ captcha_key = re.search(self.CAPTCHA_KEY_PATTERN, self.html).group("key")
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(5):
+ challenge, response = recaptcha.challenge(captcha_key)
+ res = json_loads(self.load(self.URLS[2],
+ post={'recaptcha_challenge_field' : challenge,
+ 'recaptcha_response_field' : response,
+ 'recaptcha_shortencode_field': self.file_id}))
+ if not res['success']:
+ self.invalidCaptcha()
+ else:
+ self.correctCaptcha()
+ break
+ else:
+ self.fail(_("Invalid captcha"))
+
+
+ def doLongWait(self, m):
+ wait_time = (int(m.group(1)) * {'seconds': 1, 'minutes': 60, 'hours': 3600}[m.group(2)]) if m else 12 * 60
+ self.setWait(wait_time, True)
+ self.wait()
+ self.retry()
+
+
+ def handlePremium(self):
+ premium_url = None
+ if self.__name == "FileserveCom":
+ #try api download
+ res = self.load("http://app.fileserve.com/api/download/premium/",
+ post={"username": self.user,
+ "password": self.account.getAccountData(self.user)['password'],
+ "shorten": self.file_id},
+ decode=True)
+ if res:
+ res = json_loads(res)
+ if res['error_code'] == "302":
+ premium_url = res['next']
+ elif res['error_code'] in ["305", "500"]:
+ self.tempOffline()
+ elif res['error_code'] in ["403", "605"]:
+ self.resetAccount()
+ elif res['error_code'] in ["606", "607", "608"]:
+ self.offline()
+ else:
+ self.logError(res['error_code'], res['error_message'])
+
+ self.download(premium_url or self.pyfile.url)
+
+ if not premium_url:
+ check = self.checkDownload({"login": re.compile(self.NOT_LOGGED_IN_PATTERN)})
+
+ if check == "login":
+ self.account.relogin(self.user)
+ self.retry(reason=_("Not logged in"))
+
+
+def getInfo(urls):
+ for chunk in chunks(urls, 100):
+ yield checkFile(FileserveCom, chunk)
diff --git a/pyload/plugin/hoster/FileshareInUa.py b/pyload/plugin/hoster/FileshareInUa.py
new file mode 100644
index 000000000..94897d804
--- /dev/null
+++ b/pyload/plugin/hoster/FileshareInUa.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class FileshareInUa(DeadHoster):
+ __name = "FileshareInUa"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'https?://(?:www\.)?fileshare\.in\.ua/\w{7}'
+
+ __description = """Fileshare.in.ua hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("fwannmacher", "felipe@warhammerproject.com")]
+
+
+getInfo = create_getInfo(FileshareInUa)
diff --git a/pyload/plugin/hoster/FilesonicCom.py b/pyload/plugin/hoster/FilesonicCom.py
new file mode 100644
index 000000000..cfb0e563b
--- /dev/null
+++ b/pyload/plugin/hoster/FilesonicCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class FilesonicCom(DeadHoster):
+ __name = "FilesonicCom"
+ __type = "hoster"
+ __version = "0.35"
+
+ __pattern = r'http://(?:www\.)?filesonic\.com/file/\w+'
+
+ __description = """Filesonic.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de"),
+ ("paulking", "")]
+
+
+getInfo = create_getInfo(FilesonicCom)
diff --git a/pyload/plugin/hoster/FilezyNet.py b/pyload/plugin/hoster/FilezyNet.py
new file mode 100644
index 000000000..f80d3a59d
--- /dev/null
+++ b/pyload/plugin/hoster/FilezyNet.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class FilezyNet(DeadHoster):
+ __name = "FilezyNet"
+ __type = "hoster"
+ __version = "0.20"
+
+ __pattern = r'http://(?:www\.)?filezy\.net/\w{12}'
+
+ __description = """Filezy.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = []
+
+
+getInfo = create_getInfo(FilezyNet)
diff --git a/pyload/plugin/hoster/FiredriveCom.py b/pyload/plugin/hoster/FiredriveCom.py
new file mode 100644
index 000000000..a889035f0
--- /dev/null
+++ b/pyload/plugin/hoster/FiredriveCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class FiredriveCom(DeadHoster):
+ __name = "FiredriveCom"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'https?://(?:www\.)?(firedrive|putlocker)\.com/(mobile/)?(file|embed)/(?P<ID>\w+)'
+
+ __description = """Firedrive.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+getInfo = create_getInfo(FiredriveCom)
diff --git a/pyload/plugin/hoster/FlyFilesNet.py b/pyload/plugin/hoster/FlyFilesNet.py
new file mode 100644
index 000000000..a76ce846d
--- /dev/null
+++ b/pyload/plugin/hoster/FlyFilesNet.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+ from urllib import unquote_plus
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.SimpleHoster import SimpleHoster
+
+
+class FlyFilesNet(SimpleHoster):
+ __name = "FlyFilesNet"
+ __type = "hoster"
+ __version = "0.10"
+
+ __pattern = r'http://(?:www\.)?flyfiles\.net/.*'
+
+ __description = """FlyFiles.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = []
+
+ SESSION_PATTERN = r'flyfiles\.net/(.*)/.*'
+ NAME_PATTERN = r'flyfiles\.net/.*/(.*)'
+
+
+ def process(self, pyfile):
+ name = re.search(self.NAME_PATTERN, pyfile.url).group(1)
+ pyfile.name = unquote_plus(name)
+
+ session = re.search(self.SESSION_PATTERN, pyfile.url).group(1)
+
+ url = "http://flyfiles.net"
+
+ # get download URL
+ parsed_url = getURL(url, post={"getDownLink": session}, cookies=True)
+ self.logDebug("Parsed URL: %s" % parsed_url)
+
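+ # a bare "#downlink|" response apparently means the link is not ready yet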
+ if parsed_url == '#downlink|' or parsed_url == "#downlink|#":
+ self.logWarning(_("Could not get the download URL. Please wait 10 minutes"))
+ self.wait(10 * 60, True)
+ self.retry()
+
+ download_url = parsed_url.replace('#downlink|', '')
+
+ self.download(download_url)
diff --git a/pyload/plugin/hoster/FourSharedCom.py b/pyload/plugin/hoster/FourSharedCom.py
new file mode 100644
index 000000000..e7097271d
--- /dev/null
+++ b/pyload/plugin/hoster/FourSharedCom.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class FourSharedCom(SimpleHoster):
+ __name = "FourSharedCom"
+ __type = "hoster"
+ __version = "0.30"
+
+ __pattern = r'https?://(?:www\.)?4shared(\-china)?\.com/(account/)?(download|get|file|document|photo|video|audio|mp3|office|rar|zip|archive|music)/.+?/.*'
+
+ __description = """4Shared.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<meta name="title" content="(?P<N>.+?)"'
+ SIZE_PATTERN = r'<span title="Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)">'
+ OFFLINE_PATTERN = r'The file link that you requested is not valid\.|This file was deleted.'
+
+ NAME_REPLACEMENTS = [(r"&#(\d+).", lambda m: unichr(int(m.group(1))))]
+ SIZE_REPLACEMENTS = [(",", "")]
+
+ DOWNLOAD_URL_PATTERN = r'name="d3link" value="(.*?)"'
+ DOWNLOAD_BUTTON_PATTERN = r'id="btnLink" href="(.*?)"'
+ FID_PATTERN = r'name="d3fid" value="(.*?)"'
+
+
+ def handleFree(self):
+ if not self.account:
+ self.fail(_("User not logged in"))
+
+ m = re.search(self.DOWNLOAD_BUTTON_PATTERN, self.html)
+ if m:
+ link = m.group(1)
+ else:
+ link = re.sub(r'/(download|get|file|document|photo|video|audio)/', r'/get/', self.pyfile.url)
+
+ self.html = self.load(link)
+
+ m = re.search(self.DOWNLOAD_URL_PATTERN, self.html)
+ if m is None:
+ self.error(_("Download link"))
+ link = m.group(1)
+
+ try:
+ m = re.search(self.FID_PATTERN, self.html)
+ res = self.load('http://www.4shared.com/web/d2/getFreeDownloadLimitInfo?fileId=%s' % m.group(1))
+ self.logDebug(res)
+ except Exception:
+ pass
+
+ self.wait(20)
+ self.download(link)
+
+
+getInfo = create_getInfo(FourSharedCom)
diff --git a/pyload/plugin/hoster/FreakshareCom.py b/pyload/plugin/hoster/FreakshareCom.py
new file mode 100644
index 000000000..498a77a22
--- /dev/null
+++ b/pyload/plugin/hoster/FreakshareCom.py
@@ -0,0 +1,176 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+from pyload.plugin.hoster.UnrestrictLi import secondsToMidnight
+from pyload.plugin.internal.captcha import ReCaptcha
+
+
+class FreakshareCom(Hoster):
+ __name = "FreakshareCom"
+ __type = "hoster"
+ __version = "0.39"
+
+ __pattern = r'http://(?:www\.)?freakshare\.(net|com)/files/\S*?/'
+
+ __description = """Freakshare.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("sitacuisses", "sitacuisses@yahoo.de"),
+ ("spoob", "spoob@pyload.org"),
+ ("mkaay", "mkaay@mkaay.de"),
+ ("Toilal", "toilal.dev@gmail.com")]
+
+
+ def setup(self):
+ self.multiDL = False
+ self.req_opts = []
+
+
+ def process(self, pyfile):
+ self.pyfile = pyfile
+
+ pyfile.url = pyfile.url.replace("freakshare.net/", "freakshare.com/")
+
+ if self.account:
+ self.html = self.load(pyfile.url, cookies=False)
+ pyfile.name = self.get_file_name()
+ self.download(pyfile.url)
+
+ else:
+ self.prepare()
+ self.get_file_url()
+
+ self.download(pyfile.url, post=self.req_opts)
+
+ check = self.checkDownload({"bad": "bad try",
+ "paralell": "> Sorry, you cant download more then 1 files at time. <",
+ "empty": "Warning: Unknown: Filename cannot be empty",
+ "wrong_captcha": "Wrong Captcha!",
+ "downloadserver": "No Downloadserver. Please try again later!"})
+
+ if check == "bad":
+ self.fail(_("Bad Try"))
+ elif check == "paralell":
+ self.setWait(300, True)
+ self.wait()
+ self.retry()
+ elif check == "empty":
+ self.fail(_("File not downloadable"))
+ elif check == "wrong_captcha":
+ self.invalidCaptcha()
+ self.retry()
+ elif check == "downloadserver":
+ self.retry(5, 15 * 60, _("No Download server"))
+
+
+ def prepare(self):
+ pyfile = self.pyfile
+
+ self.download_html()
+
+ if not self.file_exists():
+ self.offline()
+
+ self.setWait(self.get_waiting_time())
+
+ pyfile.name = self.get_file_name()
+ pyfile.size = self.get_file_size()
+
+ self.wait()
+
+ return True
+
+
+ def download_html(self):
+ self.load("http://freakshare.com/index.php", {"language": "EN"}) # Set english language in server session
+ self.html = self.load(self.pyfile.url)
+
+
+ def get_file_url(self):
+ """ returns the absolute downloadable filepath
+ """
+ if not self.html:
+ self.download_html()
+ if not self.wantReconnect:
+ self.req_opts = self.get_download_options() # get the Post options for the Request
+ #file_url = self.pyfile.url
+ #return file_url
+ else:
+ self.offline()
+
+
+ def get_file_name(self):
+ if not self.html:
+ self.download_html()
+ if not self.wantReconnect:
+ file_name = re.search(r"<h1\sclass=\"box_heading\"\sstyle=\"text-align:center;\">([^ ]+)", self.html)
+ if file_name is not None:
+ file_name = file_name.group(1)
+ else:
+ file_name = self.pyfile.url
+ return file_name
+ else:
+ return self.pyfile.url
+
+
+ def get_file_size(self):
+ size = 0
+ if not self.html:
+ self.download_html()
+ if not self.wantReconnect:
+ file_size_check = re.search(
+ r"<h1\sclass=\"box_heading\"\sstyle=\"text-align:center;\">[^ ]+ - ([^ ]+) (\w\w)yte", self.html)
+ if file_size_check is not None:
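+ # Strip thousands separators and scale "<value> <K/M/G>Byte" to a byte count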
+ units = float(file_size_check.group(1).replace(",", ""))
+ pow = {'KB': 1, 'MB': 2, 'GB': 3}[file_size_check.group(2)]
+ size = int(units * 1024 ** pow)
+
+ return size
+
+
+ def get_waiting_time(self):
+ if not self.html:
+ self.download_html()
+
+ if "Your Traffic is used up for today" in self.html:
+ self.wantReconnect = True
+ return secondsToMidnight(gmt=2)
+
+ timestring = re.search('\s*var\s(?:downloadWait|time)\s=\s(\d*)[\d.]*;', self.html)
+ if timestring:
+ return int(timestring.group(1))
+ else:
+ return 60
+
+
+ def file_exists(self):
+ """ returns True or False
+ """
+ if not self.html:
+ self.download_html()
+ if re.search(r"This file does not exist!", self.html) is not None:
+ return False
+ else:
+ return True
+
+
+ def get_download_options(self):
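+ # Collect the hidden inputs of the "Free Download" form, re-submit them to reach the
+ # actual download page, then solve the embedded ReCaptcha (if present)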
+ re_envelope = re.search(r".*?value=\"Free\sDownload\".*?\n*?(.*?<.*?>\n*)*?\n*\s*?</form>",
+ self.html).group(0) # get the whole request
+ to_sort = re.findall(r"<input\stype=\"hidden\"\svalue=\"(.*?)\"\sname=\"(.*?)\"\s\/>", re_envelope)
+ request_options = dict((n, v) for (v, n) in to_sort)
+
+ herewego = self.load(self.pyfile.url, None, request_options) # load the actual download page
+
+ to_sort = re.findall(r"<input\stype=\".*?\"\svalue=\"(\S*?)\".*?name=\"(\S*?)\"\s.*?\/>", herewego)
+ request_options = dict((n, v) for (v, n) in to_sort)
+
+ challenge = re.search(r"http://api\.recaptcha\.net/challenge\?k=(\w+)", herewego)
+
+ if challenge:
+ re_captcha = ReCaptcha(self)
+ (request_options['recaptcha_challenge_field'],
+ request_options['recaptcha_response_field']) = re_captcha.challenge(challenge.group(1))
+
+ return request_options
diff --git a/pyload/plugin/hoster/FreeWayMe.py b/pyload/plugin/hoster/FreeWayMe.py
new file mode 100644
index 000000000..0d180e0fe
--- /dev/null
+++ b/pyload/plugin/hoster/FreeWayMe.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Hoster import Hoster
+
+
+class FreeWayMe(Hoster):
+ __name = "FreeWayMe"
+ __type = "hoster"
+ __version = "0.11"
+
+ __pattern = r'https://(?:www\.)?free-way\.me/.*'
+
+ __description = """FreeWayMe hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Nicolas Giese", "james@free-way.me")]
+
+
+ def setup(self):
+ self.resumeDownload = False
+ self.multiDL = self.premium
+ self.chunkLimit = 1
+
+
+ def process(self, pyfile):
+ if not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "FreeWayMe")
+ self.fail(_("No FreeWay account provided"))
+
+ self.logDebug("Old URL: %s" % pyfile.url)
+
+ (user, data) = self.account.selectAccount()
+
+ self.download(
+ "https://www.free-way.me/load.php",
+ get={"multiget": 7, "url": pyfile.url, "user": user, "pw": self.account.getpw(user), "json": ""},
+ disposition=True)
diff --git a/pyload/plugin/hoster/FreevideoCz.py b/pyload/plugin/hoster/FreevideoCz.py
new file mode 100644
index 000000000..5994cfca9
--- /dev/null
+++ b/pyload/plugin/hoster/FreevideoCz.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class FreevideoCz(DeadHoster):
+ __name = "FreevideoCz"
+ __type = "hoster"
+ __version = "0.30"
+
+ __pattern = r'http://(?:www\.)?freevideo\.cz/vase-videa/.+'
+
+ __description = """Freevideo.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(FreevideoCz) \ No newline at end of file
diff --git a/pyload/plugin/hoster/FshareVn.py b/pyload/plugin/hoster/FshareVn.py
new file mode 100644
index 000000000..88d8ebb6e
--- /dev/null
+++ b/pyload/plugin/hoster/FshareVn.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import strptime, mktime, gmtime
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, parseFileInfo
+
+
+def getInfo(urls):
+ for url in urls:
+ html = getURL("http://www.fshare.vn/check_link.php",
+ post={'action': "check_link", 'arrlinks': url},
+ decode=True)
+
+ yield parseFileInfo(FshareVn, url, html)
+
+
+def doubleDecode(m):
+ return m.group(1).decode('raw_unicode_escape')
+
+
+class FshareVn(SimpleHoster):
+ __name = "FshareVn"
+ __type = "hoster"
+ __version = "0.17"
+
+ __pattern = r'http://(?:www\.)?fshare\.vn/file/.*'
+
+ __description = """FshareVn hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ INFO_PATTERN = r'<p>(?P<N>[^<]+)<\\/p>[\\trn\s]*<p>(?P<S>[\d.,]+)\s*(?P<U>[\w^_]+)<\\/p>'
+ OFFLINE_PATTERN = r'<div class=\\"f_left file_w\\"|<\\/p>\\t\\t\\t\\t\\r\\n\\t\\t<p><\\/p>\\t\\t\\r\\n\\t\\t<p>0 KB<\\/p>'
+
+ NAME_REPLACEMENTS = [("(.*)", doubleDecode)]
+
+ LINK_PATTERN = r'action="(http://download.*?)[#"]'
+ WAIT_PATTERN = ur'Lượt tải xuống kế tiếp là:\s*(.*?)\s*<'
+
+
+ def process(self, pyfile):
+ self.html = self.load('http://www.fshare.vn/check_link.php', post={
+ "action": "check_link",
+ "arrlinks": pyfile.url
+ }, decode=True)
+ self.getFileInfo()
+
+ if self.premium:
+ self.handlePremium()
+ else:
+ self.handleFree()
+ self.checkDownloadedFile()
+
+
+ def handleFree(self):
+ self.html = self.load(self.pyfile.url, decode=True)
+
+ self.checkErrors()
+
+ action, inputs = self.parseHtmlForm('frm_download')
+ self.url = self.pyfile.url + action
+
+ if not inputs:
+ self.error(_("No FORM"))
+ elif 'link_file_pwd_dl' in inputs:
+ for password in self.getPassword().splitlines():
+ self.logInfo(_("Password protected link, trying ") + password)
+ inputs['link_file_pwd_dl'] = password
+ self.html = self.load(self.url, post=inputs, decode=True)
+ if 'name="link_file_pwd_dl"' not in self.html:
+ break
+ else:
+ self.fail(_("No or incorrect password"))
+ else:
+ self.html = self.load(self.url, post=inputs, decode=True)
+
+ self.checkErrors()
+
+ m = re.search(r'var count = (\d+)', self.html)
+ self.setWait(int(m.group(1)) if m else 30)
+
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("LINK_PATTERN not found"))
+ self.url = m.group(1)
+ self.logDebug("FREE DL URL: %s" % self.url)
+
+ self.wait()
+ self.download(self.url)
+
+
+ def handlePremium(self):
+ self.download(self.pyfile.url)
+
+
+ def checkErrors(self):
+ if '/error.php?' in self.req.lastEffectiveURL or u"Liên kết bạn chọn không tồn" in self.html:
+ self.offline()
+
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ self.logInfo(_("Wait until %s ICT") % m.group(1))
+ wait_until = mktime(strptime(m.group(1), "%d/%m/%Y %H:%M"))
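+ # The timestamp is server-local ICT (UTC+7); subtract the offset from GMT to get a relative wait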
+ self.wait(wait_until - mktime(gmtime()) - 7 * 60 * 60, True)
+ self.retry()
+ elif '<ul class="message-error">' in self.html:
+ msg = "Unknown error occurred or wait time not parsed"
+ self.logError(msg)
+ self.retry(30, 2 * 60, msg)
+
+ self.info.pop('error', None)
+
+
+ def checkDownloadedFile(self):
+ # check download
+ check = self.checkDownload({
+ "not_found": "<head><title>404 Not Found</title></head>"
+ })
+
+ if check == "not_found":
+ self.fail(_("File not found on server"))
diff --git a/pyload/plugin/hoster/Ftp.py b/pyload/plugin/hoster/Ftp.py
new file mode 100644
index 000000000..e11418a1b
--- /dev/null
+++ b/pyload/plugin/hoster/Ftp.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+import pycurl
+import re
+
+from urllib import quote, unquote
+from urlparse import urlparse
+
+from pyload.plugin.Hoster import Hoster
+
+
+class Ftp(Hoster):
+ __name = "Ftp"
+ __type = "hoster"
+ __version = "0.43"
+
+ __pattern = r'(?:ftps?|sftp)://([\w.-]+(:[\w.-]+)?@)?[\w.-]+(:\d+)?/.+'
+
+ __description = """Download from ftp directory"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.com"),
+ ("mkaay", "mkaay@mkaay.de"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def setup(self):
+ self.chunkLimit = -1
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ parsed_url = urlparse(pyfile.url)
+ netloc = parsed_url.netloc
+
+ pyfile.name = parsed_url.path.rpartition('/')[2]
+ try:
+ pyfile.name = unquote(str(pyfile.name)).decode('utf8')
+ except Exception:
+ pass
+
+ if "@" not in netloc:
+ servers = [x['login'] for x in self.account.getAllAccounts()] if self.account else []
+
+ if netloc in servers:
+ self.logDebug("Logging on to %s" % netloc)
+ self.req.addAuth(self.account.accounts[netloc]['password'])
+ else:
+ for pwd in self.getPassword().splitlines():
+ if ":" in pwd:
+ self.req.addAuth(pwd.strip())
+ break
+
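+ # Probe the URL with a header-only request (CURLOPT_NOBODY) to read the Content-Length first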
+ self.req.http.c.setopt(pycurl.NOBODY, 1)
+
+ try:
+ res = self.load(pyfile.url)
+ except pycurl.error, e:
+ self.fail(_("Error %d: %s") % e.args)
+
+ self.req.http.c.setopt(pycurl.NOBODY, 0)
+ self.logDebug(self.req.http.header)
+
+ m = re.search(r"Content-Length:\s*(\d+)", res)
+ if m:
+ pyfile.size = int(m.group(1))
+ self.download(pyfile.url)
+ else:
+ # Naive FTP directory listing
+ if re.search(r'^25\d.*?"', self.req.http.header, re.M):
+ pyfile.url = pyfile.url.rstrip('/')
+ pkgname = "/".join((pyfile.package().name, urlparse(pyfile.url).path.rpartition('/')[2]))
+ pyfile.url += '/'
+ self.req.http.c.setopt(48, 1) # CURLOPT_DIRLISTONLY
+ res = self.load(pyfile.url, decode=False)
+ links = [pyfile.url + quote(x) for x in res.splitlines()]
+ self.logDebug("LINKS", links)
+ self.core.api.addPackage(pkgname, links)
+ else:
+ self.fail(_("Unexpected server response"))
diff --git a/pyload/plugin/hoster/GamefrontCom.py b/pyload/plugin/hoster/GamefrontCom.py
new file mode 100644
index 000000000..81568e376
--- /dev/null
+++ b/pyload/plugin/hoster/GamefrontCom.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import parseFileSize
+
+
+class GamefrontCom(Hoster):
+ __name = "GamefrontCom"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'http://(?:www\.)?gamefront\.com/files/\w+'
+
+ __description = """Gamefront.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("fwannmacher", "felipe@warhammerproject.com")]
+
+
+ PATTERN_FILENAME = r'<title>(.*?) \| Game Front'
+ PATTERN_FILESIZE = r'<dt>File Size:</dt>[\n\s]*<dd>(.*?)</dd>'
+ PATTERN_OFFLINE = r'This file doesn\'t exist, or has been removed.'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = -1
+
+
+ def process(self, pyfile):
+ self.pyfile = pyfile
+ self.html = self.load(pyfile.url, decode=True)
+
+ if not self._checkOnline():
+ self.offline()
+
+ pyfile.name = self._getName()
+
+ link = self._getLink()
+
+ if not link.startswith('http://'):
+ link = "http://www.gamefront.com/" + link
+
+ self.download(link)
+
+
+ def _checkOnline(self):
+ if re.search(self.PATTERN_OFFLINE, self.html):
+ return False
+ else:
+ return True
+
+
+ def _getName(self):
+ name = re.search(self.PATTERN_FILENAME, self.html)
+ if name is None:
+ self.fail(_("Plugin broken"))
+
+ return name.group(1)
+
+
+ def _getLink(self):
+ self.html2 = self.load("http://www.gamefront.com/" + re.search("(files/service/thankyou\\?id=\w+)",
+ self.html).group(1))
+ return re.search("<a href=\"(http://media\d+\.gamefront.com/.*)\">click here</a>", self.html2).group(1).replace("&amp;", "&")
+
+
+def getInfo(urls):
+ result = []
+
+ for url in urls:
+ html = getURL(url)
+
+ if re.search(GamefrontCom.PATTERN_OFFLINE, html):
+ result.append((url, 0, 1, url))
+ else:
+ name = re.search(GamefrontCom.PATTERN_FILENAME, html)
+ if name is None:
+ result.append((url, 0, 1, url))
+ else:
+ name = name.group(1)
+ size = re.search(GamefrontCom.PATTERN_FILESIZE, html)
+ size = parseFileSize(size.group(1))
+
+ result.append((name, size, 3, url))
+
+ yield result
diff --git a/pyload/plugin/hoster/GigapetaCom.py b/pyload/plugin/hoster/GigapetaCom.py
new file mode 100644
index 000000000..ded18c235
--- /dev/null
+++ b/pyload/plugin/hoster/GigapetaCom.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from random import randint
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class GigapetaCom(SimpleHoster):
+ __name = "GigapetaCom"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?gigapeta\.com/dl/\w+'
+
+ __description = """GigaPeta.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<img src=".*" alt="file" />-->\s*(?P<N>.*?)\s*</td>'
+ SIZE_PATTERN = r'<th>\s*Size\s*</th>\s*<td>\s*(?P<S>.*?)\s*</td>'
+ OFFLINE_PATTERN = r'<div id="page_error">'
+
+ COOKIES = [("gigapeta.com", "lang", "us")]
+
+
+ def handleFree(self):
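+ # The captcha image is keyed by a random client-side id that must be echoed back in the POST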
+ captcha_key = str(randint(1, 100000000))
+ captcha_url = "http://gigapeta.com/img/captcha.gif?x=%s" % captcha_key
+
+ for _i in xrange(5):
+ self.checkErrors()
+
+ captcha = self.decryptCaptcha(captcha_url)
+ self.html = self.load(self.pyfile.url,
+ post={'captcha_key': captcha_key,
+ 'captcha' : captcha,
+ 'download' : "Download"},
+ follow_location=False)
+
+ m = re.search(r'Location\s*:\s*(.+)', self.req.http.header, re.I)
+ if m:
+ download_url = m.group(1)
+ break
+ elif "Entered figures don&#96;t coincide with the picture" in self.html:
+ self.invalidCaptcha()
+ else:
+ self.fail(_("No valid captcha code entered"))
+
+ self.download(download_url)
+
+
+ def checkErrors(self):
+ if "All threads for IP" in self.html:
+ self.logDebug("Your IP is already downloading a file")
+ self.wait(5 * 60, True)
+ self.retry()
+
+ self.info.pop('error', None)
+
+
+getInfo = create_getInfo(GigapetaCom)
diff --git a/pyload/plugin/hoster/GooIm.py b/pyload/plugin/hoster/GooIm.py
new file mode 100644
index 000000000..af7143b7c
--- /dev/null
+++ b/pyload/plugin/hoster/GooIm.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# https://goo.im/devs/liquidsmooth/3.x/codina/Nightly/LS-KK-v3.2-2014-08-01-codina.zip
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class GooIm(SimpleHoster):
+ __name = "GooIm"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'https?://(?:www\.)?goo\.im/.+'
+
+ __description = """Goo.im hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
+
+
+ NAME_PATTERN = r'You will be redirected to .*(?P<N>[^/ ]+) in'
+ OFFLINE_PATTERN = r'The file you requested was not found'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def handleFree(self):
+ url = self.pyfile.url
+ self.html = self.load(url, cookies=True)
+ self.wait(10)
+ self.download(url, cookies=True)
+
+
+getInfo = create_getInfo(GooIm)
diff --git a/pyload/plugin/hoster/HellshareCz.py b/pyload/plugin/hoster/HellshareCz.py
new file mode 100644
index 000000000..9c3459a44
--- /dev/null
+++ b/pyload/plugin/hoster/HellshareCz.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class HellshareCz(SimpleHoster):
+ __name = "HellshareCz"
+ __type = "hoster"
+ __version = "0.83"
+
+ __pattern = r'(http://(?:www\.)?hellshare\.(?:cz|com|sk|hu|pl)/[^?]*/\d+).*'
+
+ __description = """Hellshare.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<h1 id="filename"[^>]*>(?P<N>[^<]+)</h1>'
+ SIZE_PATTERN = r'<strong id="FileSize_master">(?P<S>[\d.,]+)&nbsp;(?P<U>[\w^_]+)</strong>'
+ OFFLINE_PATTERN = r'<h1>File not found.</h1>'
+ SHOW_WINDOW_PATTERN = r'<a href="([^?]+/(\d+)/\?do=(fileDownloadButton|relatedFileDownloadButton-\2)-showDownloadWindow)"'
+
+
+ def setup(self):
+ self.resumeDownload = self.multiDL = True if self.account else False
+ self.chunkLimit = 1
+
+
+ def process(self, pyfile):
+ if not self.account:
+ self.fail(_("User not logged in"))
+ pyfile.url = re.match(self.__pattern, pyfile.url).group(1)
+ self.html = self.load(pyfile.url, decode=True)
+ self.getFileInfo()
+ if not self.checkTrafficLeft():
+ self.fail(_("Not enough traffic left for user ") + self.user)
+
+ m = re.search(self.SHOW_WINDOW_PATTERN, self.html)
+ if m is None:
+ self.error(_("SHOW_WINDOW_PATTERN not found"))
+
+ self.url = "http://www.hellshare.com" + m.group(1)
+ self.download(self.url)
+
+
+getInfo = create_getInfo(HellshareCz)
diff --git a/pyload/plugin/hoster/HellspyCz.py b/pyload/plugin/hoster/HellspyCz.py
new file mode 100644
index 000000000..2e0c63d1a
--- /dev/null
+++ b/pyload/plugin/hoster/HellspyCz.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class HellspyCz(DeadHoster):
+ __name = "HellspyCz"
+ __type = "hoster"
+ __version = "0.28"
+
+ __pattern = r'http://(?:www\.)?(?:hellspy\.(?:cz|com|sk|hu|pl)|sciagaj\.pl)(/\S+/\d+)/?.*'
+
+ __description = """HellSpy.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(HellspyCz)
diff --git a/pyload/plugin/hoster/HotfileCom.py b/pyload/plugin/hoster/HotfileCom.py
new file mode 100644
index 000000000..92b42bab6
--- /dev/null
+++ b/pyload/plugin/hoster/HotfileCom.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class HotfileCom(DeadHoster):
+ __name = "HotfileCom"
+ __type = "hoster"
+ __version = "0.37"
+
+ __pattern = r'https?://(?:www\.)?hotfile\.com/dl/\d+/\w+'
+
+ __description = """Hotfile.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("sitacuisses", "sitacuisses@yhoo.de"),
+ ("spoob", "spoob@pyload.org"),
+ ("mkaay", "mkaay@mkaay.de"),
+ ("JoKoT3", "jokot3@gmail.com")]
+
+
+getInfo = create_getInfo(HotfileCom)
diff --git a/pyload/plugin/hoster/HugefilesNet.py b/pyload/plugin/hoster/HugefilesNet.py
new file mode 100644
index 000000000..c32f2e190
--- /dev/null
+++ b/pyload/plugin/hoster/HugefilesNet.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class HugefilesNet(XFSHoster):
+ __name = "HugefilesNet"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?hugefiles\.net/\w{12}'
+
+ __description = """Hugefiles.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ HOSTER_DOMAIN = "hugefiles.net"
+
+ SIZE_PATTERN = r'File Size:</span>\s*<span[^>]*>(?P<S>[^<]+)</span></div>'
+
+ FORM_INPUTS_MAP = {'ctype': re.compile(r'\d+')}
+
+
+getInfo = create_getInfo(HugefilesNet)
diff --git a/pyload/plugin/hoster/HundredEightyUploadCom.py b/pyload/plugin/hoster/HundredEightyUploadCom.py
new file mode 100644
index 000000000..b5e8c2d4c
--- /dev/null
+++ b/pyload/plugin/hoster/HundredEightyUploadCom.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://180upload.com/js9qdm6kjnrs
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class HundredEightyUploadCom(XFSHoster):
+ __name = "HundredEightyUploadCom"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'http://(?:www\.)?180upload\.com/\w{12}'
+
+ __description = """180upload.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ HOSTER_DOMAIN = "180upload.com"
+
+ NAME_PATTERN = r'Filename:</b></td><td nowrap>(?P<N>.+)</td></tr>-->'
+ SIZE_PATTERN = r'Size:</b></td><td>(?P<S>[\d.,]+) (?P<U>[\w^_]+)\s*<small>'
+
+
+getInfo = create_getInfo(HundredEightyUploadCom)
diff --git a/pyload/plugin/hoster/IFileWs.py b/pyload/plugin/hoster/IFileWs.py
new file mode 100644
index 000000000..31a5f6d62
--- /dev/null
+++ b/pyload/plugin/hoster/IFileWs.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class IFileWs(DeadHoster):
+ __name = "IFileWs"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?ifile\.ws/\w{12}'
+
+ __description = """Ifile.ws hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("z00nx", "z00nx0@gmail.com")]
+
+
+getInfo = create_getInfo(IFileWs)
diff --git a/pyload/plugin/hoster/IcyFilesCom.py b/pyload/plugin/hoster/IcyFilesCom.py
new file mode 100644
index 000000000..da45be10f
--- /dev/null
+++ b/pyload/plugin/hoster/IcyFilesCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class IcyFilesCom(DeadHoster):
+ __name = "IcyFilesCom"
+ __type = "hoster"
+ __version = "0.06"
+
+ __pattern = r'http://(?:www\.)?icyfiles\.com/(.*)'
+
+ __description = """IcyFiles.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("godofdream", "soilfiction@gmail.com")]
+
+
+getInfo = create_getInfo(IcyFilesCom)
diff --git a/pyload/plugin/hoster/IfileIt.py b/pyload/plugin/hoster/IfileIt.py
new file mode 100644
index 000000000..1995b210f
--- /dev/null
+++ b/pyload/plugin/hoster/IfileIt.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.utils import json_loads
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class IfileIt(SimpleHoster):
+ __name = "IfileIt"
+ __type = "hoster"
+ __version = "0.28"
+
+ __pattern = r'^unmatchable$'
+
+ __description = """Ifile.it"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ LINK_PATTERN = r'</span> If it doesn\'t, <a target="_blank" href="([^"]+)">'
+ RECAPTCHA_PATTERN = r'var __recaptcha_public\s*=\s*\'(.+?)\''
+ INFO_PATTERN = r'<span style="cursor: default;[^>]*>\s*(?P<N>.*?)\s*&nbsp;\s*<strong>\s*(?P<S>[\d.,]+)\s*(?P<U>[\w^_]+)\s*</strong>\s*</span>'
+ OFFLINE_PATTERN = r'<span style="cursor: default;[^>]*>\s*&nbsp;\s*<strong>\s*</strong>\s*</span>'
+ TEMP_OFFLINE_PATTERN = r'<span class="msg_red">Downloading of this file is temporarily disabled</span>'
+
+
+ def handleFree(self):
+ ukey = re.match(self.__pattern, self.pyfile.url).group(1)
+ json_url = 'http://ifile.it/new_download-request.json'
+ post_data = {"ukey": ukey, "ab": "0"}
+ res = json_loads(self.load(json_url, post=post_data))
+
+ self.logDebug(res)
+
+ if res['status'] == 3:
+ self.offline()
+
+ if res['captcha']:
+ captcha_key = re.search(self.RECAPTCHA_PATTERN, self.html).group(1)
+
+ recaptcha = ReCaptcha(self)
+ post_data['ctype'] = "recaptcha"
+
+ for _i in xrange(5):
+ challenge, response = recaptcha.challenge(captcha_key)
+ post_data.update({'recaptcha_challenge': challenge,
+ 'recaptcha_response' : response})
+ res = json_loads(self.load(json_url, post=post_data))
+ self.logDebug(res)
+
+ if res['retry']:
+ self.invalidCaptcha()
+ else:
+ self.correctCaptcha()
+ break
+ else:
+ self.fail(_("Incorrect captcha"))
+
+ if "ticket_url" not in res:
+ self.error(_("No download URL"))
+
+ self.download(res['ticket_url'])
+
+
+getInfo = create_getInfo(IfileIt)
diff --git a/pyload/plugin/hoster/IfolderRu.py b/pyload/plugin/hoster/IfolderRu.py
new file mode 100644
index 000000000..984d2f3ad
--- /dev/null
+++ b/pyload/plugin/hoster/IfolderRu.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class IfolderRu(SimpleHoster):
+ __name = "IfolderRu"
+ __type = "hoster"
+ __version = "0.38"
+
+ __pattern = r'http://(?:www\.)?(?:ifolder\.ru|rusfolder\.(?:com|net|ru))/(?:files/)?(?P<ID>\d+).*'
+
+ __description = """Ifolder.ru hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ SIZE_REPLACEMENTS = [(u'Кб', 'KB'), (u'Мб', 'MB'), (u'Гб', 'GB')]
+ NAME_PATTERN = ur'(?:<div><span>)?Название:(?:</span>)? <b>(?P<N>[^<]+)</b><(?:/div|br)>'
+ SIZE_PATTERN = ur'(?:<div><span>)?Размер:(?:</span>)? <b>(?P<S>[^<]+)</b><(?:/div|br)>'
+ OFFLINE_PATTERN = ur'<p>Файл номер <b>[^<]*</b> (не найден|удален) !!!</p>'
+
+ SESSION_ID_PATTERN = r'<a href=(http://ints\.(?:rusfolder\.com|ifolder\.ru)/ints/sponsor/\?bi=\d*&session=([^&]+)&u=[^>]+)>'
+ INTS_SESSION_PATTERN = r'\(\'ints_session\'\);\s*if\(tag\)\{tag\.value = "([^"]+)";\}'
+ HIDDEN_INPUT_PATTERN = r'var v = .*?name=\'(.+?)\' value=\'1\''
+ LINK_PATTERN = r'<a id="download_file_href" href="([^"]+)"'
+ WRONG_CAPTCHA_PATTERN = ur'<font color=Red>неверный код,<br>введите еще раз</font><br>'
+
+
+ def setup(self):
+ self.resumeDownload = self.multiDL = True if self.account else False
+ self.chunkLimit = 1
+
+
+ def process(self, pyfile):
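+ # rusfolder serves an intermediate "ints" sponsor frame: fetch it to obtain a session id,
+ # then solve its captcha before the real download link is exposed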
+ file_id = re.match(self.__pattern, pyfile.url).group('ID')
+ self.html = self.load("http://rusfolder.com/%s" % file_id, cookies=True, decode=True)
+ self.getFileInfo()
+
+ url = re.search(r"location\.href = '(http://ints\..*?=)'", self.html).group(1)
+ self.html = self.load(url, cookies=True, decode=True)
+
+ url, session_id = re.search(self.SESSION_ID_PATTERN, self.html).groups()
+ self.html = self.load(url, cookies=True, decode=True)
+
+ url = "http://ints.rusfolder.com/ints/frame/?session=%s" % session_id
+ self.html = self.load(url, cookies=True)
+
+ self.wait(31, False)
+
+ captcha_url = "http://ints.rusfolder.com/random/images/?session=%s" % session_id
+ for _i in xrange(5):
+ self.html = self.load(url, cookies=True)
+ action, inputs = self.parseHtmlForm('ID="Form1"')
+ inputs['ints_session'] = re.search(self.INTS_SESSION_PATTERN, self.html).group(1)
+ inputs[re.search(self.HIDDEN_INPUT_PATTERN, self.html).group(1)] = '1'
+ inputs['confirmed_number'] = self.decryptCaptcha(captcha_url, cookies=True)
+ inputs['action'] = '1'
+ self.logDebug(inputs)
+
+ self.html = self.load(url, decode=True, cookies=True, post=inputs)
+ if self.WRONG_CAPTCHA_PATTERN in self.html:
+ self.invalidCaptcha()
+ else:
+ break
+ else:
+ self.fail(_("Invalid captcha"))
+
+ download_url = re.search(self.LINK_PATTERN, self.html).group(1)
+ self.correctCaptcha()
+ self.download(download_url)
+
+
+getInfo = create_getInfo(IfolderRu)
diff --git a/pyload/plugin/hoster/JumbofilesCom.py b/pyload/plugin/hoster/JumbofilesCom.py
new file mode 100644
index 000000000..b7c82082a
--- /dev/null
+++ b/pyload/plugin/hoster/JumbofilesCom.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class JumbofilesCom(SimpleHoster):
+ __name = "JumbofilesCom"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?jumbofiles\.com/(\w{12}).*'
+
+ __description = """JumboFiles.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("godofdream", "soilfiction@gmail.com")]
+
+
+ INFO_PATTERN = r'<TR><TD>(?P<N>[^<]+?)\s*<small>\((?P<S>[\d.,]+)\s*(?P<U>[\w^_]+)'
+ OFFLINE_PATTERN = r'Not Found or Deleted / Disabled due to inactivity or DMCA'
+ LINK_PATTERN = r'<meta http-equiv="refresh" content="10;url=(.+)">'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def handleFree(self):
+ ukey = re.match(self.__pattern, self.pyfile.url).group(1)
+ post_data = {"id": ukey, "op": "download3", "rand": ""}
+ html = self.load(self.pyfile.url, post=post_data, decode=True)
+ url = re.search(self.LINK_PATTERN, html).group(1)
+ self.download(url)
+
+
+getInfo = create_getInfo(JumbofilesCom)
diff --git a/pyload/plugin/hoster/JunocloudMe.py b/pyload/plugin/hoster/JunocloudMe.py
new file mode 100644
index 000000000..b455c8049
--- /dev/null
+++ b/pyload/plugin/hoster/JunocloudMe.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class JunocloudMe(XFSHoster):
+ __name = "JunocloudMe"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'http://(?:\w+\.)?junocloud\.me/\w{12}'
+
+ __description = """Junocloud.me hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "junocloud.me"
+
+ URL_REPLACEMENTS = [(r'//(www\.)?junocloud', "//dl3.junocloud")]
+
+ SIZE_PATTERN = r'<p class="request_filesize">Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)</p>'
+
+ OFFLINE_PATTERN = r'>No such file with this filename<'
+ TEMP_OFFLINE_PATTERN = r'The page may have been renamed, removed or be temporarily unavailable.<'
+
+
+getInfo = create_getInfo(JunocloudMe)
diff --git a/pyload/plugin/hoster/Keep2shareCc.py b/pyload/plugin/hoster/Keep2shareCc.py
new file mode 100644
index 000000000..a57965ca2
--- /dev/null
+++ b/pyload/plugin/hoster/Keep2shareCc.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin, urlparse
+
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import _isDirectLink, SimpleHoster, create_getInfo
+
+
+class Keep2shareCc(SimpleHoster):
+ __name = "Keep2shareCc"
+ __type = "hoster"
+ __version = "0.17"
+
+ __pattern = r'https?://(?:www\.)?(keep2share|k2s|keep2s)\.cc/file/(?P<ID>\w+)'
+
+ __description = """Keep2share.cc hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ URL_REPLACEMENTS = [(__pattern + ".*", "http://k2s.cc/file/\g<ID>")]
+
+ NAME_PATTERN = r'File: <span>(?P<N>.+)</span>'
+ SIZE_PATTERN = r'Size: (?P<S>[^<]+)</div>'
+
+ OFFLINE_PATTERN = r'File not found or deleted|Sorry, this file is blocked or deleted|Error 404'
+ TEMP_OFFLINE_PATTERN = r'Downloading blocked due to'
+
+ LINK_FREE_PATTERN = LINK_PREMIUM_PATTERN = r'"([^"]+url.html?file=.+?)"|window\.location\.href = \'(.+?)\';'
+
+ CAPTCHA_PATTERN = r'src="(/file/captcha\.html.+?)"'
+
+ WAIT_PATTERN = r'Please wait ([\d:]+) to download this file'
+ TEMP_ERROR_PATTERN = r'>\s*(Download count files exceed|Traffic limit exceed|Free account does not allow to download more than one file at the same time)'
+ ERROR_PATTERN = r'>\s*(Free user can\'t download large files|You no can access to this file|This download available only for premium users|This is private file)'
+
+
+ def checkErrors(self):
+ m = re.search(self.TEMP_ERROR_PATTERN, self.html)
+ if m:
+ self.info['error'] = m.group(1)
+ self.wantReconnect = True
+ self.retry(wait_time=30 * 60, reason=m.group(0))
+
+ m = re.search(self.ERROR_PATTERN, self.html)
+ if m:
+ errmsg = self.info['error'] = m.group(1)
+ self.error(errmsg)
+
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ self.logDebug("Hoster told us to wait for %s" % m.group(1))
+
+ # hh:mm:ss string to seconds conversion, courtesy of https://stackoverflow.com/questions/10663720
+ ftr = [3600, 60, 1]
+ wait_time = sum([a * b for a, b in zip(ftr, map(int, m.group(1).split(':')))])
+
+ self.wantReconnect = True
+ self.retry(wait_time=wait_time, reason="Please wait to download this file")
+
+ self.info.pop('error', None)
+
+
+ def handleFree(self):
+ self.fid = re.search(r'<input type="hidden" name="slow_id" value="([^"]+)">', self.html).group(1)
+ self.html = self.load(self.pyfile.url, post={'yt0': '', 'slow_id': self.fid})
+
+ self.checkErrors()
+
+ m = re.search(self.LINK_FREE_PATTERN, self.html)
+
+ if m is None:
+ self.handleCaptcha()
+
+ self.wait(30)
+
+ self.html = self.load(self.pyfile.url, post={'uniqueId': self.fid, 'free': 1})
+
+ self.checkErrors()
+
+ m = re.search(self.LINK_FREE_PATTERN, self.html)
+ if m is None:
+ self.error(_("LINK_FREE_PATTERN not found"))
+
+ self.link = m.group(1)
+
+
+ def handleCaptcha(self):
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(5):
+ post_data = {'free' : 1,
+ 'freeDownloadRequest': 1,
+ 'uniqueId' : self.fid,
+ 'yt0' : ''}
+
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m:
+ captcha_url = urljoin(self.base, m.group(1))
+ post_data['CaptchaForm[code]'] = self.decryptCaptcha(captcha_url)
+ else:
+ challenge, response = recaptcha.challenge()
+ post_data.update({'recaptcha_challenge_field': challenge,
+ 'recaptcha_response_field' : response})
+
+ self.html = self.load(self.pyfile.url, post=post_data)
+
+ if 'recaptcha' not in self.html:
+ self.correctCaptcha()
+ break
+ else:
+ self.invalidCaptcha()
+ else:
+ self.fail(_("All captcha attempts failed"))
+
+
+ def downloadLink(self, link):
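+ # Ticket links may be relative; resolve them against the scheme and host of the original page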
+ if not link:
+ return
+
+ p = urlparse(self.pyfile.url)
+ base = "%s://%s" % (p.scheme, p.netloc)
+ link = _isDirectLink(self, link, self.premium)
+
+ if link:
+ self.download(urljoin(base, link), disposition=True)
+
+
+getInfo = create_getInfo(Keep2shareCc)
diff --git a/pyload/plugin/hoster/KickloadCom.py b/pyload/plugin/hoster/KickloadCom.py
new file mode 100644
index 000000000..781abf654
--- /dev/null
+++ b/pyload/plugin/hoster/KickloadCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class KickloadCom(DeadHoster):
+ __name = "KickloadCom"
+ __type = "hoster"
+ __version = "0.21"
+
+ __pattern = r'http://(?:www\.)?kickload\.com/get/.+'
+
+ __description = """Kickload.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+getInfo = create_getInfo(KickloadCom)
diff --git a/pyload/plugin/hoster/KingfilesNet.py b/pyload/plugin/hoster/KingfilesNet.py
new file mode 100644
index 000000000..d863dbbf0
--- /dev/null
+++ b/pyload/plugin/hoster/KingfilesNet.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.captcha import SolveMedia
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class KingfilesNet(SimpleHoster):
+ __name = "KingfilesNet"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?kingfiles\.net/(?P<ID>\w{12})'
+
+ __description = """Kingfiles.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'name="fname" value="(?P<N>.+?)">'
+ SIZE_PATTERN = r'>Size: .+?">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ OFFLINE_PATTERN = r'>(File Not Found</b><br><br>|File Not Found</h2>)'
+
+ RAND_ID_PATTERN = r'type=\"hidden\" name=\"rand\" value=\"(.+)\">'
+
+ LINK_PATTERN = r'var download_url = \'(.+)\';'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def handleFree(self):
+ # Click the free user button
+ post_data = {'op' : "download1",
+ 'usr_login' : "",
+ 'id' : self.info['pattern']['ID'],
+ 'fname' : self.pyfile.name,
+ 'referer' : "",
+ 'method_free': "+"}
+
+ self.html = self.load(self.pyfile.url, post=post_data, cookies=True, decode=True)
+
+ solvemedia = SolveMedia(self)
+ challenge, response = solvemedia.challenge()
+
+ # Make the downloadlink appear and load the file
+ m = re.search(self.RAND_ID_PATTERN, self.html)
+ if m is None:
+ self.error(_("Random key not found"))
+
+ rand = m.group(1)
+ self.logDebug("rand = ", rand)
+
+ post_data = {'op' : "download2",
+ 'id' : self.info['pattern']['ID'],
+ 'rand' : rand,
+ 'referer' : self.pyfile.url,
+ 'method_free' : "+",
+ 'method_premium' : "",
+ 'adcopy_response' : response,
+ 'adcopy_challenge': challenge,
+ 'down_direct' : "1"}
+
+ self.html = self.load(self.pyfile.url, post=post_data, cookies=True, decode=True)
+
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("Download url not found"))
+
+ self.download(m.group(1), cookies=True, disposition=True)
+
+ check = self.checkDownload({'html': re.compile("<html>")})
+ if check == "html":
+ self.error(_("Downloaded file is an html page"))
+
+
+getInfo = create_getInfo(KingfilesNet)
diff --git a/pyload/plugin/hoster/LemUploadsCom.py b/pyload/plugin/hoster/LemUploadsCom.py
new file mode 100644
index 000000000..8663504dc
--- /dev/null
+++ b/pyload/plugin/hoster/LemUploadsCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class LemUploadsCom(DeadHoster):
+ __name = "LemUploadsCom"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'https?://(?:www\.)?lemuploads\.com/\w{12}'
+
+ __description = """LemUploads.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
+
+
+getInfo = create_getInfo(LemUploadsCom)
diff --git a/pyload/plugin/hoster/LetitbitNet.py b/pyload/plugin/hoster/LetitbitNet.py
new file mode 100644
index 000000000..164e61c01
--- /dev/null
+++ b/pyload/plugin/hoster/LetitbitNet.py
@@ -0,0 +1,142 @@
+# -*- coding: utf-8 -*-
+#
+# API Documentation:
+# http://api.letitbit.net/reg/static/api.pdf
+#
+# Test links:
+# http://letitbit.net/download/07874.0b5709a7d3beee2408bb1f2eefce/random.bin.html
+
+import re
+
+from urllib import urlencode, urlopen
+from urlparse import urljoin
+
+from pyload.utils import json_loads, json_dumps
+from pyload.plugin.hoster.UnrestrictLi import secondsToMidnight
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster
+
+
+def api_download_info(url):
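+# Query the letitbit JSON API ("download/info") using the key hard-coded in this plugin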
+ json_data = ["yw7XQy2v9", ["download/info", {"link": url}]]
+ post_data = urlencode({'r': json_dumps(json_data)})
+ api_rep = urlopen("http://api.letitbit.net/json", data=post_data).read()
+ return json_loads(api_rep)
+
+
+def getInfo(urls):
+ for url in urls:
+ api_rep = api_download_info(url)
+ if api_rep['status'] == 'OK':
+ info = api_rep['data'][0]
+ yield (info['name'], info['size'], 2, url)
+ else:
+ yield (url, 0, 1, url)
+
+
+class LetitbitNet(SimpleHoster):
+ __name = "LetitbitNet"
+ __type = "hoster"
+ __version = "0.26"
+
+ __pattern = r'https?://(?:www\.)?(letitbit|shareflare)\.net/download/.*'
+
+ __description = """Letitbit.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("z00nx", "z00nx0@gmail.com")]
+
+
+ URL_REPLACEMENTS = [(r"(?<=http://)([^/]+)", "letitbit.net")]
+
+ SECONDS_PATTERN = r'seconds\s*=\s*(\d+);'
+ CAPTCHA_CONTROL_FIELD = r'recaptcha_control_field\s=\s\'(?P<value>.+?)\''
+
+
+ def setup(self):
+ self.resumeDownload = True
+
+
+ def getFileInfo(self):
+ api_rep = api_download_info(self.pyfile.url)
+ if api_rep['status'] == 'OK':
+ self.api_data = api_rep['data'][0]
+ self.pyfile.name = self.api_data['name']
+ self.pyfile.size = self.api_data['size']
+ else:
+ self.offline()
+
+
+ def handleFree(self):
+ action, inputs = self.parseHtmlForm('id="ifree_form"')
+ if not action:
+ self.error(_("ifree_form"))
+
+ self.pyfile.size = float(inputs['sssize'])
+ self.logDebug(action, inputs)
+ inputs['desc'] = ""
+
+ self.html = self.load(urljoin("http://letitbit.net/", action), post=inputs, cookies=True)
+
+ m = re.search(self.SECONDS_PATTERN, self.html)
+ seconds = int(m.group(1)) if m else 60
+ self.logDebug("Seconds found", seconds)
+ m = re.search(self.CAPTCHA_CONTROL_FIELD, self.html)
+ recaptcha_control_field = m.group(1)
+ self.logDebug("ReCaptcha control field found", recaptcha_control_field)
+ self.wait(seconds)
+
+ res = self.load("http://letitbit.net/ajax/download3.php", post=" ", cookies=True)
+ if res != '1':
+ self.error(_("Unknown response - ajax_check_url"))
+ self.logDebug(res)
+
+ recaptcha = ReCaptcha(self)
+ challenge, response = recaptcha.challenge()
+
+ post_data = {"recaptcha_challenge_field": challenge,
+ "recaptcha_response_field": response,
+ "recaptcha_control_field": recaptcha_control_field}
+ self.logDebug("Post data to send", post_data)
+ res = self.load("http://letitbit.net/ajax/check_recaptcha.php", post=post_data, cookies=True)
+ self.logDebug(res)
+ if not res:
+ self.invalidCaptcha()
+ if res == "error_free_download_blocked":
+ self.logWarning(_("Daily limit reached"))
+ self.wait(secondsToMidnight(gmt=2), True)
+ if res == "error_wrong_captcha":
+ self.invalidCaptcha()
+ self.retry()
+ elif res.startswith('['):
+ urls = json_loads(res)
+ elif res.startswith('http://'):
+ urls = [res]
+ else:
+ self.error(_("Unknown response - captcha check"))
+
+ self.correctCaptcha()
+
+ for download_url in urls:
+ try:
+ self.download(download_url)
+ break
+ except Exception, e:
+ self.logError(e)
+ else:
+ self.fail(_("Download did not finish correctly"))
+
+
+ def handlePremium(self):
+ api_key = self.user
+ premium_key = self.account.getAccountData(self.user)['password']
+
+ json_data = [api_key, ["download/direct_links", {"pass": premium_key, "link": self.pyfile.url}]]
+ api_rep = self.load('http://api.letitbit.net/json', post={'r': json_dumps(json_data)})
+ self.logDebug("API Data: " + api_rep)
+ api_rep = json_loads(api_rep)
+
+ if api_rep['status'] == 'FAIL':
+ self.fail(api_rep['data'])
+
+ self.download(api_rep['data'][0][0], disposition=True)
diff --git a/pyload/plugin/hoster/LinksnappyCom.py b/pyload/plugin/hoster/LinksnappyCom.py
new file mode 100644
index 000000000..b94b8c965
--- /dev/null
+++ b/pyload/plugin/hoster/LinksnappyCom.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urlsplit
+
+from pyload.utils import json_loads, json_dumps
+from pyload.plugin.Hoster import Hoster
+
+
+class LinksnappyCom(Hoster):
+ __name = "LinksnappyCom"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'https?://(?:[^/]*\.)?linksnappy\.com'
+
+ __description = """Linksnappy.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ SINGLE_CHUNK_HOSTERS = ('easybytez.com',)
+
+
+ def setup(self):
+ self.chunkLimit = -1
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ if re.match(self.__pattern, pyfile.url):
+ new_url = pyfile.url
+ elif not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "Linksnappy.com")
+ self.fail(_("No Linksnappy.com account provided"))
+ else:
+ self.logDebug("Old URL: %s" % pyfile.url)
+ host = self._get_host(pyfile.url)
+ json_params = json_dumps({'link': pyfile.url,
+ 'type': host,
+ 'username': self.user,
+ 'password': self.account.getAccountData(self.user)['password']})
+ r = self.load('http://gen.linksnappy.com/genAPI.php',
+ post={'genLinks': json_params})
+ self.logDebug("JSON data: " + r)
+
+ j = json_loads(r)['links'][0]
+
+ if j['error']:
+ msg = _("Error converting the link")
+ self.logError(msg, j['error'])
+ self.fail(msg)
+
+ pyfile.name = j['filename']
+ new_url = j['generated']
+
+ if host in self.SINGLE_CHUNK_HOSTERS:
+ self.chunkLimit = 1
+ else:
+ self.setup()
+
+ if new_url != pyfile.url:
+ self.logDebug("New URL: " + new_url)
+
+ self.download(new_url, disposition=True)
+
+ check = self.checkDownload({"html302": "<title>302 Found</title>"})
+ if check == "html302":
+ self.retry(wait_time=5, reason=_("Linksnappy returns only HTML data"))
+
+
+ @staticmethod
+ def _get_host(url):
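+ # Keep only the last two labels of the host (e.g. "www.example.com" -> "example.com")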
+ host = urlsplit(url).netloc
+ return re.search(r'[\w-]+\.\w+$', host).group(0)
diff --git a/pyload/plugin/hoster/LoadTo.py b/pyload/plugin/hoster/LoadTo.py
new file mode 100644
index 000000000..14341ac6a
--- /dev/null
+++ b/pyload/plugin/hoster/LoadTo.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://www.load.to/JWydcofUY6/random.bin
+# http://www.load.to/oeSmrfkXE/random100.bin
+
+import re
+
+from pyload.plugin.internal.captcha import SolveMedia
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class LoadTo(SimpleHoster):
+ __name = "LoadTo"
+ __type = "hoster"
+ __version = "0.18"
+
+ __pattern = r'http://(?:www\.)?load\.to/\w+'
+
+ __description = """Load.to hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("halfman", "Pulpan3@gmail.com"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ NAME_PATTERN = r'<h1>(?P<N>.+)</h1>'
+ SIZE_PATTERN = r'Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+ OFFLINE_PATTERN = r'>Can\'t find file'
+
+ LINK_PATTERN = r'<form method="post" action="(.+?)"'
+ WAIT_PATTERN = r'type="submit" value="Download \((\d+)\)"'
+
+ URL_REPLACEMENTS = [(r'(\w)$', r'\1/')]
+
+
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = 1
+
+
+ def handleFree(self):
+ # Search for Download URL
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("LINK_PATTERN not found"))
+
+ download_url = m.group(1)
+
+ # Set Timer - may be obsolete
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ self.wait(int(m.group(1)))
+
+ # Load.to is using solvemedia captchas since ~july 2014:
+ solvemedia = SolveMedia(self)
+ captcha_key = solvemedia.detect_key()
+
+ if captcha_key is None:
+ self.download(download_url)
+ else:
+ challenge, response = solvemedia.challenge(captcha_key)
+
+ self.download(download_url, post={"adcopy_challenge": challenge, "adcopy_response": response})
+
+ check = self.checkDownload({'404': re.compile("\A<h1>404 Not Found</h1>"), 'html': re.compile("html")})
+
+ if check == "404":
+ self.invalidCaptcha()
+ self.retry()
+ elif check == "html":
+ self.logWarning(_("Downloaded file is an html page, will retry"))
+ self.retry()
+
+
+getInfo = create_getInfo(LoadTo)
diff --git a/pyload/plugin/hoster/LomafileCom.py b/pyload/plugin/hoster/LomafileCom.py
new file mode 100644
index 000000000..3c0f5ea5b
--- /dev/null
+++ b/pyload/plugin/hoster/LomafileCom.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class LomafileCom(XFSHoster):
+ __name = "LomafileCom"
+ __type = "hoster"
+ __version = "0.51"
+
+ __pattern = r'http://lomafile\.com/\w{12}'
+
+ __description = """Lomafile.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("nath_schwarz", "nathan.notwhite@gmail.com"),
+ ("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "lomafile.com"
+
+ NAME_PATTERN = r'<a href="http://lomafile\.com/\w{12}/(?P<N>.+?)">'
+ SIZE_PATTERN = r'Size:</b></td><td>(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ OFFLINE_PATTERN = r'>(No such file|Software error:<)'
+ TEMP_OFFLINE_PATTERN = r'The page may have been renamed, removed or be temporarily unavailable.<'
+
+ CAPTCHA_PATTERN = r'(http://lomafile\.com/captchas/[^"\']+)'
+
+
+getInfo = create_getInfo(LomafileCom)
diff --git a/pyload/plugin/hoster/LuckyShareNet.py b/pyload/plugin/hoster/LuckyShareNet.py
new file mode 100644
index 000000000..8f0843529
--- /dev/null
+++ b/pyload/plugin/hoster/LuckyShareNet.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.utils import json_loads
+
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class LuckyShareNet(SimpleHoster):
+ __name = "LuckyShareNet"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'https?://(?:www\.)?luckyshare\.net/(?P<ID>\d{10,})'
+
+ __description = """LuckyShare.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ INFO_PATTERN = r'<h1 class=\'file_name\'>(?P<N>\S+)</h1>\s*<span class=\'file_size\'>Filesize: (?P<S>[\d.,]+)(?P<U>[\w^_]+)</span>'
+ OFFLINE_PATTERN = r'There is no such file available'
+
+
+ def parseJson(self, rep):
+ if 'AJAX Error' in rep:
+ html = self.load(self.pyfile.url, decode=True)
+ m = re.search(r"waitingtime = (\d+);", html)
+ if m:
+ seconds = int(m.group(1))
+ self.logDebug("You have to wait %d seconds between free downloads" % seconds)
+ self.retry(wait_time=seconds)
+ else:
+ self.error(_("Unable to detect wait time between free downloads"))
+ elif 'Hash expired' in rep:
+ self.retry(reason=_("Hash expired"))
+ return json_loads(rep)
+
+
+ # TODO: There should be a filesize limit for free downloads
+ # TODO: Some files could not be downloaded in free mode
+ def handleFree(self):
+ rep = self.load(r"http://luckyshare.net/download/request/type/time/file/" + self.info['pattern']['ID'], decode=True)
+ self.logDebug("JSON: " + rep)
+ json = self.parseJson(rep)
+
+ self.wait(int(json['time']))
+
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(5):
+ challenge, response = recaptcha.challenge()
+ rep = self.load(r"http://luckyshare.net/download/verify/challenge/%s/response/%s/hash/%s" %
+ (challenge, response, json['hash']), decode=True)
+ self.logDebug("JSON: " + rep)
+ if 'link' in rep:
+ json.update(self.parseJson(rep))
+ self.correctCaptcha()
+ break
+ elif 'Verification failed' in rep:
+ self.invalidCaptcha()
+ else:
+ self.error(_("Unable to get download link"))
+
+ if not json['link']:
+ self.fail(_("No Download url retrieved/all captcha attempts failed"))
+
+ self.download(json['link'])
+
+
+getInfo = create_getInfo(LuckyShareNet)
diff --git a/pyload/plugin/hoster/MediafireCom.py b/pyload/plugin/hoster/MediafireCom.py
new file mode 100644
index 000000000..009a42c62
--- /dev/null
+++ b/pyload/plugin/hoster/MediafireCom.py
@@ -0,0 +1,124 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.captcha import SolveMedia
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, parseFileInfo
+from pyload.network.RequestFactory import getURL
+
+
+def replace_eval(js_expr):
+ return js_expr.replace(r'eval("', '').replace(r"\'", r"'").replace(r'\"', r'"')
+
+
+def checkHTMLHeader(url):
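+# Follow redirects by hand (up to 3). Returns (url, 1) for deleted files (errno=320),
+# (url, 2) when the header carries Content-Disposition (direct download), (url, 3) on errors, (url, 0) otherwise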
+ try:
+ for _i in xrange(3):
+ header = getURL(url, just_header=True)
+ for line in header.splitlines():
+ line = line.lower()
+ if 'location' in line:
+ url = line.split(':', 1)[1].strip()
+ if 'error.php?errno=320' in url:
+ return url, 1
+ if not url.startswith('http://'):
+ url = 'http://www.mediafire.com' + url
+ break
+ elif 'content-disposition' in line:
+ return url, 2
+ else:
+ break
+ except Exception:
+ return url, 3
+
+ return url, 0
+
+
+def getInfo(urls):
+ for url in urls:
+ location, status = checkHTMLHeader(url)
+
+ if status:
+ file_info = (url, 0, status, url)
+ else:
+ file_info = parseFileInfo(MediafireCom, url, getURL(url, decode=True))
+
+ yield file_info
+
+
+class MediafireCom(SimpleHoster):
+ __name = "MediafireCom"
+ __type = "hoster"
+ __version = "0.80"
+
+ __pattern = r'http://(?:www\.)?mediafire\.com/(file/|(view/?|download\.php)?\?)(\w{11}|\w{15})($|/)'
+
+ __description = """Mediafire.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ LINK_PATTERN = r'<div class="download_link"[^>]*(?:z-index:(?P<zindex>\d+))?[^>]*>\s*<a href="(?P<href>http://[^"]+)"'
+ JS_KEY_PATTERN = r'DoShow\(\'mfpromo1\'\);[^{]*{((\w+)=\'\';.*?)eval\(\2\);'
+ JS_ZMODULO_PATTERN = r'\(\'z-index\'\)\) \% (\d+)\)\);'
+ PAGE1_ACTION_PATTERN = r'<link rel="canonical" href="([^"]+)"/>'
+ PASSWORD_PATTERN = r'<form name="form_password"'
+
+ NAME_PATTERN = r'<META NAME="description" CONTENT="(?P<N>[^"]+)"/>'
+ INFO_PATTERN = r'oFileSharePopup\.ald\(\'(?P<ID>[^\']*)\',\'(?P<N>[^\']*)\',\'(?P<S>[^\']*)\',\'\',\'(?P<sha256>[^\']*)\'\)'
+ OFFLINE_PATTERN = r'class="error_msg_title"> Invalid or Deleted File. </div>'
+
+
+ def setup(self):
+ self.multiDL = False
+
+
+ def process(self, pyfile):
+ pyfile.url = re.sub(r'/view/?\?', '/?', pyfile.url)
+
+ self.url, result = checkHTMLHeader(pyfile.url)
+ self.logDebug("Location (%d): %s" % (result, self.url))
+
+ if result == 0:
+ self.html = self.load(self.url, decode=True)
+ self.checkCaptcha()
+ self.multiDL = True
+ self.check_data = self.getFileInfo()
+
+ if self.account:
+ self.handlePremium()
+ else:
+ self.handleFree()
+ elif result == 1:
+ self.offline()
+ else:
+ self.multiDL = True
+ self.download(self.url, disposition=True)
+
+
+ def handleFree(self):
+ passwords = self.getPassword().splitlines()
+ while self.PASSWORD_PATTERN in self.html:
+ if len(passwords):
+ password = passwords.pop(0)
+ self.logInfo(_("Password protected link, trying ") + password)
+ self.html = self.load(self.url, post={"downloadp": password})
+ else:
+ self.fail(_("No or incorrect password"))
+
+ m = re.search(r'kNO = r"(http://.*?)";', self.html)
+ if m is None:
+ self.error(_("No download URL"))
+
+ download_url = m.group(1)
+ self.download(download_url)
+
+
+ def checkCaptcha(self):
+ solvemedia = SolveMedia(self)
+ challenge, response = solvemedia.challenge()
+ self.html = self.load(self.url,
+ post={'adcopy_challenge': challenge,
+ 'adcopy_response' : response},
+ decode=True)
diff --git a/pyload/plugin/hoster/MegaCoNz.py b/pyload/plugin/hoster/MegaCoNz.py
new file mode 100644
index 000000000..b8f02278b
--- /dev/null
+++ b/pyload/plugin/hoster/MegaCoNz.py
@@ -0,0 +1,171 @@
+# -*- coding: utf-8 -*-
+
+import random
+import re
+
+from array import array
+from base64 import standard_b64decode
+from os import remove
+
+from Crypto.Cipher import AES
+from Crypto.Util import Counter
+from pycurl import SSL_CIPHER_LIST
+
+from pyload.utils import json_loads, json_dumps
+from pyload.plugin.Hoster import Hoster
+
+############################ General errors ###################################
+# EINTERNAL (-1): An internal error has occurred. Please submit a bug report, detailing the exact circumstances in which this error occurred
+# EARGS (-2): You have passed invalid arguments to this command
+# EAGAIN (-3): (always at the request level) A temporary congestion or server malfunction prevented your request from being processed. No data was altered. Retry. Retries must be spaced with exponential backoff
+# ERATELIMIT (-4): You have exceeded your command weight per time quota. Please wait a few seconds, then try again (this should never happen in sane real-life applications)
+#
+############################ Upload errors ####################################
+# EFAILED (-5): The upload failed. Please restart it from scratch
+# ETOOMANY (-6): Too many concurrent IP addresses are accessing this upload target URL
+# ERANGE (-7): The upload file packet is out of range or not starting and ending on a chunk boundary
+# EEXPIRED (-8): The upload target URL you are trying to access has expired. Please request a fresh one
+#
+############################ Stream/System errors #############################
+# ENOENT (-9): Object (typically, node or user) not found
+# ECIRCULAR (-10): Circular linkage attempted
+# EACCESS (-11): Access violation (e.g., trying to write to a read-only share)
+# EEXIST (-12): Trying to create an object that already exists
+# EINCOMPLETE (-13): Trying to access an incomplete resource
+# EKEY (-14): A decryption operation failed (never returned by the API)
+# ESID (-15): Invalid or expired user session, please relogin
+# EBLOCKED (-16): User blocked
+# EOVERQUOTA (-17): Request over quota
+# ETEMPUNAVAIL (-18): Resource temporarily not available, please try again later
+# ETOOMANYCONNECTIONS (-19): Too many connections on this resource
+# EWRITE (-20): Write failed
+# EREAD (-21): Read failed
+# EAPPKEY (-22): Invalid application key; request not processed
+
+
+class MegaCoNz(Hoster):
+ __name = "MegaCoNz"
+ __type = "hoster"
+ __version = "0.16"
+
+ __pattern = r'https?://(\w+\.)?mega\.co\.nz/#!([\w!-]+)'
+
+ __description = """Mega.co.nz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "ranan@pyload.org")]
+
+ API_URL = "https://g.api.mega.co.nz/cs"
+ FILE_SUFFIX = ".crypted"
+
+
+ def b64_decode(self, data):
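+ # MEGA uses URL-safe base64 without padding; restore the standard alphabet and pad before decoding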
+ data = data.replace("-", "+").replace("_", "/")
+ return standard_b64decode(data + '=' * (-len(data) % 4))
+
+
+ def getCipherKey(self, key):
+ """ Construct the cipher key from the given data """
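+ # MEGA node keys are 256 bit: the upper half carries IV/meta-MAC material,
+ # so XOR-ing the two 128-bit halves yields the actual AES key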
+ a = array("I", key)
+ key_array = array("I", [a[0] ^ a[4], a[1] ^ a[5], a[2] ^ a[6], a[3] ^ a[7]])
+ return key_array
+
+
+ def callApi(self, **kwargs):
+ """ Dispatch a call to the api, see https://mega.co.nz/#developers """
+ # generate a session id, no idea where to obtain elsewhere
+ uid = random.randint(10 << 9, 10 ** 10)
+
+ res = self.load(self.API_URL, get={'id': uid}, post=json_dumps([kwargs]))
+ self.logDebug("Api Response: " + res)
+ return json_loads(res)
+
+
+ def decryptAttr(self, data, key):
+ cbc = AES.new(self.getCipherKey(key), AES.MODE_CBC, "\0" * 16)
+ attr = cbc.decrypt(self.b64_decode(data))
+ self.logDebug("Decrypted Attr: " + attr)
+ if not attr.startswith("MEGA"):
+ self.fail(_("Decryption failed"))
+
+ # Data is padded, 0-bytes must be stripped
+ return json_loads(re.search(r'{.+?}', attr).group(0))
+
+
+ def decryptFile(self, key):
+ """ Decrypts the file at self.lastDownload """
+
+ # upper 64 bit of counter start
+ n = key[16:24]
+
+ # convert counter to long and shift bytes
+ ctr = Counter.new(128, initial_value=long(n.encode("hex"), 16) << 64)
+ cipher = AES.new(self.getCipherKey(key), AES.MODE_CTR, counter=ctr)
+
+ self.pyfile.setStatus("decrypting")
+
+ file_crypted = self.lastDownload
+ file_decrypted = file_crypted.rsplit(self.FILE_SUFFIX)[0]
+
+ try:
+ f = open(file_crypted, "rb")
+ df = open(file_decrypted, "wb")
+ except IOError, e:
+ self.fail(str(e))
+
+ # TODO: calculate CBC-MAC for checksum
+
+ size = 2 ** 15 # buffer size, 32k
+ while True:
+ buf = f.read(size)
+ if not buf:
+ break
+
+ df.write(cipher.decrypt(buf))
+
+ f.close()
+ df.close()
+ remove(file_crypted)
+
+ self.lastDownload = file_decrypted
+
+
+ def process(self, pyfile):
+ key = None
+
+ # match is guaranteed because plugin was chosen to handle url
+ node = re.match(self.__pattern, pyfile.url).group(2)
+ if "!" in node:
+ node, key = node.split("!")
+
+ self.logDebug("File id: %s | Key: %s" % (node, key))
+
+ if not key:
+ self.fail(_("No file key provided in the URL"))
+
+ # g is for requesting a download url
+ # this is similar to the calls in the mega js app, documentation is very bad
+ dl = self.callApi(a="g", g=1, p=node, ssl=1)[0]
+
+ if "e" in dl:
+ e = dl['e']
+ # ETEMPUNAVAIL (-18): Resource temporarily not available, please try again later
+ if e == -18:
+ self.retry()
+ else:
+ self.fail(_("Error code:") + e)
+
+ # TODO: map other error codes, e.g
+ # EACCESS (-11): Access violation (e.g., trying to write to a read-only share)
+
+ key = self.b64_decode(key)
+ attr = self.decryptAttr(dl['at'], key)
+
+ pyfile.name = attr['n'] + self.FILE_SUFFIX
+
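+        # restrict the TLS cipher list for the download connection (presumably chosen to keep CPU overhead low)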
+ self.req.http.c.setopt(SSL_CIPHER_LIST, "RC4-MD5:DEFAULT")
+
+ self.download(dl['g'])
+ self.decryptFile(key)
+
+ # Everything is finished and final name can be set
+ pyfile.name = attr['n']
diff --git a/pyload/plugin/hoster/MegaDebridEu.py b/pyload/plugin/hoster/MegaDebridEu.py
new file mode 100644
index 000000000..d17a8eeeb
--- /dev/null
+++ b/pyload/plugin/hoster/MegaDebridEu.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urllib import unquote_plus
+
+from pyload.utils import json_loads
+from pyload.plugin.Hoster import Hoster
+
+
+class MegaDebridEu(Hoster):
+ __name = "MegaDebridEu"
+ __type = "hoster"
+ __version = "0.40"
+
+ __pattern = r'^https?://(?:w{3}\d+\.mega-debrid\.eu|\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/download/file/[^/]+/.+$'
+
+ __description = """mega-debrid.eu hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("D.Ducatel", "dducatel@je-geek.fr")]
+
+
+ API_URL = "https://www.mega-debrid.eu/api.php"
+
+
+ def getFilename(self, url):
+ try:
+ return unquote_plus(url.rsplit("/", 1)[1])
+ except IndexError:
+ return ""
+
+
+ def process(self, pyfile):
+ if re.match(self.__pattern, pyfile.url):
+ new_url = pyfile.url
+ elif not self.account:
+ self.exitOnFail("Please enter your %s account or deactivate this plugin" % "Mega-debrid.eu")
+ else:
+ if not self.connectToApi():
+ self.exitOnFail("Unable to connect to Mega-debrid.eu")
+
+ self.logDebug("Old URL: %s" % pyfile.url)
+ new_url = self.debridLink(pyfile.url)
+ self.logDebug("New URL: " + new_url)
+
+ filename = self.getFilename(new_url)
+ if filename != "":
+ pyfile.name = filename
+ self.download(new_url, disposition=True)
+
+
+ def connectToApi(self):
+ """
+ Connexion to the mega-debrid API
+ Return True if succeed
+ """
+ user, data = self.account.selectAccount()
+ jsonResponse = self.load(self.API_URL,
+ get={'action': 'connectUser', 'login': user, 'password': data['password']})
+ res = json_loads(jsonResponse)
+
+ if res['response_code'] == "ok":
+ self.token = res['token']
+ return True
+ else:
+ return False
+
+
+ def debridLink(self, linkToDebrid):
+ """
+ Debrid a link
+ Return The debrided link if succeed or original link if fail
+ """
+ jsonResponse = self.load(self.API_URL, get={'action': 'getLink', 'token': self.token},
+ post={"link": linkToDebrid})
+ res = json_loads(jsonResponse)
+
+ if res['response_code'] == "ok":
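+            # the API apparently wraps the link in quotes; strip the first and last character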
+ debridedLink = res['debridLink'][1:-1]
+ return debridedLink
+ else:
+ self.exitOnFail("Unable to debrid %s" % linkToDebrid)
+
+
+ def exitOnFail(self, msg):
+ """
+ exit the plugin on fail case
+ And display the reason of this failure
+ """
+ if self.getConfig("unloadFailing"):
+ self.logError(_(msg))
+ self.resetAccount()
+ else:
+ self.fail(_(msg))
diff --git a/pyload/plugin/hoster/MegaFilesSe.py b/pyload/plugin/hoster/MegaFilesSe.py
new file mode 100644
index 000000000..6b6e5c0b1
--- /dev/null
+++ b/pyload/plugin/hoster/MegaFilesSe.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class MegaFilesSe(DeadHoster):
+ __name = "MegaFilesSe"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?megafiles\.se/\w{12}'
+
+ __description = """MegaFiles.se hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
+
+
+getInfo = create_getInfo(MegaFilesSe)
diff --git a/pyload/plugin/hoster/MegaRapidCz.py b/pyload/plugin/hoster/MegaRapidCz.py
new file mode 100644
index 000000000..380032724
--- /dev/null
+++ b/pyload/plugin/hoster/MegaRapidCz.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pycurl import HTTPHEADER
+
+from pyload.network.HTTPRequest import BadHeader  # raised by load(); assumed module path after the restructuring
+from pyload.network.RequestFactory import getRequest
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, parseFileInfo
+
+
+def getInfo(urls):
+ h = getRequest()
+ h.c.setopt(HTTPHEADER,
+ ["Accept: text/html",
+ "User-Agent: Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0"])
+
+ for url in urls:
+ html = h.load(url, decode=True)
+ yield parseFileInfo(MegaRapidCz, url, html)
+
+
+class MegaRapidCz(SimpleHoster):
+ __name = "MegaRapidCz"
+ __type = "hoster"
+ __version = "0.54"
+
+ __pattern = r'http://(?:www\.)?(share|mega)rapid\.cz/soubor/\d+/.+'
+
+ __description = """MegaRapid.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("MikyWoW", "mikywow@seznam.cz"),
+ ("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'<h1[^>]*><span[^>]*>(?:<a[^>]*>)?(?P<N>[^<]+)'
+ SIZE_PATTERN = r'<td class="i">Velikost:</td>\s*<td class="h"><strong>\s*(?P<S>[\d.,]+) (?P<U>[\w^_]+)</strong></td>'
+ OFFLINE_PATTERN = ur'Nastala chyba 404|Soubor byl smazán'
+
+ FORCE_CHECK_TRAFFIC = True
+
+ LINK_PATTERN = r'<a href="([^"]+)" title="Stahnout">([^<]+)</a>'
+    ERR_LOGIN_PATTERN = ur'<div class="error_div"><strong>Stahování je přístupné pouze přihlášeným uživatelům'
+    ERR_CREDIT_PATTERN = ur'<div class="error_div"><strong>Stahování zdarma je možné jen přes náš'
+
+
+ def setup(self):
+ self.chunkLimit = 1
+
+
+ def handlePremium(self):
+ try:
+ self.html = self.load(self.pyfile.url, decode=True)
+ except BadHeader, e:
+ self.account.relogin(self.user)
+ self.retry(wait_time=60, reason=str(e))
+
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m:
+ link = m.group(1)
+ self.logDebug("Premium link: %s" % link)
+ self.download(link, disposition=True)
+ else:
+ if re.search(self.ERR_LOGIN_PATTERN, self.html):
+ self.relogin(self.user)
+ self.retry(wait_time=60, reason=_("User login failed"))
+ elif re.search(self.ERR_CREDIT_PATTERN, self.html):
+ self.fail(_("Not enough credit left"))
+ else:
+ self.fail(_("Download link not found"))
diff --git a/pyload/plugin/hoster/MegacrypterCom.py b/pyload/plugin/hoster/MegacrypterCom.py
new file mode 100644
index 000000000..5bf0d0864
--- /dev/null
+++ b/pyload/plugin/hoster/MegacrypterCom.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.utils import json_loads, json_dumps
+
+from pyload.plugin.hoster.MegaCoNz import MegaCoNz
+
+
+class MegacrypterCom(MegaCoNz):
+ __name = "MegacrypterCom"
+ __type = "hoster"
+ __version = "0.21"
+
+ __pattern = r'(https?://\w{0,10}\.?megacrypter\.com/[\w!-]+)'
+
+ __description = """Megacrypter.com decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("GonzaloSR", "gonzalo@gonzalosr.com")]
+
+
+ API_URL = "http://megacrypter.com/api"
+ FILE_SUFFIX = ".crypted"
+
+
+ def callApi(self, **kwargs):
+ """ Dispatch a call to the api, see megacrypter.com/api_doc """
+ self.logDebug("JSON request: " + json_dumps(kwargs))
+ res = self.load(self.API_URL, post=json_dumps(kwargs))
+ self.logDebug("API Response: " + res)
+ return json_loads(res)
+
+
+ def process(self, pyfile):
+ # match is guaranteed because plugin was chosen to handle url
+ node = re.match(self.__pattern, pyfile.url).group(1)
+
+ # get Mega.co.nz link info
+ info = self.callApi(link=node, m="info")
+
+ # get crypted file URL
+ dl = self.callApi(link=node, m="dl")
+
+ # TODO: map error codes, implement password protection
+ # if info['pass'] is True:
+ # crypted_file_key, md5_file_key = info['key'].split("#")
+
+ key = self.b64_decode(info['key'])
+
+ pyfile.name = info['name'] + self.FILE_SUFFIX
+
+ self.download(dl['url'])
+ self.decryptFile(key)
+
+ # Everything is finished and final name can be set
+ pyfile.name = info['name']
diff --git a/pyload/plugin/hoster/MegareleaseOrg.py b/pyload/plugin/hoster/MegareleaseOrg.py
new file mode 100644
index 000000000..de388ef9d
--- /dev/null
+++ b/pyload/plugin/hoster/MegareleaseOrg.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class MegareleaseOrg(DeadHoster):
+ __name = "MegareleaseOrg"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'https?://(?:www\.)?megarelease\.org/\w{12}'
+
+ __description = """Megarelease.org hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("derek3x", "derek3x@vmail.me"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(MegareleaseOrg)
diff --git a/pyload/plugin/hoster/MegasharesCom.py b/pyload/plugin/hoster/MegasharesCom.py
new file mode 100644
index 000000000..235a2e4d4
--- /dev/null
+++ b/pyload/plugin/hoster/MegasharesCom.py
@@ -0,0 +1,113 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import time
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class MegasharesCom(SimpleHoster):
+ __name = "MegasharesCom"
+ __type = "hoster"
+ __version = "0.27"
+
+ __pattern = r'http://(?:www\.)?(d\d{2}\.)?megashares\.com/((index\.php)?\?d\d{2}=|dl/)\w+'
+
+ __description = """Megashares.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'<h1 class="black xxl"[^>]*title="(?P<N>[^"]+)">'
+ SIZE_PATTERN = r'<strong><span class="black">Filesize:</span></strong> (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+ OFFLINE_PATTERN = r'<dd class="red">(Invalid Link Request|Link has been deleted|Invalid link)'
+
+ LINK_PATTERN = r'<div id="show_download_button_%d"[^>]*>\s*<a href="([^"]+)">'
+
+ PASSPORT_LEFT_PATTERN = r'Your Download Passport is: <[^>]*>(\w+).*?You have.*?<[^>]*>.*?([\d.]+) (\w+)'
+ PASSPORT_RENEW_PATTERN = r'(\d+):<strong>(\d+)</strong>:<strong>(\d+)</strong>'
+ REACTIVATE_NUM_PATTERN = r'<input[^>]*id="random_num" value="(\d+)" />'
+ REACTIVATE_PASSPORT_PATTERN = r'<input[^>]*id="passport_num" value="(\w+)" />'
+ REQUEST_URI_PATTERN = r'var request_uri = "([^"]+)";'
+ NO_SLOTS_PATTERN = r'<dd class="red">All download slots for this link are currently filled'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = self.premium
+
+
+ def handlePremium(self):
+ self.handleDownload(True)
+
+
+ def handleFree(self):
+ if self.NO_SLOTS_PATTERN in self.html:
+ self.retry(wait_time=5 * 60)
+
+ m = re.search(self.REACTIVATE_PASSPORT_PATTERN, self.html)
+ if m:
+ passport_num = m.group(1)
+ request_uri = re.search(self.REQUEST_URI_PATTERN, self.html).group(1)
+
+ for _i in xrange(5):
+ random_num = re.search(self.REACTIVATE_NUM_PATTERN, self.html).group(1)
+
+ verifyinput = self.decryptCaptcha("http://d01.megashares.com/index.php",
+ get={'secgfx': "gfx", 'random_num': random_num})
+
+ self.logInfo(_("Reactivating passport %s: %s %s") % (passport_num, random_num, verifyinput))
+
+ res = self.load("http://d01.megashares.com%s" % request_uri,
+ get={'rs' : "check_passport_renewal",
+ 'rsargs[]': verifyinput,
+ 'rsargs[]': random_num,
+ 'rsargs[]': passport_num,
+ 'rsargs[]': "replace_sec_pprenewal",
+ 'rsrnd[]' : str(int(time() * 1000))})
+
+ if 'Thank you for reactivating your passport.' in res:
+ self.correctCaptcha()
+ self.retry()
+ else:
+ self.invalidCaptcha()
+ else:
+ self.fail(_("Failed to reactivate passport"))
+
+ m = re.search(self.PASSPORT_RENEW_PATTERN, self.html)
+ if m:
+            t = [int(x) for x in m.groups()]  # do not shadow the time() import used above
+            renew = t[0] * 3600 + t[1] * 60 + t[2]  # countdown appears to be hours:minutes:seconds
+ self.logDebug("Waiting %d seconds for a new passport" % renew)
+ self.retry(wait_time=renew, reason=_("Passport renewal"))
+
+ # Check traffic left on passport
+ m = re.search(self.PASSPORT_LEFT_PATTERN, self.html, re.M | re.S)
+ if m is None:
+ self.fail(_("Passport not found"))
+
+ self.logInfo(_("Download passport: %s") % m.group(1))
+ data_left = float(m.group(2)) * 1024 ** {'B': 0, 'KB': 1, 'MB': 2, 'GB': 3}[m.group(3)]
+ self.logInfo(_("Data left: %s %s (%d MB needed)") % (m.group(2), m.group(3), self.pyfile.size / 1048576))
+
+ if not data_left:
+ self.retry(wait_time=600, reason=_("Passport renewal"))
+
+ self.handleDownload(False)
+
+
+ def handleDownload(self, premium=False):
+        # Find the download link
+ m = re.search(self.LINK_PATTERN % (1 if premium else 2), self.html)
+ msg = _('%s download URL' % ('Premium' if premium else 'Free'))
+ if m is None:
+ self.error(msg)
+
+ download_url = m.group(1)
+ self.logDebug("%s: %s" % (msg, download_url))
+ self.download(download_url)
+
+
+getInfo = create_getInfo(MegasharesCom)
diff --git a/pyload/plugin/hoster/MegauploadCom.py b/pyload/plugin/hoster/MegauploadCom.py
new file mode 100644
index 000000000..468a7389d
--- /dev/null
+++ b/pyload/plugin/hoster/MegauploadCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class MegauploadCom(DeadHoster):
+ __name = "MegauploadCom"
+ __type = "hoster"
+ __version = "0.31"
+
+ __pattern = r'http://(?:www\.)?megaupload\.com/\?.*&?(d|v)=\w+'
+
+ __description = """Megaupload.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org")]
+
+
+getInfo = create_getInfo(MegauploadCom)
diff --git a/pyload/plugin/hoster/MegavideoCom.py b/pyload/plugin/hoster/MegavideoCom.py
new file mode 100644
index 000000000..2d1a17daf
--- /dev/null
+++ b/pyload/plugin/hoster/MegavideoCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class MegavideoCom(DeadHoster):
+ __name = "MegavideoCom"
+ __type = "hoster"
+ __version = "0.21"
+
+ __pattern = r'http://(?:www\.)?megavideo\.com/\?.*&?(d|v)=\w+'
+
+ __description = """Megavideo.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de"),
+ ("mkaay", "mkaay@mkaay.de")]
+
+
+getInfo = create_getInfo(MegavideoCom)
diff --git a/pyload/plugin/hoster/MovReelCom.py b/pyload/plugin/hoster/MovReelCom.py
new file mode 100644
index 000000000..90c3da2c8
--- /dev/null
+++ b/pyload/plugin/hoster/MovReelCom.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class MovReelCom(XFSHoster):
+ __name = "MovReelCom"
+ __type = "hoster"
+ __version = "1.24"
+
+ __pattern = r'http://(?:www\.)?movreel\.com/\w{12}'
+
+ __description = """MovReel.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("JorisV83", "jorisv83-pyload@yahoo.com")]
+
+
+ HOSTER_DOMAIN = "movreel.com"
+
+ NAME_PATTERN = r'Filename: <b>(?P<N>.+?)<'
+ SIZE_PATTERN = r'Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ LINK_PATTERN = r'<a href="([^"]+)">Download Link'
+
+
+getInfo = create_getInfo(MovReelCom)
diff --git a/pyload/plugin/hoster/MultishareCz.py b/pyload/plugin/hoster/MultishareCz.py
new file mode 100644
index 000000000..a66043fc0
--- /dev/null
+++ b/pyload/plugin/hoster/MultishareCz.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from random import random
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class MultishareCz(SimpleHoster):
+ __name = "MultishareCz"
+ __type = "hoster"
+ __version = "0.35"
+
+ __pattern = r'http://(?:www\.)?multishare\.cz/stahnout/(?P<ID>\d+).*'
+
+ __description = """MultiShare.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ SIZE_REPLACEMENTS = [('&nbsp;', '')]
+
+ MULTI_HOSTER = True
+
+ INFO_PATTERN = ur'(?:<li>Název|Soubor): <strong>(?P<N>[^<]+)</strong><(?:/li><li|br)>Velikost: <strong>(?P<S>[^<]+)</strong>'
+    OFFLINE_PATTERN = ur'<h1>Stáhnout soubor</h1><p><strong>Požadovaný soubor neexistuje.</strong></p>'
+
+
+ def process(self, pyfile):
+ msurl = re.match(self.__pattern, pyfile.url)
+ if msurl:
+ self.fileID = msurl.group('ID')
+ self.html = self.load(pyfile.url, decode=True)
+ self.getFileInfo()
+
+ if self.premium:
+ self.handlePremium()
+ else:
+ self.handleFree()
+ else:
+ self.handleOverriden()
+
+
+ def handleFree(self):
+ self.download("http://www.multishare.cz/html/download_free.php?ID=%s" % self.fileID)
+
+
+ def handlePremium(self):
+ if not self.checkCredit():
+ self.logWarning(_("Not enough credit left to download file"))
+ self.resetAccount()
+
+ self.download("http://www.multishare.cz/html/download_premium.php?ID=%s" % self.fileID)
+
+
+ def handleOverriden(self):
+ if not self.premium:
+ self.fail(_("Only premium users can download from other hosters"))
+
+ self.html = self.load('http://www.multishare.cz/html/mms_ajax.php', post={"link": self.pyfile.url}, decode=True)
+ self.getFileInfo()
+
+ if not self.checkCredit():
+ self.fail(_("Not enough credit left to download file"))
+
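+        # pick a pseudo-random dlN.mms.multishare.cz mirror for the actual transfer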
+ url = "http://dl%d.mms.multishare.cz/html/mms_process.php" % round(random() * 10000 * random())
+ params = {"u_ID": self.acc_info['u_ID'], "u_hash": self.acc_info['u_hash'], "link": self.pyfile.url}
+ self.logDebug(url, params)
+ self.download(url, get=params)
+
+
+ def checkCredit(self):
+ self.acc_info = self.account.getAccountInfo(self.user, True)
+ self.logInfo(_("User %s has %i MB left") % (self.user, self.acc_info['trafficleft'] / 1024))
+
+ return self.pyfile.size / 1024 <= self.acc_info['trafficleft']
+
+
+getInfo = create_getInfo(MultishareCz)
diff --git a/pyload/plugin/hoster/MyfastfileCom.py b/pyload/plugin/hoster/MyfastfileCom.py
new file mode 100644
index 000000000..528a4c2c8
--- /dev/null
+++ b/pyload/plugin/hoster/MyfastfileCom.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import json_loads
+
+
+class MyfastfileCom(Hoster):
+ __name = "MyfastfileCom"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'http://(?:www\.)?\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/dl/'
+
+ __description = """Myfastfile.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+
+ def setup(self):
+ self.chunkLimit = -1
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ if re.match(self.__pattern, pyfile.url):
+ new_url = pyfile.url
+ elif not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "Myfastfile.com")
+ self.fail(_("No Myfastfile.com account provided"))
+ else:
+ self.logDebug("Original URL: %s" % pyfile.url)
+ page = self.load('http://myfastfile.com/api.php',
+ get={'user': self.user, 'pass': self.account.getAccountData(self.user)['password'],
+ 'link': pyfile.url})
+ self.logDebug("JSON data: " + page)
+ page = json_loads(page)
+ if page['status'] != 'ok':
+ self.fail(_("Unable to unrestrict link"))
+ new_url = page['link']
+
+ if new_url != pyfile.url:
+ self.logDebug("Unrestricted URL: " + new_url)
+
+ self.download(new_url, disposition=True)
diff --git a/pyload/plugin/hoster/MyvideoDe.py b/pyload/plugin/hoster/MyvideoDe.py
new file mode 100644
index 000000000..08652cdc9
--- /dev/null
+++ b/pyload/plugin/hoster/MyvideoDe.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import html_unescape
+
+
+class MyvideoDe(Hoster):
+ __name = "MyvideoDe"
+ __type = "hoster"
+ __version = "0.90"
+
+ __pattern = r'http://(?:www\.)?myvideo\.de/watch/'
+
+ __description = """Myvideo.de hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org")]
+
+
+ def process(self, pyfile):
+ self.pyfile = pyfile
+ self.download_html()
+ pyfile.name = self.get_file_name()
+ self.download(self.get_file_url())
+
+
+ def download_html(self):
+ self.html = self.load(self.pyfile.url)
+
+
+ def get_file_url(self):
+ videoId = re.search(r"addVariable\('_videoid','(.*)'\);p.addParam\('quality'", self.html).group(1)
+ videoServer = re.search("rel='image_src' href='(.*)thumbs/.*' />", self.html).group(1)
+ file_url = videoServer + videoId + ".flv"
+ return file_url
+
+
+ def get_file_name(self):
+ file_name_pattern = r"<h1 class='globalHd'>(.*)</h1>"
+ return html_unescape(re.search(file_name_pattern, self.html).group(1).replace("/", "") + '.flv')
+
+
+ def file_exists(self):
+ self.download_html()
+ self.load(str(self.pyfile.url), cookies=False, just_header=True)
+ if self.req.lastEffectiveURL == "http://www.myvideo.de/":
+ return False
+ return True
diff --git a/pyload/plugin/hoster/NahrajCz.py b/pyload/plugin/hoster/NahrajCz.py
new file mode 100644
index 000000000..6e187e268
--- /dev/null
+++ b/pyload/plugin/hoster/NahrajCz.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class NahrajCz(DeadHoster):
+ __name = "NahrajCz"
+ __type = "hoster"
+ __version = "0.21"
+
+ __pattern = r'http://(?:www\.)?nahraj\.cz/content/download/.+'
+
+ __description = """Nahraj.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(NahrajCz)
diff --git a/pyload/plugin/hoster/NarodRu.py b/pyload/plugin/hoster/NarodRu.py
new file mode 100644
index 000000000..9d5b1f4e6
--- /dev/null
+++ b/pyload/plugin/hoster/NarodRu.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from random import random
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class NarodRu(SimpleHoster):
+ __name = "NarodRu"
+ __type = "hoster"
+ __version = "0.11"
+
+ __pattern = r'http://(?:www\.)?narod(\.yandex)?\.ru/(disk|start/\d+\.\w+-narod\.yandex\.ru)/(?P<ID>\d+)/.+'
+
+ __description = """Narod.ru hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<dt class="name">(?:<[^<]*>)*(?P<N>[^<]+)</dt>'
+ SIZE_PATTERN = r'<dd class="size">(?P<S>\d[^<]*)</dd>'
+    OFFLINE_PATTERN = r'<title>404</title>|Файл удален с сервиса|Закончился срок хранения файла\.'
+
+ SIZE_REPLACEMENTS = [(u'КБ', 'KB'), (u'МБ', 'MB'), (u'ГБ', 'GB')]
+ URL_REPLACEMENTS = [("narod.yandex.ru/", "narod.ru/"),
+ (r"/start/\d+\.\w+-narod\.yandex\.ru/(\d{6,15})/\w+/(\w+)", r"/disk/\1/\2")]
+
+ CAPTCHA_PATTERN = r'<number url="(.*?)">(\w+)</number>'
+ LINK_PATTERN = r'<a class="h-link" rel="yandex_bar" href="(.+?)">'
+
+
+ def handleFree(self):
+ for _i in xrange(5):
+ self.html = self.load('http://narod.ru/disk/getcapchaxml/?rnd=%d' % int(random() * 777))
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m is None:
+ self.error(_("Captcha"))
+ post_data = {"action": "sendcapcha"}
+ captcha_url, post_data['key'] = m.groups()
+ post_data['rep'] = self.decryptCaptcha(captcha_url)
+
+ self.html = self.load(self.pyfile.url, post=post_data, decode=True)
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m:
+ url = 'http://narod.ru' + m.group(1)
+ self.correctCaptcha()
+ break
+ elif u'<b class="error-msg"><strong>ОшОблОсь?</strong>' in self.html:
+ self.invalidCaptcha()
+ else:
+ self.error(_("Download link"))
+ else:
+ self.fail(_("No valid captcha code entered"))
+
+ self.download(url)
+
+
+getInfo = create_getInfo(NarodRu)
diff --git a/pyload/plugin/hoster/NetloadIn.py b/pyload/plugin/hoster/NetloadIn.py
new file mode 100644
index 000000000..77ea27bcc
--- /dev/null
+++ b/pyload/plugin/hoster/NetloadIn.py
@@ -0,0 +1,294 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+from time import sleep, time
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Hoster import Hoster
+from pyload.plugin.Plugin import chunks
+from pyload.plugin.captcha import ReCaptcha
+
+
+def getInfo(urls):
+    ## returns a list of tuples (name, size (in bytes), status (see database.File), url)
+
+ apiurl = "http://api.netload.in/info.php"
+ id_regex = re.compile(NetloadIn.__pattern)
+ urls_per_query = 80
+
+ for chunk in chunks(urls, urls_per_query):
+ ids = ""
+ for url in chunk:
+ match = id_regex.search(url)
+ if match:
+ ids = ids + match.group(1) + ";"
+
+ api = getURL(apiurl,
+ get={'auth' : "Zf9SnQh9WiReEsb18akjvQGqT0I830e8",
+ 'bz' : 1,
+ 'md5' : 1,
+ 'file_id': ids},
+ decode=True)
+
+        if api is None or len(api) < 10:
+            # prefetch failed, no usable info returned
+            return
+
+        if api.find("unknown_auth") >= 0:
+            # outdated auth code, the API rejected the request
+            return
+
+ result = []
+
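+        # each reply line carries semicolon-separated fields: id;filename;size;status[;checksum]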
+ for i, r in enumerate(api.splitlines()):
+ try:
+ tmp = r.split(";")
+
+ try:
+ size = int(tmp[2])
+ except Exception:
+ size = 0
+
+ result.append((tmp[1], size, 2 if tmp[3] == "online" else 1, chunk[i] ))
+
+            except Exception:
+                # malformed response line, skip it
+                pass
+
+ yield result
+
+
+class NetloadIn(Hoster):
+ __name = "NetloadIn"
+ __type = "hoster"
+ __version = "0.47"
+
+ __pattern = r'https?://(?:[^/]*\.)?netload\.in/(?:datei(.*?)(?:\.htm|/)|index\.php?id=10&file_id=)'
+
+ __description = """Netload.in hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org"),
+ ("RaNaN", "ranan@pyload.org"),
+ ("Gregy", "gregy@gregy.cz")]
+
+
+ def setup(self):
+ self.multiDL = self.resumeDownload = self.premium
+
+
+ def process(self, pyfile):
+ self.url = pyfile.url
+
+ self.prepare()
+
+ pyfile.setStatus("downloading")
+
+ self.proceed(self.url)
+
+
+ def prepare(self):
+ self.download_api_data()
+
+ if self.api_data and self.api_data['filename']:
+ self.pyfile.name = self.api_data['filename']
+
+ if self.premium:
+ self.logDebug("Use Premium Account")
+
+ settings = self.load("http://www.netload.in/index.php", get={'id': 2, 'lang': "en"})
+
+ if '<option value="2" selected="selected">Direkter Download' in settings:
+ self.logDebug("Using direct download")
+ return True
+ else:
+ self.logDebug("Direct downloads not enabled. Parsing html for a download URL")
+
+ if self.download_html():
+ return True
+ else:
+ self.fail(_("Failed"))
+ return False
+
+
+ def download_api_data(self, n=0):
+ url = self.url
+ id_regex = re.compile(self.__pattern)
+ match = id_regex.search(url)
+
+ if match:
+ #normalize url
+ self.url = 'http://www.netload.in/datei%s.htm' % match.group(1)
+ self.logDebug("URL: %s" % self.url)
+ else:
+ self.api_data = False
+ return
+
+ apiurl = "http://api.netload.in/info.php"
+ html = self.load(apiurl, cookies=False,
+ get={"file_id": match.group(1), "auth": "Zf9SnQh9WiReEsb18akjvQGqT0I830e8", "bz": "1",
+ "md5": "1"}, decode=True).strip()
+ if not html and n <= 3:
+ sleep(0.2)
+ self.download_api_data(n + 1)
+ return
+
+ self.logDebug("APIDATA: " + html)
+
+ self.api_data = {}
+
+ if html and ";" in html and html not in ("unknown file_data", "unknown_server_data", "No input file specified."):
+ lines = html.split(";")
+ self.api_data['exists'] = True
+ self.api_data['fileid'] = lines[0]
+ self.api_data['filename'] = lines[1]
+ self.api_data['size'] = lines[2]
+ self.api_data['status'] = lines[3]
+
+ if self.api_data['status'] == "online":
+ self.api_data['checksum'] = lines[4].strip()
+ else:
+ self.api_data = False # check manually since api data is useless sometimes
+
+ if lines[0] == lines[1] and lines[2] == "0": # useless api data
+ self.api_data = False
+ else:
+ self.api_data = False
+
+
+ def final_wait(self, page):
+ wait_time = self.get_wait_time(page)
+
+ self.setWait(wait_time)
+
+ self.logDebug("Final wait %d seconds" % wait_time)
+
+ self.wait()
+
+ self.url = self.get_file_url(page)
+
+
+    def check_free_wait(self, page):
+ if ">An access request has been made from IP address <" in page:
+ self.wantReconnect = True
+ self.setWait(self.get_wait_time(page) or 30)
+ self.wait()
+ return True
+ else:
+ return False
+
+
+ def download_html(self):
+ page = self.load(self.url, decode=True)
+
+ if "/share/templates/download_hddcrash.tpl" in page:
+ self.logError(_("Netload HDD Crash"))
+ self.fail(_("File temporarily not available"))
+
+ if not self.api_data:
+ self.logDebug("API Data may be useless, get details from html page")
+
+ if "* The file was deleted" in page:
+ self.offline()
+
+ name = re.search(r'class="dl_first_filename">([^<]+)', page, re.M)
+            # only use the parsed name if it is not truncated (truncated names end with "..")
+ if name:
+ name = name.group(1).strip()
+ if not name.endswith(".."):
+ self.pyfile.name = name
+
+ captchawaited = False
+
+ for i in xrange(5):
+ if not page:
+ page = self.load(self.url)
+ t = time() + 30
+
+ if "/share/templates/download_hddcrash.tpl" in page:
+ self.logError(_("Netload HDD Crash"))
+ self.fail(_("File temporarily not available"))
+
+ self.logDebug("Try number %d " % i)
+
+ if ">Your download is being prepared.<" in page:
+ self.logDebug("We will prepare your download")
+ self.final_wait(page)
+ return True
+
+ self.logDebug("Trying to find captcha")
+
+ try:
+ url_captcha_html = re.search(r'(index.php\?id=10&amp;.*&amp;captcha=1)', page).group(1).replace("amp;", "")
+
+ except Exception, e:
+ self.logDebug("Exception during Captcha regex: %s" % e.message)
+ page = None
+
+ else:
+ url_captcha_html = urljoin("http://netload.in/", url_captcha_html)
+ break
+
+ self.html = self.load(url_captcha_html)
+
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(5):
+ challenge, response = recaptcha.challenge()
+
+ response_page = self.load("http://www.netload.in/index.php?id=10",
+ post={'captcha_check' : '1',
+ 'recaptcha_challenge_field': challenge,
+ 'recaptcha_response_field' : response,
+ 'file_id' : self.api_data['fileid'],
+ 'Download_Next' : ''})
+ if "Orange_Link" in response_page:
+ break
+
+ if self.check_free_wait(response_page):
+ self.logDebug("Had to wait for next free slot, trying again")
+ return self.download_html()
+
+ else:
+ download_url = self.get_file_url(response_page)
+ self.logDebug("Download URL after get_file: " + download_url)
+ if not download_url.startswith("http://"):
+ self.error("download url: %s" % download_url)
+ self.wait()
+
+ self.url = download_url
+ return True
+
+
+ def get_file_url(self, page):
+ try:
+ file_url_pattern = r'<a class="Orange_Link" href="(http://.+)".?>Or click here'
+ attempt = re.search(file_url_pattern, page)
+ if attempt is not None:
+ return attempt.group(1)
+ else:
+ self.logDebug("Backup try for final link")
+ file_url_pattern = r'<a href="(.+)" class="Orange_Link">Click here'
+ attempt = re.search(file_url_pattern, page)
+ return "http://netload.in/" + attempt.group(1)
+
+ except Exception, e:
+ self.logDebug("Getting final link failed", e.message)
+ return None
+
+
+ def get_wait_time(self, page):
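+        # the page countdown is given in 1/100 seconds, hence the division by 100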
+ return int(re.search(r"countdown\((.+),'change\(\)'\)", page).group(1)) / 100
+
+
+ def proceed(self, url):
+ self.download(url, disposition=True)
+
+ check = self.checkDownload({'empty' : re.compile(r'^$'),
+ 'offline': re.compile("The file was deleted")})
+ if check == "empty":
+ self.logInfo(_("Downloaded File was empty"))
+ self.retry()
+
+ elif check == "offline":
+ self.offline()
diff --git a/pyload/plugin/hoster/NosuploadCom.py b/pyload/plugin/hoster/NosuploadCom.py
new file mode 100644
index 000000000..02ceb65f2
--- /dev/null
+++ b/pyload/plugin/hoster/NosuploadCom.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class NosuploadCom(XFSHoster):
+ __name = "NosuploadCom"
+ __type = "hoster"
+ __version = "0.31"
+
+ __pattern = r'http://(?:www\.)?nosupload\.com/\?d=\w{12}'
+
+ __description = """Nosupload.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("igel", "igelkun@myopera.com")]
+
+
+ HOSTER_DOMAIN = "nosupload.com"
+
+ SIZE_PATTERN = r'<p><strong>Size:</strong> (?P<S>[\d.,]+) (?P<U>[\w^_]+)</p>'
+ LINK_PATTERN = r'<a class="select" href="(http://.+?)">Download</a>'
+ WAIT_PATTERN = r'Please wait.*?>(\d+)</span>'
+
+
+ def getDownloadLink(self):
+ # stage1: press the "Free Download" button
+ data = self.getPostParameters()
+ self.html = self.load(self.pyfile.url, post=data, ref=True, decode=True)
+
+ # stage2: wait some time and press the "Download File" button
+ data = self.getPostParameters()
+ wait_time = re.search(self.WAIT_PATTERN, self.html, re.M | re.S).group(1)
+ self.logDebug("Hoster told us to wait %s seconds" % wait_time)
+ self.wait(wait_time)
+ self.html = self.load(self.pyfile.url, post=data, ref=True, decode=True)
+
+ # stage3: get the download link
+ return re.search(self.LINK_PATTERN, self.html, re.S).group(1)
+
+
+getInfo = create_getInfo(NosuploadCom)
diff --git a/pyload/plugin/hoster/NovafileCom.py b/pyload/plugin/hoster/NovafileCom.py
new file mode 100644
index 000000000..ed39fa87e
--- /dev/null
+++ b/pyload/plugin/hoster/NovafileCom.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://novafile.com/vfun4z6o2cit
+# http://novafile.com/s6zrr5wemuz4
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class NovafileCom(XFSHoster):
+ __name = "NovafileCom"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?novafile\.com/\w{12}'
+
+ __description = """Novafile.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ HOSTER_DOMAIN = "novafile.com"
+
+ SIZE_PATTERN = r'<div class="size">(?P<S>.+?)</div>'
+ ERROR_PATTERN = r'class="alert[^"]*alert-separate"[^>]*>\s*(?:<p>)?(.*?)\s*</'
+ LINK_PATTERN = r'<a href="(http://s\d+\.novafile\.com/.*?)" class="btn btn-green">Download File</a>'
+ WAIT_PATTERN = r'<p>Please wait <span id="count"[^>]*>(\d+)</span> seconds</p>'
+
+
+getInfo = create_getInfo(NovafileCom)
diff --git a/pyload/plugin/hoster/NowDownloadSx.py b/pyload/plugin/hoster/NowDownloadSx.py
new file mode 100644
index 000000000..f9ba7e681
--- /dev/null
+++ b/pyload/plugin/hoster/NowDownloadSx.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+from pyload.utils import fixup
+
+
+class NowDownloadSx(SimpleHoster):
+ __name = "NowDownloadSx"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?nowdownload\.(at|ch|co|eu|sx)/(dl/|download\.php\?id=)\w+'
+
+ __description = """NowDownload.sx hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("godofdream", "soilfiction@gmail.com"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ INFO_PATTERN = r'Downloading</span> <br> (?P<N>.*) (?P<S>[\d.,]+) (?P<U>[\w^_]+) </h4>'
+ OFFLINE_PATTERN = r'>This file does not exist'
+
+ TOKEN_PATTERN = r'"(/api/token\.php\?token=\w+)"'
+ CONTINUE_PATTERN = r'"(/dl2/\w+/\w+)"'
+ WAIT_PATTERN = r'\.countdown\(\{until: \+(\d+),'
+ LINK_PATTERN = r'(http://s\d+\.coolcdn\.info/nowdownload/.+?)["\']'
+
+ NAME_REPLACEMENTS = [("&#?\w+;", fixup), (r'<[^>]*>', '')]
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = -1
+
+
+ def handleFree(self):
+ tokenlink = re.search(self.TOKEN_PATTERN, self.html)
+ continuelink = re.search(self.CONTINUE_PATTERN, self.html)
+ if tokenlink is None or continuelink is None:
+ self.error()
+
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ wait = int(m.group(1))
+ else:
+ wait = 60
+
+ baseurl = "http://www.nowdownload.at"
+ self.html = self.load(baseurl + str(tokenlink.group(1)))
+ self.wait(wait)
+
+ self.html = self.load(baseurl + str(continuelink.group(1)))
+
+ url = re.search(self.LINK_PATTERN, self.html)
+ if url is None:
+ self.error(_("Download link not found"))
+
+ self.download(str(url.group(1)))
+
+
+getInfo = create_getInfo(NowDownloadSx)
diff --git a/pyload/plugin/hoster/NowVideoSx.py b/pyload/plugin/hoster/NowVideoSx.py
new file mode 100644
index 000000000..5f3d1954c
--- /dev/null
+++ b/pyload/plugin/hoster/NowVideoSx.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class NowVideoSx(SimpleHoster):
+ __name = "NowVideoSx"
+ __type = "hoster"
+ __version = "0.07"
+
+ __pattern = r'http://(?:www\.)?nowvideo\.(at|ch|co|eu|sx)/(video|mobile/#/videos)/(?P<ID>\w+)'
+
+ __description = """NowVideo.sx hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ URL_REPLACEMENTS = [(__pattern + ".*", r'http://www.nowvideo.at/video/\g<ID>')]
+
+ NAME_PATTERN = r'<h4>(?P<N>.+?)<'
+ OFFLINE_PATTERN = r'>This file no longer exists'
+
+ LINK_FREE_PATTERN = r'<source src="(.+?)"'
+ LINK_PREMIUM_PATTERN = r'<div id="content_player" >\s*<a href="(.+?)"'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def handleFree(self):
+ self.html = self.load("http://www.nowvideo.at/mobile/video.php", get={'id': self.info['pattern']['ID']})
+
+ m = re.search(self.LINK_FREE_PATTERN, self.html)
+ if m is None:
+ self.error(_("Free download link not found"))
+
+ self.download(m.group(1))
+
+
+getInfo = create_getInfo(NowVideoSx)
diff --git a/pyload/plugin/hoster/OboomCom.py b/pyload/plugin/hoster/OboomCom.py
new file mode 100644
index 000000000..7467dcaf5
--- /dev/null
+++ b/pyload/plugin/hoster/OboomCom.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# https://www.oboom.com/B7CYZIEB/10Mio.dat
+
+import re
+
+from pyload.utils import json_loads
+from pyload.plugin.Hoster import Hoster
+from pyload.plugin.internal.captcha import ReCaptcha
+
+
+class OboomCom(Hoster):
+ __name = "OboomCom"
+ __type = "hoster"
+ __version = "0.30"
+
+ __pattern = r'https?://(?:www\.)?oboom\.com/(#(id=|/)?)?(?P<ID>\w{8})'
+
+ __description = """oboom.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stanley", "stanley.foerster@gmail.com")]
+
+
+ RECAPTCHA_KEY = "6LdqpO0SAAAAAJGHXo63HyalP7H4qlRs_vff0kJX"
+
+
+ def setup(self):
+ self.chunkLimit = 1
+ self.multiDL = self.resumeDownload = self.premium
+
+
+ def process(self, pyfile):
+ self.pyfile.url.replace(".com/#id=", ".com/#")
+ self.pyfile.url.replace(".com/#/", ".com/#")
+ self.getFileId(self.pyfile.url)
+ self.getSessionToken()
+ self.getFileInfo(self.sessionToken, self.fileId)
+ self.pyfile.name = self.fileName
+ self.pyfile.size = self.fileSize
+ if not self.premium:
+ self.solveCaptcha()
+ self.getDownloadTicket()
+ self.download("https://%s/1.0/dlh" % self.downloadDomain, get={"ticket": self.downloadTicket, "http_errors": 0})
+
+
+ def loadUrl(self, url, get=None):
+ if get is None:
+ get = dict()
+ return json_loads(self.load(url, get, decode=True))
+
+
+ def getFileId(self, url):
+ self.fileId = re.match(OboomCom.__pattern, url).group('ID')
+
+
+ def getSessionToken(self):
+ if self.premium:
+ accountInfo = self.account.getAccountInfo(self.user, True)
+ if "session" in accountInfo:
+ self.sessionToken = accountInfo['session']
+ else:
+ self.fail(_("Could not retrieve premium session"))
+ else:
+ apiUrl = "https://www.oboom.com/1.0/guestsession"
+ result = self.loadUrl(apiUrl)
+ if result[0] == 200:
+ self.sessionToken = result[1]
+ else:
+ self.fail(_("Could not retrieve token for guest session. Error code: %s") % result[0])
+
+
+ def solveCaptcha(self):
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(5):
+ challenge, response = recaptcha.challenge(self.RECAPTCHA_KEY)
+ apiUrl = "https://www.oboom.com/1.0/download/ticket"
+ params = {"recaptcha_challenge_field": challenge,
+ "recaptcha_response_field": response,
+ "download_id": self.fileId,
+ "token": self.sessionToken}
+ result = self.loadUrl(apiUrl, params)
+
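+            # on success the ticket reply is [status, download token, download auth]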
+ if result[0] == 200:
+ self.downloadToken = result[1]
+ self.downloadAuth = result[2]
+ self.correctCaptcha()
+ self.setWait(30)
+ self.wait()
+ break
+
+ elif result[0] == 400:
+ if result[1] == "incorrect-captcha-sol":
+ self.invalidCaptcha()
+ elif result[1] == "captcha-timeout":
+ self.invalidCaptcha()
+ elif result[1] == "forbidden":
+ self.retry(5, 15 * 60, _("Service unavailable"))
+
+ elif result[0] == 403:
+ if result[1] == -1: # another download is running
+ self.setWait(15 * 60)
+ else:
+ self.setWait(result[1], True)
+ self.wait()
+ self.retry(5)
+ else:
+ self.invalidCaptcha()
+ self.fail(_("Received invalid captcha 5 times"))
+
+
+ def getFileInfo(self, token, fileId):
+ apiUrl = "https://api.oboom.com/1.0/info"
+ params = {"token": token, "items": fileId, "http_errors": 0}
+
+ result = self.loadUrl(apiUrl, params)
+ if result[0] == 200:
+ item = result[1][0]
+ if item['state'] == "online":
+ self.fileSize = item['size']
+ self.fileName = item['name']
+ else:
+ self.offline()
+ else:
+ self.fail(_("Could not retrieve file info. Error code %s: %s") % (result[0], result[1]))
+
+
+ def getDownloadTicket(self):
+ apiUrl = "https://api.oboom.com/1/dl"
+ params = {"item": self.fileId, "http_errors": 0}
+ if self.premium:
+ params['token'] = self.sessionToken
+ else:
+ params['token'] = self.downloadToken
+ params['auth'] = self.downloadAuth
+
+ result = self.loadUrl(apiUrl, params)
+ if result[0] == 200:
+ self.downloadDomain = result[1]
+ self.downloadTicket = result[2]
+ elif result[0] == 421:
+ self.retry(wait_time=result[2] + 60, reason=_("Connection limit exceeded"))
+ else:
+ self.fail(_("Could not retrieve download ticket. Error code: %s") % result[0])
diff --git a/pyload/plugin/hoster/OneFichierCom.py b/pyload/plugin/hoster/OneFichierCom.py
new file mode 100644
index 000000000..eb48d7311
--- /dev/null
+++ b/pyload/plugin/hoster/OneFichierCom.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class OneFichierCom(SimpleHoster):
+ __name = "OneFichierCom"
+ __type = "hoster"
+ __version = "0.74"
+
+ __pattern = r'https?://(?:www\.)?(?:(?P<ID1>\w+)\.)?(?P<HOST>1fichier\.com|alterupload\.com|cjoint\.net|d(es)?fichiers\.com|dl4free\.com|megadl\.fr|mesfichiers\.org|piecejointe\.net|pjointe\.com|tenvoi\.com)(?:/\?(?P<ID2>\w+))?'
+
+ __description = """1fichier.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es"),
+ ("the-razer", "daniel_ AT gmx DOT net"),
+ ("zoidberg", "zoidberg@mujmail.cz"),
+ ("imclem", ""),
+ ("stickell", "l.stickell@yahoo.it"),
+ ("Elrick69", "elrick69[AT]rocketmail[DOT]com"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'>FileName :</td>\s*<td.*>(?P<N>.+?)<'
+ SIZE_PATTERN = r'>Size :</td>\s*<td.*>(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ OFFLINE_PATTERN = r'File not found !\s*<'
+
+ COOKIES = [("1fichier.com", "LG", "en")]
+
+ WAIT_PATTERN = r'>You must wait (\d+)'
+
+
+ def setup(self):
+ self.multiDL = self.premium
+ self.resumeDownload = True
+
+
+ def handle(self, reconnect):
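+        # shared free/premium handler; "reconnect" is forwarded to wait() and controls
+        # whether a reconnect may be triggered while waiting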
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ wait_time = int(m.group(1)) * 60
+
+ self.wait(wait_time, reconnect)
+ self.retry(reason="You have to wait been each free download")
+
+ id = self.info['pattern']['ID1'] or self.info['pattern']['ID2']
+ url, inputs = self.parseHtmlForm('action="https://1fichier.com/\?%s' % id)
+
+ if not url:
+ self.fail(_("Download link not found"))
+
+ if "pass" in inputs:
+ inputs['pass'] = self.getPassword()
+
+ inputs['submit'] = "Download"
+
+ self.download(url, post=inputs)
+
+
+ def handleFree(self):
+ return self.handle(True)
+
+
+ def handlePremium(self):
+ return self.handle(False)
+
+
+getInfo = create_getInfo(OneFichierCom)
diff --git a/pyload/plugin/hoster/OronCom.py b/pyload/plugin/hoster/OronCom.py
new file mode 100644
index 000000000..f2efafc83
--- /dev/null
+++ b/pyload/plugin/hoster/OronCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class OronCom(DeadHoster):
+ __name = "OronCom"
+ __type = "hoster"
+ __version = "0.14"
+
+ __pattern = r'https?://(?:www\.)?oron\.com/\w{12}'
+
+ __description = """Oron.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("chrox", "chrox@pyload.org"),
+ ("DHMH", "DHMH@pyload.org")]
+
+
+getInfo = create_getInfo(OronCom)
diff --git a/pyload/plugin/hoster/OverLoadMe.py b/pyload/plugin/hoster/OverLoadMe.py
new file mode 100644
index 000000000..e06aadfbe
--- /dev/null
+++ b/pyload/plugin/hoster/OverLoadMe.py
@@ -0,0 +1,84 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from random import randrange
+from urllib import unquote
+
+from pyload.utils import json_loads
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import parseFileSize
+
+
+class OverLoadMe(Hoster):
+ __name = "OverLoadMe"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'https?://.*overload\.me.*'
+
+ __description = """Over-Load.me hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("marley", "marley@over-load.me")]
+
+
+ def getFilename(self, url):
+ try:
+ name = unquote(url.rsplit("/", 1)[1])
+ except IndexError:
+ name = "Unknown_Filename..."
+ if name.endswith("..."): #: incomplete filename, append random stuff
+ name += "%s.tmp" % randrange(100, 999)
+ return name
+
+
+ def setup(self):
+ self.chunkLimit = 5
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ if re.match(self.__pattern, pyfile.url):
+ new_url = pyfile.url
+ elif not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "Over-Load")
+ self.fail(_("No Over-Load account provided"))
+ else:
+ self.logDebug("Old URL: %s" % pyfile.url)
+ data = self.account.getAccountData(self.user)
+
+ page = self.load("https://api.over-load.me/getdownload.php",
+ get={"auth": data['password'], "link": pyfile.url})
+ data = json_loads(page)
+
+ self.logDebug("Returned Data: %s" % data)
+
+ if data['error'] == 1:
+ self.logWarning(data['msg'])
+ self.tempOffline()
+ else:
+ if pyfile.name is not None and pyfile.name.endswith('.tmp') and data['filename']:
+ pyfile.name = data['filename']
+ pyfile.size = parseFileSize(data['filesize'])
+ new_url = data['downloadlink']
+
+ if self.getConfig("https"):
+ new_url = new_url.replace("http://", "https://")
+ else:
+ new_url = new_url.replace("https://", "http://")
+
+ if new_url != pyfile.url:
+ self.logDebug("New URL: %s" % new_url)
+
+ if pyfile.name.startswith("http") or pyfile.name.startswith("Unknown") or pyfile.name.endswith('..'):
+ # only use when name wasn't already set
+ pyfile.name = self.getFilename(new_url)
+
+ self.download(new_url, disposition=True)
+
+ check = self.checkDownload(
+ {"error": "<title>An error occured while processing your request</title>"})
+
+ if check == "error":
+            # usually this download can safely be retried
+            self.retry(wait_time=60, reason=_("An error occurred while generating the link"))
diff --git a/pyload/plugin/hoster/PandaplaNet.py b/pyload/plugin/hoster/PandaplaNet.py
new file mode 100644
index 000000000..5c4a649b9
--- /dev/null
+++ b/pyload/plugin/hoster/PandaplaNet.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class PandaplaNet(DeadHoster):
+ __name = "PandaplaNet"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?pandapla\.net/\w{12}'
+
+ __description = """Pandapla.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
+
+
+getInfo = create_getInfo(PandaplaNet)
diff --git a/pyload/plugin/hoster/PornhostCom.py b/pyload/plugin/hoster/PornhostCom.py
new file mode 100644
index 000000000..653faf81f
--- /dev/null
+++ b/pyload/plugin/hoster/PornhostCom.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+
+
+class PornhostCom(Hoster):
+ __name = "PornhostCom"
+ __type = "hoster"
+ __version = "0.20"
+
+ __pattern = r'http://(?:www\.)?pornhost\.com/(\d+/\d+\.html|\d+)'
+
+ __description = """Pornhost.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de")]
+
+
+ def process(self, pyfile):
+ self.download_html()
+ if not self.file_exists():
+ self.offline()
+
+ pyfile.name = self.get_file_name()
+ self.download(self.get_file_url())
+
+
+ # Old interface
+ def download_html(self):
+ url = self.pyfile.url
+ self.html = self.load(url)
+
+
+ def get_file_url(self):
+ """ returns the absolute downloadable filepath
+ """
+ if not self.html:
+ self.download_html()
+
+ url = re.search(r'download this file</label>.*?<a href="(.*?)"', self.html)
+ if url is None:
+ url = re.search(r'"(http://dl\d+\.pornhost\.com/files/.*?/.*?/.*?/.*?/.*?/.*?\..*?)"', self.html)
+ if url is None:
+ url = re.search(r'width: 894px; height: 675px">.*?<img src="(.*?)"', self.html)
+ if url is None:
+ url = re.search(r'"http://file\d+\.pornhost\.com/\d+/.*?"',
+ self.html) # TODO: fix this one since it doesn't match
+
+ return url.group(1).strip()
+
+
+ def get_file_name(self):
+ if not self.html:
+ self.download_html()
+
+ name = re.search(r'<title>pornhost\.com - free file hosting with a twist - gallery(.*?)</title>', self.html)
+ if name is None:
+ name = re.search(r'id="url" value="http://www\.pornhost\.com/(.*?)/"', self.html)
+ if name is None:
+ name = re.search(r'<title>pornhost\.com - free file hosting with a twist -(.*?)</title>', self.html)
+ if name is None:
+ name = re.search(r'"http://file\d+\.pornhost\.com/.*?/(.*?)"', self.html)
+
+ name = name.group(1).strip() + ".flv"
+
+ return name
+
+
+ def file_exists(self):
+ """ returns True or False
+ """
+ if not self.html:
+ self.download_html()
+
+ if (re.search(r'gallery not found', self.html) is not None or
+ re.search(r'You will be redirected to', self.html) is not None):
+ return False
+ else:
+ return True
diff --git a/pyload/plugin/hoster/PornhubCom.py b/pyload/plugin/hoster/PornhubCom.py
new file mode 100644
index 000000000..48fd82871
--- /dev/null
+++ b/pyload/plugin/hoster/PornhubCom.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+
+
+class PornhubCom(Hoster):
+ __name = "PornhubCom"
+ __type = "hoster"
+ __version = "0.50"
+
+ __pattern = r'http://(?:www\.)?pornhub\.com/view_video\.php\?viewkey=\w+'
+
+ __description = """Pornhub.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de")]
+
+
+ def process(self, pyfile):
+ self.download_html()
+ if not self.file_exists():
+ self.offline()
+
+ pyfile.name = self.get_file_name()
+ self.download(self.get_file_url())
+
+
+ def download_html(self):
+ url = self.pyfile.url
+ self.html = self.load(url)
+
+
+ def get_file_url(self):
+ """ returns the absolute downloadable filepath
+ """
+ if not self.html:
+ self.download_html()
+
+ url = "http://www.pornhub.com//gateway.php"
+ video_id = self.pyfile.url.split('=')[-1]
+ # thanks to jD team for this one v
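+        # hand-crafted payload, apparently an AMF remoting call to "playerConfig" for this video id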
+ post_data = "\x00\x03\x00\x00\x00\x01\x00\x0c\x70\x6c\x61\x79\x65\x72\x43\x6f\x6e\x66\x69\x67\x00\x02\x2f\x31\x00\x00\x00\x44\x0a\x00\x00\x00\x03\x02\x00"
+ post_data += chr(len(video_id))
+ post_data += video_id
+ post_data += "\x02\x00\x02\x2d\x31\x02\x00\x20"
+ post_data += "add299463d4410c6d1b1c418868225f7"
+
+ content = self.load(url, post=str(post_data))
+
+ new_content = ""
+ for x in content:
+ if ord(x) < 32 or ord(x) > 176:
+ new_content += '#'
+ else:
+ new_content += x
+
+ content = new_content
+
+ return re.search(r'flv_url.*(http.*?)##post_roll', content).group(1)
+
+
+ def get_file_name(self):
+ if not self.html:
+ self.download_html()
+
+ m = re.search(r'<title[^>]+>([^<]+) - ', self.html)
+ if m:
+ name = m.group(1)
+ else:
+ matches = re.findall('<h1>(.*?)</h1>', self.html)
+ if len(matches) > 1:
+ name = matches[1]
+ else:
+ name = matches[0]
+
+ return name + '.flv'
+
+
+ def file_exists(self):
+ """ returns True or False
+ """
+ if not self.html:
+ self.download_html()
+
+ if re.search(r'This video is no longer in our database or is in conversion', self.html) is not None:
+ return False
+ else:
+ return True
diff --git a/pyload/plugin/hoster/PotloadCom.py b/pyload/plugin/hoster/PotloadCom.py
new file mode 100644
index 000000000..08d5a7901
--- /dev/null
+++ b/pyload/plugin/hoster/PotloadCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class PotloadCom(DeadHoster):
+ __name = "PotloadCom"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?potload\.com/\w{12}'
+
+ __description = """Potload.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(PotloadCom)
diff --git a/pyload/plugin/hoster/PremiumTo.py b/pyload/plugin/hoster/PremiumTo.py
new file mode 100644
index 000000000..4591ec7bc
--- /dev/null
+++ b/pyload/plugin/hoster/PremiumTo.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+from os import remove
+from os.path import exists
+from urllib import quote
+
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import fs_encode
+
+
+class PremiumTo(Hoster):
+ __name = "PremiumTo"
+ __type = "hoster"
+ __version = "0.11"
+
+ __pattern = r'https?://(?:www\.)?premium\.to/.+'
+
+ __description = """Premium.to hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.chunkLimit = 1
+
+
+ def process(self, pyfile):
+ if not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "premium.to")
+ self.fail(_("No premium.to account provided"))
+
+ self.logDebug("Old URL: %s" % pyfile.url)
+
+ tra = self.getTraffic()
+
+        # raise the timeout to 2 minutes
+ self.req.setOption("timeout", 120)
+
+ self.download("http://premium.to/api/getfile.php",
+ get={'username': self.account.username,
+ 'password': self.account.password,
+ 'link' : quote(pyfile.url, "")},
+ disposition=True)
+
+ check = self.checkDownload({"nopremium": "No premium account available"})
+
+ if check == "nopremium":
+ self.retry(60, 5 * 60, "No premium account available")
+
+ err = ''
+ if self.req.http.code == '420':
+            # Custom error code sent - fail
+ lastDownload = fs_encode(self.lastDownload)
+
+ if exists(lastDownload):
+ with open(lastDownload, "rb") as f:
+ err = f.read(256).strip()
+ remove(lastDownload)
+ else:
+ err = _('File does not exist')
+
+ trb = self.getTraffic()
+ self.logInfo(_("Filesize: %d, Traffic used %d, traffic left %d") % (pyfile.size, tra - trb, trb))
+
+ if err:
+ self.fail(err)
+
+
+ def getTraffic(self):
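+        # straffic.php returns semicolon-separated traffic counters; sum them for the total traffic left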
+ try:
+ api_r = self.load("http://premium.to/api/straffic.php",
+ get={'username': self.account.username, 'password': self.account.password})
+ traffic = sum(map(int, api_r.split(';')))
+ except Exception:
+ traffic = 0
+ return traffic
diff --git a/pyload/plugin/hoster/PremiumizeMe.py b/pyload/plugin/hoster/PremiumizeMe.py
new file mode 100644
index 000000000..e56ccdbb0
--- /dev/null
+++ b/pyload/plugin/hoster/PremiumizeMe.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+from pyload.utils import json_loads
+from pyload.plugin.Hoster import Hoster
+
+
+class PremiumizeMe(Hoster):
+ __name = "PremiumizeMe"
+ __type = "hoster"
+ __version = "0.12"
+
+ __pattern = r'^unmatchable$' #: Since we want to allow the user to specify the list of hoster to use we let MultiHoster.activate
+
+ __description = """Premiumize.me hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Florian Franzen", "FlorianFranzen@gmail.com")]
+
+
+ def process(self, pyfile):
+ # Check account
+ if not self.account or not self.account.canUse():
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "premiumize.me")
+ self.fail(_("No valid premiumize.me account provided"))
+
+        # In some cases hosters do not supply us with a filename at download, so we
+        # set a fallback filename (e.g. for freakshare or xfileshare)
+        pyfile.name = pyfile.name.split('/').pop()  # Remove everything before the last slash
+
+ # Correction for automatic assigned filename: Removing html at end if needed
+ suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]
+ temp = pyfile.name.split('.')
+ if temp.pop() in suffix_to_remove:
+ pyfile.name = ".".join(temp)
+
+ # Get account data
+ (user, data) = self.account.selectAccount()
+
+ # Get rewritten link using the premiumize.me api v1 (see https://secure.premiumize.me/?show=api)
+ data = json_loads(self.load("https://api.premiumize.me/pm-api/v1.php",
+ get={'method' : "directdownloadlink",
+ 'params[login]': user,
+ 'params[pass]' : data['password'],
+ 'params[link]' : pyfile.url}))
+
+ # Check status and decide what to do
+ status = data['status']
+ if status == 200:
+ self.download(data['result']['location'], disposition=True)
+ elif status == 400:
+ self.fail(_("Invalid link"))
+ elif status == 404:
+ self.offline()
+ elif status >= 500:
+ self.tempOffline()
+ else:
+ self.fail(data['statusmessage'])
diff --git a/pyload/plugin/hoster/PromptfileCom.py b/pyload/plugin/hoster/PromptfileCom.py
new file mode 100644
index 000000000..315714025
--- /dev/null
+++ b/pyload/plugin/hoster/PromptfileCom.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class PromptfileCom(SimpleHoster):
+ __name = "PromptfileCom"
+ __type = "hoster"
+ __version = "0.12"
+
+ __pattern = r'https?://(?:www\.)?promptfile\.com/'
+
+ __description = """Promptfile.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("igel", "igelkun@myopera.com")]
+
+
+ INFO_PATTERN = r'<span style="[^"]*" title="[^"]*">(?P<N>.*?) \((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)</span>'
+ OFFLINE_PATTERN = r'<span style="[^"]*" title="File Not Found">File Not Found</span>'
+
+ CHASH_PATTERN = r'<input type="hidden" name="chash" value="([^"]*)" />'
+ LINK_PATTERN = r'<a href=\"(.+)\" target=\"_blank\" class=\"view_dl_link\">Download File</a>'
+
+
+ def handleFree(self):
+ # STAGE 1: get link to continue
+ m = re.search(self.CHASH_PATTERN, self.html)
+ if m is None:
+ self.error(_("CHASH_PATTERN not found"))
+ chash = m.group(1)
+ self.logDebug("Read chash %s" % chash)
+ # continue to stage2
+ self.html = self.load(self.pyfile.url, decode=True, post={'chash': chash})
+
+ # STAGE 2: get the direct link
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("LINK_PATTERN not found"))
+
+ self.download(m.group(1), disposition=True)
+
+
+getInfo = create_getInfo(PromptfileCom)
diff --git a/pyload/plugin/hoster/PrzeklejPl.py b/pyload/plugin/hoster/PrzeklejPl.py
new file mode 100644
index 000000000..faaa25e0f
--- /dev/null
+++ b/pyload/plugin/hoster/PrzeklejPl.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class PrzeklejPl(DeadHoster):
+ __name = "PrzeklejPl"
+ __type = "hoster"
+ __version = "0.11"
+
+ __pattern = r'http://(?:www\.)?przeklej\.pl/plik/.+'
+
+ __description = """Przeklej.pl hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(PrzeklejPl)
diff --git a/pyload/plugin/hoster/QuickshareCz.py b/pyload/plugin/hoster/QuickshareCz.py
new file mode 100644
index 000000000..ca250b47b
--- /dev/null
+++ b/pyload/plugin/hoster/QuickshareCz.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class QuickshareCz(SimpleHoster):
+ __name = "QuickshareCz"
+ __type = "hoster"
+ __version = "0.55"
+
+ __pattern = r'http://(?:[^/]*\.)?quickshare\.cz/stahnout-soubor/.*'
+
+ __description = """Quickshare.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<th width="145px">Název:</th>\s*<td style="word-wrap:break-word;">(?P<N>[^<]+)</td>'
+ SIZE_PATTERN = r'<th>Velikost:</th>\s*<td>(?P<S>[\d.,]+) (?P<U>[\w^_]+)</td>'
+ OFFLINE_PATTERN = r'<script type="text/javascript">location\.href=\'/chyba\';</script>'
+
+
+ def process(self, pyfile):
+ self.html = self.load(pyfile.url, decode=True)
+ self.getFileInfo()
+
+ # parse js variables
+ self.jsvars = dict((x, y.strip("'")) for x, y in re.findall(r"var (\w+) = ([\d.]+|'[^']*')", self.html))
+ self.logDebug(self.jsvars)
+ pyfile.name = self.jsvars['ID3']
+
+ # determine download type - free or premium
+ if self.premium:
+ if 'UU_prihlasen' in self.jsvars:
+ if self.jsvars['UU_prihlasen'] == '0':
+ self.logWarning(_("User not logged in"))
+ self.relogin(self.user)
+ self.retry()
+ elif float(self.jsvars['UU_kredit']) < float(self.jsvars['kredit_odecet']):
+ self.logWarning(_("Not enough credit left"))
+ self.premium = False
+
+ if self.premium:
+ self.handlePremium()
+ else:
+ self.handleFree()
+
+ check = self.checkDownload({"err": re.compile(r"\AChyba!")}, max_size=100)
+ if check == "err":
+ self.fail(_("File not found or plugin defect"))
+
+
+ def handleFree(self):
+ # get download url
+ download_url = '%s/download.php' % self.jsvars['server']
+ data = dict((x, self.jsvars[x]) for x in self.jsvars if x in ("ID1", "ID2", "ID3", "ID4"))
+ self.logDebug("FREE URL1:" + download_url, data)
+
+ self.load(download_url, post=data, follow_location=False)
+ self.header = self.req.http.header
+
+ m = re.search(r'Location\s*:\s*(.+)', self.header, re.I)
+ if m is None:
+ self.fail(_("File not found"))
+ download_url = m.group(1)
+ self.logDebug("FREE URL2:" + download_url)
+
+ # check errors
+ m = re.search(r'/chyba/(\d+)', download_url)
+ if m:
+ if m.group(1) == '1':
+ self.retry(60, 2 * 60, "This IP is already downloading")
+ elif m.group(1) == '2':
+ self.retry(60, 60, "No free slots available")
+ else:
+ self.fail(_("Error %d") % int(m.group(1)))
+
+ # download file
+ self.download(download_url)
+
+
+ def handlePremium(self):
+ download_url = '%s/download_premium.php' % self.jsvars['server']
+ data = dict((x, self.jsvars[x]) for x in self.jsvars if x in ("ID1", "ID2", "ID4", "ID5"))
+ self.download(download_url, get=data)
+
+
+getInfo = create_getInfo(QuickshareCz)
diff --git a/pyload/plugin/hoster/RPNetBiz.py b/pyload/plugin/hoster/RPNetBiz.py
new file mode 100644
index 000000000..aea39f76d
--- /dev/null
+++ b/pyload/plugin/hoster/RPNetBiz.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import json_loads
+
+
+class RPNetBiz(Hoster):
+ __name = "RPNetBiz"
+ __type = "hoster"
+ __version = "0.10"
+
+ __description = """RPNet.biz hoster plugin"""
+ __license = "GPLv3"
+
+ __pattern = r'https?://.*rpnet\.biz'
+ __authors = [("Dman", "dmanugm@gmail.com")]
+
+
+ def setup(self):
+ self.chunkLimit = -1
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ if re.match(self.__pattern, pyfile.url):
+ link_status = {'generated': pyfile.url}
+ elif not self.account:
+ # Check account
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "rpnet")
+ self.fail(_("No rpnet account provided"))
+ else:
+ (user, data) = self.account.selectAccount()
+
+ self.logDebug("Original URL: %s" % pyfile.url)
+ # Get the download link
+ res = self.load("https://premium.rpnet.biz/client_api.php",
+ get={"username": user,
+ "password": data['password'],
+ "action": "generate",
+ "links": pyfile.url})
+
+ self.logDebug("JSON data: %s" % res)
+ link_status = json_loads(res)['links'][0] # get the first link... since we only queried one
+
+ # Check if we only have an id as a HDD link
+ if 'id' in link_status:
+ self.logDebug("Need to wait at least 30 seconds before requery")
+ self.setWait(30) # wait for 30 seconds
+ self.wait()
+ # Let's query the server again, asking for the status of the link;
+ # we need to keep doing this until it reaches 100%
+ max_tries = 30
+ my_try = 0
+ while (my_try <= max_tries):
+ self.logDebug("Try: %d ; Max Tries: %d" % (my_try, max_tries))
+ res = self.load("https://premium.rpnet.biz/client_api.php",
+ get={"username": user,
+ "password": data['password'],
+ "action": "downloadInformation",
+ "id": link_status['id']})
+ self.logDebug("JSON data hdd query: %s" % res)
+ download_status = json_loads(res)['download']
+
+ if download_status['status'] == '100':
+ link_status['generated'] = download_status['rpnet_link']
+ self.logDebug("Successfully downloaded to rpnet HDD: %s" % link_status['generated'])
+ break
+ else:
+ self.logDebug("At %s%% for the file download" % download_status['status'])
+
+ self.setWait(30)
+ self.wait()
+ my_try += 1
+
+ if my_try > max_tries: # We went over the limit!
+ self.fail(_("Waited for about 15 minutes for download to finish but failed"))
+
+ if 'generated' in link_status:
+ self.download(link_status['generated'], disposition=True)
+ elif 'error' in link_status:
+ self.fail(link_status['error'])
+ else:
+ self.fail(_("Unexpected API response: neither a download link nor an error was returned"))
diff --git a/pyload/plugin/hoster/RapidfileshareNet.py b/pyload/plugin/hoster/RapidfileshareNet.py
new file mode 100644
index 000000000..f2d3d7c63
--- /dev/null
+++ b/pyload/plugin/hoster/RapidfileshareNet.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class RapidfileshareNet(XFSHoster):
+ __name = "RapidfileshareNet"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?rapidfileshare\.net/\w{12}'
+
+ __description = """Rapidfileshare.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "rapidfileshare.net"
+
+ NAME_PATTERN = r'<input type="hidden" name="fname" value="(?P<N>.+?)">'
+ SIZE_PATTERN = r'>http://www.rapidfileshare.net/\w+?</font> \((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)</font>'
+
+ OFFLINE_PATTERN = r'>No such file with this filename'
+ TEMP_OFFLINE_PATTERN = r'The page may have been renamed, removed or be temporarily unavailable.<'
+
+
+ def handlePremium(self):
+ self.fail(_("Premium download not implemented"))
+
+
+getInfo = create_getInfo(RapidfileshareNet)
diff --git a/pyload/plugin/hoster/RapidgatorNet.py b/pyload/plugin/hoster/RapidgatorNet.py
new file mode 100644
index 000000000..91c9c4eec
--- /dev/null
+++ b/pyload/plugin/hoster/RapidgatorNet.py
@@ -0,0 +1,199 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pycurl import HTTPHEADER
+
+from pyload.utils import json_loads
+from pyload.network.HTTPRequest import BadHeader
+from pyload.plugin.hoster.UnrestrictLi import secondsToMidnight
+from pyload.plugin.internal.captcha import AdsCaptcha, ReCaptcha, SolveMedia
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class RapidgatorNet(SimpleHoster):
+ __name = "RapidgatorNet"
+ __type = "hoster"
+ __version = "0.26"
+
+ __pattern = r'http://(?:www\.)?(rapidgator\.net|rg\.to)/file/\w+'
+
+ __description = """Rapidgator.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("chrox", ""),
+ ("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ API_URL = "http://rapidgator.net/api/file"
+
+ COOKIES = [("rapidgator.net", "lang", "en")]
+
+ NAME_PATTERN = r'<title>Download file (?P<N>.*)</title>'
+ SIZE_PATTERN = r'File size:\s*<strong>(?P<S>[\d.,]+) (?P<U>[\w^_]+)</strong>'
+ OFFLINE_PATTERN = r'>(File not found|Error 404)'
+
+ JSVARS_PATTERN = r'\s+var\s*(startTimerUrl|getDownloadUrl|captchaUrl|fid|secs)\s*=\s*\'?(.*?)\'?;'
+ PREMIUM_ONLY_ERROR_PATTERN = r'You can download files up to|This file can be downloaded by premium only<'
+ DOWNLOAD_LIMIT_ERROR_PATTERN = r'You have reached your (daily|hourly) downloads limit'
+ WAIT_PATTERN = r'(?:Delay between downloads must be not less than|Try again in)\s*(\d+)\s*(hour|min)'
+ LINK_PATTERN = r'return \'(http://\w+.rapidgator.net/.*)\';'
+
+ RECAPTCHA_PATTERN = r'"http://api\.recaptcha\.net/challenge\?k=(.*?)"'
+ ADSCAPTCHA_PATTERN = r'(http://api\.adscaptcha\.com/Get\.aspx[^"\']*)'
+ SOLVEMEDIA_PATTERN = r'http://api\.solvemedia\.com/papi/challenge\.script\?k=(.*?)"'
+
+
+ def setup(self):
+ if self.account:
+ self.sid = self.account.getAccountData(self.user).get('SID', None)
+ else:
+ self.sid = None
+
+ if self.sid:
+ self.premium = True
+
+ self.resumeDownload = self.multiDL = self.premium
+ self.chunkLimit = 1
+
+
+ def api_response(self, cmd):
+ try:
+ json = self.load('%s/%s' % (self.API_URL, cmd),
+ get={'sid': self.sid,
+ 'url': self.pyfile.url}, decode=True)
+ self.logDebug("API:%s" % cmd, json, "SID: %s" % self.sid)
+ json = json_loads(json)
+ status = json['response_status']
+ msg = json['response_details']
+
+ except BadHeader, e:
+ self.logError("API: %s" % cmd, e, "SID: %s" % self.sid)
+ status = e.code
+ msg = e
+
+ if status == 200:
+ return json['response']
+
+ elif status == 423:
+ self.account.empty(self.user)
+ self.retry()
+
+ else:
+ self.account.relogin(self.user)
+ self.retry(wait_time=60)
+
+
+ def handlePremium(self):
+ #self.logDebug("ACCOUNT_DATA", self.account.getAccountData(self.user))
+ self.api_data = self.api_response('info')
+ self.api_data['md5'] = self.api_data['hash']
+ self.pyfile.name = self.api_data['filename']
+ self.pyfile.size = self.api_data['size']
+ url = self.api_response('download')['url']
+ self.download(url)
+
+
+ def handleFree(self):
+ self.checkFree()
+
+ jsvars = dict(re.findall(self.JSVARS_PATTERN, self.html))
+ self.logDebug(jsvars)
+
+ self.req.http.lastURL = self.pyfile.url
+ self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
+
+ url = "http://rapidgator.net%s?fid=%s" % (
+ jsvars.get('startTimerUrl', '/download/AjaxStartTimer'), jsvars['fid'])
+ jsvars.update(self.getJsonResponse(url))
+
+ self.wait(int(jsvars.get('secs', 45)), False)
+
+ url = "http://rapidgator.net%s?sid=%s" % (
+ jsvars.get('getDownloadUrl', '/download/AjaxGetDownload'), jsvars['sid'])
+ jsvars.update(self.getJsonResponse(url))
+
+ self.req.http.lastURL = self.pyfile.url
+ self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With:"])
+
+ url = "http://rapidgator.net%s" % jsvars.get('captchaUrl', '/download/captcha')
+ self.html = self.load(url)
+
+ for _i in xrange(5):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m:
+ link = m.group(1)
+ self.logDebug(link)
+ self.download(link, disposition=True)
+ break
+ else:
+ captcha, captcha_key = self.getCaptcha()
+ challenge, response = captcha.challenge(captcha_key)
+
+ self.html = self.load(url, post={'DownloadCaptchaForm[captcha]': "",
+ 'adcopy_challenge' : challenge,
+ 'adcopy_response' : response})
+
+ if "The verification code is incorrect" in self.html:
+ self.invalidCaptcha()
+ else:
+ self.correctCaptcha()
+ else:
+ self.error(_("Download link"))
+
+
+ def getCaptcha(self):
+ m = re.search(self.ADSCAPTCHA_PATTERN, self.html)
+ if m:
+ captcha_key = m.group(1)
+ captcha = AdsCaptcha(self)
+ else:
+ m = re.search(self.RECAPTCHA_PATTERN, self.html)
+ if m:
+ captcha_key = m.group(1)
+ captcha = ReCaptcha(self)
+ else:
+ m = re.search(self.SOLVEMEDIA_PATTERN, self.html)
+ if m:
+ captcha_key = m.group(1)
+ captcha = SolveMedia(self)
+ else:
+ self.error(_("Captcha"))
+
+ return captcha, captcha_key
+
+
+ def checkFree(self):
+ m = re.search(self.PREMIUM_ONLY_ERROR_PATTERN, self.html)
+ if m:
+ self.fail(_("Premium account needed for download"))
+ else:
+ m = re.search(self.WAIT_PATTERN, self.html)
+
+ if m:
+ wait_time = int(m.group(1)) * {"hour": 60, "min": 1}[m.group(2)] * 60 #: in seconds
+ else:
+ m = re.search(self.DOWNLOAD_LIMIT_ERROR_PATTERN, self.html)
+ if m is None:
+ return
+ elif m.group(1) == "daily":
+ self.logWarning(_("You have reached your daily downloads limit for today"))
+ wait_time = secondsToMidnight(gmt=2)
+ else:
+ wait_time = 1 * 60 * 60
+
+ self.logDebug("Waiting %d minutes" % (wait_time / 60))
+ self.wait(wait_time, True)
+ self.retry()
+
+
+ def getJsonResponse(self, url):
+ res = self.load(url, decode=True)
+ if not res.startswith('{'):
+ self.retry()
+ self.logDebug(url, res)
+ return json_loads(res)
+
+
+getInfo = create_getInfo(RapidgatorNet)
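
checkFree() above converts a daily download limit into a wait time via secondsToMidnight(), which is imported from the UnrestrictLi hoster and not part of this diff. A minimal sketch of what such a helper presumably computes (the seconds remaining until midnight at the given GMT offset; the real implementation may differ):

    # Illustrative only -- the actual helper lives in pyload/plugin/hoster/UnrestrictLi.py.
    from datetime import datetime, timedelta

    def seconds_to_midnight(gmt=0):
        now = datetime.utcnow() + timedelta(hours=gmt)
        midnight = (now + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)
        return int((midnight - now).total_seconds())
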
diff --git a/pyload/plugin/hoster/RapiduNet.py b/pyload/plugin/hoster/RapiduNet.py
new file mode 100644
index 000000000..5e5971bf5
--- /dev/null
+++ b/pyload/plugin/hoster/RapiduNet.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pycurl import HTTPHEADER
+from time import time, altzone
+
+from pyload.utils import json_loads
+from pyload.plugin.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class RapiduNet(SimpleHoster):
+ __name = "RapiduNet"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'https?://(?:www\.)?rapidu\.net/(?P<ID>\d{10})'
+
+ __description = """Rapidu.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("prOq", "")]
+
+
+ COOKIES = [("rapidu.net", "rapidu_lang", "en")]
+
+ FILE_INFO_PATTERN = r'<h1 title="(?P<N>.*)">.*</h1>\s*<small>(?P<S>\d+(\.\d+)?)\s(?P<U>\w+)</small>'
+ OFFLINE_PATTERN = r'404 - File not found'
+
+ ERROR_PATTERN = r'<div class="error">'
+
+ RECAPTCHA_KEY = r'6Ld12ewSAAAAAHoE6WVP_pSfCdJcBQScVweQh8Io'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.limitDL = 0 if self.premium else 2
+
+
+ def handleFree(self):
+ self.req.http.lastURL = self.pyfile.url
+ self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
+
+ jsvars = self.getJsonResponse("https://rapidu.net/ajax.php?a=getLoadTimeToDownload", {'_go': None})
+
+ if str(jsvars['timeToDownload']) == "stop":
+ t = (24 * 60 * 60) - (int(time()) % (24 * 60 * 60)) + altzone #: seconds until local midnight
+
+ self.logInfo(_("You have reached your daily download limit"))
+
+ self.retry(10, 10 if t < 1 else None, "Try again tomorrow") #@NOTE: check t in case the clock is not synchronised
+
+ else:
+ self.wait(int(jsvars['timeToDownload']) - int(time()))
+
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(10):
+ challenge, response = recaptcha.challenge(self.RECAPTCHA_KEY)
+
+ jsvars = self.getJsonResponse("https://rapidu.net/ajax.php?a=getCheckCaptcha",
+ {'_go' : None,
+ 'captcha1': challenge,
+ 'captcha2': response,
+ 'fileId' : self.info['ID']})
+ if jsvars['message'] == 'success':
+ self.download(jsvars['url'])
+ break
+
+
+ def getJsonResponse(self, url, post_data):
+ res = self.load(url, post=post_data, decode=True)
+ if not res.startswith('{'):
+ self.retry()
+
+ self.logDebug(url, res)
+
+ return json_loads(res)
+
+
+getInfo = create_getInfo(RapiduNet)
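
A note on the wait arithmetic in handleFree() above: time.altzone is the local DST offset in seconds west of UTC, so t = 86400 - (int(time()) % 86400) + altzone works out to the seconds left until local midnight, presumably the hoster's daily reset. As a worked example, at 22:00 UTC with altzone = -7200 (UTC+2): t = 86400 - 79200 - 7200 = 0, i.e. exactly the reset boundary, which is what the t < 1 fallback in the retry call guards against alongside an unsynchronised clock.
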
diff --git a/pyload/plugin/hoster/RarefileNet.py b/pyload/plugin/hoster/RarefileNet.py
new file mode 100644
index 000000000..c14bb9008
--- /dev/null
+++ b/pyload/plugin/hoster/RarefileNet.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class RarefileNet(XFSHoster):
+ __name = "RarefileNet"
+ __type = "hoster"
+ __version = "0.08"
+
+ __pattern = r'http://(?:www\.)?rarefile\.net/\w{12}'
+
+ __description = """Rarefile.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "rarefile.net"
+
+ NAME_PATTERN = r'<font color="red">(?P<N>.+?)<'
+ SIZE_PATTERN = r'>Size : (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ LINK_PATTERN = r'<a href="(?P<link>[^"]+)">(?P=link)</a>'
+
+
+getInfo = create_getInfo(RarefileNet)
diff --git a/pyload/plugin/hoster/RealdebridCom.py b/pyload/plugin/hoster/RealdebridCom.py
new file mode 100644
index 000000000..16744bc3d
--- /dev/null
+++ b/pyload/plugin/hoster/RealdebridCom.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from random import randrange
+from urllib import quote, unquote
+from time import time
+
+from pyload.utils import json_loads
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import parseFileSize
+
+
+class RealdebridCom(Hoster):
+ __name = "RealdebridCom"
+ __type = "hoster"
+ __version = "0.53"
+
+ __pattern = r'https?://(?:[^/]*\.)?real-debrid\..*'
+
+ __description = """Real-Debrid.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Devirex Hazzard", "naibaf_11@yahoo.de")]
+
+
+ def getFilename(self, url):
+ try:
+ name = unquote(url.rsplit("/", 1)[1])
+ except IndexError:
+ name = "Unknown_Filename..."
+ if not name or name.endswith(".."): #: incomplete filename, append random stuff
+ name += "%s.tmp" % randrange(100, 999)
+ return name
+
+
+ def setup(self):
+ self.chunkLimit = 3
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ if re.match(self.__pattern, pyfile.url):
+ new_url = pyfile.url
+ elif not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "Real-debrid")
+ self.fail(_("No Real-debrid account provided"))
+ else:
+ self.logDebug("Old URL: %s" % pyfile.url)
+ password = self.getPassword().splitlines()
+ if not password:
+ password = ""
+ else:
+ password = password[0]
+
+ data = json_loads(self.load("https://real-debrid.com/ajax/unrestrict.php",
+ get={'lang' : "en",
+ 'link' : quote(pyfile.url, ""),
+ 'password': password,
+ 'time' : int(time() * 1000)}))
+
+ self.logDebug("Returned Data: %s" % data)
+
+ if data['error'] != 0:
+ if data['message'] == "Your file is unavailable on the hoster.":
+ self.offline()
+ else:
+ self.logWarning(data['message'])
+ self.tempOffline()
+ else:
+ if pyfile.name is not None and pyfile.name.endswith('.tmp') and data['file_name']:
+ pyfile.name = data['file_name']
+ pyfile.size = parseFileSize(data['file_size'])
+ new_url = data['generated_links'][0][-1]
+
+ if self.getConfig("https"):
+ new_url = new_url.replace("http://", "https://")
+ else:
+ new_url = new_url.replace("https://", "http://")
+
+ if new_url != pyfile.url:
+ self.logDebug("New URL: %s" % new_url)
+
+ if pyfile.name.startswith("http") or pyfile.name.startswith("Unknown") or pyfile.name.endswith('..'):
+ # only use when the name wasn't already set
+ pyfile.name = self.getFilename(new_url)
+
+ self.download(new_url, disposition=True)
+
+ check = self.checkDownload(
+ {"error": "<title>An error occured while processing your request</title>"})
+
+ if check == "error":
+ # usually this download can safely be retried
+ self.retry(wait_time=60, reason=_("An error occurred while generating link"))
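
The unrestrict step in process() above boils down to a single GET against real-debrid.com's ajax endpoint, authenticated through the account's session cookies. Ignoring that cookie handling, a rough standalone equivalent looks as follows (Python 2; parameter names and response fields as in the plugin, otherwise illustrative):

    # Hypothetical sketch of the unrestrict request; it omits the login cookies
    # that the real plugin gets from its account handler, so it will not work as-is.
    import json
    import urllib
    import urllib2
    from time import time

    def unrestrict(link, password=""):
        query = urllib.urlencode({'lang'    : "en",
                                  'link'    : link,
                                  'password': password,
                                  'time'    : int(time() * 1000)})
        data = json.loads(urllib2.urlopen("https://real-debrid.com/ajax/unrestrict.php?" + query).read())
        if data['error'] != 0:
            raise Exception(data['message'])
        return data['generated_links'][0][-1]  # direct download link, as used above
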
diff --git a/pyload/plugin/hoster/RedtubeCom.py b/pyload/plugin/hoster/RedtubeCom.py
new file mode 100644
index 000000000..4ab976fdd
--- /dev/null
+++ b/pyload/plugin/hoster/RedtubeCom.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import html_unescape
+
+
+class RedtubeCom(Hoster):
+ __name = "RedtubeCom"
+ __type = "hoster"
+ __version = "0.20"
+
+ __pattern = r'http://(?:www\.)?redtube\.com/\d+'
+
+ __description = """Redtube.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de")]
+
+
+ def process(self, pyfile):
+ self.download_html()
+ if not self.file_exists():
+ self.offline()
+
+ pyfile.name = self.get_file_name()
+ self.download(self.get_file_url())
+
+
+ def download_html(self):
+ url = self.pyfile.url
+ self.html = self.load(url)
+
+
+ def get_file_url(self):
+ """ returns the absolute downloadable filepath
+ """
+ if not self.html:
+ self.download_html()
+
+ file_url = html_unescape(re.search(r'hashlink=(http.*?)"', self.html).group(1))
+
+ return file_url
+
+
+ def get_file_name(self):
+ if not self.html:
+ self.download_html()
+
+ return re.search('<title>(.*?)- RedTube - Free Porn Videos</title>', self.html).group(1).strip() + ".flv"
+
+
+ def file_exists(self):
+ """ returns True or False
+ """
+ if not self.html:
+ self.download_html()
+
+ if re.search(r'This video has been removed.', self.html) is not None:
+ return False
+ else:
+ return True
diff --git a/pyload/plugin/hoster/RehostTo.py b/pyload/plugin/hoster/RehostTo.py
new file mode 100644
index 000000000..75fae2449
--- /dev/null
+++ b/pyload/plugin/hoster/RehostTo.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+
+from urllib import quote, unquote
+
+from pyload.plugin.Hoster import Hoster
+
+
+class RehostTo(Hoster):
+ __name = "RehostTo"
+ __type = "hoster"
+ __version = "0.13"
+
+ __pattern = r'https?://.*rehost\.to\..*'
+
+ __description = """Rehost.to hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org")]
+
+
+ def getFilename(self, url):
+ return unquote(url.rsplit("/", 1)[1])
+
+
+ def setup(self):
+ self.chunkLimit = 1
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ if not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "rehost.to")
+ self.fail(_("No rehost.to account provided"))
+
+ data = self.account.getAccountInfo(self.user)
+ long_ses = data['long_ses']
+
+ self.logDebug("Rehost.to: Old URL: %s" % pyfile.url)
+
+ #raise timeout to 2min
+ self.req.setOption("timeout", 120)
+
+ self.download("http://rehost.to/process_download.php",
+ get={'user': "cookie", 'pass': long_ses, 'dl': quote(pyfile.url, "")},
+ disposition=True)
diff --git a/pyload/plugin/hoster/RemixshareCom.py b/pyload/plugin/hoster/RemixshareCom.py
new file mode 100644
index 000000000..2cf2dd2ff
--- /dev/null
+++ b/pyload/plugin/hoster/RemixshareCom.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://remixshare.com/download/p946u
+#
+# Note:
+#  The remixshare.com website is very slow, so if your download
+#  does not start because of pycurl timeouts, adjust the timeouts
+#  in /usr/share/pyload/pyload/network/HTTPRequest.py
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class RemixshareCom(SimpleHoster):
+ __name = "RemixshareCom"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'https?://remixshare\.com/(download|dl)/\w+'
+
+ __description = """Remixshare.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ INFO_PATTERN = r'title=\'.+?\'>(?P<N>.+?)</span><span class=\'light2\'>&nbsp;\((?P<S>\d+)&nbsp;(?P<U>[\w^_]+)\)<'
+ OFFLINE_PATTERN = r'<h1>Ooops!<'
+
+ LINK_PATTERN = r'(http://remixshare\.com/downloadfinal/.+?)"'
+ TOKEN_PATTERN = r'var acc = (\d+)'
+ WAIT_PATTERN = r'var XYZ = r"(\d+)"'
+
+
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = 1
+
+
+ def handleFree(self):
+ b = re.search(self.LINK_PATTERN, self.html)
+ if not b:
+ self.error(_("Cannot parse download url"))
+ c = re.search(self.TOKEN_PATTERN, self.html)
+ if not c:
+ self.error(_("Cannot parse file token"))
+ dl_url = b.group(1) + c.group(1)
+
+ #Check if we have to wait
+ seconds = re.search(self.WAIT_PATTERN, self.html)
+ if seconds:
+ self.logDebug("Wait " + seconds.group(1))
+ self.wait(int(seconds.group(1)))
+
+ # Finally start downloading...
+ self.download(dl_url, disposition=True)
+
+
+getInfo = create_getInfo(RemixshareCom)
diff --git a/pyload/plugin/hoster/RgHostNet.py b/pyload/plugin/hoster/RgHostNet.py
new file mode 100644
index 000000000..82dcf42df
--- /dev/null
+++ b/pyload/plugin/hoster/RgHostNet.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class RgHostNet(SimpleHoster):
+ __name = "RgHostNet"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?rghost\.net/\d+(?:r=\d+)?'
+
+ __description = """RgHost.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("z00nx", "z00nx0@gmail.com")]
+
+
+ INFO_PATTERN = r'<h1>\s+(<a[^>]+>)?(?P<N>[^<]+)(</a>)?\s+<small[^>]+>\s+\((?P<S>[^)]+)\)\s+</small>\s+</h1>'
+ OFFLINE_PATTERN = r'File is deleted|this page is not found'
+
+ LINK_FREE_PATTERN = r'<a\s+href="([^"]+)"\s+class="btn\s+large\s+download"[^>]+>Download</a>'
+
+
+getInfo = create_getInfo(RgHostNet)
diff --git a/pyload/plugin/hoster/RyushareCom.py b/pyload/plugin/hoster/RyushareCom.py
new file mode 100644
index 000000000..233ba57a4
--- /dev/null
+++ b/pyload/plugin/hoster/RyushareCom.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://ryushare.com/cl0jy8ric2js/random.bin
+
+import re
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+from pyload.plugin.internal.captcha import SolveMedia
+
+
+class RyushareCom(XFSHoster):
+ __name = "RyushareCom"
+ __type = "hoster"
+ __version = "0.20"
+
+ __pattern = r'http://(?:www\.)?ryushare\.com/\w+'
+
+ __description = """Ryushare.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it"),
+ ("quareevo", "quareevo@arcor.de")]
+
+
+ HOSTER_DOMAIN = "ryushare.com"
+
+ SIZE_PATTERN = r'You have requested <font color="red">[^<]+</font> \((?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ WAIT_PATTERN = r'You have to wait ((?P<hour>\d+) hour[s]?, )?((?P<min>\d+) minute[s], )?(?P<sec>\d+) second[s]'
+ LINK_PATTERN = r'<a href="([^"]+)">Click here to download<'
+
+
+ def getDownloadLink(self):
+ retry = False
+ self.html = self.load(self.pyfile.url)
+ action, inputs = self.parseHtmlForm(input_names={"op": re.compile("^download")})
+ if "method_premium" in inputs:
+ del inputs['method_premium']
+
+ self.html = self.load(self.pyfile.url, post=inputs)
+ action, inputs = self.parseHtmlForm('F1')
+
+ self.setWait(65)
+ # Wait 1 hour if the download limit has been reached
+ if "You have reached the download-limit" in self.html:
+ self.setWait(1 * 60 * 60, True)
+ retry = True
+
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ wait = m.groupdict(0)
+ waittime = int(wait['hour']) * 60 * 60 + int(wait['min']) * 60 + int(wait['sec'])
+ self.setWait(waittime, True)
+ retry = True
+
+ self.wait()
+ if retry:
+ self.retry()
+
+ for _i in xrange(5):
+ solvemedia = SolveMedia(self)
+ challenge, response = solvemedia.challenge()
+
+ inputs['adcopy_challenge'] = challenge
+ inputs['adcopy_response'] = response
+
+ self.html = self.load(self.pyfile.url, post=inputs)
+ if "WRONG CAPTCHA" in self.html:
+ self.invalidCaptcha()
+ else:
+ self.correctCaptcha()
+ break
+ else:
+ self.fail(_("You have entered 5 invalid captcha codes"))
+
+ if "Click here to download" in self.html:
+ return re.search(r'<a href="([^"]+)">Click here to download</a>', self.html).group(1)
+
+
+getInfo = create_getInfo(RyushareCom)
diff --git a/pyload/plugin/hoster/SafesharingEu.py b/pyload/plugin/hoster/SafesharingEu.py
new file mode 100644
index 000000000..40824d01f
--- /dev/null
+++ b/pyload/plugin/hoster/SafesharingEu.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class SafesharingEu(XFSHoster):
+ __name = "SafesharingEu"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'https?://(?:www\.)?safesharing\.eu/\w{12}'
+
+ __description = """Safesharing.eu hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
+
+
+ HOSTER_DOMAIN = "safesharing.eu"
+
+ WAIT_PATTERN = r'You have to wait (\d+) minutes'
+
+ ERROR_PATTERN = r'(?:<div class="alert alert-danger">)(.+?)(?:</div>)'
+
+
+getInfo = create_getInfo(SafesharingEu)
diff --git a/pyload/plugin/hoster/SecureUploadEu.py b/pyload/plugin/hoster/SecureUploadEu.py
new file mode 100644
index 000000000..c332b890f
--- /dev/null
+++ b/pyload/plugin/hoster/SecureUploadEu.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class SecureUploadEu(XFSHoster):
+ __name = "SecureUploadEu"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'https?://(?:www\.)?secureupload\.eu/\w{12}'
+
+ __description = """SecureUpload.eu hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("z00nx", "z00nx0@gmail.com")]
+
+
+ HOSTER_DOMAIN = "secureupload.eu"
+
+ INFO_PATTERN = r'<h3>Downloading (?P<N>[^<]+) \((?P<S>[^<]+)\)</h3>'
+
+
+getInfo = create_getInfo(SecureUploadEu)
diff --git a/pyload/plugin/hoster/SendmywayCom.py b/pyload/plugin/hoster/SendmywayCom.py
new file mode 100644
index 000000000..2912600fa
--- /dev/null
+++ b/pyload/plugin/hoster/SendmywayCom.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class SendmywayCom(XFSHoster):
+ __name = "SendmywayCom"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'http://(?:www\.)?sendmyway\.com/\w{12}'
+
+ __description = """SendMyWay hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "sendmyway.com"
+
+ NAME_PATTERN = r'<p class="file-name" ><.*?>\s*(?P<N>.+)'
+ SIZE_PATTERN = r'<small>\((?P<S>\d+) bytes\)</small>'
+
+
+getInfo = create_getInfo(SendmywayCom)
diff --git a/pyload/plugin/hoster/SendspaceCom.py b/pyload/plugin/hoster/SendspaceCom.py
new file mode 100644
index 000000000..bd92fb058
--- /dev/null
+++ b/pyload/plugin/hoster/SendspaceCom.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class SendspaceCom(SimpleHoster):
+ __name = "SendspaceCom"
+ __type = "hoster"
+ __version = "0.14"
+
+ __pattern = r'http://(?:www\.)?sendspace\.com/file/.*'
+
+ __description = """Sendspace.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<h2 class="bgray">\s*<(?:b|strong)>(?P<N>[^<]+)</'
+ SIZE_PATTERN = r'<div class="file_description reverse margin_center">\s*<b>File Size:</b>\s*(?P<S>[\d.,]+)(?P<U>[\w^_]+)\s*</div>'
+ OFFLINE_PATTERN = r'<div class="msg error" style="cursor: default">Sorry, the file you requested is not available.</div>'
+
+ LINK_PATTERN = r'<a id="download_button" href="([^"]+)"'
+ CAPTCHA_PATTERN = r'<td><img src="(/captchas/captcha\.php\?captcha=([^"]+))"></td>'
+ USER_CAPTCHA_PATTERN = r'<td><img src="(/captchas/captcha\.php\?user=([^"]+))"></td>'
+
+
+ def handleFree(self):
+ params = {}
+ for _i in xrange(3):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m:
+ if 'captcha_hash' in params:
+ self.correctCaptcha()
+ download_url = m.group(1)
+ break
+
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m:
+ if 'captcha_hash' in params:
+ self.invalidCaptcha()
+ captcha_url1 = "http://www.sendspace.com/" + m.group(1)
+ m = re.search(self.USER_CAPTCHA_PATTERN, self.html)
+ captcha_url2 = "http://www.sendspace.com/" + m.group(1)
+ params = {'captcha_hash': m.group(2),
+ 'captcha_submit': 'Verify',
+ 'captcha_answer': self.decryptCaptcha(captcha_url1) + " " + self.decryptCaptcha(captcha_url2)}
+ else:
+ params = {'download': "Regular Download"}
+
+ self.logDebug(params)
+ self.html = self.load(self.pyfile.url, post=params)
+ else:
+ self.fail(_("Download link not found"))
+
+ self.download(download_url)
+
+
+getInfo = create_getInfo(SendspaceCom)
diff --git a/pyload/plugin/hoster/Share4webCom.py b/pyload/plugin/hoster/Share4webCom.py
new file mode 100644
index 000000000..479f42e90
--- /dev/null
+++ b/pyload/plugin/hoster/Share4webCom.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.hoster.UnibytesCom import UnibytesCom
+from pyload.plugin.internal.SimpleHoster import create_getInfo
+
+
+class Share4webCom(UnibytesCom):
+ __name = "Share4webCom"
+ __type = "hoster"
+ __version = "0.11"
+
+ __pattern = r'https?://(?:www\.)?share4web\.com/get/\w+'
+
+ __description = """Share4web.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "share4web.com"
+
+
+getInfo = create_getInfo(Share4webCom)
diff --git a/pyload/plugin/hoster/Share76Com.py b/pyload/plugin/hoster/Share76Com.py
new file mode 100644
index 000000000..43f74ec23
--- /dev/null
+++ b/pyload/plugin/hoster/Share76Com.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class Share76Com(DeadHoster):
+ __name = "Share76Com"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'http://(?:www\.)?share76\.com/\w{12}'
+
+ __description = """Share76.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = []
+
+
+getInfo = create_getInfo(Share76Com)
diff --git a/pyload/plugin/hoster/ShareFilesCo.py b/pyload/plugin/hoster/ShareFilesCo.py
new file mode 100644
index 000000000..cbb5d3895
--- /dev/null
+++ b/pyload/plugin/hoster/ShareFilesCo.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class ShareFilesCo(DeadHoster):
+ __name = "ShareFilesCo"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?sharefiles\.co/\w{12}'
+
+ __description = """Sharefiles.co hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(ShareFilesCo)
diff --git a/pyload/plugin/hoster/SharebeesCom.py b/pyload/plugin/hoster/SharebeesCom.py
new file mode 100644
index 000000000..f46e85a5f
--- /dev/null
+++ b/pyload/plugin/hoster/SharebeesCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class SharebeesCom(DeadHoster):
+ __name = "SharebeesCom"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?sharebees\.com/\w{12}'
+
+ __description = """ShareBees hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(SharebeesCom)
diff --git a/pyload/plugin/hoster/ShareonlineBiz.py b/pyload/plugin/hoster/ShareonlineBiz.py
new file mode 100644
index 000000000..a1c5da2e6
--- /dev/null
+++ b/pyload/plugin/hoster/ShareonlineBiz.py
@@ -0,0 +1,191 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import time
+from urllib import unquote
+from urlparse import urlparse
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class ShareonlineBiz(SimpleHoster):
+ __name = "ShareonlineBiz"
+ __type = "hoster"
+ __version = "0.44"
+
+ __pattern = r'https?://(?:www\.)?(share-online\.biz|egoshare\.com)/(download\.php\?id=|dl/)(?P<ID>\w+)'
+
+ __description = """Shareonline.biz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org"),
+ ("mkaay", "mkaay@mkaay.de"),
+ ("zoidberg", "zoidberg@mujmail.cz"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ URL_REPLACEMENTS = [(__pattern + ".*", "http://www.share-online.biz/dl/\g<ID>")]
+
+ RECAPTCHA_KEY = "6LdatrsSAAAAAHZrB70txiV5p-8Iv8BtVxlTtjKX"
+
+ ERROR_INFO_PATTERN = r'<p class="b">Information:</p>\s*<div>\s*<strong>(.*?)</strong>'
+
+
+ @classmethod
+ def getInfo(cls, url="", html=""):
+ info = {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3 if url else 1, 'url': url}
+
+ if url:
+ info['pattern'] = re.match(cls.__pattern, url).groupdict()
+
+ field = getURL("http://api.share-online.biz/linkcheck.php",
+ get={'md5': "1"},
+ post={'links': info['pattern']['ID']},
+ decode=True).split(";")
+
+ if field[1] == "OK":
+ info['fileid'] = field[0]
+ info['status'] = 2
+ info['name'] = field[2]
+ info['size'] = field[3] #: in bytes
+ info['md5'] = field[4].strip().lower().replace("\n\n", "") #: md5
+
+ elif field[1] in ("DELETED", "NOT FOUND"):
+ info['status'] = 1
+
+ return info
+
+
+ def setup(self):
+ self.resumeDownload = self.premium
+ self.multiDL = False
+
+
+ def handleCaptcha(self):
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(5):
+ challenge, response = recaptcha.challenge(self.RECAPTCHA_KEY)
+
+ m = re.search(r'var wait=(\d+);', self.html)
+ self.setWait(int(m.group(1)) if m else 30)
+
+ res = self.load("%s/free/captcha/%d" % (self.pyfile.url, int(time() * 1000)),
+ post={'dl_free' : "1",
+ 'recaptcha_challenge_field': challenge,
+ 'recaptcha_response_field' : response})
+ if not res == '0':
+ self.correctCaptcha()
+ return res
+ else:
+ self.invalidCaptcha()
+ else:
+ self.invalidCaptcha()
+ self.fail(_("No valid captcha solution received"))
+
+
+ def handleFree(self):
+ self.html = self.load(self.pyfile.url, cookies=True) #: refer, stuff
+
+ self.wait(3)
+
+ self.html = self.load("%s/free/" % self.pyfile.url, post={"dl_free": "1", "choice": "free"}, decode=True)
+
+ self.checkErrors()
+
+ res = self.handleCaptcha()
+
+ download_url = res.decode("base64")
+
+ if not download_url.startswith("http://"):
+ self.error(_("Wrong download url"))
+
+ self.wait()
+
+ self.download(download_url)
+
+
+ def checkFile(self):
+ # check download
+ check = self.checkDownload({
+ 'empty' : re.compile(r"^$"),
+ 'cookie': re.compile(r'<div id="dl_failure"'),
+ 'fail' : re.compile(r"<title>Share-Online")
+ })
+
+ if check == "empty":
+ self.fail(_("Empty file"))
+
+ elif check == "cookie":
+ self.invalidCaptcha()
+ self.retry(5, 60, _("Cookie failure"))
+
+ elif check == "fail":
+ self.invalidCaptcha()
+ self.retry(5, 5 * 60, _("Download failed"))
+
+
+ def handlePremium(self): #: should be working better loading (account) api internally
+ self.account.getAccountInfo(self.user, True)
+
+ html = self.load("http://api.share-online.biz/account.php",
+ {"username": self.user, "password": self.account.accounts[self.user]['password'],
+ "act": "download", "lid": self.info['fileid']})
+
+ self.api_data = dlinfo = {}
+
+ for line in html.splitlines():
+ key, value = line.split(": ")
+ dlinfo[key.lower()] = value
+
+ self.logDebug(dlinfo)
+
+ if not dlinfo['status'] == "online":
+ self.offline()
+ else:
+ self.pyfile.name = dlinfo['name']
+ self.pyfile.size = int(dlinfo['size'])
+
+ dlLink = dlinfo['url']
+
+ if dlLink == "server_under_maintenance":
+ self.tempOffline()
+ else:
+ self.multiDL = True
+ self.download(dlLink)
+
+
+ def checkErrors(self):
+ m = re.search(r"/failure/(.*?)/1", self.req.lastEffectiveURL)
+ if m is None:
+ self.info.pop('error', None)
+ return
+
+ errmsg = m.group(1).lower()
+
+ try:
+ self.logError(errmsg, re.search(self.ERROR_INFO_PATTERN, self.html).group(1))
+ except Exception:
+ self.logError("Unknown error occurred", errmsg)
+
+ if errmsg == "invalid":
+ self.fail(_("File not available"))
+
+ elif errmsg in ("freelimit", "size", "proxy"):
+ self.fail(_("Premium account needed"))
+
+ elif errmsg in ("expired", "server"):
+ self.retry(wait_time=600, reason=errmsg)
+
+ elif 'slot' in errmsg:
+ self.wantReconnect = True
+ self.retry(24, 3600, errmsg)
+
+ else:
+ self.wantReconnect = True
+ self.retry(wait_time=60, reason=errmsg)
+
+
+getInfo = create_getInfo(ShareonlineBiz)
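
getInfo() above is driven by share-online.biz's linkcheck API, which answers with one semicolon-separated record per link. A condensed standalone sketch of that request and the field layout used above (Python 2; endpoint, md5 flag and field positions come from the classmethod, the rest is illustrative):

    # Hypothetical standalone version of the linkcheck call in getInfo().
    import urllib
    import urllib2

    def linkcheck(file_id):
        req = urllib2.Request("http://api.share-online.biz/linkcheck.php?md5=1",
                              data=urllib.urlencode({'links': file_id}))
        field = urllib2.urlopen(req).read().split(";")
        return {'fileid': field[0],
                'online': field[1] == "OK",      # "DELETED" / "NOT FOUND" mean offline
                'name'  : field[2],
                'size'  : field[3],              #: in bytes
                'md5'   : field[4].strip().lower()}
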
diff --git a/pyload/plugin/hoster/ShareplaceCom.py b/pyload/plugin/hoster/ShareplaceCom.py
new file mode 100644
index 000000000..173e7f41c
--- /dev/null
+++ b/pyload/plugin/hoster/ShareplaceCom.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urllib import unquote
+
+from pyload.plugin.Hoster import Hoster
+
+
+class ShareplaceCom(Hoster):
+ __name = "ShareplaceCom"
+ __type = "hoster"
+ __version = "0.11"
+
+ __pattern = r'(http://)?(?:www\.)?shareplace\.(com|org)/\?\w+'
+
+ __description = """Shareplace.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("ACCakut", "")]
+
+
+ def process(self, pyfile):
+ self.pyfile = pyfile
+ self.prepare()
+ self.download(self.get_file_url())
+
+
+ def prepare(self):
+ if not self.file_exists():
+ self.offline()
+
+ self.pyfile.name = self.get_file_name()
+
+ wait_time = self.get_waiting_time()
+ self.setWait(wait_time)
+ self.wait()
+
+
+ def get_waiting_time(self):
+ if not self.html:
+ self.download_html()
+
+ #var zzipitime = 15;
+ m = re.search(r'var zzipitime = (\d+);', self.html)
+ if m:
+ sec = int(m.group(1))
+ else:
+ sec = 0
+
+ return sec
+
+
+ def download_html(self):
+ url = re.sub("shareplace.com\/\?", "shareplace.com//index1.php/?a=", self.pyfile.url)
+ self.html = self.load(url, decode=True)
+
+
+ def get_file_url(self):
+ """ returns the absolute downloadable filepath
+ """
+ url = re.search(r"var beer = '(.*?)';", self.html)
+ if url:
+ url = url.group(1)
+ url = unquote(
+ url.replace("http://http:/", "").replace("vvvvvvvvv", "").replace("lllllllll", "").replace(
+ "teletubbies", ""))
+ self.logDebug("URL: %s" % url)
+ return url
+ else:
+ self.error(_("Absolute filepath not found"))
+
+
+ def get_file_name(self):
+ if not self.html:
+ self.download_html()
+
+ return re.search("<title>\s*(.*?)\s*</title>", self.html).group(1)
+
+
+ def file_exists(self):
+ """ returns True or False
+ """
+ if not self.html:
+ self.download_html()
+
+ if re.search(r"HTTP Status 404", self.html) is not None:
+ return False
+ else:
+ return True
diff --git a/pyload/plugin/hoster/SharingmatrixCom.py b/pyload/plugin/hoster/SharingmatrixCom.py
new file mode 100644
index 000000000..8146d67b2
--- /dev/null
+++ b/pyload/plugin/hoster/SharingmatrixCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class SharingmatrixCom(DeadHoster):
+ __name = "SharingmatrixCom"
+ __type = "hoster"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?sharingmatrix\.com/file/\w+'
+
+ __description = """Sharingmatrix.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de"),
+ ("paulking", "")]
+
+
+getInfo = create_getInfo(SharingmatrixCom)
diff --git a/pyload/plugin/hoster/ShragleCom.py b/pyload/plugin/hoster/ShragleCom.py
new file mode 100644
index 000000000..139283cd1
--- /dev/null
+++ b/pyload/plugin/hoster/ShragleCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class ShragleCom(DeadHoster):
+ __name = "ShragleCom"
+ __type = "hoster"
+ __version = "0.22"
+
+ __pattern = r'http://(?:www\.)?(cloudnator|shragle)\.com/files/(?P<ID>.*?)/'
+
+ __description = """Cloudnator.com (Shragle.com) hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(ShragleCom)
diff --git a/pyload/plugin/hoster/SimplyPremiumCom.py b/pyload/plugin/hoster/SimplyPremiumCom.py
new file mode 100644
index 000000000..364c0bd0e
--- /dev/null
+++ b/pyload/plugin/hoster/SimplyPremiumCom.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from datetime import datetime, timedelta
+
+from pyload.plugin.Hoster import Hoster
+from pyload.plugin.hoster.UnrestrictLi import secondsToMidnight
+
+
+class SimplyPremiumCom(Hoster):
+ __name = "SimplyPremiumCom"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'https?://.*(simply-premium)\.com'
+
+ __description = """Simply-Premium.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("EvolutionClip", "evolutionclip@live.de")]
+
+
+ def setup(self):
+ self.chunkLimit = 16
+ self.resumeDownload = False
+
+
+ def process(self, pyfile):
+ if re.match(self.__pattern, pyfile.url):
+ new_url = pyfile.url
+ elif not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "Simply-Premium.com")
+ self.fail(_("No Simply-Premium.com account provided"))
+ else:
+ self.logDebug("Old URL: %s" % pyfile.url)
+ for i in xrange(5):
+ page = self.load("http://www.simply-premium.com/premium.php", get={'info': "", 'link': pyfile.url})
+ self.logDebug("JSON data: " + page)
+ if page != '':
+ break
+ else:
+ self.logInfo(_("Unable to get API data, waiting 1 minute and retry"))
+ self.retry(5, 60, "Unable to get API data")
+
+ if '<valid>0</valid>' in page or (
+ "You are not allowed to download from this host" in page and self.premium):
+ self.account.relogin(self.user)
+ self.retry()
+ elif "NOTFOUND" in page:
+ self.offline()
+ elif "downloadlimit" in page:
+ self.logWarning(_("Reached maximum connections"))
+ self.retry(5, 60, "Reached maximum connections")
+ elif "trafficlimit" in page:
+ self.logWarning(_("Reached daily limit for this host"))
+ self.retry(wait_time=secondsToMidnight(gmt=2), reason="Daily limit for this host reached")
+ elif "hostererror" in page:
+ self.logWarning(_("Hoster temporarily unavailable, waiting 1 minute and retry"))
+ self.retry(5, 60, "Hoster is temporarily unavailable")
+ #page = json_loads(page)
+ #new_url = page.keys()[0]
+ #self.api_data = page[new_url]
+
+ try:
+ self.pyfile.name = re.search(r'<name>([^<]+)</name>', page).group(1)
+ except AttributeError:
+ self.pyfile.name = ""
+
+ try:
+ self.pyfile.size = re.search(r'<size>(\d+)</size>', page).group(1)
+ except AttributeError:
+ self.pyfile.size = 0
+
+ try:
+ new_url = re.search(r'<download>([^<]+)</download>', page).group(1)
+ except AttributeError:
+ new_url = 'http://www.simply-premium.com/premium.php?link=' + pyfile.url
+
+ if new_url != pyfile.url:
+ self.logDebug("New URL: " + new_url)
+
+ self.download(new_url, disposition=True)
diff --git a/pyload/plugin/hoster/SimplydebridCom.py b/pyload/plugin/hoster/SimplydebridCom.py
new file mode 100644
index 000000000..a0ef4a596
--- /dev/null
+++ b/pyload/plugin/hoster/SimplydebridCom.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+
+
+class SimplydebridCom(Hoster):
+ __name = "SimplydebridCom"
+ __type = "hoster"
+ __version = "0.10"
+
+ __pattern = r'http://(?:www\.)?\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/sd\.php/*'
+
+ __description = """Simply-debrid.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Kagenoshin", "kagenoshin@gmx.ch")]
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = 1
+
+
+ def process(self, pyfile):
+ if not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "simply-debrid.com")
+ self.fail(_("No simply-debrid.com account provided"))
+
+ self.logDebug("Old URL: %s" % pyfile.url)
+
+ #fix the links for simply-debrid.com!
+ new_url = pyfile.url
+ new_url = new_url.replace("clz.to", "cloudzer.net/file")
+ new_url = new_url.replace("http://share-online", "http://www.share-online")
+ new_url = new_url.replace("ul.to", "uploaded.net/file")
+ new_url = new_url.replace("uploaded.com", "uploaded.net")
+ new_url = new_url.replace("filerio.com", "filerio.in")
+ new_url = new_url.replace("lumfile.com", "lumfile.se")
+ if 'fileparadox' in new_url:
+ new_url = new_url.replace("http://", "https://")
+
+ self.logDebug("New URL: %s" % new_url)
+
+ if not re.match(self.__pattern, new_url):
+ page = self.load("http://simply-debrid.com/api.php", get={'dl': new_url}) # +'&u='+self.user+'&p='+self.account.getAccountData(self.user)['password'])
+ if 'tiger Link' in page or 'Invalid Link' in page or ('API' in page and 'ERROR' in page):
+ self.fail(_("Unable to unrestrict link"))
+ new_url = page
+
+ self.setWait(5)
+ self.wait()
+ self.logDebug("Unrestricted URL: " + new_url)
+
+ self.download(new_url, disposition=True)
+
+ check = self.checkDownload({"bad1": "No address associated with hostname", "bad2": "<html"})
+
+ if check == "bad1" or check == "bad2":
+ self.retry(24, 3 * 60, "Bad file downloaded")
diff --git a/pyload/plugin/hoster/SockshareCom.py b/pyload/plugin/hoster/SockshareCom.py
new file mode 100644
index 000000000..4ce005135
--- /dev/null
+++ b/pyload/plugin/hoster/SockshareCom.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class SockshareCom(DeadHoster):
+ __name = "SockshareCom"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?sockshare\.com/(mobile/)?(file|embed)/(?P<ID>\w+)'
+
+ __description = """Sockshare.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de"),
+ ("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+getInfo = create_getInfo(SockshareCom)
diff --git a/pyload/plugin/hoster/SoundcloudCom.py b/pyload/plugin/hoster/SoundcloudCom.py
new file mode 100644
index 000000000..48a5b465c
--- /dev/null
+++ b/pyload/plugin/hoster/SoundcloudCom.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+import pycurl
+import re
+
+from pyload.plugin.Hoster import Hoster
+
+
+class SoundcloudCom(Hoster):
+ __name = "SoundcloudCom"
+ __type = "hoster"
+ __version = "0.10"
+
+ __pattern = r'https?://(?:www\.)?soundcloud\.com/(?P<UID>.*?)/(?P<SID>.*)'
+
+ __description = """SoundCloud.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Peekayy", "peekayy.dev@gmail.com")]
+
+
+ def process(self, pyfile):
+ # default UserAgent of HTTPRequest fails for this hoster so we use this one
+ self.req.http.c.setopt(pycurl.USERAGENT, 'Mozilla/5.0')
+ page = self.load(pyfile.url)
+ m = re.search(r'<div class="haudio.*?large.*?" data-sc-track="(?P<ID>\d*)"', page)
+ songId = clientId = ""
+ if m:
+ songId = m.group("ID")
+ if len(songId) <= 0:
+ self.logError(_("Could not find song id"))
+ self.offline()
+ else:
+ m = re.search(r'"clientID":"(?P<CID>.*?)"', page)
+ if m:
+ clientId = m.group("CID")
+
+ if len(clientId) <= 0:
+ clientId = "b45b1aa10f1ac2941910a7f0d10f8e28"
+
+ m = re.search(r'<em itemprop="name">\s(?P<TITLE>.*?)\s</em>', page)
+ if m:
+ pyfile.name = m.group("TITLE") + ".mp3"
+ else:
+ pyfile.name = re.match(self.__pattern, pyfile.url).group("SID") + ".mp3"
+
+ # url to retrieve the actual song url
+ page = self.load("https://api.sndcdn.com/i1/tracks/%s/streams" % songId, get={"client_id": clientId})
+ # getting streams
+ # for now we choose the first stream found in all cases
+ # it could be improved if relevant for this hoster
+ streams = [
+ (result.group("QUALITY"), result.group("URL"))
+ for result in re.finditer(r'"(?P<QUALITY>.*?)":"(?P<URL>.*?)"', page)
+ ]
+ self.logDebug("Found Streams", streams)
+ self.logDebug("Downloading", streams[0][0], streams[0][1])
+ self.download(streams[0][1])
diff --git a/pyload/plugin/hoster/SpeedLoadOrg.py b/pyload/plugin/hoster/SpeedLoadOrg.py
new file mode 100644
index 000000000..39e27ef4c
--- /dev/null
+++ b/pyload/plugin/hoster/SpeedLoadOrg.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class SpeedLoadOrg(DeadHoster):
+ __name = "SpeedLoadOrg"
+ __type = "hoster"
+ __version = "1.02"
+
+ __pattern = r'http://(?:www\.)?speedload\.org/(?P<ID>\w+)'
+
+ __description = """Speedload.org hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+getInfo = create_getInfo(SpeedLoadOrg)
diff --git a/pyload/plugin/hoster/SpeedfileCz.py b/pyload/plugin/hoster/SpeedfileCz.py
new file mode 100644
index 000000000..29af9db4f
--- /dev/null
+++ b/pyload/plugin/hoster/SpeedfileCz.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class SpeedfileCz(DeadHoster):
+ __name = "SpeedfileCz"
+ __type = "hoster"
+ __version = "0.32"
+
+ __pattern = r'http://(?:www\.)?speedfile\.cz/.*'
+
+ __description = """Speedfile.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(SpeedfileCz)
diff --git a/pyload/plugin/hoster/SpeedyshareCom.py b/pyload/plugin/hoster/SpeedyshareCom.py
new file mode 100644
index 000000000..959af8025
--- /dev/null
+++ b/pyload/plugin/hoster/SpeedyshareCom.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://speedy.sh/ep2qY/Zapp-Brannigan.jpg
+
+import re
+
+from urlparse import urljoin
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class SpeedyshareCom(SimpleHoster):
+ __name = "SpeedyshareCom"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'https?://(?:www\.)?(speedyshare\.com|speedy\.sh)/\w+'
+
+ __description = """Speedyshare.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
+
+
+ NAME_PATTERN = r'class=downloadfilename>(?P<N>.*)</span></td>'
+ SIZE_PATTERN = r'class=sizetagtext>(?P<S>.*) (?P<U>[kKmM]?[iI]?[bB]?)</div>'
+
+ OFFLINE_PATTERN = r'class=downloadfilenamenotfound>.*</span>'
+
+ LINK_PATTERN = r'<a href=\'(.*)\'><img src=/gf/slowdownload\.png alt=\'Slow Download\' border=0'
+
+
+ def setup(self):
+ self.multiDL = False
+ self.chunkLimit = 1
+
+
+ def handleFree(self):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("Download link not found"))
+
+ dl_link = urljoin("http://www.speedyshare.com", m.group(1))
+ self.download(dl_link, disposition=True)
+
+ check = self.checkDownload({'html': re.compile("html")})
+ if check == "html":
+ self.error(_("Downloaded file is an html page"))
+
+
+getInfo = create_getInfo(SpeedyshareCom)
diff --git a/pyload/plugin/hoster/StorageTo.py b/pyload/plugin/hoster/StorageTo.py
new file mode 100644
index 000000000..978a7f75d
--- /dev/null
+++ b/pyload/plugin/hoster/StorageTo.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class StorageTo(DeadHoster):
+ __name = "StorageTo"
+ __type = "hoster"
+ __version = "0.01"
+
+ __pattern = r'http://(?:www\.)?storage\.to/get/.+'
+
+ __description = """Storage.to hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("mkaay", "mkaay@mkaay.de")]
+
+
+getInfo = create_getInfo(StorageTo)
diff --git a/pyload/plugin/hoster/StreamCz.py b/pyload/plugin/hoster/StreamCz.py
new file mode 100644
index 000000000..831072d77
--- /dev/null
+++ b/pyload/plugin/hoster/StreamCz.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Hoster import Hoster
+
+
+def getInfo(urls):
+ result = []
+
+ for url in urls:
+
+ html = getURL(url)
+ if re.search(StreamCz.OFFLINE_PATTERN, html):
+ # File offline
+ result.append((url, 0, 1, url))
+ else:
+ result.append((url, 0, 2, url))
+ yield result
+
+
+class StreamCz(Hoster):
+ __name = "StreamCz"
+ __type = "hoster"
+ __version = "0.20"
+
+ __pattern = r'https?://(?:www\.)?stream\.cz/[^/]+/\d+.*'
+
+ __description = """Stream.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<link rel="video_src" href="http://www\.stream\.cz/\w+/(\d+)-([^"]+)" />'
+ OFFLINE_PATTERN = r'<h1 class="commonTitle">Str.nku nebylo mo.n. nal.zt \(404\)</h1>'
+
+ CDN_PATTERN = r'<param name="flashvars" value="[^"]*&id=(?P<ID>\d+)(?:&cdnLQ=(?P<cdnLQ>\d*))?(?:&cdnHQ=(?P<cdnHQ>\d*))?(?:&cdnHD=(?P<cdnHD>\d*))?&'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def process(self, pyfile):
+ self.html = self.load(pyfile.url, decode=True)
+
+ if re.search(self.OFFLINE_PATTERN, self.html):
+ self.offline()
+
+ m = re.search(self.CDN_PATTERN, self.html)
+ if m is None:
+ self.error(_("CDN_PATTERN not found"))
+ cdn = m.groupdict()
+ self.logDebug(cdn)
+ for cdnkey in ("cdnHD", "cdnHQ", "cdnLQ"):
+ if cdnkey in cdn and cdn[cdnkey] > '':
+ cdnid = cdn[cdnkey]
+ break
+ else:
+ self.fail(_("Stream URL not found"))
+
+ m = re.search(self.NAME_PATTERN, self.html)
+ if m is None:
+ self.error(_("NAME_PATTERN not found"))
+ pyfile.name = "%s-%s.%s.mp4" % (m.group(2), m.group(1), cdnkey[-2:])
+
+ download_url = "http://cdn-dispatcher.stream.cz/?id=" + cdnid
+ self.logInfo(_("STREAM: %s") % cdnkey[-2:], download_url)
+ self.download(download_url)
diff --git a/pyload/plugin/hoster/StreamcloudEu.py b/pyload/plugin/hoster/StreamcloudEu.py
new file mode 100644
index 000000000..63bd24cbf
--- /dev/null
+++ b/pyload/plugin/hoster/StreamcloudEu.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class StreamcloudEu(XFSHoster):
+ __name = "StreamcloudEu"
+ __type = "hoster"
+ __version = "0.09"
+
+ __pattern = r'http://(?:www\.)?streamcloud\.eu/\w{12}'
+
+ __description = """Streamcloud.eu hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("seoester", "seoester@googlemail.com")]
+
+
+ HOSTER_DOMAIN = "streamcloud.eu"
+
+ LINK_PATTERN = r'file: "(http://(stor|cdn)\d+\.streamcloud\.eu:?\d*/.*/video\.(mp4|flv))",'
+
+
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = 1
+ self.resumeDownload = self.premium
+
+
+getInfo = create_getInfo(StreamcloudEu)
diff --git a/pyload/plugin/hoster/TurbobitNet.py b/pyload/plugin/hoster/TurbobitNet.py
new file mode 100644
index 000000000..2b0fb9ef5
--- /dev/null
+++ b/pyload/plugin/hoster/TurbobitNet.py
@@ -0,0 +1,173 @@
+# -*- coding: utf-8 -*-
+
+import random
+import re
+import time
+
+from Crypto.Cipher import ARC4
+from binascii import hexlify, unhexlify
+from pycurl import HTTPHEADER
+from urllib import quote
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo, timestamp
+
+
+class TurbobitNet(SimpleHoster):
+ __name = "TurbobitNet"
+ __type = "hoster"
+ __version = "0.16"
+
+ __pattern = r'http://(?:www\.)?turbobit\.net/(?:download/free/)?(?P<ID>\w+)'
+
+ __description = """Turbobit.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("prOq", "")]
+
+
+ URL_REPLACEMENTS = [(__pattern + ".*", "http://turbobit.net/\g<ID>.html")]
+
+ COOKIES = [("turbobit.net", "user_lang", "en")]
+
+ NAME_PATTERN = r'id="file-title">(?P<N>.+?)<'
+ SIZE_PATTERN = r'class="file-size">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+ OFFLINE_PATTERN = r'<h2>File Not Found</h2>|html\(\'File (?:was )?not found'
+
+ LINK_PATTERN = r'(?P<url>/download/redirect/[^"\']+)'
+ LIMIT_WAIT_PATTERN = r'<div id=\'timeout\'>(\d+)<'
+
+ CAPTCHA_PATTERN = r'<img alt="Captcha" src="(.+?)"'
+
+
+ def handleFree(self):
+ self.url = "http://turbobit.net/download/free/%s" % self.info['pattern']['ID']
+ self.html = self.load(self.url, ref=True, decode=True)
+
+ rtUpdate = self.getRtUpdate()
+
+ self.solveCaptcha()
+ self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
+ self.url = self.getDownloadUrl(rtUpdate)
+
+ self.wait()
+ self.html = self.load(self.url)
+ self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With:"])
+ self.downloadFile()
+
+
+ def solveCaptcha(self):
+ for _i in xrange(5):
+ m = re.search(self.LIMIT_WAIT_PATTERN, self.html)
+ if m:
+ wait_time = int(m.group(1))
+ self.wait(wait_time, wait_time > 60)
+ self.retry()
+
+ action, inputs = self.parseHtmlForm("action='#'")
+ if not inputs:
+ self.error(_("Captcha form not found"))
+ self.logDebug(inputs)
+
+ if inputs['captcha_type'] == 'recaptcha':
+ recaptcha = ReCaptcha(self)
+ inputs['recaptcha_challenge_field'], inputs['recaptcha_response_field'] = recaptcha.challenge()
+ else:
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m is None:
+ self.error(_("captcha"))
+ captcha_url = m.group(1)
+ inputs['captcha_response'] = self.decryptCaptcha(captcha_url)
+
+ self.logDebug(inputs)
+ self.html = self.load(self.url, post=inputs)
+
+ if '<div class="captcha-error">Incorrect, try again!<' in self.html:
+ self.invalidCaptcha()
+ else:
+ self.correctCaptcha()
+ break
+ else:
+ self.fail(_("Invalid captcha"))
+
+
+ def getRtUpdate(self):
+ rtUpdate = self.getStorage("rtUpdate")
+ if not rtUpdate:
+ if self.getStorage("version") != self.__version \
+ or int(self.getStorage("timestamp", 0)) + 86400000 < timestamp():
+ # that's right, we are even using jdownloader updates
+ rtUpdate = getURL("http://update0.jdownloader.org/pluginstuff/tbupdate.js")
+ rtUpdate = self.decrypt(rtUpdate.splitlines()[1])
+ # but we still need to fix the syntax to work with other engines than rhino
+ rtUpdate = re.sub(r'for each\(var (\w+) in(\[[^\]]+\])\)\{',
+ r'zza=\2;for(var zzi=0;zzi<zza.length;zzi++){\1=zza[zzi];', rtUpdate)
+ rtUpdate = re.sub(r"for\((\w+)=", r"for(var \1=", rtUpdate)
+
+ self.setStorage("rtUpdate", rtUpdate)
+ self.setStorage("timestamp", timestamp())
+ self.setStorage("version", self.__version)
+ else:
+ self.logError(_("Unable to download, wait for update..."))
+ self.tempOffline()
+
+ return rtUpdate
+
+
+ def getDownloadUrl(self, rtUpdate):
+ self.req.http.lastURL = self.url
+
+ m = re.search("(/\w+/timeout\.js\?\w+=)([^\"\'<>]+)", self.html)
+ if m:
+ url = "http://turbobit.net%s%s" % m.groups()
+ else:
+ url = "http://turbobit.net/files/timeout.js?ver=%s" % "".join(random.choice('0123456789ABCDEF') for _i in xrange(32))
+
+ fun = self.load(url)
+
+ self.setWait(65, False)
+
+ for b in [1, 3]:
+ self.jscode = "var id = \'%s\';var b = %d;var inn = \'%s\';%sout" % (
+ self.info['pattern']['ID'], b, quote(fun), rtUpdate)
+
+ try:
+ out = self.js.eval(self.jscode)
+ self.logDebug("URL", self.js.engine, out)
+ if out.startswith('/download/'):
+ return "http://turbobit.net%s" % out.strip()
+ except Exception, e:
+ self.logError(e)
+ else:
+ if self.retries >= 2:
+ # retry with updated js
+ self.delStorage("rtUpdate")
+ self.retry()
+
+
+ def decrypt(self, data):
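+ # RC4-decrypt a hex-encoded line fetched from the JDownloader update server (the binary key is hexlified before use)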
+ cipher = ARC4.new(hexlify('E\x15\xa1\x9e\xa3M\xa0\xc6\xa0\x84\xb6H\x83\xa8o\xa0'))
+ return unhexlify(cipher.encrypt(unhexlify(data)))
+
+
+ def getLocalTimeString(self):
+ lt = time.localtime()
+ tz = time.altzone if lt.tm_isdst else time.timezone
+ return "%s GMT%+03d%02d" % (time.strftime("%a %b %d %Y %H:%M:%S", lt), -tz // 3600, tz % 3600)
+
+
+ def handlePremium(self):
+ self.logDebug("Premium download as user %s" % self.user)
+ self.downloadFile()
+
+
+ def downloadFile(self):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("Download link not found"))
+ self.url = "http://turbobit.net" + m.group('url')
+ self.download(self.url)
+
+
+getInfo = create_getInfo(TurbobitNet)
diff --git a/pyload/plugin/hoster/TurbouploadCom.py b/pyload/plugin/hoster/TurbouploadCom.py
new file mode 100644
index 000000000..01b5d306f
--- /dev/null
+++ b/pyload/plugin/hoster/TurbouploadCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class TurbouploadCom(DeadHoster):
+ __name = "TurbouploadCom"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'http://(?:www\.)?turboupload\.com/(\w+).*'
+
+ __description = """Turboupload.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(TurbouploadCom)
diff --git a/pyload/plugin/hoster/TusfilesNet.py b/pyload/plugin/hoster/TusfilesNet.py
new file mode 100644
index 000000000..ff1540d9f
--- /dev/null
+++ b/pyload/plugin/hoster/TusfilesNet.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class TusfilesNet(XFSHoster):
+ __name = "TusfilesNet"
+ __type = "hoster"
+ __version = "0.07"
+
+ __pattern = r'https?://(?:www\.)?tusfiles\.net/\w{12}'
+
+ __description = """Tusfiles.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com"),
+ ("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "tusfiles.net"
+
+ INFO_PATTERN = r'\](?P<N>.+) - (?P<S>[\d.,]+) (?P<U>[\w^_]+)\['
+ OFFLINE_PATTERN = r'>File Not Found|<Title>TusFiles - Fast Sharing Files!'
+
+
+ def setup(self):
+ self.multiDL = False
+ self.chunkLimit = -1
+ self.resumeDownload = True
+
+
+ def handlePremium(self):
+ return self.handleFree()
+
+
+getInfo = create_getInfo(TusfilesNet)
diff --git a/pyload/plugin/hoster/TwoSharedCom.py b/pyload/plugin/hoster/TwoSharedCom.py
new file mode 100644
index 000000000..6d16ae1a6
--- /dev/null
+++ b/pyload/plugin/hoster/TwoSharedCom.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class TwoSharedCom(SimpleHoster):
+ __name = "TwoSharedCom"
+ __type = "hoster"
+ __version = "0.12"
+
+ __pattern = r'http://(?:www\.)?2shared\.com/(account/)?(download|get|file|document|photo|video|audio)/.*'
+
+ __description = """2Shared.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<h1>(?P<N>.*)</h1>'
+ SIZE_PATTERN = r'<span class="dtitle">File size:</span>\s*(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+ OFFLINE_PATTERN = r'The file link that you requested is not valid\.|This file was deleted\.'
+
+ LINK_PATTERN = r'window.location =\'(.+?)\';'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def handleFree(self):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("Download link"))
+
+ link = m.group(1)
+ self.download(link)
+
+
+getInfo = create_getInfo(TwoSharedCom)
diff --git a/pyload/plugin/hoster/UlozTo.py b/pyload/plugin/hoster/UlozTo.py
new file mode 100644
index 000000000..1e5317950
--- /dev/null
+++ b/pyload/plugin/hoster/UlozTo.py
@@ -0,0 +1,164 @@
+# -*- coding: utf-8 -*-
+
+import re
+import time
+
+from pyload.utils import json_loads
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+def convertDecimalPrefix(m):
+ # decimal prefixes used in filesize and traffic
+ return ("%%.%df" % {'k': 3, 'M': 6, 'G': 9}[m.group(2)] % float(m.group(1))).replace('.', '')
+
+
+class UlozTo(SimpleHoster):
+ __name = "UlozTo"
+ __type = "hoster"
+ __version = "1.00"
+
+ __pattern = r'http://(?:www\.)?(uloz\.to|ulozto\.(cz|sk|net)|bagruj\.cz|zachowajto\.pl)/(?:live/)?(?P<id>\w+/[^/?]*)'
+
+ __description = """Uloz.to hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ INFO_PATTERN = r'<p>File <strong>(?P<N>[^<]+)</strong> is password protected</p>'
+ NAME_PATTERN = r'<title>(?P<N>[^<]+) \| Uloz\.to</title>'
+ SIZE_PATTERN = r'<span id="fileSize">.*?(?P<S>[\d.,]+\s[kMG]?B)</span>'
+ OFFLINE_PATTERN = r'<title>404 - Page not found</title>|<h1 class="h1">File (has been deleted|was banned)</h1>'
+
+ SIZE_REPLACEMENTS = [('([\d.]+)\s([kMG])B', convertDecimalPrefix)]
+ URL_REPLACEMENTS = [(r"(?<=http://)([^/]+)", "www.ulozto.net")]
+
+ ADULT_PATTERN = r'<form action="(?P<link>[^\"]*)" method="post" id="frm-askAgeForm">'
+ PASSWD_PATTERN = r'<div class="passwordProtectedFile">'
+ VIPLINK_PATTERN = r'<a href="[^"]*\?disclaimer=1" class="linkVip">'
+ FREE_URL_PATTERN = r'<div class="freeDownloadForm"><form action="([^"]+)"'
+ PREMIUM_URL_PATTERN = r'<div class="downloadForm"><form action="([^"]+)"'
+ TOKEN_PATTERN = r'<input type="hidden" name="_token_" id="[^\"]*" value="(?P<token>.+?)"'
+
+
+ def setup(self):
+ self.multiDL = self.premium
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ pyfile.url = re.sub(r"(?<=http://)([^/]+)", "www.ulozto.net", pyfile.url)
+ self.html = self.load(pyfile.url, decode=True, cookies=True)
+
+ if re.search(self.ADULT_PATTERN, self.html):
+ self.logInfo(_("Adult content confirmation needed"))
+
+ m = re.search(self.TOKEN_PATTERN, self.html)
+ if m is None:
+ self.error(_("TOKEN_PATTERN not found"))
+ token = m.group(1)
+
+ self.html = self.load(pyfile.url, get={"do": "askAgeForm-submit"},
+ post={"agree": "Confirm", "_token_": token}, cookies=True)
+
+ passwords = self.getPassword().splitlines()
+ while self.PASSWD_PATTERN in self.html:
+ if passwords:
+ password = passwords.pop(0)
+ self.logInfo(_("Password protected link, trying ") + password)
+ self.html = self.load(pyfile.url, get={"do": "passwordProtectedForm-submit"},
+ post={"password": password, "password_send": 'Send'}, cookies=True)
+ else:
+ self.fail(_("No or incorrect password"))
+
+ if re.search(self.VIPLINK_PATTERN, self.html):
+ self.html = self.load(pyfile.url, get={"disclaimer": "1"})
+
+ self.getFileInfo()
+
+ if self.premium and self.checkTrafficLeft():
+ self.handlePremium()
+ else:
+ self.handleFree()
+
+ self.doCheckDownload()
+
+
+ def handleFree(self):
+ action, inputs = self.parseHtmlForm('id="frm-downloadDialog-freeDownloadForm"')
+ if not action or not inputs:
+ self.error(_("Free download form not found"))
+
+ self.logDebug("inputs.keys = " + str(inputs.keys()))
+ # get and decrypt captcha
+ if all(key in inputs for key in ("captcha_value", "captcha_id", "captcha_key")):
+ # Old version - last seen 9.12.2013
+ self.logDebug('Using "old" version')
+
+ captcha_value = self.decryptCaptcha("http://img.uloz.to/captcha/%s.png" % inputs['captcha_id'])
+ self.logDebug("CAPTCHA ID: " + inputs['captcha_id'] + ", CAPTCHA VALUE: " + captcha_value)
+
+ inputs.update({'captcha_id': inputs['captcha_id'], 'captcha_key': inputs['captcha_key'], 'captcha_value': captcha_value})
+
+ elif all(key in inputs for key in ("captcha_value", "timestamp", "salt", "hash")):
+ # New version - better to get new parameters (like captcha reload) because of image url - since 6.12.2013
+ self.logDebug('Using "new" version')
+
+ xapca = self.load("http://www.ulozto.net/reloadXapca.php", get={"rnd": str(int(time.time()))})
+ self.logDebug("xapca = " + str(xapca))
+
+ data = json_loads(xapca)
+ captcha_value = self.decryptCaptcha(str(data['image']))
+ self.logDebug("CAPTCHA HASH: " + data['hash'], "CAPTCHA SALT: " + str(data['salt']), "CAPTCHA VALUE: " + captcha_value)
+
+ inputs.update({'timestamp': data['timestamp'], 'salt': data['salt'], 'hash': data['hash'], 'captcha_value': captcha_value})
+ else:
+ self.error(_("CAPTCHA form changed"))
+
+ self.multiDL = True
+ self.download("http://www.ulozto.net" + action, post=inputs, cookies=True, disposition=True)
+
+
+ def handlePremium(self):
+ self.download(self.pyfile.url + "?do=directDownload", disposition=True)
+ #parsed_url = self.findDownloadURL(premium=True)
+ #self.download(parsed_url, post={"download": "Download"})
+
+
+ def findDownloadURL(self, premium=False):
+ msg = _("%s link" % ("Premium" if premium else "Free"))
+ m = re.search(self.PREMIUM_URL_PATTERN if premium else self.FREE_URL_PATTERN, self.html)
+ if m is None:
+ self.error(msg)
+ parsed_url = "http://www.ulozto.net" + m.group(1)
+ self.logDebug("%s: %s" % (msg, parsed_url))
+ return parsed_url
+
+
+ def doCheckDownload(self):
+ check = self.checkDownload({
+ "wrong_captcha": re.compile(r'<ul class="error">\s*<li>Error rewriting the text.</li>'),
+ "offline": re.compile(self.OFFLINE_PATTERN),
+ "passwd": self.PASSWD_PATTERN,
+ "server_error": 'src="http://img.ulozto.cz/error403/vykricnik.jpg"', # paralell dl, server overload etc.
+ "not_found": "<title>UloÅŸ.to</title>"
+ })
+
+ if check == "wrong_captcha":
+ #self.delStorage("captcha_id")
+ #self.delStorage("captcha_text")
+ self.invalidCaptcha()
+ self.retry(reason=_("Wrong captcha code"))
+ elif check == "offline":
+ self.offline()
+ elif check == "passwd":
+ self.fail(_("Wrong password"))
+ elif check == "server_error":
+ self.logError(_("Server error, try downloading later"))
+ self.multiDL = False
+ self.wait(1 * 60 * 60, True)
+ self.retry()
+ elif check == "not_found":
+ self.fail(_("Server error - file not downloadable"))
+
+
+getInfo = create_getInfo(UlozTo)
diff --git a/pyload/plugin/hoster/UloziskoSk.py b/pyload/plugin/hoster/UloziskoSk.py
new file mode 100644
index 000000000..ab16c1da7
--- /dev/null
+++ b/pyload/plugin/hoster/UloziskoSk.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class UloziskoSk(SimpleHoster):
+ __name = "UloziskoSk"
+ __type = "hoster"
+ __version = "0.24"
+
+ __pattern = r'http://(?:www\.)?ulozisko\.sk/.*'
+
+ __description = """Ulozisko.sk hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<div class="down1">(?P<N>[^<]+)</div>'
+ SIZE_PATTERN = ur'Veľkosť súboru: <strong>(?P<S>[\d.,]+) (?P<U>[\w^_]+)</strong><br />'
+ OFFLINE_PATTERN = ur'<span class = "red">Zadaný súbor neexistuje z jedného z nasledujúcich dôvodov:</span>'
+
+ LINK_PATTERN = r'<form name = "formular" action = "([^"]+)" method = "post">'
+ ID_PATTERN = r'<input type = "hidden" name = "id" value = "([^"]+)" />'
+ CAPTCHA_PATTERN = r'<img src="(/obrazky/obrazky\.php\?fid=[^"]+)" alt="" />'
+ IMG_PATTERN = ur'<strong>PRE ZVÄČŠENIE KLIKNITE NA OBRÁZOK</strong><br /><a href = "([^"]+)">'
+
+
+ def process(self, pyfile):
+ self.html = self.load(pyfile.url, decode=True)
+ self.getFileInfo()
+
+ m = re.search(self.IMG_PATTERN, self.html)
+ if m:
+ url = "http://ulozisko.sk" + m.group(1)
+ self.download(url)
+ else:
+ self.handleFree()
+
+
+ def handleFree(self):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("LINK_PATTERN not found"))
+ parsed_url = 'http://www.ulozisko.sk' + m.group(1)
+
+ m = re.search(self.ID_PATTERN, self.html)
+ if m is None:
+ self.error(_("ID_PATTERN not found"))
+ id = m.group(1)
+
+ self.logDebug("URL:" + parsed_url + ' ID:' + id)
+
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m is None:
+ self.error(_("CAPTCHA_PATTERN not found"))
+ captcha_url = 'http://www.ulozisko.sk' + m.group(1)
+
+ captcha = self.decryptCaptcha(captcha_url, cookies=True)
+
+ self.logDebug("CAPTCHA_URL:" + captcha_url + ' CAPTCHA:' + captcha)
+
+ self.download(parsed_url, post={
+ "antispam": captcha,
+ "id": id,
+ "name": self.pyfile.name,
+ "but": "++++STIAHNI+S%DABOR++++"
+ })
+
+
+getInfo = create_getInfo(UloziskoSk)
diff --git a/pyload/plugin/hoster/UnibytesCom.py b/pyload/plugin/hoster/UnibytesCom.py
new file mode 100644
index 000000000..8d1ca0344
--- /dev/null
+++ b/pyload/plugin/hoster/UnibytesCom.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class UnibytesCom(SimpleHoster):
+ __name = "UnibytesCom"
+ __type = "hoster"
+ __version = "0.11"
+
+ __pattern = r'https?://(?:www\.)?unibytes\.com/[\w .-]{11}B'
+
+ __description = """UniBytes.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ HOSTER_DOMAIN = "unibytes.com"
+
+ INFO_PATTERN = r'<span[^>]*?id="fileName"[^>]*>(?P<N>[^>]+)</span>\s*\((?P<S>\d.*?)\)'
+
+ WAIT_PATTERN = r'Wait for <span id="slowRest">(\d+)</span> sec'
+ LINK_PATTERN = r'<a href="([^"]+)">Download</a>'
+
+
+ def handleFree(self):
+ domain = "http://www.%s/" % self.HOSTER_DOMAIN
+ action, post_data = self.parseHtmlForm('id="startForm"')
+
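+ # step through the multi-step download form (timer -> captcha -> last) until the server redirects to the file or reveals the final link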
+ for _i in xrange(8):
+ self.logDebug(action, post_data)
+ self.html = self.load(urljoin(domain, action), post=post_data, follow_location=False)
+
+ m = re.search(r'location:\s*(\S+)', self.req.http.header, re.I)
+ if m:
+ url = m.group(1)
+ break
+
+ if '>Somebody else is already downloading using your IP-address<' in self.html:
+ self.wait(10 * 60, True)
+ self.retry()
+
+ if post_data['step'] == 'last':
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m:
+ url = m.group(1)
+ self.correctCaptcha()
+ break
+ else:
+ self.invalidCaptcha()
+
+ last_step = post_data['step']
+ action, post_data = self.parseHtmlForm('id="stepForm"')
+
+ if last_step == 'timer':
+ m = re.search(self.WAIT_PATTERN, self.html)
+ self.wait(int(m.group(1)) if m else 60, False)
+ elif last_step in ("captcha", "last"):
+ post_data['captcha'] = self.decryptCaptcha(urljoin(domain, "/captcha.jpg"))
+ else:
+ self.fail(_("No valid captcha code entered"))
+
+ self.download(url)
+
+
+getInfo = create_getInfo(UnibytesCom)
diff --git a/pyload/plugin/hoster/UnrestrictLi.py b/pyload/plugin/hoster/UnrestrictLi.py
new file mode 100644
index 000000000..a1d3f1d75
--- /dev/null
+++ b/pyload/plugin/hoster/UnrestrictLi.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from datetime import datetime, timedelta
+
+from pyload.utils import json_loads
+from pyload.plugin.Hoster import Hoster
+
+
+def secondsToMidnight(gmt=0):
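+ # seconds until the next 00:10 in the timezone given by the gmt offset; used to wait out daily download limits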
+ now = datetime.utcnow() + timedelta(hours=gmt)
+ if now.hour == 0 and now.minute < 10:
+ midnight = now
+ else:
+ midnight = now + timedelta(days=1)
+ midnight = midnight.replace(hour=0, minute=10, second=0, microsecond=0)
+ return int((midnight - now).total_seconds())
+
+
+class UnrestrictLi(Hoster):
+ __name = "UnrestrictLi"
+ __type = "hoster"
+ __version = "0.12"
+
+ __pattern = r'https?://(?:[^/]*\.)?(unrestrict|unr)\.li'
+
+ __description = """Unrestrict.li hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ def setup(self):
+ self.chunkLimit = 16
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ if re.match(self.__pattern, pyfile.url):
+ new_url = pyfile.url
+ elif not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "Unrestrict.li")
+ self.fail(_("No Unrestrict.li account provided"))
+ else:
+ self.logDebug("Old URL: %s" % pyfile.url)
+ for _i in xrange(5):
+ page = self.load('https://unrestrict.li/unrestrict.php',
+ post={'link': pyfile.url, 'domain': 'long'})
+ self.logDebug("JSON data: " + page)
+ if page != '':
+ break
+ else:
+ self.logInfo(_("Unable to get API data, waiting 1 minute and retry"))
+ self.retry(5, 60, "Unable to get API data")
+
+ if 'Expired session' in page or ("You are not allowed to "
+ "download from this host" in page and self.premium):
+ self.account.relogin(self.user)
+ self.retry()
+ elif "File offline" in page:
+ self.offline()
+ elif "You are not allowed to download from this host" in page:
+ self.fail(_("You are not allowed to download from this host"))
+ elif "You have reached your daily limit for this host" in page:
+ self.logWarning(_("Reached daily limit for this host"))
+ self.retry(5, secondsToMidnight(gmt=2), "Daily limit for this host reached")
+ elif "ERROR_HOSTER_TEMPORARILY_UNAVAILABLE" in page:
+ self.logInfo(_("Hoster temporarily unavailable, waiting 1 minute and retry"))
+ self.retry(5, 60, "Hoster is temporarily unavailable")
+ page = json_loads(page)
+ new_url = page.keys()[0]
+ self.api_data = page[new_url]
+
+ if new_url != pyfile.url:
+ self.logDebug("New URL: " + new_url)
+
+ if hasattr(self, 'api_data'):
+ self.setNameSize()
+
+ self.download(new_url, disposition=True)
+
+ if self.getConfig("history"):
+ self.load("https://unrestrict.li/history/", get={'delete': "all"})
+ self.logInfo(_("Download history deleted"))
+
+
+ def setNameSize(self):
+ if 'name' in self.api_data:
+ self.pyfile.name = self.api_data['name']
+ if 'size' in self.api_data:
+ self.pyfile.size = self.api_data['size']
diff --git a/pyload/plugin/hoster/UpleaCom.py b/pyload/plugin/hoster/UpleaCom.py
new file mode 100644
index 000000000..d4931224a
--- /dev/null
+++ b/pyload/plugin/hoster/UpleaCom.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class UpleaCom(XFSHoster):
+ __name = "UpleaCom"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'https?://(?:www\.)?uplea\.com/dl/\w{15}'
+
+ __description = """Uplea.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Redleon", "")]
+
+
+ HOSTER_DOMAIN = "uplea.com"
+
+ NAME_PATTERN = r'class="agmd size18">(?P<N>.+?)<'
+ SIZE_PATTERN = r'size14">(?P<S>[\d.,]+) (?P<U>[\w^_])</span>'
+
+ OFFLINE_PATTERN = r'>You followed an invalid or expired link'
+
+ LINK_PATTERN = r'"(http?://\w+\.uplea\.com/anonym/.*?)"'
+ WAIT_PATTERN = r'timeText:([\d.]+),'
+ STEP_PATTERN = r'<a href="(/step/.+)">'
+
+
+ def setup(self):
+ self.multiDL = False
+ self.chunkLimit = 1
+ self.resumeDownload = True
+
+
+ def handleFree(self):
+ m = re.search(self.STEP_PATTERN, self.html)
+ if m is None:
+ self.error("STEP_PATTERN not found")
+
+ self.html = self.load(urljoin("http://uplea.com/", m.group(1)))
+
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ self.wait(int(m.group(1)), True)
+ self.retry()
+
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error("LINK_PATTERN not found")
+
+ self.wait(15)
+ self.download(m.group(1), disposition=True)
+
+
+getInfo = create_getInfo(UpleaCom)
diff --git a/pyload/plugin/hoster/UploadStationCom.py b/pyload/plugin/hoster/UploadStationCom.py
new file mode 100644
index 000000000..7d3d806e2
--- /dev/null
+++ b/pyload/plugin/hoster/UploadStationCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class UploadStationCom(DeadHoster):
+ __name = "UploadStationCom"
+ __type = "hoster"
+ __version = "0.52"
+
+ __pattern = r'http://(?:www\.)?uploadstation\.com/file/(?P<id>\w+)'
+
+ __description = """UploadStation.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(UploadStationCom)
diff --git a/pyload/plugin/hoster/UploadableCh.py b/pyload/plugin/hoster/UploadableCh.py
new file mode 100644
index 000000000..5cb443d99
--- /dev/null
+++ b/pyload/plugin/hoster/UploadableCh.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import sleep
+
+from pyload.plugin.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class UploadableCh(SimpleHoster):
+ __name = "UploadableCh"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?uploadable\.ch/file/(?P<ID>\w+)'
+
+ __description = """Uploadable.ch hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zapp-brannigan", "fuerst.reinje@web.de"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ FILE_INFO_PATTERN = r'div id=\"file_name\" title=.*>(?P<N>.+)<span class=\"filename_normal\">\((?P<S>[\d.]+) (?P<U>\w+)\)</span><'
+
+ OFFLINE_PATTERN = r'>(File not available|This file is no longer available)'
+ TEMP_OFFLINE_PATTERN = r'<div class="icon_err">'
+
+ WAIT_PATTERN = r'data-time="(\d+)" data-format'
+
+ FILE_URL_REPLACEMENTS = [(__pattern + ".*", r'http://www.uploadable.ch/file/\g<ID>')]
+
+
+ def setup(self):
+ self.multiDL = False
+ self.chunkLimit = 1
+
+
+ def handleFree(self):
+ # Click the "free user" button and wait
+ a = self.load(self.pyfile.url, cookies=True, post={'downloadLink': "wait"}, decode=True)
+ self.logDebug(a)
+
+ m = re.search(self.WAIT_PATTERN, a)
+ if m is not None:
+ self.wait(int(m.group(1))) #: Expected output: {"waitTime":30}
+ else:
+ self.error("WAIT_PATTERN")
+
+ # Make the recaptcha appear and show it in the pyload interface
+ b = self.load(self.pyfile.url, cookies=True, post={'checkDownload': "check"}, decode=True)
+ self.logDebug(b) #: Expected output: {"success":"showCaptcha"}
+
+ recaptcha = ReCaptcha(self)
+
+ challenge, response = recaptcha.challenge(self.RECAPTCHA_KEY)
+
+ # Submit the captcha solution
+ self.load("http://www.uploadable.ch/checkReCaptcha.php",
+ cookies=True,
+ post={'recaptcha_challenge_field' : challenge,
+ 'recaptcha_response_field' : response,
+ 'recaptcha_shortencode_field': self.info['ID']},
+ decode=True)
+
+ self.wait(3)
+
+ # Get ready for downloading
+ self.load(self.pyfile.url, cookies=True, post={'downloadLink': "show"}, decode=True)
+
+ self.wait(3)
+
+ # Download the file
+ self.download(self.pyfile.url, cookies=True, post={'download': "normal"}, disposition=True)
+
+
+ def checkFile(self):
+ check = self.checkDownload({'wait_or_reconnect': re.compile("Please wait for"),
+ 'is_html' : re.compile("<head>")})
+
+ if check == "wait_or_reconnect":
+ self.logInfo("Downloadlimit reached, please wait or reconnect")
+ self.wait(60 * 60, True)
+ self.retry()
+
+ elif check == "is_html":
+ self.error("Downloaded file is an html file")
+
+
+getInfo = create_getInfo(UploadableCh)
diff --git a/pyload/plugin/hoster/UploadboxCom.py b/pyload/plugin/hoster/UploadboxCom.py
new file mode 100644
index 000000000..dcb966e8b
--- /dev/null
+++ b/pyload/plugin/hoster/UploadboxCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class UploadboxCom(DeadHoster):
+ __name = "Uploadbox"
+ __type = "hoster"
+ __version = "0.05"
+
+ __pattern = r'http://(?:www\.)?uploadbox\.com/files/.+'
+
+ __description = """UploadBox.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(UploadboxCom)
diff --git a/pyload/plugin/hoster/UploadedTo.py b/pyload/plugin/hoster/UploadedTo.py
new file mode 100644
index 000000000..16420d361
--- /dev/null
+++ b/pyload/plugin/hoster/UploadedTo.py
@@ -0,0 +1,245 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://ul.to/044yug9o
+# http://ul.to/gzfhd0xs
+
+import re
+
+from time import sleep
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Hoster import Hoster
+from pyload.plugin.Plugin import chunks
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.utils import html_unescape, parseFileSize
+
+
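+# uploaded.net API key (stored base64-encoded)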
+key = "bGhGMkllZXByd2VEZnU5Y2NXbHhYVlZ5cEE1bkEzRUw=".decode('base64')
+
+
+def getID(url):
+ """ returns id from file url"""
+ m = re.match(UploadedTo.__pattern, url)
+ return m.group('ID')
+
+
+def getAPIData(urls):
+ post = {"apikey": key}
+
+ idMap = {}
+
+ for i, url in enumerate(urls):
+ id = getID(url)
+ post['id_%s' % i] = id
+ idMap[id] = url
+
+ for _i in xrange(5):
+ api = unicode(getURL("http://uploaded.net/api/filemultiple", post=post, decode=False), 'iso-8859-1')
+ if api != "can't find request":
+ break
+ else:
+ sleep(3)
+
+ result = {}
+
+ if api:
+ for line in api.splitlines():
+ data = line.split(",", 4)
+ if data[1] in idMap:
+ result[data[1]] = (data[0], data[2], data[4], data[3], idMap[data[1]])
+
+ return result
+
+
+def parseFileInfo(self, url='', html=''):
+ if not html and hasattr(self, "html"):
+ html = self.html
+
+ name = url
+ size = 0
+ fileid = None
+
+ if re.search(self.OFFLINE_PATTERN, html):
+ # File offline
+ status = 1
+ else:
+ m = re.search(self.INFO_PATTERN, html)
+ if m:
+ name, fileid = html_unescape(m.group('N')), m.group('ID')
+ size = parseFileSize(m.group('S'))
+ status = 2
+ else:
+ status = 3
+
+ return name, size, status, fileid
+
+
+def getInfo(urls):
+ for chunk in chunks(urls, 80):
+ result = []
+
+ api = getAPIData(chunk)
+
+ for data in api.itervalues():
+ if data[0] == "online":
+ result.append((html_unescape(data[2]), data[1], 2, data[4]))
+
+ elif data[0] == "offline":
+ result.append((data[4], 0, 1, data[4]))
+
+ yield result
+
+
+class UploadedTo(Hoster):
+ __name = "UploadedTo"
+ __type = "hoster"
+ __version = "0.75"
+
+ __pattern = r'https?://(?:www\.)?(uploaded\.(to|net)|ul\.to)(/file/|/?\?id=|.*?&id=|/)(?P<ID>\w+)'
+
+ __description = """Uploaded.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org"),
+ ("mkaay", "mkaay@mkaay.de"),
+ ("zoidberg", "zoidberg@mujmail.cz"),
+ ("netpok", "netpok@gmail.com"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ INFO_PATTERN = r'<a href="file/(?P<ID>\w+)" id="filename">(?P<N>[^<]+)</a> &nbsp;\s*<small[^>]*>(?P<S>[^<]+)</small>'
+ OFFLINE_PATTERN = r'<small class="cL">Error: 404</small>'
+ DL_LIMIT_PATTERN = r'You have reached the max. number of possible free downloads for this hour'
+
+
+ def setup(self):
+ self.multiDL = self.resumeDownload = self.premium
+ self.chunkLimit = 1 # critical problems with more chunks
+
+ self.fileID = getID(self.pyfile.url)
+ self.pyfile.url = "http://uploaded.net/file/%s" % self.fileID
+
+
+ def process(self, pyfile):
+ self.load("http://uploaded.net/language/en", just_header=True)
+
+ api = getAPIData([pyfile.url])
+
+ # TODO: fallback to parse from site, because api sometimes delivers wrong status codes
+
+ if not api:
+ self.logWarning(_("No response for API call"))
+
+ self.html = unicode(self.load(pyfile.url, decode=False), 'iso-8859-1')
+ name, size, status, self.fileID = parseFileInfo(self)
+ self.logDebug(name, size, status, self.fileID)
+ if status == 1:
+ self.offline()
+ elif status == 2:
+ pyfile.name, pyfile.size = name, size
+ else:
+ self.error(_("file info"))
+
+ elif api == 'Access denied':
+ self.fail(_("API key invalid"))
+
+ else:
+ if self.fileID not in api:
+ self.offline()
+
+ self.data = api[self.fileID]
+ if self.data[0] != "online":
+ self.offline()
+
+ pyfile.name = html_unescape(self.data[2])
+
+ # pyfile.name = self.get_file_name()
+
+ if self.premium:
+ self.handlePremium()
+ else:
+ self.handleFree()
+
+
+ def handlePremium(self):
+ info = self.account.getAccountInfo(self.user, True)
+ self.logDebug("%(name)s: Use Premium Account (%(left)sGB left)" % {"name": self.__name,
+ "left": info['trafficleft'] / 1024 / 1024})
+ if int(self.data[1]) / 1024 > info['trafficleft']:
+ self.logInfo(_("Not enough traffic left"))
+ self.account.empty(self.user)
+ self.resetAccount()
+ self.fail(_("Traffic exceeded"))
+
+ header = self.load("http://uploaded.net/file/%s" % self.fileID, just_header=True)
+ if 'location' in header:
+ #Direct download
+ self.logDebug("Direct download link detected")
+ self.download(header['location'])
+ else:
+ #Indirect download
+ self.html = self.load("http://uploaded.net/file/%s" % self.fileID)
+ m = re.search(r'<div class="tfree".*\s*<form method="post" action="(.*?)"', self.html)
+ if m is None:
+ self.fail(_("Download URL not m. Try to enable direct downloads"))
+ url = m.group(1)
+ self.download(url, post={})
+
+
+ def handleFree(self):
+ self.html = self.load(self.pyfile.url, decode=True)
+
+ if 'var free_enabled = false;' in self.html:
+ self.logError(_("Free-download capacities exhausted"))
+ self.retry(24, 5 * 60)
+
+ m = re.search(r"Current waiting period: <span>(\d+)</span> seconds", self.html)
+ if m is None:
+ self.fail(_("File not downloadable for free users"))
+ self.setWait(int(m.group(1)))
+
+ self.html = self.load("http://uploaded.net/js/download.js", decode=True)
+
+ url = "http://uploaded.net/io/ticket/captcha/%s" % self.fileID
+ downloadURL = ""
+
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(5):
+ challenge, response = recaptcha.challenge()
+ options = {"recaptcha_challenge_field": challenge, "recaptcha_response_field": response}
+ self.wait()
+
+ result = self.load(url, post=options)
+ self.logDebug("Result: %s" % result)
+
+ if "limit-size" in result:
+ self.fail(_("File too big for free download"))
+ elif "limit-slot" in result: # Temporary restriction so just wait a bit
+ self.setWait(30 * 60, True)
+ self.wait()
+ self.retry()
+ elif "limit-parallel" in result:
+ self.fail(_("Cannot download in parallel"))
+ elif "limit-dl" in result or self.DL_LIMIT_PATTERN in result: # limit-dl
+ self.setWait(3 * 60 * 60, True)
+ self.wait()
+ self.retry()
+ elif '"err":"captcha"' in result:
+ self.invalidCaptcha()
+ elif "type:'download'" in result:
+ self.correctCaptcha()
+ downloadURL = re.search("url:'([^']+)", result).group(1)
+ break
+ else:
+ self.error(_("Unknown error: %s") % result)
+
+ if not downloadURL:
+ self.fail(_("No Download url retrieved/all captcha attempts failed"))
+
+ self.download(downloadURL, disposition=True)
+ check = self.checkDownload({"limit-dl": self.DL_LIMIT_PATTERN})
+ if check == "limit-dl":
+ self.setWait(3 * 60 * 60, True)
+ self.wait()
+ self.retry()
diff --git a/pyload/plugin/hoster/UploadhereCom.py b/pyload/plugin/hoster/UploadhereCom.py
new file mode 100644
index 000000000..dd168bc3b
--- /dev/null
+++ b/pyload/plugin/hoster/UploadhereCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class UploadhereCom(DeadHoster):
+ __name = "UploadhereCom"
+ __type = "hoster"
+ __version = "0.12"
+
+ __pattern = r'http://(?:www\.)?uploadhere\.com/\w{10}'
+
+ __description = """Uploadhere.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(UploadhereCom)
diff --git a/pyload/plugin/hoster/UploadheroCom.py b/pyload/plugin/hoster/UploadheroCom.py
new file mode 100644
index 000000000..36b8a8b7d
--- /dev/null
+++ b/pyload/plugin/hoster/UploadheroCom.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://uploadhero.co/dl/wQBRAVSM
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class UploadheroCom(SimpleHoster):
+ __name = "UploadheroCom"
+ __type = "hoster"
+ __version = "0.16"
+
+ __pattern = r'http://(?:www\.)?uploadhero\.com?/dl/\w+'
+
+ __description = """UploadHero.co plugin"""
+ __license = "GPLv3"
+ __authors = [("mcmyst", "mcmyst@hotmail.fr"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'<div class="nom_de_fichier">(?P<N>.*?)</div>'
+ SIZE_PATTERN = r'Taille du fichier : </span><strong>(?P<S>.*?)</strong>'
+ OFFLINE_PATTERN = r'<p class="titre_dl_2">|<div class="raison"><strong>Le lien du fichier ci-dessus n\'existe plus.'
+
+ COOKIES = [("uploadhero.co", "lang", "en")]
+
+ IP_BLOCKED_PATTERN = r'href="(/lightbox_block_download\.php\?min=.*?)"'
+ IP_WAIT_PATTERN = r'<span id="minutes">(\d+)</span>.*\s*<span id="seconds">(\d+)</span>'
+
+ CAPTCHA_PATTERN = r'"(/captchadl\.php\?\w+)"'
+ FREE_URL_PATTERN = r'var magicomfg = \'<a href="(http://[^<>"]*?)"|"(http://storage\d+\.uploadhero\.co/\?d=\w+/[^<>"/]+)"'
+ PREMIUM_URL_PATTERN = r'<a href="([^"]+)" id="downloadnow"'
+
+
+ def handleFree(self):
+ self.checkErrors()
+
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m is None:
+ self.error(_("CAPTCHA_PATTERN not found"))
+ captcha_url = "http://uploadhero.co" + m.group(1)
+
+ for _i in xrange(5):
+ captcha = self.decryptCaptcha(captcha_url)
+ self.html = self.load(self.pyfile.url, get={"code": captcha})
+ m = re.search(self.FREE_URL_PATTERN, self.html)
+ if m:
+ self.correctCaptcha()
+ download_url = m.group(1) or m.group(2)
+ break
+ else:
+ self.invalidCaptcha()
+ else:
+ self.fail(_("No valid captcha code entered"))
+
+ self.download(download_url)
+
+
+ def handlePremium(self):
+ self.logDebug("%s: Use Premium Account" % self.__name)
+ link = re.search(self.PREMIUM_URL_PATTERN, self.html).group(1)
+ self.download(link)
+
+
+ def checkErrors(self):
+ m = re.search(self.IP_BLOCKED_PATTERN, self.html)
+ if m:
+ self.html = self.load("http://uploadhero.co%s" % m.group(1))
+
+ m = re.search(self.IP_WAIT_PATTERN, self.html)
+ wait_time = (int(m.group(1)) * 60 + int(m.group(2))) if m else 5 * 60
+ self.wait(wait_time, True)
+ self.retry()
+
+ self.info.pop('error', None)
+
+
+getInfo = create_getInfo(UploadheroCom)
diff --git a/pyload/plugin/hoster/UploadingCom.py b/pyload/plugin/hoster/UploadingCom.py
new file mode 100644
index 000000000..3a92d5f20
--- /dev/null
+++ b/pyload/plugin/hoster/UploadingCom.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pycurl import HTTPHEADER
+
+from pyload.utils import json_loads
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo, timestamp
+
+
+class UploadingCom(SimpleHoster):
+ __name = "UploadingCom"
+ __type = "hoster"
+ __version = "0.39"
+
+ __pattern = r'http://(?:www\.)?uploading\.com/files/(?:get/)?(?P<ID>\w+)'
+
+ __description = """Uploading.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de"),
+ ("mkaay", "mkaay@mkaay.de"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'id="file_title">(?P<N>.+)</'
+ SIZE_PATTERN = r'size tip_container">(?P<S>[\d.,]+) (?P<U>[\w^_]+)<'
+ OFFLINE_PATTERN = r'(Page|file) not found'
+
+ COOKIES = [("uploading.com", "lang", "1"),
+ (".uploading.com", "language", "1"),
+ (".uploading.com", "setlang", "en"),
+ (".uploading.com", "_lang", "en")]
+
+
+ def process(self, pyfile):
+ if not "/get/" in pyfile.url:
+ pyfile.url = pyfile.url.replace("/files", "/files/get")
+
+ self.html = self.load(pyfile.url, decode=True)
+ self.getFileInfo()
+
+ if self.premium:
+ self.handlePremium()
+ else:
+ self.handleFree()
+
+
+ def handlePremium(self):
+ postData = {'action': 'get_link',
+ 'code': self.info['pattern']['ID'],
+ 'pass': 'undefined'}
+
+ self.html = self.load('http://uploading.com/files/get/?JsHttpRequest=%d-xml' % timestamp(), post=postData)
+ url = re.search(r'"link"\s*:\s*"(.*?)"', self.html)
+ if url:
+ url = url.group(1).replace("\\/", "/")
+ self.download(url)
+
+ raise Exception("Plugin defect")
+
+
+ def handleFree(self):
+ m = re.search('<h2>((Daily )?Download Limit)</h2>', self.html)
+ if m:
+ self.pyfile.error = m.group(1)
+ self.logWarning(self.pyfile.error)
+ self.retry(6, (6 * 60 if m.group(2) else 15) * 60, self.pyfile.error)
+
+ ajax_url = "http://uploading.com/files/get/?ajax"
+ self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
+ self.req.http.lastURL = self.pyfile.url
+
+ res = json_loads(self.load(ajax_url, post={'action': 'second_page', 'code': self.info['pattern']['ID']}))
+
+ if 'answer' in res and 'wait_time' in res['answer']:
+ wait_time = int(res['answer']['wait_time'])
+ self.logInfo(_("Waiting %d seconds") % wait_time)
+ self.wait(wait_time)
+ else:
+ self.error(_("No AJAX/WAIT"))
+
+ res = json_loads(self.load(ajax_url, post={'action': 'get_link', 'code': self.info['pattern']['ID'], 'pass': 'false'}))
+
+ if 'answer' in res and 'link' in res['answer']:
+ url = res['answer']['link']
+ else:
+ self.error(_("No AJAX/URL"))
+
+ self.html = self.load(url)
+ m = re.search(r'<form id="file_form" action="(.*?)"', self.html)
+ if m:
+ url = m.group(1)
+ else:
+ self.error(_("No URL"))
+
+ self.download(url)
+
+ check = self.checkDownload({"html": re.compile("\A<!DOCTYPE html PUBLIC")})
+ if check == "html":
+ self.logWarning(_("Redirected to a HTML page, wait 10 minutes and retry"))
+ self.wait(10 * 60, True)
+
+
+getInfo = create_getInfo(UploadingCom)
diff --git a/pyload/plugin/hoster/UploadkingCom.py b/pyload/plugin/hoster/UploadkingCom.py
new file mode 100644
index 000000000..429245c30
--- /dev/null
+++ b/pyload/plugin/hoster/UploadkingCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class UploadkingCom(DeadHoster):
+ __name = "UploadkingCom"
+ __type = "hoster"
+ __version = "0.14"
+
+ __pattern = r'http://(?:www\.)?uploadking\.com/\w{10}'
+
+ __description = """UploadKing.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+getInfo = create_getInfo(UploadkingCom)
diff --git a/pyload/plugin/hoster/UpstoreNet.py b/pyload/plugin/hoster/UpstoreNet.py
new file mode 100644
index 000000000..e0153c742
--- /dev/null
+++ b/pyload/plugin/hoster/UpstoreNet.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class UpstoreNet(SimpleHoster):
+ __name = "UpstoreNet"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'https?://(?:www\.)?upstore\.net/'
+
+ __description = """Upstore.Net File Download Hoster"""
+ __license = "GPLv3"
+ __authors = [("igel", "igelkun@myopera.com")]
+
+
+ INFO_PATTERN = r'<div class="comment">.*?</div>\s*\n<h2 style="margin:0">(?P<N>.*?)</h2>\s*\n<div class="comment">\s*\n\s*(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+ OFFLINE_PATTERN = r'<span class="error">File not found</span>'
+
+ WAIT_PATTERN = r'var sec = (\d+)'
+ CHASH_PATTERN = r'<input type="hidden" name="hash" value="([^"]*)">'
+ LINK_PATTERN = r'<a href="(https?://.*?)" target="_blank"><b>'
+
+
+ def handleFree(self):
+ # STAGE 1: get link to continue
+ m = re.search(self.CHASH_PATTERN, self.html)
+ if m is None:
+ self.error(_("CHASH_PATTERN not found"))
+ chash = m.group(1)
+ self.logDebug("Read hash " + chash)
+ # continue to stage2
+ post_data = {'hash': chash, 'free': 'Slow download'}
+ self.html = self.load(self.pyfile.url, post=post_data, decode=True)
+
+ # STAGE 2: solve captcha and wait
+ # first get the infos we need: recaptcha key and wait time
+ recaptcha = ReCaptcha(self)
+
+ # try the captcha 5 times
+ for i in xrange(5):
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m is None:
+ self.error(_("Wait pattern not found"))
+ wait_time = int(m.group(1))
+
+ # then, do the waiting
+ self.wait(wait_time)
+
+ # then, handle the captcha
+ challenge, response = recaptcha.challenge()
+ post_data.update({'recaptcha_challenge_field': challenge,
+ 'recaptcha_response_field' : response})
+
+ self.html = self.load(self.pyfile.url, post=post_data, decode=True)
+
+ # STAGE 3: get direct link
+ m = re.search(self.LINK_PATTERN, self.html, re.S)
+ if m:
+ break
+
+ if m is None:
+ self.error(_("Download link not found"))
+
+ direct = m.group(1)
+ self.download(direct, disposition=True)
+
+
+getInfo = create_getInfo(UpstoreNet)
diff --git a/pyload/plugin/hoster/UptoboxCom.py b/pyload/plugin/hoster/UptoboxCom.py
new file mode 100644
index 000000000..6c126d627
--- /dev/null
+++ b/pyload/plugin/hoster/UptoboxCom.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class UptoboxCom(XFSHoster):
+ __name = "UptoboxCom"
+ __type = "hoster"
+ __version = "0.16"
+
+ __pattern = r'https?://(?:www\.)?uptobox\.com/\w{12}'
+
+ __description = """Uptobox.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = "uptobox.com"
+
+ INFO_PATTERN = r'"para_title">(?P<N>.+) \((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)'
+ OFFLINE_PATTERN = r'>(File not found|Access Denied|404 Not Found)'
+
+ LINK_PATTERN = r'"(https?://\w+\.uptobox\.com/d/.*?)"'
+
+ ERROR_PATTERN = r'>(You have to wait.+till next download.)<' #@TODO: Check XFSHoster ERROR_PATTERN
+
+
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = 1
+ self.resumeDownload = True
+
+
+getInfo = create_getInfo(UptoboxCom)
diff --git a/pyload/plugin/hoster/VeehdCom.py b/pyload/plugin/hoster/VeehdCom.py
new file mode 100644
index 000000000..30a89f0a8
--- /dev/null
+++ b/pyload/plugin/hoster/VeehdCom.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+
+
+class VeehdCom(Hoster):
+ __name = "VeehdCom"
+ __type = "hoster"
+ __version = "0.23"
+
+ __pattern = r'http://veehd\.com/video/\d+_\S+'
+ __config = [("filename_spaces", "bool", "Allow spaces in filename", False),
+ ("replacement_char", "str", "Filename replacement character", "_")]
+
+ __description = """Veehd.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("cat", "cat@pyload")]
+
+
+ def setup(self):
+ self.multiDL = True
+ self.req.canContinue = True
+
+
+ def process(self, pyfile):
+ self.download_html()
+ if not self.file_exists():
+ self.offline()
+
+ pyfile.name = self.get_file_name()
+ self.download(self.get_file_url())
+
+
+ def download_html(self):
+ url = self.pyfile.url
+ self.logDebug("Requesting page: %s" % url)
+ self.html = self.load(url)
+
+
+ def file_exists(self):
+ if not self.html:
+ self.download_html()
+
+ if '<title>Veehd</title>' in self.html:
+ return False
+ return True
+
+
+ def get_file_name(self):
+ if not self.html:
+ self.download_html()
+
+ m = re.search(r'<title[^>]*>([^<]+) on Veehd</title>', self.html)
+ if m is None:
+ self.error(_("Video title not found"))
+
+ name = m.group(1)
+
+ # replace unwanted characters in filename
+ if self.getConfig('filename_spaces'):
+ pattern = '[^\w ]+'
+ else:
+ pattern = '[^\w.]+'
+
+ return re.sub(pattern, self.getConfig('replacement_char'), name) + '.avi'
+
+
+ def get_file_url(self):
+ """ returns the absolute downloadable filepath
+ """
+ if not self.html:
+ self.download_html()
+
+ m = re.search(r'<embed type="video/divx" src="(http://([^/]*\.)?veehd\.com/dl/[^"]+)"',
+ self.html)
+ if m is None:
+ self.error(_("Embedded video url not found"))
+
+ return m.group(1)
diff --git a/pyload/plugin/hoster/VeohCom.py b/pyload/plugin/hoster/VeohCom.py
new file mode 100644
index 000000000..744497064
--- /dev/null
+++ b/pyload/plugin/hoster/VeohCom.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class VeohCom(SimpleHoster):
+ __name = "VeohCom"
+ __type = "hoster"
+ __version = "0.21"
+
+ __pattern = r'http://(?:www\.)?veoh\.com/(tv/)?(watch|videos)/(?P<ID>v\w+)'
+ __config = [("quality", "Low;High;Auto", "Quality", "Auto")]
+
+ __description = """Veoh.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'<meta name="title" content="(?P<N>.*?)"'
+ OFFLINE_PATTERN = r'>Sorry, we couldn\'t find the video you were looking for'
+
+ URL_REPLACEMENTS = [(__pattern + ".*", r'http://www.veoh.com/watch/\g<ID>')]
+
+ COOKIES = [("veoh.com", "lassieLocale", "en")]
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = -1
+
+
+ def handleFree(self):
+ quality = self.getConfig("quality")
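+ # "Auto" tries the High stream first and falls back to Low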
+ if quality == "Auto":
+ quality = ("High", "Low")
+ for q in quality:
+ pattern = r'"fullPreviewHash%sPath":"(.+?)"' % q
+ m = re.search(pattern, self.html)
+ if m:
+ self.pyfile.name += ".mp4"
+ link = m.group(1).replace("\\", "")
+ self.download(link)
+ return
+ else:
+ self.logInfo(_("No %s quality video found") % q.upper())
+ else:
+ self.fail(_("No video found!"))
+
+
+getInfo = create_getInfo(VeohCom)
diff --git a/pyload/plugin/hoster/VidPlayNet.py b/pyload/plugin/hoster/VidPlayNet.py
new file mode 100644
index 000000000..8849bafcf
--- /dev/null
+++ b/pyload/plugin/hoster/VidPlayNet.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# BigBuckBunny_320x180.mp4 - 61.7 Mb - http://vidplay.net/38lkev0h3jv0
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class VidPlayNet(XFSHoster):
+ __name = "VidPlayNet"
+ __type = "hoster"
+ __version = "0.04"
+
+ __pattern = r'https?://(?:www\.)?vidplay\.net/\w{12}'
+
+ __description = """VidPlay.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
+
+
+ HOSTER_DOMAIN = "vidplay.net"
+
+ NAME_PATTERN = r'<b>Password:</b></div>\s*<h[1-6]>(?P<N>[^<]+)</h[1-6]>'
+
+
+getInfo = create_getInfo(VidPlayNet)
diff --git a/pyload/plugin/hoster/VimeoCom.py b/pyload/plugin/hoster/VimeoCom.py
new file mode 100644
index 000000000..63a0ec882
--- /dev/null
+++ b/pyload/plugin/hoster/VimeoCom.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class VimeoCom(SimpleHoster):
+ __name = "VimeoCom"
+ __type = "hoster"
+ __version = "0.03"
+
+ __pattern = r'https?://(?:www\.)?(player\.)?vimeo\.com/(video/)?(?P<ID>\d+)'
+ __config = [("quality", "Lowest;Mobile;SD;HD;Highest", "Quality", "Highest"),
+ ("original", "bool", "Try to download the original file first", True)]
+
+ __description = """Vimeo.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'<title>(?P<N>.+) on Vimeo<'
+ OFFLINE_PATTERN = r'class="exception_header"'
+ TEMP_OFFLINE_PATTERN = r'Please try again in a few minutes.<'
+
+ URL_REPLACEMENTS = [(__pattern + ".*", r'https://www.vimeo.com/\g<ID>')]
+
+ COOKIES = [("vimeo.com", "language", "en")]
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = -1
+
+
+ def handleFree(self):
+ password = self.getPassword()
+
+ if self.js and 'class="btn iconify_down_b"' in self.html:
+ html = self.js.eval(self.load(self.pyfile.url, get={'action': "download", 'password': password}, decode=True))
+ pattern = r'href="(?P<URL>http://vimeo\.com.+?)".*?\>(?P<QL>.+?) '
+ else:
+ id = re.match(self.__pattern, self.pyfile.url).group("ID")
+ html = self.load("https://player.vimeo.com/video/" + id, get={'password': password})
+ pattern = r'"(?P<QL>\w+)":{"profile".*?"(?P<URL>http://pdl\.vimeocdn\.com.+?)"'
+
+ link = dict((l.group('QL').lower(), l.group('URL')) for l in re.finditer(pattern, html))
+
+ if self.getConfig("original"):
+ if "original" in link:
+ self.download(link["original"])
+ return
+ else:
+ self.logInfo(_("Original file not downloadable"))
+
+ quality = self.getConfig("quality")
+ if quality == "Highest":
+ qlevel = ("hd", "sd", "mobile")
+ elif quality == "Lowest":
+ qlevel = ("mobile", "sd", "hd")
+ else:
+ qlevel = (quality.lower(),)
+
+ for q in qlevel:
+ if q in link:
+ self.download(link[q])
+ return
+ else:
+ self.logInfo(_("No %s quality video found") % q.upper())
+ else:
+ self.fail(_("No video found!"))
+
+
+getInfo = create_getInfo(VimeoCom)
diff --git a/pyload/plugin/hoster/Vipleech4uCom.py b/pyload/plugin/hoster/Vipleech4uCom.py
new file mode 100644
index 000000000..8936bee98
--- /dev/null
+++ b/pyload/plugin/hoster/Vipleech4uCom.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class Vipleech4uCom(DeadHoster):
+ __name = "Vipleech4uCom"
+ __type = "hoster"
+ __version = "0.20"
+
+ __pattern = r'http://(?:www\.)?vipleech4u\.com/manager\.php'
+
+ __description = """Vipleech4u.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Kagenoshin", "kagenoshin@gmx.ch")]
+
+
+getInfo = create_getInfo(Vipleech4uCom)
diff --git a/pyload/plugin/hoster/WarserverCz.py b/pyload/plugin/hoster/WarserverCz.py
new file mode 100644
index 000000000..3f935341c
--- /dev/null
+++ b/pyload/plugin/hoster/WarserverCz.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class WarserverCz(DeadHoster):
+ __name = "WarserverCz"
+ __type = "hoster"
+ __version = "0.13"
+
+ __pattern = r'http://(?:www\.)?warserver\.cz/stahnout/\d+'
+
+ __description = """Warserver.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+getInfo = create_getInfo(WarserverCz)
diff --git a/pyload/plugin/hoster/WebshareCz.py b/pyload/plugin/hoster/WebshareCz.py
new file mode 100644
index 000000000..4f5b78d68
--- /dev/null
+++ b/pyload/plugin/hoster/WebshareCz.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.internal.SimpleHoster import SimpleHoster
+
+
+def getInfo(urls):
+ for url in urls:
+ fid = re.search(WebshareCz.__pattern, url).group('ID')
+ api_data = getURL("https://webshare.cz/api/file_info/", post={'ident': fid})
+
+ if 'File not found' in api_data:
+ file_info = (url, 0, 1, url)
+ else:
+ name = re.search('<name>(.+)</name>', api_data).group(1)
+ size = re.search('<size>(.+)</size>', api_data).group(1)
+ file_info = (name, size, 2, url)
+
+ yield file_info
+
+
+class WebshareCz(SimpleHoster):
+ __name = "WebshareCz"
+ __type = "hoster"
+ __version = "0.14"
+
+ __pattern = r'https?://(?:www\.)?webshare\.cz/(?:#/)?file/(?P<ID>\w+)'
+
+ __description = """WebShare.cz hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ def handleFree(self):
+ api_data = self.load('https://webshare.cz/api/file_link/', post={'ident': self.fid})
+
+ self.logDebug("API data: " + api_data)
+
+ m = re.search('<link>(.+)</link>', api_data)
+ if m is None:
+ self.error(_("Unable to detect direct link"))
+
+ self.download(m.group(1), disposition=True)
+
+
+ def getFileInfo(self):
+ self.logDebug("URL: %s" % self.pyfile.url)
+
+ self.fid = re.match(self.__pattern, self.pyfile.url).group('ID')
+
+ self.load(self.pyfile.url)
+ api_data = self.load('https://webshare.cz/api/file_info/', post={'ident': self.fid})
+
+ if 'File not found' in api_data:
+ self.offline()
+ else:
+ self.pyfile.name = re.search('<name>(.+)</name>', api_data).group(1)
+ self.pyfile.size = re.search('<size>(.+)</size>', api_data).group(1)
+
+ self.logDebug("FILE NAME: %s FILE SIZE: %s" % (self.pyfile.name, self.pyfile.size))
diff --git a/pyload/plugin/hoster/WrzucTo.py b/pyload/plugin/hoster/WrzucTo.py
new file mode 100644
index 000000000..ad0a88d02
--- /dev/null
+++ b/pyload/plugin/hoster/WrzucTo.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pycurl import HTTPHEADER
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class WrzucTo(SimpleHoster):
+ __name = "WrzucTo"
+ __type = "hoster"
+ __version = "0.02"
+
+ __pattern = r'http://(?:www\.)?wrzuc\.to/(\w+(\.wt|\.html)|(\w+/?linki/\w+))'
+
+ __description = """Wrzuc.to hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'id="file_info">\s*<strong>(?P<N>.*?)</strong>'
+ SIZE_PATTERN = r'class="info">\s*<tr>\s*<td>(?P<S>.*?)</td>'
+
+ COOKIES = [("wrzuc.to", "language", "en")]
+
+
+ def setup(self):
+ self.multiDL = True
+
+
+ def handleFree(self):
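+ # the page embeds an md5 token and a file id; the md5 is sent to the "prepair" call, the file id to "download_link"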
+ data = dict(re.findall(r'(md5|file): "(.*?)"', self.html))
+ if len(data) != 2:
+ self.error(_("No file ID"))
+
+ self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
+ self.req.http.lastURL = self.pyfile.url
+ self.load("http://www.wrzuc.to/ajax/server/prepair", post={"md5": data['md5']})
+
+ self.req.http.lastURL = self.pyfile.url
+ self.html = self.load("http://www.wrzuc.to/ajax/server/download_link", post={"file": data['file']})
+
+ data.update(re.findall(r'"(download_link|server_id)":"(.*?)"', self.html))
+ if len(data) != 4:
+ self.error(_("No download URL"))
+
+ download_url = "http://%s.wrzuc.to/pobierz/%s" % (data['server_id'], data['download_link'])
+ self.download(download_url)
+
+
+getInfo = create_getInfo(WrzucTo)
diff --git a/pyload/plugin/hoster/WuploadCom.py b/pyload/plugin/hoster/WuploadCom.py
new file mode 100644
index 000000000..6eb3594f5
--- /dev/null
+++ b/pyload/plugin/hoster/WuploadCom.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class WuploadCom(DeadHoster):
+ __name = "WuploadCom"
+ __type = "hoster"
+ __version = "0.23"
+
+ __pattern = r'http://(?:www\.)?wupload\..*?/file/((\w+/)?\d+)(/.*)?'
+
+ __description = """Wupload.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de"),
+ ("Paul King", "")]
+
+
+getInfo = create_getInfo(WuploadCom)
diff --git a/pyload/plugin/hoster/X7To.py b/pyload/plugin/hoster/X7To.py
new file mode 100644
index 000000000..d8a3b30d9
--- /dev/null
+++ b/pyload/plugin/hoster/X7To.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class X7To(DeadHoster):
+ __name = "X7To"
+ __type = "hoster"
+ __version = "0.41"
+
+ __pattern = r'http://(?:www\.)?x7\.to/'
+
+ __description = """X7.to hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("ernieb", "ernieb")]
+
+
+getInfo = create_getInfo(X7To)
diff --git a/pyload/plugin/hoster/XFileSharingPro.py b/pyload/plugin/hoster/XFileSharingPro.py
new file mode 100644
index 000000000..9da17fd0d
--- /dev/null
+++ b/pyload/plugin/hoster/XFileSharingPro.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class XFileSharingPro(XFSHoster):
+ __name = "XFileSharingPro"
+ __type = "hoster"
+ __version = "0.43"
+
+ __pattern = r'^unmatchable$'
+
+ __description = """XFileSharingPro dummy hoster plugin for hook"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ URL_REPLACEMENTS = [("/embed-", "/")]
+
+
+ def _log(self, type, args):
+ msg = " | ".join([str(a).strip() for a in args if a])
+ logger = getattr(self.log, type)
+ logger("%s: %s: %s" % (self.__name, self.HOSTER_NAME, msg or _("%s MARK" % type.upper())))
+
+
+ def init(self):
+ super(XFileSharingPro, self).init()
+
+ self.__pattern = self.core.pluginManager.hosterPlugins[self.__name]['pattern']
+
+ self.HOSTER_DOMAIN = re.match(self.__pattern, self.pyfile.url).group(1).lower()
+        self.HOSTER_NAME = "".join(part.capitalize() for part in self.HOSTER_DOMAIN.split('.'))
+
+ account = self.core.accountManager.getAccountPlugin(self.HOSTER_NAME)
+
+ if account and account.canUse():
+ self.account = account
+ elif self.account:
+ self.account.HOSTER_DOMAIN = self.HOSTER_DOMAIN
+ else:
+ return
+
+ self.user, data = self.account.selectAccount()
+ self.req = self.account.getAccountRequest(self.user)
+ self.premium = self.account.isPremium(self.user)
+
+
+ def setup(self):
+ self.chunkLimit = 1
+ self.resumeDownload = self.premium
+ self.multiDL = True
+
+
+getInfo = create_getInfo(XFileSharingPro)
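
A standalone sketch of the HOSTER_DOMAIN to HOSTER_NAME mapping performed in XFileSharingPro.init() above; the domain below is a made-up example, not a supported site.

hoster_domain = "uploadfiles.co.nz"
hoster_name = "".join(part.capitalize() for part in hoster_domain.split('.'))
assert hoster_name == "UploadfilesCoNz"  # account plugins are looked up under this name
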
diff --git a/pyload/plugin/hoster/XHamsterCom.py b/pyload/plugin/hoster/XHamsterCom.py
new file mode 100644
index 000000000..e1b0d8c32
--- /dev/null
+++ b/pyload/plugin/hoster/XHamsterCom.py
@@ -0,0 +1,129 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urllib import unquote
+
+from pyload.utils import json_loads
+from pyload.plugin.Hoster import Hoster
+
+
+def clean_json(json_expr):
+ json_expr = re.sub('[\n\r]', '', json_expr)
+ json_expr = re.sub(' +', '', json_expr)
+ json_expr = re.sub('\'', '"', json_expr)
+
+ return json_expr
+
+
+class XHamsterCom(Hoster):
+ __name = "XHamsterCom"
+ __type = "hoster"
+ __version = "0.12"
+
+ __pattern = r'http://(?:www\.)?xhamster\.com/movies/.+'
+ __config = [("type", ".mp4;.flv", "Preferred type", ".mp4")]
+
+ __description = """XHamster.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = []
+
+
+ def process(self, pyfile):
+ self.pyfile = pyfile
+
+ if not self.file_exists():
+ self.offline()
+
+        self.desired_fmt = self.getConfig("type") or ".mp4"
+
+ pyfile.name = self.get_file_name() + self.desired_fmt
+ self.download(self.get_file_url())
+
+
+ def download_html(self):
+ url = self.pyfile.url
+ self.html = self.load(url)
+
+
+ def get_file_url(self):
+ """ returns the absolute downloadable filepath
+ """
+ if not self.html:
+ self.download_html()
+
+ flashvar_pattern = re.compile('flashvars = ({.*?});', re.S)
+ json_flashvar = flashvar_pattern.search(self.html)
+
+ if not json_flashvar:
+ self.error(_("flashvar not found"))
+
+ j = clean_json(json_flashvar.group(1))
+ flashvars = json_loads(j)
+
+ if flashvars['srv']:
+ srv_url = flashvars['srv'] + '/'
+ else:
+ self.error(_("srv_url not found"))
+
+        if flashvars['url_mode']:
+            url_mode = flashvars['url_mode']
+        else:
+            self.error(_("url_mode not found"))
+
+ if self.desired_fmt == ".mp4":
+ file_url = re.search(r"<a href=\"" + srv_url + "(.+?)\"", self.html)
+ if file_url is None:
+ self.error(_("file_url not found"))
+ file_url = file_url.group(1)
+ long_url = srv_url + file_url
+ self.logDebug("long_url = " + long_url)
+ else:
+ if flashvars['file']:
+ file_url = unquote(flashvars['file'])
+ else:
+ self.error(_("file_url not found"))
+
+ if url_mode == '3':
+ long_url = file_url
+ self.logDebug("long_url = " + long_url)
+ else:
+ long_url = srv_url + "key=" + file_url
+ self.logDebug("long_url = " + long_url)
+
+ return long_url
+
+
+ def get_file_name(self):
+ if not self.html:
+ self.download_html()
+
+ pattern = r'<title>(.*?) - xHamster\.com</title>'
+ name = re.search(pattern, self.html)
+ if name is None:
+ pattern = r'<h1 >(.*)</h1>'
+ name = re.search(pattern, self.html)
+ if name is None:
+                pattern = r'http://(?:www\.)?xhamster\.com/movies/.*/(.*?)\.html?'
+                name = re.match(pattern, self.pyfile.url)
+ if name is None:
+ pattern = r'<div id="element_str_id" style="display:none;">(.*)</div>'
+ name = re.search(pattern, self.html)
+ if name is None:
+ return "Unknown"
+
+ return name.group(1)
+
+
+ def file_exists(self):
+ """ returns True or False
+ """
+ if not self.html:
+ self.download_html()
+ if re.search(r"(.*Video not found.*)", self.html) is not None:
+ return False
+ else:
+ return True
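
A small sketch of what clean_json() above does to the page's JavaScript-style flashvars object before json_loads parses it; the flashvars content here is a made-up example.

from pyload.plugin.hoster.XHamsterCom import clean_json
from pyload.utils import json_loads

raw = "{ 'srv': 'http://flv.example.org',\n  'url_mode': '3',\n  'file': 'video.mp4' }"
cleaned = clean_json(raw)  # -> '{"srv":"http://flv.example.org","url_mode":"3","file":"video.mp4"}'
flashvars = json_loads(cleaned)
assert flashvars['url_mode'] == '3'
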
diff --git a/pyload/plugin/hoster/XVideosCom.py b/pyload/plugin/hoster/XVideosCom.py
new file mode 100644
index 000000000..b54407b6f
--- /dev/null
+++ b/pyload/plugin/hoster/XVideosCom.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urllib import unquote
+
+from pyload.plugin.Hoster import Hoster
+
+
+class XVideosCom(Hoster):
+ __name = "XVideos.com"
+ __type = "hoster"
+ __version = "0.10"
+
+ __pattern = r'http://(?:www\.)?xvideos\.com/video(\d+)/.*'
+
+ __description = """XVideos.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = []
+
+
+ def process(self, pyfile):
+ site = self.load(pyfile.url)
+ pyfile.name = "%s (%s).flv" % (
+ re.search(r"<h2>([^<]+)<span", site).group(1),
+ re.match(self.__pattern, pyfile.url).group(1),
+ )
+ self.download(unquote(re.search(r"flv_url=([^&]+)&", site).group(1)))
diff --git a/pyload/plugin/hoster/Xdcc.py b/pyload/plugin/hoster/Xdcc.py
new file mode 100644
index 000000000..1ce8ee677
--- /dev/null
+++ b/pyload/plugin/hoster/Xdcc.py
@@ -0,0 +1,207 @@
+# -*- coding: utf-8 -*-
+
+import re
+import socket
+import struct
+import sys
+import time
+
+from os import makedirs
+from os.path import exists, join
+from select import select
+
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import safe_join
+
+
+class Xdcc(Hoster):
+ __name = "Xdcc"
+ __type = "hoster"
+ __version = "0.32"
+
+ __config = [("nick", "str", "Nickname", "pyload"),
+ ("ident", "str", "Ident", "pyloadident"),
+ ("realname", "str", "Realname", "pyloadreal")]
+
+ __description = """Download from IRC XDCC bot"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.com")]
+
+
+ def setup(self):
+ self.debug = 0 # 0,1,2
+ self.timeout = 30
+ self.multiDL = False
+
+
+ def process(self, pyfile):
+ # change request type
+ self.req = pyfile.m.core.requestFactory.getRequest(self.__name, type="XDCC")
+
+ self.pyfile = pyfile
+ for _i in xrange(0, 3):
+ try:
+ nmn = self.doDownload(pyfile.url)
+ self.logDebug("Download of %s finished." % nmn)
+ return
+ except socket.error, e:
+ if hasattr(e, "errno"):
+ errno = e.errno
+ else:
+ errno = e.args[0]
+
+ if errno == 10054:
+ self.logDebug("Server blocked our ip, retry in 5 min")
+ self.setWait(300)
+ self.wait()
+ continue
+
+ self.fail(_("Failed due to socket errors. Code: %d") % errno)
+
+ self.fail(_("Server blocked our ip, retry again later manually"))
+
+
+ def doDownload(self, url):
+ self.pyfile.setStatus("waiting") # real link
+
+ m = re.match(r'xdcc://(.*?)/#?(.*?)/(.*?)/#?(\d+)/?', url)
+ server = m.group(1)
+ chan = m.group(2)
+ bot = m.group(3)
+ pack = m.group(4)
+ nick = self.getConfig('nick')
+ ident = self.getConfig('ident')
+ real = self.getConfig('realname')
+
+ temp = server.split(':')
+ ln = len(temp)
+ if ln == 2:
+ host, port = temp
+ elif ln == 1:
+ host, port = temp[0], 6667
+ else:
+ self.fail(_("Invalid hostname for IRC Server: %s") % server)
+
+ #######################
+ # CONNECT TO IRC AND IDLE FOR REAL LINK
+ dl_time = time.time()
+
+ sock = socket.socket()
+ sock.connect((host, int(port)))
+ if nick == "pyload":
+ nick = "pyload-%d" % (time.time() % 1000) # last 3 digits
+ sock.send("NICK %s\r\n" % nick)
+ sock.send("USER %s %s bla :%s\r\n" % (ident, host, real))
+ time.sleep(3)
+ sock.send("JOIN #%s\r\n" % chan)
+ sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
+
+ # IRC recv loop
+ readbuffer = ""
+ done = False
+ retry = None
+ m = None
+ while True:
+
+ # done is set if we got our real link
+ if done:
+ break
+
+ if retry:
+ if time.time() > retry:
+ retry = None
+ dl_time = time.time()
+ sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
+
+ else:
+ if (dl_time + self.timeout) < time.time(): # todo: add in config
+ sock.send("QUIT :byebye\r\n")
+ sock.close()
+ self.fail(_("XDCC Bot did not answer"))
+
+ fdset = select([sock], [], [], 0)
+ if sock not in fdset[0]:
+ continue
+
+ readbuffer += sock.recv(1024)
+ temp = readbuffer.split("\n")
+ readbuffer = temp.pop()
+
+ for line in temp:
+                if self.debug == 2:
+ print "*> " + unicode(line, errors='ignore')
+ line = line.rstrip()
+ first = line.split()
+
+ if first[0] == "PING":
+ sock.send("PONG %s\r\n" % first[1])
+
+ if first[0] == "ERROR":
+ self.fail(_("IRC-Error: %s") % line)
+
+ msg = line.split(None, 3)
+ if len(msg) != 4:
+ continue
+
+ msg = {
+ "origin": msg[0][1:],
+ "action": msg[1],
+ "target": msg[2],
+ "text": msg[3][1:]
+ }
+
+ if nick == msg['target'][0:len(nick)] and "PRIVMSG" == msg['action']:
+ if msg['text'] == "\x01VERSION\x01":
+ self.logDebug("Sending CTCP VERSION")
+ sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
+ elif msg['text'] == "\x01TIME\x01":
+ self.logDebug("Sending CTCP TIME")
+ sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
+ elif msg['text'] == "\x01LAG\x01":
+ pass # don't know how to answer
+
+ if not (bot == msg['origin'][0:len(bot)]
+ and nick == msg['target'][0:len(nick)]
+ and msg['action'] in ("PRIVMSG", "NOTICE")):
+ continue
+
+                if self.debug == 1:
+ print "%s: %s" % (msg['origin'], msg['text'])
+
+ if "You already requested that pack" in msg['text']:
+ retry = time.time() + 300
+
+ if "you must be on a known channel to request a pack" in msg['text']:
+ self.fail(_("Wrong channel"))
+
+ m = re.match('\x01DCC SEND (.*?) (\d+) (\d+)(?: (\d+))?\x01', msg['text'])
+ if m:
+ done = True
+
+ # get connection data
+                    ip = socket.inet_ntoa(struct.pack('!I', int(m.group(2))))  # DCC SEND passes the IP as a big-endian integer
+ port = int(m.group(3))
+ packname = m.group(1)
+
+                    if m.group(4):  # file size is optional in DCC SEND
+ self.req.filesize = int(m.group(4))
+
+ self.pyfile.name = packname
+
+ download_folder = self.config['general']['download_folder']
+ filename = safe_join(download_folder, packname)
+
+ self.logInfo(_("Downloading %s from %s:%d") % (packname, ip, port))
+
+ self.pyfile.setStatus("downloading")
+ newname = self.req.download(ip, port, filename, sock, self.pyfile.setProgress)
+ if newname and newname != filename:
+ self.logInfo(_("%(name)s saved as %(newname)s") % {"name": self.pyfile.name, "newname": newname})
+ filename = newname
+
+ # kill IRC socket
+ # sock.send("QUIT :byebye\r\n")
+ sock.close()
+
+ self.lastDownload = filename
+ return self.lastDownload
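
For reference, a standalone sketch of the xdcc:// URL layout that doDownload() above expects; server, channel, bot and pack number are made-up examples.

import re

url = "xdcc://irc.example.net/#moviechannel/SomeBot/#42/"
m = re.match(r'xdcc://(.*?)/#?(.*?)/(.*?)/#?(\d+)/?', url)
server, chan, bot, pack = m.groups()
assert (server, chan, bot, pack) == ("irc.example.net", "moviechannel", "SomeBot", "42")
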
diff --git a/pyload/plugin/hoster/YibaishiwuCom.py b/pyload/plugin/hoster/YibaishiwuCom.py
new file mode 100644
index 000000000..563047e57
--- /dev/null
+++ b/pyload/plugin/hoster/YibaishiwuCom.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.utils import json_loads
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class YibaishiwuCom(SimpleHoster):
+ __name = "YibaishiwuCom"
+ __type = "hoster"
+ __version = "0.13"
+
+ __pattern = r'http://(?:www\.)?(?:u\.)?115\.com/file/(?P<ID>\w+)'
+
+ __description = """115.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ NAME_PATTERN = r'file_name: \'(?P<N>.+?)\''
+ SIZE_PATTERN = r'file_size: \'(?P<S>.+?)\''
+ OFFLINE_PATTERN = ur'<h3><i style="color:red;">哎呀提取码䞍存圚䞍劚搜搜看吧</i></h3>'
+
+ LINK_PATTERN = r'(/\?ct=(pickcode|download)[^"\']+)'
+
+
+ def handleFree(self):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("LINK_PATTERN not found"))
+ url = m.group(1)
+ self.logDebug(('FREEUSER' if m.group(2) == 'download' else 'GUEST') + ' URL', url)
+
+ res = json_loads(self.load("http://115.com" + url, decode=False))
+ if "urls" in res:
+ mirrors = res['urls']
+ elif "data" in res:
+ mirrors = res['data']
+ else:
+            mirrors = []  #: unknown response layout -> triggers the fail below
+
+ for mr in mirrors:
+ try:
+ url = mr['url'].replace("\\", "")
+ self.logDebug("Trying URL: " + url)
+ self.download(url)
+ break
+ except Exception:
+ continue
+ else:
+ self.fail(_("No working link found"))
+
+
+getInfo = create_getInfo(YibaishiwuCom)
diff --git a/pyload/plugin/hoster/YoupornCom.py b/pyload/plugin/hoster/YoupornCom.py
new file mode 100644
index 000000000..8f77a15dd
--- /dev/null
+++ b/pyload/plugin/hoster/YoupornCom.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Hoster import Hoster
+
+
+class YoupornCom(Hoster):
+ __name = "YoupornCom"
+ __type = "hoster"
+ __version = "0.20"
+
+ __pattern = r'http://(?:www\.)?youporn\.com/watch/.+'
+
+ __description = """Youporn.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("willnix", "willnix@pyload.org")]
+
+
+ def process(self, pyfile):
+ self.pyfile = pyfile
+
+ if not self.file_exists():
+ self.offline()
+
+ pyfile.name = self.get_file_name()
+ self.download(self.get_file_url())
+
+
+ def download_html(self):
+ url = self.pyfile.url
+ self.html = self.load(url, post={"user_choice": "Enter"}, cookies=False)
+
+
+ def get_file_url(self):
+ """ returns the absolute downloadable filepath
+ """
+ if not self.html:
+ self.download_html()
+
+ return re.search(r'(http://download\.youporn\.com/download/\d+\?save=1)">', self.html).group(1)
+
+
+ def get_file_name(self):
+ if not self.html:
+ self.download_html()
+
+ file_name_pattern = r'<title>(.+) - '
+ return re.search(file_name_pattern, self.html).group(1).replace("&amp;", "&").replace("/", "") + '.flv'
+
+
+ def file_exists(self):
+ """ returns True or False
+ """
+ if not self.html:
+ self.download_html()
+ if re.search(r"(.*invalid video_id.*)", self.html) is not None:
+ return False
+ else:
+ return True
diff --git a/pyload/plugin/hoster/YourfilesTo.py b/pyload/plugin/hoster/YourfilesTo.py
new file mode 100644
index 000000000..a58df5b72
--- /dev/null
+++ b/pyload/plugin/hoster/YourfilesTo.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urllib import unquote
+
+from pyload.plugin.Hoster import Hoster
+
+
+class YourfilesTo(Hoster):
+ __name = "YourfilesTo"
+ __type = "hoster"
+ __version = "0.21"
+
+ __pattern = r'(http://)?(?:www\.)?yourfiles\.(to|biz)/\?d=\w+'
+
+ __description = """Youfiles.to hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("jeix", "jeix@hasnomail.de"),
+ ("skydancer", "skydancer@hasnomail.de")]
+
+
+ def process(self, pyfile):
+ self.pyfile = pyfile
+ self.prepare()
+ self.download(self.get_file_url())
+
+
+ def prepare(self):
+ if not self.file_exists():
+ self.offline()
+
+ self.pyfile.name = self.get_file_name()
+
+ wait_time = self.get_waiting_time()
+ self.setWait(wait_time)
+ self.wait()
+
+
+ def get_waiting_time(self):
+ if not self.html:
+ self.download_html()
+
+ #var zzipitime = 15;
+ m = re.search(r'var zzipitime = (\d+);', self.html)
+ if m:
+ sec = int(m.group(1))
+ else:
+ sec = 0
+
+ return sec
+
+
+ def download_html(self):
+ url = self.pyfile.url
+ self.html = self.load(url)
+
+
+ def get_file_url(self):
+ """ returns the absolute downloadable filepath
+ """
+ url = re.search(r"var bla = '(.*?)';", self.html)
+ if url:
+ url = url.group(1)
+ url = unquote(url.replace("http://http:/http://", "http://").replace("dumdidum", ""))
+ return url
+ else:
+ self.error(_("Absolute filepath not found"))
+
+
+ def get_file_name(self):
+ if not self.html:
+ self.download_html()
+
+ return re.search("<title>(.*)</title>", self.html).group(1)
+
+
+ def file_exists(self):
+ """ returns True or False
+ """
+ if not self.html:
+ self.download_html()
+
+ if re.search(r"HTTP Status 404", self.html) is not None:
+ return False
+ else:
+ return True
diff --git a/pyload/plugin/hoster/YoutubeCom.py b/pyload/plugin/hoster/YoutubeCom.py
new file mode 100644
index 000000000..b0e4c82f5
--- /dev/null
+++ b/pyload/plugin/hoster/YoutubeCom.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+
+import os
+import re
+import subprocess
+
+from urllib import unquote
+
+from pyload.plugin.Hoster import Hoster
+from pyload.plugin.internal.SimpleHoster import replace_patterns
+from pyload.utils import html_unescape
+
+
+def which(program):
+ """Works exactly like the unix command which
+
+ Courtesy of http://stackoverflow.com/a/377028/675646"""
+
+ def is_exe(fpath):
+ return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+ fpath, fname = os.path.split(program)
+
+ if fpath:
+ if is_exe(program):
+ return program
+ else:
+ for path in os.environ['PATH'].split(os.pathsep):
+ path = path.strip('"')
+ exe_file = os.path.join(path, program)
+ if is_exe(exe_file):
+ return exe_file
+
+ return None
+
+
+class YoutubeCom(Hoster):
+ __name = "YoutubeCom"
+ __type = "hoster"
+ __version = "0.40"
+
+ __pattern = r'https?://(?:[^/]*\.)?(?:youtube\.com|youtu\.be)/watch.*?[?&]v=.*'
+ __config = [("quality", "sd;hd;fullhd;240p;360p;480p;720p;1080p;3072p", "Quality Setting", "hd"),
+ ("fmt", "int", "FMT/ITAG Number (5-102, 0 for auto)", 0),
+ (".mp4", "bool", "Allow .mp4", True),
+ (".flv", "bool", "Allow .flv", True),
+ (".webm", "bool", "Allow .webm", False),
+ (".3gp", "bool", "Allow .3gp", False),
+ ("3d", "bool", "Prefer 3D", False)]
+
+ __description = """Youtube.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("spoob", "spoob@pyload.org"),
+ ("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ URL_REPLACEMENTS = [(r'youtu\.be/', 'youtube.com/')]
+
+ # Invalid characters that must be removed from the file name
+ invalidChars = u'\u2605:?><"|\\'
+
+ # name, width, height, quality ranking, 3D
+ formats = {5 : (".flv" , 400 , 240 , 1 , False),
+ 6 : (".flv" , 640 , 400 , 4 , False),
+ 17 : (".3gp" , 176 , 144 , 0 , False),
+ 18 : (".mp4" , 480 , 360 , 2 , False),
+ 22 : (".mp4" , 1280, 720 , 8 , False),
+ 43 : (".webm", 640 , 360 , 3 , False),
+ 34 : (".flv" , 640 , 360 , 4 , False),
+ 35 : (".flv" , 854 , 480 , 6 , False),
+ 36 : (".3gp" , 400 , 240 , 1 , False),
+ 37 : (".mp4" , 1920, 1080, 9 , False),
+ 38 : (".mp4" , 4096, 3072, 10, False),
+ 44 : (".webm", 854 , 480 , 5 , False),
+ 45 : (".webm", 1280, 720 , 7 , False),
+ 46 : (".webm", 1920, 1080, 9 , False),
+ 82 : (".mp4" , 640 , 360 , 3 , True ),
+ 83 : (".mp4" , 400 , 240 , 1 , True ),
+ 84 : (".mp4" , 1280, 720 , 8 , True ),
+ 85 : (".mp4" , 1920, 1080, 9 , True ),
+ 100: (".webm", 640 , 360 , 3 , True ),
+ 101: (".webm", 640 , 360 , 4 , True ),
+ 102: (".webm", 1280, 720 , 8 , True )}
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def process(self, pyfile):
+ pyfile.url = replace_patterns(pyfile.url, self.URL_REPLACEMENTS)
+ html = self.load(pyfile.url, decode=True)
+
+ if re.search(r'<div id="player-unavailable" class="\s*player-width player-height\s*">', html):
+ self.offline()
+
+ if "We have been receiving a large volume of requests from your network." in html:
+ self.tempOffline()
+
+ #get config
+ use3d = self.getConfig("3d")
+ if use3d:
+ quality = {"sd": 82, "hd": 84, "fullhd": 85, "240p": 83, "360p": 82,
+ "480p": 82, "720p": 84, "1080p": 85, "3072p": 85}
+ else:
+ quality = {"sd": 18, "hd": 22, "fullhd": 37, "240p": 5, "360p": 18,
+ "480p": 35, "720p": 22, "1080p": 37, "3072p": 38}
+ desired_fmt = self.getConfig("fmt")
+ if desired_fmt and desired_fmt not in self.formats:
+ self.logWarning(_("FMT %d unknown, using default") % desired_fmt)
+ desired_fmt = 0
+ if not desired_fmt:
+ desired_fmt = quality.get(self.getConfig("quality"), 18)
+
+ #parse available streams
+ streams = re.search(r'"url_encoded_fmt_stream_map": "(.*?)",', html).group(1)
+ streams = [x.split('\u0026') for x in streams.split(',')]
+ streams = [dict((y.split('=', 1)) for y in x) for x in streams]
+ streams = [(int(x['itag']), unquote(x['url'])) for x in streams]
+ #self.logDebug("Found links: %s" % streams)
+ self.logDebug("AVAILABLE STREAMS: %s" % [x[0] for x in streams])
+
+ #build dictionary of supported itags (3D/2D)
+ allowed = lambda x: self.getConfig(self.formats[x][0])
+ streams = [x for x in streams if x[0] in self.formats and allowed(x[0])]
+ if not streams:
+ self.fail(_("No available stream meets your preferences"))
+ fmt_dict = dict([x for x in streams if self.formats[x[0]][4] == use3d] or streams)
+
+ self.logDebug("DESIRED STREAM: ITAG:%d (%s) %sfound, %sallowed" %
+ (desired_fmt, "%s %dx%d Q:%d 3D:%s" % self.formats[desired_fmt],
+ "" if desired_fmt in fmt_dict else "NOT ", "" if allowed(desired_fmt) else "NOT "))
+
+ #return fmt nearest to quality index
+ if desired_fmt in fmt_dict and allowed(desired_fmt):
+ fmt = desired_fmt
+ else:
+ sel = lambda x: self.formats[x][3] # select quality index
+ comp = lambda x, y: abs(sel(x) - sel(y))
+
+ self.logDebug("Choosing nearest fmt: %s" % [(x, allowed(x), comp(x, desired_fmt)) for x in fmt_dict.keys()])
+ fmt = reduce(lambda x, y: x if comp(x, desired_fmt) <= comp(y, desired_fmt) and
+ sel(x) > sel(y) else y, fmt_dict.keys())
+
+ self.logDebug("Chosen fmt: %s" % fmt)
+ url = fmt_dict[fmt]
+ self.logDebug("URL: %s" % url)
+
+ #set file name
+ file_suffix = self.formats[fmt][0] if fmt in self.formats else ".flv"
+ file_name_pattern = '<meta name="title" content="(.+?)">'
+ name = re.search(file_name_pattern, html).group(1).replace("/", "")
+
+ # Cleaning invalid characters from the file name
+ name = name.encode('ascii', 'replace')
+ for c in self.invalidChars:
+ name = name.replace(c, '_')
+
+ pyfile.name = html_unescape(name)
+
+ time = re.search(r"t=((\d+)m)?(\d+)s", pyfile.url)
+ ffmpeg = which("ffmpeg")
+ if ffmpeg and time:
+ m, s = time.groups()[1:]
+ if m is None:
+ m = "0"
+
+ pyfile.name += " (starting at %s:%s)" % (m, s)
+ pyfile.name += file_suffix
+
+ filename = self.download(url)
+
+ if ffmpeg and time:
+ inputfile = filename + "_"
+ os.rename(filename, inputfile)
+
+ subprocess.call([
+ ffmpeg,
+ "-ss", "00:%s:%s" % (m, s),
+ "-i", inputfile,
+ "-vcodec", "copy",
+ "-acodec", "copy",
+ filename])
+ os.remove(inputfile)
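
A simplified, standalone sketch of the nearest-quality fallback in YoutubeCom.process(): when the desired itag is not offered, the offered itag with the closest quality rank wins (the plugin's reduce() additionally prefers the higher-quality side on ties). The ranks mirror the formats table above; the list of available itags is a made-up example.

quality_rank = {5: 1, 18: 2, 35: 6, 22: 8, 37: 9}  # itag -> quality index (subset of 'formats')
available = [5, 18, 35]                            # itags the video actually offers
desired = 22                                       # e.g. quality setting "hd"

nearest = min(available, key=lambda itag: abs(quality_rank[itag] - quality_rank[desired]))
assert nearest == 35  # rank 6 is the closest offered rank to the desired rank 8
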
diff --git a/pyload/plugin/hoster/ZDF.py b/pyload/plugin/hoster/ZDF.py
new file mode 100644
index 000000000..c02eadc23
--- /dev/null
+++ b/pyload/plugin/hoster/ZDF.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from xml.etree.ElementTree import fromstring
+
+from pyload.plugin.Hoster import Hoster
+
+
+# Based on zdfm by Roland Beermann (http://github.com/enkore/zdfm/)
+class ZDF(Hoster):
+ __name = "ZDF Mediathek"
+ __type = "hoster"
+ __version = "0.80"
+
+ __pattern = r'http://(?:www\.)?zdf\.de/ZDFmediathek/\D*(\d+)\D*'
+
+ __description = """ZDF.de hoster plugin"""
+ __license = "GPLv3"
+ __authors = []
+
+ XML_API = "http://www.zdf.de/ZDFmediathek/xmlservice/web/beitragsDetails?id=%i"
+
+
+ @staticmethod
+ def video_key(video):
+ return (
+ int(video.findtext("videoBitrate", "0")),
+ any(f.text == "progressive" for f in video.iter("facet")),
+ )
+
+
+ @staticmethod
+ def video_valid(video):
+ return video.findtext("url").startswith("http") and video.findtext("url").endswith(".mp4") and \
+ video.findtext("facets/facet").startswith("progressive")
+
+
+ @staticmethod
+ def get_id(url):
+ return int(re.search(r"\D*(\d{4,})\D*", url).group(1))
+
+
+ def process(self, pyfile):
+ xml = fromstring(self.load(self.XML_API % self.get_id(pyfile.url)))
+
+ status = xml.findtext("./status/statuscode")
+ if status != "ok":
+ self.fail(_("Error retrieving manifest"))
+
+ video = xml.find("video")
+ title = video.findtext("information/title")
+
+ pyfile.name = title
+
+ target_url = sorted((v for v in video.iter("formitaet") if self.video_valid(v)),
+ key=self.video_key)[-1].findtext("url")
+
+ self.download(target_url)
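
A standalone sketch of the stream selection in ZDF.process(): the formitaet entries are sorted by a (bitrate, progressive) key and the last, i.e. best, one is downloaded; the entries below are made-up examples.

streams = [
    {"url": "http://example.zdf.de/low.mp4",  "bitrate": 800,  "progressive": True},
    {"url": "http://example.zdf.de/hls.m3u8", "bitrate": 2500, "progressive": False},
    {"url": "http://example.zdf.de/high.mp4", "bitrate": 2500, "progressive": True},
]
best = sorted(streams, key=lambda v: (v["bitrate"], v["progressive"]))[-1]
assert best["url"].endswith("high.mp4")  # ties are broken in favour of progressive downloads
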
diff --git a/pyload/plugin/hoster/ZShareNet.py b/pyload/plugin/hoster/ZShareNet.py
new file mode 100644
index 000000000..1ab0cc996
--- /dev/null
+++ b/pyload/plugin/hoster/ZShareNet.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class ZShareNet(DeadHoster):
+ __name = "ZShareNet"
+ __type = "hoster"
+ __version = "0.21"
+
+ __pattern = r'https?://(?:ww[2w]\.)?zshares?\.net/.+'
+
+ __description = """ZShare.net hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("espes", ""),
+ ("Cptn Sandwich", "")]
+
+
+getInfo = create_getInfo(ZShareNet)
diff --git a/pyload/plugin/hoster/ZeveraCom.py b/pyload/plugin/hoster/ZeveraCom.py
new file mode 100644
index 000000000..3eeccc52c
--- /dev/null
+++ b/pyload/plugin/hoster/ZeveraCom.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Hoster import Hoster
+
+
+class ZeveraCom(Hoster):
+ __name = "ZeveraCom"
+ __type = "hoster"
+ __version = "0.21"
+
+ __pattern = r'http://(?:www\.)?zevera\.com/.*'
+
+ __description = """Zevera.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = 1
+
+
+ def process(self, pyfile):
+ if not self.account:
+ self.logError(_("Please enter your %s account or deactivate this plugin") % "zevera.com")
+ self.fail(_("No zevera.com account provided"))
+
+ self.logDebug("Old URL: %s" % pyfile.url)
+
+ if self.account.getAPIData(self.req, cmd="checklink", olink=pyfile.url) != "Alive":
+ self.fail(_("Offline or not downloadable - contact Zevera support"))
+
+ header = self.account.getAPIData(self.req, just_header=True, cmd="generatedownloaddirect", olink=pyfile.url)
+ if not "location" in header:
+ self.fail(_("Unable to initialize download"))
+
+ self.download(header['location'], disposition=True)
+
+ check = self.checkDownload({"error": 'action="ErrorDownload.aspx'})
+ if check == "error":
+ self.fail(_("Error response received - contact Zevera support"))
diff --git a/pyload/plugin/hoster/ZippyshareCom.py b/pyload/plugin/hoster/ZippyshareCom.py
new file mode 100644
index 000000000..1bf9338c3
--- /dev/null
+++ b/pyload/plugin/hoster/ZippyshareCom.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from os.path import join
+from urlparse import urljoin
+
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class ZippyshareCom(SimpleHoster):
+ __name = "ZippyshareCom"
+ __type = "hoster"
+ __version = "0.62"
+
+ __pattern = r'(?P<HOST>http://www\d{0,2}\.zippyshare\.com)/v(?:/|iew\.jsp.*key=)(?P<KEY>\d+)'
+
+ __description = """Zippyshare.com hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ NAME_PATTERN = r'("\d{6,}/"[ ]*\+.+?"/|<title>Zippyshare.com - )(?P<N>.+?)("|</title>)'
+ SIZE_PATTERN = r'>Size:.+?">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
+
+ OFFLINE_PATTERN = r'>File does not exist on this server<'
+
+ COOKIES = [("zippyshare.com", "ziplocale", "en")]
+
+
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = -1
+ self.resumeDownload = True
+
+
+ def handleFree(self):
+ url = self.get_link()
+ self.download(url)
+
+
+ def get_checksum(self):
+ try:
+ m = re.search(r'\+[ ]*\((\d+)[ ]*\%[ ]*(\d+)[ ]*\+[ ]*(\d+)[ ]*\%[ ]*(\d+)\)[ ]*\+', self.html)
+ if m:
+ a1, a2, c1, c2 = map(int, m.groups())
+ else:
+ a1, a2 = map(int, re.search(r'\(\'downloadB\'\).omg = (\d+)%(\d+)', self.html).groups())
+ c1, c2 = map(int, re.search(r'\(\'downloadB\'\).omg\) \* \((\d+)%(\d+)', self.html).groups())
+
+ b = (a1 % a2) * (c1 % c2)
+ except Exception:
+ self.error(_("Unable to calculate checksum"))
+ else:
+ return b + 18
+
+
+ def get_link(self):
+ checksum = self.get_checksum()
+ p_url = join("d", self.info['pattern']['KEY'], str(checksum), self.pyfile.name)
+ dl_link = urljoin(self.info['pattern']['HOST'], p_url)
+ return dl_link
+
+
+getInfo = create_getInfo(ZippyshareCom)
diff --git a/pyload/plugins/hoster/__init__.py b/pyload/plugin/hoster/__init__.py
index 40a96afc6..40a96afc6 100644
--- a/pyload/plugins/hoster/__init__.py
+++ b/pyload/plugin/hoster/__init__.py
diff --git a/pyload/plugins/internal/AbstractExtractor.py b/pyload/plugin/internal/AbstractExtractor.py
index b4fd10895..b4fd10895 100644
--- a/pyload/plugins/internal/AbstractExtractor.py
+++ b/pyload/plugin/internal/AbstractExtractor.py
diff --git a/pyload/plugin/internal/BasePlugin.py b/pyload/plugin/internal/BasePlugin.py
new file mode 100644
index 000000000..f4b8e8997
--- /dev/null
+++ b/pyload/plugin/internal/BasePlugin.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urllib import unquote
+from urlparse import urljoin, urlparse
+
+from pyload.network.HTTPRequest import BadHeader
+from pyload.plugin.internal.SimpleHoster import create_getInfo
+from pyload.plugin.Hoster import Hoster
+
+
+class BasePlugin(Hoster):
+ __name = "BasePlugin"
+ __type = "hoster"
+ __version = "0.25"
+
+ __pattern = r'^unmatchable$'
+
+ __description = """Base plugin when any other didnt fit"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ @classmethod
+ def getInfo(cls, url="", html=""): #@TODO: Move to hoster class in 0.4.10
+ return {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3 if url else 1, 'url': unquote(url) or ""}
+
+
+ def setup(self):
+ self.chunkLimit = -1
+ self.resumeDownload = True
+
+
+ def process(self, pyfile):
+ """main function"""
+
+ pyfile.name = self.getInfo(pyfile.url)['name']
+
+ if not pyfile.url.startswith("http"):
+ self.fail(_("No plugin matched"))
+
+ for _i in xrange(5):
+ try:
+ self.downloadFile(pyfile)
+
+ except BadHeader, e:
+                if e.code == 404:
+ self.offline()
+
+ elif e.code in (401, 403):
+ self.logDebug("Auth required", "Received HTTP status code: %d" % e.code)
+
+ account = self.core.accountManager.getAccountPlugin('Http')
+ servers = [x['login'] for x in account.getAllAccounts()]
+ server = urlparse(pyfile.url).netloc
+
+ if server in servers:
+ self.logDebug("Logging on to %s" % server)
+ self.req.addAuth(account.accounts[server]['password'])
+ else:
+ for pwd in self.getPassword().splitlines():
+ if ":" in pwd:
+ self.req.addAuth(pwd.strip())
+ break
+ else:
+ self.fail(_("Authorization required"))
+ else:
+ self.fail(e)
+ else:
+ break
+ else:
+ self.fail(_("No file downloaded")) #@TODO: Move to hoster class in 0.4.10
+
+        if self.checkDownload({'empty': re.compile(r"^$")}) == "empty": #@TODO: Move to hoster in 0.4.10
+ self.fail(_("Empty file"))
+
+
+ def downloadFile(self, pyfile):
+ url = pyfile.url
+
+ for i in xrange(1, 7): #@TODO: retrieve the pycurl.MAXREDIRS value set by req
+ header = self.load(url, ref=True, cookies=True, just_header=True, decode=True)
+
+ if 'location' not in header or not header['location']:
+ if 'code' in header and header['code'] not in (200, 201, 203, 206):
+ self.logDebug("Received HTTP status code: %d" % header['code'])
+ self.fail(_("File not found"))
+ else:
+ break
+
+ location = header['location']
+
+ self.logDebug("Redirect #%d to: %s" % (i, location))
+
+ if urlparse(location).scheme:
+ url = location
+ else:
+ p = urlparse(url)
+ base = "%s://%s" % (p.scheme, p.netloc)
+ url = urljoin(base, location)
+ else:
+ self.fail(_("Too many redirects"))
+
+ self.download(unquote(url), disposition=True)
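
A standalone sketch of the relative-redirect handling in BasePlugin.downloadFile() above: a Location header without a scheme is resolved against the current URL; host and paths are made-up examples.

from urlparse import urljoin, urlparse

url = "http://files.example.com/get/abc123"
location = "/dl/abc123?token=xyz"  # relative Location header

if urlparse(location).scheme:
    url = location
else:
    p = urlparse(url)
    url = urljoin("%s://%s" % (p.scheme, p.netloc), location)

assert url == "http://files.example.com/dl/abc123?token=xyz"
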
diff --git a/pyload/plugin/internal/DeadCrypter.py b/pyload/plugin/internal/DeadCrypter.py
new file mode 100644
index 000000000..fcc2e6eb2
--- /dev/null
+++ b/pyload/plugin/internal/DeadCrypter.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+
+from urllib import unquote
+from urlparse import urlparse
+
+from pyload.plugin.Crypter import Crypter as _Crypter
+from pyload.plugin.internal.SimpleCrypter import create_getInfo
+
+
+class DeadCrypter(_Crypter):
+ __name = "DeadCrypter"
+ __type = "crypter"
+ __version = "0.04"
+
+ __pattern = r'^unmatchable$'
+
+ __description = """Crypter is no longer available"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it")]
+
+
+ @classmethod
+ def getInfo(cls, url="", html=""):
+ return {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 1, 'url': url}
+
+
+ def setup(self):
+ self.pyfile.error = "Crypter is no longer available"
+ self.offline() #@TODO: self.offline("Crypter is no longer available")
+
+
+getInfo = create_getInfo(DeadCrypter)
diff --git a/pyload/plugin/internal/DeadHoster.py b/pyload/plugin/internal/DeadHoster.py
new file mode 100644
index 000000000..d8f57801a
--- /dev/null
+++ b/pyload/plugin/internal/DeadHoster.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+
+from urllib import unquote
+from urlparse import urlparse
+
+from pyload.plugin.Hoster import Hoster as _Hoster
+from pyload.plugin.internal.SimpleHoster import create_getInfo
+
+
+class DeadHoster(_Hoster):
+ __name = "DeadHoster"
+ __type = "hoster"
+ __version = "0.14"
+
+ __pattern = r'^unmatchable$'
+
+ __description = """Hoster is no longer available"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+ @classmethod
+ def getInfo(cls, url="", html=""):
+ return {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 1, 'url': url}
+
+
+ def setup(self):
+ self.pyfile.error = "Hoster is no longer available"
+ self.offline() #@TODO: self.offline("Hoster is no longer available")
+
+
+getInfo = create_getInfo(DeadHoster)
diff --git a/pyload/plugin/internal/MultiHoster.py b/pyload/plugin/internal/MultiHoster.py
new file mode 100644
index 000000000..93fa13561
--- /dev/null
+++ b/pyload/plugin/internal/MultiHoster.py
@@ -0,0 +1,202 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Addon import Addon
+from pyload.utils import remove_chars
+
+
+class MultiHoster(Addon):
+ __name = "MultiHoster"
+ __type = "addon"
+ __version = "0.20"
+
+ __description = """Base multi-hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("pyLoad Team", "admin@pyload.org")]
+
+
+ HOSTER_REPLACEMENTS = [("1fichier.com", "onefichier.com"), ("2shared.com", "twoshared.com"),
+ ("4shared.com", "fourshared.com"), ("cloudnator.com", "shragle.com"),
+ ("easy-share.com", "crocko.com"), ("freakshare.net", "freakshare.com"),
+ ("hellshare.com", "hellshare.cz"), ("ifile.it", "filecloud.io"),
+ ("putlocker.com", "firedrive.com"), ("share-rapid.cz", "multishare.cz"),
+ ("sharerapid.cz", "multishare.cz"), ("ul.to", "uploaded.to"),
+ ("uploaded.net", "uploaded.to")]
+ HOSTER_EXCLUDED = []
+
+
+ def setup(self):
+ self.interval = 12 * 60 * 60 #: reload hosters every 12h
+ self.hosters = []
+ self.supported = []
+ self.new_supported = []
+
+
+ def getConfig(self, option, default=''):
+ """getConfig with default value - subclass may not implements all config options"""
+ try:
+ # Fixed loop due to getConf deprecation in 0.4.10
+ return super(MultiHoster, self).getConfig(option)
+ except KeyError:
+ return default
+
+
+ def getHosterCached(self):
+ if not self.hosters:
+ try:
+ hosterSet = self.toHosterSet(self.getHoster()) - set(self.HOSTER_EXCLUDED)
+ except Exception, e:
+ self.logError(e)
+ return []
+
+ try:
+ configMode = self.getConfig('hosterListMode', 'all')
+ if configMode in ("listed", "unlisted"):
+ configSet = self.toHosterSet(self.getConfig('hosterList', '').replace('|', ',').replace(';', ',').split(','))
+
+ if configMode == "listed":
+ hosterSet &= configSet
+ else:
+ hosterSet -= configSet
+
+ except Exception, e:
+ self.logError(e)
+
+ self.hosters = list(hosterSet)
+
+ return self.hosters
+
+
+ def toHosterSet(self, hosters):
+ hosters = set((str(x).strip().lower() for x in hosters))
+
+ for rep in self.HOSTER_REPLACEMENTS:
+ if rep[0] in hosters:
+ hosters.remove(rep[0])
+ hosters.add(rep[1])
+
+ hosters.discard('')
+ return hosters
+
+
+ def getHoster(self):
+ """Load list of supported hoster
+
+ :return: List of domain names
+ """
+ raise NotImplementedError
+
+
+ def activate(self):
+ if self.cb:
+ self.core.scheduler.removeJob(self.cb)
+
+ self.setConfig("activated", True) #: config not in sync after plugin reload
+
+ cfg_interval = self.getConfig("interval", None) #: reload interval in hours
+ if cfg_interval is not None:
+ self.interval = cfg_interval * 60 * 60
+
+ if self.interval:
+ self._periodical()
+ else:
+ self.periodical()
+
+
+ def periodical(self):
+ """reload hoster list periodically"""
+ self.logInfo(_("Reloading supported hoster list"))
+
+ old_supported = self.supported
+ self.supported = []
+ self.new_supported = []
+ self.hosters = []
+
+ self.overridePlugins()
+
+ old_supported = [hoster for hoster in old_supported if hoster not in self.supported]
+ if old_supported:
+ self.logDebug("UNLOAD", ", ".join(old_supported))
+ for hoster in old_supported:
+ self.unloadHoster(hoster)
+
+
+ def overridePlugins(self):
+ pluginMap = dict((name.lower(), name) for name in self.core.pluginManager.hosterPlugins.keys())
+ accountList = [name.lower() for name, data in self.core.accountManager.accounts.iteritems() if data]
+ excludedList = []
+
+ for hoster in self.getHosterCached():
+ name = remove_chars(hoster.lower(), "-.")
+
+ if name in accountList:
+ excludedList.append(hoster)
+ else:
+ if name in pluginMap:
+ self.supported.append(pluginMap[name])
+ else:
+ self.new_supported.append(hoster)
+
+ if not self.supported and not self.new_supported:
+ self.logError(_("No Hoster loaded"))
+ return
+
+ module = self.core.pluginManager.getPlugin(self.__type, self.__name)
+ klass = getattr(module, self.__name)
+
+        # inject this plugin as the handler for the supported hosters
+ self.logDebug("Overwritten Hosters", ", ".join(sorted(self.supported)))
+ for hoster in self.supported:
+ dict = self.core.pluginManager.hosterPlugins[hoster]
+ dict['new_module'] = module
+ dict['new_name'] = self.__name
+
+ if excludedList:
+ self.logInfo(_("The following hosters were not overwritten - account exists"), ", ".join(sorted(excludedList)))
+
+ if self.new_supported:
+ self.logDebug("New Hosters", ", ".join(sorted(self.new_supported)))
+
+ # create new regexp
+ regexp = r'.*(%s).*' % "|".join([x.replace(".", "\.") for x in self.new_supported])
+ if hasattr(klass, "__pattern") and isinstance(klass.__pattern, basestring) and '://' in klass.__pattern:
+ regexp = r'%s|%s' % (klass.__pattern, regexp)
+
+ self.logDebug("Regexp", regexp)
+
+ dict = self.core.pluginManager.hosterPlugins[self.__name]
+ dict['pattern'] = regexp
+ dict['re'] = re.compile(regexp)
+
+
+ def unloadHoster(self, hoster):
+ dict = self.core.pluginManager.hosterPlugins[hoster]
+ if "module" in dict:
+ del dict['module']
+
+ if "new_module" in dict:
+ del dict['new_module']
+ del dict['new_name']
+
+
+ def deactivate(self):
+ """Remove override for all hosters. Scheduler job is removed by AddonManager"""
+ for hoster in self.supported:
+ self.unloadHoster(hoster)
+
+ # reset pattern
+ klass = getattr(self.core.pluginManager.getPlugin(self.__type, self.__name), self.__name)
+ dict = self.core.pluginManager.hosterPlugins[self.__name]
+ dict['pattern'] = getattr(klass, "__pattern", r'^unmatchable$')
+ dict['re'] = re.compile(dict['pattern'])
+
+
+ def downloadFailed(self, pyfile):
+ """remove plugin override if download fails but not if file is offline/temp.offline"""
+ if pyfile.hasStatus("failed") and self.getConfig("unloadFailing", True):
+ hdict = self.core.pluginManager.hosterPlugins[pyfile.pluginname]
+ if "new_name" in hdict and hdict['new_name'] == self.__name:
+ self.logDebug("Unload MultiHoster", pyfile.pluginname, hdict)
+ self.unloadHoster(pyfile.pluginname)
+ pyfile.setStatus("queued")
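
A hypothetical concrete addon built on the MultiHoster base above, showing the one method a subclass must provide; the service name, URL and JSON layout are assumptions, not a real API.

from pyload.network.RequestFactory import getURL
from pyload.plugin.internal.MultiHoster import MultiHoster
from pyload.utils import json_loads


class ExampleMultiCom(MultiHoster):
    __name = "ExampleMultiCom"
    __type = "addon"
    __version = "0.01"

    __description = """Example-multi.com multi-hoster addon (illustrative only)"""
    __license = "GPLv3"
    __authors = [("example", "example@example.org")]


    def getHoster(self):
        # MultiHoster only needs the list of supported domain names;
        # toHosterSet() normalizes them and HOSTER_REPLACEMENTS are applied afterwards
        page = getURL("http://api.example-multi.com/hosters.json")
        return json_loads(page)["hosters"]
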
diff --git a/pyload/plugin/internal/SimpleCrypter.py b/pyload/plugin/internal/SimpleCrypter.py
new file mode 100644
index 000000000..30b455545
--- /dev/null
+++ b/pyload/plugin/internal/SimpleCrypter.py
@@ -0,0 +1,152 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urlparse
+
+from pyload.plugin.Crypter import Crypter
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo, replace_patterns, set_cookies
+from pyload.utils import fixup
+
+
+class SimpleCrypter(Crypter, SimpleHoster):
+ __name = "SimpleCrypter"
+ __type = "crypter"
+ __version = "0.32"
+
+ __pattern = r'^unmatchable$'
+ __config = [("use_subfolder", "bool", "Save package to subfolder", True), #: Overrides core.config['general']['folder_per_package']
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+ __description = """Simple decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("stickell", "l.stickell@yahoo.it"),
+ ("zoidberg", "zoidberg@mujmail.cz"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ """
+ Following patterns should be defined by each crypter:
+
+ LINK_PATTERN: group(1) must be a download link or a regex to catch more links
+ example: LINK_PATTERN = r'<div class="link"><a href="(.+?)"'
+
+ NAME_PATTERN: (optional) folder name or webpage title
+ example: NAME_PATTERN = r'<title>Files of: (?P<N>[^<]+) folder</title>'
+
+    OFFLINE_PATTERN: (optional) Matches when the file is no longer available online
+ example: OFFLINE_PATTERN = r'File (deleted|not found)'
+
+ TEMP_OFFLINE_PATTERN: (optional) Checks if the file is temporarily offline
+ example: TEMP_OFFLINE_PATTERN = r'Server maintainance'
+
+
+ You can override the getLinks method if you need a more sophisticated way to extract the links.
+
+
+    If the links are split across multiple pages, you can define the PAGES_PATTERN regex:
+
+ PAGES_PATTERN: (optional) group(1) should be the number of overall pages containing the links
+ example: PAGES_PATTERN = r'Pages: (\d+)'
+
+ and its loadPage method:
+
+
+ def loadPage(self, page_n):
+ return the html of the page number page_n
+ """
+
+ LINK_PATTERN = None
+
+ NAME_REPLACEMENTS = [("&#?\w+;", fixup)]
+ URL_REPLACEMENTS = []
+
+ TEXT_ENCODING = False #: Set to True or encoding name if encoding in http header is not correct
+ COOKIES = True #: or False or list of tuples [(domain, name, value)]
+
+ LOGIN_ACCOUNT = False
+ LOGIN_PREMIUM = False
+
+
+ def prepare(self):
+ self.info = {}
+ self.links = []
+
+ if self.LOGIN_ACCOUNT and not self.account:
+ self.fail(_("Required account not found"))
+
+ if self.LOGIN_PREMIUM and not self.premium:
+ self.fail(_("Required premium account not found"))
+
+ self.req.setOption("timeout", 120)
+
+ if isinstance(self.COOKIES, list):
+ set_cookies(self.req.cj, self.COOKIES)
+
+ self.pyfile.url = replace_patterns(self.pyfile.url, self.URL_REPLACEMENTS)
+
+
+ def decrypt(self, pyfile):
+ self.prepare()
+
+ self.preload()
+
+ if self.html is None:
+ self.fail(_("No html retrieved"))
+
+ self.checkInfo()
+
+ self.links = self.getLinks()
+
+ if hasattr(self, 'PAGES_PATTERN') and hasattr(self, 'loadPage'):
+ self.handleMultiPages()
+
+ self.logDebug("Package has %d links" % len(self.links))
+
+ if self.links:
+ self.packages = [(self.info['name'], self.links, self.info['folder'])]
+
+
+ def checkStatus(self):
+ status = self.info['status']
+
+        if status == 1:
+ self.offline()
+
+        elif status == 6:
+ self.tempOffline()
+
+
+ def checkNameSize(self):
+ name = self.info['name']
+ url = self.info['url']
+
+ if name and name != url:
+ self.pyfile.name = name
+ else:
+            self.pyfile.name = name = self.info['name'] = urlparse(url).path.split('/')[-1]
+
+ folder = self.info['folder'] = name
+
+ self.logDebug("File name: %s" % name,
+ "File folder: %s" % folder)
+
+
+ def getLinks(self):
+ """
+ Returns the links extracted from self.html
+ You should override this only if it's impossible to extract links using only the LINK_PATTERN.
+ """
+ return re.findall(self.LINK_PATTERN, self.html)
+
+
+ def handleMultiPages(self):
+ try:
+ m = re.search(self.PAGES_PATTERN, self.html)
+ pages = int(m.group(1))
+ except Exception:
+ pages = 1
+
+ for p in xrange(2, pages + 1):
+ self.html = self.loadPage(p)
+ self.links += self.getLinks()
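
A hypothetical decrypter built on SimpleCrypter, wiring up the pattern hooks documented in the docstring above; the site, URL pattern and every regex are assumptions for illustration.

from pyload.plugin.internal.SimpleCrypter import SimpleCrypter


class ExampleFolderCom(SimpleCrypter):
    __name = "ExampleFolderCom"
    __type = "crypter"
    __version = "0.01"

    __pattern = r'http://(?:www\.)?examplefolder\.com/folder/\w+'

    __description = """Examplefolder.com decrypter plugin (illustrative only)"""
    __license = "GPLv3"
    __authors = [("example", "example@example.org")]


    LINK_PATTERN = r'<a class="file" href="(.+?)"'
    NAME_PATTERN = r'<title>Folder: (?P<N>.+?)</title>'
    OFFLINE_PATTERN = r'>Folder not found<'

    PAGES_PATTERN = r'Pages: (\d+)'


    def loadPage(self, page_n):
        # return the html of page number page_n
        return self.load("%s?page=%d" % (self.pyfile.url, page_n))
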
diff --git a/pyload/plugin/internal/SimpleHoster.py b/pyload/plugin/internal/SimpleHoster.py
new file mode 100644
index 000000000..9355cfe86
--- /dev/null
+++ b/pyload/plugin/internal/SimpleHoster.py
@@ -0,0 +1,530 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import time
+from urllib import unquote
+from urlparse import urljoin, urlparse
+
+from pyload.datatype.File import statusMap as _statusMap
+from pyload.network.CookieJar import CookieJar
+from pyload.network.HTTPRequest import BadHeader
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Hoster import Hoster
+from pyload.utils import fixup, formatSize, parseFileSize
+
+
+#@TODO: Adapt and move to PyFile in 0.4.10
+statusMap = dict((v, k) for k, v in _statusMap.iteritems())
+
+
+def replace_patterns(string, ruleslist):
+ for r in ruleslist:
+ rf, rt = r
+ string = re.sub(rf, rt, string)
+ return string
+
+
+def set_cookies(cj, cookies):
+ for cookie in cookies:
+ if isinstance(cookie, tuple) and len(cookie) == 3:
+ domain, name, value = cookie
+ cj.setCookie(domain, name, value)
+
+
+def parseHtmlTagAttrValue(attr_name, tag):
+ m = re.search(r"%s\s*=\s*([\"']?)((?<=\")[^\"]+|(?<=')[^']+|[^>\s\"'][^>\s]*)\1" % attr_name, tag, re.I)
+ return m.group(2) if m else None
+
+
+def parseHtmlForm(attr_str, html, input_names={}):
+ for form in re.finditer(r"(?P<TAG><form[^>]*%s[^>]*>)(?P<CONTENT>.*?)</?(form|body|html)[^>]*>" % attr_str,
+ html, re.S | re.I):
+ inputs = {}
+ action = parseHtmlTagAttrValue("action", form.group('TAG'))
+
+ for inputtag in re.finditer(r'(<(input|textarea)[^>]*>)([^<]*(?=</\2)|)', form.group('CONTENT'), re.S | re.I):
+ name = parseHtmlTagAttrValue("name", inputtag.group(1))
+ if name:
+ value = parseHtmlTagAttrValue("value", inputtag.group(1))
+ if not value:
+ inputs[name] = inputtag.group(3) or ''
+ else:
+ inputs[name] = value
+
+ if input_names:
+ # check input attributes
+ for key, val in input_names.iteritems():
+ if key in inputs:
+ if isinstance(val, basestring) and inputs[key] == val:
+ continue
+ elif isinstance(val, tuple) and inputs[key] in val:
+ continue
+ elif hasattr(val, "search") and re.match(val, inputs[key]):
+ continue
+                        break #: attribute value does not match
+ else:
+                    break #: attribute name does not match
+ else:
+ return action, inputs #: passed attribute check
+ else:
+ # no attribute check
+ return action, inputs
+
+ return {}, None #: no matching form found
+
+
+#: Deprecated
+def parseFileInfo(plugin, url="", html=""):
+ info = plugin.getInfo(url, html)
+ return info['name'], info['size'], info['status'], info['url']
+
+
+#@TODO: Remove in 0.4.10
+#@NOTE: Every plugin must have own parseInfos classmethod to work with 0.4.10
+def create_getInfo(plugin):
+ return lambda urls: [(info['name'], info['size'], info['status'], info['url']) for info in plugin.parseInfos(urls)]
+
+
+def timestamp():
+ return int(time() * 1000)
+
+
+#@TODO: Move to hoster class in 0.4.10
+def _isDirectLink(self, url, resumable=True):
+ header = self.load(url, ref=True, just_header=True, decode=True)
+
+ if not 'location' in header or not header['location']:
+ return ""
+
+ location = header['location']
+
+ resumable = False #@NOTE: Testing...
+
+ if resumable: #: sometimes http code may be wrong...
+ if 'location' in self.load(location, ref=True, cookies=True, just_header=True, decode=True):
+ return ""
+ else:
+ if not 'code' in header or header['code'] != 302:
+ return ""
+
+ if urlparse(location).scheme:
+ link = location
+ else:
+ p = urlparse(url)
+ base = "%s://%s" % (p.scheme, p.netloc)
+ link = urljoin(base, location)
+
+ return link
+
+
+class SimpleHoster(Hoster):
+ __name = "SimpleHoster"
+ __type = "hoster"
+ __version = "0.72"
+
+ __pattern = r'^unmatchable$'
+
+ __description = """Simple hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ """
+ Info patterns should be defined by each hoster:
+
+ INFO_PATTERN: (optional) Name and Size of the file
+ example: INFO_PATTERN = r'(?P<N>file_name) (?P<S>file_size) (?P<U>size_unit)'
+ or
+ NAME_PATTERN: (optional) Name that will be set for the file
+ example: NAME_PATTERN = r'(?P<N>file_name)'
+ SIZE_PATTERN: (optional) Size that will be checked for the file
+ example: SIZE_PATTERN = r'(?P<S>file_size) (?P<U>size_unit)'
+
+ HASHSUM_PATTERN: (optional) Hash code and type of the file
+ example: HASHSUM_PATTERN = r'(?P<H>hash_code) (?P<T>MD5)'
+
+    OFFLINE_PATTERN: (optional) Matches when the file is no longer available online
+ example: OFFLINE_PATTERN = r'File (deleted|not found)'
+
+ TEMP_OFFLINE_PATTERN: (optional) Check if the file is temporarily offline
+ example: TEMP_OFFLINE_PATTERN = r'Server (maintenance|maintainance)'
+
+
+ Error handling patterns are all optional:
+
+ WAIT_PATTERN: (optional) Detect waiting time
+ example: WAIT_PATTERN = r''
+
+ PREMIUM_ONLY_PATTERN: (optional) Check if the file can be downloaded only with a premium account
+ example: PREMIUM_ONLY_PATTERN = r'Premium account required'
+
+ ERROR_PATTERN: (optional) Detect any error preventing download
+ example: ERROR_PATTERN = r''
+
+
+    Instead of overriding the handleFree and handlePremium methods, you can define the following patterns for direct downloads:
+
+ LINK_FREE_PATTERN: (optional) group(1) should be the direct link for free download
+ example: LINK_FREE_PATTERN = r'<div class="link"><a href="(.+?)"'
+
+ LINK_PREMIUM_PATTERN: (optional) group(1) should be the direct link for premium download
+ example: LINK_PREMIUM_PATTERN = r'<div class="link"><a href="(.+?)"'
+ """
+
+ NAME_REPLACEMENTS = [("&#?\w+;", fixup)]
+ SIZE_REPLACEMENTS = []
+ URL_REPLACEMENTS = []
+
+ TEXT_ENCODING = False #: Set to True or encoding name if encoding value in http header is not correct
+ COOKIES = True #: or False or list of tuples [(domain, name, value)]
+ FORCE_CHECK_TRAFFIC = False #: Set to True to force checking traffic left for premium account
+ CHECK_DIRECT_LINK = None #: Set to True to check for direct link, set to None to do it only if self.account is True
+    MULTI_HOSTER = False #: Set to True to leech links from other hosters (according to its multihoster hook, if available)
+
+
+ @classmethod
+ def parseInfos(cls, urls):
+ for url in urls:
+ url = replace_patterns(url, cls.URL_REPLACEMENTS)
+ yield cls.getInfo(url)
+
+
+ @classmethod
+ def getInfo(cls, url="", html=""):
+ info = {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3, 'url': url}
+
+ if not html:
+ try:
+ if not url:
+ info['error'] = "missing url"
+ info['status'] = 1
+ raise
+
+ try:
+ html = getURL(url, cookies=cls.COOKIES, decode=not cls.TEXT_ENCODING)
+
+ if isinstance(cls.TEXT_ENCODING, basestring):
+ html = unicode(html, cls.TEXT_ENCODING)
+
+ except BadHeader, e:
+ info['error'] = "%d: %s" % (e.code, e.content)
+
+                    if e.code == 404:
+ info['status'] = 1
+ raise
+
+                    if e.code == 503:
+ info['status'] = 6
+ raise
+ except Exception:
+ return info
+
+ online = False
+
+ if hasattr(cls, "OFFLINE_PATTERN") and re.search(cls.OFFLINE_PATTERN, html):
+ info['status'] = 1
+
+ elif hasattr(cls, "TEMP_OFFLINE_PATTERN") and re.search(cls.TEMP_OFFLINE_PATTERN, html):
+ info['status'] = 6
+
+ else:
+ try:
+ info['pattern'] = re.match(cls.__pattern, url).groupdict() #: pattern groups will be saved here, please save api stuff to info['api']
+ except Exception:
+ pass
+
+ for pattern in ("INFO_PATTERN", "NAME_PATTERN", "SIZE_PATTERN", "HASHSUM_PATTERN"):
+ try:
+ attr = getattr(cls, pattern)
+ dict = re.search(attr, html).groupdict()
+
+ if all(True for k in dict if k not in info['pattern']):
+ info['pattern'].update(dict)
+
+ except AttributeError:
+ continue
+
+ else:
+ online = True
+
+ if online:
+ info['status'] = 2
+
+ if 'N' in info['pattern']:
+ info['name'] = replace_patterns(unquote(info['pattern']['N'].strip()), cls.NAME_REPLACEMENTS)
+
+ if 'S' in info['pattern']:
+                    size = replace_patterns(info['pattern']['S'] + info['pattern']['U'] if 'U' in info['pattern'] else info['pattern']['S'],
+ cls.SIZE_REPLACEMENTS)
+ info['size'] = parseFileSize(size)
+
+ elif isinstance(info['size'], basestring):
+ unit = info['units'] if 'units' in info else None
+ info['size'] = parseFileSize(info['size'], unit)
+
+ if 'H' in info['pattern']:
+ hashtype = info['pattern']['T'] if 'T' in info['pattern'] else "hash"
+ info[hashtype] = info['pattern']['H']
+
+ return info
+
+
+ def setup(self):
+ self.resumeDownload = self.multiDL = self.premium
+
+
+ def prepare(self):
+ self.info = {}
+ self.link = "" #@TODO: Move to hoster class in 0.4.10
+ self.directDL = False #@TODO: Move to hoster class in 0.4.10
+ self.multihost = False #@TODO: Move to hoster class in 0.4.10
+
+ self.req.setOption("timeout", 120)
+
+ if isinstance(self.COOKIES, list):
+ set_cookies(self.req.cj, self.COOKIES)
+
+ if (self.MULTI_HOSTER
+ and (self.__pattern != self.core.pluginManager.hosterPlugins[self.__name]['pattern']
+ or re.match(self.__pattern, self.pyfile.url) is None)):
+
+ self.logInfo("Multi hoster detected")
+
+ if self.account:
+ self.multihost = True
+ return
+ else:
+ self.fail(_("Only registered or premium users can use url leech feature"))
+
+ if self.CHECK_DIRECT_LINK is None:
+ self.directDL = bool(self.account)
+
+ self.pyfile.url = replace_patterns(self.pyfile.url, self.URL_REPLACEMENTS)
+
+
+ def preload(self):
+ self.html = self.load(self.pyfile.url, cookies=bool(self.COOKIES), decode=not self.TEXT_ENCODING)
+
+ if isinstance(self.TEXT_ENCODING, basestring):
+ self.html = unicode(self.html, self.TEXT_ENCODING)
+
+
+ def process(self, pyfile):
+ self.prepare()
+
+ if self.multihost:
+ self.logDebug("Looking for leeched download link...")
+ self.handleMulti()
+
+ elif self.directDL:
+ self.logDebug("Looking for direct download link...")
+ self.handleDirect()
+
+ if not self.link:
+ self.preload()
+
+ if self.html is None:
+ self.fail(_("No html retrieved"))
+
+ self.checkErrors()
+
+ premium_only = 'error' in self.info and self.info['error'] == "premium-only"
+
+ self._updateInfo(self.getInfo(pyfile.url, self.html))
+
+ self.checkNameSize()
+
+        #: Usually premium-only pages don't show any file information
+ if not premium_only:
+ self.checkStatus()
+
+ if self.premium and (not self.FORCE_CHECK_TRAFFIC or self.checkTrafficLeft()):
+ self.logDebug("Handled as premium download")
+ self.handlePremium()
+
+ elif premium_only:
+ self.fail(_("Link require a premium account to be handled"))
+
+ else:
+ self.logDebug("Handled as free download")
+ self.handleFree()
+
+ self.downloadLink(self.link)
+ self.checkFile()
+
+
+ def downloadLink(self, link):
+ if not link:
+ return
+
+ self.download(link, disposition=True)
+
+
+ def checkFile(self):
+        if self.checkDownload({'empty': re.compile(r"^$")}) == "empty": #@TODO: Move to hoster in 0.4.10
+ self.fail(_("Empty file"))
+
+
+ def checkErrors(self):
+ if hasattr(self, 'ERROR_PATTERN'):
+ m = re.search(self.ERROR_PATTERN, self.html)
+ if m:
+ errmsg = self.info['error'] = m.group(1)
+ self.error(errmsg)
+
+ if hasattr(self, 'PREMIUM_ONLY_PATTERN'):
+ m = re.search(self.PREMIUM_ONLY_PATTERN, self.html)
+ if m:
+ self.info['error'] = "premium-only"
+ return
+
+ if hasattr(self, 'WAIT_PATTERN'):
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ wait_time = sum([int(v) * {"hr": 3600, "hour": 3600, "min": 60, "sec": 1}[u.lower()] for v, u in
+ re.findall(r'(\d+)\s*(hr|hour|min|sec)', m.group(0), re.I)])
+ self.wait(wait_time, False)
+ return
+
+ self.info.pop('error', None)
+
+
+ def checkStatus(self):
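+ # status codes follow pyLoad's statusMap (assumed): 1 = offline, 2 = online, 6 = temporarily offline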
+ status = self.info['status']
+
+ if status == 1:
+ self.offline()
+
+ elif status == 6:
+ self.tempOffline()
+
+ elif status != 2:
+ self.logInfo(_("File status: %s") % statusMap[status],
+ _("File info: %s") % self.info)
+ self.error(_("No file info retrieved"))
+
+
+ def checkNameSize(self):
+ name = self.info['name']
+ size = self.info['size']
+ url = self.info['url']
+
+ if name and name != url:
+ self.pyfile.name = name
+ else:
+ self.pyfile.name = name = self.info['name'] = urlparse(name).path.split('/')[-1]
+
+ if size > 0:
+ self.pyfile.size = size
+ else:
+ size = "Unknown"
+
+ self.logDebug("File name: %s" % name,
+ "File size: %s" % size)
+
+
+ def checkInfo(self):
+ self.checkErrors()
+
+ self._updateInfo(self.getInfo(self.pyfile.url, self.html or ""))
+
+ self.checkNameSize()
+ self.checkStatus()
+
+
+ #: Deprecated
+ def getFileInfo(self):
+ self.info = {}
+ self.checkInfo()
+ return self.info
+
+
+ def _updateInfo(self, info):
+ self.logDebug(_("File info (before update): %s") % self.info)
+ self.info.update(info)
+ self.logDebug(_("File info (after update): %s") % self.info)
+
+
+ def handleDirect(self):
+ link = _isDirectLink(self, self.pyfile.url, self.resumeDownload)
+
+ if link:
+ self.logInfo(_("Direct download link detected"))
+
+ self.link = link
+
+ self._updateInfo(self.getInfo(self.pyfile.url))
+ self.checkNameSize()
+ else:
+ self.logDebug(_("Direct download link not found"))
+
+
+ def handleMulti(self): #: Multi-hoster handler
+ pass
+
+
+ def handleFree(self):
+ if not hasattr(self, 'LINK_FREE_PATTERN'):
+ self.fail(_("Free download not implemented"))
+
+ try:
+ m = re.search(self.LINK_FREE_PATTERN, self.html)
+ if m is None:
+ self.error(_("Free download link not found"))
+
+ self.link = m.group(1)
+
+ except Exception, e:
+ self.fail(e)
+
+
+ def handlePremium(self):
+ if not hasattr(self, 'LINK_PREMIUM_PATTERN'):
+ self.fail(_("Premium download not implemented"))
+
+ try:
+ m = re.search(self.LINK_PREMIUM_PATTERN, self.html)
+ if m is None:
+ self.error(_("Premium download link not found"))
+
+ self.link = m.group(1)
+
+ except Exception, e:
+ self.fail(e)
+
+
+ def longWait(self, wait_time=None, max_tries=3):
+ if wait_time and isinstance(wait_time, (int, long, float)):
+ time_str = "%dh %dm" % divmod(wait_time / 60, 60)
+ else:
+ wait_time = 900
+ time_str = _("(unknown time)")
+ max_tries = 100
+
+ self.logInfo(_("Download limit reached, reconnect or wait %s") % time_str)
+
+ self.setWait(wait_time, True)
+ self.wait()
+ self.retry(max_tries=max_tries, reason=_("Download limit reached"))
+
+
+ def parseHtmlForm(self, attr_str="", input_names={}):
+ return parseHtmlForm(attr_str, self.html, input_names)
+
+
+ def checkTrafficLeft(self):
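+ # trafficleft semantics (assumed): None = no info (treat as exhausted), -1 = unlimited, otherwise an amount compared against the file size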
+ traffic = self.account.getAccountInfo(self.user, True)['trafficleft']
+
+ if traffic is None:
+ return False
+ elif traffic == -1:
+ return True
+ else:
+ self.logInfo(_("Filesize: %s, Traffic left for user %s: %s") % (formatSize(size), self.user, formatSize(traffic)))
+ return self.pyfile.size <= traffic
+
+
+ def error(self, reason="", type="parse"):
+ return super(SimpleHoster, self).error(reason, type)
diff --git a/pyload/plugin/internal/UnRar.py b/pyload/plugin/internal/UnRar.py
new file mode 100644
index 000000000..404ee906b
--- /dev/null
+++ b/pyload/plugin/internal/UnRar.py
@@ -0,0 +1,221 @@
+# -*- coding: utf-8 -*-
+
+import os
+import re
+
+from glob import glob
+from os.path import basename, join
+from string import digits
+from subprocess import Popen, PIPE
+
+from pyload.plugin.internal.AbstractExtractor import AbtractExtractor, WrongPassword, ArchiveError, CRCError
+from pyload.utils import safe_join, decode
+
+
+def renice(pid, value):
+ if os.name != "nt" and value:
+ try:
+ Popen(["renice", str(value), str(pid)], stdout=PIPE, stderr=PIPE, bufsize=-1)
+ except Exception:
+ print "Renice failed"
+
+
+class UnRar(AbtractExtractor):
+ __name = "UnRar"
+ __version = "0.19"
+
+ __description = """Rar extractor plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org")]
+
+
+ CMD = "unrar"
+
+ # there are some more uncovered rar formats
+ re_version = re.compile(r'UNRAR ([\w .]+?) freeware')
+ re_splitfile = re.compile(r'(.*)\.part(\d+)\.rar$', re.I)
+ re_partfiles = re.compile(r'.*\.(rar|r\d+)', re.I)
+ re_filelist = re.compile(r'(.+)\s+(\d+)\s+(\d+)\s+')
+ re_filelist5 = re.compile(r'(.+)\s+(\d+)\s+\d\d-\d\d-\d\d\s+\d\d:\d\d\s+(.+)')
+ re_wrongpwd = re.compile(r'(Corrupt file or wrong password|password incorrect)', re.I)
+
+
+ @staticmethod
+ def checkDeps():
+ if os.name == "nt":
+ UnRar.CMD = join(pypath, "UnRAR.exe")
+ p = Popen([UnRar.CMD], stdout=PIPE, stderr=PIPE)
+ p.communicate()
+ else:
+ try:
+ p = Popen([UnRar.CMD], stdout=PIPE, stderr=PIPE)
+ p.communicate()
+ except OSError:
+
+ # fallback to rar
+ UnRar.CMD = "rar"
+ p = Popen([UnRar.CMD], stdout=PIPE, stderr=PIPE)
+ p.communicate()
+
+ return True
+
+
+ @staticmethod
+ def getTargets(files_ids):
+ result = []
+
+ for file, id in files_ids:
+ if not file.endswith(".rar"):
+ continue
+
+ match = UnRar.re_splitfile.findall(file)
+ if match:
+ # only add first parts
+ if int(match[0][1]) == 1:
+ result.append((file, id))
+ else:
+ result.append((file, id))
+
+ return result
+
+
+ def init(self):
+ self.passwordProtected = False
+ self.headerProtected = False #: list files will not work without password
+ self.smallestFile = None #: small file to test passwords
+ self.password = "" #: save the correct password
+
+
+ def checkArchive(self):
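+ # returns True if the archive is password protected, False otherwise; raises ArchiveError if the archive is empty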
+ p = self.call_unrar("l", "-v", self.file)
+ out, err = p.communicate()
+ if self.re_wrongpwd.search(err):
+ self.passwordProtected = True
+ self.headerProtected = True
+ return True
+
+ # output only used to check if passworded files are present
+ if self.re_version.search(out):
+ for attr, size, name in self.re_filelist5.findall(out):
+ if attr.startswith("*"):
+ self.passwordProtected = True
+ return True
+ else:
+ for name, size, packed in self.re_filelist.findall(out):
+ if name.startswith("*"):
+ self.passwordProtected = True
+ return True
+
+ self.listContent()
+ if not self.files:
+ raise ArchiveError("Empty Archive")
+
+ return False
+
+
+ def checkPassword(self, password):
+ # at this point we can only verify header protected files
+ if self.headerProtected:
+ p = self.call_unrar("l", "-v", self.file, password=password)
+ out, err = p.communicate()
+ if self.re_wrongpwd.search(err):
+ return False
+
+ return True
+
+
+ def extract(self, progress, password=None):
+ command = "x" if self.fullpath else "e"
+
+ p = self.call_unrar(command, self.file, self.out, password=password)
+ renice(p.pid, self.renice)
+
+ progress(0)
+ progressstring = ""
+ while True:
+ c = p.stdout.read(1)
+ # quit loop on eof
+ if not c:
+ break
+ # reading a percentage sign -> set progress and restart
+ if c == '%':
+ progress(int(progressstring))
+ progressstring = ""
+ # not reading a digit -> therefore restart
+ elif c not in digits:
+ progressstring = ""
+ # add digit to progressstring
+ else:
+ progressstring = progressstring + c
+ progress(100)
+
+ # retrieve stderr
+ err = p.stderr.read()
+
+ if "CRC failed" in err and not password and not self.passwordProtected:
+ raise CRCError
+ elif "CRC failed" in err:
+ raise WrongPassword
+ if err.strip(): #: raise error if anything is on stderr
+ raise ArchiveError(err.strip())
+ if p.returncode:
+ raise ArchiveError("Process terminated")
+
+ if not self.files:
+ self.password = password
+ self.listContent()
+
+
+ def getDeleteFiles(self):
+ if ".part" in basename(self.file):
+ return glob(re.sub(re.compile(r"(?<=\.part)([01]+)", re.I), "*", self.file))
+ # get files which match .r* and filter unsuited files out
+ parts = glob(re.sub(re.compile(r"(?<=\.r)ar$", re.I), "*", self.file))
+ return filter(lambda x: self.re_partfiles.match(x), parts)
+
+
+ def listContent(self):
+ command = "vb" if self.fullpath else "lb"
+ p = self.call_unrar(command, "-v", self.file, password=self.password)
+ out, err = p.communicate()
+
+ if "Cannot open" in err:
+ raise ArchiveError("Cannot open file")
+
+ if err.strip(): #: only log error at this point
+ self.m.logError(err.strip())
+
+ result = set()
+
+ for f in decode(out).splitlines():
+ f = f.strip()
+ result.add(safe_join(self.out, f))
+
+ self.files = result
+
+
+ def call_unrar(self, command, *xargs, **kwargs):
+ args = []
+ # overwrite flag
+ args.append("-o+") if self.overwrite else args.append("-o-")
+
+ if self.excludefiles:
+ for word in self.excludefiles.split(';'):
+ args.append("-x%s" % word)
+
+ # assume yes on all queries
+ args.append("-y")
+
+ # set a password
+ if "password" in kwargs and kwargs['password']:
+ args.append("-p%s" % kwargs['password'])
+ else:
+ args.append("-p-")
+
+ # NOTE: return codes are not reliable; probably a threading/cleanup issue
+ call = [self.CMD, command] + args + list(xargs)
+ self.m.logDebug(" ".join(call))
+
+ p = Popen(call, stdout=PIPE, stderr=PIPE)
+
+ return p
diff --git a/pyload/plugin/internal/UnZip.py b/pyload/plugin/internal/UnZip.py
new file mode 100644
index 000000000..8cf71f659
--- /dev/null
+++ b/pyload/plugin/internal/UnZip.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+
+import sys
+import zipfile
+
+from pyload.plugin.internal.AbstractExtractor import AbtractExtractor
+
+
+class UnZip(AbtractExtractor):
+ __name = "UnZip"
+ __version = "0.10"
+
+ __description = """Zip extractor plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org")]
+
+
+ @staticmethod
+ def checkDeps():
+ return sys.version_info[:2] >= (2, 6)
+
+
+ @staticmethod
+ def getTargets(files_ids):
+ result = []
+
+ for file, id in files_ids:
+ if file.endswith(".zip"):
+ result.append((file, id))
+
+ return result
+
+
+ def extract(self, progress, password=None):
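+ # NOTE: the password argument is ignored here; encrypted zip archives are not supported by this extractor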
+ z = zipfile.ZipFile(self.file)
+ self.files = z.namelist()
+ z.extractall(self.out)
+
+
+ def getDeleteFiles(self):
+ return [self.file]
diff --git a/pyload/plugin/internal/UpdateManager.py b/pyload/plugin/internal/UpdateManager.py
new file mode 100644
index 000000000..0c40b4192
--- /dev/null
+++ b/pyload/plugin/internal/UpdateManager.py
@@ -0,0 +1,300 @@
+# -*- coding: utf-8 -*-
+
+import re
+import sys
+
+from operator import itemgetter
+from os import path, remove, stat
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Addon import Expose, Addon, threaded
+from pyload.utils import safe_join
+
+
+class UpdateManager(Addon):
+ __name = "UpdateManager"
+ __type = "addon"
+ __version = "0.40"
+
+ __config = [("activated" , "bool" , "Activated" , True ),
+ ("mode" , "pyLoad + plugins;plugins only", "Check updates for" , "pyLoad + plugins"),
+ ("interval" , "int" , "Check interval in hours" , 8 ),
+ ("reloadplugins", "bool" , "Monitor plugins for code changes (debug mode only)", True ),
+ ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , True )]
+
+ __description = """Check for updates"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ # event_list = ["pluginConfigChanged"]
+
+ SERVER_URL = "http://updatemanager.pyload.org"
+ MIN_INTERVAL = 6 * 60 * 60 #: 6h minimum check interval (value is in seconds)
+
+
+ def pluginConfigChanged(self, plugin, name, value):
+ if name == "interval":
+ interval = value * 60 * 60
+ if self.MIN_INTERVAL <= interval != self.interval:
+ self.core.scheduler.removeJob(self.cb)
+ self.interval = interval
+ self.initPeriodical()
+ else:
+ self.logDebug("Invalid interval value, kept current")
+
+ elif name == "reloadplugins":
+ if self.cb2:
+ self.core.scheduler.removeJob(self.cb2)
+ if value is True and self.core.debug:
+ self.periodical2()
+
+
+ def activate(self):
+ self.pluginConfigChanged(self.__name, "interval", self.getConfig("interval"))
+ x = lambda: self.pluginConfigChanged(self.__name, "reloadplugins", self.getConfig("reloadplugins"))
+ self.core.scheduler.addJob(10, x, threaded=False)
+
+
+ def deactivate(self):
+ self.pluginConfigChanged(self.__name, "reloadplugins", False)
+
+
+ def setup(self):
+ self.cb2 = None
+ self.interval = self.MIN_INTERVAL
+ self.updating = False
+ self.info = {'pyload': False, 'version': None, 'plugins': False}
+ self.mtimes = {} #: store modification time for each plugin
+
+
+ def periodical2(self):
+ if not self.updating:
+ self.autoreloadPlugins()
+
+ self.cb2 = self.core.scheduler.addJob(4, self.periodical2, threaded=False)
+
+
+ @Expose
+ def autoreloadPlugins(self):
+ """ reload and reindex all modified plugins """
+ modules = filter(
+ lambda m: m and (m.__name__.startswith("pyload.plugin.") or
+ m.__name__.startswith("userplugins.")) and
+ m.__name__.count(".") >= 2, sys.modules.itervalues()
+ )
+
+ reloads = []
+
+ for m in modules:
+ root, type, name = m.__name__.rsplit(".", 2)
+ id = (type, name)
+ if type in self.core.pluginManager.plugins:
+ f = m.__file__.replace(".pyc", ".py")
+ if not path.isfile(f):
+ continue
+
+ mtime = stat(f).st_mtime
+
+ if id not in self.mtimes:
+ self.mtimes[id] = mtime
+ elif self.mtimes[id] < mtime:
+ reloads.append(id)
+ self.mtimes[id] = mtime
+
+ return bool(self.core.pluginManager.reloadPlugins(reloads))
+
+
+ def periodical(self):
+ if self.info['pyload'] or (self.getConfig("nodebugupdate") and self.core.debug):
+ return
+
+ self.updateThread()
+
+
+ def server_request(self):
+ try:
+ return getURL(self.SERVER_URL, get={'v': self.core.api.getServerVersion()}).splitlines()
+ except Exception:
+ self.logWarning(_("Unable to contact server to get updates"))
+
+
+ @threaded
+ def updateThread(self):
+ self.updating = True
+
+ status = self.update(onlyplugin=self.getConfig("mode") == "plugins only")
+
+ if status == 2:
+ self.core.api.restart()
+ else:
+ self.updating = False
+
+
+ @Expose
+ def updatePlugins(self):
+ """ simple wrapper for calling plugin update quickly """
+ return self.update(onlyplugin=True)
+
+
+ @Expose
+ def update(self, onlyplugin=False):
+ """ check for updates """
+ data = self.server_request()
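+ # expected server response (inferred from the parsing below): first line is the new pyLoad version or "None", the remaining lines form the plugin update list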
+
+ if not data:
+ exitcode = 0
+
+ elif data[0] == "None":
+ self.logInfo(_("No new pyLoad version available"))
+ updates = data[1:]
+ exitcode = self._updatePlugins(updates)
+
+ elif onlyplugin:
+ exitcode = 0
+
+ else:
+ newversion = data[0]
+ self.logInfo(_("*** New pyLoad Version %s available ***") % newversion)
+ self.logInfo(_("*** Get it here: https://github.com/pyload/pyload/releases ***"))
+ exitcode = 3
+ self.info['pyload'] = True
+ self.info['version'] = newversion
+
+ return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required; 3 = No plugins updated, new pyLoad version available
+
+
+ def _updatePlugins(self, updates):
+ """ check for plugin updates """
+
+ if self.info['plugins']:
+ return False #: plugins were already updated
+
+ exitcode = 0
+ updated = []
+
+ vre = re.compile(r'__version.*=.*("|\')([\d.]+)')
+ url = updates[0]
+ schema = updates[1].split('|')
+
+ if "BLACKLIST" in updates:
+ blacklist = updates[updates.index('BLACKLIST') + 1:]
+ updates = updates[2:updates.index('BLACKLIST')]
+ else:
+ blacklist = None
+ updates = updates[2:]
+
+ upgradable = sorted(map(lambda x: dict(zip(schema, x.split('|'))), updates),
+ key=itemgetter("type", "name"))
+
+ for plugin in upgradable:
+ filename = plugin['name']
+ type = plugin['type']
+ version = plugin['version']
+
+ if filename.endswith(".pyc"):
+ name = filename[:filename.find("_")]
+ else:
+ name = filename.replace(".py", "")
+
+ plugins = getattr(self.core.pluginManager, "%sPlugins" % type)
+
+ oldver = float(plugins[name]['version']) if name in plugins else None
+ newver = float(version)
+
+ if not oldver:
+ msg = "New plugin: [%(type)s] %(name)s (v%(newver).2f)"
+ elif newver > oldver:
+ msg = "New version of plugin: [%(type)s] %(name)s (v%(oldver).2f -> v%(newver).2f)"
+ else:
+ continue
+
+ self.logInfo(_(msg) % {'type' : type,
+ 'name' : name,
+ 'oldver': oldver,
+ 'newver': newver})
+ try:
+ content = getURL(url % plugin)
+ m = vre.search(content)
+
+ if m and m.group(2) == version:
+ f = open(safe_join("userplugins", type, filename), "wb")
+ f.write(content)
+ f.close()
+ updated.append((type, name))
+ else:
+ raise Exception, _("Version mismatch")
+
+ except Exception, e:
+ self.logError(_("Error updating plugin %s") % filename, e)
+
+ if blacklist:
+ blacklisted = map(lambda x: (x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]), blacklist)
+
+ # Always protect internal plugins from removal
+ blacklisted = [(t, n) for t, n in blacklisted if t != "internal"]
+
+ blacklisted = sorted(blacklisted)
+ removed = self.removePlugins(blacklisted)
+ for t, n in removed:
+ self.logInfo(_("Removed blacklisted plugin [%(type)s] %(name)s") % {
+ 'type': t,
+ 'name': n,
+ })
+
+ if updated:
+ reloaded = self.core.pluginManager.reloadPlugins(updated)
+ if reloaded:
+ self.logInfo(_("Plugins updated and reloaded"))
+ exitcode = 1
+ else:
+ self.logInfo(_("*** Plugins have been updated, but need a pyLoad restart to be reloaded ***"))
+ self.info['plugins'] = True
+ exitcode = 2
+ else:
+ self.logInfo(_("No plugin updates available"))
+
+ return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required
+
+
+ @Expose
+ def removePlugins(self, type_plugins):
+ """ delete plugins from disk """
+
+ if not type_plugins:
+ return
+
+ self.logDebug("Requested deletion of plugins: %s" % type_plugins)
+
+ removed = []
+
+ for type, name in type_plugins:
+ err = False
+ file = name + ".py"
+
+ for root in ("userplugins", path.join(pypath, "pyload", "plugins")):
+
+ filename = safe_join(root, type, file)
+ try:
+ remove(filename)
+ except Exception, e:
+ self.logDebug("Error deleting: %s" % path.basename(filename), e)
+ err = True
+
+ filename += "c"
+ if path.isfile(filename):
+ try:
+ if type == "addon":
+ self.manager.deactivateAddon(name)
+ remove(filename)
+ except Exception, e:
+ self.logDebug("Error deleting: %s" % path.basename(filename), e)
+ err = True
+
+ if not err:
+ id = (type, name)
+ removed.append(id)
+
+ return removed #: return a list of the plugins successfully removed
diff --git a/pyload/plugin/internal/XFSAccount.py b/pyload/plugin/internal/XFSAccount.py
new file mode 100644
index 000000000..388cda26c
--- /dev/null
+++ b/pyload/plugin/internal/XFSAccount.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import gmtime, mktime, strptime
+from urlparse import urljoin
+
+from pyload.plugin.Account import Account
+from pyload.plugin.internal.SimpleHoster import parseHtmlForm, set_cookies
+
+
+class XFSAccount(Account):
+ __name = "XFSAccount"
+ __type = "account"
+ __version = "0.32"
+
+ __description = """XFileSharing account plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = None
+ HOSTER_URL = None
+
+ COOKIES = [(HOSTER_DOMAIN, "lang", "english")]
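+ # NOTE: evaluated at class definition time, so the cookie domain is still None here; subclasses that set HOSTER_DOMAIN should also redefine COOKIES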
+
+ PREMIUM_PATTERN = r'\(Premium only\)'
+
+ VALID_UNTIL_PATTERN = r'Premium.[Aa]ccount expire:.*?(\d{1,2} [\w^_]+ \d{4})'
+
+ TRAFFIC_LEFT_PATTERN = r'Traffic available today:.*?<b>\s*(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
+ TRAFFIC_LEFT_UNIT = "MB" #: used only if no group <U> was found
+
+ LEECH_TRAFFIC_PATTERN = r'Leech Traffic left:<b>.*?(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
+ LEECH_TRAFFIC_UNIT = "MB" #: used only if no group <U> was found
+
+ LOGIN_FAIL_PATTERN = r'>\s*(Incorrect Login or Password|Error<)'
+
+
+ def init(self):
+ # if not self.HOSTER_DOMAIN:
+ # self.fail(_("Missing HOSTER_DOMAIN"))
+
+ if not self.HOSTER_URL:
+ self.HOSTER_URL = "http://www.%s/" % self.HOSTER_DOMAIN
+
+
+ def loadAccountInfo(self, user, req):
+ validuntil = None
+ trafficleft = None
+ leechtraffic = None
+ premium = None
+
+ html = req.load(self.HOSTER_URL, get={'op': "my_account"}, decode=True)
+
+ premium = bool(re.search(self.PREMIUM_PATTERN, html))
+
+ m = re.search(self.VALID_UNTIL_PATTERN, html)
+ if m:
+ expiredate = m.group(1).strip()
+ self.logDebug("Expire date: " + expiredate)
+
+ try:
+ validuntil = mktime(strptime(expiredate, "%d %B %Y"))
+
+ except Exception, e:
+ self.logError(e)
+
+ else:
+ self.logDebug("Valid until: %s" % validuntil)
+
+ if validuntil > mktime(gmtime()):
+ premium = True
+ trafficleft = -1
+ else:
+ premium = False
+ validuntil = None #: registered account type (not premium)
+ else:
+ self.logDebug("VALID_UNTIL_PATTERN not found")
+
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ if m:
+ try:
+ traffic = m.groupdict()
+ size = traffic['S']
+
+ if "nlimited" in size:
+ trafficleft = -1
+ if validuntil is None:
+ validuntil = -1
+ else:
+ if 'U' in traffic:
+ unit = traffic['U']
+ elif isinstance(self.TRAFFIC_LEFT_UNIT, basestring):
+ unit = self.TRAFFIC_LEFT_UNIT
+ else:
+ unit = ""
+
+ trafficleft = self.parseTraffic(size + unit)
+
+ except Exception, e:
+ self.logError(e)
+ else:
+ self.logDebug("TRAFFIC_LEFT_PATTERN not found")
+
+ leech = [m.groupdict() for m in re.finditer(self.LEECH_TRAFFIC_PATTERN, html)]
+ if leech:
+ leechtraffic = 0
+ try:
+ for traffic in leech:
+ size = traffic['S']
+
+ if "nlimited" in size:
+ leechtraffic = -1
+ if validuntil is None:
+ validuntil = -1
+ break
+ else:
+ if 'U' in traffic:
+ unit = traffic['U']
+ elif isinstance(self.LEECH_TRAFFIC_UNIT, basestring):
+ unit = self.LEECH_TRAFFIC_UNIT
+ else:
+ unit = ""
+
+ leechtraffic += self.parseTraffic(size + unit)
+
+ except Exception, e:
+ self.logError(e)
+ else:
+ self.logDebug("LEECH_TRAFFIC_PATTERN not found")
+
+ return {'validuntil': validuntil, 'trafficleft': trafficleft, 'leechtraffic': leechtraffic, 'premium': premium}
+
+
+ def login(self, user, data, req):
+ if isinstance(self.COOKIES, list):
+ set_cookies(req.cj, self.COOKIES)
+
+ url = urljoin(self.HOSTER_URL, "login.html")
+ html = req.load(url, decode=True)
+
+ action, inputs = parseHtmlForm('name="FL"', html)
+ if not inputs:
+ inputs = {'op': "login",
+ 'redirect': self.HOSTER_URL}
+
+ inputs.update({'login': user,
+ 'password': data['password']})
+
+ html = req.load(self.HOSTER_URL, post=inputs, decode=True)
+
+ if re.search(self.LOGIN_FAIL_PATTERN, html):
+ self.wrongPassword()
diff --git a/pyload/plugin/internal/XFSCrypter.py b/pyload/plugin/internal/XFSCrypter.py
new file mode 100644
index 000000000..6440d328a
--- /dev/null
+++ b/pyload/plugin/internal/XFSCrypter.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
+
+
+class XFSCrypter(SimpleCrypter):
+ __name = "XFSCrypter"
+ __type = "crypter"
+ __version = "0.04"
+
+ __pattern = r'^unmatchable$'
+
+ __description = """XFileSharing decrypter plugin"""
+ __license = "GPLv3"
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = None
+ HOSTER_NAME = None
+
+ URL_REPLACEMENTS = [(r'&?per_page=\d+', ""), (r'[?/&]+$', ""), (r'(.+/[^?]+)$', r'\1?'), (r'$', r'&per_page=10000')]
+
+ COOKIES = [(HOSTER_DOMAIN, "lang", "english")]
+
+ LINK_PATTERN = r'<(?:td|TD).*?>\s*<a href="(.+?)".*?>.+?(?:</a>)?\s*</(?:td|TD)>'
+ NAME_PATTERN = r'<[tT]itle>.*?\: (?P<N>.+) folder</[tT]itle>'
+
+ OFFLINE_PATTERN = r'>\s*\w+ (Not Found|file (was|has been) removed)'
+ TEMP_OFFLINE_PATTERN = r'>\s*\w+ server (is in )?(maintenance|maintainance)'
diff --git a/pyload/plugin/internal/XFSHoster.py b/pyload/plugin/internal/XFSHoster.py
new file mode 100644
index 000000000..dfc923f69
--- /dev/null
+++ b/pyload/plugin/internal/XFSHoster.py
@@ -0,0 +1,339 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from random import random
+from time import sleep
+
+from pyload.plugin.hoster.UnrestrictLi import secondsToMidnight
+from pyload.plugin.internal.captcha import ReCaptcha, SolveMedia
+from pyload.plugin.internal.SimpleHoster import SimpleHoster, create_getInfo
+from pyload.utils import html_unescape
+
+
+class XFSHoster(SimpleHoster):
+ __name = "XFSHoster"
+ __type = "hoster"
+ __version = "0.27"
+
+ __pattern = r'^unmatchable$'
+
+ __description = """XFileSharing hoster plugin"""
+ __license = "GPLv3"
+ __authors = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ HOSTER_DOMAIN = None
+ HOSTER_NAME = None
+
+ TEXT_ENCODING = False
+ COOKIES = [(HOSTER_DOMAIN, "lang", "english")]
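+ # NOTE: HOSTER_DOMAIN is still None at this point; subclasses overriding it should also redefine COOKIES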
+ CHECK_DIRECT_LINK = None
+ MULTI_HOSTER = True #@NOTE: Should default to False to be safe, but I'm lazy...
+
+ NAME_PATTERN = r'(>Filename:</b></td><td nowrap>|name="fname" value="|<span class="name">)(?P<N>.+?)(\s*<|")'
+ SIZE_PATTERN = r'(>Size:</b></td><td>|>File:.*>|<span class="size">)(?P<S>[\d.,]+)\s*(?P<U>[\w^_]+)'
+
+ OFFLINE_PATTERN = r'>\s*\w+ (Not Found|file (was|has been) removed)'
+ TEMP_OFFLINE_PATTERN = r'>\s*\w+ server (is in )?(maintenance|maintainance)'
+
+ WAIT_PATTERN = r'<span id="countdown_str">.*?>(\d+)</span>|id="countdown" value=".*?(\d+).*?"'
+ PREMIUM_ONLY_PATTERN = r'>This file is available for Premium Users only'
+ ERROR_PATTERN = r'(?:class=["\']err["\'].*?>|<[Cc]enter><b>|>Error</td>|>\(ERROR:)(?:\s*<.+?>\s*)*(.+?)(?:["\']|<|\))'
+
+ LEECH_LINK_PATTERN = r'<h2>Download Link</h2>\s*<textarea[^>]*>([^<]+)'
+ LINK_PATTERN = None #: final download url pattern
+
+ CAPTCHA_PATTERN = r'(https?://[^"\']+?/captchas?/[^"\']+)'
+ CAPTCHA_BLOCK_PATTERN = r'>Enter code.*?<div.*?>(.+?)</div>'
+ RECAPTCHA_PATTERN = None
+ SOLVEMEDIA_PATTERN = None
+
+ FORM_PATTERN = None
+ FORM_INPUTS_MAP = None #: dict passed as input_names to parseHtmlForm
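+
+ # Minimal subclass sketch (hypothetical hoster, for illustration only):
+ #
+ #   class ExampleCom(XFSHoster):
+ #       __pattern = r'https?://(?:www\.)?example\.com/\w{12}'
+ #
+ #       HOSTER_DOMAIN = "example.com"
+ #
+ # name/size parsing, waits, captcha handling and link extraction are inherited from this class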
+
+
+ def setup(self):
+ self.chunkLimit = 1
+ self.resumeDownload = self.multiDL = self.premium
+
+
+ def prepare(self):
+ """ Initialize important variables """
+ if not self.HOSTER_DOMAIN:
+ self.fail(_("Missing HOSTER_DOMAIN"))
+
+ if not self.HOSTER_NAME:
+ self.HOSTER_NAME = "".join([str.capitalize() for str in self.HOSTER_DOMAIN.split('.')])
+
+ if not self.LINK_PATTERN:
+ pattern = r'(https?://(www\.)?([^/]*?%s|\d+\.\d+\.\d+\.\d+)(\:\d+)?(/d/|(/files)?/\d+/\w+/).+?)["\'<]'
+ self.LINK_PATTERN = pattern % self.HOSTER_DOMAIN.replace('.', '\.')
+
+ self.captcha = None
+ self.errmsg = None
+ self.passwords = self.getPassword().splitlines()
+
+ super(XFSHoster, self).prepare()
+
+ if self.CHECK_DIRECT_LINK is None:
+ self.directDL = bool(self.premium)
+
+
+ def handleFree(self):
+ link = self.getDownloadLink()
+
+ if link:
+ if self.captcha:
+ self.correctCaptcha()
+
+ self.download(link, ref=True, cookies=True, disposition=True)
+
+ elif self.errmsg:
+ if 'captcha' in self.errmsg:
+ self.fail(_("No valid captcha code entered"))
+ else:
+ self.fail(self.errmsg)
+
+ else:
+ self.fail(_("Download link not found"))
+
+
+ def handlePremium(self):
+ return self.handleFree()
+
+
+ def getDownloadLink(self):
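+ # try up to 5 times: check for errors, look for the final link, otherwise resubmit the download form and follow a redirect that does not contain "op="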
+ for i in xrange(1, 6):
+ self.logDebug("Getting download link: #%d" % i)
+
+ self.checkErrors()
+
+ m = re.search(self.LINK_PATTERN, self.html, re.S)
+ if m:
+ break
+
+ data = self.getPostParameters()
+
+ self.html = self.load(self.pyfile.url, post=data, ref=True, decode=True, follow_location=False)
+
+ m = re.search(r'Location\s*:\s*(.+)', self.req.http.header, re.I)
+ if m and "op=" not in m.group(1):
+ break
+
+ m = re.search(self.LINK_PATTERN, self.html, re.S)
+ if m:
+ break
+ else:
+ self.logError(data['op'] if 'op' in data else _("UNKNOWN"))
+ return ""
+
+ self.errmsg = None
+
+ return m.group(1)
+
+
+ def handleMulti(self):
+ #only tested with easybytez.com
+ self.html = self.load("http://www.%s/" % self.HOSTER_DOMAIN)
+
+ action, inputs = self.parseHtmlForm()
+
+ upload_id = "%012d" % int(random() * 10 ** 12)
+ action += upload_id + "&js_on=1&utype=prem&upload_type=url"
+
+ inputs['tos'] = '1'
+ inputs['url_mass'] = self.pyfile.url
+ inputs['up1oad_type'] = 'url'
+
+ self.logDebug(action, inputs)
+
+ self.req.setOption("timeout", 600) #: wait for file to upload to easybytez.com
+
+ self.html = self.load(action, post=inputs)
+
+ self.checkErrors()
+
+ action, inputs = self.parseHtmlForm('F1')
+ if not inputs:
+ if self.errmsg:
+ self.retry(reason=self.errmsg)
+ else:
+ self.error(_("TEXTAREA F1 not found"))
+
+ self.logDebug(inputs)
+
+ stmsg = inputs['st']
+
+ if stmsg == 'OK':
+ self.html = self.load(action, post=inputs)
+
+ elif 'Can not leech file' in stmsg:
+ self.retry(20, 3 * 60, _("Can not leech file"))
+
+ elif 'today' in stmsg:
+ self.retry(wait_time=secondsToMidnight(gmt=2), reason=_("You've used all Leech traffic today"))
+
+ else:
+ self.fail(stmsg)
+
+ #get easybytez.com link for uploaded file
+ m = re.search(self.LEECH_LINK_PATTERN, self.html)
+ if m is None:
+ self.error(_("LEECH_LINK_PATTERN not found"))
+
+ header = self.load(m.group(1), just_header=True, decode=True)
+
+ if 'location' in header: #: Direct download link
+ self.link = header['location']
+ else:
+ self.fail(_("Download link not found"))
+
+
+ def checkErrors(self):
+ m = re.search(self.PREMIUM_ONLY_PATTERN, self.html)
+ if m:
+ self.info['error'] = "premium-only"
+ return
+
+ m = re.search(self.ERROR_PATTERN, self.html)
+
+ if m is None:
+ self.errmsg = None
+ else:
+ self.errmsg = m.group(1).strip()
+
+ self.logWarning(re.sub(r"<.*?>", " ", self.errmsg))
+
+ if 'wait' in self.errmsg:
+ wait_time = sum([int(v) * {"hr": 3600, "hour": 3600, "min": 60, "sec": 1}[u.lower()] for v, u in
+ re.findall(r'(\d+)\s*(hr|hour|min|sec)', self.errmsg, re.I)])
+ self.wait(wait_time, True)
+
+ elif 'country' in self.errmsg:
+ self.fail(_("Downloads are disabled for your country"))
+
+ elif 'captcha' in self.errmsg:
+ self.invalidCaptcha()
+
+ elif 'premium' in self.errmsg and 'require' in self.errmsg:
+ self.fail(_("File can be downloaded by premium users only"))
+
+ elif 'limit' in self.errmsg:
+ if 'days' in self.errmsg:
+ delay = secondsToMidnight(gmt=2)
+ retries = 3
+ else:
+ delay = 1 * 60 * 60
+ retries = 24
+
+ self.wantReconnect = True
+ self.retry(retries, delay, _("Download limit exceeded"))
+
+ elif 'countdown' in self.errmsg or 'Expired' in self.errmsg:
+ self.retry(reason=_("Link expired"))
+
+ elif 'maintenance' in self.errmsg or 'maintainance' in self.errmsg:
+ self.tempOffline()
+
+ elif 'download files up to' in self.errmsg:
+ self.fail(_("File too large for free download"))
+
+ else:
+ self.wantReconnect = True
+ self.retry(wait_time=60, reason=self.errmsg)
+
+ if self.errmsg:
+ self.info['error'] = self.errmsg
+ else:
+ self.info.pop('error', None)
+
+
+ def getPostParameters(self):
+ if self.FORM_PATTERN or self.FORM_INPUTS_MAP:
+ action, inputs = self.parseHtmlForm(self.FORM_PATTERN or "", self.FORM_INPUTS_MAP or {})
+ else:
+ action, inputs = self.parseHtmlForm(input_names={'op': re.compile(r'^download')})
+
+ if not inputs:
+ action, inputs = self.parseHtmlForm('F1')
+ if not inputs:
+ if self.errmsg:
+ self.retry(reason=self.errmsg)
+ else:
+ self.error(_("TEXTAREA F1 not found"))
+
+ self.logDebug(inputs)
+
+ if 'op' in inputs:
+ if "password" in inputs:
+ if self.passwords:
+ inputs['password'] = self.passwords.pop(0)
+ else:
+ self.fail(_("Missing password"))
+
+ if not self.premium:
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ wait_time = int(m.group(1))
+ self.setWait(wait_time, False)
+
+ self.captcha = self.handleCaptcha(inputs)
+
+ self.wait()
+ else:
+ inputs['referer'] = self.pyfile.url
+
+ if self.premium:
+ inputs['method_premium'] = "Premium Download"
+ inputs.pop('method_free', None)
+ else:
+ inputs['method_free'] = "Free Download"
+ inputs.pop('method_premium', None)
+
+ return inputs
+
+
+ def handleCaptcha(self, inputs):
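+ # return value indicates the captcha type handled: 0 = none, 1 = plain image captcha, 2 = styled digit block, 3 = ReCaptcha, 4 = SolveMedia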
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
+ if m:
+ captcha_url = m.group(1)
+ inputs['code'] = self.decryptCaptcha(captcha_url)
+ return 1
+
+ m = re.search(self.CAPTCHA_BLOCK_PATTERN, self.html, re.S)
+ if m:
+ captcha_div = m.group(1)
+ numerals = re.findall(r'<span.*?padding-left\s*:\s*(\d+).*?>(\d)</span>', html_unescape(captcha_div))
+ self.logDebug(captcha_div)
+ inputs['code'] = "".join([a[1] for a in sorted(numerals, key=lambda num: int(num[0]))])
+ self.logDebug("Captcha code: %s" % inputs['code'], numerals)
+ return 2
+
+ recaptcha = ReCaptcha(self)
+ try:
+ captcha_key = re.search(self.RECAPTCHA_PATTERN, self.html).group(1)
+ except Exception:
+ captcha_key = recaptcha.detect_key()
+ else:
+ self.logDebug("ReCaptcha key: %s" % captcha_key)
+
+ if captcha_key:
+ inputs['recaptcha_challenge_field'], inputs['recaptcha_response_field'] = recaptcha.challenge(captcha_key)
+ return 3
+
+ solvemedia = SolveMedia(self)
+ try:
+ captcha_key = re.search(self.SOLVEMEDIA_PATTERN, self.html).group(1)
+ except Exception:
+ captcha_key = solvemedia.detect_key()
+ else:
+ self.logDebug("SolveMedia key: %s" % captcha_key)
+
+ if captcha_key:
+ inputs['adcopy_challenge'], inputs['adcopy_response'] = solvemedia.challenge(captcha_key)
+ return 4
+
+ return 0
diff --git a/pyload/plugins/internal/__init__.py b/pyload/plugin/internal/__init__.py
index 40a96afc6..40a96afc6 100644
--- a/pyload/plugins/internal/__init__.py
+++ b/pyload/plugin/internal/__init__.py
diff --git a/pyload/plugin/ocr/GigasizeCom.py b/pyload/plugin/ocr/GigasizeCom.py
new file mode 100644
index 000000000..f818cdb41
--- /dev/null
+++ b/pyload/plugin/ocr/GigasizeCom.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.OCR import OCR
+
+
+class GigasizeCom(OCR):
+ __name = "GigasizeCom"
+ __type = "ocr"
+ __version = "0.10"
+
+ __description = """Gigasize.com ocr plugin"""
+ __license = "GPLv3"
+ __authors = [("pyLoad Team", "admin@pyload.org")]
+
+
+ def __init__(self):
+ OCR.__init__(self)
+
+
+ def get_captcha(self, image):
+ self.load_image(image)
+ self.threshold(2.8)
+ self.run_tesser(True, False, False, True)
+ return self.result_captcha
diff --git a/pyload/plugin/ocr/LinksaveIn.py b/pyload/plugin/ocr/LinksaveIn.py
new file mode 100644
index 000000000..cd0fe96f2
--- /dev/null
+++ b/pyload/plugin/ocr/LinksaveIn.py
@@ -0,0 +1,158 @@
+# -*- coding: utf-8 -*-
+
+try:
+ from PIL import Image
+except ImportError:
+ import Image
+
+from glob import glob
+from os import sep
+from os.path import abspath, dirname
+
+from pyload.plugin.OCR import OCR
+
+
+class LinksaveIn(OCR):
+ __name = "LinksaveIn"
+ __type = "ocr"
+ __version = "0.10"
+
+ __description = """Linksave.in ocr plugin"""
+ __license = "GPLv3"
+ __authors = [("pyLoad Team", "admin@pyload.org")]
+
+
+ def __init__(self):
+ OCR.__init__(self)
+ self.data_dir = dirname(abspath(__file__)) + sep + "LinksaveIn" + sep
+
+
+ def load_image(self, image):
+ im = Image.open(image)
+ frame_nr = 0
+
+ lut = im.resize((256, 1))
+ lut.putdata(range(256))
+ lut = list(lut.convert("RGB").getdata())
+
+ new = Image.new("RGB", im.size)
+ npix = new.load()
+ while True:
+ try:
+ im.seek(frame_nr)
+ except EOFError:
+ break
+ frame = im.copy()
+ pix = frame.load()
+ for x in xrange(frame.size[0]):
+ for y in xrange(frame.size[1]):
+ if lut[pix[x, y]] != (0,0,0):
+ npix[x, y] = lut[pix[x, y]]
+ frame_nr += 1
+ new.save(self.data_dir+"unblacked.png")
+ self.image = new.copy()
+ self.pixels = self.image.load()
+ self.result_captcha = ''
+
+
+ def get_bg(self):
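+ # pick the stock background image whose palette matches the captcha best (largest count of identical pixels)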
+ stat = {}
+ cstat = {}
+ img = self.image.convert("P")
+ for bgpath in glob(self.data_dir+"bg/*.gif"):
+ stat[bgpath] = 0
+ bg = Image.open(bgpath)
+
+ bglut = bg.resize((256, 1))
+ bglut.putdata(range(256))
+ bglut = list(bglut.convert("RGB").getdata())
+
+ lut = img.resize((256, 1))
+ lut.putdata(range(256))
+ lut = list(lut.convert("RGB").getdata())
+
+ bgpix = bg.load()
+ pix = img.load()
+ for x in xrange(bg.size[0]):
+ for y in xrange(bg.size[1]):
+ rgb_bg = bglut[bgpix[x, y]]
+ rgb_c = lut[pix[x, y]]
+ try:
+ cstat[rgb_c] += 1
+ except Exception:
+ cstat[rgb_c] = 1
+ if rgb_bg == rgb_c:
+ stat[bgpath] += 1
+ max_p = 0
+ bg = ""
+ for bgpath, value in stat.iteritems():
+ if max_p < value:
+ bg = bgpath
+ max_p = value
+ return bg
+
+
+ def substract_bg(self, bgpath):
+ bg = Image.open(bgpath)
+ img = self.image.convert("P")
+
+ bglut = bg.resize((256, 1))
+ bglut.putdata(range(256))
+ bglut = list(bglut.convert("RGB").getdata())
+
+ lut = img.resize((256, 1))
+ lut.putdata(range(256))
+ lut = list(lut.convert("RGB").getdata())
+
+ bgpix = bg.load()
+ pix = img.load()
+ orgpix = self.image.load()
+ for x in xrange(bg.size[0]):
+ for y in xrange(bg.size[1]):
+ rgb_bg = bglut[bgpix[x, y]]
+ rgb_c = lut[pix[x, y]]
+ if rgb_c == rgb_bg:
+ orgpix[x, y] = (255,255,255)
+
+
+ def eval_black_white(self):
+ new = Image.new("RGB", (140, 75))
+ pix = new.load()
+ orgpix = self.image.load()
+ thresh = 4
+ for x in xrange(new.size[0]):
+ for y in xrange(new.size[1]):
+ rgb = orgpix[x, y]
+ r, g, b = rgb
+ pix[x, y] = (255,255,255)
+ if r > max(b, g)+thresh:
+ pix[x, y] = (0,0,0)
+ if g < min(r, b):
+ pix[x, y] = (0,0,0)
+ if g > max(r, b)+thresh:
+ pix[x, y] = (0,0,0)
+ if b > max(r, g)+thresh:
+ pix[x, y] = (0,0,0)
+ self.image = new
+ self.pixels = self.image.load()
+
+
+ def get_captcha(self, image):
+ self.load_image(image)
+ bg = self.get_bg()
+ self.substract_bg(bg)
+ self.eval_black_white()
+ self.to_greyscale()
+ self.image.save(self.data_dir+"cleaned_pass1.png")
+ self.clean(4)
+ self.clean(4)
+ self.image.save(self.data_dir+"cleaned_pass2.png")
+ letters = self.split_captcha_letters()
+ final = ""
+ for n, letter in enumerate(letters):
+ self.image = letter
+ self.image.save(self.data_dir + "letter%d.png" % n)
+ self.run_tesser(True, True, False, False)
+ final += self.result_captcha
+
+ return final
diff --git a/pyload/plugin/ocr/NetloadIn.py b/pyload/plugin/ocr/NetloadIn.py
new file mode 100644
index 000000000..f15ab4449
--- /dev/null
+++ b/pyload/plugin/ocr/NetloadIn.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.OCR import OCR
+
+
+class NetloadIn(OCR):
+ __name = "NetloadIn"
+ __type = "ocr"
+ __version = "0.10"
+
+ __description = """Netload.in ocr plugin"""
+ __license = "GPLv3"
+ __authors = [("pyLoad Team", "admin@pyload.org")]
+
+
+ def __init__(self):
+ OCR.__init__(self)
+
+
+ def get_captcha(self, image):
+ self.load_image(image)
+ self.to_greyscale()
+ self.clean(3)
+ self.clean(3)
+ self.run_tesser(True, True, False, False)
+
+ self.result_captcha = self.result_captcha.replace(" ", "")[:4] # cut to 4 numbers
+
+ return self.result_captcha
diff --git a/pyload/plugin/ocr/ShareonlineBiz.py b/pyload/plugin/ocr/ShareonlineBiz.py
new file mode 100644
index 000000000..103a9f96f
--- /dev/null
+++ b/pyload/plugin/ocr/ShareonlineBiz.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.OCR import OCR
+
+
+class ShareonlineBiz(OCR):
+ __name = "ShareonlineBiz"
+ __type = "ocr"
+ __version = "0.10"
+
+ __description = """Shareonline.biz ocr plugin"""
+ __license = "GPLv3"
+ __authors = [("RaNaN", "RaNaN@pyload.org")]
+
+
+ def __init__(self):
+ OCR.__init__(self)
+
+
+ def get_captcha(self, image):
+ self.load_image(image)
+ self.to_greyscale()
+ self.image = self.image.resize((160, 50))
+ self.pixels = self.image.load()
+ self.threshold(1.85)
+ #self.eval_black_white(240)
+ #self.derotate_by_average()
+
+ letters = self.split_captcha_letters()
+
+ final = ""
+ for letter in letters:
+ self.image = letter
+ self.run_tesser(True, True, False, False)
+ final += self.result_captcha
+
+ return final
+
+ #tesseract at 60%
diff --git a/pyload/plugins/ocr/__init__.py b/pyload/plugin/ocr/__init__.py
index 40a96afc6..40a96afc6 100644
--- a/pyload/plugins/ocr/__init__.py
+++ b/pyload/plugin/ocr/__init__.py
diff --git a/pyload/plugins/Account.py b/pyload/plugins/Account.py
deleted file mode 100644
index 1444a4300..000000000
--- a/pyload/plugins/Account.py
+++ /dev/null
@@ -1,307 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from random import choice
-from time import time
-from traceback import print_exc
-from threading import RLock
-
-from pyload.plugins.Plugin import Base
-from pyload.utils import compare_time, parseFileSize, lock
-
-
-class WrongPassword(Exception):
- pass
-
-
-class Account(Base):
- """
- Base class for every Account plugin.
- Just overwrite `login` and cookies will be stored and account becomes accessible in\
- associated hoster plugin. Plugin should also provide `loadAccountInfo`
- """
- __name = "Account"
- __type = "account"
- __version = "0.03"
-
- __description = """Base account plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
- #: after that time (in minutes) pyload will relogin the account
- login_timeout = 10 * 60
- #: after that time (in minutes) account data will be reloaded
- info_threshold = 10 * 60
-
-
- def __init__(self, manager, accounts):
- Base.__init__(self, manager.core)
-
- self.manager = manager
- self.accounts = {}
- self.infos = {} #: cache for account information
- self.lock = RLock()
- self.timestamps = {}
-
- self.init()
-
- self.setAccounts(accounts)
-
-
- def init(self):
- pass
-
-
- def login(self, user, data, req):
- """login into account, the cookies will be saved so user can be recognized
-
- :param user: loginname
- :param data: data dictionary
- :param req: `Request` instance
- """
- pass
-
-
- @lock
- def _login(self, user, data):
- # set timestamp for login
- self.timestamps[user] = time()
-
- req = self.getAccountRequest(user)
- try:
- self.login(user, data, req)
- except WrongPassword:
- self.logWarning(
- _("Could not login with account %(user)s | %(msg)s") % {"user": user,
- "msg": _("Wrong Password")})
- success = data['valid'] = False
- except Exception, e:
- self.logWarning(
- _("Could not login with account %(user)s | %(msg)s") % {"user": user,
- "msg": e})
- success = data['valid'] = False
- if self.core.debug:
- print_exc()
- else:
- success = True
- finally:
- if req:
- req.close()
- return success
-
-
- def relogin(self, user):
- req = self.getAccountRequest(user)
- if req:
- req.cj.clear()
- req.close()
- if user in self.infos:
- del self.infos[user] #delete old information
-
- return self._login(user, self.accounts[user])
-
-
- def setAccounts(self, accounts):
- self.accounts = accounts
- for user, data in self.accounts.iteritems():
- self._login(user, data)
- self.infos[user] = {}
-
-
- def updateAccounts(self, user, password=None, options={}):
- """ updates account and return true if anything changed """
-
- if user in self.accounts:
- self.accounts[user]['valid'] = True #do not remove or accounts will not login
- if password:
- self.accounts[user]['password'] = password
- self.relogin(user)
- return True
- if options:
- before = self.accounts[user]['options']
- self.accounts[user]['options'].update(options)
- return self.accounts[user]['options'] != before
- else:
- self.accounts[user] = {"password": password, "options": options, "valid": True}
- self._login(user, self.accounts[user])
- return True
-
-
- def removeAccount(self, user):
- if user in self.accounts:
- del self.accounts[user]
- if user in self.infos:
- del self.infos[user]
- if user in self.timestamps:
- del self.timestamps[user]
-
-
- @lock
- def getAccountInfo(self, name, force=False):
- """retrieve account infos for an user, do **not** overwrite this method!\\
- just use it to retrieve infos in hoster plugins. see `loadAccountInfo`
-
- :param name: username
- :param force: reloads cached account information
- :return: dictionary with information
- """
- data = Account.loadAccountInfo(self, name)
-
- if force or name not in self.infos:
- self.logDebug("Get Account Info for %s" % name)
- req = self.getAccountRequest(name)
-
- try:
- infos = self.loadAccountInfo(name, req)
- if not type(infos) == dict:
- raise Exception("Wrong return format")
- except Exception, e:
- infos = {"error": str(e)}
- print_exc()
-
- if req:
- req.close()
-
- self.logDebug("Account Info: %s" % infos)
-
- infos['timestamp'] = time()
- self.infos[name] = infos
- elif "timestamp" in self.infos[name] and self.infos[name][
- "timestamp"] + self.info_threshold * 60 < time():
- self.logDebug("Reached timeout for account data")
- self.scheduleRefresh(name)
-
- data.update(self.infos[name])
- return data
-
-
- def isPremium(self, user):
- info = self.getAccountInfo(user)
- return info['premium']
-
-
- def loadAccountInfo(self, name, req=None):
- """this should be overwritten in account plugin,\
- and retrieving account information for user
-
- :param name:
- :param req: `Request` instance
- :return:
- """
- return {"validuntil" : None, #: -1 for unlimited
- "login" : name,
- # "password" : self.accounts[name]['password'], #: commented due security reason
- "options" : self.accounts[name]['options'],
- "valid" : self.accounts[name]['valid'],
- "trafficleft": None, #: in bytes, -1 for unlimited
- "maxtraffic" : None,
- "premium" : None,
- "timestamp" : 0, #: time this info was retrieved
- "type" : self.__name}
-
-
- def getAllAccounts(self, force=False):
- return [self.getAccountInfo(user, force) for user, data in self.accounts.iteritems()]
-
-
- def getAccountRequest(self, user=None):
- if not user:
- user, data = self.selectAccount()
- if not user:
- return None
-
- req = self.core.requestFactory.getRequest(self.__name, user)
- return req
-
-
- def getAccountCookies(self, user=None):
- if not user:
- user, data = self.selectAccount()
- if not user:
- return None
-
- cj = self.core.requestFactory.getCookieJar(self.__name, user)
- return cj
-
-
- def getAccountData(self, user):
- return self.accounts[user]
-
-
- def selectAccount(self):
- """ returns an valid account name and data"""
- usable = []
- for user, data in self.accounts.iteritems():
- if not data['valid']: continue
-
- if "time" in data['options'] and data['options']['time']:
- time_data = ""
- try:
- time_data = data['options']['time'][0]
- start, end = time_data.split("-")
- if not compare_time(start.split(":"), end.split(":")):
- continue
- except Exception:
- self.logWarning(_("Your Time %s has wrong format, use: 1:22-3:44") % time_data)
-
- if user in self.infos:
- if "validuntil" in self.infos[user]:
- if self.infos[user]['validuntil'] > 0 and time() > self.infos[user]['validuntil']:
- continue
- if "trafficleft" in self.infos[user]:
- if self.infos[user]['trafficleft'] == 0:
- continue
-
- usable.append((user, data))
-
- if not usable: return None, None
- return choice(usable)
-
-
- def canUse(self):
- return False if self.selectAccount() == (None, None) else True
-
-
- def parseTraffic(self, value, unit=None): #: return bytes
- if not unit and not isinstance(value, basestring):
- unit = "KB"
- return parseFileSize(value, unit)
-
-
- def wrongPassword(self):
- raise WrongPassword
-
-
- def empty(self, user):
- if user in self.infos:
- self.logWarning(_("Account %s has not enough traffic, checking again in 30min") % user)
-
- self.infos[user].update({"trafficleft": 0})
- self.scheduleRefresh(user, 30 * 60)
-
-
- def expired(self, user):
- if user in self.infos:
- self.logWarning(_("Account %s is expired, checking again in 1h") % user)
-
- self.infos[user].update({"validuntil": time() - 1})
- self.scheduleRefresh(user, 60 * 60)
-
-
- def scheduleRefresh(self, user, time=0, force=True):
- """ add task to refresh account info to sheduler """
- self.logDebug("Scheduled Account refresh for %s in %s seconds." % (user, time))
- self.core.scheduler.addJob(time, self.getAccountInfo, [user, force])
-
-
- @lock
- def checkLogin(self, user):
- """ checks if user is still logged in """
- if user in self.timestamps:
- if self.login_timeout > 0 and self.timestamps[user] + self.login_timeout * 60 < time():
- self.logDebug("Reached login timeout for %s" % user)
- return self.relogin(user)
- else:
- return True
- else:
- return False
diff --git a/pyload/plugins/Addon.py b/pyload/plugins/Addon.py
deleted file mode 100644
index 1c3049c10..000000000
--- a/pyload/plugins/Addon.py
+++ /dev/null
@@ -1,185 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from traceback import print_exc
-
-from pyload.plugins.Plugin import Base
-from pyload.utils import has_method
-
-
-class Expose(object):
- """ used for decoration to declare rpc services """
-
- def __new__(cls, f, *args, **kwargs):
- addonManager.addRPC(f.__module__, f.func_name, f.func_doc)
- return f
-
-
-def threaded(fn):
-
- def run(*args,**kwargs):
- addonManager.startThread(fn, *args, **kwargs)
-
- return run
-
-
-class Addon(Base):
- """
- Base class for addon plugins.
- """
- __name = "Addon"
- __type = "addon"
- __version = "0.03"
-
- __config = [] #: [("name", "type", "desc", "default")]
-
- __description = """Base addon/hook plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de"),
- ("RaNaN", "RaNaN@pyload.org")]
-
-
- #: automatically register event listeners for functions, attribute will be deleted dont use it yourself
- event_map = {}
-
- # Deprecated alternative to event_map
- #: List of events the plugin can handle, name the functions exactly like eventname.
- event_list = [] #@NOTE: dont make duplicate entries in event_map
-
-
- def __init__(self, core, manager):
- Base.__init__(self, core)
-
- #: Provide information in dict here, usable by API `getInfo`
- self.info = {}
-
- #: Callback of periodical job task, used by AddonManager
- self.cb = None
- self.interval = 60
-
- #: `AddonManager`
- self.manager = manager
-
- #register events
- if self.event_map:
- for event, funcs in self.event_map.iteritems():
- if type(funcs) in (list, tuple):
- for f in funcs:
- self.manager.addEvent(event, getattr(self,f))
- else:
- self.manager.addEvent(event, getattr(self,funcs))
-
- #delete for various reasons
- self.event_map = None
-
- if self.event_list:
- for f in self.event_list:
- self.manager.addEvent(f, getattr(self,f))
-
- self.event_list = None
-
- self.setup()
-
- # self.initPeriodical()
-
-
- def initPeriodical(self, delay=0, threaded=False):
- self.cb = self.core.scheduler.addJob(delay, self._periodical, args=[threaded], threaded=threaded)
-
-
- def _periodical(self, threaded):
- if self.interval < 0:
- self.cb = None
- return
-
- try:
- self.periodical()
-
- except Exception, e:
- self.logError(_("Error executing addon: %s") % e)
- if self.core.debug:
- print_exc()
-
- self.cb = self.core.scheduler.addJob(self.interval, self._periodical, threaded=threaded)
-
-
- def __repr__(self):
- return "<Addon %s>" % self.__name
-
-
- def setup(self):
- """ more init stuff if needed """
- pass
-
-
- def deactivate(self):
- """ called when addon was deactivated """
- if has_method(self.__class__, "unload"):
- self.unload()
-
- def unload(self): # Deprecated, use method deactivate() instead
- pass
-
-
- def isActivated(self):
- """ checks if addon is activated"""
- return self.core.config.getPlugin(self.__name, "activated")
-
-
- # Event methods - overwrite these if needed
- def activate(self):
- """ called when addon was activated """
- if has_method(self.__class__, "coreReady"):
- self.coreReady()
-
- def coreReady(self): # Deprecated, use method activate() instead
- pass
-
-
- def exit(self):
- """ called by core.shutdown just before pyLoad exit """
- if has_method(self.__class__, "coreExiting"):
- self.coreExiting()
-
- def coreExiting(self): # Deprecated, use method exit() instead
- pass
-
-
- def downloadPreparing(self, pyfile):
- pass
-
-
- def downloadFinished(self, pyfile):
- pass
-
-
- def downloadFailed(self, pyfile):
- pass
-
-
- def packageFinished(self, pypack):
- pass
-
-
- def beforeReconnecting(self, ip):
- pass
-
-
- def afterReconnecting(self, ip):
- pass
-
-
- def periodical(self):
- pass
-
-
- def captchaTask(self, task):
- """ new captcha task for the plugin, it MUST set the handler and timeout or will be ignored """
- pass
-
-
- def captchaCorrect(self, task):
- pass
-
-
- def captchaInvalid(self, task):
- pass
diff --git a/pyload/plugins/Captcha.py b/pyload/plugins/Captcha.py
deleted file mode 100644
index 2bf115de4..000000000
--- a/pyload/plugins/Captcha.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Plugin import Plugin
-
-
-class Captcha(Plugin):
- __name = "Captcha"
- __type = "captcha"
- __version = "0.14"
-
- __description = """Base captcha service plugin"""
- __license = "GPLv3"
- __authors = [("pyLoad Team", "admin@pyload.org")]
-
-
- KEY_PATTERN = None
-
- key = None #: last key detected
-
-
- def __init__(self, plugin):
- self.plugin = plugin
-
-
- def detect_key(self, html=None):
- if not html:
- if hasattr(self.plugin, "html") and self.plugin.html:
- html = self.plugin.html
- else:
- errmsg = _("%s html not found") % self.__name
- self.plugin.error(errmsg)
- raise TypeError(errmsg)
-
- m = re.search(self.KEY_PATTERN, html)
- if m:
- self.key = m.group("KEY")
- self.plugin.logDebug("%s key: %s" % (self.__name, self.key))
- return self.key
- else:
- self.plugin.logDebug("%s key not found" % self.__name)
- return None
-
-
- def challenge(self, key=None):
- raise NotImplementedError
-
-
- def result(self, server, challenge):
- raise NotImplementedError
diff --git a/pyload/plugins/Container.py b/pyload/plugins/Container.py
deleted file mode 100644
index bcdb433b8..000000000
--- a/pyload/plugins/Container.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import re
-
-from os import remove
-from os.path import basename, exists
-
-from pyload.plugins.internal.Crypter import Crypter
-from pyload.utils import safe_join
-
-
-class Container(Crypter):
- __name = "Container"
- __type = "container"
- __version = "0.01"
-
- __pattern = r'^unmatchable$'
- __config = [] #: [("name", "type", "desc", "default")]
-
- __description = """Base container decrypter plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
- def preprocessing(self, thread):
- """prepare"""
-
- self.setup()
- self.thread = thread
-
- self.loadToDisk()
-
- self.decrypt(self.pyfile)
- self.deleteTmp()
-
- self.createPackages()
-
-
- def loadToDisk(self):
- """loads container to disk if its stored remotely and overwrite url,
- or check existent on several places at disk"""
-
- if self.pyfile.url.startswith("http"):
- self.pyfile.name = re.findall("([^\/=]+)", self.pyfile.url)[-1]
- content = self.load(self.pyfile.url)
- self.pyfile.url = safe_join(self.core.config['general']['download_folder'], self.pyfile.name)
- try:
- with open(self.pyfile.url, "wb") as f:
- f.write(content)
- except IOError, e:
- self.fail(str(e))
-
- else:
- self.pyfile.name = basename(self.pyfile.url)
- if not exists(self.pyfile.url):
- if exists(safe_join(pypath, self.pyfile.url)):
- self.pyfile.url = safe_join(pypath, self.pyfile.url)
- else:
- self.fail(_("File not exists"))
-
-
- def deleteTmp(self):
- if self.pyfile.name.startswith("tmp_"):
- remove(self.pyfile.url)
diff --git a/pyload/plugins/Crypter.py b/pyload/plugins/Crypter.py
deleted file mode 100644
index e0fe2bc0f..000000000
--- a/pyload/plugins/Crypter.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from urlparse import urlparse
-
-from pyload.plugins.Plugin import Plugin
-from pyload.utils import decode, safe_filename
-
-
-class Crypter(Plugin):
- __name = "Crypter"
- __type = "crypter"
- __version = "0.05"
-
- __pattern = r'^unmatchable$'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True), #: Overrides core.config['general']['folder_per_package']
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Base decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- html = None #: last html loaded
-
-
- def __init__(self, pyfile):
- #: Put all packages here. It's a list of tuples like: ( name, [list of links], folder )
- self.packages = []
-
- #: List of urls, pyLoad will generate packagenames
- self.urls = []
-
- Plugin.__init__(self, pyfile)
-
-
- def process(self, pyfile):
- """ main method """
-
- self.decrypt(pyfile)
-
- if self.urls:
- self.generatePackages()
-
- elif not self.packages:
- self.error(_("No link extracted"), "decrypt")
-
- self.createPackages()
-
-
- def decrypt(self, pyfile):
- raise NotImplementedError
-
-
- def generatePackages(self):
- """ generate new packages from self.urls """
-
- packages = map(lambda name, links: (name, links, None), self.core.api.generatePackages(self.urls).iteritems())
- self.packages.extend(packages)
-
-
- def createPackages(self):
- """ create new packages from self.packages """
-
- package_folder = self.pyfile.package().folder
- package_password = self.pyfile.package().password
- package_queue = self.pyfile.package().queue
-
- folder_per_package = self.core.config['general']['folder_per_package']
- try:
- use_subfolder = self.getConfig('use_subfolder')
- except Exception:
- use_subfolder = folder_per_package
- try:
- subfolder_per_package = self.getConfig('subfolder_per_package')
- except Exception:
- subfolder_per_package = True
-
- for pack in self.packages:
- name, links, folder = pack
-
- self.logDebug("Parsed package: %s" % name,
- "%d links" % len(links),
- "Saved to folder: %s" % folder if folder else "Saved to download folder")
-
- links = map(decode, links)
-
- pid = self.core.api.addPackage(name, links, package_queue)
-
- if package_password:
- self.core.api.setPackageData(pid, {"password": package_password})
-
- setFolder = lambda x: self.core.api.setPackageData(pid, {"folder": x or ""}) #: Workaround to avoid breaking the addPackage API method
-
- if use_subfolder:
- if not subfolder_per_package:
- setFolder(package_folder)
- self.logDebug("Set package %(name)s folder to: %(folder)s" % {"name": name, "folder": folder})
-
- elif not folder_per_package or name != folder:
- if not folder:
- folder = urlparse(name).path.split("/")[-1]
-
- setFolder(safe_filename(folder))
- self.logDebug("Set package %(name)s folder to: %(folder)s" % {"name": name, "folder": folder})
-
- elif folder_per_package:
- setFolder(None)
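
For orientation while reviewing this deletion: a decrypter built on the base class above only has to implement decrypt() and fill either self.urls (pyLoad generates the package names) or self.packages (explicit name/links/folder tuples); process() then runs generatePackages()/createPackages() as shown. A minimal sketch under those assumptions, using the pre-rename import path and a hypothetical MyFolder plugin:

# -*- coding: utf-8 -*-
# Minimal sketch only; "MyFolder" and example.com are hypothetical.

import re

from pyload.plugins.Crypter import Crypter


class MyFolder(Crypter):
    __name = "MyFolder"
    __type = "crypter"
    __version = "0.01"

    __pattern = r'https?://(?:www\.)?example\.com/folder/\w+'

    __description = """Example folder decrypter (sketch)"""
    __license = "GPLv3"
    __authors = [("anonymous", "")]


    def decrypt(self, pyfile):
        html = self.load(pyfile.url)
        # Fill self.urls; process() turns them into packages automatically
        self.urls = re.findall(r'href="(https?://example\.com/file/\w+)"', html)
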
diff --git a/pyload/plugins/Hoster.py b/pyload/plugins/Hoster.py
deleted file mode 100644
index feb2ae714..000000000
--- a/pyload/plugins/Hoster.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Plugin import Plugin
-
-
-def getInfo(self):
- #result = [ .. (name, size, status, url) .. ]
- return
-
-
-class Hoster(Plugin):
- __name = "Hoster"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'^unmatchable$'
- __config = [] #: [("name", "type", "desc", "default")]
-
- __description = """Base hoster plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
diff --git a/pyload/plugins/account/AlldebridCom.py b/pyload/plugins/account/AlldebridCom.py
deleted file mode 100644
index 78a4318ac..000000000
--- a/pyload/plugins/account/AlldebridCom.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-import xml.dom.minidom as dom
-
-from time import time
-from urllib import urlencode
-
-from BeautifulSoup import BeautifulSoup
-
-from pyload.plugins.Account import Account
-
-
-class AlldebridCom(Account):
- __name = "AlldebridCom"
- __type = "account"
- __version = "0.22"
-
- __description = """AllDebrid.com account plugin"""
- __license = "GPLv3"
- __authors = [("Andy Voigt", "spamsales@online.de")]
-
-
- def loadAccountInfo(self, user, req):
- data = self.getAccountData(user)
- page = req.load("http://www.alldebrid.com/account/")
- soup = BeautifulSoup(page)
- #Try to parse expiration date directly from the control panel page (better accuracy)
- try:
- time_text = soup.find('div', attrs={'class': 'remaining_time_text'}).strong.string
- self.logDebug("Account expires in: %s" % time_text)
- p = re.compile('\d+')
- exp_data = p.findall(time_text)
- exp_time = time() + int(exp_data[0]) * 24 * 60 * 60 + int(
- exp_data[1]) * 60 * 60 + (int(exp_data[2]) - 1) * 60
- #Get expiration date from API
- except Exception:
- data = self.getAccountData(user)
- page = req.load("http://www.alldebrid.com/api.php",
- get={'action': "info_user", 'login': user, 'pw': data['password']})
- self.logDebug(page)
- xml = dom.parseString(page)
- exp_time = time() + int(xml.getElementsByTagName("date")[0].childNodes[0].nodeValue) * 24 * 60 * 60
- account_info = {"validuntil": exp_time, "trafficleft": -1}
- return account_info
-
-
- def login(self, user, data, req):
- urlparams = urlencode({'action': 'login', 'login_login': user, 'login_password': data['password']})
- page = req.load("http://www.alldebrid.com/register/?%s" % urlparams)
-
- if "This login doesn't exist" in page:
- self.wrongPassword()
-
- if "The password is not valid" in page:
- self.wrongPassword()
-
- if "Invalid captcha" in page:
- self.wrongPassword()
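
The account plugins removed below all follow the contract illustrated by AlldebridCom above: login() authenticates the session and calls self.wrongPassword() on failure, while loadAccountInfo() returns a dict with validuntil, trafficleft and (optionally) premium. A minimal sketch, with a hypothetical ExampleCom:

# Minimal sketch only; "ExampleCom" and its URLs are hypothetical.

from pyload.plugins.Account import Account


class ExampleCom(Account):
    __name = "ExampleCom"
    __type = "account"
    __version = "0.01"

    __description = """Example.com account plugin (sketch)"""
    __license = "GPLv3"
    __authors = [("anonymous", "")]


    def loadAccountInfo(self, user, req):
        html = req.load("http://example.com/account")
        premium = "Premium member" in html
        return {"validuntil": -1, "trafficleft": -1, "premium": premium}


    def login(self, user, data, req):
        html = req.load("http://example.com/login",
                        post={"user": user, "password": data['password']})

        if "Wrong password" in html:
            self.wrongPassword()
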
diff --git a/pyload/plugins/account/BayfilesCom.py b/pyload/plugins/account/BayfilesCom.py
deleted file mode 100644
index d75b55151..000000000
--- a/pyload/plugins/account/BayfilesCom.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from time import time
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class BayfilesCom(Account):
- __name = "BayfilesCom"
- __type = "account"
- __version = "0.03"
-
- __description = """Bayfiles.com account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def loadAccountInfo(self, user, req):
- for _i in xrange(2):
- res = json_loads(req.load("http://api.bayfiles.com/v1/account/info"))
- self.logDebug(res)
- if not res['error']:
- break
- self.logWarning(res['error'])
- self.relogin(user)
-
- return {"premium": bool(res['premium']), "trafficleft": -1,
- "validuntil": res['expires'] if res['expires'] >= int(time()) else -1}
-
-
- def login(self, user, data, req):
- res = json_loads(req.load("http://api.bayfiles.com/v1/account/login/%s/%s" % (user, data['password'])))
- self.logDebug(res)
- if res['error']:
- self.logError(res['error'])
- self.wrongPassword()
diff --git a/pyload/plugins/account/BillionuploadsCom.py b/pyload/plugins/account/BillionuploadsCom.py
deleted file mode 100644
index c0c6ef313..000000000
--- a/pyload/plugins/account/BillionuploadsCom.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class BillionuploadsCom(XFSAccount):
- __name = "BillionuploadsCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Billionuploads.com account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "billionuploads.com"
diff --git a/pyload/plugins/account/BitshareCom.py b/pyload/plugins/account/BitshareCom.py
deleted file mode 100644
index c0fc55c2d..000000000
--- a/pyload/plugins/account/BitshareCom.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-
-
-class BitshareCom(Account):
- __name = "BitshareCom"
- __type = "account"
- __version = "0.12"
-
- __description = """Bitshare account plugin"""
- __license = "GPLv3"
- __authors = [("Paul King", "")]
-
-
- def loadAccountInfo(self, user, req):
- page = req.load("http://bitshare.com/mysettings.html")
-
- if "\"http://bitshare.com/myupgrade.html\">Free" in page:
- return {"validuntil": -1, "trafficleft": -1, "premium": False}
-
- if not '<input type="checkbox" name="directdownload" checked="checked" />' in page:
- self.logWarning(_("Activate direct Download in your Bitshare Account"))
-
- return {"validuntil": -1, "trafficleft": -1, "premium": True}
-
-
- def login(self, user, data, req):
- page = req.load("http://bitshare.com/login.html",
- post={"user": user, "password": data['password'], "submit": "Login"}, cookies=True)
- if "login" in req.lastEffectiveURL:
- self.wrongPassword()
diff --git a/pyload/plugins/account/CatShareNet.py b/pyload/plugins/account/CatShareNet.py
deleted file mode 100644
index 2df503991..000000000
--- a/pyload/plugins/account/CatShareNet.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import mktime, strptime
-
-from pyload.plugins.Account import Account
-
-
-class CatShareNet(Account):
- __name = "CatShareNet"
- __type = "account"
- __version = "0.01"
-
- __description = """CatShareNet account plugin"""
- __license = "GPLv3"
- __authors = [("prOq", "")]
-
-
- PREMIUM_PATTERN = r'class="nav-collapse collapse pull-right">[\s\w<>=-."/:]*\sz.</a></li>\s*<li><a href="/premium">.*\s*<span style="color: red">(.*?)</span>[\s\w<>/]*href="/logout"'
- VALID_UNTIL_PATTERN = r'<div class="span6 pull-right">[\s\w<>=-":;]*<span style="font-size:13px;">.*?<strong>(.*?)</strong></span>'
-
-
- def loadAccountInfo(self, user, req):
- premium = False
- validuntil = -1
-
- html = req.load("http://catshare.net/", decode=True)
-
- try:
- m = re.search(self.PREMIUM_PATTERN, html)
- if "Premium" in m.group(1):
- premium = True
- except Exception:
- pass
-
- try:
- m = re.search(self.VALID_UNTIL_PATTERN, html)
- expiredate = m.group(1)
- if "-" not in expiredate:
- validuntil = mktime(strptime(expiredate, "%d.%m.%Y"))
- except Exception:
- pass
-
- return {'premium': premium, 'trafficleft': -1, 'validuntil': validuntil}
-
-
- def login(self, user, data, req):
- html = req.load("http://catshare.net/login",
- post={'user_email': user,
- 'user_password': data['password'],
- 'remindPassword': 0,
- 'user[submit]': "Login"})
-
- if not '<a href="/logout">Wyloguj</a>' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/CramitIn.py b/pyload/plugins/account/CramitIn.py
deleted file mode 100644
index 6b8b5eead..000000000
--- a/pyload/plugins/account/CramitIn.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class CramitIn(XFSAccount):
- __name = "CramitIn"
- __type = "account"
- __version = "0.03"
-
- __description = """Cramit.in account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "cramit.in"
diff --git a/pyload/plugins/account/CzshareCom.py b/pyload/plugins/account/CzshareCom.py
deleted file mode 100644
index 5e94b1516..000000000
--- a/pyload/plugins/account/CzshareCom.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from time import mktime, strptime
-import re
-
-from pyload.plugins.Account import Account
-
-
-class CzshareCom(Account):
- __name = "CzshareCom"
- __type = "account"
- __version = "0.14"
-
- __description = """Czshare.com account plugin, now Sdilej.cz"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- CREDIT_LEFT_PATTERN = r'<tr class="active">\s*<td>([\d ,]+) (KiB|MiB|GiB)</td>\s*<td>([^<]*)</td>\s*</tr>'
-
-
- def loadAccountInfo(self, user, req):
- html = req.load("http://sdilej.cz/prehled_kreditu/")
-
- m = re.search(self.CREDIT_LEFT_PATTERN, html)
- if m is None:
- return {"validuntil": 0, "trafficleft": 0}
- else:
- credits = float(m.group(1).replace(' ', '').replace(',', '.'))
- credits = credits * 1024 ** {'KiB': 0, 'MiB': 1, 'GiB': 2}[m.group(2)]
- validuntil = mktime(strptime(m.group(3), '%d.%m.%y %H:%M'))
- return {"validuntil": validuntil, "trafficleft": credits}
-
-
- def login(self, user, data, req):
- html = req.load('https://sdilej.cz/index.php', post={
- "Prihlasit": "Prihlasit",
- "login-password": data['password'],
- "login-name": user
- })
-
- if '<div class="login' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/DebridItaliaCom.py b/pyload/plugins/account/DebridItaliaCom.py
deleted file mode 100644
index 7252fc14b..000000000
--- a/pyload/plugins/account/DebridItaliaCom.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import mktime, strptime
-
-from pyload.plugins.Account import Account
-
-
-class DebridItaliaCom(Account):
- __name = "DebridItaliaCom"
- __type = "account"
- __version = "0.11"
-
- __description = """Debriditalia.com account plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- WALID_UNTIL_PATTERN = r'Premium valid till: (.+?) \|'
-
-
- def loadAccountInfo(self, user, req):
- info = {'premium': False, 'validuntil': None, 'trafficleft': None}
- html = req.load("http://debriditalia.com/")
-
- if 'Account premium not activated' not in html:
- m = re.search(self.WALID_UNTIL_PATTERN, html)
- if m:
- validuntil = int(mktime(strptime(m.group(1), "%d/%m/%Y %H:%M")))
- info = {'premium': True, 'validuntil': validuntil, 'trafficleft': -1}
- else:
- self.logError(_("Unable to retrieve account information"))
-
- return info
-
-
- def login(self, user, data, req):
- html = req.load("http://debriditalia.com/login.php",
- get={'u': user, 'p': data['password']})
-
- if 'NO' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/DepositfilesCom.py b/pyload/plugins/account/DepositfilesCom.py
deleted file mode 100644
index fba6111f0..000000000
--- a/pyload/plugins/account/DepositfilesCom.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import strptime, mktime
-
-from pyload.plugins.Account import Account
-
-
-class DepositfilesCom(Account):
- __name = "DepositfilesCom"
- __type = "account"
- __version = "0.30"
-
- __description = """Depositfiles.com account plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de"),
- ("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- def loadAccountInfo(self, user, req):
- html = req.load("https://dfiles.eu/de/gold/")
- validuntil = re.search(r"Sie haben Gold Zugang bis: <b>(.*?)</b></div>", html).group(1)
-
- validuntil = int(mktime(strptime(validuntil, "%Y-%m-%d %H:%M:%S")))
-
- return {"validuntil": validuntil, "trafficleft": -1}
-
-
- def login(self, user, data, req):
- html = req.load("https://dfiles.eu/de/login.php", get={"return": "/de/gold/payment.php"},
- post={"login": user, "password": data['password']})
- if r'<div class="error_message">Sie haben eine falsche Benutzername-Passwort-Kombination verwendet.</div>' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/DropboxCom.py b/pyload/plugins/account/DropboxCom.py
deleted file mode 100644
index 30c26bc20..000000000
--- a/pyload/plugins/account/DropboxCom.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class DropboxCom(SimpleHoster):
- __name = "DropboxCom"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'https?://(?:www\.)?dropbox\.com/.+'
-
- __description = """Dropbox.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
-
-
- NAME_PATTERN = r'<title>Dropbox - (?P<N>.+?)<'
- SIZE_PATTERN = r'&nbsp;&middot;&nbsp; (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- OFFLINE_PATTERN = r'<title>Dropbox - (404|Shared link error)<'
-
- COOKIES = [("dropbox.com", "lang", "en")]
-
-
- def setup(self):
- self.multiDL = True
- self.chunkLimit = 1
- self.resumeDownload = True
-
-
- def handleFree(self):
- self.download(self.pyfile.url, get={'dl': "1"})
-
- check = self.checkDownload({'html': re.compile("html")})
- if check == "html":
- self.error(_("Downloaded file is an html page"))
-
-
-getInfo = create_getInfo(DropboxCom)
diff --git a/pyload/plugins/account/EasybytezCom.py b/pyload/plugins/account/EasybytezCom.py
deleted file mode 100644
index 1491b8a80..000000000
--- a/pyload/plugins/account/EasybytezCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class EasybytezCom(XFSAccount):
- __name = "EasybytezCom"
- __type = "account"
- __version = "0.12"
-
- __description = """EasyBytez.com account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "easybytez.com"
diff --git a/pyload/plugins/account/EuroshareEu.py b/pyload/plugins/account/EuroshareEu.py
deleted file mode 100644
index 667718651..000000000
--- a/pyload/plugins/account/EuroshareEu.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from time import mktime, strptime
-import re
-
-from pyload.plugins.Account import Account
-
-
-class EuroshareEu(Account):
- __name = "EuroshareEu"
- __type = "account"
- __version = "0.01"
-
- __description = """Euroshare.eu account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def loadAccountInfo(self, user, req):
- self.relogin(user)
- html = req.load("http://euroshare.eu/customer-zone/settings/")
-
- m = re.search('id="input_expire_date" value="(\d+\.\d+\.\d+ \d+:\d+)"', html)
- if m is None:
- premium, validuntil = False, -1
- else:
- premium = True
- validuntil = mktime(strptime(m.group(1), "%d.%m.%Y %H:%M"))
-
- return {"validuntil": validuntil, "trafficleft": -1, "premium": premium}
-
-
- def login(self, user, data, req):
- html = req.load('http://euroshare.eu/customer-zone/login/', post={
- "trvale": "1",
- "login": user,
- "password": data['password']
- }, decode=True)
-
- if u">Nesprávne prihlasovacie meno alebo heslo" in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/FastixRu.py b/pyload/plugins/account/FastixRu.py
deleted file mode 100644
index 40b567b92..000000000
--- a/pyload/plugins/account/FastixRu.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class FastixRu(Account):
- __name = "FastixRu"
- __type = "account"
- __version = "0.02"
-
- __description = """Fastix account plugin"""
- __license = "GPLv3"
- __authors = [("Massimo Rosamilia", "max@spiritix.eu")]
-
-
- def loadAccountInfo(self, user, req):
- data = self.getAccountData(user)
- page = json_loads(req.load("http://fastix.ru/api_v2/", get={'apikey': data['api'], 'sub': "getaccountdetails"}))
-
- points = page['points']
- kb = float(points) * 1024 ** 2 / 1000
-
- if points > 0:
- account_info = {"validuntil": -1, "trafficleft": kb}
- else:
- account_info = {"validuntil": None, "trafficleft": None, "premium": False}
- return account_info
-
-
- def login(self, user, data, req):
- page = req.load("http://fastix.ru/api_v2/",
- get={'sub': "get_apikey", 'email': user, 'password': data['password']})
- api = json_loads(page)
- api = api['apikey']
- data['api'] = api
- if "error_code" in page:
- self.wrongPassword()
diff --git a/pyload/plugins/account/FastshareCz.py b/pyload/plugins/account/FastshareCz.py
deleted file mode 100644
index 46f4c304c..000000000
--- a/pyload/plugins/account/FastshareCz.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Account import Account
-from pyload.utils import parseFileSize
-
-
-class FastshareCz(Account):
- __name = "FastshareCz"
- __type = "account"
- __version = "0.05"
-
- __description = """Fastshare.cz account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- CREDIT_PATTERN = r'My account\s*\((.+?)\)'
-
-
- def loadAccountInfo(self, user, req):
- validuntil = None
- trafficleft = None
- premium = None
-
- html = req.load("http://www.fastshare.cz/user", decode=True)
-
- m = re.search(self.CREDIT_PATTERN, html)
- if m:
- trafficleft = self.parseTraffic(m.group(1))
-
- if trafficleft:
- premium = True
- validuntil = -1
- else:
- premium = False
-
- return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
-
-
- def login(self, user, data, req):
- req.cj.setCookie("fastshare.cz", "lang", "en")
-
- req.load('http://www.fastshare.cz/login') # Do not remove or it will not login
-
- html = req.load("http://www.fastshare.cz/sql.php",
- post={'login': user, 'heslo': data['password']},
- decode=True)
-
- if ">Wrong username or password" in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/File4safeCom.py b/pyload/plugins/account/File4safeCom.py
deleted file mode 100644
index 555e9f4ea..000000000
--- a/pyload/plugins/account/File4safeCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class File4safeCom(XFSAccount):
- __name = "File4safeCom"
- __type = "account"
- __version = "0.04"
-
- __description = """File4safe.com account plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- HOSTER_DOMAIN = "file4safe.com"
-
- LOGIN_FAIL_PATTERN = r'input_login'
diff --git a/pyload/plugins/account/FileParadoxIn.py b/pyload/plugins/account/FileParadoxIn.py
deleted file mode 100644
index b0090bb27..000000000
--- a/pyload/plugins/account/FileParadoxIn.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class FileParadoxIn(XFSAccount):
- __name = "FileParadoxIn"
- __type = "account"
- __version = "0.02"
-
- __description = """FileParadox.in account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "fileparadox.in"
diff --git a/pyload/plugins/account/FilecloudIo.py b/pyload/plugins/account/FilecloudIo.py
deleted file mode 100644
index ce0cc2d34..000000000
--- a/pyload/plugins/account/FilecloudIo.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class FilecloudIo(Account):
- __name = "FilecloudIo"
- __type = "account"
- __version = "0.02"
-
- __description = """FilecloudIo account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- def loadAccountInfo(self, user, req):
- # The first API request always seems to fail, so we retry up to 5 times; it should work on the second try
- for _i in xrange(5):
- rep = req.load("https://secure.filecloud.io/api-fetch_apikey.api",
- post={"username": user, "password": self.accounts[user]['password']})
- rep = json_loads(rep)
- if rep['status'] == 'ok':
- break
- elif rep['status'] == 'error' and rep['message'] == 'no such user or wrong password':
- self.logError(_("Wrong username or password"))
- return {"valid": False, "premium": False}
- else:
- return {"premium": False}
-
- akey = rep['akey']
- self.accounts[user]['akey'] = akey # Saved for hoster plugin
- rep = req.load("http://api.filecloud.io/api-fetch_account_details.api",
- post={"akey": akey})
- rep = json_loads(rep)
-
- if rep['is_premium'] == 1:
- return {"validuntil": int(rep['premium_until']), "trafficleft": -1}
- else:
- return {"premium": False}
-
-
- def login(self, user, data, req):
- req.cj.setCookie("secure.filecloud.io", "lang", "en")
- html = req.load('https://secure.filecloud.io/user-login.html')
-
- if not hasattr(self, "form_data"):
- self.form_data = {}
-
- self.form_data['username'] = user
- self.form_data['password'] = data['password']
-
- html = req.load('https://secure.filecloud.io/user-login_p.html',
- post=self.form_data,
- multipart=True)
-
- self.logged_in = "you have successfully logged in - filecloud.io" in html
- self.form_data = {}
diff --git a/pyload/plugins/account/FilefactoryCom.py b/pyload/plugins/account/FilefactoryCom.py
deleted file mode 100644
index 21a6db6fb..000000000
--- a/pyload/plugins/account/FilefactoryCom.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from time import mktime, strptime
-
-from pycurl import REFERER
-
-from pyload.plugins.Account import Account
-
-
-class FilefactoryCom(Account):
- __name = "FilefactoryCom"
- __type = "account"
- __version = "0.14"
-
- __description = """Filefactory.com account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- VALID_UNTIL_PATTERN = r'Premium valid until: <strong>(?P<d>\d{1,2})\w{1,2} (?P<m>\w{3}), (?P<y>\d{4})</strong>'
-
-
- def loadAccountInfo(self, user, req):
- html = req.load("http://www.filefactory.com/account/")
-
- m = re.search(self.VALID_UNTIL_PATTERN, html)
- if m:
- premium = True
- validuntil = re.sub(self.VALID_UNTIL_PATTERN, '\g<d> \g<m> \g<y>', m.group(0))
- validuntil = mktime(strptime(validuntil, "%d %b %Y"))
- else:
- premium = False
- validuntil = -1
-
- return {"premium": premium, "trafficleft": -1, "validuntil": validuntil}
-
-
- def login(self, user, data, req):
- req.http.c.setopt(REFERER, "http://www.filefactory.com/member/login.php")
-
- html = req.load("http://www.filefactory.com/member/signin.php", post={
- "loginEmail": user,
- "loginPassword": data['password'],
- "Submit": "Sign In"})
-
- if req.lastEffectiveURL != "http://www.filefactory.com/account/":
- self.wrongPassword()
diff --git a/pyload/plugins/account/FilejungleCom.py b/pyload/plugins/account/FilejungleCom.py
deleted file mode 100644
index d3dbcd603..000000000
--- a/pyload/plugins/account/FilejungleCom.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from time import mktime, strptime
-
-from pyload.plugins.Account import Account
-
-
-class FilejungleCom(Account):
- __name = "FilejungleCom"
- __type = "account"
- __version = "0.11"
-
- __description = """Filejungle.com account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- login_timeout = 60
-
- URL = "http://filejungle.com/"
- TRAFFIC_LEFT_PATTERN = r'"/extend_premium\.php">Until (\d+ \w+ \d+)<br'
- LOGIN_FAILED_PATTERN = r'<span htmlfor="loginUser(Name|Password)" generated="true" class="fail_info">'
-
-
- def loadAccountInfo(self, user, req):
- html = req.load(self.URL + "dashboard.php")
- m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- if m:
- premium = True
- validuntil = mktime(strptime(m.group(1), "%d %b %Y"))
- else:
- premium = False
- validuntil = -1
-
- return {"premium": premium, "trafficleft": -1, "validuntil": validuntil}
-
-
- def login(self, user, data, req):
- html = req.load(self.URL + "login.php", post={
- "loginUserName": user,
- "loginUserPassword": data['password'],
- "loginFormSubmit": "Login",
- "recaptcha_challenge_field": "",
- "recaptcha_response_field": "",
- "recaptcha_shortencode_field": ""})
-
- if re.search(self.LOGIN_FAILED_PATTERN, html):
- self.wrongPassword()
diff --git a/pyload/plugins/account/FileomCom.py b/pyload/plugins/account/FileomCom.py
deleted file mode 100644
index 583b81d8a..000000000
--- a/pyload/plugins/account/FileomCom.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class FileomCom(XFSAccount):
- __name = "FileomCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Fileom.com account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "fileom.com"
diff --git a/pyload/plugins/account/FilerNet.py b/pyload/plugins/account/FilerNet.py
deleted file mode 100644
index 36758f37d..000000000
--- a/pyload/plugins/account/FilerNet.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-import time
-
-from pyload.plugins.Account import Account
-
-
-class FilerNet(Account):
- __name = "FilerNet"
- __type = "account"
- __version = "0.02"
-
- __description = """Filer.net account plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- TOKEN_PATTERN = r'_csrf_token" value="([^"]+)" />'
- WALID_UNTIL_PATTERN = r'Der Premium-Zugang ist gültig bis (.+)\.\s*</td>'
- TRAFFIC_PATTERN = r'Traffic</th>\s*<td>([^<]+)</td>'
- FREE_PATTERN = r'Account Status</th>\s*<td>\s*Free'
-
-
- def loadAccountInfo(self, user, req):
- html = req.load("https://filer.net/profile")
-
- # Free user
- if re.search(self.FREE_PATTERN, html):
- return {"premium": False, "validuntil": None, "trafficleft": None}
-
- until = re.search(self.WALID_UNTIL_PATTERN, html)
- traffic = re.search(self.TRAFFIC_PATTERN, html)
- if until and traffic:
- validuntil = int(time.mktime(time.strptime(until.group(1), "%d.%m.%Y %H:%M:%S")))
- trafficleft = self.parseTraffic(traffic.group(1))
- return {"premium": True, "validuntil": validuntil, "trafficleft": trafficleft}
- else:
- self.logError(_("Unable to retrieve account information"))
- return {"premium": False, "validuntil": None, "trafficleft": None}
-
-
- def login(self, user, data, req):
- html = req.load("https://filer.net/login")
- token = re.search(self.TOKEN_PATTERN, html).group(1)
- html = req.load("https://filer.net/login_check",
- post={"_username": user, "_password": data['password'],
- "_remember_me": "on", "_csrf_token": token, "_target_path": "https://filer.net/"})
- if 'Logout' not in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/FilerioCom.py b/pyload/plugins/account/FilerioCom.py
deleted file mode 100644
index 87a633dff..000000000
--- a/pyload/plugins/account/FilerioCom.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class FilerioCom(XFSAccount):
- __name = "FilerioCom"
- __type = "account"
- __version = "0.03"
-
- __description = """FileRio.in account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "filerio.in"
diff --git a/pyload/plugins/account/FilesMailRu.py b/pyload/plugins/account/FilesMailRu.py
deleted file mode 100644
index 4a829be89..000000000
--- a/pyload/plugins/account/FilesMailRu.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-
-
-class FilesMailRu(Account):
- __name = "FilesMailRu"
- __type = "account"
- __version = "0.10"
-
- __description = """Filesmail.ru account plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org")]
-
-
- def loadAccountInfo(self, user, req):
- return {"validuntil": None, "trafficleft": None}
-
-
- def login(self, user, data, req):
- user, domain = user.split("@")
-
- page = req.load("http://swa.mail.ru/cgi-bin/auth", None,
- {"Domain": domain, "Login": user, "Password": data['password'],
- "Page": "http://files.mail.ru/"}, cookies=True)
-
- if "НеверМПе ОЌя пПльзПвателя ОлО парПль" in page: # @TODO seems not to work
- self.wrongPassword()
diff --git a/pyload/plugins/account/FileserveCom.py b/pyload/plugins/account/FileserveCom.py
deleted file mode 100644
index 80be1db70..000000000
--- a/pyload/plugins/account/FileserveCom.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from time import mktime, strptime
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class FileserveCom(Account):
- __name = "FileserveCom"
- __type = "account"
- __version = "0.20"
-
- __description = """Fileserve.com account plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
- def loadAccountInfo(self, user, req):
- data = self.getAccountData(user)
-
- page = req.load("http://app.fileserve.com/api/login/", post={"username": user, "password": data['password'],
- "submit": "Submit+Query"})
- res = json_loads(page)
-
- if res['type'] == "premium":
- validuntil = mktime(strptime(res['expireTime'], "%Y-%m-%d %H:%M:%S"))
- return {"trafficleft": res['traffic'], "validuntil": validuntil}
- else:
- return {"premium": False, "trafficleft": None, "validuntil": None}
-
-
- def login(self, user, data, req):
- page = req.load("http://app.fileserve.com/api/login/", post={"username": user, "password": data['password'],
- "submit": "Submit+Query"})
- res = json_loads(page)
-
- if not res['type']:
- self.wrongPassword()
-
- # login at the fileserve page
- req.load("http://www.fileserve.com/login.php",
- post={"loginUserName": user, "loginUserPassword": data['password'], "autoLogin": "checked",
- "loginFormSubmit": "Login"})
diff --git a/pyload/plugins/account/FourSharedCom.py b/pyload/plugins/account/FourSharedCom.py
deleted file mode 100644
index 5565cbfc0..000000000
--- a/pyload/plugins/account/FourSharedCom.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class FourSharedCom(Account):
- __name = "FourSharedCom"
- __type = "account"
- __version = "0.03"
-
- __description = """FourShared.com account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- def loadAccountInfo(self, user, req):
- # Free mode only for now
- return {"premium": False}
-
-
- def login(self, user, data, req):
- req.cj.setCookie("4shared.com", "4langcookie", "en")
- res = req.load('http://www.4shared.com/web/login',
- post={'login': user,
- 'password': data['password'],
- 'remember': "on",
- '_remember': "on",
- 'returnTo': "http://www.4shared.com/account/home.jsp"})
-
- if 'Please log in to access your 4shared account' in res:
- self.wrongPassword()
diff --git a/pyload/plugins/account/FreakshareCom.py b/pyload/plugins/account/FreakshareCom.py
deleted file mode 100644
index 4a7bf5b59..000000000
--- a/pyload/plugins/account/FreakshareCom.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import strptime, mktime
-
-from pyload.plugins.Account import Account
-
-
-class FreakshareCom(Account):
- __name = "FreakshareCom"
- __type = "account"
- __version = "0.11"
-
- __description = """Freakshare.com account plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org")]
-
-
- def loadAccountInfo(self, user, req):
- page = req.load("http://freakshare.com/")
-
- validuntil = r'ltig bis:</td>\s*<td><b>([\d.:-]+)</b></td>'
- validuntil = re.search(validuntil, page, re.M)
- validuntil = validuntil.group(1).strip()
- validuntil = mktime(strptime(validuntil, "%d.%m.%Y - %H:%M"))
-
- traffic = r'Traffic verbleibend:</td>\s*<td>([^<]+)'
- traffic = re.search(traffic, page, re.M)
- traffic = traffic.group(1).strip()
- traffic = self.parseTraffic(traffic)
-
- return {"validuntil": validuntil, "trafficleft": traffic}
-
-
- def login(self, user, data, req):
- req.load("http://freakshare.com/index.php?language=EN")
-
- page = req.load("http://freakshare.com/login.html", None,
- {"submit": "Login", "user": user, "pass": data['password']}, cookies=True)
-
- if ">Wrong Username or Password" in page:
- self.wrongPassword()
diff --git a/pyload/plugins/account/FreeWayMe.py b/pyload/plugins/account/FreeWayMe.py
deleted file mode 100644
index efc4b28fc..000000000
--- a/pyload/plugins/account/FreeWayMe.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class FreeWayMe(Account):
- __name = "FreeWayMe"
- __type = "account"
- __version = "0.11"
-
- __description = """FreeWayMe account plugin"""
- __license = "GPLv3"
- __authors = [("Nicolas Giese", "james@free-way.me")]
-
-
- def loadAccountInfo(self, user, req):
- status = self.getAccountStatus(user, req)
- if not status:
- return False
- self.logDebug(status)
-
- account_info = {"validuntil": -1, "premium": False}
- if status['premium'] == "Free":
- account_info['trafficleft'] = int(status['guthaben']) * 1024
- elif status['premium'] == "Spender":
- account_info['trafficleft'] = -1
- elif status['premium'] == "Flatrate":
- account_info = {"validuntil": int(status['Flatrate']),
- "trafficleft": -1,
- "premium": True}
-
- return account_info
-
-
- def getpw(self, user):
- return self.accounts[user]['password']
-
-
- def login(self, user, data, req):
- status = self.getAccountStatus(user, req)
-
- # Check if user and password are valid
- if not status:
- self.wrongPassword()
-
-
- def getAccountStatus(self, user, req):
- answer = req.load("https://www.free-way.me/ajax/jd.php",
- get={"id": 4, "user": user, "pass": self.accounts[user]['password']})
- self.logDebug("Login: %s" % answer)
- if answer == "Invalid login":
- self.wrongPassword()
- return False
- return json_loads(answer)
diff --git a/pyload/plugins/account/FshareVn.py b/pyload/plugins/account/FshareVn.py
deleted file mode 100644
index 00ad9711c..000000000
--- a/pyload/plugins/account/FshareVn.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from time import mktime, strptime
-from pycurl import REFERER
-import re
-
-from pyload.plugins.Account import Account
-
-
-class FshareVn(Account):
- __name = "FshareVn"
- __type = "account"
- __version = "0.07"
-
- __description = """Fshare.vn account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- VALID_UNTIL_PATTERN = ur'<dt>Thời hạn dùng:</dt>\s*<dd>([^<]+)</dd>'
- LIFETIME_PATTERN = ur'<dt>Lần đăng nhập trước:</dt>\s*<dd>[^<]+</dd>'
- TRAFFIC_LEFT_PATTERN = ur'<dt>Tổng Dung Lượng Tài Khoản</dt>\s*<dd[^>]*>([\d.]+) ([kKMG])B</dd>'
- DIRECT_DOWNLOAD_PATTERN = ur'<input type="checkbox"\s*([^=>]*)[^>]*/>Kích hoạt download trực tiếp</dt>'
-
-
- def loadAccountInfo(self, user, req):
- html = req.load("http://www.fshare.vn/account_info.php", decode=True)
-
- if re.search(self.LIFETIME_PATTERN, html):
- self.logDebug("Lifetime membership detected")
- trafficleft = self.getTrafficLeft(html)
- return {"validuntil": -1, "trafficleft": trafficleft, "premium": True}
-
- m = re.search(self.VALID_UNTIL_PATTERN, html)
- if m:
- premium = True
- validuntil = mktime(strptime(m.group(1), '%I:%M:%S %p %d-%m-%Y'))
- trafficleft = self.getTrafficLeft(html)
- else:
- premium = False
- validuntil = None
- trafficleft = None
-
- return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
-
-
- def login(self, user, data, req):
- req.http.c.setopt(REFERER, "https://www.fshare.vn/login.php")
-
- html = req.load('https://www.fshare.vn/login.php', post={
- "login_password": data['password'],
- "login_useremail": user,
- "url_refe": "http://www.fshare.vn/index.php"
- }, referer=True, decode=True)
-
- if not re.search(r'<img\s+alt="VIP"', html):
- self.wrongPassword()
-
-
- def getTrafficLeft(self, html):
- m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- return float(m.group(1)) * 1024 ** {'k': 0, 'K': 0, 'M': 1, 'G': 2}[m.group(2)] if m else 0
diff --git a/pyload/plugins/account/Ftp.py b/pyload/plugins/account/Ftp.py
deleted file mode 100644
index cdc4d46cb..000000000
--- a/pyload/plugins/account/Ftp.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-
-
-class Ftp(Account):
- __name = "Ftp"
- __type = "account"
- __version = "0.01"
-
- __description = """Ftp dummy account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- login_timeout = -1 #: Unlimited
- info_threshold = -1 #: Unlimited
diff --git a/pyload/plugins/account/HellshareCz.py b/pyload/plugins/account/HellshareCz.py
deleted file mode 100644
index 964511383..000000000
--- a/pyload/plugins/account/HellshareCz.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-import time
-
-from pyload.plugins.Account import Account
-
-
-class HellshareCz(Account):
- __name = "HellshareCz"
- __type = "account"
- __version = "0.14"
-
- __description = """Hellshare.cz account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- CREDIT_LEFT_PATTERN = r'<div class="credit-link">\s*<table>\s*<tr>\s*<th>(\d+|\d\d\.\d\d\.)</th>'
-
-
- def loadAccountInfo(self, user, req):
- self.relogin(user)
- html = req.load("http://www.hellshare.com/")
-
- m = re.search(self.CREDIT_LEFT_PATTERN, html)
- if m is None:
- trafficleft = None
- validuntil = None
- premium = False
- else:
- credit = m.group(1)
- premium = True
- try:
- if "." in credit:
- #Time-based account
- vt = [int(x) for x in credit.split('.')[:2]]
- lt = time.localtime()
- year = lt.tm_year + int(vt[1] < lt.tm_mon or (vt[1] == lt.tm_mon and vt[0] < lt.tm_mday))
- validuntil = time.mktime(time.strptime("%s%d 23:59:59" % (credit, year), "%d.%m.%Y %H:%M:%S"))
- trafficleft = -1
- else:
- #Traffic-based account
- trafficleft = int(credit) * 1024
- validuntil = -1
- except Exception, e:
- self.logError(_("Unable to parse credit info"), e)
- validuntil = -1
- trafficleft = -1
-
- return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
-
-
- def login(self, user, data, req):
- html = req.load('http://www.hellshare.com/')
- if req.lastEffectiveURL != 'http://www.hellshare.com/':
- #Switch to English
- self.logDebug("Switch lang - URL: %s" % req.lastEffectiveURL)
- json = req.load("%s?do=locRouter-show" % req.lastEffectiveURL)
- hash = re.search(r"(\-\-[0-9a-f]+\-)", json).group(1)
- self.logDebug("Switch lang - HASH: %s" % hash)
- html = req.load('http://www.hellshare.com/%s/' % hash)
-
- if re.search(self.CREDIT_LEFT_PATTERN, html):
- self.logDebug("Already logged in")
- return
-
- html = req.load('http://www.hellshare.com/login?do=loginForm-submit', post={
- "login": "Log in",
- "password": data['password'],
- "username": user,
- "perm_login": "on"
- })
-
- if "<p>You input a wrong user name or wrong password</p>" in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/Http.py b/pyload/plugins/account/Http.py
deleted file mode 100644
index 141443e27..000000000
--- a/pyload/plugins/account/Http.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-
-
-class Http(Account):
- __name = "Http"
- __type = "account"
- __version = "0.01"
-
- __description = """Http dummy account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- login_timeout = -1 #: Unlimited
- info_threshold = -1 #: Unlimited
diff --git a/pyload/plugins/account/HugefilesNet.py b/pyload/plugins/account/HugefilesNet.py
deleted file mode 100644
index 475d50295..000000000
--- a/pyload/plugins/account/HugefilesNet.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class HugefilesNet(XFSAccount):
- __name = "HugefilesNet"
- __type = "account"
- __version = "0.02"
-
- __description = """Hugefiles.net account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "hugefiles.net"
diff --git a/pyload/plugins/account/HundredEightyUploadCom.py b/pyload/plugins/account/HundredEightyUploadCom.py
deleted file mode 100644
index 6a22285cb..000000000
--- a/pyload/plugins/account/HundredEightyUploadCom.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class HundredEightyUploadCom(XFSAccount):
- __name = "HundredEightyUploadCom"
- __type = "account"
- __version = "0.02"
-
- __description = """180upload.com account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "180upload.com"
diff --git a/pyload/plugins/account/JunocloudMe.py b/pyload/plugins/account/JunocloudMe.py
deleted file mode 100644
index 547aec47a..000000000
--- a/pyload/plugins/account/JunocloudMe.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class JunocloudMe(XFSAccount):
- __name = "JunocloudMe"
- __type = "account"
- __version = "0.02"
-
- __description = """Junocloud.me account plugin"""
- __license = "GPLv3"
- __authors = [("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "junocloud.me"
diff --git a/pyload/plugins/account/Keep2shareCc.py b/pyload/plugins/account/Keep2shareCc.py
deleted file mode 100644
index cbbf75548..000000000
--- a/pyload/plugins/account/Keep2shareCc.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import gmtime, mktime, strptime
-
-from pyload.plugins.Account import Account
-
-
-class Keep2shareCc(Account):
- __name = "Keep2shareCc"
- __type = "account"
- __version = "0.02"
-
- __description = """Keep2share.cc account plugin"""
- __license = "GPLv3"
- __authors = [("aeronaut", "aeronaut@pianoguy.de")]
-
-
- VALID_UNTIL_PATTERN = r'Premium expires: <b>(.+?)</b>'
- TRAFFIC_LEFT_PATTERN = r'Available traffic \(today\):<b><a href="/user/statistic.html">(.+?)</a>'
-
- LOGIN_FAIL_PATTERN = r'Please fix the following input errors'
-
-
- def loadAccountInfo(self, user, req):
- validuntil = None
- trafficleft = None
- premium = None
-
- html = req.load("http://keep2share.cc/site/profile.html", decode=True)
-
- m = re.search(self.VALID_UNTIL_PATTERN, html)
- if m:
- expiredate = m.group(1).strip()
- self.logDebug("Expire date: " + expiredate)
-
- try:
- validuntil = mktime(strptime(expiredate, "%Y.%m.%d"))
-
- except Exception, e:
- self.logError(e)
-
- else:
- if validuntil > mktime(gmtime()):
- premium = True
- else:
- premium = False
- validuntil = None
-
- m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- if m:
- try:
- trafficleft = self.parseTraffic(m.group(1))
-
- except Exception, e:
- self.logError(e)
-
- return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium}
-
-
- def login(self, user, data, req):
- req.cj.setCookie("keep2share.cc", "lang", "en")
-
- html = req.load("http://keep2share.cc/login.html",
- post={'LoginForm[username]': user, 'LoginForm[password]': data['password']})
-
- if re.search(self.LOGIN_FAIL_PATTERN, html):
- self.wrongPassword()
diff --git a/pyload/plugins/account/LetitbitNet.py b/pyload/plugins/account/LetitbitNet.py
deleted file mode 100644
index 5a3d2ea90..000000000
--- a/pyload/plugins/account/LetitbitNet.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-# from pyload.utils import json_loads, json_dumps
-
-
-class LetitbitNet(Account):
- __name = "LetitbitNet"
- __type = "account"
- __version = "0.01"
-
- __description = """Letitbit.net account plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- def loadAccountInfo(self, user, req):
- ## DISABLED BECAUSE IT GETS 'key exausted' EVEN IF VALID ##
- # api_key = self.accounts[user]['password']
- # json_data = [api_key, ['key/info']]
- # api_rep = req.load('http://api.letitbit.net/json', post={'r': json_dumps(json_data)})
- # self.logDebug("API Key Info: " + api_rep)
- # api_rep = json_loads(api_rep)
- #
- # if api_rep['status'] == 'FAIL':
- # self.logWarning(api_rep['data'])
- # return {'valid': False, 'premium': False}
-
- return {"premium": True}
-
-
- def login(self, user, data, req):
- # API_KEY is the username and the PREMIUM_KEY is the password
- self.logInfo(_("You must use your API KEY as username and the PREMIUM KEY as password"))
diff --git a/pyload/plugins/account/LinestorageCom.py b/pyload/plugins/account/LinestorageCom.py
deleted file mode 100644
index f94ee8fa0..000000000
--- a/pyload/plugins/account/LinestorageCom.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class LinestorageCom(XFSAccount):
- __name = "LinestorageCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Linestorage.com account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "linestorage.com"
diff --git a/pyload/plugins/account/LinksnappyCom.py b/pyload/plugins/account/LinksnappyCom.py
deleted file mode 100644
index cf3d7a6e2..000000000
--- a/pyload/plugins/account/LinksnappyCom.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from hashlib import md5
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class LinksnappyCom(Account):
- __name = "LinksnappyCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Linksnappy.com account plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- def loadAccountInfo(self, user, req):
- data = self.getAccountData(user)
- r = req.load('http://gen.linksnappy.com/lseAPI.php',
- get={'act': 'USERDETAILS', 'username': user, 'password': md5(data['password']).hexdigest()})
- self.logDebug("JSON data: " + r)
- j = json_loads(r)
-
- if j['error']:
- return {"premium": False}
-
- validuntil = j['return']['expire']
- if validuntil == 'lifetime':
- validuntil = -1
- elif validuntil == 'expired':
- return {"premium": False}
- else:
- validuntil = float(validuntil)
-
- if 'trafficleft' not in j['return'] or isinstance(j['return']['trafficleft'], str):
- trafficleft = -1
- else:
- trafficleft = int(j['return']['trafficleft']) * 1024
-
- return {"premium": True, "validuntil": validuntil, "trafficleft": trafficleft}
-
-
- def login(self, user, data, req):
- r = req.load('http://gen.linksnappy.com/lseAPI.php',
- get={'act': 'USERDETAILS', 'username': user, 'password': md5(data['password']).hexdigest()})
-
- if 'Invalid Account Details' in r:
- self.wrongPassword()
diff --git a/pyload/plugins/account/LomafileCom.py b/pyload/plugins/account/LomafileCom.py
deleted file mode 100644
index fdf0bd528..000000000
--- a/pyload/plugins/account/LomafileCom.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class LomafileCom(XFSAccount):
- __name = "LomafileCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Lomafile.com account plugin"""
- __license = "GPLv3"
- __authors = [("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "lomafile.com"
diff --git a/pyload/plugins/account/MegaDebridEu.py b/pyload/plugins/account/MegaDebridEu.py
deleted file mode 100644
index 7b0e6f3f4..000000000
--- a/pyload/plugins/account/MegaDebridEu.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class MegaDebridEu(Account):
- __name = "MegaDebridEu"
- __type = "account"
- __version = "0.20"
-
- __description = """mega-debrid.eu account plugin"""
- __license = "GPLv3"
- __authors = [("D.Ducatel", "dducatel@je-geek.fr")]
-
-
- # Define the base URL of MegaDebrid api
- API_URL = "https://www.mega-debrid.eu/api.php"
-
-
- def loadAccountInfo(self, user, req):
- data = self.getAccountData(user)
- jsonResponse = req.load(self.API_URL,
- get={'action': 'connectUser', 'login': user, 'password': data['password']})
- res = json_loads(jsonResponse)
-
- if res['response_code'] == "ok":
- return {"premium": True, "validuntil": float(res['vip_end']), "status": True}
- else:
- self.logError(res)
- return {"status": False, "premium": False}
-
-
- def login(self, user, data, req):
- jsonResponse = req.load(self.API_URL,
- get={'action': 'connectUser', 'login': user, 'password': data['password']})
- res = json_loads(jsonResponse)
- if res['response_code'] != "ok":
- self.wrongPassword()
diff --git a/pyload/plugins/account/MegaRapidCz.py b/pyload/plugins/account/MegaRapidCz.py
deleted file mode 100644
index ffe08319f..000000000
--- a/pyload/plugins/account/MegaRapidCz.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import mktime, strptime
-from pyload.plugins.Account import Account
-
-
-class MegaRapidCz(Account):
- __name = "MegaRapidCz"
- __type = "account"
- __version = "0.34"
-
- __description = """MegaRapid.cz account plugin"""
- __license = "GPLv3"
- __authors = [("MikyWoW", "mikywow@seznam.cz"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- login_timeout = 60
-
- LIMITDL_PATTERN = ur'<td>Max. počet paralelních stahování: </td><td>(\d+)'
- VALID_UNTIL_PATTERN = ur'<td>Paušální stahování aktivní. Vyprší </td><td><strong>(.*?)</strong>'
- TRAFFIC_LEFT_PATTERN = r'<tr><td>Kredit</td><td>(.*?) GiB'
-
-
- def loadAccountInfo(self, user, req):
- html = req.load("http://megarapid.cz/mujucet/", decode=True)
-
- m = re.search(self.LIMITDL_PATTERN, html)
- if m:
- data = self.getAccountData(user)
- data['options']['limitDL'] = [int(m.group(1))]
-
- m = re.search(self.VALID_UNTIL_PATTERN, html)
- if m:
- validuntil = mktime(strptime(m.group(1), "%d.%m.%Y - %H:%M"))
- return {"premium": True, "trafficleft": -1, "validuntil": validuntil}
-
- m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- if m:
- trafficleft = float(m.group(1)) * (1 << 20)
- return {"premium": True, "trafficleft": trafficleft, "validuntil": -1}
-
- return {"premium": False, "trafficleft": None, "validuntil": None}
-
-
- def login(self, user, data, req):
- htm = req.load("http://megarapid.cz/prihlaseni/")
- if "Heslo:" in htm:
- start = htm.index('id="inp_hash" name="hash" value="')
- htm = htm[start + 33:]
- hashes = htm[0:32]
- htm = req.load("http://megarapid.cz/prihlaseni/",
- post={"hash": hashes,
- "login": user,
- "pass1": data['password'],
- "remember": 0,
- "sbmt": u"Přihlásit"})
diff --git a/pyload/plugins/account/MegasharesCom.py b/pyload/plugins/account/MegasharesCom.py
deleted file mode 100644
index d55e6c3a4..000000000
--- a/pyload/plugins/account/MegasharesCom.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from time import mktime, strptime
-
-from pyload.plugins.Account import Account
-
-
-class MegasharesCom(Account):
- __name = "MegasharesCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Megashares.com account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- VALID_UNTIL_PATTERN = r'<p class="premium_info_box">Period Ends: (\w{3} \d{1,2}, \d{4})</p>'
-
-
- def loadAccountInfo(self, user, req):
- #self.relogin(user)
- html = req.load("http://d01.megashares.com/myms.php", decode=True)
-
- premium = False if '>Premium Upgrade<' in html else True
-
- validuntil = trafficleft = -1
- try:
- timestr = re.search(self.VALID_UNTIL_PATTERN, html).group(1)
- self.logDebug(timestr)
- validuntil = mktime(strptime(timestr, "%b %d, %Y"))
- except Exception, e:
- self.logError(e)
-
- return {"validuntil": validuntil, "trafficleft": -1, "premium": premium}
-
-
- def login(self, user, data, req):
- html = req.load('http://d01.megashares.com/myms_login.php', post={
- "httpref": "",
- "myms_login": "Login",
- "mymslogin_name": user,
- "mymspassword": data['password']
- }, decode=True)
-
- if not '<span class="b ml">%s</span>' % user in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/MovReelCom.py b/pyload/plugins/account/MovReelCom.py
deleted file mode 100644
index 7e41693e0..000000000
--- a/pyload/plugins/account/MovReelCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class MovReelCom(XFSAccount):
- __name = "MovReelCom"
- __type = "account"
- __version = "0.03"
-
- __description = """Movreel.com account plugin"""
- __license = "GPLv3"
- __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
-
-
- login_timeout = 60
- info_threshold = 30
-
- HOSTER_DOMAIN = "movreel.com"
diff --git a/pyload/plugins/account/MultishareCz.py b/pyload/plugins/account/MultishareCz.py
deleted file mode 100644
index cb2f6a1d4..000000000
--- a/pyload/plugins/account/MultishareCz.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Account import Account
-
-
-class MultishareCz(Account):
- __name = "MultishareCz"
- __type = "account"
- __version = "0.03"
-
- __description = """Multishare.cz account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- TRAFFIC_LEFT_PATTERN = r'<span class="profil-zvyrazneni">Kredit:</span>\s*<strong>(?P<S>[\d.,]+)&nbsp;(?P<U>[\w^_]+)</strong>'
- ACCOUNT_INFO_PATTERN = r'<input type="hidden" id="(u_ID|u_hash)" name="[^"]*" value="([^"]+)">'
-
-
- def loadAccountInfo(self, user, req):
- #self.relogin(user)
- html = req.load("http://www.multishare.cz/profil/", decode=True)
-
- m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- trafficleft = self.parseTraffic(m.group('S'), m.group('U')) if m else 0
- self.premium = True if trafficleft else False
-
- html = req.load("http://www.multishare.cz/", decode=True)
- mms_info = dict(re.findall(self.ACCOUNT_INFO_PATTERN, html))
-
- return dict(mms_info, **{"validuntil": -1, "trafficleft": trafficleft})
-
-
- def login(self, user, data, req):
- html = req.load('http://www.multishare.cz/html/prihlaseni_process.php', post={
- "akce": "Přihlásit",
- "heslo": data['password'],
- "jmeno": user
- }, decode=True)
-
- if '<div class="akce-chyba akce">' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/MyfastfileCom.py b/pyload/plugins/account/MyfastfileCom.py
deleted file mode 100644
index 3aa16ee80..000000000
--- a/pyload/plugins/account/MyfastfileCom.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from time import time
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class MyfastfileCom(Account):
- __name = "MyfastfileCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Myfastfile.com account plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- def loadAccountInfo(self, user, req):
- if 'days_left' in self.json_data:
- validuntil = int(time() + self.json_data['days_left'] * 24 * 60 * 60)
- return {"premium": True, "validuntil": validuntil, "trafficleft": -1}
- else:
- self.logError(_("Unable to get account information"))
-
-
- def login(self, user, data, req):
- # The password to use is the API-Password found at http://myfastfile.com/myaccount
- html = req.load("http://myfastfile.com/api.php",
- get={"user": user, "pass": data['password']})
- self.logDebug("JSON data: " + html)
- self.json_data = json_loads(html)
- if self.json_data['status'] != 'ok':
- self.logError(_('Invalid login. The password to use is the API-Password you find in your "My Account" page'))
- self.wrongPassword()
diff --git a/pyload/plugins/account/NetloadIn.py b/pyload/plugins/account/NetloadIn.py
deleted file mode 100644
index e2cb03b91..000000000
--- a/pyload/plugins/account/NetloadIn.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from time import time
-
-from pyload.plugins.Account import Account
-
-
-class NetloadIn(Account):
- __name = "NetloadIn"
- __type = "account"
- __version = "0.22"
-
- __description = """Netload.in account plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("CryNickSystems", "webmaster@pcProfil.de")]
-
-
- def loadAccountInfo(self, user, req):
- page = req.load("http://netload.in/index.php", get={'id': 2, 'lang': "de"})
- left = r'>(\d+) (Tag|Tage), (\d+) Stunden<'
- left = re.search(left, page)
- if left:
- validuntil = time() + int(left.group(1)) * 24 * 60 * 60 + int(left.group(3)) * 60 * 60
- trafficleft = -1
- premium = True
- else:
- validuntil = None
- premium = False
- trafficleft = None
- return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
-
-
- def login(self, user, data, req):
- page = req.load("http://netload.in/index.php", None,
- {"txtuser": user, "txtpass": data['password'], "txtcheck": "login", "txtlogin": "Login"},
- cookies=True)
- if "password or it might be invalid!" in page:
- self.wrongPassword()
diff --git a/pyload/plugins/account/NosuploadCom.py b/pyload/plugins/account/NosuploadCom.py
deleted file mode 100644
index d839827ed..000000000
--- a/pyload/plugins/account/NosuploadCom.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class NosuploadCom(XFSAccount):
- __name = "NosuploadCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Nosupload.com account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "nosupload.com"
diff --git a/pyload/plugins/account/NovafileCom.py b/pyload/plugins/account/NovafileCom.py
deleted file mode 100644
index b244139bc..000000000
--- a/pyload/plugins/account/NovafileCom.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class NovafileCom(XFSAccount):
- __name = "NovafileCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Novafile.com account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "novafile.com"
diff --git a/pyload/plugins/account/NowVideoAt.py b/pyload/plugins/account/NowVideoAt.py
deleted file mode 100644
index 0926dc03b..000000000
--- a/pyload/plugins/account/NowVideoAt.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import gmtime, mktime, strptime
-
-from pyload.plugins.Account import Account
-
-
-class NowVideoAt(Account):
- __name = "NowVideoAt"
- __type = "account"
- __version = "0.01"
-
- __description = """NowVideo.at account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- VALID_UNTIL_PATTERN = r'>Your premium membership expires on: (.+?)<'
-
-
- def loadAccountInfo(self, user, req):
- validuntil = None
- trafficleft = -1
- premium = None
-
- html = req.load("http://www.nowvideo.at/premium.php")
-
- m = re.search(self.VALID_UNTIL_PATTERN, html)
- if m:
- expiredate = m.group(1).strip()
- self.logDebug("Expire date: " + expiredate)
-
- try:
- validuntil = mktime(strptime(expiredate, "%Y-%b-%d"))
-
- except Exception, e:
- self.logError(e)
-
- else:
- if validuntil > mktime(gmtime()):
- premium = True
- else:
- premium = False
- validuntil = -1
-
- return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
-
-
- def login(self, user, data, req):
- html = req.load("http://www.nowvideo.at/login.php",
- post={'user': user, 'pass': data['password']})
-
-        if ">Invalid login details" in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/OboomCom.py b/pyload/plugins/account/OboomCom.py
deleted file mode 100644
index 4ea2483aa..000000000
--- a/pyload/plugins/account/OboomCom.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import time
-
-from beaker.crypto.pbkdf2 import PBKDF2
-
-from pyload.utils import json_loads
-from pyload.plugins.Account import Account
-
-
-class OboomCom(Account):
- __name = "OboomCom"
- __type = "account"
- __version = "0.21"
-
- __description = """Oboom.com account plugin"""
- __license = "GPLv3"
- __authors = [("stanley", "stanley.foerster@gmail.com")]
-
-
- def loadAccountData(self, user, req):
- passwd = self.getAccountData(user)['password']
- salt = passwd[::-1]
- pbkdf2 = PBKDF2(passwd, salt, 1000).hexread(16)
- result = json_loads(req.load("https://www.oboom.com/1/login", get={"auth": user, "pass": pbkdf2}))
- if not result[0] == 200:
- self.logWarning(_("Failed to log in: %s") % result[1])
- self.wrongPassword()
- return result[1]
-
-
- def loadAccountInfo(self, name, req):
- accountData = self.loadAccountData(name, req)
-
- userData = accountData['user']
-
- if userData['premium'] == "null":
- premium = False
- else:
- premium = True
-
- if userData['premium_unix'] == "null":
- validUntil = -1
- else:
- validUntil = int(userData['premium_unix'])
-
- traffic = userData['traffic']
-
- trafficLeft = traffic['current']
- maxTraffic = traffic['max']
-
- session = accountData['session']
-
- return {'premium' : premium,
- 'validuntil' : validUntil,
- 'trafficleft': trafficLeft / 1024, #@TODO: Remove / 1024 in 0.4.10
- 'maxtraffic' : maxTraffic / 1024, #@TODO: Remove / 1024 in 0.4.10
- 'session' : session}
-
-
- def login(self, user, data, req):
- self.loadAccountData(user, req)
diff --git a/pyload/plugins/account/OneFichierCom.py b/pyload/plugins/account/OneFichierCom.py
deleted file mode 100644
index 3ee602202..000000000
--- a/pyload/plugins/account/OneFichierCom.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import strptime, mktime
-
-from pycurl import REFERER
-
-from pyload.plugins.Account import Account
-
-
-class OneFichierCom(Account):
- __name = "OneFichierCom"
- __type = "account"
- __version = "0.11"
-
- __description = """1fichier.com account plugin"""
- __license = "GPLv3"
- __authors = [("Elrick69", "elrick69[AT]rocketmail[DOT]com"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- VALID_UNTIL_PATTERN = r'Your Premium Status will end the (\d+/\d+/\d+)'
-
-
- def loadAccountInfo(self, user, req):
- validuntil = None
- trafficleft = -1
- premium = None
-
- html = req.load("https://1fichier.com/console/abo.pl")
-
- m = re.search(self.VALID_UNTIL_PATTERN, html)
- if m:
- expiredate = m.group(1)
- self.logDebug("Expire date: " + expiredate)
-
- try:
- validuntil = mktime(strptime(expiredate, "%d/%m/%Y"))
- except Exception, e:
- self.logError(e)
- else:
- premium = True
-
- return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium or False}
-
-
- def login(self, user, data, req):
- req.http.c.setopt(REFERER, "https://1fichier.com/login.pl?lg=en")
-
- html = req.load("https://1fichier.com/login.pl?lg=en",
- post={'mail': user, 'pass': data['password'], 'It': "on", 'purge': "off", 'valider': "Send"})
-
- if '>Invalid email address' in html or '>Invalid password' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/OverLoadMe.py b/pyload/plugins/account/OverLoadMe.py
deleted file mode 100644
index 2bedb496a..000000000
--- a/pyload/plugins/account/OverLoadMe.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class OverLoadMe(Account):
- __name = "OverLoadMe"
- __type = "account"
- __version = "0.01"
-
- __description = """Over-Load.me account plugin"""
- __license = "GPLv3"
- __authors = [("marley", "marley@over-load.me")]
-
-
- def loadAccountInfo(self, user, req):
- data = self.getAccountData(user)
- page = req.load("https://api.over-load.me/account.php", get={"user": user, "auth": data['password']}).strip()
- data = json_loads(page)
-
- # Check for premium
- if data['membership'] == "Free":
- return {"premium": False}
-
- account_info = {"validuntil": data['expirationunix'], "trafficleft": -1}
- return account_info
-
-
- def login(self, user, data, req):
- jsondata = req.load("https://api.over-load.me/account.php",
- get={"user": user, "auth": data['password']}).strip()
- data = json_loads(jsondata)
-
- if data['err'] == 1:
- self.wrongPassword()
diff --git a/pyload/plugins/account/PremiumTo.py b/pyload/plugins/account/PremiumTo.py
deleted file mode 100644
index 191678434..000000000
--- a/pyload/plugins/account/PremiumTo.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-
-
-class PremiumTo(Account):
- __name = "PremiumTo"
- __type = "account"
- __version = "0.04"
-
- __description = """Premium.to account plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
-
- def loadAccountInfo(self, user, req):
- api_r = req.load("http://premium.to/api/straffic.php",
- get={'username': self.username, 'password': self.password})
- traffic = sum(map(int, api_r.split(';')))
-
- return {"trafficleft": int(traffic) / 1024, "validuntil": -1} #@TODO: Remove / 1024 in 0.4.10
-
-
- def login(self, user, data, req):
- self.username = user
- self.password = data['password']
- authcode = req.load("http://premium.to/api/getauthcode.php",
- get={'username': user, 'password': self.password}).strip()
-
- if "wrong username" in authcode:
- self.wrongPassword()
diff --git a/pyload/plugins/account/PremiumizeMe.py b/pyload/plugins/account/PremiumizeMe.py
deleted file mode 100644
index 96dd67b98..000000000
--- a/pyload/plugins/account/PremiumizeMe.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-
-from pyload.utils import json_loads
-
-
-class PremiumizeMe(Account):
- __name = "PremiumizeMe"
- __type = "account"
- __version = "0.11"
-
- __description = """Premiumize.me account plugin"""
- __license = "GPLv3"
- __authors = [("Florian Franzen", "FlorianFranzen@gmail.com")]
-
-
- def loadAccountInfo(self, user, req):
- # Get user data from premiumize.me
- status = self.getAccountStatus(user, req)
- self.logDebug(status)
-
- # Parse account info
- account_info = {"validuntil": float(status['result']['expires']),
- "trafficleft": max(0, status['result']['trafficleft_bytes'])}
-
- if status['result']['type'] == 'free':
- account_info['premium'] = False
-
- return account_info
-
-
- def login(self, user, data, req):
- # Get user data from premiumize.me
- status = self.getAccountStatus(user, req)
-
- # Check if user and password are valid
- if status['status'] != 200:
- self.wrongPassword()
-
-
- def getAccountStatus(self, user, req):
- # Use premiumize.me API v1 (see https://secure.premiumize.me/?show=api)
- # to retrieve account info and return the parsed json answer
- answer = req.load("https://api.premiumize.me/pm-api/v1.php",
- get={'method' : "accountstatus",
- 'params[login]': user,
- 'params[pass]' : self.accounts[user]['password']})
- return json_loads(answer)
diff --git a/pyload/plugins/account/QuickshareCz.py b/pyload/plugins/account/QuickshareCz.py
deleted file mode 100644
index c4da516fb..000000000
--- a/pyload/plugins/account/QuickshareCz.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Account import Account
-
-
-class QuickshareCz(Account):
- __name = "QuickshareCz"
- __type = "account"
- __version = "0.02"
-
- __description = """Quickshare.cz account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- TRAFFIC_LEFT_PATTERN = r'Stav kreditu: <strong>(.+?)</strong>'
-
-
- def loadAccountInfo(self, user, req):
- html = req.load("http://www.quickshare.cz/premium", decode=True)
-
- m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- if m:
- trafficleft = self.parseTraffic(m.group(1))
- premium = True if trafficleft else False
- else:
- trafficleft = None
- premium = False
-
- return {"validuntil": -1, "trafficleft": trafficleft, "premium": premium}
-
-
- def login(self, user, data, req):
- html = req.load('http://www.quickshare.cz/html/prihlaseni_process.php', post={
- "akce": u'Přihlásit',
- "heslo": data['password'],
- "jmeno": user
- }, decode=True)
-
-        if u'>Takový uživatel neexistuje.<' in html or u'>Špatné heslo.<' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/RPNetBiz.py b/pyload/plugins/account/RPNetBiz.py
deleted file mode 100644
index b652b377c..000000000
--- a/pyload/plugins/account/RPNetBiz.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class RPNetBiz(Account):
- __name = "RPNetBiz"
- __type = "account"
- __version = "0.10"
-
- __description = """RPNet.biz account plugin"""
- __license = "GPLv3"
- __authors = [("Dman", "dmanugm@gmail.com")]
-
-
- def loadAccountInfo(self, user, req):
- # Get account information from rpnet.biz
- res = self.getAccountStatus(user, req)
- try:
- if res['accountInfo']['isPremium']:
- # Parse account info. Change the trafficleft later to support per host info.
- account_info = {"validuntil": int(res['accountInfo']['premiumExpiry']),
- "trafficleft": -1, "premium": True}
- else:
- account_info = {"validuntil": None, "trafficleft": None, "premium": False}
-
- except KeyError:
- #handle wrong password exception
- account_info = {"validuntil": None, "trafficleft": None, "premium": False}
-
- return account_info
-
-
- def login(self, user, data, req):
- # Get account information from rpnet.biz
- res = self.getAccountStatus(user, req)
-
- # If we have an error in the res, we have wrong login information
- if 'error' in res:
- self.wrongPassword()
-
-
- def getAccountStatus(self, user, req):
- # Using the rpnet API, check if valid premium account
- res = req.load("https://premium.rpnet.biz/client_api.php",
- get={"username": user, "password": self.accounts[user]['password'],
- "action": "showAccountInformation"})
- self.logDebug("JSON data: %s" % res)
-
- return json_loads(res)
diff --git a/pyload/plugins/account/RapidfileshareNet.py b/pyload/plugins/account/RapidfileshareNet.py
deleted file mode 100644
index 1c055ea19..000000000
--- a/pyload/plugins/account/RapidfileshareNet.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class RapidfileshareNet(XFSAccount):
- __name = "RapidfileshareNet"
- __type = "account"
- __version = "0.05"
-
- __description = """Rapidfileshare.net account plugin"""
- __license = "GPLv3"
- __authors = [("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "rapidfileshare.net"
-
- TRAFFIC_LEFT_PATTERN = r'>Traffic available today:</TD><TD><label for="name">\s*(?P<S>[\d.,]+)\s*(?:(?P<U>[\w^_]+))?'
diff --git a/pyload/plugins/account/RapidgatorNet.py b/pyload/plugins/account/RapidgatorNet.py
deleted file mode 100644
index 3dd8cf3d2..000000000
--- a/pyload/plugins/account/RapidgatorNet.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class RapidgatorNet(Account):
- __name = "RapidgatorNet"
- __type = "account"
- __version = "0.04"
-
- __description = """Rapidgator.net account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- API_URL = 'http://rapidgator.net/api/user'
-
-
- def loadAccountInfo(self, user, req):
- try:
- sid = self.getAccountData(user).get('SID')
- assert sid
-
- json = req.load("%s/info?sid=%s" % (self.API_URL, sid))
- self.logDebug("API:USERINFO", json)
- json = json_loads(json)
-
- if json['response_status'] == 200:
- if "reset_in" in json['response']:
- self.scheduleRefresh(user, json['response']['reset_in'])
-
- return {"validuntil": json['response']['expire_date'],
- "trafficleft": int(json['response']['traffic_left']),
- "premium": True}
- else:
- self.logError(json['response_details'])
- except Exception, e:
- self.logError(e)
-
- return {"validuntil": None, "trafficleft": None, "premium": False}
-
-
- def login(self, user, data, req):
- try:
- json = req.load('%s/login' % self.API_URL, post={"username": user, "password": data['password']})
- self.logDebug("API:LOGIN", json)
- json = json_loads(json)
-
- if json['response_status'] == 200:
- data['SID'] = str(json['response']['session_id'])
- return
- else:
- self.logError(json['response_details'])
- except Exception, e:
- self.logError(e)
-
- self.wrongPassword()
diff --git a/pyload/plugins/account/RapiduNet.py b/pyload/plugins/account/RapiduNet.py
deleted file mode 100644
index 4489c6fbc..000000000
--- a/pyload/plugins/account/RapiduNet.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class RapiduNet(Account):
- __name = "RapiduNet"
- __type = "account"
- __version = "0.02"
-
- __description = """Rapidu.net account plugin"""
- __license = "GPLv3"
- __authors = [("prOq", "")]
-
-
- PREMIUM_PATTERN = r'<a href="premium/" style="padding-left: 0px;">Account: <b>Premium</b></a>'
-
-
- def loadAccountInfo(self, user, req):
- info = {'validuntil': None, 'trafficleft': None, 'premium': False}
-
- req.load("https://rapidu.net/ajax.php", get={'a': "getChangeLang"}, post={"_go": "", "lang": "en"})
- html = req.load("https://rapidu.net/", decode=True)
-
- if re.search(self.PREMIUM_PATTERN, html):
- info['premium'] = True
-
- return info
-
-
- def login(self, user, data, req):
- try:
- json = json_loads(req.load("https://rapidu.net/ajax.php?a=getUserLogin",
- post={'_go': "",
- 'login': user,
- 'pass': data['password'],
- 'member': "1"}))
-
- self.logDebug(json)
-
- if not json['message'] == "success":
- self.wrongPassword()
-
- except Exception, e:
- self.logError(e)
diff --git a/pyload/plugins/account/RarefileNet.py b/pyload/plugins/account/RarefileNet.py
deleted file mode 100644
index c0b855373..000000000
--- a/pyload/plugins/account/RarefileNet.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class RarefileNet(XFSAccount):
- __name = "RarefileNet"
- __type = "account"
- __version = "0.04"
-
- __description = """RareFile.net account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "rarefile.net"
diff --git a/pyload/plugins/account/RealdebridCom.py b/pyload/plugins/account/RealdebridCom.py
deleted file mode 100644
index c181a1343..000000000
--- a/pyload/plugins/account/RealdebridCom.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import xml.dom.minidom as dom
-
-from pyload.plugins.Account import Account
-
-
-class RealdebridCom(Account):
- __name = "RealdebridCom"
- __type = "account"
- __version = "0.43"
-
- __description = """Real-Debrid.com account plugin"""
- __license = "GPLv3"
- __authors = [("Devirex Hazzard", "naibaf_11@yahoo.de")]
-
-
- def loadAccountInfo(self, user, req):
- if self.pin_code:
- return {"premium": False}
- page = req.load("https://real-debrid.com/api/account.php")
- xml = dom.parseString(page)
- account_info = {"validuntil": int(xml.getElementsByTagName("expiration")[0].childNodes[0].nodeValue),
- "trafficleft": -1}
-
- return account_info
-
-
- def login(self, user, data, req):
- self.pin_code = False
- page = req.load("https://real-debrid.com/ajax/login.php", get={"user": user, "pass": data['password']})
- if "Your login informations are incorrect" in page:
- self.wrongPassword()
- elif "PIN Code required" in page:
- self.logWarning(_("PIN code required. Please login to https://real-debrid.com using the PIN or disable the double authentication in your control panel on https://real-debrid.com"))
- self.pin_code = True
diff --git a/pyload/plugins/account/RehostTo.py b/pyload/plugins/account/RehostTo.py
deleted file mode 100644
index 1a7ea9577..000000000
--- a/pyload/plugins/account/RehostTo.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-
-
-class RehostTo(Account):
- __name = "RehostTo"
- __type = "account"
- __version = "0.10"
-
- __description = """Rehost.to account plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org")]
-
-
- def loadAccountInfo(self, user, req):
- data = self.getAccountData(user)
- page = req.load("http://rehost.to/api.php",
- get={'cmd': "login", 'user': user, 'pass': data['password']})
- data = [x.split("=") for x in page.split(",")]
- ses = data[0][1]
- long_ses = data[1][1]
-
- page = req.load("http://rehost.to/api.php",
- get={'cmd': "get_premium_credits", 'long_ses': long_ses})
- traffic, valid = page.split(",")
-
- account_info = {"trafficleft": int(traffic) * 1024,
- "validuntil": int(valid),
- "long_ses": long_ses,
- "ses": ses}
-
- return account_info
-
-
- def login(self, user, data, req):
- page = req.load("http://rehost.to/api.php",
- get={'cmd': "login", 'user': user, 'pass': data['password']})
-
- if "Login failed." in page:
- self.wrongPassword()
diff --git a/pyload/plugins/account/RyushareCom.py b/pyload/plugins/account/RyushareCom.py
deleted file mode 100644
index a555e6208..000000000
--- a/pyload/plugins/account/RyushareCom.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class RyushareCom(XFSAccount):
- __name = "RyushareCom"
- __type = "account"
- __version = "0.05"
-
- __description = """Ryushare.com account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("trance4us", "")]
-
-
- HOSTER_DOMAIN = "ryushare.com"
-
-
- def login(self, user, data, req):
- req.lastURL = "http://ryushare.com/login.python"
- html = req.load("http://ryushare.com/login.python",
- post={"login": user, "password": data['password'], "op": "login"})
- if 'Incorrect Login or Password' in html or '>Error<' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/SafesharingEu.py b/pyload/plugins/account/SafesharingEu.py
deleted file mode 100644
index a48ce86b3..000000000
--- a/pyload/plugins/account/SafesharingEu.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class SafesharingEu(XFSAccount):
- __name = "SafesharingEu"
- __type = "account"
- __version = "0.02"
-
- __description = """Safesharing.eu account plugin"""
- __license = "GPLv3"
- __authors = [("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "safesharing.eu"
diff --git a/pyload/plugins/account/SecureUploadEu.py b/pyload/plugins/account/SecureUploadEu.py
deleted file mode 100644
index 38c7f9646..000000000
--- a/pyload/plugins/account/SecureUploadEu.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class SecureUploadEu(XFSAccount):
- __name = "SecureUploadEu"
- __type = "account"
- __version = "0.02"
-
- __description = """SecureUpload.eu account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "secureupload.eu"
diff --git a/pyload/plugins/account/SendmywayCom.py b/pyload/plugins/account/SendmywayCom.py
deleted file mode 100644
index fefbcba4b..000000000
--- a/pyload/plugins/account/SendmywayCom.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class SendmywayCom(XFSAccount):
- __name = "SendmywayCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Sendmyway.com account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "sendmyway.com"
diff --git a/pyload/plugins/account/ShareonlineBiz.py b/pyload/plugins/account/ShareonlineBiz.py
deleted file mode 100644
index 1cf6fd3c8..000000000
--- a/pyload/plugins/account/ShareonlineBiz.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-
-
-class ShareonlineBiz(Account):
- __name = "ShareonlineBiz"
- __type = "account"
- __version = "0.24"
-
- __description = """Share-online.biz account plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def getUserAPI(self, user, req):
- return req.load("http://api.share-online.biz/account.php",
- {"username": user, "password": self.accounts[user]['password'], "act": "userDetails"})
-
-
- def loadAccountInfo(self, user, req):
- html = self.getUserAPI(user, req)
-
- info = {}
- for line in html.splitlines():
- if "=" in line:
- key, value = line.split("=")
- info[key] = value
- self.logDebug(info)
-
- if "dl" in info and info['dl'].lower() != "not_available":
- req.cj.setCookie("share-online.biz", "dl", info['dl'])
- if "a" in info and info['a'].lower() != "not_available":
- req.cj.setCookie("share-online.biz", "a", info['a'])
-
- return {"validuntil": int(info['expire_date']) if "expire_date" in info else -1,
- "trafficleft": -1,
- "premium": True if ("dl" in info or "a" in info) and (info['group'] != "Sammler") else False}
-
-
- def login(self, user, data, req):
- html = self.getUserAPI(user, req)
- if "EXCEPTION" in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/SimplyPremiumCom.py b/pyload/plugins/account/SimplyPremiumCom.py
deleted file mode 100644
index 2fabe6ce8..000000000
--- a/pyload/plugins/account/SimplyPremiumCom.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.utils import json_loads
-from pyload.plugins.Account import Account
-
-
-class SimplyPremiumCom(Account):
- __name = "SimplyPremiumCom"
- __type = "account"
- __version = "0.01"
-
- __description = """Simply-Premium.com account plugin"""
- __license = "GPLv3"
- __authors = [("EvolutionClip", "evolutionclip@live.de")]
-
-
- def loadAccountInfo(self, user, req):
- json_data = req.load('http://www.simply-premium.com/api/user.php?format=json')
- self.logDebug("JSON data: " + json_data)
- json_data = json_loads(json_data)
-
- if 'vip' in json_data['result'] and json_data['result']['vip'] == 0:
- return {"premium": False}
-
- #Time package
- validuntil = float(json_data['result']['timeend'])
- #Traffic package
- # {"trafficleft": int(traffic), "validuntil": -1}
- #trafficleft = int(json_data['result']['traffic'])
-
- #return {"premium": True, "validuntil": validuntil, "trafficleft": trafficleft}
- return {"premium": True, "validuntil": validuntil}
-
-
- def login(self, user, data, req):
- req.cj.setCookie("simply-premium.com", "lang", "EN")
-
- if data['password'] == '' or data['password'] == '0':
- post_data = {"key": user}
- else:
- post_data = {"login_name": user, "login_pass": data['password']}
-
- html = req.load("http://www.simply-premium.com/login.php", post=post_data)
-
- if 'logout' not in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/SimplydebridCom.py b/pyload/plugins/account/SimplydebridCom.py
deleted file mode 100644
index 4930eaea5..000000000
--- a/pyload/plugins/account/SimplydebridCom.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from time import mktime, strptime
-
-from pyload.plugins.Account import Account
-
-
-class SimplydebridCom(Account):
- __name = "SimplydebridCom"
- __type = "account"
- __version = "0.10"
-
- __description = """Simply-Debrid.com account plugin"""
- __license = "GPLv3"
- __authors = [("Kagenoshin", "kagenoshin@gmx.ch")]
-
-
- def loadAccountInfo(self, user, req):
- get_data = {'login': 2, 'u': self.loginname, 'p': self.password}
- res = req.load("http://simply-debrid.com/api.php", get=get_data, decode=True)
- data = [x.strip() for x in res.split(";")]
- if str(data[0]) != "1":
- return {"premium": False}
- else:
- return {"trafficleft": -1, "validuntil": mktime(strptime(str(data[2]), "%d/%m/%Y"))}
-
-
- def login(self, user, data, req):
- self.loginname = user
- self.password = data['password']
- get_data = {'login': 1, 'u': self.loginname, 'p': self.password}
- res = req.load("http://simply-debrid.com/api.php", get=get_data, decode=True)
- if res != "02: loggin success":
- self.wrongPassword()
diff --git a/pyload/plugins/account/StahnuTo.py b/pyload/plugins/account/StahnuTo.py
deleted file mode 100644
index d2358191a..000000000
--- a/pyload/plugins/account/StahnuTo.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Account import Account
-
-
-class StahnuTo(Account):
- __name = "StahnuTo"
- __type = "account"
- __version = "0.03"
-
- __description = """StahnuTo account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def loadAccountInfo(self, user, req):
- html = req.load("http://www.stahnu.to/")
-
- m = re.search(r'>VIP: (\d+.*)<', html)
- trafficleft = self.parseTraffic(m.group(1)) * 1024 if m else 0
-
- return {"premium": trafficleft > (512 * 1024), "trafficleft": trafficleft, "validuntil": -1}
-
-
- def login(self, user, data, req):
- html = req.load("http://www.stahnu.to/login.php", post={
- "username": user,
- "password": data['password'],
- "submit": "Login"})
-
- if not '<a href="logout.php">' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/StreamcloudEu.py b/pyload/plugins/account/StreamcloudEu.py
deleted file mode 100644
index 49768bc6f..000000000
--- a/pyload/plugins/account/StreamcloudEu.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class StreamcloudEu(XFSAccount):
- __name = "StreamcloudEu"
- __type = "account"
- __version = "0.02"
-
- __description = """Streamcloud.eu account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "streamcloud.eu"
diff --git a/pyload/plugins/account/TurbobitNet.py b/pyload/plugins/account/TurbobitNet.py
deleted file mode 100644
index b60c89a61..000000000
--- a/pyload/plugins/account/TurbobitNet.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from time import mktime, strptime
-
-from pyload.plugins.Account import Account
-
-
-class TurbobitNet(Account):
- __name = "TurbobitNet"
- __type = "account"
- __version = "0.01"
-
- __description = """TurbobitNet account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def loadAccountInfo(self, user, req):
- html = req.load("http://turbobit.net")
-
- m = re.search(r'<u>Turbo Access</u> to ([\d.]+)', html)
- if m:
- premium = True
- validuntil = mktime(strptime(m.group(1), "%d.%m.%Y"))
- else:
- premium = False
- validuntil = -1
-
- return {"premium": premium, "trafficleft": -1, "validuntil": validuntil}
-
-
- def login(self, user, data, req):
- req.cj.setCookie("turbobit.net", "user_lang", "en")
-
- html = req.load("http://turbobit.net/user/login", post={
- "user[login]": user,
- "user[pass]": data['password'],
- "user[submit]": "Login"})
-
- if not '<div class="menu-item user-name">' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/TusfilesNet.py b/pyload/plugins/account/TusfilesNet.py
deleted file mode 100644
index c665c608e..000000000
--- a/pyload/plugins/account/TusfilesNet.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import mktime, strptime, gmtime
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class TusfilesNet(XFSAccount):
- __name = "TusfilesNet"
- __type = "account"
- __version = "0.06"
-
-    __description = """Tusfiles.net account plugin"""
- __license = "GPLv3"
- __authors = [("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "tusfiles.net"
-
- VALID_UNTIL_PATTERN = r'<span class="label label-default">([^<]+)</span>'
- TRAFFIC_LEFT_PATTERN = r'<td><img src="//www\.tusfiles\.net/i/icon/meter\.png" alt=""/></td>\n<td>&nbsp;(?P<S>[\d.,]+)'
diff --git a/pyload/plugins/account/UlozTo.py b/pyload/plugins/account/UlozTo.py
deleted file mode 100644
index d7087a7c2..000000000
--- a/pyload/plugins/account/UlozTo.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-
-from pyload.plugins.Account import Account
-
-
-class UlozTo(Account):
- __name = "UlozTo"
- __type = "account"
- __version = "0.07"
-
- __description = """Uloz.to account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("pulpe", "")]
-
-
- TRAFFIC_LEFT_PATTERN = r'<li class="menu-kredit"><a href="/kredit" title="[^"]*?GB = ([\d.]+) MB"'
-
-
- def loadAccountInfo(self, user, req):
- self.phpsessid = req.cj.getCookie("ULOSESSID") #@NOTE: this cookie gets lost somehow after each request
-
- html = req.load("http://www.ulozto.net/", decode=True)
-
- req.cj.setCookie("ulozto.net", "ULOSESSID", self.phpsessid)
-
- m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- trafficleft = int(float(m.group(1).replace(' ', '').replace(',', '.')) * 1000 * 1.048) if m else 0
- self.premium = True if trafficleft else False
-
- return {"validuntil": -1, "trafficleft": trafficleft}
-
-
- def login(self, user, data, req):
- login_page = req.load('http://www.ulozto.net/?do=web-login', decode=True)
- action = re.findall('<form action="(.+?)"', login_page)[1].replace('&amp;', '&')
- token = re.search('_token_" value="(.+?)"', login_page).group(1)
-
- html = req.load(urljoin("http://www.ulozto.net/", action),
- post={'_token_' : token,
- 'do' : "loginForm-submit",
- 'login' : u"Přihlásit",
- 'password': data['password'],
- 'username': user},
- decode=True)
-
- if '<div class="flash error">' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/UnrestrictLi.py b/pyload/plugins/account/UnrestrictLi.py
deleted file mode 100644
index 8ecfc50eb..000000000
--- a/pyload/plugins/account/UnrestrictLi.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Account import Account
-from pyload.utils import json_loads
-
-
-class UnrestrictLi(Account):
- __name = "UnrestrictLi"
- __type = "account"
- __version = "0.03"
-
- __description = """Unrestrict.li account plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- def loadAccountInfo(self, user, req):
- json_data = req.load('http://unrestrict.li/api/jdownloader/user.php?format=json')
- self.logDebug("JSON data: " + json_data)
- json_data = json_loads(json_data)
-
- if 'vip' in json_data['result'] and json_data['result']['vip'] == 0:
- return {"premium": False}
-
- validuntil = json_data['result']['expires']
- trafficleft = int(json_data['result']['traffic'])
-
- return {"premium": True, "validuntil": validuntil, "trafficleft": trafficleft}
-
-
- def login(self, user, data, req):
- req.cj.setCookie("unrestrict.li", "lang", "EN")
- html = req.load("https://unrestrict.li/sign_in")
-
- if 'solvemedia' in html:
- self.logError(_("A Captcha is required. Go to http://unrestrict.li/sign_in and login, then retry"))
- return
-
- post_data = {"username": user, "password": data['password'],
- "remember_me": "remember", "signin": "Sign in"}
- html = req.load("https://unrestrict.li/sign_in", post=post_data)
-
- if 'sign_out' not in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/UploadcCom.py b/pyload/plugins/account/UploadcCom.py
deleted file mode 100644
index df3d89a13..000000000
--- a/pyload/plugins/account/UploadcCom.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class UploadcCom(XFSAccount):
- __name = "UploadcCom"
- __type = "account"
- __version = "0.02"
-
- __description = """Uploadc.com account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "uploadc.com"
diff --git a/pyload/plugins/account/UploadedTo.py b/pyload/plugins/account/UploadedTo.py
deleted file mode 100644
index 2db549802..000000000
--- a/pyload/plugins/account/UploadedTo.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from time import time
-
-from pyload.plugins.Account import Account
-
-
-class UploadedTo(Account):
- __name = "UploadedTo"
- __type = "account"
- __version = "0.27"
-
- __description = """Uploaded.to account plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
- PREMIUM_PATTERN = r'<em>Premium</em>'
- VALID_UNTIL_PATTERN = r'<td>Duration:</td>\s*<th>([^<]+)'
- TRAFFIC_LEFT_PATTERN = r'<th colspan="2"><b class="cB">([^<]+)'
-
-
- def loadAccountInfo(self, user, req):
- validuntil = None
- trafficleft = None
- premium = None
-
- html = req.load("http://uploaded.net/me")
-
- premium = True if re.search(self.PREMIUM_PATTERN, html) else False
-
- m = re.search(self.VALID_UNTIL_PATTERN, html, re.M)
- if m:
- expiredate = m.group(1).strip()
-
- if expiredate == "unlimited":
- validuntil = -1
- else:
- m = re.findall(r'(\d+) (Week|weeks|day|hour)', expiredate)
- if m:
- validuntil = time()
- for n, u in m:
- validuntil += int(n) * 60 * 60 * {'Week': 168, 'weeks': 168, 'day': 24, 'hour': 1}[u]
-
- m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- if m:
- trafficleft = self.parseTraffic(m.group(1).replace('.', ''))
-
- return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium}
-
-
- def login(self, user, data, req):
- req.cj.setCookie("uploaded.net", "lang", "en")
-
- page = req.load("http://uploaded.net/io/login",
- post={'id': user, 'pw': data['password'], '_': ""})
-
- if "User and password do not match" in page:
- self.wrongPassword()
diff --git a/pyload/plugins/account/UploadheroCom.py b/pyload/plugins/account/UploadheroCom.py
deleted file mode 100644
index 5fce73ff1..000000000
--- a/pyload/plugins/account/UploadheroCom.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-import datetime
-import time
-
-from pyload.plugins.Account import Account
-
-
-class UploadheroCom(Account):
- __name = "UploadheroCom"
- __type = "account"
- __version = "0.20"
-
- __description = """Uploadhero.co account plugin"""
- __license = "GPLv3"
- __authors = [("mcmyst", "mcmyst@hotmail.fr")]
-
-
- def loadAccountInfo(self, user, req):
- premium_pattern = re.compile('Il vous reste <span class="bleu">(\d+)</span> jours premium')
-
- data = self.getAccountData(user)
- page = req.load("http://uploadhero.co/my-account")
-
- if premium_pattern.search(page):
- end_date = datetime.date.today() + datetime.timedelta(days=int(premium_pattern.search(page).group(1)))
-            end_date = time.mktime(end_date.timetuple())
- account_info = {"validuntil": end_date, "trafficleft": -1, "premium": True}
- else:
- account_info = {"validuntil": -1, "trafficleft": -1, "premium": False}
-
- return account_info
-
-
- def login(self, user, data, req):
- page = req.load("http://uploadhero.co/lib/connexion.php",
- post={"pseudo_login": user, "password_login": data['password']})
-
- if "mot de passe invalide" in page:
- self.wrongPassword()
diff --git a/pyload/plugins/account/UploadingCom.py b/pyload/plugins/account/UploadingCom.py
deleted file mode 100644
index b7eecd6b6..000000000
--- a/pyload/plugins/account/UploadingCom.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import time, strptime, mktime, gmtime
-
-from pyload.plugins.Account import Account
-from pyload.plugins.internal.SimpleHoster import set_cookies
-
-
-class UploadingCom(Account):
- __name = "UploadingCom"
- __type = "account"
- __version = "0.11"
-
- __description = """Uploading.com account plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
- PREMIUM_PATTERN = r'UPGRADE TO PREMIUM'
- VALID_UNTIL_PATTERN = r'Valid Until:(.+?)<'
-
-
- def loadAccountInfo(self, user, req):
- validuntil = None
- trafficleft = None
- premium = None
-
- html = req.load("http://uploading.com/")
-
- premium = False if re.search(self.PREMIUM_PATTERN, html) else True
-
- m = re.search(self.VALID_UNTIL_PATTERN, html)
- if m:
- expiredate = m.group(1).strip()
- self.logDebug("Expire date: " + expiredate)
-
- try:
- validuntil = mktime(strptime(expiredate, "%b %d, %Y"))
-
- except Exception, e:
- self.logError(e)
-
- else:
- if validuntil > mktime(gmtime()):
- premium = True
- else:
- premium = False
- validuntil = None
-
- return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium}
-
-
- def login(self, user, data, req):
- set_cookies([("uploading.com", "lang", "1"),
- ("uploading.com", "language", "1"),
- ("uploading.com", "setlang", "en"),
-                     ("uploading.com", "_lang", "en")])
-
- req.load("http://uploading.com/")
- req.load("http://uploading.com/general/login_form/?JsHttpRequest=%s-xml" % long(time() * 1000),
- post={'email': user, 'password': data['password'], 'remember': "on"})
diff --git a/pyload/plugins/account/UptoboxCom.py b/pyload/plugins/account/UptoboxCom.py
deleted file mode 100644
index 869ddf214..000000000
--- a/pyload/plugins/account/UptoboxCom.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class UptoboxCom(XFSAccount):
- __name = "UptoboxCom"
- __type = "account"
- __version = "0.07"
-
-    __description = """Uptobox.com account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "uptobox.com"
- HOSTER_URL = "https://uptobox.com/"
diff --git a/pyload/plugins/account/VidPlayNet.py b/pyload/plugins/account/VidPlayNet.py
deleted file mode 100644
index f3c7f0d4a..000000000
--- a/pyload/plugins/account/VidPlayNet.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class VidPlayNet(XFSAccount):
- __name = "VidPlayNet"
- __type = "account"
- __version = "0.02"
-
- __description = """VidPlay.net account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "vidplay.net"
diff --git a/pyload/plugins/account/XFileSharingPro.py b/pyload/plugins/account/XFileSharingPro.py
deleted file mode 100644
index 834fb9735..000000000
--- a/pyload/plugins/account/XFileSharingPro.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSAccount import XFSAccount
-
-
-class XFileSharingPro(XFSAccount):
- __name = "XFileSharingPro"
- __type = "account"
- __version = "0.05"
-
- __description = """XFileSharingPro multi-purpose account plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = None
-
-
- def init(self):
- if self.HOSTER_DOMAIN:
- return super(XFileSharingPro, self).init()
-
-
- def loadAccountInfo(self, user, req):
- return super(XFileSharingPro if self.HOSTER_DOMAIN else XFSAccount, self).loadAccountInfo(user, req)
-
-
- def login(self, user, data, req):
- if self.HOSTER_DOMAIN:
- return super(XFileSharingPro, self).login(user, data, req)
diff --git a/pyload/plugins/account/YibaishiwuCom.py b/pyload/plugins/account/YibaishiwuCom.py
deleted file mode 100644
index fd9382f0d..000000000
--- a/pyload/plugins/account/YibaishiwuCom.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Account import Account
-
-
-class YibaishiwuCom(Account):
- __name = "YibaishiwuCom"
- __type = "account"
- __version = "0.01"
-
- __description = """115.com account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- ACCOUNT_INFO_PATTERN = r'var USER_PERMISSION = {(.*?)}'
-
-
- def loadAccountInfo(self, user, req):
- #self.relogin(user)
- html = req.load("http://115.com/", decode=True)
-
- m = re.search(self.ACCOUNT_INFO_PATTERN, html, re.S)
- premium = True if (m and 'is_vip: 1' in m.group(1)) else False
- validuntil = trafficleft = (-1 if m else 0)
- return dict({"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium})
-
-
- def login(self, user, data, req):
- html = req.load('http://passport.115.com/?ac=login', post={
- "back": "http://www.115.com/",
- "goto": "http://115.com/",
- "login[account]": user,
- "login[passwd]": data['password']
- }, decode=True)
-
- if not 'var USER_PERMISSION = {' in html:
- self.wrongPassword()
diff --git a/pyload/plugins/account/ZeveraCom.py b/pyload/plugins/account/ZeveraCom.py
deleted file mode 100644
index f748bef17..000000000
--- a/pyload/plugins/account/ZeveraCom.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from time import mktime, strptime
-
-from pyload.plugins.Account import Account
-
-
-class ZeveraCom(Account):
- __name = "ZeveraCom"
- __type = "account"
- __version = "0.21"
-
- __description = """Zevera.com account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def loadAccountInfo(self, user, req):
- data = self.getAPIData(req)
- if data == "No traffic":
- account_info = {"trafficleft": 0, "validuntil": 0, "premium": False}
- else:
- account_info = {
- "trafficleft": int(data['availabletodaytraffic']) * 1024,
- "validuntil": mktime(strptime(data['endsubscriptiondate'], "%Y/%m/%d %H:%M:%S")),
- "premium": True
- }
- return account_info
-
-
- def login(self, user, data, req):
- self.loginname = user
- self.password = data['password']
- if self.getAPIData(req) == "No traffic":
- self.wrongPassword()
-
-
- def getAPIData(self, req, just_header=False, **kwargs):
- get_data = {
- 'cmd': 'accountinfo',
- 'login': self.loginname,
- 'pass': self.password
- }
- get_data.update(kwargs)
-
- res = req.load("http://www.zevera.com/jDownloader.ashx", get=get_data,
- decode=True, just_header=just_header)
- self.logDebug(res)
-
- if ':' in res:
- if not just_header:
- res = res.replace(',', '\n')
- return dict((y.strip().lower(), z.strip()) for (y, z) in
- [x.split(':', 1) for x in res.splitlines() if ':' in x])
- else:
- return res
diff --git a/pyload/plugins/addon/Checksum.py b/pyload/plugins/addon/Checksum.py
deleted file mode 100644
index 1b9941f4b..000000000
--- a/pyload/plugins/addon/Checksum.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import hashlib
-import re
-import zlib
-
-from os import remove
-from os.path import getsize, isfile, splitext
-
-from pyload.plugins.Addon import Addon
-from pyload.utils import safe_join, fs_encode
-
-
-def computeChecksum(local_file, algorithm):
- if algorithm in getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")):
- h = getattr(hashlib, algorithm)()
-
- with open(local_file, 'rb') as f:
- for chunk in iter(lambda: f.read(128 * h.block_size), ''):
- h.update(chunk)
-
- return h.hexdigest()
-
- elif algorithm in ("adler32", "crc32"):
- hf = getattr(zlib, algorithm)
- last = 0
-
- with open(local_file, 'rb') as f:
- for chunk in iter(lambda: f.read(8192), ''):
- last = hf(chunk, last)
-
- return "%x" % last
-
- else:
- return None
-
-
-class Checksum(Addon):
- __name = "Checksum"
- __type = "addon"
- __version = "0.15"
-
- __config = [("activated" , "bool" , "Activated" , True ),
- ("check_checksum", "bool" , "Check checksum? (If False only size will be verified)", True ),
- ("check_action" , "fail;retry;nothing", "What to do if check fails?" , "retry"),
- ("max_tries" , "int" , "Number of retries" , 2 ),
- ("retry_action" , "fail;nothing" , "What to do if all retries fail?" , "fail" ),
- ("wait_time" , "int" , "Time to wait before each retry (seconds)" , 1 )]
-
- __description = """Verify downloaded file size and checksum"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("Walter Purcaro", "vuolter@gmail.com"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- methods = {'sfv': 'crc32', 'crc': 'crc32', 'hash': 'md5'}
- regexps = {'sfv': r'^(?P<name>[^;].+)\s+(?P<hash>[0-9A-Fa-f]{8})$',
- 'md5': r'^(?P<name>[0-9A-Fa-f]{32}) (?P<file>.+)$',
- 'crc': r'filename=(?P<name>.+)\nsize=(?P<size>\d+)\ncrc32=(?P<hash>[0-9A-Fa-f]{8})$',
- 'default': r'^(?P<hash>[0-9A-Fa-f]+)\s+\*?(?P<name>.+)$'}
-
-
- def activate(self):
- if not self.getConfig("check_checksum"):
- self.logInfo(_("Checksum validation is disabled in plugin configuration"))
-
-
- def setup(self):
- self.algorithms = sorted(
- getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")), reverse=True)
- self.algorithms.extend(["crc32", "adler32"])
- self.formats = self.algorithms + ["sfv", "crc", "hash"]
-
-
- def downloadFinished(self, pyfile):
- """
- Compute checksum for the downloaded file and compare it with the hash provided by the hoster.
- pyfile.plugin.check_data should be a dictionary which can contain:
- a) if known, the exact filesize in bytes (e.g. "size": 123456789)
- b) hexadecimal hash string with algorithm name as key (e.g. "md5": "d76505d0869f9f928a17d42d66326307")
- """
- if hasattr(pyfile.plugin, "check_data") and isinstance(pyfile.plugin.check_data, dict):
- data = pyfile.plugin.check_data.copy()
-
- elif hasattr(pyfile.plugin, "api_data") and isinstance(pyfile.plugin.api_data, dict):
- data = pyfile.plugin.api_data.copy()
-
- # elif hasattr(pyfile.plugin, "info") and isinstance(pyfile.plugin.info, dict):
- # data = pyfile.plugin.info.copy()
-
- else:
- return
-
- self.logDebug(data)
-
- if not pyfile.plugin.lastDownload:
- self.checkFailed(pyfile, None, "No file downloaded")
-
- local_file = fs_encode(pyfile.plugin.lastDownload)
- #download_folder = self.config['general']['download_folder']
- #local_file = fs_encode(safe_join(download_folder, pyfile.package().folder, pyfile.name))
-
- if not isfile(local_file):
- self.checkFailed(pyfile, None, "File does not exist")
-
- # validate file size
- if "size" in data:
- api_size = int(data['size'])
- file_size = getsize(local_file)
- if api_size != file_size:
- self.logWarning(_("File %s has incorrect size: %d B (%d expected)") % (pyfile.name, file_size, api_size))
- self.checkFailed(pyfile, local_file, "Incorrect file size")
- del data['size']
-
- # validate checksum
- if data and self.getConfig("check_checksum"):
- if "checksum" in data:
- data['md5'] = data['checksum']
-
- for key in self.algorithms:
- if key in data:
- checksum = computeChecksum(local_file, key.replace("-", "").lower())
- if checksum:
- if checksum == data[key].lower():
- self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
- (pyfile.name, key.upper(), checksum))
- break
- else:
- self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
- (key.upper(), pyfile.name, checksum, data[key]))
- self.checkFailed(pyfile, local_file, "Checksums do not match")
- else:
- self.logWarning(_("Unsupported hashing algorithm"), key.upper())
- else:
- self.logWarning(_("Unable to validate checksum for file: ") + pyfile.name)
-
-
- def checkFailed(self, pyfile, local_file, msg):
- check_action = self.getConfig("check_action")
- if check_action == "retry":
- max_tries = self.getConfig("max_tries")
- retry_action = self.getConfig("retry_action")
- if pyfile.plugin.retries < max_tries:
- if local_file:
- remove(local_file)
- pyfile.plugin.retry(max_tries, self.getConfig("wait_time"), msg)
- elif retry_action == "nothing":
- return
- elif check_action == "nothing":
- return
- pyfile.plugin.fail(reason=msg)
-
-
- def packageFinished(self, pypack):
- download_folder = safe_join(self.config['general']['download_folder'], pypack.folder, "")
-
- for link in pypack.getChildren().itervalues():
- file_type = splitext(link['name'])[1][1:].lower()
-
- if file_type not in self.formats:
- continue
-
- hash_file = fs_encode(safe_join(download_folder, link['name']))
- if not isfile(hash_file):
- self.logWarning(_("File not found"), link['name'])
- continue
-
- with open(hash_file) as f:
- text = f.read()
-
- for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
- data = m.groupdict()
- self.logDebug(link['name'], data)
-
- local_file = fs_encode(safe_join(download_folder, data['name']))
- algorithm = self.methods.get(file_type, file_type)
- checksum = computeChecksum(local_file, algorithm)
- if checksum == data['hash']:
- self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
- (data['name'], algorithm, checksum))
- else:
- self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
- (algorithm, data['name'], checksum, data['hash']))
diff --git a/pyload/plugins/addon/ClickAndLoad.py b/pyload/plugins/addon/ClickAndLoad.py
deleted file mode 100644
index 490837f65..000000000
--- a/pyload/plugins/addon/ClickAndLoad.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from socket import socket, error, SHUT_WR
-from threading import Thread
-
-from pyload.plugins.Addon import Addon
-
-
-def forward(source, destination):
- string = ' '
- while string:
- string = source.recv(1024)
- if string:
- destination.sendall(string)
- else:
- #source.shutdown(socket.SHUT_RD)
-            destination.shutdown(SHUT_WR)
-
-
-class ClickAndLoad(Addon):
- __name = "ClickAndLoad"
- __type = "addon"
- __version = "0.23"
-
- __config = [("activated", "bool", "Activated" , True ),
- ("port" , "int" , "Port" , 9666 ),
- ("extern" , "bool", "Allow external link adding", False)]
-
- __description = """Click'N'Load hook plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.de"),
- ("mkaay", "mkaay@mkaay.de"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- def setup(self):
- self.interval = 300
-
-
- def activate(self):
- self.initPeriodical()
-
-
- def periodical(self):
- webip = "0.0.0.0" if self.getConfig("extern") else "127.0.0.1"
- webport = self.config['webinterface']['port']
-        cnlport = self.getConfig("port")
-
- try:
- s = socket()
- s.bind((webip, cnlport))
- s.listen(5)
-
- client = s.accept()[0]
- server = socket()
-
- server.connect(("127.0.0.1", webport))
-
- except error, e:
- if hasattr(e, "errno"):
- errno = e.errno
- else:
- errno = e.args[0]
-
- if errno == 98:
- self.logWarning(_("Port %d already in use") % cnlport)
- else:
- self.logDebug(e)
-
- else:
- self.core.scheduler.removeJob(self.cb)
- t = Thread(target=forward, args=[client, server])
- t.setDaemon(True)
- t.start()
diff --git a/pyload/plugins/addon/DeleteFinished.py b/pyload/plugins/addon/DeleteFinished.py
deleted file mode 100644
index 5450a8ee2..000000000
--- a/pyload/plugins/addon/DeleteFinished.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.database import style
-from pyload.plugins.Addon import Addon
-
-
-class DeleteFinished(Addon):
- __name = "DeleteFinished"
- __type = "addon"
- __version = "1.11"
-
- __config = [('interval' , 'int' , 'Delete every (hours)' , '72' ),
- ('deloffline', 'bool', 'Delete packages with offline links', 'False')]
-
- __description = """Automatically delete all finished packages from queue"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- # event_list = ["pluginConfigChanged"]
-
-
- ## overwritten methods ##
- def periodical(self):
- if not self.info['sleep']:
- deloffline = self.getConfig('deloffline')
- mode = '0,1,4' if deloffline else '0,4'
- msg = _('delete all finished packages in queue list (%s packages with offline links)')
- self.logInfo(msg % (_('including') if deloffline else _('excluding')))
- self.deleteFinished(mode)
- self.info['sleep'] = True
- self.addEvent('packageFinished', self.wakeup)
-
-
- def pluginConfigChanged(self, plugin, name, value):
- if name == "interval" and value != self.interval:
- self.interval = value * 3600
- self.initPeriodical()
-
-
- def deactivate(self):
- self.removeEvent('packageFinished', self.wakeup)
-
-
- def activate(self):
- self.info = {'sleep': True}
- interval = self.getConfig('interval')
- self.pluginConfigChanged(self.__name, 'interval', interval)
- self.addEvent('packageFinished', self.wakeup)
-
-
- ## own methods ##
- @style.queue
- def deleteFinished(self, mode):
- self.c.execute('DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links WHERE package=packages.id AND status NOT IN (%s))' % mode)
- self.c.execute('DELETE FROM links WHERE NOT EXISTS(SELECT 1 FROM packages WHERE id=links.package)')
-
-
- def wakeup(self, pypack):
- self.removeEvent('packageFinished', self.wakeup)
- self.info['sleep'] = False
-
-
- ## event managing ##
- def addEvent(self, event, func):
- """Adds an event listener for event name"""
- if event in self.m.events:
- if func in self.m.events[event]:
- self.logDebug("Function already registered", func)
- else:
- self.m.events[event].append(func)
- else:
- self.m.events[event] = [func]
-
-
- def setup(self):
- self.interval = 0
- self.m = self.manager
- self.removeEvent = self.m.removeEvent
diff --git a/pyload/plugins/addon/DownloadScheduler.py b/pyload/plugins/addon/DownloadScheduler.py
deleted file mode 100644
index 95cede509..000000000
--- a/pyload/plugins/addon/DownloadScheduler.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import localtime
-
-from pyload.plugins.Addon import Addon
-
-
-class DownloadScheduler(Addon):
- __name = "DownloadScheduler"
- __type = "addon"
- __version = "0.22"
-
- __config = [("timetable", "str" , "List time periods as hh:mm full or number(kB/s)" , "0:00 full, 7:00 250, 10:00 0, 17:00 150"),
- ("abort" , "bool", "Abort active downloads when start period with speed 0", False )]
-
- __description = """Download Scheduler"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- def setup(self):
-        self.cb = None  #: callback to the scheduler job; removed by the AddonManager when the addon is unloaded
-
-
- def activate(self):
- self.updateSchedule()
-
-
- def updateSchedule(self, schedule=None):
- if schedule is None:
- schedule = self.getConfig("timetable")
-
- schedule = re.findall("(\d{1,2}):(\d{2})[\s]*(-?\d+)",
- schedule.lower().replace("full", "-1").replace("none", "0"))
- if not schedule:
- self.logError(_("Invalid schedule"))
- return
-
- t0 = localtime()
- now = (t0.tm_hour, t0.tm_min, t0.tm_sec, "X")
- schedule = sorted([(int(x[0]), int(x[1]), 0, int(x[2])) for x in schedule] + [now])
-
- self.logDebug("Schedule", schedule)
-
- for i, v in enumerate(schedule):
- if v[3] == "X":
- last, next = schedule[i - 1], schedule[(i + 1) % len(schedule)]
- self.logDebug("Now/Last/Next", now, last, next)
-
- self.setDownloadSpeed(last[3])
-
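-                # seconds until the next schedule entry starts, wrapped into a 24h window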
- next_time = (((24 + next[0] - now[0]) * 60 + next[1] - now[1]) * 60 + next[2] - now[2]) % 86400
- self.core.scheduler.removeJob(self.cb)
- self.cb = self.core.scheduler.addJob(next_time, self.updateSchedule, threaded=False)
-
-
- def setDownloadSpeed(self, speed):
- if speed == 0:
- abort = self.getConfig("abort")
-            self.logInfo(_("Stopping download server. (Running downloads will %sbe aborted.)") % ('' if abort else _('not ')))
- self.core.api.pauseServer()
- if abort:
- self.core.api.stopAllDownloads()
- else:
- self.core.api.unpauseServer()
-
- if speed > 0:
- self.logInfo(_("Setting download speed to %d kB/s") % speed)
- self.core.api.setConfigValue("download", "limit_speed", 1)
- self.core.api.setConfigValue("download", "max_speed", speed)
- else:
- self.logInfo(_("Setting download speed to FULL"))
- self.core.api.setConfigValue("download", "limit_speed", 0)
- self.core.api.setConfigValue("download", "max_speed", -1)
diff --git a/pyload/plugins/addon/ExternalScripts.py b/pyload/plugins/addon/ExternalScripts.py
deleted file mode 100644
index a2d7b8d86..000000000
--- a/pyload/plugins/addon/ExternalScripts.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import subprocess
-
-from itertools import chain
-from os import listdir, access, X_OK, makedirs
-from os.path import join, exists, basename, abspath
-
-from pyload.plugins.Addon import Addon
-from pyload.utils import safe_join
-
-
-class ExternalScripts(Addon):
- __name = "ExternalScripts"
- __type = "addon"
- __version = "0.25"
-
- __config = [("activated", "bool", "Activated", True)]
-
- __description = """Run external scripts"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de"),
- ("RaNaN", "ranan@pyload.org"),
- ("spoob", "spoob@pyload.org"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
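-    # maps manager event names to the handler methods defined below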
- event_map = {'archive-extracted' : "archive_extracted",
- 'package-extracted' : "package_extracted",
- 'all_archives-extracted' : "all_archives_extracted",
- 'all_archives-processed' : "all_archives_processed",
- 'all_downloads-finished' : "allDownloadsFinished",
- 'all_downloads-processed': "allDownloadsProcessed"}
-
-
- def setup(self):
- self.scripts = {}
-
- folders = ["download_preparing", "download_finished", "all_downloads_finished", "all_downloads_processed",
- "before_reconnect", "after_reconnect",
- "package_finished", "package_extracted",
- "archive_extracted", "all_archives_extracted", "all_archives_processed",
- # deprecated folders
- "unrar_finished", "all_dls_finished", "all_dls_processed"]
-
- for folder in folders:
- self.scripts[folder] = []
-
- self.initPluginType(folder, join(pypath, 'scripts', folder))
- self.initPluginType(folder, join('scripts', folder))
-
- for script_type, names in self.scripts.iteritems():
- if names:
- self.logInfo(_("Installed scripts for"), script_type, ", ".join([basename(x) for x in names]))
-
-
- def initPluginType(self, folder, path):
- if not exists(path):
- try:
- makedirs(path)
- except Exception:
- self.logDebug("Script folder %s not created" % folder)
- return
-
- for f in listdir(path):
- if f.startswith("#") or f.startswith(".") or f.startswith("_") or f.endswith("~") or f.endswith(".swp"):
- continue
-
- if not access(join(path, f), X_OK):
- self.logWarning(_("Script not executable:") + " %s/%s" % (folder, f))
-
- self.scripts[folder].append(join(path, f))
-
-
- def callScript(self, script, *args):
- try:
- cmd = [script] + [str(x) if not isinstance(x, basestring) else x for x in args]
- self.logDebug("Executing", abspath(script), " ".join(cmd))
- #output goes to pyload
- subprocess.Popen(cmd, bufsize=-1)
- except Exception, e:
- self.logError(_("Error in %(script)s: %(error)s") % {"script": basename(script), "error": e})
-
-
- def downloadPreparing(self, pyfile):
- for script in self.scripts['download_preparing']:
- self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.id)
-
-
- def downloadFinished(self, pyfile):
- download_folder = self.config['general']['download_folder']
- for script in self.scripts['download_finished']:
- filename = safe_join(download_folder, pyfile.package().folder, pyfile.name)
- self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.name, filename, pyfile.id)
-
-
- def packageFinished(self, pypack):
- download_folder = self.config['general']['download_folder']
- for script in self.scripts['package_finished']:
- folder = safe_join(download_folder, pypack.folder)
- self.callScript(script, pypack.name, folder, pypack.password, pypack.id)
-
-
- def beforeReconnecting(self, ip):
- for script in self.scripts['before_reconnect']:
- self.callScript(script, ip)
-
-
- def afterReconnecting(self, ip):
- for script in self.scripts['after_reconnect']:
- self.callScript(script, ip)
-
-
- def archive_extracted(self, pyfile, folder, filename, files):
- for script in self.scripts['archive_extracted']:
- self.callScript(script, folder, filename, files)
- for script in self.scripts['unrar_finished']: #: deprecated
- self.callScript(script, folder, filename)
-
-
- def package_extracted(self, pypack):
- download_folder = self.config['general']['download_folder']
- for script in self.scripts['package_extracted']:
- folder = safe_join(download_folder, pypack.folder)
- self.callScript(script, pypack.name, folder, pypack.password, pypack.id)
-
-
- def all_archives_extracted(self):
- for script in self.scripts['all_archives_extracted']:
- self.callScript(script)
-
-
- def all_archives_processed(self):
- for script in self.scripts['all_archives_processed']:
- self.callScript(script)
-
-
- def allDownloadsFinished(self):
- for script in chain(self.scripts['all_downloads_finished'], self.scripts['all_dls_finished']):
- self.callScript(script)
-
-
- def allDownloadsProcessed(self):
- for script in chain(self.scripts['all_downloads_processed'], self.scripts['all_dls_processed']):
- self.callScript(script)
diff --git a/pyload/plugins/addon/ExtractArchive.py b/pyload/plugins/addon/ExtractArchive.py
deleted file mode 100644
index 25da7ba12..000000000
--- a/pyload/plugins/addon/ExtractArchive.py
+++ /dev/null
@@ -1,363 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import os
-import sys
-
-from copy import copy
-from os import remove, chmod, makedirs
-from os.path import exists, basename, isfile, isdir
-from traceback import print_exc
-
-# monkey patch bug in python 2.6 and lower
-# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717
-if sys.version_info < (2, 7) and os.name != "nt":
- import errno
- from subprocess import Popen
-
-
- def _eintr_retry_call(func, *args):
- while True:
- try:
- return func(*args)
- except OSError, e:
- if e.errno == errno.EINTR:
- continue
- raise
-
-
-    # unused timeout option for older python versions
- def wait(self, timeout=0):
- """Wait for child process to terminate. Returns returncode
- attribute."""
- if self.returncode is None:
- try:
- pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
- except OSError, e:
- if e.errno != errno.ECHILD:
- raise
- # This happens if SIGCLD is set to be ignored or waiting
- # for child processes has otherwise been disabled for our
- # process. This child is dead, we can't get the status.
- sts = 0
- self._handle_exitstatus(sts)
- return self.returncode
-
- Popen.wait = wait
-
-if os.name != "nt":
- from grp import getgrnam
- from os import chown
- from pwd import getpwnam
-
-from pyload.plugins.Addon import Addon, threaded, Expose
-from pyload.plugins.internal.AbstractExtractor import ArchiveError, CRCError, WrongPassword
-from pyload.utils import safe_join, fs_encode
-
-
-class ExtractArchive(Addon):
- __name = "ExtractArchive"
- __type = "addon"
- __version = "0.19"
-
- __config = [("activated" , "bool" , "Activated" , True ),
- ("fullpath" , "bool" , "Extract full path" , True ),
- ("overwrite" , "bool" , "Overwrite files" , True ),
- ("passwordfile" , "file" , "password file" , "archive_password.txt"),
- ("deletearchive", "bool" , "Delete archives when done" , False ),
- ("subfolder" , "bool" , "Create subfolder for each package" , False ),
- ("destination" , "folder", "Extract files to" , "" ),
-                ("excludefiles" , "str"   , "Exclude files from unpacking (separated by ;)", ""                    ),
-                ("recursive"    , "bool"  , "Extract archives in archives"                 , True                  ),
- ("queue" , "bool" , "Wait for all downloads to be finished" , True ),
- ("renice" , "int" , "CPU Priority" , 0 )]
-
- __description = """Extract different kind of archives"""
- __license = "GPLv3"
- __authors = [("RaNaN", "ranan@pyload.org"),
- ("AndroKev", ""),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- event_map = {'all_downloads-processed': "allDownloadsProcessed"}
-
-
- def setup(self):
- self.plugins = []
- self.passwords = []
- names = []
-
- for p in ("UnRar", "UnZip"):
- try:
- module = self.core.pluginManager.loadModule("internal", p)
- klass = getattr(module, p)
- if klass.checkDeps():
- names.append(p)
- self.plugins.append(klass)
-
- except OSError, e:
- if e.errno == 2:
- self.logInfo(_("No %s installed") % p)
- else:
- self.logWarning(_("Could not activate %s") % p, e)
- if self.core.debug:
- print_exc()
-
- except Exception, e:
- self.logWarning(_("Could not activate %s") % p, e)
- if self.core.debug:
- print_exc()
-
- if names:
- self.logInfo(_("Activated") + " " + " ".join(names))
- else:
- self.logInfo(_("No Extract plugins activated"))
-
- # queue with package ids
- self.queue = []
-
-
- @Expose
- def extractPackage(self, id):
- """ Extract package with given id"""
- self.manager.startThread(self.extract, [id])
-
-
- def packageFinished(self, pypack):
- pid = pypack.id
- if self.getConfig("queue"):
- self.logInfo(_("Package %s queued for later extracting") % pypack.name)
- self.queue.append(pid)
- else:
- self.manager.startThread(self.extract, [pid])
-
-
- @threaded
- def allDownloadsProcessed(self, thread):
- local = copy(self.queue)
- del self.queue[:]
- if self.extract(local, thread): #: check only if all gone fine, no failed reporting for now
- self.manager.dispatchEvent("all_archives-extracted")
- self.manager.dispatchEvent("all_archives-processed")
-
-
- def extract(self, ids, thread=None):
- processed = []
- extracted = []
- failed = []
-
- destination = self.getConfig("destination")
- subfolder = self.getConfig("subfolder")
- fullpath = self.getConfig("fullpath")
- overwrite = self.getConfig("overwrite")
- excludefiles = self.getConfig("excludefiles")
- renice = self.getConfig("renice")
- recursive = self.getConfig("recursive")
-
- # reload from txt file
- self.reloadPasswords()
-
- # dl folder
- dl = self.config['general']['download_folder']
-
- #iterate packages -> plugins -> targets
- for pid in ids:
-            p = self.core.files.getPackage(pid)
-            if not p:
-                continue
-
-            self.logInfo(_("Check package %s") % p.name)
-
- # determine output folder
-            out = safe_join(dl, p.folder, destination, "")
- if subfolder:
- out = safe_join(out, fs_encode(p.folder))
-
- if not exists(out):
- makedirs(out)
-
- files_ids = [(safe_join(dl, p.folder, x['name']), x['id']) for x in p.getChildren().itervalues()]
- matched = False
- success = True
-
- # check as long there are unseen files
- while files_ids:
- new_files_ids = []
-
- for plugin in self.plugins:
- targets = plugin.getTargets(files_ids)
- if targets:
-                        self.logDebug("Targets for %s: %s" % (plugin.__name__, targets))
- matched = True
- for target, fid in targets:
- if target in processed:
- self.logDebug(basename(target), "skipped")
- continue
-
- processed.append(target) # prevent extracting same file twice
-
- self.logInfo(basename(target), _("Extract to %s") % out)
- try:
- klass = plugin(self, target, out, fullpath, overwrite, excludefiles, renice)
- klass.init()
- password = p.password.strip().splitlines()
- new_files = self._extract(klass, fid, password, thread)
- except Exception, e:
- self.logError(basename(target), e)
- success = False
- continue
-
- self.logDebug("Extracted", new_files)
- self.setPermissions(new_files)
-
- for file in new_files:
- if not exists(file):
-                                    self.logDebug("New file %s does not exist" % file)
- continue
- if recursive and isfile(file):
- new_files_ids.append((file, fid)) # append as new target
-
- files_ids = new_files_ids # also check extracted files
-
- if matched:
- if success:
- extracted.append(pid)
- self.manager.dispatchEvent("package-extracted", p)
- else:
- failed.append(pid)
- self.manager.dispatchEvent("package-extract_failed", p)
- else:
- self.logInfo(_("No files found to extract"))
-
-        return not failed
-
-
- def _extract(self, plugin, fid, passwords, thread):
- pyfile = self.core.files.getFile(fid)
- deletearchive = self.getConfig("deletearchive")
-
- pyfile.setCustomStatus(_("extracting"))
- thread.addActive(pyfile) # keep this file until everything is done
-
- try:
- progress = lambda x: pyfile.setProgress(x)
- success = False
-
- if not plugin.checkArchive():
- plugin.extract(progress)
- success = True
- else:
- self.logInfo(basename(plugin.file), _("Password protected"))
- self.logDebug("Passwords", passwords)
-
- pwlist = copy(self.getPasswords())
- # remove already supplied pws from list (only local)
- for pw in passwords:
- if pw in pwlist:
- pwlist.remove(pw)
-
- for pw in passwords + pwlist:
- try:
- self.logDebug("Try password", pw)
- if plugin.checkPassword(pw):
- plugin.extract(progress, pw)
- self.addPassword(pw)
- success = True
- break
- except WrongPassword:
- self.logDebug("Password was wrong")
-
- if not success:
- raise Exception(_("Wrong password"))
-
- if self.core.debug:
- self.logDebug("Would delete", ", ".join(plugin.getDeleteFiles()))
-
- if deletearchive:
- files = plugin.getDeleteFiles()
- self.logInfo(_("Deleting %s files") % len(files))
- for f in files:
- if exists(f):
- remove(f)
- else:
-                        self.logDebug("%s does not exist" % f)
-
- self.logInfo(basename(plugin.file), _("Extracting finished"))
-
- extracted_files = plugin.getExtractedFiles()
- self.manager.dispatchEvent("archive-extracted", pyfile, plugin.out, plugin.file, extracted_files)
-
- return extracted_files
-
- except ArchiveError, e:
- self.logError(basename(plugin.file), _("Archive Error"), e)
- except CRCError:
- self.logError(basename(plugin.file), _("CRC Mismatch"))
- except Exception, e:
- if self.core.debug:
- print_exc()
- self.logError(basename(plugin.file), _("Unknown Error"), e)
-
- self.manager.dispatchEvent("archive-extract_failed", pyfile)
- raise Exception(_("Extract failed"))
-
-
- @Expose
- def getPasswords(self):
- """ List of saved passwords """
- return self.passwords
-
-
- def reloadPasswords(self):
- passwordfile = self.getConfig("passwordfile")
-
- try:
- passwords = []
- with open(passwordfile, "a+") as f:
- for pw in f.read().splitlines():
- passwords.append(pw)
-
- except IOError, e:
- self.logError(e)
-
- else:
- self.passwords = passwords
-
-
- @Expose
- def addPassword(self, pw):
- """ Adds a password to saved list"""
- passwordfile = self.getConfig("passwordfile")
-
- if pw in self.passwords:
- self.passwords.remove(pw)
-
- self.passwords.insert(0, pw)
-
- try:
- with open(passwordfile, "wb") as f:
- for pw in self.passwords:
- f.write(pw + "\n")
- except IOError, e:
- self.logError(e)
-
-
- def setPermissions(self, files):
- for f in files:
- if not exists(f):
- continue
- try:
- if self.config['permission']['change_file']:
- if isfile(f):
- chmod(f, int(self.config['permission']['file'], 8))
- elif isdir(f):
- chmod(f, int(self.config['permission']['folder'], 8))
-
- if self.config['permission']['change_dl'] and os.name != "nt":
- uid = getpwnam(self.config['permission']['user'])[2]
- gid = getgrnam(self.config['permission']['group'])[2]
- chown(f, uid, gid)
- except Exception, e:
- self.logWarning(_("Setting User and Group failed"), e)
diff --git a/pyload/plugins/addon/HotFolder.py b/pyload/plugins/addon/HotFolder.py
deleted file mode 100644
index 297a06c5a..000000000
--- a/pyload/plugins/addon/HotFolder.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import time
-
-from os import listdir, makedirs
-from os.path import exists, isfile, join
-from shutil import move
-
-from pyload.plugins.Addon import Addon
-from pyload.utils import fs_encode, safe_join
-
-
-class HotFolder(Addon):
- __name = "HotFolder"
- __type = "addon"
- __version = "0.12"
-
- __config = [("folder" , "str" , "Folder to observe" , "container"),
- ("watch_file", "bool", "Observe link file" , False ),
- ("keep" , "bool", "Keep added containers", True ),
- ("file" , "str" , "Link file" , "links.txt")]
-
-    __description = """Observe a folder and a link file for changes and add containers and links"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.de")]
-
-
- def setup(self):
- self.interval = 10
-
-
- def activate(self):
- self.initPeriodical()
-
-
- def periodical(self):
- folder = fs_encode(self.getConfig("folder"))
-
- try:
- if not exists(join(folder, "finished")):
- makedirs(join(folder, "finished"))
-
- if self.getConfig("watch_file"):
- with open(fs_encode(self.getConfig("file")), "a+") as f:
- content = f.read().strip()
-
- if content:
- name = "%s_%s.txt" % (self.getConfig("file"), time.strftime("%H-%M-%S_%d%b%Y"))
-
- with open(safe_join(folder, "finished", name), "wb") as f:
- f.write(content)
-
- self.core.api.addPackage(f.name, [f.name], 1)
-
- for f in listdir(folder):
- path = join(folder, f)
-
- if not isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."):
- continue
-
- newpath = join(folder, "finished", f if self.getConfig("keep") else "tmp_" + f)
- move(path, newpath)
-
- self.logInfo(_("Added %s from HotFolder") % f)
- self.core.api.addPackage(f, [newpath], 1)
-
- except IOError, e:
- self.logError(e)
diff --git a/pyload/plugins/addon/IRCInterface.py b/pyload/plugins/addon/IRCInterface.py
deleted file mode 100644
index e7a905bf7..000000000
--- a/pyload/plugins/addon/IRCInterface.py
+++ /dev/null
@@ -1,431 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-import socket
-import ssl
-import time
-
-from pycurl import FORM_FILE
-from select import select
-from threading import Thread
-from time import sleep
-from traceback import print_exc
-
-from pyload.api import PackageDoesNotExists, FileDoesNotExists
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Addon import Addon
-from pyload.utils import formatSize
-
-
-class IRCInterface(Thread, Addon):
- __name = "IRCInterface"
- __type = "addon"
- __version = "0.13"
-
- __config = [("host" , "str" , "IRC-Server Address" , "Enter your server here!"),
- ("port" , "int" , "IRC-Server Port" , 6667 ),
- ("ident" , "str" , "Clients ident" , "pyload-irc" ),
- ("realname" , "str" , "Realname" , "pyload-irc" ),
- ("ssl" , "bool", "Use SSL" , False ),
- ("nick" , "str" , "Nickname the Client will take" , "pyLoad-IRC" ),
- ("owner" , "str" , "Nickname the Client will accept commands from", "Enter your nick here!" ),
- ("info_file", "bool", "Inform about every file finished" , False ),
- ("info_pack", "bool", "Inform about every package finished" , True ),
- ("captcha" , "bool", "Send captcha requests" , True )]
-
-    __description = """Connect to IRC and let the owner perform different tasks"""
- __license = "GPLv3"
- __authors = [("Jeix", "Jeix@hasnomail.com")]
-
-
- def __init__(self, core, manager):
- Thread.__init__(self)
- Addon.__init__(self, core, manager)
- self.setDaemon(True)
-
-
- def activate(self):
- self.abort = False
- self.more = []
- self.new_package = {}
-
- self.start()
-
-
- def packageFinished(self, pypack):
- try:
- if self.getConfig("info_pack"):
- self.response(_("Package finished: %s") % pypack.name)
- except Exception:
- pass
-
-
- def downloadFinished(self, pyfile):
- try:
- if self.getConfig("info_file"):
- self.response(
- _("Download finished: %(name)s @ %(plugin)s ") % {"name": pyfile.name, "plugin": pyfile.pluginname})
- except Exception:
- pass
-
-
- def captchaTask(self, task):
- if self.getConfig("captcha") and task.isTextual():
- task.handler.append(self)
- task.setWaiting(60)
-
- page = getURL("http://www.freeimagehosting.net/upload.php",
- post={"attached": (FORM_FILE, task.captchaFile)}, multipart=True)
-
- url = re.search(r"\[img\]([^\[]+)\[/img\]\[/url\]", page).group(1)
- self.response(_("New Captcha Request: %s") % url)
- self.response(_("Answer with 'c %s text on the captcha'") % task.id)
-
-
- def run(self):
- # connect to IRC etc.
- self.sock = socket.socket()
- host = self.getConfig("host")
- self.sock.connect((host, self.getConfig("port")))
-
- if self.getConfig("ssl"):
- self.sock = ssl.wrap_socket(self.sock, cert_reqs=ssl.CERT_NONE) #@TODO: support custom certificate
-
- nick = self.getConfig("nick")
- self.sock.send("NICK %s\r\n" % nick)
- self.sock.send("USER %s %s bla :%s\r\n" % (nick, host, nick))
- for t in self.getConfig("owner").split():
- if t.strip().startswith("#"):
- self.sock.send("JOIN %s\r\n" % t.strip())
- self.logInfo(_("Connected to"), host)
- self.logInfo(_("Switching to listening mode!"))
- try:
- self.main_loop()
-
- except IRCError, ex:
- self.sock.send("QUIT :byebye\r\n")
- print_exc()
- self.sock.close()
-
-
- def main_loop(self):
- readbuffer = ""
- while True:
- sleep(1)
- fdset = select([self.sock], [], [], 0)
- if self.sock not in fdset[0]:
- continue
-
- if self.abort:
- raise IRCError("quit")
-
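-            # collect raw data; the last element after the split may be an incomplete
-            # line and is kept in the buffer for the next iteration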
- readbuffer += self.sock.recv(1024)
- temp = readbuffer.split("\n")
- readbuffer = temp.pop()
-
- for line in temp:
- line = line.rstrip()
- first = line.split()
-
- if first[0] == "PING":
- self.sock.send("PONG %s\r\n" % first[1])
-
- if first[0] == "ERROR":
- raise IRCError(line)
-
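-                # a raw IRC line looks like ":origin ACTION target :text"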
- msg = line.split(None, 3)
- if len(msg) < 4:
- continue
-
- msg = {
- "origin": msg[0][1:],
- "action": msg[1],
- "target": msg[2],
- "text": msg[3][1:]
- }
-
- self.handle_events(msg)
-
-
- def handle_events(self, msg):
- if not msg['origin'].split("!", 1)[0] in self.getConfig("owner").split():
- return
-
- if msg['target'].split("!", 1)[0] != self.getConfig("nick"):
- return
-
- if msg['action'] != "PRIVMSG":
- return
-
- # HANDLE CTCP ANTI FLOOD/BOT PROTECTION
- if msg['text'] == "\x01VERSION\x01":
- self.logDebug("Sending CTCP VERSION")
- self.sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
- return
- elif msg['text'] == "\x01TIME\x01":
- self.logDebug("Sending CTCP TIME")
- self.sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
- return
- elif msg['text'] == "\x01LAG\x01":
- self.logDebug("Received CTCP LAG") #: don't know how to answer
- return
-
- trigger = "pass"
- args = None
-
- try:
- temp = msg['text'].split()
- trigger = temp[0]
- if len(temp) > 1:
- args = temp[1:]
- except Exception:
- pass
-
- handler = getattr(self, "event_%s" % trigger, self.event_pass)
- try:
- res = handler(args)
- for line in res:
- self.response(line, msg['origin'])
- except Exception, e:
- self.logError(e)
-
-
- def response(self, msg, origin=""):
- if origin == "":
- for t in self.getConfig("owner").split():
- self.sock.send("PRIVMSG %s :%s\r\n" % (t.strip(), msg))
- else:
- self.sock.send("PRIVMSG %s :%s\r\n" % (origin.split("!", 1)[0], msg))
-
-
- #### Events
-
- def event_pass(self, args):
- return []
-
-
- def event_status(self, args):
- downloads = self.core.api.statusDownloads()
- if not downloads:
- return ["INFO: There are no active downloads currently."]
-
- temp_progress = ""
- lines = ["ID - Name - Status - Speed - ETA - Progress"]
- for data in downloads:
-
- if data.status == 5:
- temp_progress = data.format_wait
- else:
- temp_progress = "%d%% (%s)" % (data.percent, data.format_size)
-
- lines.append("#%d - %s - %s - %s - %s - %s" %
- (
- data.fid,
- data.name,
- data.statusmsg,
- "%s/s" % formatSize(data.speed),
- "%s" % data.format_eta,
- temp_progress
- ))
- return lines
-
-
- def event_queue(self, args):
- ps = self.core.api.getQueueData()
-
- if not ps:
- return ["INFO: There are no packages in queue."]
-
- lines = []
- for pack in ps:
- lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links)))
-
- return lines
-
-
- def event_collector(self, args):
- ps = self.core.api.getCollectorData()
- if not ps:
- return ["INFO: No packages in collector!"]
-
- lines = []
- for pack in ps:
- lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links)))
-
- return lines
-
-
- def event_info(self, args):
- if not args:
- return ["ERROR: Use info like this: info <id>"]
-
- info = None
- try:
- info = self.core.api.getFileData(int(args[0]))
-
- except FileDoesNotExists:
-            return ["ERROR: Link doesn't exist."]
-
- return ['LINK #%s: %s (%s) [%s][%s]' % (info.fid, info.name, info.format_size, info.statusmsg, info.plugin)]
-
-
- def event_packinfo(self, args):
- if not args:
- return ["ERROR: Use packinfo like this: packinfo <id>"]
-
- lines = []
- pack = None
- try:
- pack = self.core.api.getPackageData(int(args[0]))
-
- except PackageDoesNotExists:
-            return ["ERROR: Package doesn't exist."]
-
- id = args[0]
-
- self.more = []
-
- lines.append('PACKAGE #%s: "%s" with %d links' % (id, pack.name, len(pack.links)))
- for pyfile in pack.links:
- self.more.append('LINK #%s: %s (%s) [%s][%s]' % (pyfile.fid, pyfile.name, pyfile.format_size,
- pyfile.statusmsg, pyfile.plugin))
-
- if len(self.more) < 6:
- lines.extend(self.more)
- self.more = []
- else:
- lines.extend(self.more[:6])
- self.more = self.more[6:]
-            lines.append("%d more links to display." % len(self.more))
-
- return lines
-
-
- def event_more(self, args):
- if not self.more:
- return ["No more information to display."]
-
- lines = self.more[:6]
- self.more = self.more[6:]
-        lines.append("%d more links to display." % len(self.more))
-
- return lines
-
-
- def event_start(self, args):
- self.core.api.unpauseServer()
- return ["INFO: Starting downloads."]
-
-
- def event_stop(self, args):
- self.core.api.pauseServer()
- return ["INFO: No new downloads will be started."]
-
-
- def event_add(self, args):
- if len(args) < 2:
- return ['ERROR: Add links like this: "add <packagename|id> links". ',
-                    "This will add the link <link> to the package <package> / the package with id <id>!"]
-
- pack = args[0].strip()
- links = [x.strip() for x in args[1:]]
-
- count_added = 0
- count_failed = 0
- try:
- id = int(pack)
- pack = self.core.api.getPackageData(id)
- if not pack:
-                return ["ERROR: Package doesn't exist."]
-
- #TODO add links
-
- return ["INFO: Added %d links to Package %s [#%d]" % (len(links), pack['name'], id)]
-
- except Exception:
- # create new package
- id = self.core.api.addPackage(pack, links, 1)
- return ["INFO: Created new Package %s [#%d] with %d links." % (pack, id, len(links))]
-
-
- def event_del(self, args):
- if len(args) < 2:
- return ["ERROR: Use del command like this: del -p|-l <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"]
-
- if args[0] == "-p":
- ret = self.core.api.deletePackages(map(int, args[1:]))
- return ["INFO: Deleted %d packages!" % len(args[1:])]
-
- elif args[0] == "-l":
- ret = self.core.api.delLinks(map(int, args[1:]))
- return ["INFO: Deleted %d links!" % len(args[1:])]
-
- else:
- return ["ERROR: Use del command like this: del <-p|-l> <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"]
-
-
- def event_push(self, args):
- if not args:
- return ["ERROR: Push package to queue like this: push <package id>"]
-
- id = int(args[0])
- try:
- info = self.core.api.getPackageInfo(id)
- except PackageDoesNotExists:
- return ["ERROR: Package #%d does not exist." % id]
-
- self.core.api.pushToQueue(id)
- return ["INFO: Pushed package #%d to queue." % id]
-
-
- def event_pull(self, args):
- if not args:
- return ["ERROR: Pull package from queue like this: pull <package id>."]
-
- id = int(args[0])
- if not self.core.api.getPackageData(id):
- return ["ERROR: Package #%d does not exist." % id]
-
- self.core.api.pullFromQueue(id)
- return ["INFO: Pulled package #%d from queue to collector." % id]
-
-
- def event_c(self, args):
- """ captcha answer """
- if not args:
- return ["ERROR: Captcha ID missing."]
-
- task = self.core.captchaManager.getTaskByID(args[0])
- if not task:
-            return ["ERROR: Captcha Task with ID %s does not exist." % args[0]]
-
- task.setResult(" ".join(args[1:]))
- return ["INFO: Result %s saved." % " ".join(args[1:])]
-
-
- def event_help(self, args):
- lines = ["The following commands are available:",
- "add <package|packid> <links> [...] Adds link to package. (creates new package if it does not exist)",
- "queue Shows all packages in the queue",
- "collector Shows all packages in collector",
- "del -p|-l <id> [...] Deletes all packages|links with the ids specified",
- "info <id> Shows info of the link with id <id>",
- "packinfo <id> Shows info of the package with id <id>",
- "more Shows more info when the result was truncated",
- "start Starts all downloads",
- "stop Stops the download (but not abort active downloads)",
- "push <id> Push package to queue",
- "pull <id> Pull package from queue",
- "status Show general download status",
- "help Shows this help message"]
- return lines
-
-
-class IRCError(Exception):
-
- def __init__(self, value):
- self.value = value
-
-
- def __str__(self):
- return repr(self.value)
diff --git a/pyload/plugins/addon/MergeFiles.py b/pyload/plugins/addon/MergeFiles.py
deleted file mode 100644
index 71ad7a39d..000000000
--- a/pyload/plugins/addon/MergeFiles.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import os
-import re
-
-from traceback import print_exc
-
-from pyload.plugins.Addon import Addon, threaded
-from pyload.utils import safe_join, fs_encode
-
-
-class MergeFiles(Addon):
- __name = "MergeFiles"
- __type = "addon"
- __version = "0.13"
-
- __config = [("activated", "bool", "Activated", True)]
-
-    __description = """Merges file parts split with hjsplit"""
- __license = "GPLv3"
- __authors = [("and9000", "me@has-no-mail.com")]
-
-
- BUFFER_SIZE = 4096
-
-
- def setup(self):
- pass
-
-
- @threaded
- def packageFinished(self, pack):
- files = {}
- fid_dict = {}
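-        # group split parts ("<name>.001", "<name>.002", ...) by their base file name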
- for fid, data in pack.getChildren().iteritems():
- if re.search("\.\d{3}$", data['name']):
- if data['name'][:-4] not in files:
- files[data['name'][:-4]] = []
- files[data['name'][:-4]].append(data['name'])
- files[data['name'][:-4]].sort()
- fid_dict[data['name']] = fid
-
- download_folder = self.config['general']['download_folder']
-
- if self.config['general']['folder_per_package']:
- download_folder = safe_join(download_folder, pack.folder)
-
- for name, file_list in files.iteritems():
- self.logInfo(_("Starting merging of"), name)
-
- final_file = open(safe_join(download_folder, name), "wb")
- for splitted_file in file_list:
- self.logDebug("Merging part", splitted_file)
-
- pyfile = self.core.files.getFile(fid_dict[splitted_file])
-
- pyfile.setStatus("processing")
-
- try:
- with open(os.path.join(download_folder, splitted_file), "rb") as s_file:
- size_written = 0
- s_file_size = int(os.path.getsize(os.path.join(download_folder, splitted_file)))
-
- while True:
- f_buffer = s_file.read(self.BUFFER_SIZE)
- if f_buffer:
- final_file.write(f_buffer)
- size_written += self.BUFFER_SIZE
- pyfile.setProgress((size_written * 100) / s_file_size)
- else:
- break
-
- self.logDebug("Finished merging part", splitted_file)
-
- except Exception, e:
- print_exc()
-
- finally:
- pyfile.setProgress(100)
- pyfile.setStatus("finished")
- pyfile.release()
-
-            final_file.close()
-            self.logInfo(_("Finished merging of"), name)
diff --git a/pyload/plugins/addon/MultiHome.py b/pyload/plugins/addon/MultiHome.py
deleted file mode 100644
index f9c9e1cef..000000000
--- a/pyload/plugins/addon/MultiHome.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from time import time
-
-from pyload.plugins.Addon import Addon
-
-
-class MultiHome(Addon):
- __name = "MultiHome"
- __type = "addon"
- __version = "0.12"
-
- __config = [("interfaces", "str", "Interfaces", "None")]
-
-    __description = """IP address changer"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
- def setup(self):
- self.register = {}
- self.interfaces = []
- self.parseInterfaces(self.getConfig("interfaces").split(";"))
- if not self.interfaces:
- self.parseInterfaces([self.config['download']['interface']])
- self.setConfig("interfaces", self.toConfig())
-
-
- def toConfig(self):
- return ";".join([i.adress for i in self.interfaces])
-
-
- def parseInterfaces(self, interfaces):
- for interface in interfaces:
- if not interface or str(interface).lower() == "none":
- continue
- self.interfaces.append(Interface(interface))
-
-
- def activate(self):
- requestFactory = self.core.requestFactory
- oldGetRequest = requestFactory.getRequest
-
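-        # wrap the factory method so each new request is bound to the interface that
-        # was used least recently for this plugin/account pair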
- def getRequest(pluginName, account=None):
- iface = self.bestInterface(pluginName, account)
- if iface:
- iface.useFor(pluginName, account)
- requestFactory.iface = lambda: iface.adress
- self.logDebug("Using address", iface.adress)
- return oldGetRequest(pluginName, account)
-
- requestFactory.getRequest = getRequest
-
-
- def bestInterface(self, pluginName, account):
- best = None
- for interface in self.interfaces:
- if not best or interface.lastPluginAccess(pluginName, account) < best.lastPluginAccess(pluginName, account):
- best = interface
- return best
-
-
-class Interface(object):
-
- def __init__(self, adress):
- self.adress = adress
- self.history = {}
-
-
- def lastPluginAccess(self, pluginName, account):
- if (pluginName, account) in self.history:
- return self.history[(pluginName, account)]
- return 0
-
-
- def useFor(self, pluginName, account):
- self.history[(pluginName, account)] = time()
-
-
- def __repr__(self):
- return "<Interface - %s>" % self.adress
diff --git a/pyload/plugins/addon/RestartFailed.py b/pyload/plugins/addon/RestartFailed.py
deleted file mode 100644
index ffb305e71..000000000
--- a/pyload/plugins/addon/RestartFailed.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Addon import Addon
-
-
-class RestartFailed(Addon):
- __name = "RestartFailed"
- __type = "addon"
- __version = "1.57"
-
- __config = [("activated", "bool", "Activated" , True),
- ("interval" , "int" , "Check interval in minutes", 90 )]
-
- __description = """Periodically restart all failed downloads in queue"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- # event_list = ["pluginConfigChanged"]
-
- MIN_INTERVAL = 15 * 60 #: 15m minimum check interval (value is in seconds)
-
-
- def pluginConfigChanged(self, plugin, name, value):
- if name == "interval":
- interval = value * 60
- if self.MIN_INTERVAL <= interval != self.interval:
- self.core.scheduler.removeJob(self.cb)
- self.interval = interval
- self.initPeriodical()
- else:
- self.logDebug("Invalid interval value, kept current")
-
-
- def periodical(self):
- self.logDebug(_("Restart failed downloads"))
- self.core.api.restartFailed()
-
-
- def setup(self):
- self.interval = 0
-
-
- def activate(self):
- self.pluginConfigChanged(self.__name, "interval", self.getConfig("interval"))
diff --git a/pyload/plugins/addon/RestartSlow.py b/pyload/plugins/addon/RestartSlow.py
deleted file mode 100644
index 61d842b7d..000000000
--- a/pyload/plugins/addon/RestartSlow.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import pycurl
-
-from pyload.plugins.Addon import Addon
-
-
-class RestartSlow(Addon):
- __name = "RestartSlow"
- __type = "addon"
- __version = "0.02"
-
- __config = [("free_limit" , "int" , "Transfer speed threshold in kilobytes" , 100 ),
- ("free_time" , "int" , "Sample interval in minutes" , 5 ),
- ("premium_limit", "int" , "Transfer speed threshold for premium download in kilobytes", 300 ),
- ("premium_time" , "int" , "Sample interval for premium download in minutes" , 2 ),
- ("safe_mode" , "bool", "Don't restart if download is not resumable" , True)]
-
- __description = """Restart slow downloads"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- event_map = {'download-start': "downloadStarts"}
-
-
- def setup(self):
- self.info = {'chunk': {}}
-
-
- def periodical(self):
- if not self.pyfile.req.dl:
- return
-
- if self.getConfig("safe_mode") and not self.pyfile.plugin.resumeDownload:
- time = 30
- limit = 5
- else:
- type = "premium" if self.pyfile.plugin.premium else "free"
- time = max(30, self.getConfig("%s_time" % type) * 60)
- limit = max(5, self.getConfig("%s_limit" % type) * 1024)
-
- chunks = [chunk for chunk in self.pyfile.req.dl.chunks \
-                  if chunk.id not in self.info['chunk'] or self.info['chunk'][chunk.id] != (time, limit)]
-
- for chunk in chunks:
- chunk.c.setopt(pycurl.LOW_SPEED_TIME , time)
- chunk.c.setopt(pycurl.LOW_SPEED_LIMIT, limit)
-
- self.info['chunk'][chunk.id] = (time, limit)
-
-
- def downloadStarts(self, pyfile, url, filename):
- if self.cb or (self.getConfig("safe_mode") and not pyfile.plugin.resumeDownload):
- return
-
-        self.pyfile = pyfile  #: keep a reference to the current download for periodical()
-        self.initPeriodical()
diff --git a/pyload/plugins/addon/SkipRev.py b/pyload/plugins/addon/SkipRev.py
deleted file mode 100644
index 90544959e..000000000
--- a/pyload/plugins/addon/SkipRev.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from urllib import unquote
-from urlparse import urlparse
-
-from pyload.plugins.Addon import Addon
-from pyload.plugins.Plugin import SkipDownload
-
-
-class SkipRev(Addon):
- __name = "SkipRev"
- __type = "addon"
- __version = "0.15"
-
- __config = [("tokeep", "int", "Number of rev files to keep for package (-1 to auto)", -1)]
-
- __description = """Skip files ending with extension rev"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- def _setup(self):
- super(self.pyfile.plugin, self).setup()
- if self.pyfile.hasStatus("skipped"):
- raise SkipDownload(self.pyfile.getStatusName() or self.pyfile.pluginname)
-
-
- def pyname(self, pyfile):
- url = pyfile.url
- plugin = pyfile.plugin
-
- if hasattr(plugin, "info") and 'name' in plugin.info and plugin.info['name']:
- name = plugin.info['name']
-
- elif hasattr(plugin, "parseInfos"):
- name = next(plugin.parseInfos([url]))['name']
-
- elif hasattr(plugin, "getInfo"): #@NOTE: if parseInfos was not found, getInfo should be missing too
- name = plugin.getInfo(url)['name']
-
- else:
- self.logWarning("Unable to grab file name")
-            name = urlparse(unquote(url)).path.split('/')[-1]
-
- return name
-
-
- def downloadPreparing(self, pyfile):
-        if pyfile.getStatusName() == "unskipped" or not self.pyname(pyfile).endswith(".rev"):
- return
-
- tokeep = self.getConfig("tokeep")
-
- if tokeep:
- saved = [True for link in pyfile.package().getChildren() \
- if link.name.endswith(".rev") and (link.hasStatus("finished") or link.hasStatus("downloading"))].count(True)
-
- if not saved or saved < tokeep: #: keep one rev at least in auto mode
- return
-
- pyfile.setCustomStatus("SkipRev", "skipped")
- pyfile.plugin.setup = _setup #: work-around: inject status checker inside the preprocessing routine of the plugin
-
-
- def downloadFailed(self, pyfile):
- tokeep = self.getConfig("tokeep")
-
- if not tokeep:
- return
-
- for link in pyfile.package().getChildren():
- if link.hasStatus("skipped") and link.name.endswith(".rev"):
- if tokeep > -1 or pyfile.name.endswith(".rev"):
- link.setStatus("queued")
- else:
- link.setCustomStatus("unskipped", "queued")
- return
diff --git a/pyload/plugins/addon/UnSkipOnFail.py b/pyload/plugins/addon/UnSkipOnFail.py
deleted file mode 100644
index 15c0dfa4a..000000000
--- a/pyload/plugins/addon/UnSkipOnFail.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from os.path import basename
-
-from pyload.datatype.PyFile import PyFile
-from pyload.plugins.Addon import Addon
-from pyload.utils import fs_encode
-
-
-class UnSkipOnFail(Addon):
- __name = "UnSkipOnFail"
- __type = "addon"
- __version = "0.02"
-
- __config = [("activated", "bool", "Activated", True)]
-
- __description = """When a download fails, restart skipped duplicates"""
- __license = "GPLv3"
- __authors = [("hagg", "")]
-
-
- def downloadFailed(self, pyfile):
- pyfile_name = basename(pyfile.name)
- pid = pyfile.package().id
- msg = _('look for skipped duplicates for %s (pid:%s)')
- self.logInfo(msg % (pyfile_name, pid))
- dups = self.findDuplicates(pyfile)
- for link in dups:
- # check if link is "skipped"(=4)
- if link.status == 4:
- lpid = link.packageID
- self.logInfo(_('restart "%s" (pid:%s)') % (pyfile_name, lpid))
- self.setLinkStatus(link, "queued")
-
-
- def findDuplicates(self, pyfile):
- """ Search all packages for duplicate links to "pyfile".
- Duplicates are links that would overwrite "pyfile".
-        To test for duplicity, the package folder and link name
-        of two links are compared (basename(link.name)).
-        So this method returns a list of all links with the same
-        package folder and file name as "pyfile", except the
-        data for "pyfile" itself.
-        It does NOT check the link's status.
- """
- dups = []
- pyfile_name = fs_encode(basename(pyfile.name))
- # get packages (w/o files, as most file data is useless here)
- queue = self.core.api.getQueue()
- for package in queue:
- # check if package-folder equals pyfile's package folder
- if fs_encode(package.folder) == fs_encode(pyfile.package().folder):
- # now get packaged data w/ files/links
- pdata = self.core.api.getPackageData(package.pid)
- if pdata.links:
- for link in pdata.links:
- link_name = fs_encode(basename(link.name))
- # check if link name collides with pdata's name
- if link_name == pyfile_name:
- # at last check if it is not pyfile itself
- if link.fid != pyfile.id:
- dups.append(link)
- return dups
-
-
- def setLinkStatus(self, link, new_status):
- """ Change status of "link" to "new_status".
- "link" has to be a valid FileData object,
- "new_status" has to be a valid status name
- (i.e. "queued" for this Plugin)
- It creates a temporary PyFile object using
- "link" data, changes its status, and tells
- the core.files-manager to save its data.
- """
- pyfile = PyFile(self.core.files,
- link.fid,
- link.url,
- link.name,
- link.size,
- link.status,
- link.error,
- link.plugin,
- link.packageID,
- link.order)
- pyfile.setStatus(new_status)
- self.core.files.save()
- pyfile.release()
diff --git a/pyload/plugins/addon/UpdateManager.py b/pyload/plugins/addon/UpdateManager.py
deleted file mode 100644
index 05197f15d..000000000
--- a/pyload/plugins/addon/UpdateManager.py
+++ /dev/null
@@ -1,305 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import re
-import sys
-
-from operator import itemgetter
-from os import path, remove, stat
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Addon import Expose, Addon, threaded
-from pyload.utils import safe_join
-
-
-class UpdateManager(Addon):
- __name = "UpdateManager"
- __type = "addon"
- __version = "0.42"
-
- __config = [("activated" , "bool" , "Activated" , True ),
- ("mode" , "pyLoad + plugins;plugins only", "Check updates for" , "pyLoad + plugins"),
- ("interval" , "int" , "Check interval in hours" , 8 ),
- ("autorestart" , "bool" , "Automatically restart pyLoad when required" , True ),
- ("reloadplugins", "bool" , "Monitor plugins for code changes in debug mode", True ),
- ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , True )]
-
- __description = """Check for updates"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- # event_list = ["pluginConfigChanged"]
-
- SERVER_URL = "http://updatemanager.pyload.org"
- VERSION = re.compile(r'__version.*=.*("|\')([\d.]+)')
- MIN_INTERVAL = 3 * 60 * 60 #: 3h minimum check interval (value is in seconds)
-
-
- def pluginConfigChanged(self, plugin, name, value):
- if name == "interval":
- interval = value * 60 * 60
- if self.MIN_INTERVAL <= interval != self.interval:
- self.core.scheduler.removeJob(self.cb)
- self.interval = interval
- self.initPeriodical()
- else:
- self.logDebug("Invalid interval value, kept current")
-
- elif name == "reloadplugins":
- if self.cb2:
- self.core.scheduler.removeJob(self.cb2)
- if value is True and self.core.debug:
- self.periodical2()
-
-
- def activate(self):
- self.pluginConfigChanged(self.__name, "interval", self.getConfig("interval"))
- x = lambda: self.pluginConfigChanged(self.__name, "reloadplugins", self.getConfig("reloadplugins"))
- self.core.scheduler.addJob(10, x, threaded=False)
-
-
- def deactivate(self):
- self.pluginConfigChanged(self.__name, "reloadplugins", False)
-
-
- def setup(self):
- self.cb2 = None
- self.interval = 0
- self.updating = False
- self.info = {'pyload': False, 'version': None, 'plugins': False}
- self.mtimes = {} #: store modification time for each plugin
-
-
- def periodical2(self):
- if not self.updating:
- self.autoreloadPlugins()
-
- self.cb2 = self.core.scheduler.addJob(4, self.periodical2, threaded=False)
-
-
- @Expose
- def autoreloadPlugins(self):
- """ reload and reindex all modified plugins """
- modules = filter(
-            lambda m: m and (m.__name__.startswith("pyload.plugins.") or
-                             m.__name__.startswith("userplugins.")) and
-                            m.__name__.count(".") >= 2, sys.modules.itervalues()
- )
-
- reloads = []
-
- for m in modules:
-            root, type, name = m.__name__.rsplit(".", 2)
- id = (type, name)
- if type in self.core.pluginManager.plugins:
- f = m.__file__.replace(".pyc", ".py")
- if not path.isfile(f):
- continue
-
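-                # remember the last seen modification time; a newer mtime marks the plugin for reload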
- mtime = stat(f).st_mtime
-
- if id not in self.mtimes:
- self.mtimes[id] = mtime
- elif self.mtimes[id] < mtime:
- reloads.append(id)
- self.mtimes[id] = mtime
-
- return True if self.core.pluginManager.reloadPlugins(reloads) else False
-
-
- def periodical(self):
- if self.info['pyload'] or self.getConfig("nodebugupdate") and self.core.debug:
- return
-
- self.updateThread()
-
-
- def server_request(self):
- try:
- return getURL(self.SERVER_URL, get={'v': self.core.api.getServerVersion()}).splitlines()
- except Exception:
- self.logWarning(_("Unable to contact server to get updates"))
-
-
- @threaded
- def updateThread(self):
- self.updating = True
-
- status = self.update(onlyplugin=self.getConfig("mode") == "plugins only")
-
-        if status == 2 and self.getConfig("autorestart"):
- self.core.api.restart()
- else:
- self.updating = False
-
-
- @Expose
- def updatePlugins(self):
- """ simple wrapper for calling plugin update quickly """
- return self.update(onlyplugin=True)
-
-
- @Expose
- def update(self, onlyplugin=False):
- """ check for updates """
- data = self.server_request()
-
- if not data:
- exitcode = 0
-
- elif data[0] == "None":
- self.logInfo(_("No new pyLoad version available"))
- updates = data[1:]
- exitcode = self._updatePlugins(updates)
-
- elif onlyplugin:
- exitcode = 0
-
- else:
- newversion = data[0]
- self.logInfo(_("*** New pyLoad Version %s available ***") % newversion)
- self.logInfo(_("*** Get it here: https://github.com/pyload/pyload/releases ***"))
- exitcode = 3
- self.info['pyload'] = True
- self.info['version'] = newversion
-
- return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required; 3 = No plugins updated, new pyLoad version available
-
-
- def _updatePlugins(self, updates):
- """ check for plugin updates """
-
- if self.info['plugins']:
- return False #: plugins were already updated
-
- exitcode = 0
- updated = []
-
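-        # server response layout: first line is the download url template, second the
-        # '|'-separated field schema, then one plugin entry per line, optionally
-        # followed by a BLACKLIST section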
- url = updates[0]
- schema = updates[1].split('|')
-
- if "BLACKLIST" in updates:
- blacklist = updates[updates.index('BLACKLIST') + 1:]
- updates = updates[2:updates.index('BLACKLIST')]
- else:
- blacklist = None
- updates = updates[2:]
-
- upgradable = [dict(zip(schema, x.split('|'))) for x in updates]
- blacklisted = [(x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]) for x in blacklist] if blacklist else []
-
- if blacklist:
- # Protect internal plugins against removing
-            for i, (t, n) in enumerate(blacklisted):
- if t == "internal":
- blacklisted.pop(i)
- continue
-
- for idx, plugin in enumerate(upgradable):
- if n == plugin['name'] and t == plugin['type']:
- upgradable.pop(idx)
- break
-
- for t, n in self.removePlugins(sorted(blacklisted)):
- self.logInfo(_("Removed blacklisted plugin [%(type)s] %(name)s") % {
- 'type': t,
- 'name': n,
- })
-
- for plugin in sorted(upgradable, key=itemgetter("type", "name")):
- filename = plugin['name']
- type = plugin['type']
- version = plugin['version']
-
- if filename.endswith(".pyc"):
- name = filename[:filename.find("_")]
- else:
- name = filename.replace(".py", "")
-
- plugins = getattr(self.core.pluginManager, "%sPlugins" % type)
-
- oldver = float(plugins[name]['version']) if name in plugins else None
- newver = float(version)
-
- if not oldver:
- msg = "New plugin: [%(type)s] %(name)s (v%(newver).2f)"
- elif newver > oldver:
- msg = "New version of plugin: [%(type)s] %(name)s (v%(oldver).2f -> v%(newver).2f)"
- else:
- continue
-
- self.logInfo(_(msg) % {'type' : type,
- 'name' : name,
- 'oldver': oldver,
- 'newver': newver})
- try:
- content = getURL(url % plugin)
- m = self.VERSION.search(content)
-
- if m and m.group(2) == version:
-                    with open(safe_join("userplugins", type, filename), "wb") as f:
- f.write(content)
-
-                    updated.append((type, name))
- else:
- raise Exception, _("Version mismatch")
-
- except Exception, e:
- self.logError(_("Error updating plugin: %s") % filename, str(e))
-
- if updated:
- reloaded = self.core.pluginManager.reloadPlugins(updated)
- if reloaded:
- self.logInfo(_("Plugins updated and reloaded"))
- exitcode = 1
- else:
- self.logInfo(_("*** Plugins have been updated, but need a pyLoad restart to be reloaded ***"))
- self.info['plugins'] = True
- exitcode = 2
- else:
- self.logInfo(_("No plugin updates available"))
-
- return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required
-
-
- @Expose
- def removePlugins(self, type_plugins):
- """ delete plugins from disk """
-
- if not type_plugins:
- return
-
- self.logDebug("Requested deletion of plugins: %s" % type_plugins)
-
- removed = []
-
- for type, name in type_plugins:
- err = False
- file = name + ".py"
-
- for root in ("userplugins", path.join(pypath, "pyload", "plugins")):
-
- filename = safe_join(root, type, file)
- try:
- remove(filename)
- except Exception, e:
- self.logDebug("Error deleting: %s" % path.basename(filename), e)
- err = True
-
- filename += "c"
- if path.isfile(filename):
- try:
- if type == "addon":
- self.manager.deactivateAddon(name)
- remove(filename)
- except Exception, e:
- self.logDebug("Error deleting: %s" % path.basename(filename), e)
- err = True
-
- if not err:
- id = (type, name)
- removed.append(id)
-
- return removed #: return a list of the plugins successfully removed
diff --git a/pyload/plugins/addon/WindowsPhoneToastNotify.py b/pyload/plugins/addon/WindowsPhoneToastNotify.py
deleted file mode 100644
index b12ed96d1..000000000
--- a/pyload/plugins/addon/WindowsPhoneToastNotify.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import httplib
-import time
-
-from pyload.plugins.Addon import Addon
-
-
-class WindowsPhoneToastNotify(Addon):
- __name = "WindowsPhoneToastNotify"
- __type = "addon"
- __version = "0.03"
-
- __config = [("force" , "bool", "Force even if client is connected" , False),
- ("pushId" , "str" , "pushId" , "" ),
- ("pushUrl" , "str" , "pushUrl" , "" ),
- ("pushTimeout", "int" , "Timeout between notifications in seconds", 0 )]
-
- __description = """Send push notifications to Windows Phone"""
- __license = "GPLv3"
- __authors = [("Andy Voigt", "phone-support@hotmail.de")]
-
-
- def getXmlData(self):
- myxml = ("<?xml version='1.0' encoding='utf-8'?> <wp:Notification xmlns:wp='WPNotification'> "
- "<wp:Toast> <wp:Text1>Pyload Mobile</wp:Text1> <wp:Text2>Captcha waiting!</wp:Text2> "
- "</wp:Toast> </wp:Notification>")
- return myxml
-
-
- def doRequest(self):
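-        # POST the toast XML to the configured push channel (host taken from 'pushUrl', path from 'pushId')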
- URL = self.getConfig("pushUrl")
- request = self.getXmlData()
- webservice = httplib.HTTP(URL)
- webservice.putrequest("POST", self.getConfig("pushId"))
- webservice.putheader("Host", URL)
- webservice.putheader("Content-type", "text/xml")
- webservice.putheader("X-NotificationClass", "2")
- webservice.putheader("X-WindowsPhone-Target", "toast")
- webservice.putheader("Content-length", "%d" % len(request))
- webservice.endheaders()
- webservice.send(request)
- webservice.close()
- self.setStorage("LAST_NOTIFY", time.time())
-
-
- def captchaTask(self, task):
- if not self.getConfig("pushId") or not self.getConfig("pushUrl"):
- return False
-
- if self.core.isClientConnected() and not self.getConfig("force"):
- return False
-
- if (time.time() - float(self.getStorage("LAST_NOTIFY", 0))) < self.getConf("pushTimeout"):
- return False
-
- self.doRequest()
diff --git a/pyload/plugins/addon/XMPPInterface.py b/pyload/plugins/addon/XMPPInterface.py
deleted file mode 100644
index 51e904008..000000000
--- a/pyload/plugins/addon/XMPPInterface.py
+++ /dev/null
@@ -1,252 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyxmpp import streamtls
-from pyxmpp.all import JID, Message
-from pyxmpp.interface import implements
-from pyxmpp.interfaces import *
-from pyxmpp.jabber.client import JabberClient
-
-from pyload.plugins.addon.IRCInterface import IRCInterface
-
-
-class XMPPInterface(IRCInterface, JabberClient):
- __name = "XMPPInterface"
- __type = "addon"
- __version = "0.11"
-
-                ("jid"      , "str" , "Jabber ID"                           , "user@example-jabber-server.org"        ),
- ("pw" , "str" , "Password" , "" ),
- ("tls" , "bool", "Use TLS" , False ),
- ("owners" , "str" , "List of JIDs accepting commands from", "me@icq-gateway.org;some@msn-gateway.org"),
- ("info_file", "bool", "Inform about every file finished" , False ),
- ("info_pack", "bool", "Inform about every package finished" , True ),
- ("captcha" , "bool", "Send captcha requests" , True )]
-
-    __description = """Connect to Jabber and let the owner perform different tasks"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org")]
-
-
- implements(IMessageHandlersProvider)
-
-
- def __init__(self, core, manager):
- IRCInterface.__init__(self, core, manager)
-
- self.jid = JID(self.getConfig("jid"))
- password = self.getConfig("pw")
-
- # if bare JID is provided add a resource -- it is required
- if not self.jid.resource:
- self.jid = JID(self.jid.node, self.jid.domain, "pyLoad")
-
- if self.getConfig("tls"):
- tls_settings = streamtls.TLSSettings(require=True, verify_peer=False)
- auth = ("sasl:PLAIN", "sasl:DIGEST-MD5")
- else:
- tls_settings = None
- auth = ("sasl:DIGEST-MD5", "digest")
-
- # setup client with provided connection information
- # and identity data
- JabberClient.__init__(self, self.jid, password,
- disco_name="pyLoad XMPP Client", disco_type="bot",
- tls_settings=tls_settings, auth_methods=auth)
-
- self.interface_providers = [
- VersionHandler(self),
- self,
- ]
-
-
- def activate(self):
- self.new_package = {}
-
- self.start()
-
-
- def packageFinished(self, pypack):
- try:
- if self.getConfig("info_pack"):
- self.announce(_("Package finished: %s") % pypack.name)
- except Exception:
- pass
-
-
- def downloadFinished(self, pyfile):
- try:
- if self.getConfig("info_file"):
- self.announce(
- _("Download finished: %(name)s @ %(plugin)s") % {"name": pyfile.name, "plugin": pyfile.pluginname})
- except Exception:
- pass
-
-
- def run(self):
-        # connect to the XMPP server
- self.connect()
- try:
- self.loop()
- except Exception, ex:
- self.logError(ex)
-
-
- def stream_state_changed(self, state, arg):
- """This one is called when the state of stream connecting the component
- to a server changes. This will usually be used to let the user
- know what is going on."""
- self.logDebug("*** State changed: %s %r ***" % (state, arg))
-
-
- def disconnected(self):
- self.logDebug("Client was disconnected")
-
-
- def stream_closed(self, stream):
- self.logDebug("Stream was closed", stream)
-
-
- def stream_error(self, err):
- self.logDebug("Stream Error", err)
-
-
- def get_message_handlers(self):
- """Return list of (message_type, message_handler) tuples.
-
-        The handlers returned will be called when a matching message is received
- in a client session."""
- return [("normal", self.message)]
-
-
- def message(self, stanza):
- """Message handler for the component."""
- subject = stanza.get_subject()
- body = stanza.get_body()
- t = stanza.get_type()
- self.logDebug("Message from %s received." % unicode(stanza.get_from()))
- self.logDebug("Body: %s Subject: %s Type: %s" % (body, subject, t))
-
- if t == "headline":
- # 'headline' messages should never be replied to
- return True
- if subject:
- subject = u"Re: " + subject
-
- to_jid = stanza.get_from()
- from_jid = stanza.get_to()
-
- #j = JID()
- to_name = to_jid.as_utf8()
- from_name = from_jid.as_utf8()
-
- names = self.getConfig("owners").split(";")
-
- if to_name in names or to_jid.node + "@" + to_jid.domain in names:
- messages = []
-
- trigger = "pass"
- args = None
-
- try:
- temp = body.split()
- trigger = temp[0]
- if len(temp) > 1:
- args = temp[1:]
- except Exception:
- pass
-
- handler = getattr(self, "event_%s" % trigger, self.event_pass)
- try:
- res = handler(args)
- for line in res:
- m = Message(
- to_jid=to_jid,
- from_jid=from_jid,
- stanza_type=stanza.get_type(),
- subject=subject,
- body=line)
-
- messages.append(m)
- except Exception, e:
- self.logError(e)
-
- return messages
-
- else:
- return True
-
-
- def response(self, msg, origin=""):
- return self.announce(msg)
-
-
- def announce(self, message):
- """ send message to all owners"""
- for user in self.getConfig("owners").split(";"):
- self.logDebug("Send message to", user)
-
- to_jid = JID(user)
-
- m = Message(from_jid=self.jid,
- to_jid=to_jid,
- stanza_type="chat",
- body=message)
-
- stream = self.get_stream()
- if not stream:
- self.connect()
- stream = self.get_stream()
-
- stream.send(m)
-
-
- def beforeReconnecting(self, ip):
- self.disconnect()
-
-
- def afterReconnecting(self, ip):
- self.connect()
-
-
-class VersionHandler(object):
- """Provides handler for a version query.
-
- This class will answer version query and announce 'jabber:iq:version' namespace
- in the client's disco#info results."""
-
- implements(IIqHandlersProvider, IFeaturesProvider)
-
-
- def __init__(self, client):
- """Just remember who created this."""
- self.client = client
-
-
- def get_features(self):
- """Return namespace which should the client include in its reply to a
- disco#info query."""
- return ["jabber:iq:version"]
-
-
- def get_iq_get_handlers(self):
- """Return list of tuples (element_name, namespace, handler) describing
- handlers of <iq type='get'/> stanzas"""
- return [("query", "jabber:iq:version", self.get_version)]
-
-
- def get_iq_set_handlers(self):
- """Return empty list, as this class provides no <iq type='set'/> stanza handler."""
- return []
-
-
- def get_version(self, iq):
- """Handler for jabber:iq:version queries.
-
- jabber:iq:version queries are not supported directly by PyXMPP, so the
- XML node is accessed directly through the libxml2 API. This should be
- used very carefully!"""
- iq = iq.make_result_response()
- q = iq.new_query("jabber:iq:version")
- q.newTextChild(q.ns(), "name", "Echo component")
- q.newTextChild(q.ns(), "version", "1.0")
- return iq
diff --git a/pyload/plugins/captcha/AdYouLike.py b/pyload/plugins/captcha/AdYouLike.py
deleted file mode 100644
index be688e65d..000000000
--- a/pyload/plugins/captcha/AdYouLike.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Captcha import Captcha
-from pyload.utils import json_loads
-
-
-class AdYouLike(Captcha):
- __name = "AdYouLike"
- __type = "captcha"
- __version = "0.02"
-
- __description = """AdYouLike captcha service plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- AYL_PATTERN = r'Adyoulike\.create\s*\((.+?)\)'
- CALLBACK_PATTERN = r'(Adyoulike\.g\._jsonp_\d+)'
-
-
- def detect_key(self, html=None):
- if not html:
- if hasattr(self.plugin, "html") and self.plugin.html:
- html = self.plugin.html
- else:
- errmsg = _("AdYouLike html not found")
- self.plugin.fail(errmsg)
- raise TypeError(errmsg)
-
- m = re.search(self.AYL_PATTERN, html)
- n = re.search(self.CALLBACK_PATTERN, html)
- if m and n:
- self.key = (m.group(1).strip(), n.group(1).strip())
- self.plugin.logDebug("AdYouLike ayl|callback: %s | %s" % self.key)
- return self.key #: key is the tuple(ayl, callback)
- else:
- self.plugin.logDebug("AdYouLike ayl or callback not found")
- return None
-
-
- def challenge(self, key=None):
- if not key:
- if self.detect_key():
- key = self.key
- else:
- errmsg = _("AdYouLike key not found")
- self.plugin.fail(errmsg)
- raise TypeError(errmsg)
-
- ayl, callback = key
-
- # {"adyoulike":{"key":"P~zQ~O0zV0WTiAzC-iw0navWQpCLoYEP"},
- # "all":{"element_id":"ayl_private_cap_92300","lang":"fr","env":"prod"}}
- ayl = json_loads(ayl)
-
- html = self.plugin.req.load("http://api-ayl.appspot.com/challenge",
- get={'key' : ayl['adyoulike']['key'],
- 'env' : ayl['all']['env'],
- 'callback': callback})
- try:
- challenge = json_loads(re.search(callback + r'\s*\((.+?)\)', html).group(1))
- except Exception:
- errmsg = _("AdYouLike challenge pattern not found")
- self.plugin.error(errmsg)
- raise ValueError(errmsg)
-
- self.plugin.logDebug("AdYouLike challenge: %s" % challenge)
-
- return self.result(ayl, challenge)
-
-
- def result(self, server, challenge):
- # Adyoulike.g._jsonp_5579316662423138
- # ({"translations":{"fr":{"instructions_visual":"Recopiez « Soonnight » ci-dessous :"}},
- # "site_under":true,"clickable":true,"pixels":{"VIDEO_050":[],"DISPLAY":[],"VIDEO_000":[],"VIDEO_100":[],
- # "VIDEO_025":[],"VIDEO_075":[]},"medium_type":"image/adyoulike",
- # "iframes":{"big":"<iframe src=\"http://www.soonnight.com/campagn.html\" scrolling=\"no\"
- # height=\"250\" width=\"300\" frameborder=\"0\"></iframe>"},"shares":{},"id":256,
- # "token":"e6QuI4aRSnbIZJg02IsV6cp4JQ9~MjA1","formats":{"small":{"y":300,"x":0,"w":300,"h":60},
- # "big":{"y":0,"x":0,"w":300,"h":250},"hover":{"y":440,"x":0,"w":300,"h":60}},
- # "tid":"SqwuAdxT1EZoi4B5q0T63LN2AkiCJBg5"})
-
- if isinstance(server, basestring):
- server = json_loads(server)
-
- if isinstance(challenge, basestring):
- challenge = json_loads(challenge)
-
- try:
- instructions_visual = challenge['translations'][server['all']['lang']]['instructions_visual']
- result = re.search(u'«(.+?)»', instructions_visual).group(1).strip()
- except Exception:
- errmsg = _("AdYouLike result not found")
- self.plugin.error(errmsg)
- raise ValueError(errmsg)
-
-        result = {'_ayl_captcha_engine' : "adyoulike",
-                  '_ayl_env' : server['all']['env'],
-                  '_ayl_tid' : challenge['tid'],
-                  '_ayl_token_challenge': challenge['token'],
-                  '_ayl_response' : result}
-
- self.plugin.logDebug("AdYouLike result: %s" % result)
-
- return result
diff --git a/pyload/plugins/captcha/AdsCaptcha.py b/pyload/plugins/captcha/AdsCaptcha.py
deleted file mode 100644
index 8655e4f7b..000000000
--- a/pyload/plugins/captcha/AdsCaptcha.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from random import random
-
-from pyload.plugins.Captcha import Captcha
-
-
-class AdsCaptcha(Captcha):
- __name = "AdsCaptcha"
- __type = "captcha"
- __version = "0.06"
-
- __description = """AdsCaptcha captcha service plugin"""
- __license = "GPLv3"
- __authors = [("pyLoad Team", "admin@pyload.org")]
-
-
- CAPTCHAID_PATTERN = r'api\.adscaptcha\.com/Get\.aspx\?[^"\']*CaptchaId=(\d+)'
- PUBLICKEY_PATTERN = r'api\.adscaptcha\.com/Get\.aspx\?[^"\']*PublicKey=([\w-]+)'
-
-
- def detect_key(self, html=None):
- if not html:
- if hasattr(self.plugin, "html") and self.plugin.html:
- html = self.plugin.html
- else:
- errmsg = _("AdsCaptcha html not found")
- self.plugin.fail(errmsg)
- raise TypeError(errmsg)
-
- m = re.search(self.PUBLICKEY_PATTERN, html)
- n = re.search(self.CAPTCHAID_PATTERN, html)
- if m and n:
- self.key = (m.group(1).strip(), n.group(1).strip()) #: key is the tuple(PublicKey, CaptchaId)
- self.plugin.logDebug("AdsCaptcha key|id: %s | %s" % self.key)
- return self.key
- else:
- self.plugin.logDebug("AdsCaptcha key or id not found")
- return None
-
-
- def challenge(self, key=None):
- if not key:
- if self.detect_key():
- key = self.key
- else:
- errmsg = _("AdsCaptcha key not found")
- self.plugin.fail(errmsg)
- raise TypeError(errmsg)
-
- PublicKey, CaptchaId = key
-
- html = self.plugin.req.load("http://api.adscaptcha.com/Get.aspx", get={'CaptchaId': CaptchaId, 'PublicKey': PublicKey})
- try:
- challenge = re.search("challenge: '(.+?)',", html).group(1)
- server = re.search("server: '(.+?)',", html).group(1)
- except Exception:
- errmsg = _("AdsCaptcha challenge pattern not found")
- self.plugin.error(errmsg)
- raise ValueError(errmsg)
-
- self.plugin.logDebug("AdsCaptcha challenge: %s" % challenge)
-
- return challenge, self.result(server, challenge)
-
-
- def result(self, server, challenge):
- result = self.plugin.decryptCaptcha("%sChallenge.aspx" % server,
- get={'cid': challenge, 'dummy': random()},
- cookies=True,
- imgtype="jpg")
-
- self.plugin.logDebug("AdsCaptcha result: %s" % result)
-
- return result
diff --git a/pyload/plugins/captcha/ReCaptcha.py b/pyload/plugins/captcha/ReCaptcha.py
deleted file mode 100644
index e12aba36d..000000000
--- a/pyload/plugins/captcha/ReCaptcha.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Captcha import Captcha
-
-
-class ReCaptcha(Captcha):
- __name = "ReCaptcha"
- __type = "captcha"
- __version = "0.08"
-
- __description = """ReCaptcha captcha service plugin"""
- __license = "GPLv3"
- __authors = [("pyLoad Team", "admin@pyload.org")]
-
-
- KEY_PATTERN = r'recaptcha(?:/api|\.net)/(?:challenge|noscript)\?k=([\w-]+)'
- KEY_AJAX_PATTERN = r'Recaptcha\.create\s*\(\s*["\']([\w-]+)'
-
-
- def detect_key(self, html=None):
- if not html:
- if hasattr(self.plugin, "html") and self.plugin.html:
- html = self.plugin.html
- else:
- errmsg = _("ReCaptcha html not found")
- self.plugin.fail(errmsg)
- raise TypeError(errmsg)
-
- m = re.search(self.KEY_PATTERN, html) or re.search(self.KEY_AJAX_PATTERN, html)
- if m:
- self.key = m.group(1).strip()
- self.plugin.logDebug("ReCaptcha key: %s" % self.key)
- return self.key
- else:
- self.plugin.logDebug("ReCaptcha key not found")
- return None
-
-
- def challenge(self, key=None):
- if not key:
- if self.detect_key():
- key = self.key
- else:
- errmsg = _("ReCaptcha key not found")
- self.plugin.fail(errmsg)
- raise TypeError(errmsg)
-
- html = self.plugin.req.load("http://www.google.com/recaptcha/api/challenge", get={'k': key})
- try:
- challenge = re.search("challenge : '(.+?)',", html).group(1)
- server = re.search("server : '(.+?)',", html).group(1)
- except Exception:
- errmsg = _("ReCaptcha challenge pattern not found")
- self.plugin.error(errmsg)
- raise ValueError(errmsg)
-
- self.plugin.logDebug("ReCaptcha challenge: %s" % challenge)
-
- return challenge, self.result(server, challenge)
-
-
- def result(self, server, challenge):
- result = self.plugin.decryptCaptcha("%simage" % server,
- get={'c': challenge},
- cookies=True,
- forceUser=True,
- imgtype="jpg")
-
- self.plugin.logDebug("ReCaptcha result: %s" % result)
-
- return result
diff --git a/pyload/plugins/captcha/SolveMedia.py b/pyload/plugins/captcha/SolveMedia.py
deleted file mode 100644
index cc48c801b..000000000
--- a/pyload/plugins/captcha/SolveMedia.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Captcha import Captcha
-
-
-class SolveMedia(Captcha):
- __name = "SolveMedia"
- __type = "captcha"
- __version = "0.06"
-
- __description = """SolveMedia captcha service plugin"""
- __license = "GPLv3"
- __authors = [("pyLoad Team", "admin@pyload.org")]
-
-
- KEY_PATTERN = r'api\.solvemedia\.com/papi/challenge\.(?:no)?script\?k=(.+?)["\']'
-
-
- def challenge(self, key=None):
- if not key:
- if self.detect_key():
- key = self.key
- else:
- errmsg = _("SolveMedia key not found")
- self.plugin.fail(errmsg)
- raise TypeError(errmsg)
-
- html = self.plugin.req.load("http://api.solvemedia.com/papi/challenge.noscript", get={'k': key})
- try:
- challenge = re.search(r'<input type=hidden name="adcopy_challenge" id="adcopy_challenge" value="([^"]+)">',
- html).group(1)
- server = "http://api.solvemedia.com/papi/media"
- except Exception:
- errmsg = _("SolveMedia challenge pattern not found")
- self.plugin.error(errmsg)
- raise ValueError(errmsg)
-
- self.plugin.logDebug("SolveMedia challenge: %s" % challenge)
-
- return challenge, self.result(server, challenge)
-
-
- def result(self, server, challenge):
- result = self.plugin.decryptCaptcha(server, get={'c': challenge}, imgtype="gif")
-
- self.plugin.logDebug("SolveMedia result: %s" % result)
-
- return result
diff --git a/pyload/plugins/container/CCF.py b/pyload/plugins/container/CCF.py
deleted file mode 100644
index 9488d75f9..000000000
--- a/pyload/plugins/container/CCF.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import re
-
-from os import makedirs
-from os.path import exists
-from urllib2 import build_opener
-
-from MultipartPostHandler import MultipartPostHandler
-
-from pyload.plugins.Container import Container
-from pyload.utils import safe_join
-
-
-class CCF(Container):
- __name = "CCF"
- __version = "0.20"
-
- __pattern = r'.+\.ccf'
-
- __description = """CCF container decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Willnix", "Willnix@pyload.org")]
-
-
- def decrypt(self, pyfile):
- infile = pyfile.url.replace("\n", "")
-
- opener = build_opener(MultipartPostHandler)
- params = {"src": "ccf",
- "filename": "test.ccf",
- "upload": open(infile, "rb")}
- tempdlc_content = opener.open('http://service.jdownloader.net/dlcrypt/getDLC.php', params).read()
-
- download_folder = self.config['general']['download_folder']
-
- tempdlc_name = safe_join(download_folder, "tmp_%s.dlc" % pyfile.name)
- with open(tempdlc_name, "w") as tempdlc:
- tempdlc.write(re.search(r'<dlc>(.*)</dlc>', tempdlc_content, re.S).group(1))
-
- self.urls = [tempdlc_name]
diff --git a/pyload/plugins/container/LinkList.py b/pyload/plugins/container/LinkList.py
deleted file mode 100644
index f80eecd4d..000000000
--- a/pyload/plugins/container/LinkList.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import codecs
-
-from pyload.plugins.Container import Container
-from pyload.utils import fs_encode
-
-
-class LinkList(Container):
- __name = "LinkList"
- __version = "0.12"
-
- __pattern = r'.+\.txt'
- __config = [("clear", "bool", "Clear Linklist after adding", False),
- ("encoding", "string", "File encoding (default utf-8)", "")]
-
- __description = """Read link lists in txt format"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org"),
- ("jeix", "jeix@hasnomail.com")]
-
-
- def decrypt(self, pyfile):
- try:
- file_enc = codecs.lookup(self.getConfig("encoding")).name
- except Exception:
- file_enc = "utf-8"
-
- file_name = fs_encode(pyfile.url)
-
- txt = codecs.open(file_name, 'r', file_enc)
- links = txt.readlines()
- curPack = "Parsed links from %s" % pyfile.name
-
-        packages = {curPack: []}
-
- for link in links:
- link = link.strip()
- if not link:
- continue
-
- if link.startswith(";"):
- continue
- if link.startswith("[") and link.endswith("]"):
- # new package
- curPack = link[1:-1]
- packages[curPack] = []
- continue
- packages[curPack].append(link)
- txt.close()
-
- # empty packages fix
-
- delete = []
-
- for key,value in packages.iteritems():
- if not value:
- delete.append(key)
-
- for key in delete:
- del packages[key]
-
- if self.getConfig("clear"):
- try:
- txt = open(file_name, 'wb')
- txt.close()
- except Exception:
- self.logWarning(_("LinkList could not be cleared"))
-
- for name, links in packages.iteritems():
- self.packages.append((name, links, name))
diff --git a/pyload/plugins/container/RSDF.py b/pyload/plugins/container/RSDF.py
deleted file mode 100644
index 001b64bbc..000000000
--- a/pyload/plugins/container/RSDF.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import base64
-import binascii
-import re
-
-from pyload.plugins.Container import Container
-from pyload.utils import fs_encode
-
-
-class RSDF(Container):
- __name = "RSDF"
- __version = "0.24"
-
- __pattern = r'.+\.rsdf'
-
- __description = """RSDF container decrypter plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("spoob", "spoob@pyload.org")]
-
-
- def decrypt(self, pyfile):
-
- from Crypto.Cipher import AES
-
- infile = fs_encode(pyfile.url.replace("\n", ""))
- Key = binascii.unhexlify('8C35192D964DC3182C6F84F3252239EB4A320D2500000000')
-
- IV = binascii.unhexlify('FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF')
- IV_Cipher = AES.new(Key, AES.MODE_ECB)
- IV = IV_Cipher.encrypt(IV)
-
- obj = AES.new(Key, AES.MODE_CFB, IV)
-
- try:
- with open(infile, 'r') as rsdf:
- data = rsdf.read()
- except IOError, e:
- self.fail(str(e))
-
- if re.search(r"<title>404 - Not Found</title>", data) is None:
- data = binascii.unhexlify(''.join(data.split()))
- data = data.splitlines()
-
- for link in data:
- if not link:
- continue
- link = base64.b64decode(link)
- link = obj.decrypt(link)
- decryptedUrl = link.replace('CCF: ', '')
- self.urls.append(decryptedUrl)
-
- self.logDebug("Adding package %s with %d links" % (pyfile.package().name, len(self.urls)))
diff --git a/pyload/plugins/crypter/BitshareCom.py b/pyload/plugins/crypter/BitshareCom.py
deleted file mode 100644
index 226cd043f..000000000
--- a/pyload/plugins/crypter/BitshareCom.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class BitshareCom(SimpleCrypter):
- __name = "BitshareCom"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?bitshare\.com/\?d=\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Bitshare.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- LINK_PATTERN = r'<a href="(http://bitshare\.com/files/.+)">.+</a></td>'
- NAME_PATTERN = r'View public folder "(?P<N>.+)"</h1>'
diff --git a/pyload/plugins/crypter/C1neonCom.py b/pyload/plugins/crypter/C1neonCom.py
deleted file mode 100644
index 39eac5ee7..000000000
--- a/pyload/plugins/crypter/C1neonCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class C1neonCom(DeadCrypter):
- __name = "C1neonCom"
- __type = "crypter"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?c1neon\.com/.*?'
- __config = []
-
- __description = """C1neon.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("godofdream", "soilfiction@gmail.com")]
-
-
-getInfo = create_getInfo(C1neonCom)
diff --git a/pyload/plugins/crypter/ChipDe.py b/pyload/plugins/crypter/ChipDe.py
deleted file mode 100644
index 6bb879f35..000000000
--- a/pyload/plugins/crypter/ChipDe.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from pyload.plugins.Crypter import Crypter
-
-
-class ChipDe(Crypter):
- __name = "ChipDe"
- __type = "crypter"
- __version = "0.10"
-
- __pattern = r'http://(?:www\.)?chip\.de/video/.*\.html'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Chip.de decrypter plugin"""
- __license = "GPLv3"
- __authors = [("4Christopher", "4Christopher@gmx.de")]
-
-
- def decrypt(self, pyfile):
- self.html = self.load(pyfile.url)
- try:
- f = re.search(r'"(http://video\.chip\.de/.+)"', self.html)
- except Exception:
- self.fail(_("Failed to find the URL"))
- else:
- self.urls = [f.group(1)]
- self.logDebug("The file URL is %s" % self.urls[0])
diff --git a/pyload/plugins/crypter/CrockoCom.py b/pyload/plugins/crypter/CrockoCom.py
deleted file mode 100644
index c8268173a..000000000
--- a/pyload/plugins/crypter/CrockoCom.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class CrockoCom(SimpleCrypter):
- __name = "CrockoCom"
- __type = "crypter"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?crocko\.com/f/.*'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Crocko.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- LINK_PATTERN = r'<td class="last"><a href="([^"]+)">download</a>'
diff --git a/pyload/plugins/crypter/CryptItCom.py b/pyload/plugins/crypter/CryptItCom.py
deleted file mode 100644
index c82cb3f0a..000000000
--- a/pyload/plugins/crypter/CryptItCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class CryptItCom(DeadCrypter):
- __name = "CryptItCom"
- __type = "crypter"
- __version = "0.11"
-
- __pattern = r'http://(?:www\.)?crypt-it\.com/(s|e|d|c)/\w+'
- __config = []
-
- __description = """Crypt-it.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de")]
-
-
-getInfo = create_getInfo(CryptItCom)
diff --git a/pyload/plugins/crypter/CzshareCom.py b/pyload/plugins/crypter/CzshareCom.py
deleted file mode 100644
index 4b8646fb6..000000000
--- a/pyload/plugins/crypter/CzshareCom.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from pyload.plugins.Crypter import Crypter
-
-
-class CzshareCom(Crypter):
- __name = "CzshareCom"
- __type = "crypter"
- __version = "0.20"
-
- __pattern = r'http://(?:www\.)?(czshare|sdilej)\.(com|cz)/folders/.*'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Czshare.com folder decrypter plugin, now Sdilej.cz"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- FOLDER_PATTERN = r'<tr class="subdirectory">\s*<td>\s*<table>(.*?)</table>'
- LINK_PATTERN = r'<td class="col2"><a href="([^"]+)">info</a></td>'
-
-
- def decrypt(self, pyfile):
- html = self.load(pyfile.url)
-
- m = re.search(self.FOLDER_PATTERN, html, re.S)
- if m is None:
- self.error(_("FOLDER_PATTERN not found"))
-
- self.urls.extend(re.findall(self.LINK_PATTERN, m.group(1)))
diff --git a/pyload/plugins/crypter/DDLMusicOrg.py b/pyload/plugins/crypter/DDLMusicOrg.py
deleted file mode 100644
index 6500fe5e6..000000000
--- a/pyload/plugins/crypter/DDLMusicOrg.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import sleep
-
-from pyload.plugins.Crypter import Crypter
-
-
-class DDLMusicOrg(Crypter):
- __name = "DDLMusicOrg"
- __type = "crypter"
- __version = "0.30"
-
- __pattern = r'http://(?:www\.)?ddl-music\.org/captcha/ddlm_cr\d\.php\?\d+\?\d+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Ddl-music.org decrypter plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
- def setup(self):
- self.multiDL = False
-
-
- def decrypt(self, pyfile):
- html = self.load(pyfile.url, cookies=True)
-
- if re.search(r"Wer dies nicht rechnen kann", html) is not None:
- self.offline()
-
-        math = re.search(r"(\d+) ([+-]) (\d+) =\s+<inp", html)
-        id = re.search(r"name=\"id\" value=\"(\d+)\"", html).group(1)
-        linknr = re.search(r"name=\"linknr\" value=\"(\d+)\"", html).group(1)
-
- solve = ""
- if math.group(2) == "+":
- solve = int(math.group(1)) + int(math.group(3))
- else:
- solve = int(math.group(1)) - int(math.group(3))
- sleep(3)
- htmlwithlink = self.load(pyfile.url, cookies=True,
- post={"calc%s" % linknr: solve, "send%s" % linknr: "Send", "id": id,
- "linknr": linknr})
- m = re.search(r"<form id=\"ff\" action=\"(.*?)\" method=\"post\">", htmlwithlink)
- if m:
- self.urls = [m.group(1)]
- else:
- self.retry()
diff --git a/pyload/plugins/crypter/DailymotionBatch.py b/pyload/plugins/crypter/DailymotionBatch.py
deleted file mode 100644
index 3ea59bc75..000000000
--- a/pyload/plugins/crypter/DailymotionBatch.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-
-from pyload.utils import json_loads
-from pyload.plugins.Crypter import Crypter
-from pyload.utils import safe_join
-
-
-class DailymotionBatch(Crypter):
- __name = "DailymotionBatch"
- __type = "crypter"
- __version = "0.01"
-
- __pattern = r'https?://(?:www\.)?dailymotion\.com/((playlists/)?(?P<TYPE>playlist|user)/)?(?P<ID>[\w^_]+)(?(TYPE)|#)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Dailymotion.com channel & playlist decrypter"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- def api_response(self, ref, req=None):
- url = urljoin("https://api.dailymotion.com/", ref)
- page = self.load(url, get=req)
- return json_loads(page)
-
-
- def getPlaylistInfo(self, id):
- ref = "playlist/" + id
- req = {"fields": "name,owner.screenname"}
- playlist = self.api_response(ref, req)
-
- if "error" in playlist:
- return
-
- name = playlist['name']
- owner = playlist['owner.screenname']
- return name, owner
-
-
- def _getPlaylists(self, user_id, page=1):
- ref = "user/%s/playlists" % user_id
- req = {"fields": "id", "page": page, "limit": 100}
- user = self.api_response(ref, req)
-
- if "error" in user:
- return
-
- for playlist in user['list']:
- yield playlist['id']
-
- if user['has_more']:
- for item in self._getPlaylists(user_id, page + 1):
- yield item
-
-
- def getPlaylists(self, user_id):
- return [(id,) + self.getPlaylistInfo(id) for id in self._getPlaylists(user_id)]
-
-
- def _getVideos(self, id, page=1):
- ref = "playlist/%s/videos" % id
- req = {"fields": "url", "page": page, "limit": 100}
- playlist = self.api_response(ref, req)
-
- if "error" in playlist:
- return
-
- for video in playlist['list']:
- yield video['url']
-
- if playlist['has_more']:
- for item in self._getVideos(id, page + 1):
- yield item
-
-
- def getVideos(self, playlist_id):
- return list(self._getVideos(playlist_id))[::-1]
-
-
- def decrypt(self, pyfile):
- m = re.match(self.__pattern, pyfile.url)
- m_id = m.group("ID")
- m_type = m.group("TYPE")
-
- if m_type == "playlist":
- self.logDebug("Url recognized as Playlist")
- p_info = self.getPlaylistInfo(m_id)
- playlists = [(m_id,) + p_info] if p_info else None
- else:
- self.logDebug("Url recognized as Channel")
- playlists = self.getPlaylists(m_id)
- self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), m_id))
-
- if not playlists:
- self.fail(_("No playlist available"))
-
- for p_id, p_name, p_owner in playlists:
- p_videos = self.getVideos(p_id)
- p_folder = safe_join(self.config['general']['download_folder'], p_owner, p_name)
- self.logDebug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))
- self.packages.append((p_name, p_videos, p_folder)) #: folder is NOT recognized by pyload 0.4.9!
diff --git a/pyload/plugins/crypter/DataHu.py b/pyload/plugins/crypter/DataHu.py
deleted file mode 100644
index 96129d53a..000000000
--- a/pyload/plugins/crypter/DataHu.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class DataHu(SimpleCrypter):
- __name = "DataHu"
- __type = "crypter"
- __version = "0.06"
-
- __pattern = r'http://(?:www\.)?data\.hu/dir/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Data.hu folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("crash", ""),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- LINK_PATTERN = r'<a href=\'(http://data\.hu/get/.+)\' target=\'_blank\'>\1</a>'
- NAME_PATTERN = ur'<title>(?P<N>.+) Let\xf6lt\xe9se</title>'
-
-
- def prepare(self):
- super(DataHu, self).prepare()
-
- if u'K\xe9rlek add meg a jelsz\xf3t' in self.html: # Password protected
- password = self.getPassword()
- if not password:
- self.fail(_("Password required"))
-
- self.logDebug("The folder is password protected', 'Using password: " + password)
-
- self.html = self.load(self.pyfile.url, post={'mappa_pass': password}, decode=True)
-
- if u'Hib\xe1s jelsz\xf3' in self.html: # Wrong password
- self.fail(_("Wrong password"))
diff --git a/pyload/plugins/crypter/DdlstorageCom.py b/pyload/plugins/crypter/DdlstorageCom.py
deleted file mode 100644
index a7b69da1d..000000000
--- a/pyload/plugins/crypter/DdlstorageCom.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class DdlstorageCom(DeadCrypter):
- __name = "DdlstorageCom"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'https?://(?:www\.)?ddlstorage\.com/folder/\w+'
- __config = []
-
- __description = """DDLStorage.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("godofdream", "soilfiction@gmail.com"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(DdlstorageCom)
diff --git a/pyload/plugins/crypter/DepositfilesCom.py b/pyload/plugins/crypter/DepositfilesCom.py
deleted file mode 100644
index c2aa14239..000000000
--- a/pyload/plugins/crypter/DepositfilesCom.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class DepositfilesCom(SimpleCrypter):
- __name = "DepositfilesCom"
- __type = "crypter"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?depositfiles\.com/folders/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Depositfiles.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- LINK_PATTERN = r'<div class="progressName"[^>]*>\s*<a href="([^"]+)" title="[^"]*" target="_blank">'
diff --git a/pyload/plugins/crypter/Dereferer.py b/pyload/plugins/crypter/Dereferer.py
deleted file mode 100644
index 325bfa912..000000000
--- a/pyload/plugins/crypter/Dereferer.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urllib import unquote
-
-from pyload.plugins.Crypter import Crypter
-
-
-class Dereferer(Crypter):
- __name = "Dereferer"
- __type = "crypter"
- __version = "0.10"
-
- __pattern = r'https?://([^/]+)/.*?(?P<url>(ht|f)tps?(://|%3A%2F%2F).*)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Crypter for dereferers"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def decrypt(self, pyfile):
- link = re.match(self.__pattern, pyfile.url).group('url')
- self.urls = [unquote(link).rstrip('+')]
diff --git a/pyload/plugins/crypter/DevhostStFolder.py b/pyload/plugins/crypter/DevhostStFolder.py
deleted file mode 100644
index 942dc6b59..000000000
--- a/pyload/plugins/crypter/DevhostStFolder.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://d-h.st/users/shine/?fld_id=37263#files
-
-import re
-
-from urlparse import urljoin
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class DevhostStFolder(SimpleCrypter):
- __name = "DevhostStFolder"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?d-h\.st/users/(?P<USER>\w+)(/\?fld_id=(?P<ID>\d+))?'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """d-h.st folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- LINK_PATTERN = r'(?:/> |;">)<a href="(.+?)"(?!>Back to \w+<)'
- OFFLINE_PATTERN = r'"/cHP">test\.png<'
-
-
- def getFileInfo(self):
- if re.search(self.OFFLINE_PATTERN, self.html):
- self.offline()
-
- try:
- id = re.match(self.__pattern, self.pyfile.url).group('ID')
- if id == "0":
- raise
-
- p = r'href="(.+?)">Back to \w+<'
- m = re.search(p, self.html)
- html = self.load(urljoin("http://d-h.st", m.group(1)),
- cookies=False)
-
-            p = r'\?fld_id=%s.*?">(.+?)<' % id
- m = re.search(p, html)
- name = folder = m.group(1)
-
- except Exception, e:
- self.logDebug(e)
- name = folder = re.match(self.__pattern, self.pyfile.url).group('USER')
-
- return {'name': name, 'folder': folder}
-
-
- def getLinks(self):
- return [urljoin("http://d-h.st", link) for link in re.findall(self.LINK_PATTERN, self.html)]
diff --git a/pyload/plugins/crypter/DlProtectCom.py b/pyload/plugins/crypter/DlProtectCom.py
deleted file mode 100644
index b386caa3f..000000000
--- a/pyload/plugins/crypter/DlProtectCom.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from base64 import urlsafe_b64encode
-from time import time
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class DlProtectCom(SimpleCrypter):
- __name = "DlProtectCom"
- __type = "crypter"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?dl-protect\.com/((en|fr)/)?(?P<ID>\w+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Dl-protect.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- OFFLINE_PATTERN = r'>Unfortunately, the link you are looking for is not found'
-
-
- def getLinks(self):
- # Direct link with redirect
- if not re.match(r"http://(?:www\.)?dl-protect\.com", self.req.http.lastEffectiveURL):
- return [self.req.http.lastEffectiveURL]
-
- #id = re.match(self.__pattern, self.pyfile.url).group("ID")
- key = re.search(r'name="id_key" value="(.+?)"', self.html).group(1)
-
- post_req = {"id_key": key, "submitform": ""}
-
- if self.OFFLINE_PATTERN in self.html:
- self.offline()
- elif ">Please click on continue to see the content" in self.html:
- post_req.update({"submitform": "Continue"})
- else:
- mstime = int(round(time() * 1000))
- b64time = "_" + urlsafe_b64encode(str(mstime)).replace("=", "%3D")
-
- post_req.update({"i": b64time, "submitform": "Decrypt+link"})
-
- if ">Password :" in self.html:
- post_req['pwd'] = self.getPassword()
-
- if ">Security Code" in self.html:
- captcha_id = re.search(r'/captcha\.php\?uid=(.+?)"', self.html).group(1)
- captcha_url = "http://www.dl-protect.com/captcha.php?uid=" + captcha_id
- captcha_code = self.decryptCaptcha(captcha_url, imgtype="gif")
-
- post_req['secure'] = captcha_code
-
- self.html = self.load(self.pyfile.url, post=post_req)
-
- for errmsg in (">The password is incorrect", ">The security code is incorrect"):
- if errmsg in self.html:
- self.fail(_(errmsg[1:]))
-
- pattern = r'<a href="([^/].+?)" target="_blank">'
- return re.findall(pattern, self.html)
diff --git a/pyload/plugins/crypter/DontKnowMe.py b/pyload/plugins/crypter/DontKnowMe.py
deleted file mode 100644
index e40e3292f..000000000
--- a/pyload/plugins/crypter/DontKnowMe.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urllib import unquote
-
-from pyload.plugins.Crypter import Crypter
-
-
-class DontKnowMe(Crypter):
- __name = "DontKnowMe"
- __type = "crypter"
- __version = "0.10"
-
- __pattern = r'http://(?:www\.)?dontknow\.me/at/\?.+$'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """DontKnow.me decrypter plugin"""
- __license = "GPLv3"
- __authors = [("selaux", "")]
-
-
- LINK_PATTERN = r'http://dontknow\.me/at/\?(.+)$'
-
-
- def decrypt(self, pyfile):
- link = re.findall(self.LINK_PATTERN, pyfile.url)[0]
- self.urls = [unquote(link)]
diff --git a/pyload/plugins/crypter/DuckCryptInfo.py b/pyload/plugins/crypter/DuckCryptInfo.py
deleted file mode 100644
index 2188949bb..000000000
--- a/pyload/plugins/crypter/DuckCryptInfo.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from BeautifulSoup import BeautifulSoup
-
-from pyload.plugins.Crypter import Crypter
-
-
-class DuckCryptInfo(Crypter):
- __name = "DuckCryptInfo"
- __type = "crypter"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?duckcrypt\.info/(folder|wait|link)/(\w+)/?(\w*)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """DuckCrypt.info decrypter plugin"""
- __license = "GPLv3"
- __authors = [("godofdream", "soilfiction@gmail.com")]
-
-
- TIMER_PATTERN = r'<span id="timer">(.*)</span>'
-
-
- def decrypt(self, pyfile):
- url = pyfile.url
-
- m = re.match(self.__pattern, url)
- if m is None:
- self.fail(_("Weird error in link"))
- if str(m.group(1)) == "link":
- self.handleLink(url)
- else:
- self.handleFolder(m)
-
-
- def handleFolder(self, m):
- html = self.load("http://duckcrypt.info/ajax/auth.php?hash=" + str(m.group(2)))
- m = re.match(self.__pattern, html)
- self.logDebug("Redirectet to " + str(m.group(0)))
- html = self.load(str(m.group(0)))
- soup = BeautifulSoup(html)
- cryptlinks = soup.findAll("div", attrs={"class": "folderbox"})
- self.logDebug("Redirectet to " + str(cryptlinks))
- if not cryptlinks:
- self.error(_("No link found"))
- for clink in cryptlinks:
- if clink.find("a"):
- self.handleLink(clink.find("a")['href'])
-
-
- def handleLink(self, url):
- html = self.load(url)
- soup = BeautifulSoup(html)
- self.urls = [soup.find("iframe")['src']]
- if not self.urls:
- self.logInfo(_("No link found"))
diff --git a/pyload/plugins/crypter/DuploadOrg.py b/pyload/plugins/crypter/DuploadOrg.py
deleted file mode 100644
index 0365cbe8a..000000000
--- a/pyload/plugins/crypter/DuploadOrg.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class DuploadOrg(DeadCrypter):
- __name = "DuploadOrg"
- __type = "crypter"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?dupload\.org/folder/\d+'
- __config = []
-
- __description = """Dupload.org folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(DuploadOrg)
diff --git a/pyload/plugins/crypter/EasybytezCom.py b/pyload/plugins/crypter/EasybytezCom.py
deleted file mode 100644
index 2f0d29ec7..000000000
--- a/pyload/plugins/crypter/EasybytezCom.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSCrypter import XFSCrypter
-
-
-class EasybytezCom(XFSCrypter):
- __name = "EasybytezCom"
- __type = "crypter"
- __version = "0.10"
-
- __pattern = r'http://(?:www\.)?easybytez\.com/users/\d+/\d+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Easybytez.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- HOSTER_DOMAIN = "easybytez.com"
-
- LOGIN_ACCOUNT = True
diff --git a/pyload/plugins/crypter/EmbeduploadCom.py b/pyload/plugins/crypter/EmbeduploadCom.py
deleted file mode 100644
index 9b37417be..000000000
--- a/pyload/plugins/crypter/EmbeduploadCom.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from pyload.plugins.Crypter import Crypter
-from pyload.network.HTTPRequest import BadHeader
-
-
-class EmbeduploadCom(Crypter):
- __name = "EmbeduploadCom"
- __type = "crypter"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?embedupload\.com/\?d=.*'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True),
- ("preferedHoster", "str", "Prefered hoster list (bar-separated)", "embedupload"),
- ("ignoredHoster", "str", "Ignored hoster list (bar-separated)", "")]
-
- __description = """EmbedUpload.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- LINK_PATTERN = r'<div id="([^"]+)"[^>]*>\s*<a href="([^"]+)" target="_blank" (?:class="DownloadNow"|style="color:red")>'
-
-
- def decrypt(self, pyfile):
- self.html = self.load(pyfile.url, decode=True)
- tmp_links = []
-
- m = re.findall(self.LINK_PATTERN, self.html)
- if m:
- prefered_set = set(self.getConfig("preferedHoster").split('|'))
- prefered_set = map(lambda s: s.lower().split('.')[0], prefered_set)
-
- self.logDebug("PF: %s" % prefered_set)
-
- tmp_links.extend([x[1] for x in m if x[0] in prefered_set])
- self.urls = self.getLocation(tmp_links)
-
- if not self.urls:
- ignored_set = set(self.getConfig("ignoredHoster").split('|'))
- ignored_set = map(lambda s: s.lower().split('.')[0], ignored_set)
-
- self.logDebug("IG: %s" % ignored_set)
-
- tmp_links.extend([x[1] for x in m if x[0] not in ignored_set])
- self.urls = self.getLocation(tmp_links)
-
-
- def getLocation(self, tmp_links):
- new_links = []
- for link in tmp_links:
- try:
- header = self.load(link, just_header=True)
- if 'location' in header:
- new_links.append(header['location'])
- except BadHeader:
- pass
- return new_links
diff --git a/pyload/plugins/crypter/FilebeerInfo.py b/pyload/plugins/crypter/FilebeerInfo.py
deleted file mode 100644
index 51925a3ad..000000000
--- a/pyload/plugins/crypter/FilebeerInfo.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class FilebeerInfo(DeadCrypter):
- __name = "FilebeerInfo"
- __type = "crypter"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?filebeer\.info/(\d+~f).*'
- __config = []
-
- __description = """Filebeer.info folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(FilebeerInfo)
diff --git a/pyload/plugins/crypter/FilecloudIo.py b/pyload/plugins/crypter/FilecloudIo.py
deleted file mode 100644
index a637eefdd..000000000
--- a/pyload/plugins/crypter/FilecloudIo.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class FilecloudIo(SimpleCrypter):
- __name = "FilecloudIo"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'https?://(?:www\.)?(filecloud\.io|ifile\.it)/_\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Filecloud.io folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- LINK_PATTERN = r'href="(http://filecloud\.io/\w+)" title'
- NAME_PATTERN = r'>(?P<N>.+?) - filecloud\.io<'
diff --git a/pyload/plugins/crypter/FilecryptCc.py b/pyload/plugins/crypter/FilecryptCc.py
deleted file mode 100644
index ca81c8fa2..000000000
--- a/pyload/plugins/crypter/FilecryptCc.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import base64
-import binascii
-import re
-
-from Crypto.Cipher import AES
-
-from pyload.plugins.Crypter import Crypter
-
-
-class FilecryptCc(Crypter):
- __name = "FilecryptCc"
- __type = "crypter"
- __version = "0.05"
-
- __pattern = r'https?://(?:www\.)?filecrypt\.cc/Container/\w+'
-
- __description = """Filecrypt.cc decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
-
-
- # URL_REPLACEMENTS = [(r'.html$', ""), (r'$', ".html")] #@TODO: Extend SimpleCrypter
-
- DLC_LINK_PATTERN = r'<button class="dlcdownload" type="button" title="Download \*.dlc" onclick="DownloadDLC\(\'(.+)\'\);"><i></i><span>dlc<'
- WEBLINK_PATTERN = r"openLink.?'([\w_-]*)',"
-
- CAPTCHA_PATTERN = r'<img id="nc" src="(.+?)"'
-
- MIRROR_PAGE_PATTERN = r'"[\w]*" href="(http://filecrypt.cc/Container/\w+\.html\?mirror=\d+)">'
-
-
- def setup(self):
- self.links = []
-
-
- def decrypt(self, pyfile):
- self.html = self.load(pyfile.url, cookies=True)
-
- if "content not found" in self.html:
- self.offline()
-
- self.handlePasswordProtection()
- self.handleCaptcha()
- self.handleMirrorPages()
-
- for handle in (self.handleCNL, self.handleWeblinks, self.handleDlcContainer):
- handle()
- if self.links:
- self.packages = [(pyfile.package().name, self.links, pyfile.package().name)]
- return
-
-
- def handleMirrorPages(self):
- if "mirror=" not in self.siteWithLinks:
- return
-
- mirror = re.findall(self.MIRROR_PAGE_PATTERN, self.siteWithLinks)
-
- self.logInfo(_("Found %d mirrors") % len(mirror))
-
- for i in mirror[1:]:
- self.siteWithLinks = self.siteWithLinks + self.load(i, cookies=True).decode("utf-8", "replace")
-
-
- def handlePasswordProtection(self):
- if '<input type="text" name="password"' not in self.html:
- return
-
- self.logInfo(_("Folder is password protected"))
-
- if not self.pyfile.package().password:
- self.fail(_("Please enter the password in package section and try again"))
-
- self.html = self.load(self.pyfile.url, post={"password": self.password}, cookies=True)
-
-
- def handleCaptcha(self):
- m = re.search(self.CAPTCHA_PATTERN, self.html)
-
- if m:
- self.logDebug("Captcha-URL: %s" % m.group(1))
- captcha_code = self.decryptCaptcha("http://filecrypt.cc" + m.group(1), forceUser=True, imgtype="gif")
- self.siteWithLinks = self.load(self.pyfile.url, post={"recaptcha_response_field":captcha_code}, decode=True, cookies=True)
- else:
- self.logDebug("No captcha found")
- self.siteWithLinks = self.html
-
- if "recaptcha_response_field" in self.siteWithLinks:
- self.invalidCaptcha()
- self.retry()
-
-
- def handleDlcContainer(self):
- dlc = re.findall(self.DLC_LINK_PATTERN, self.siteWithLinks)
-
- if not dlc:
- return
-
- for i in dlc:
- self.links.append("http://filecrypt.cc/DLC/%s.dlc" % i)
-
-
- def handleWeblinks(self):
- try:
- weblinks = re.findall(self.WEBLINK_PATTERN, self.siteWithLinks)
-
- for link in weblinks:
- res = self.load("http://filecrypt.cc/Link/%s.html" % link, cookies=True)
- link2 = re.search('<iframe noresize src="(.*)"></iframe>', res)
- res2 = self.load(link2.group(1), just_header=True, cookies=True)
- self.links.append(res2['location'])
-
- except Exception, e:
- self.logDebug("Error decrypting weblinks: %s" % e)
-
-
- def handleCNL(self):
- try:
- vjk = re.findall('<input type="hidden" name="jk" value="function f\(\){ return \'(.*)\';}">', self.siteWithLinks)
- vcrypted = re.findall('<input type="hidden" name="crypted" value="(.*)">', self.siteWithLinks)
-
- for i in xrange(len(vcrypted)):
- self.links.extend(self._getLinks(vcrypted[i], vjk[i]))
-
- except Exception, e:
- self.logDebug("Error decrypting CNL: %s" % e)
-
-
- def _getLinks(self, crypted, jk):
- # Get key
- key = binascii.unhexlify(str(jk))
-
- # Decode crypted
- crypted = base64.standard_b64decode(crypted)
-
- # Decrypt
- Key = key
- IV = key
- obj = AES.new(Key, AES.MODE_CBC, IV)
- text = obj.decrypt(crypted)
-
- # Extract links
- links = filter(lambda x: x != "",
- text.replace("\x00", "").replace("\r", "").split("\n"))
-
- return links
diff --git a/pyload/plugins/crypter/FilefactoryCom.py b/pyload/plugins/crypter/FilefactoryCom.py
deleted file mode 100644
index 21f958fa4..000000000
--- a/pyload/plugins/crypter/FilefactoryCom.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class FilefactoryCom(SimpleCrypter):
- __name = "FilefactoryCom"
- __type = "crypter"
- __version = "0.31"
-
- __pattern = r'https?://(?:www\.)?filefactory\.com/(?:f|folder)/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Filefactory.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- LINK_PATTERN = r'<td><a href="([^"]+)">'
- NAME_PATTERN = r'<h1>Files in <span>(?P<N>.+)</span></h1>'
- PAGES_PATTERN = r'data-paginator-totalPages="(\d+)"'
-
- COOKIES = [("filefactory.com", "locale", "en_US.utf8")]
-
-
- def loadPage(self, page_n):
- return self.load(self.pyfile.url, get={'page': page_n})
diff --git a/pyload/plugins/crypter/FilerNet.py b/pyload/plugins/crypter/FilerNet.py
deleted file mode 100644
index d6f2b48de..000000000
--- a/pyload/plugins/crypter/FilerNet.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import re
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class FilerNet(SimpleCrypter):
- __name = "FilerNet"
- __type = "crypter"
- __version = "0.41"
-
- __pattern = r'https?://filer\.net/folder/\w{16}'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Filer.net decrypter plugin"""
- __license = "GPLv3"
- __authors = [("nath_schwarz", "nathan.notwhite@gmail.com"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- LINK_PATTERN = r'href="(/get/\w{16})">(?!<)'
- NAME_PATTERN = r'<h3>(?P<N>.+?) - <small'
-
-
- def getLinks(self):
- return ['http://filer.net%s' % link for link in re.findall(self.LINK_PATTERN, self.html)]
diff --git a/pyload/plugins/crypter/FileserveCom.py b/pyload/plugins/crypter/FileserveCom.py
deleted file mode 100644
index 17753c38c..000000000
--- a/pyload/plugins/crypter/FileserveCom.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Crypter import Crypter
-
-
-class FileserveCom(Crypter):
- __name = "FileserveCom"
- __type = "crypter"
- __version = "0.11"
-
- __pattern = r'http://(?:www\.)?fileserve\.com/list/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """FileServe.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("fionnc", "fionnc@gmail.com")]
-
-
- FOLDER_PATTERN = r'<table class="file_list">(.*?)</table>'
- LINK_PATTERN = r'<a href="([^"]+)" class="sheet_icon wbold">'
-
-
- def decrypt(self, pyfile):
- html = self.load(pyfile.url)
-
- new_links = []
-
- folder = re.search(self.FOLDER_PATTERN, html, re.S)
- if folder is None:
- self.error(_("FOLDER_PATTERN not found"))
-
- new_links.extend(re.findall(self.LINK_PATTERN, folder.group(1)))
-
- if new_links:
-            self.urls = map(lambda s: "http://fileserve.com%s" % s, new_links)
diff --git a/pyload/plugins/crypter/FilesonicCom.py b/pyload/plugins/crypter/FilesonicCom.py
deleted file mode 100644
index 0f6c92ee0..000000000
--- a/pyload/plugins/crypter/FilesonicCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class FilesonicCom(DeadCrypter):
- __name = "FilesonicCom"
- __type = "crypter"
- __version = "0.12"
-
- __pattern = r'http://(?:www\.)?filesonic\.com/folder/\w+'
-
- __description = """Filesonic.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(FilesonicCom)
diff --git a/pyload/plugins/crypter/FilestubeCom.py b/pyload/plugins/crypter/FilestubeCom.py
deleted file mode 100644
index ca315f1d4..000000000
--- a/pyload/plugins/crypter/FilestubeCom.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class FilestubeCom(SimpleCrypter):
- __name = "FilestubeCom"
- __type = "crypter"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?filestube\.(?:com|to)/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Filestube.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- LINK_PATTERN = r'<a class=\"file-link-main(?: noref)?\" [^>]* href=\"(http://[^\"]+)'
- NAME_PATTERN = r'<h1\s*> (?P<N>.+) download\s*</h1>'
diff --git a/pyload/plugins/crypter/FiletramCom.py b/pyload/plugins/crypter/FiletramCom.py
deleted file mode 100644
index 340b7f02a..000000000
--- a/pyload/plugins/crypter/FiletramCom.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class FiletramCom(SimpleCrypter):
- __name = "FiletramCom"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?filetram\.com/[^/]+/.+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Filetram.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("igel", "igelkun@myopera.com"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- LINK_PATTERN = r'\s+(http://.+)'
- NAME_PATTERN = r'<title>(?P<N>.+?) - Free Download'
diff --git a/pyload/plugins/crypter/FiredriveCom.py b/pyload/plugins/crypter/FiredriveCom.py
deleted file mode 100644
index 019555c2b..000000000
--- a/pyload/plugins/crypter/FiredriveCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class FiredriveCom(DeadCrypter):
- __name = "FiredriveCom"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'https?://(?:www\.)?(firedrive|putlocker)\.com/share/.+'
- __config = []
-
- __description = """Firedrive.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
-getInfo = create_getInfo(FiredriveCom)
diff --git a/pyload/plugins/crypter/FourChanOrg.py b/pyload/plugins/crypter/FourChanOrg.py
deleted file mode 100644
index 79e7a9548..000000000
--- a/pyload/plugins/crypter/FourChanOrg.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Based on 4chandl by Roland Beermann (https://gist.github.com/enkore/3492599)
-
-import re
-
-from pyload.plugins.Crypter import Crypter
-
-
-class FourChanOrg(Crypter):
- __name = "FourChanOrg"
- __type = "crypter"
- __version = "0.30"
-
- __pattern = r'http://(?:www\.)?boards\.4chan\.org/\w+/res/(\d+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """4chan.org folder decrypter plugin"""
- __license = "GPLv3"
- __authors = []
-
-
- def decrypt(self, pyfile):
- pagehtml = self.load(pyfile.url)
- images = set(re.findall(r'(images\.4chan\.org/[^/]*/src/[^"<]*)', pagehtml))
- self.urls = ["http://" + image for image in images]
diff --git a/pyload/plugins/crypter/FreakhareCom.py b/pyload/plugins/crypter/FreakhareCom.py
deleted file mode 100644
index a73b2fed7..000000000
--- a/pyload/plugins/crypter/FreakhareCom.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class FreakhareCom(SimpleCrypter):
- __name = "FreakhareCom"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?freakshare\.com/folder/.+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Freakhare.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- LINK_PATTERN = r'<a href="(http://freakshare\.com/files/[^"]+)" target="_blank">'
- NAME_PATTERN = r'Folder:</b> (?P<N>.+)'
- PAGES_PATTERN = r'Pages: +(\d+)'
-
-
- def loadPage(self, page_n):
- if not hasattr(self, 'f_id') and not hasattr(self, 'f_md5'):
- m = re.search(r'http://freakshare.com/\?x=folder&f_id=(\d+)&f_md5=(\w+)', self.html)
- if m:
- self.f_id = m.group(1)
- self.f_md5 = m.group(2)
- return self.load('http://freakshare.com/', get={'x': 'folder',
- 'f_id': self.f_id,
- 'f_md5': self.f_md5,
- 'entrys': '20',
- 'page': page_n - 1,
- 'order': ''}, decode=True)
diff --git a/pyload/plugins/crypter/FreetexthostCom.py b/pyload/plugins/crypter/FreetexthostCom.py
deleted file mode 100644
index 48365e304..000000000
--- a/pyload/plugins/crypter/FreetexthostCom.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class FreetexthostCom(SimpleCrypter):
- __name = "FreetexthostCom"
- __type = "crypter"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?freetexthost\.com/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Freetexthost.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- def getLinks(self):
- m = re.search(r'<div id="contentsinner">\s*(.+)<div class="viewcount">', self.html, re.S)
- if m is None:
- self.error(_("Unable to extract links"))
- links = m.group(1)
- return links.strip().split("<br />\r\n")
diff --git a/pyload/plugins/crypter/FshareVn.py b/pyload/plugins/crypter/FshareVn.py
deleted file mode 100644
index 06ae307fc..000000000
--- a/pyload/plugins/crypter/FshareVn.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class FshareVn(SimpleCrypter):
- __name = "FshareVn"
- __type = "crypter"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?fshare\.vn/folder/.*'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Fshare.vn folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- LINK_PATTERN = r'<li class="w_80pc"><a href="([^"]+)" target="_blank">'
diff --git a/pyload/plugins/crypter/Go4UpCom.py b/pyload/plugins/crypter/Go4UpCom.py
deleted file mode 100644
index 44c25c88a..000000000
--- a/pyload/plugins/crypter/Go4UpCom.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter, create_getInfo
-
-
-class Go4UpCom(SimpleCrypter):
- __name = "Go4UpCom"
- __type = "crypter"
- __version = "0.11"
-
- __pattern = r'http://go4up\.com/(dl/\w{12}|rd/\w{12}/\d+)'
-
- __description = """Go4Up.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("rlindner81", "rlindner81@gmail.com"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- LINK_PATTERN = r'(http://go4up\.com/rd/.+?)<'
-
- NAME_PATTERN = r'<title>Download (.+?)<'
-
- OFFLINE_PATTERN = r'>\s*(404 Page Not Found|File not Found|Mirror does not exist)'
-
-
- def getLinks(self):
- links = []
-
- m = re.search(r'(/download/gethosts/.+?)"', self.html)
- if m:
- self.html = self.load(urljoin("http://go4up.com/", m.group(1)))
- pages = [self.load(url) for url in re.findall(self.LINK_PATTERN, self.html)]
- else:
- pages = [self.html]
-
- for html in pages:
- try:
- links.append(re.search(r'<b><a href="(.+?)"', html).group(1))
- except Exception:
- continue
-
- return links
-
-
-getInfo = create_getInfo(Go4UpCom)
diff --git a/pyload/plugins/crypter/GooGl.py b/pyload/plugins/crypter/GooGl.py
deleted file mode 100644
index 928034f5c..000000000
--- a/pyload/plugins/crypter/GooGl.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Crypter import Crypter
-from pyload.utils import json_loads
-
-
-class GooGl(Crypter):
- __name = "GooGl"
- __type = "crypter"
- __version = "0.01"
-
- __pattern = r'https?://(?:www\.)?goo\.gl/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Goo.gl decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- API_URL = "https://www.googleapis.com/urlshortener/v1/url"
-
-
- def decrypt(self, pyfile):
- rep = self.load(self.API_URL, get={'shortUrl': pyfile.url})
- self.logDebug("JSON data: " + rep)
- rep = json_loads(rep)
-
- if 'longUrl' in rep:
- self.urls = [rep['longUrl']]
- else:
- self.fail(_("Unable to expand shortened link"))
diff --git a/pyload/plugins/crypter/HoerbuchIn.py b/pyload/plugins/crypter/HoerbuchIn.py
deleted file mode 100644
index 83f174b58..000000000
--- a/pyload/plugins/crypter/HoerbuchIn.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from BeautifulSoup import BeautifulSoup, BeautifulStoneSoup
-
-from pyload.plugins.Crypter import Crypter
-
-
-class HoerbuchIn(Crypter):
- __name = "HoerbuchIn"
- __type = "crypter"
- __version = "0.60"
-
- __pattern = r'http://(?:www\.)?hoerbuch\.in/(wp/horbucher/\d+/.+/|tp/out\.php\?.+|protection/folder_\d+\.html)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Hoerbuch.in decrypter plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org"),
- ("mkaay", "mkaay@mkaay.de")]
-
-
- article = re.compile("http://(?:www\.)?hoerbuch\.in/wp/horbucher/\d+/.+/")
- protection = re.compile("http://(?:www\.)?hoerbuch\.in/protection/folder_\d+.html")
-
-
- def decrypt(self, pyfile):
- self.pyfile = pyfile
-
- if self.article.match(pyfile.url):
- html = self.load(pyfile.url)
- soup = BeautifulSoup(html, convertEntities=BeautifulStoneSoup.HTML_ENTITIES)
-
- abookname = soup.find("a", attrs={"rel": "bookmark"}).text
- for a in soup.findAll("a", attrs={"href": self.protection}):
- package = "%s (%s)" % (abookname, a.previousSibling.previousSibling.text[:-1])
- links = self.decryptFolder(a['href'])
-
- self.packages.append((package, links, package))
- else:
- self.urls = self.decryptFolder(pyfile.url)
-
-
- def decryptFolder(self, url):
- m = self.protection.search(url)
- if m is None:
- self.fail(_("Bad URL"))
- url = m.group(0)
-
- self.pyfile.url = url
- html = self.load(url, post={"viewed": "adpg"})
-
- links = []
- pattern = re.compile("http://www\.hoerbuch\.in/protection/(\w+)/(.*?)\"")
- for hoster, lid in pattern.findall(html):
- self.req.lastURL = url
- self.load("http://www.hoerbuch.in/protection/%s/%s" % (hoster, lid))
- links.append(self.req.lastEffectiveURL)
-
- return links
diff --git a/pyload/plugins/crypter/HotfileCom.py b/pyload/plugins/crypter/HotfileCom.py
deleted file mode 100644
index 4cc522022..000000000
--- a/pyload/plugins/crypter/HotfileCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class HotfileCom(DeadCrypter):
- __name = "HotfileCom"
- __type = "crypter"
- __version = "0.30"
-
- __pattern = r'https?://(?:www\.)?hotfile\.com/list/\w+/\w+'
- __config = []
-
- __description = """Hotfile.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org")]
-
-
-getInfo = create_getInfo(HotfileCom)
diff --git a/pyload/plugins/crypter/ILoadTo.py b/pyload/plugins/crypter/ILoadTo.py
deleted file mode 100644
index f299924b4..000000000
--- a/pyload/plugins/crypter/ILoadTo.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class ILoadTo(DeadCrypter):
- __name = "ILoadTo"
- __type = "crypter"
- __version = "0.11"
-
- __pattern = r'http://(?:www\.)?iload\.to/go/\d+-[\w.-]+/'
- __config = []
-
- __description = """Iload.to decrypter plugin"""
- __license = "GPLv3"
- __authors = [("hzpz", "")]
-
-
-getInfo = create_getInfo(ILoadTo)
diff --git a/pyload/plugins/crypter/ImgurComAlbum.py b/pyload/plugins/crypter/ImgurComAlbum.py
deleted file mode 100644
index b1e7d3c37..000000000
--- a/pyload/plugins/crypter/ImgurComAlbum.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import re
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-from pyload.utils import uniqify
-
-
-class ImgurComAlbum(SimpleCrypter):
- __name = "ImgurComAlbum"
- __type = "crypter"
- __version = "0.51"
-
- __pattern = r'https?://(?:www\.|m\.)?imgur\.com/(a|gallery|)/?\w{5,7}'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Imgur.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("nath_schwarz", "nathan.notwhite@gmail.com")]
-
-
- NAME_PATTERN = r'(?P<N>.+?) - Imgur'
- LINK_PATTERN = r'i\.imgur\.com/\w{7}s?\.(?:jpeg|jpg|png|gif|apng)'
-
-
- def getLinks(self):
- f = lambda url: "http://" + re.sub(r'(\w{7})s\.', r'\1.', url)
- return uniqify(map(f, re.findall(self.LINK_PATTERN, self.html)))
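
The getLinks method above rewrites Imgur thumbnail URLs into full-size image URLs by dropping the trailing "s" before the file extension. A tiny worked example of that substitution, using a made-up image id purely for illustration:

    import re

    thumb = "i.imgur.com/abcdefgs.jpg"  # hypothetical thumbnail URL
    full = "http://" + re.sub(r'(\w{7})s\.', r'\1.', thumb)
    # full == "http://i.imgur.com/abcdefg.jpg"
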
diff --git a/pyload/plugins/crypter/JunocloudMe.py b/pyload/plugins/crypter/JunocloudMe.py
deleted file mode 100644
index b737fc86a..000000000
--- a/pyload/plugins/crypter/JunocloudMe.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSCrypter import XFSCrypter
-
-
-class JunocloudMe(XFSCrypter):
- __name = "JunocloudMe"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?junocloud\.me/folders/(?P<ID>\d+/\w+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Junocloud.me folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "junocloud.me"
diff --git a/pyload/plugins/crypter/LetitbitNet.py b/pyload/plugins/crypter/LetitbitNet.py
deleted file mode 100644
index 04ad5c1ea..000000000
--- a/pyload/plugins/crypter/LetitbitNet.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from pyload.plugins.Crypter import Crypter
-
-
-class LetitbitNet(Crypter):
- __name = "LetitbitNet"
- __type = "crypter"
- __version = "0.10"
-
- __pattern = r'http://(?:www\.)?letitbit\.net/folder/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Letitbit.net folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("DHMH", "webmaster@pcProfil.de"),
- ("z00nx", "z00nx0@gmail.com")]
-
-
- FOLDER_PATTERN = r'<table>(.*)</table>'
- LINK_PATTERN = r'<a href="([^"]+)" target="_blank">'
-
-
- def decrypt(self, pyfile):
- html = self.load(pyfile.url)
-
- folder = re.search(self.FOLDER_PATTERN, html, re.S)
- if folder is None:
- self.error(_("FOLDER_PATTERN not found"))
-
- self.urls.extend(re.findall(self.LINK_PATTERN, folder.group(0)))
diff --git a/pyload/plugins/crypter/LinkCryptWs.py b/pyload/plugins/crypter/LinkCryptWs.py
deleted file mode 100644
index 3285e2dc8..000000000
--- a/pyload/plugins/crypter/LinkCryptWs.py
+++ /dev/null
@@ -1,327 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import base64
-import binascii
-import re
-
-import pycurl
-
-from Crypto.Cipher import AES
-
-from pyload.plugins.Crypter import Crypter
-from pyload.utils import html_unescape
-
-
-class LinkCryptWs(Crypter):
- __name = "LinkCryptWs"
- __type = "crypter"
- __version = "0.07"
-
- __pattern = r'http://(?:www\.)?linkcrypt\.ws/(dir|container)/(?P<ID>\w+)'
-
- __description = """LinkCrypt.ws decrypter plugin"""
- __license = "GPLv3"
- __authors = [("kagenoshin", "kagenoshin[AT]gmx[DOT]ch"),
- ("glukgluk", ""),
- ("Gummibaer", "")]
-
-
- CRYPTED_KEY = "crypted"
- JK_KEY = "jk"
-
-
- def setup(self):
- self.captcha = False
- self.links = []
- self.sources = ['cnl', 'web', 'dlc', 'rsdf', 'ccf']
-
-
- def prepare(self):
- # Init
- self.fileid = re.match(self.__pattern, self.pyfile.url).group('ID')
-
- self.req.cj.setCookie("linkcrypt.ws", "language", "en")
-
- # Request package
- self.req.http.c.setopt(pycurl.USERAGENT, "Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko") #: better chance to not get those key-captchas
- self.html = self.load(self.pyfile.url)
-
-
- def decrypt(self, pyfile):
- if not self.js:
- self.fail(_("Missing JS Engine"))
-
- self.prepare()
-
- if not self.isOnline():
- self.offline()
-
- if self.isKeyCaptchaProtected():
- self.retry(4, 30, _("Can't handle Key-Captcha"))
-
- if self.isCaptchaProtected():
- self.captcha = True
- self.unlockCaptchaProtection()
- self.handleCaptchaErrors()
-
- # Check for protection
- if self.isPasswordProtected():
- self.unlockPasswordProtection()
- self.handleErrors()
-
- # get unrar password
- self.getunrarpw()
-
- # Get package name and folder
- package_name, folder_name = self.getPackageInfo()
-
- #get the container definitions from script section
- self.get_container_html()
-
- # Extract package links
- for type in self.sources:
- links = self.handleLinkSource(type)
-
- if links:
- self.links.extend(links)
- break
-
- if self.links:
- self.packages = [(package_name, self.links, folder_name)]
-
-
- def isOnline(self):
- if "<title>Linkcrypt.ws // Error 404</title>" in self.html:
- self.logDebug("folder doesen't exist anymore")
- return False
- else:
- return True
-
-
- def isPasswordProtected(self):
- if "Authorizing" in self.html:
- self.logDebug("Links are password protected")
- return True
- else:
- return False
-
-
- def isCaptchaProtected(self):
- if 'id="captcha">' in self.html:
- self.logDebug("Links are captcha protected")
- return True
- else:
- return False
-
-
- def isKeyCaptchaProtected(self):
- if re.search(r'Key[ -]', self.html, re.I):
- return True
- else:
- return False
-
-
- def unlockPasswordProtection(self):
- password = self.getPassword()
-
- if password:
- self.logDebug("Submitting password [%s] for protected links" % password)
- self.html = self.load(self.pyfile.url, post={"password": password, 'x': "0", 'y': "0"})
- else:
- self.fail(_("Folder is password protected"))
-
-
- def unlockCaptchaProtection(self):
- captcha_url = re.search(r'<form.*?id\s*?=\s*?"captcha"[^>]*?>.*?<\s*?input.*?src="([^"]*?)"', self.html, re.I | re.S).group(1)
- captcha_code = self.decryptCaptcha(captcha_url, forceUser=True, imgtype="gif", result_type='positional')
-
- self.html = self.load(self.pyfile.url, post={"x": captcha_code[0], "y": captcha_code[1]})
-
-
- def getPackageInfo(self):
- name = self.pyfile.package().name
- folder = self.pyfile.package().folder
-
- self.logDebug("Defaulting to pyfile name [%s] and folder [%s] for package" % (name, folder))
-
- return name, folder
-
-
- def getunrarpw(self):
- sitein = self.html
- indexi = sitein.find("|source|") + 8
- indexe = sitein.find("|", indexi)
-
- unrarpw = sitein[indexi:indexe]
-
- if unrarpw not in ("Password", "Dateipasswort"):
- self.logDebug("File password set to: [%s]" % unrarpw)
- self.pyfile.package().password = unrarpw
-
-
- def handleErrors(self):
- if self.isPasswordProtected():
- self.fail(_("Incorrect password"))
-
-
- def handleCaptchaErrors(self):
- if self.captcha:
- if "Your choice was wrong!" in self.html:
- self.invalidCaptcha()
- self.retry()
- else:
- self.correctCaptcha()
-
-
- def handleLinkSource(self, type):
- if type == 'cnl':
- return self.handleCNL2()
-
- elif type == 'web':
- return self.handleWebLinks()
-
- elif type in ('rsdf', 'ccf', 'dlc'):
- return self.handleContainer(type)
-
- else:
- self.fail(_("Unknown source type: %s") % type) #@TODO: Replace with self.error in 0.4.10
-
-
- def handleWebLinks(self):
- self.logDebug("Search for Web links ")
-
- package_links = []
- pattern = r'<form action="http://linkcrypt.ws/out.html"[^>]*?>.*?<input[^>]*?value="([^"]*?)"[^>]*?name="file"'
- ids = re.findall(pattern, self.html, re.I | re.S)
-
- self.logDebug("Decrypting %d Web links" % len(ids))
-
- for idx, weblink_id in enumerate(ids):
- try:
- self.logDebug("Decrypting Web link %d, %s" % (idx + 1, weblink_id))
-
- res = self.load("http://linkcrypt.ws/out.html", post = {'file':weblink_id})
-
- indexs = res.find("window.location =") + 19
- indexe = res.find('"', indexs)
-
- link2 = res[indexs:indexe]
-
- self.logDebug(link2)
-
- link2 = html_unescape(link2)
- package_links.append(link2)
-
- except Exception, detail:
- self.logDebug("Error decrypting Web link %s, %s" % (weblink_id, detail))
-
- return package_links
-
-
- def get_container_html(self):
- self.container_html = []
-
- script = re.search(r'<div.*?id="ad_cont".*?<script.*?javascrip[^>]*?>(.*?)</script', self.html, re.I | re.S)
-
- if script:
- container_html_text = script.group(1)
- container_html_text = container_html_text.strip()
- self.container_html = container_html_text.splitlines()
-
-
- def handle_javascript(self, line):
- return self.js.eval(line.replace('{}))',"{}).replace('document.open();document.write','').replace(';document.close();',''))"))
-
-
- def handleContainer(self, type):
- package_links = []
- type = type.lower()
-
- self.logDebug('Search for %s Container links' % type.upper())
-
- if not type.isalnum(): # check to prevent broken re-pattern (cnl2,rsdf,ccf,dlc,web are all alpha-numeric)
- self.fail(_("Unknown container type: %s") % type) #@TODO: Replace with self.error in 0.4.10
-
- for line in self.container_html:
- if type in line:
- jseval = self.handle_javascript(line)
- clink = re.search(r'href=["\']([^"\']*?)["\']', jseval, re.I)
-
- if not clink:
- continue
-
- self.logDebug("clink available")
-
- package_name, folder_name = self.getPackageInfo()
- self.logDebug("Added package with name %s.%s and container link %s" % (package_name, type, clink.group(1)))
- self.core.api.uploadContainer("%s.%s" % (package_name, type), self.load(clink.group(1)))
- return "Found it"
-
- return package_links
-
-
- def handleCNL2(self):
- self.logDebug("Search for CNL links")
-
- package_links = []
- cnl_line = None
-
- for line in self.container_html:
- if "cnl" in line:
- cnl_line = line
- break
-
- if cnl_line:
- self.logDebug("cnl_line gefunden")
-
- try:
- cnl_section = self.handle_javascript(cnl_line)
- (vcrypted, vjk) = self._getCipherParams(cnl_section)
- for (crypted, jk) in zip(vcrypted, vjk):
- package_links.extend(self._getLinks(crypted, jk))
- except Exception:
- self.logError(_("Unable to decrypt CNL links (JS Error) try to get over links"))
- return self.handleWebLinks()
-
- return package_links
-
-
- def _getCipherParams(self, cnl_section):
- # Get jk
- jk_re = r'<INPUT.*?NAME="%s".*?VALUE="(.*?)"' % LinkCryptWs.JK_KEY
- vjk = re.findall(jk_re, cnl_section)
-
- # Get crypted
- crypted_re = r'<INPUT.*?NAME="%s".*?VALUE="(.*?)"' % LinkCryptWs.CRYPTED_KEY
- vcrypted = re.findall(crypted_re, cnl_section)
-
- # Log and return
- self.logDebug("Detected %d crypted blocks" % len(vcrypted))
- return vcrypted, vjk
-
-
- def _getLinks(self, crypted, jk):
- # Get key
- jreturn = self.js.eval("%s f()" % jk)
- key = binascii.unhexlify(jreturn)
-
- self.logDebug("JsEngine returns value [%s]" % jreturn)
-
- # Decode crypted
- crypted = base64.standard_b64decode(crypted)
-
- # Decrypt
- Key = key
- IV = key
- obj = AES.new(Key, AES.MODE_CBC, IV)
- text = obj.decrypt(crypted)
-
- # Extract links
- text = text.replace("\x00", "").replace("\r", "")
- links = text.split("\n")
- links = filter(lambda x: x != "", links)
-
- # Log and return
- self.logDebug("Package has %d links" % len(links))
-
- return links
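
The _getLinks method above (and its twins in LinkSaveIn and NCryptIn further down) decodes a Click'n'Load 2 container: the "jk" JavaScript function yields a hex key, that key doubles as the AES-CBC IV, and the base64 "crypted" blob decrypts to one URL per line. A minimal standalone sketch of that scheme, assuming PyCrypto/pycryptodome is available and the hex key has already been obtained from a JS engine (the function and variable names here are illustrative, not part of the plugin API):

    import base64
    import binascii

    from Crypto.Cipher import AES

    def decrypt_cnl_links(crypted_b64, jk_hex):
        key = binascii.unhexlify(jk_hex)          # 16-byte AES key from the jk function
        cipher = AES.new(key, AES.MODE_CBC, key)  # the scheme reuses the key as IV
        text = cipher.decrypt(base64.standard_b64decode(crypted_b64))
        if isinstance(text, bytes):
            text = text.decode("utf-8", "ignore")
        text = text.replace("\x00", "").replace("\r", "")  # strip NUL padding and CRs
        return [line for line in text.split("\n") if line]
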
diff --git a/pyload/plugins/crypter/LinkSaveIn.py b/pyload/plugins/crypter/LinkSaveIn.py
deleted file mode 100644
index 53ed008d8..000000000
--- a/pyload/plugins/crypter/LinkSaveIn.py
+++ /dev/null
@@ -1,246 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# * cnl2 and web links are skipped if JS is not available (instead of failing the package)
-# * only best available link source is used (priority: cnl2>rsdf>ccf>dlc>web)
-
-import base64
-import binascii
-import re
-
-from Crypto.Cipher import AES
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-from pyload.utils import html_unescape
-
-
-class LinkSaveIn(SimpleCrypter):
- __name = "LinkSaveIn"
- __type = "crypter"
- __version = "2.02"
-
- __pattern = r'http://(?:www\.)?linksave\.in/(?P<id>\w+)$'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """LinkSave.in decrypter plugin"""
- __license = "GPLv3"
- __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es")]
-
-
- COOKIES = [("linksave.in", "Linksave_Language", "english")]
-
- # Constants
- _JK_KEY_ = "jk"
- _CRYPTED_KEY_ = "crypted"
-
-
- def setup(self):
- self.fileid = None
- self.captcha = False
- self.package = None
- self.preferred_sources = ["cnl2", "rsdf", "ccf", "dlc", "web"]
-
-
- def decrypt(self, pyfile):
- # Init
- self.package = pyfile.package()
- self.fileid = re.match(self.__pattern, pyfile.url).group('id')
-
- # Request package
- self.html = self.load(pyfile.url)
- if not self.isOnline():
- self.offline()
-
- # Check for protection
- if self.isPasswordProtected():
- self.unlockPasswordProtection()
- self.handleErrors()
-
- if self.isCaptchaProtected():
- self.captcha = True
- self.unlockCaptchaProtection()
- self.handleErrors()
-
- # Get package name and folder
- (package_name, folder_name) = self.getPackageInfo()
-
- # Extract package links
- package_links = []
- for type_ in self.preferred_sources:
- package_links.extend(self.handleLinkSource(type_))
- if package_links: # use only first source which provides links
- break
- package_links = set(package_links)
-
- # Pack
- if package_links:
- self.packages = [(package_name, package_links, folder_name)]
-
-
- def isOnline(self):
- if "<big>Error 404 - Folder not found!</big>" in self.html:
- self.logDebug("File not found")
- return False
- return True
-
-
- def isPasswordProtected(self):
- if re.search(r'''<input.*?type="password"''', self.html):
- self.logDebug("Links are password protected")
- return True
-
-
- def isCaptchaProtected(self):
- if "<b>Captcha:</b>" in self.html:
- self.logDebug("Links are captcha protected")
- return True
- return False
-
-
- def unlockPasswordProtection(self):
- password = self.getPassword()
- self.logDebug("Submitting password [%s] for protected links" % password)
- post = {"id": self.fileid, "besucherpasswort": password, 'login': 'submit'}
- self.html = self.load(self.pyfile.url, post=post)
-
-
- def unlockCaptchaProtection(self):
- captcha_hash = re.search(r'name="hash" value="([^"]+)', self.html).group(1)
- captcha_url = re.search(r'src=".(/captcha/cap.php\?hsh=[^"]+)', self.html).group(1)
- captcha_code = self.decryptCaptcha("http://linksave.in" + captcha_url, forceUser=True)
- self.html = self.load(self.pyfile.url, post={"id": self.fileid, "hash": captcha_hash, "code": captcha_code})
-
-
- def getPackageInfo(self):
- name = self.pyfile.package().name
- folder = self.pyfile.package().folder
- self.logDebug("Defaulting to pyfile name [%s] and folder [%s] for package" % (name, folder))
- return name, folder
-
-
- def handleErrors(self):
- if "The visitorpassword you have entered is wrong" in self.html:
- self.logDebug("Incorrect password, please set right password on 'Edit package' form and retry")
- self.fail(_("Incorrect password, please set right password on 'Edit package' form and retry"))
-
- if self.captcha:
- if "Wrong code. Please retry" in self.html:
- self.invalidCaptcha()
- self.retry()
- else:
- self.correctCaptcha()
-
-
- def handleLinkSource(self, type_):
- if type_ == "cnl2":
- return self.handleCNL2()
- elif type_ in ("rsdf", "ccf", "dlc"):
- return self.handleContainer(type_)
- elif type_ == "web":
- return self.handleWebLinks()
- else:
- self.error('Unknown source type "%s" (this is probably a bug)' % type_)
-
-
- def handleWebLinks(self):
- package_links = []
- self.logDebug("Search for Web links")
- if not self.js:
- self.logDebug("No JS -> skip Web links")
- else:
- #@TODO: Gather paginated web links
- pattern = r'<a href="http://linksave\.in/(\w{43})"'
- ids = re.findall(pattern, self.html)
- self.logDebug("Decrypting %d Web links" % len(ids))
- for i, weblink_id in enumerate(ids):
- try:
- webLink = "http://linksave.in/%s" % weblink_id
-
- self.logDebug("Decrypting Web link %d, %s" % (i + 1, webLink))
-
- fwLink = "http://linksave.in/fw-%s" % weblink_id
- res = self.load(fwLink)
-
- jscode = re.findall(r'<script type="text/javascript">(.*)</script>', res)[-1]
- jseval = self.js.eval("document = { write: function(e) { return e; } }; %s" % jscode)
- dlLink = re.search(r'http://linksave\.in/dl-\w+', jseval).group(0)
- self.logDebug("JsEngine returns value [%s] for redirection link" % dlLink)
-
- res = self.load(dlLink)
- link = html_unescape(re.search(r'<iframe src="(.+?)"', res).group(1))
-
- package_links.append(link)
-
- except Exception, detail:
- self.logDebug("Error decrypting Web link %s, %s" % (webLink, detail))
-
- return package_links
-
-
- def handleContainer(self, type_):
- package_links = []
- type_ = type_.lower()
- self.logDebug("Seach for %s Container links" % type_.upper())
- if not type_.isalnum(): # check to prevent broken re-pattern (cnl2,rsdf,ccf,dlc,web are all alpha-numeric)
- self.error('Unknown container type "%s" (this is probably a bug)' % type_)
- pattern = r'\(\'%s_link\'\).href=unescape\(\'(.*?\.%s)\'\)' % (type_, type_)
- containersLinks = re.findall(pattern, self.html)
- self.logDebug("Found %d %s Container links" % (len(containersLinks), type_.upper()))
- for containerLink in containersLinks:
- link = "http://linksave.in/%s" % html_unescape(containerLink)
- package_links.append(link)
- return package_links
-
-
- def handleCNL2(self):
- package_links = []
- self.logDebug("Search for CNL2 links")
- if not self.js:
- self.logDebug("No JS -> skip CNL2 links")
- elif 'cnl2_load' in self.html:
- try:
- (vcrypted, vjk) = self._getCipherParams()
- for (crypted, jk) in zip(vcrypted, vjk):
- package_links.extend(self._getLinks(crypted, jk))
- except Exception:
- self.fail(_("Unable to decrypt CNL2 links"))
- return package_links
-
-
- def _getCipherParams(self):
- # Get jk
- jk_re = r'<INPUT.*?NAME="%s".*?VALUE="(.*?)"' % LinkSaveIn._JK_KEY_
- vjk = re.findall(jk_re, self.html)
-
- # Get crypted
- crypted_re = r'<INPUT.*?NAME="%s".*?VALUE="(.*?)"' % LinkSaveIn._CRYPTED_KEY_
- vcrypted = re.findall(crypted_re, self.html)
-
- # Log and return
- self.logDebug("Detected %d crypted blocks" % len(vcrypted))
- return vcrypted, vjk
-
-
- def _getLinks(self, crypted, jk):
- # Get key
- jreturn = self.js.eval("%s f()" % jk)
- self.logDebug("JsEngine returns value [%s]" % jreturn)
- key = binascii.unhexlify(jreturn)
-
- # Decode crypted
- crypted = base64.standard_b64decode(crypted)
-
- # Decrypt
- Key = key
- IV = key
- obj = AES.new(Key, AES.MODE_CBC, IV)
- text = obj.decrypt(crypted)
-
- # Extract links
- text = text.replace("\x00", "").replace("\r", "")
- links = text.split("\n")
- links = filter(lambda x: x != "", links)
-
- # Log and return
- self.logDebug("Package has %d links" % len(links))
- return links
diff --git a/pyload/plugins/crypter/LinkdecrypterCom.py b/pyload/plugins/crypter/LinkdecrypterCom.py
deleted file mode 100644
index dd4429db1..000000000
--- a/pyload/plugins/crypter/LinkdecrypterCom.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from pyload.plugins.Crypter import Crypter
-
-
-class LinkdecrypterCom(Crypter):
- __name = "LinkdecrypterCom"
- __type = "crypter"
- __version = "0.27"
-
- __pattern = r'^unmatchable$'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Linkdecrypter.com"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("flowlee", "")]
-
-
- TEXTAREA_PATTERN = r'<textarea name="links" wrap="off" readonly="1" class="caja_des">(.+)</textarea>'
- PASSWORD_PATTERN = r'<input type="text" name="password"'
- CAPTCHA_PATTERN = r'<img class="captcha" src="(.+?)"(.*?)>'
- REDIR_PATTERN = r'<i>(Click <a href="./">here</a> if your browser does not redirect you).</i>'
-
-
- def decrypt(self, pyfile):
- self.passwords = self.getPassword().splitlines()
-
- # API not working anymore
- self.urls = self.decryptHTML()
-
-
- def decryptAPI(self):
- get_dict = {"t": "link", "url": self.pyfile.url, "lcache": "1"}
- self.html = self.load('http://linkdecrypter.com/api', get=get_dict)
- if self.html.startswith('http://'):
- return self.html.splitlines()
-
- if self.html == 'INTERRUPTION(PASSWORD)':
- for get_dict['pass'] in self.passwords:
- self.html = self.load('http://linkdecrypter.com/api', get=get_dict)
- if self.html.startswith('http://'):
- return self.html.splitlines()
-
- self.logError("API", self.html)
- if self.html == 'INTERRUPTION(PASSWORD)':
- self.fail(_("No or incorrect password"))
-
- return None
-
-
- def decryptHTML(self):
- retries = 5
-
- post_dict = {"link_cache": "on", "pro_links": self.pyfile.url, "modo_links": "text"}
- self.html = self.load('http://linkdecrypter.com/', post=post_dict, cookies=True, decode=True)
-
- while self.passwords or retries:
- m = re.search(self.TEXTAREA_PATTERN, self.html, flags=re.S)
- if m:
- return [x for x in m.group(1).splitlines() if '[LINK-ERROR]' not in x]
-
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m:
- captcha_url = 'http://linkdecrypter.com/' + m.group(1)
- result_type = "positional" if "getPos" in m.group(2) else "textual"
-
- m = re.search(r"<p><i><b>([^<]+)</b></i></p>", self.html)
- msg = m.group(1) if m else ""
- self.logInfo(_("Captcha protected link"), result_type, msg)
-
- captcha = self.decryptCaptcha(captcha_url, result_type=result_type)
- if result_type == "positional":
- captcha = "%d|%d" % captcha
- self.html = self.load('http://linkdecrypter.com/', post={"captcha": captcha}, decode=True)
- retries -= 1
-
- elif self.PASSWORD_PATTERN in self.html:
- if self.passwords:
- password = self.passwords.pop(0)
- self.logInfo(_("Password protected link, trying ") + password)
- self.html = self.load('http://linkdecrypter.com/', post={'password': password}, decode=True)
- else:
- self.fail(_("No or incorrect password"))
-
- else:
- retries -= 1
- self.html = self.load('http://linkdecrypter.com/', cookies=True, decode=True)
-
- return None
diff --git a/pyload/plugins/crypter/LixIn.py b/pyload/plugins/crypter/LixIn.py
deleted file mode 100644
index ffd741cdf..000000000
--- a/pyload/plugins/crypter/LixIn.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Crypter import Crypter
-
-
-class LixIn(Crypter):
- __name = "LixIn"
- __type = "crypter"
- __version = "0.22"
-
- __pattern = r'http://(?:www\.)?lix\.in/(?P<ID>.+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Lix.in decrypter plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org")]
-
-
- CAPTCHA_PATTERN = r'<img src="(?P<image>captcha_img\.php\?.*?)"'
- SUBMIT_PATTERN = r'value=\'continue.*?\''
- LINK_PATTERN = r'name="ifram" src="(?P<link>.*?)"'
-
-
- def decrypt(self, pyfile):
- url = pyfile.url
-
- m = re.match(self.__pattern, url)
- if m is None:
- self.error(_("Unable to identify file ID"))
-
- id = m.group("ID")
- self.logDebug("File id is %s" % id)
-
- self.html = self.load(url, decode=True)
-
- m = re.search(self.SUBMIT_PATTERN, self.html)
- if m is None:
- self.error(_("Link doesn't seem valid"))
-
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m:
- for _i in xrange(5):
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m:
- self.logDebug("Trying captcha")
- captcharesult = self.decryptCaptcha("http://lix.in/" + m.group("image"))
- self.html = self.load(url, decode=True,
- post={"capt": captcharesult, "submit": "submit", "tiny": id})
- else:
- self.logDebug("No captcha/captcha solved")
- else:
- self.html = self.load(url, decode=True, post={"submit": "submit", "tiny": id})
-
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("Unable to find destination url"))
- else:
- self.urls = [m.group("link")]
- self.logDebug("Found link %s, adding to package" % self.urls[0])
diff --git a/pyload/plugins/crypter/LofCc.py b/pyload/plugins/crypter/LofCc.py
deleted file mode 100644
index dbe785179..000000000
--- a/pyload/plugins/crypter/LofCc.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class LofCc(DeadCrypter):
- __name = "LofCc"
- __type = "crypter"
- __version = "0.21"
-
- __pattern = r'http://(?:www\.)?lof\.cc/(.*)'
- __config = []
-
- __description = """Lof.cc decrypter plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
-getInfo = create_getInfo(LofCc)
diff --git a/pyload/plugins/crypter/MBLinkInfo.py b/pyload/plugins/crypter/MBLinkInfo.py
deleted file mode 100644
index 5f8ae30c2..000000000
--- a/pyload/plugins/crypter/MBLinkInfo.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class MBLinkInfo(DeadCrypter):
- __name = "MBLinkInfo"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?mblink\.info/?\?id=(\d+)'
- __config = []
-
- __description = """MBLink.info decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Gummibaer", "Gummibaer@wiki-bierkiste.de"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(MBLinkInfo)
diff --git a/pyload/plugins/crypter/MediafireCom.py b/pyload/plugins/crypter/MediafireCom.py
deleted file mode 100644
index 32c4a0726..000000000
--- a/pyload/plugins/crypter/MediafireCom.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from pyload.plugins.Crypter import Crypter
-from pyload.plugins.hoster.MediafireCom import checkHTMLHeader
-from pyload.utils import json_loads
-
-
-class MediafireCom(Crypter):
- __name = "MediafireCom"
- __type = "crypter"
- __version = "0.14"
-
- __pattern = r'http://(?:www\.)?mediafire\.com/(folder/|\?sharekey=|\?\w{13}($|[/#]))'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Mediafire.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- FOLDER_KEY_PATTERN = r'var afI= \'(\w+)'
- LINK_PATTERN = r'<meta property="og:url" content="http://www\.mediafire\.com/\?(\w+)"/>'
-
-
- def decrypt(self, pyfile):
- url, result = checkHTMLHeader(pyfile.url)
- self.logDebug("Location (%d): %s" % (result, url))
-
- if result == 0:
- # load and parse html
- html = self.load(pyfile.url)
- m = re.search(self.LINK_PATTERN, html)
- if m:
- # file page
- self.urls.append("http://www.mediafire.com/file/%s" % m.group(1))
- else:
- # folder page
- m = re.search(self.FOLDER_KEY_PATTERN, html)
- if m:
- folder_key = m.group(1)
- self.logDebug("FOLDER KEY: %s" % folder_key)
-
- json_resp = json_loads(self.load("http://www.mediafire.com/api/folder/get_info.php",
- get={'folder_key' : folder_key,
- 'response_format': "json",
- 'version' : 1}))
- #self.logInfo(json_resp)
- if json_resp['response']['result'] == "Success":
- for link in json_resp['response']['folder_info']['files']:
- self.urls.append("http://www.mediafire.com/file/%s" % link['quickkey'])
- else:
- self.fail(json_resp['response']['message'])
- elif result == 1:
- self.offline()
- else:
- self.urls.append(url)
diff --git a/pyload/plugins/crypter/MegaRapidCz.py b/pyload/plugins/crypter/MegaRapidCz.py
deleted file mode 100644
index f9b7a3e38..000000000
--- a/pyload/plugins/crypter/MegaRapidCz.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class MegaRapidCz(SimpleCrypter):
- __name = "MegaRapidCz"
- __type = "crypter"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?(share|mega)rapid\.cz/slozka/\d+/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Share-Rapid.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- LINK_PATTERN = r'<td class="soubor"[^>]*><a href="([^"]+)">'
diff --git a/pyload/plugins/crypter/MegauploadCom.py b/pyload/plugins/crypter/MegauploadCom.py
deleted file mode 100644
index 455e90b52..000000000
--- a/pyload/plugins/crypter/MegauploadCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class MegauploadCom(DeadCrypter):
- __name = "MegauploadCom"
- __type = "crypter"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?megaupload\.com/(\?f|xml/folderfiles\.php\?.*&?folderid)=\w+'
-
- __description = """Megaupload.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(MegauploadCom)
diff --git a/pyload/plugins/crypter/Movie2kTo.py b/pyload/plugins/crypter/Movie2kTo.py
deleted file mode 100644
index 534ac767e..000000000
--- a/pyload/plugins/crypter/Movie2kTo.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class Movie2kTo(DeadCrypter):
- __name = "Movie2kTo"
- __type = "crypter"
- __version = "0.51"
-
- __pattern = r'http://(?:www\.)?movie2k\.to/(.*)\.html'
- __config = []
-
- __description = """Movie2k.to decrypter plugin"""
- __license = "GPLv3"
- __authors = [("4Christopher", "4Christopher@gmx.de")]
-
-
-getInfo = create_getInfo(Movie2kTo)
diff --git a/pyload/plugins/crypter/MultiUpOrg.py b/pyload/plugins/crypter/MultiUpOrg.py
deleted file mode 100644
index bb2768ad3..000000000
--- a/pyload/plugins/crypter/MultiUpOrg.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from urlparse import urljoin
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class MultiUpOrg(SimpleCrypter):
- __name = "MultiUpOrg"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?multiup\.org/(en|fr)/(?P<TYPE>project|download|miror)/\w+(/\w+)?'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """MultiUp.org decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'<title>.*(?:Project|Projet|ownload|élécharger) (?P<N>.+?) (\(|- )'
-
-
- def getLinks(self):
- m_type = re.match(self.__pattern, self.pyfile.url).group("TYPE")
-
- if m_type == "project":
- pattern = r'\n(http://www\.multiup\.org/(?:en|fr)/download/.*)'
- else:
- pattern = r'style="width:97%;text-align:left".*\n.*href="(.*)"'
- if m_type == "download":
- dl_pattern = r'href="(.*)">.*\n.*<h5>DOWNLOAD</h5>'
- miror_page = urljoin("http://www.multiup.org", re.search(dl_pattern, self.html).group(1))
- self.html = self.load(miror_page)
-
- return re.findall(pattern, self.html)
diff --git a/pyload/plugins/crypter/MultiloadCz.py b/pyload/plugins/crypter/MultiloadCz.py
deleted file mode 100644
index c1d33bd14..000000000
--- a/pyload/plugins/crypter/MultiloadCz.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from pyload.plugins.Crypter import Crypter
-
-
-class MultiloadCz(Crypter):
- __name = "MultiloadCz"
- __type = "crypter"
- __version = "0.40"
-
- __pattern = r'http://(?:[^/]*\.)?multiload\.cz/(stahnout|slozka)/.*'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True),
- ("usedHoster", "str", "Prefered hoster list (bar-separated)", ""),
- ("ignoredHoster", "str", "Ignored hoster list (bar-separated)", "")]
-
- __description = """Multiload.cz decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- FOLDER_PATTERN = r'<form action="" method="get"><textarea[^>]*>([^>]*)</textarea></form>'
- LINK_PATTERN = r'<p class="manager-server"><strong>([^<]+)</strong></p><p class="manager-linky"><a href="([^"]+)">'
-
-
- def decrypt(self, pyfile):
- self.html = self.load(pyfile.url, decode=True)
-
- if re.match(self.__pattern, pyfile.url).group(1) == "slozka":
- m = re.search(self.FOLDER_PATTERN, self.html)
- if m:
- self.urls.extend(m.group(1).split())
- else:
- m = re.findall(self.LINK_PATTERN, self.html)
- if m:
- prefered_set = set(self.getConfig("usedHoster").split('|'))
- self.urls.extend([x[1] for x in m if x[0] in prefered_set])
-
- if not self.urls:
- ignored_set = set(self.getConfig("ignoredHoster").split('|'))
- self.urls.extend([x[1] for x in m if x[0] not in ignored_set])
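
The decrypt method above first keeps only mirrors whose hoster appears in the bar-separated usedHoster list and, when nothing matched, falls back to every mirror whose hoster is not on the ignoredHoster list. A small sketch of that selection logic with hypothetical sample data (the helper name and the data are illustrative only):

    def pick_mirrors(found, used_hoster="", ignored_hoster=""):
        """found: list of (hoster, url) pairs as parsed by LINK_PATTERN."""
        preferred = set(h for h in used_hoster.split("|") if h)
        ignored = set(h for h in ignored_hoster.split("|") if h)

        urls = [url for hoster, url in found if hoster in preferred]
        if not urls:  # nothing preferred found: take everything not ignored
            urls = [url for hoster, url in found if hoster not in ignored]
        return urls

    mirrors = [("ulozto", "http://example.com/a"), ("rapidgator", "http://example.com/b")]
    print(pick_mirrors(mirrors, used_hoster="rapidgator"))  # -> ['http://example.com/b']
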
diff --git a/pyload/plugins/crypter/MultiuploadCom.py b/pyload/plugins/crypter/MultiuploadCom.py
deleted file mode 100644
index 19daef3d8..000000000
--- a/pyload/plugins/crypter/MultiuploadCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class MultiuploadCom(DeadCrypter):
- __name = "MultiuploadCom"
- __type = "crypter"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?multiupload\.(com|nl)/\w+'
-
- __description = """MultiUpload.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(MultiuploadCom)
diff --git a/pyload/plugins/crypter/NCryptIn.py b/pyload/plugins/crypter/NCryptIn.py
deleted file mode 100644
index d209102f6..000000000
--- a/pyload/plugins/crypter/NCryptIn.py
+++ /dev/null
@@ -1,315 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import base64
-import binascii
-import re
-
-from Crypto.Cipher import AES
-
-from pyload.plugins.Crypter import Crypter
-from pyload.plugins.internal.captcha import ReCaptcha
-
-
-class NCryptIn(Crypter):
- __name = "NCryptIn"
- __type = "crypter"
- __version = "1.33"
-
- __pattern = r'http://(?:www\.)?ncrypt\.in/(?P<type>folder|link|frame)-([^/\?]+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """NCrypt.in decrypter plugin"""
- __license = "GPLv3"
- __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- JK_KEY = "jk"
- CRYPTED_KEY = "crypted"
-
- NAME_PATTERN = r'<meta name="description" content="(?P<N>[^"]+)"'
-
-
- def setup(self):
- self.package = None
- self.cleanedHtml = None
- self.links_source_order = ["cnl2", "rsdf", "ccf", "dlc", "web"]
- self.protection_type = None
-
-
- def decrypt(self, pyfile):
- # Init
- self.package = pyfile.package()
- package_links = []
- package_name = self.package.name
- folder_name = self.package.folder
-
- # Deal with single links
- if self.isSingleLink():
- package_links.extend(self.handleSingleLink())
-
- # Deal with folders
- else:
-
- # Request folder home
- self.html = self.requestFolderHome()
- self.cleanedHtml = self.removeHtmlCrap(self.html)
- if not self.isOnline():
- self.offline()
-
- # Check for folder protection
- if self.isProtected():
- self.html = self.unlockProtection()
- self.cleanedHtml = self.removeHtmlCrap(self.html)
- self.handleErrors()
-
- # Prepare package name and folder
- (package_name, folder_name) = self.getPackageInfo()
-
- # Extract package links
- for link_source_type in self.links_source_order:
- package_links.extend(self.handleLinkSource(link_source_type))
- if package_links: # use only first source which provides links
- break
- package_links = set(package_links)
-
- # Pack and return links
- if package_links:
- self.packages = [(package_name, package_links, folder_name)]
-
-
- def isSingleLink(self):
- link_type = re.match(self.__pattern, self.pyfile.url).group('type')
- return link_type in ("link", "frame")
-
-
- def requestFolderHome(self):
- return self.load(self.pyfile.url, decode=True)
-
-
- def removeHtmlCrap(self, content):
- patterns = (r'(type="hidden".*?(name=".*?")?.*?value=".*?")',
- r'display:none;">(.*?)</(div|span)>',
- r'<div\s+class="jdownloader"(.*?)</div>',
- r'<table class="global">(.*?)</table>',
- r'<iframe\s+style="display:none(.*?)</iframe>')
- for pattern in patterns:
- rexpr = re.compile(pattern, re.S)
- content = re.sub(rexpr, "", content)
- return content
-
-
- def isOnline(self):
- if "Your folder does not exist" in self.cleanedHtml:
- self.logDebug("File not m")
- return False
- return True
-
-
- def isProtected(self):
- form = re.search(r'<form.*?name.*?protected.*?>(.*?)</form>', self.cleanedHtml, re.S)
- if form is not None:
- content = form.group(1)
- for keyword in ("password", "captcha"):
- if keyword in content:
- self.protection_type = keyword
- self.logDebug("Links are %s protected" % self.protection_type)
- return True
- return False
-
-
- def getPackageInfo(self):
- m = re.search(self.NAME_PATTERN, self.html)
- if m:
- name = folder = m.group('N').strip()
- self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder))
- else:
- name = self.package.name
- folder = self.package.folder
- self.logDebug("Package info not m, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
- return name, folder
-
-
- def unlockProtection(self):
- postData = {}
-
- form = re.search(r'<form name="protected"(.*?)</form>', self.cleanedHtml, re.S).group(1)
-
- # Submit package password
- if "password" in form:
- password = self.getPassword()
- self.logDebug("Submitting password [%s] for protected links" % password)
- postData['password'] = password
-
- # Resolve anicaptcha
- if "anicaptcha" in form:
- self.logDebug("Captcha protected")
- captchaUri = re.search(r'src="(/temp/anicaptcha/[^"]+)', form).group(1)
- captcha = self.decryptCaptcha("http://ncrypt.in" + captchaUri)
- self.logDebug("Captcha resolved [%s]" % captcha)
- postData['captcha'] = captcha
-
- # Resolve recaptcha
- if "recaptcha" in form:
- self.logDebug("ReCaptcha protected")
- captcha_key = re.search(r'\?k=(.*?)"', form).group(1)
- self.logDebug("Resolving ReCaptcha with key [%s]" % captcha_key)
- recaptcha = ReCaptcha(self)
- challenge, response = recaptcha.challenge(captcha_key)
- postData['recaptcha_challenge_field'] = challenge
- postData['recaptcha_response_field'] = response
-
- # Resolve circlecaptcha
- if "circlecaptcha" in form:
- self.logDebug("CircleCaptcha protected")
- captcha_img_url = "http://ncrypt.in/classes/captcha/circlecaptcha.php"
- coords = self.decryptCaptcha(captcha_img_url, forceUser=True, imgtype="png", result_type='positional')
- self.logDebug("Captcha resolved, coords [%s]" % str(coords))
- postData['circle.x'] = coords[0]
- postData['circle.y'] = coords[1]
-
- # Unlock protection
- postData['submit_protected'] = 'Continue to folder'
- return self.load(self.pyfile.url, post=postData, decode=True)
-
-
- def handleErrors(self):
- if self.protection_type == "password":
- if "This password is invalid!" in self.cleanedHtml:
- self.logDebug("Incorrect password, please set right password on 'Edit package' form and retry")
- self.fail(_("Incorrect password, please set right password on 'Edit package' form and retry"))
-
- if self.protection_type == "captcha":
- if "The securitycheck was wrong!" in self.cleanedHtml:
- self.invalidCaptcha()
- self.retry()
- else:
- self.correctCaptcha()
-
-
- def handleLinkSource(self, link_source_type):
- # Check for JS engine
- require_js_engine = link_source_type in ("cnl2", "rsdf", "ccf", "dlc")
- if require_js_engine and not self.js:
- self.logDebug("No JS engine available, skip %s links" % link_source_type)
- return []
-
- # Select suitable handler
- if link_source_type == 'single':
- return self.handleSingleLink()
- if link_source_type == 'cnl2':
- return self.handleCNL2()
- elif link_source_type in ("rsdf", "ccf", "dlc"):
- return self.handleContainer(link_source_type)
- elif link_source_type == "web":
- return self.handleWebLinks()
- else:
- self.error('Unknown source type "%s" (this is probably a bug)' % link_source_type)
-
-
- def handleSingleLink(self):
- self.logDebug("Handling Single link")
- package_links = []
-
- # Decrypt single link
- decrypted_link = self.decryptLink(self.pyfile.url)
- if decrypted_link:
- package_links.append(decrypted_link)
-
- return package_links
-
-
- def handleCNL2(self):
- self.logDebug("Handling CNL2 links")
- package_links = []
-
- if 'cnl2_output' in self.cleanedHtml:
- try:
- (vcrypted, vjk) = self._getCipherParams()
- for (crypted, jk) in zip(vcrypted, vjk):
- package_links.extend(self._getLinks(crypted, jk))
- except Exception:
- self.fail(_("Unable to decrypt CNL2 links"))
-
- return package_links
-
-
- def handleContainer(self, link_source_type):
- self.logDebug("Handling Container links")
- package_links = []
-
- pattern = r'/container/(rsdf|dlc|ccf)/(\w+)'
- containersLinks = re.findall(pattern, self.html)
- self.logDebug("Decrypting %d Container links" % len(containersLinks))
- for containerLink in containersLinks:
- link = "http://ncrypt.in/container/%s/%s.%s" % (containerLink[0], containerLink[1], containerLink[0])
- package_links.append(link)
-
- return package_links
-
-
- def handleWebLinks(self):
- self.logDebug("Handling Web links")
- pattern = r'(http://ncrypt\.in/link-.*?=)'
- links = re.findall(pattern, self.html)
-
- package_links = []
- self.logDebug("Decrypting %d Web links" % len(links))
- for i, link in enumerate(links):
- self.logDebug("Decrypting Web link %d, %s" % (i + 1, link))
- decrypted_link = self.decryptLink(link)
- if decrypted_link:
- package_links.append(decrypted_link)
-
- return package_links
-
-
- def decryptLink(self, link):
- try:
- url = link.replace("link-", "frame-")
- link = self.load(url, just_header=True)['location']
- return link
- except Exception, detail:
- self.logDebug("Error decrypting link %s, %s" % (link, detail))
-
-
- def _getCipherParams(self):
- pattern = r'<input.*?name="%s".*?value="(.*?)"'
-
- # Get jk
- jk_re = pattern % NCryptIn.JK_KEY
- vjk = re.findall(jk_re, self.html)
-
- # Get crypted
- crypted_re = pattern % NCryptIn.CRYPTED_KEY
- vcrypted = re.findall(crypted_re, self.html)
-
- # Log and return
- self.logDebug("Detected %d crypted blocks" % len(vcrypted))
- return vcrypted, vjk
-
-
- def _getLinks(self, crypted, jk):
- # Get key
- jreturn = self.js.eval("%s f()" % jk)
- self.logDebug("JsEngine returns value [%s]" % jreturn)
- key = binascii.unhexlify(jreturn)
-
- # Decode crypted
- crypted = base64.standard_b64decode(crypted)
-
- # Decrypt
- Key = key
- IV = key
- obj = AES.new(Key, AES.MODE_CBC, IV)
- text = obj.decrypt(crypted)
-
- # Extract links
- text = text.replace("\x00", "").replace("\r", "")
- links = text.split("\n")
- links = filter(lambda x: x != "", links)
-
- # Log and return
- self.logDebug("Block has %d links" % len(links))
- return links
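
decryptLink above resolves an ncrypt.in "link-" URL by rewriting it to the "frame-" variant and reading the redirect target out of the response headers (self.load(..., just_header=True)['location']). A hedged sketch of the same idea with the requests library, which is an assumption for illustration and not what the plugin itself uses:

    import requests

    def resolve_ncrypt_link(link):
        url = link.replace("link-", "frame-")
        resp = requests.head(url, allow_redirects=False, timeout=30)
        # the frame page is expected to answer with a Location header
        # pointing at the real hoster URL
        return resp.headers.get("Location")
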
diff --git a/pyload/plugins/crypter/NetfolderIn.py b/pyload/plugins/crypter/NetfolderIn.py
deleted file mode 100644
index cc1a64f99..000000000
--- a/pyload/plugins/crypter/NetfolderIn.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class NetfolderIn(SimpleCrypter):
- __name = "NetfolderIn"
- __type = "crypter"
- __version = "0.72"
-
- __pattern = r'http://(?:www\.)?netfolder\.in/((?P<id1>\w+)/\w+|folder\.php\?folder_id=(?P<id2>\w+))'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """NetFolder.in decrypter plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("fragonib", "fragonib[AT]yahoo[DOT]es")]
-
-
- NAME_PATTERN = r'<div class="Text">Inhalt des Ordners <span.*>(?P<N>.+)</span></div>'
-
-
- def prepare(self):
- super(NetfolderIn, self).prepare()
-
- # Check for password protection
- if self.isPasswordProtected():
- self.html = self.submitPassword()
- if not self.html:
- self.fail(_("Incorrect password, please set right password on Add package form and retry"))
-
-
- def isPasswordProtected(self):
- if '<input type="password" name="password"' in self.html:
- self.logDebug("Links are password protected")
- return True
- return False
-
-
- def submitPassword(self):
- # Gather data
- try:
- m = re.match(self.__pattern, self.pyfile.url)
- id = max(m.group('id1'), m.group('id2'))
- except AttributeError:
- self.logDebug("Unable to get package id from url [%s]" % self.pyfile.url)
- return
- url = "http://netfolder.in/folder.php?folder_id=" + id
- password = self.getPassword()
-
- # Submit package password
- post = {'password': password, 'save': 'Absenden'}
- self.logDebug("Submitting password [%s] for protected links with id [%s]" % (password, id))
- html = self.load(url, {}, post)
-
- # Check for invalid password
- if '<div class="InPage_Error">' in html:
- self.logDebug("Incorrect password, please set right password on Edit package form and retry")
- return None
-
- return html
-
-
- def getLinks(self):
- links = re.search(r'name="list" value="(.*?)"', self.html).group(1).split(",")
- self.logDebug("Package has %d links" % len(links))
- return links
diff --git a/pyload/plugins/crypter/NosvideoCom.py b/pyload/plugins/crypter/NosvideoCom.py
deleted file mode 100644
index 02c279a57..000000000
--- a/pyload/plugins/crypter/NosvideoCom.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class NosvideoCom(SimpleCrypter):
- __name = "NosvideoCom"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?nosvideo\.com/\?v=\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Nosvideo.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("igel", "igelkun@myopera.com")]
-
-
- LINK_PATTERN = r'href="(http://(?:w{3}\.)?nosupload\.com/\?d=\w+)"'
- NAME_PATTERN = r'<[tT]itle>Watch (?P<N>.+?)<'
diff --git a/pyload/plugins/crypter/OneKhDe.py b/pyload/plugins/crypter/OneKhDe.py
deleted file mode 100644
index 9966df35b..000000000
--- a/pyload/plugins/crypter/OneKhDe.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.utils import html_unescape
-from pyload.plugins.Crypter import Crypter
-
-
-class OneKhDe(Crypter):
- __name = "OneKhDe"
- __type = "crypter"
- __version = "0.10"
-
- __pattern = r'http://(?:www\.)?1kh\.de/f/'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """1kh.de decrypter plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org")]
-
-
- def __init__(self, parent):
- Crypter.__init__(self, parent)
- self.parent = parent
-
-
- def file_exists(self):
- """ returns True or False
- """
- return True
-
-
- def proceed(self, url, location):
- url = self.parent.url
- self.html = self.load(url)
- link_ids = re.findall(r"<a id=\"DownloadLink_(\d*)\" href=\"http://1kh.de/", self.html)
- for id in link_ids:
- new_link = html_unescape(re.search("width=\"100%\" src=\"(.*)\"></iframe>", self.load("http://1kh.de/l/" + id)).group(1))
- self.urls.append(new_link)
diff --git a/pyload/plugins/crypter/OronCom.py b/pyload/plugins/crypter/OronCom.py
deleted file mode 100644
index 96fbac7e5..000000000
--- a/pyload/plugins/crypter/OronCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class OronCom(DeadCrypter):
- __name = "OronCom"
- __type = "crypter"
- __version = "0.11"
-
- __pattern = r'http://(?:www\.)?oron\.com/folder/\w+'
- __config = []
-
- __description = """Oron.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("DHMH", "webmaster@pcProfil.de")]
-
-
-getInfo = create_getInfo(OronCom)
diff --git a/pyload/plugins/crypter/PastebinCom.py b/pyload/plugins/crypter/PastebinCom.py
deleted file mode 100644
index d9a6238b0..000000000
--- a/pyload/plugins/crypter/PastebinCom.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class PastebinCom(SimpleCrypter):
- __name = "PastebinCom"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?pastebin\.com/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Pastebin.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- LINK_PATTERN = r'<div class="de\d+">(https?://[^ <]+)(?:[^<]*)</div>'
- NAME_PATTERN = r'<div class="paste_box_line1" title="(?P<N>[^"]+)">'
diff --git a/pyload/plugins/crypter/QuickshareCz.py b/pyload/plugins/crypter/QuickshareCz.py
deleted file mode 100644
index c850e124f..000000000
--- a/pyload/plugins/crypter/QuickshareCz.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from pyload.plugins.Crypter import Crypter
-
-
-class QuickshareCz(Crypter):
- __name = "QuickshareCz"
- __type = "crypter"
- __version = "0.10"
-
- __pattern = r'http://(?:www\.)?quickshare\.cz/slozka-\d+.*'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Quickshare.cz folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- FOLDER_PATTERN = r'<textarea[^>]*>(.*?)</textarea>'
- LINK_PATTERN = r'(http://www\.quickshare\.cz/\S+)'
-
-
- def decrypt(self, pyfile):
- html = self.load(pyfile.url)
-
- m = re.search(self.FOLDER_PATTERN, html, re.S)
- if m is None:
- self.error(_("FOLDER_PATTERN not found"))
- self.urls.extend(re.findall(self.LINK_PATTERN, m.group(1)))
diff --git a/pyload/plugins/crypter/RSLayerCom.py b/pyload/plugins/crypter/RSLayerCom.py
deleted file mode 100644
index 0578fc3cf..000000000
--- a/pyload/plugins/crypter/RSLayerCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class RSLayerCom(DeadCrypter):
- __name = "RSLayerCom"
- __type = "crypter"
- __version = "0.21"
-
- __pattern = r'http://(?:www\.)?rs-layer\.com/directory-'
- __config = []
-
- __description = """RS-Layer.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("hzpz", "")]
-
-
-getInfo = create_getInfo(RSLayerCom)
diff --git a/pyload/plugins/crypter/RapidfileshareNet.py b/pyload/plugins/crypter/RapidfileshareNet.py
deleted file mode 100644
index 8ce212afe..000000000
--- a/pyload/plugins/crypter/RapidfileshareNet.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSCrypter import XFSCrypter
-
-
-class RapidfileshareNet(XFSCrypter):
- __name = "RapidfileshareNet"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?rapidfileshare\.net/users/\w+/\d+/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Rapidfileshare.net folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "rapidfileshare.net"
diff --git a/pyload/plugins/crypter/RelinkUs.py b/pyload/plugins/crypter/RelinkUs.py
deleted file mode 100644
index c995a1f7d..000000000
--- a/pyload/plugins/crypter/RelinkUs.py
+++ /dev/null
@@ -1,293 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import base64
-import binascii
-import re
-import os
-
-from Crypto.Cipher import AES
-from pyload.plugins.Crypter import Crypter
-
-
-class RelinkUs(Crypter):
- __name = "RelinkUs"
- __type = "crypter"
- __version = "3.11"
-
- __pattern = r'http://(?:www\.)?relink\.us/(f/|((view|go)\.php\?id=))(?P<id>.+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Relink.us decrypter plugin"""
- __license = "GPLv3"
- __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es"),
- ("AndroKev", "neureither.kevin@gmail.com")]
-
-
- PREFERRED_LINK_SOURCES = ["cnl2", "dlc", "web"]
-
- OFFLINE_TOKEN = r'<title>Tattooside'
-
- PASSWORD_TOKEN = r'container_password.php'
- PASSWORD_ERROR_TOKEN = r'You have entered an incorrect password'
- PASSWORD_SUBMIT_URL = r'http://www.relink.us/container_password.php'
-
- CAPTCHA_TOKEN = r'container_captcha.php'
- CAPTCHA_ERROR_TOKEN = r'You have solved the captcha wrong'
- CAPTCHA_IMG_URL = r'http://www.relink.us/core/captcha/circlecaptcha.php'
- CAPTCHA_SUBMIT_URL = r'http://www.relink.us/container_captcha.php'
-
- FILE_TITLE_REGEX = r'<th>Title</th><td>(.*)</td></tr>'
- FILE_NOTITLE = r'No title'
-
- CNL2_FORM_REGEX = r'<form id="cnl_form-(.*?)</form>'
- CNL2_FORMINPUT_REGEX = r'<input.*?name="%s".*?value="(.*?)"'
- CNL2_JK_KEY = "jk"
- CNL2_CRYPTED_KEY = "crypted"
-
- DLC_LINK_REGEX = r'<a href=".*?" class="dlc_button" target="_blank">'
- DLC_DOWNLOAD_URL = r'http://www.relink.us/download.php'
-
- WEB_FORWARD_REGEX = r'getFile\(\'(?P<link>.+)\'\)'
- WEB_FORWARD_URL = r'http://www.relink.us/frame.php'
- WEB_LINK_REGEX = r'<iframe name="Container" height="100%" frameborder="no" width="100%" src="(?P<link>.+)"></iframe>'
-
-
- def setup(self):
- self.fileid = None
- self.package = None
- self.password = None
- self.captcha = False
-
-
- def decrypt(self, pyfile):
- # Init
- self.initPackage(pyfile)
-
- # Request package
- self.requestPackage()
-
- # Check for online
- if not self.isOnline():
- self.offline()
-
- # Check for protection
- if self.isPasswordProtected():
- self.unlockPasswordProtection()
- self.handleErrors()
-
- if self.isCaptchaProtected():
- self.captcha = True
- self.unlockCaptchaProtection()
- self.handleErrors()
-
- # Get package name and folder
- (package_name, folder_name) = self.getPackageInfo()
-
- # Extract package links
- package_links = []
- for sources in self.PREFERRED_LINK_SOURCES:
- package_links.extend(self.handleLinkSource(sources))
- if package_links: # use only first source which provides links
- break
- package_links = set(package_links)
-
- # Pack
- if package_links:
- self.packages = [(package_name, package_links, folder_name)]
-
-
- def initPackage(self, pyfile):
- self.fileid = re.match(self.__pattern, pyfile.url).group('id')
- self.package = pyfile.package()
- self.password = self.getPassword()
-
-
- def requestPackage(self):
- self.html = self.load(self.pyfile.url, decode=True)
-
-
- def isOnline(self):
- if self.OFFLINE_TOKEN in self.html:
- self.logDebug("File not found")
- return False
- return True
-
-
- def isPasswordProtected(self):
- if self.PASSWORD_TOKEN in self.html:
- self.logDebug("Links are password protected")
- return True
-
-
- def isCaptchaProtected(self):
- if self.CAPTCHA_TOKEN in self.html:
- self.logDebug("Links are captcha protected")
- return True
- return False
-
-
- def unlockPasswordProtection(self):
- self.logDebug("Submitting password [%s] for protected links" % self.password)
- passwd_url = self.PASSWORD_SUBMIT_URL + "?id=%s" % self.fileid
- passwd_data = {'id': self.fileid, 'password': self.password, 'pw': 'submit'}
- self.html = self.load(passwd_url, post=passwd_data, decode=True)
-
-
- def unlockCaptchaProtection(self):
- self.logDebug("Request user positional captcha resolving")
- captcha_img_url = self.CAPTCHA_IMG_URL + "?id=%s" % self.fileid
- coords = self.decryptCaptcha(captcha_img_url, forceUser=True, imgtype="png", result_type='positional')
- self.logDebug("Captcha resolved, coords [%s]" % str(coords))
- captcha_post_url = self.CAPTCHA_SUBMIT_URL + "?id=%s" % self.fileid
- captcha_post_data = {'button.x': coords[0], 'button.y': coords[1], 'captcha': 'submit'}
- self.html = self.load(captcha_post_url, post=captcha_post_data, decode=True)
-
-
- def getPackageInfo(self):
- name = folder = None
-
- # Try to get info from web
- m = re.search(self.FILE_TITLE_REGEX, self.html)
- if m is not None:
- title = m.group(1).strip()
- if not self.FILE_NOTITLE in title:
- name = folder = title
- self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder))
-
- # Fallback to defaults
- if not name or not folder:
- name = self.package.name
- folder = self.package.folder
- self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
-
- # Return package info
- return name, folder
-
-
- def handleErrors(self):
- if self.PASSWORD_ERROR_TOKEN in self.html:
- msg = "Incorrect password, please set right password on 'Edit package' form and retry"
- self.logDebug(msg)
- self.fail(_(msg))
-
- if self.captcha:
- if self.CAPTCHA_ERROR_TOKEN in self.html:
- self.invalidCaptcha()
- self.retry()
- else:
- self.correctCaptcha()
-
-
- def handleLinkSource(self, source):
- if source == 'cnl2':
- return self.handleCNL2Links()
- elif source == 'dlc':
- return self.handleDLCLinks()
- elif source == 'web':
- return self.handleWEBLinks()
- else:
- self.error('Unknown source type "%s" (this is probably a bug)' % source)
-
-
- def handleCNL2Links(self):
- self.logDebug("Search for CNL2 links")
- package_links = []
- m = re.search(self.CNL2_FORM_REGEX, self.html, re.S)
- if m is not None:
- cnl2_form = m.group(1)
- try:
- (vcrypted, vjk) = self._getCipherParams(cnl2_form)
- for (crypted, jk) in zip(vcrypted, vjk):
- package_links.extend(self._getLinks(crypted, jk))
- except Exception:
- self.logDebug("Unable to decrypt CNL2 links")
- return package_links
-
-
- def handleDLCLinks(self):
- self.logDebug("Search for DLC links")
- package_links = []
- m = re.search(self.DLC_LINK_REGEX, self.html)
- if m is not None:
- container_url = self.DLC_DOWNLOAD_URL + "?id=%s&dlc=1" % self.fileid
- self.logDebug("Downloading DLC container link [%s]" % container_url)
- try:
- dlc = self.load(container_url)
- dlc_filename = self.fileid + ".dlc"
- dlc_filepath = os.path.join(self.config['general']['download_folder'], dlc_filename)
- with open(dlc_filepath, "wb") as f:
- f.write(dlc)
- package_links.append(dlc_filepath)
- except Exception:
- self.fail("Unable to download DLC container")
- return package_links
-
-
- def handleWEBLinks(self):
- self.logDebug("Search for WEB links")
-
- package_links = []
- params = re.findall(self.WEB_FORWARD_REGEX, self.html)
-
- self.logDebug("Decrypting %d Web links" % len(params))
-
- for index, param in enumerate(params):
- try:
- url = self.WEB_FORWARD_URL + "?%s" % param
-
- self.logDebug("Decrypting Web link %d, %s" % (index + 1, url))
-
- res = self.load(url, decode=True)
- link = re.search(self.WEB_LINK_REGEX, res).group('link')
-
- package_links.append(link)
-
- except Exception, detail:
- self.logDebug("Error decrypting Web link %s, %s" % (index, detail))
-
- self.setWait(4)
- self.wait()
-
- return package_links
-
-
- def _getCipherParams(self, cnl2_form):
- # Get jk
- jk_re = self.CNL2_FORMINPUT_REGEX % self.CNL2_JK_KEY
- vjk = re.findall(jk_re, cnl2_form, re.I)
-
- # Get crypted
- crypted_re = self.CNL2_FORMINPUT_REGEX % RelinkUs.CNL2_CRYPTED_KEY
- vcrypted = re.findall(crypted_re, cnl2_form, re.I)
-
- # Log and return
- self.logDebug("Detected %d crypted blocks" % len(vcrypted))
- return vcrypted, vjk
-
-
- def _getLinks(self, crypted, jk):
- # Get key
- jreturn = self.js.eval("%s f()" % jk)
- self.logDebug("JsEngine returns value [%s]" % jreturn)
- key = binascii.unhexlify(jreturn)
-
- # Decode crypted
- crypted = base64.standard_b64decode(crypted)
-
- # Decrypt
- Key = key
- IV = key
- obj = AES.new(Key, AES.MODE_CBC, IV)
- text = obj.decrypt(crypted)
-
- # Extract links
- text = text.replace("\x00", "").replace("\r", "")
- links = text.split("\n")
- links = filter(lambda x: x != "", links)
-
- # Log and return
- self.logDebug("Package has %d links" % len(links))
- return links
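The Click'n'Load 2 handling in _getLinks() above reduces to AES-CBC where the hex value returned by the page's jk JavaScript function serves as both key and IV. A minimal standalone sketch of that decryption step (assuming PyCrypto is installed and jk_hex already holds the evaluated hex string, which the plugin obtains via self.js.eval):

import base64
import binascii

from Crypto.Cipher import AES


def decrypt_cnl2(crypted_b64, jk_hex):
    # the 32-char hex string becomes a 16-byte AES key, reused as IV
    key = binascii.unhexlify(jk_hex)
    data = base64.standard_b64decode(crypted_b64)
    text = AES.new(key, AES.MODE_CBC, key).decrypt(data)
    # drop null padding and carriage returns, return one URL per non-empty line
    text = text.replace("\x00", "").replace("\r", "")
    return [line for line in text.split("\n") if line]

The same routine appears again in ShareLinksBiz._getLinks() further down; only the way the crypted blob and jk value are scraped from the page differs.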
diff --git a/pyload/plugins/crypter/SafelinkingNet.py b/pyload/plugins/crypter/SafelinkingNet.py
deleted file mode 100644
index 0ebe1a4a6..000000000
--- a/pyload/plugins/crypter/SafelinkingNet.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from BeautifulSoup import BeautifulSoup
-
-from pyload.utils import json_loads
-from pyload.plugins.Crypter import Crypter
-from pyload.plugins.internal.captcha import SolveMedia
-
-
-class SafelinkingNet(Crypter):
- __name = "SafelinkingNet"
- __type = "crypter"
- __version = "0.11"
-
- __pattern = r'https?://(?:www\.)?safelinking\.net/([pd])/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Safelinking.net decrypter plugin"""
- __license = "GPLv3"
- __authors = [("quareevo", "quareevo@arcor.de")]
-
-
- SOLVEMEDIA_PATTERN = "solvemediaApiKey = '([\w.-]+)';"
-
-
- def decrypt(self, pyfile):
- url = pyfile.url
-
- if re.match(self.__pattern, url).group(1) == "d":
-
- header = self.load(url, just_header=True)
- if 'location' in header:
- self.urls = [header['location']]
- else:
- self.error(_("Couldn't find forwarded Link"))
-
- else:
- postData = {"post-protect": "1"}
-
- if "link-password" in self.html:
- postData['link-password'] = self.getPassword()
-
- if "altcaptcha" in self.html:
- for _i in xrange(5):
- m = re.search(self.SOLVEMEDIA_PATTERN, self.html)
- if m:
- captchaKey = m.group(1)
- captcha = SolveMedia(self)
- captchaProvider = "Solvemedia"
- else:
- self.fail(_("Error parsing captcha"))
-
- challenge, response = captcha.challenge(captchaKey)
- postData['adcopy_challenge'] = challenge
- postData['adcopy_response'] = response
-
- self.html = self.load(url, post=postData)
- if "The password you entered was incorrect" in self.html:
- self.fail(_("Incorrect Password"))
- if not "The CAPTCHA code you entered was wrong" in self.html:
- break
-
- pyfile.package().password = ""
- soup = BeautifulSoup(self.html)
- scripts = soup.findAll("script")
- for s in scripts:
- if "d_links" in s.text:
- break
- m = re.search('d_links":(\[.*?\])', s.text)
- if m:
- linkDict = json_loads(m.group(1))
- for link in linkDict:
- if not "http://" in link['full']:
- self.urls.append("https://safelinking.net/d/" + link['full'])
- else:
- self.urls.append(link['full'])
diff --git a/pyload/plugins/crypter/SecuredIn.py b/pyload/plugins/crypter/SecuredIn.py
deleted file mode 100644
index b959d741f..000000000
--- a/pyload/plugins/crypter/SecuredIn.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class SecuredIn(DeadCrypter):
- __name = "SecuredIn"
- __type = "crypter"
- __version = "0.21"
-
- __pattern = r'http://(?:www\.)?secured\.in/download-[\d]+-\w{8}\.html'
- __config = []
-
- __description = """Secured.in decrypter plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
-getInfo = create_getInfo(SecuredIn)
diff --git a/pyload/plugins/crypter/SexuriaCom.py b/pyload/plugins/crypter/SexuriaCom.py
deleted file mode 100644
index 7f8df5283..000000000
--- a/pyload/plugins/crypter/SexuriaCom.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Crypter import Crypter
-
-
-class SexuriaCom(Crypter):
- __name = "SexuriaCom"
- __type = "crypter"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?sexuria\.com/(v1/)?(Pornos_Kostenlos_.+?_(\d+)\.html|dl_links_\d+_\d+\.html|id=\d+\&part=\d+\&link=\d+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Sexuria.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("NETHead", "NETHead.AT.gmx.DOT.net")]
-
-
- PATTERN_SUPPORTED_MAIN = re.compile(r'http://(www\.)?sexuria\.com/(v1/)?Pornos_Kostenlos_.+?_(\d+)\.html', flags=re.I)
- PATTERN_SUPPORTED_CRYPT = re.compile(r'http://(www\.)?sexuria\.com/(v1/)?dl_links_\d+_(?P<ID>\d+)\.html', flags=re.I)
- PATTERN_SUPPORTED_REDIRECT = re.compile(r'http://(www\.)?sexuria\.com/out\.php\?id=(?P<ID>\d+)\&part=\d+\&link=\d+', flags=re.I)
- PATTERN_TITLE = re.compile(r'<title> - (?P<TITLE>.*) Sexuria - Kostenlose Pornos - Rapidshare XXX Porn</title>', flags=re.I)
- PATTERN_PASSWORD = re.compile(r'<strong>Passwort: </strong></div></td>.*?bgcolor="#EFEFEF">(?P<PWD>.*?)</td>', flags=re.I | re.S)
- PATTERN_DL_LINK_PAGE = re.compile(r'"(dl_links_\d+_\d+\.html)"', flags=re.I)
- PATTERN_REDIRECT_LINKS = re.compile(r'value="(http://sexuria\.com/out\.php\?id=\d+\&part=\d+\&link=\d+)" readonly', flags=re.I)
-
-
- def decrypt(self, pyfile):
- # Init
- self.pyfile = pyfile
- self.package = pyfile.package()
-
- # Get package links
- package_name, self.links, folder_name, package_pwd = self.decryptLinks(self.pyfile.url)
- self.packages = [(package_name, self.links, folder_name)]
-
-
- def decryptLinks(self, url):
- linklist = []
- name = self.package.name
- folder = self.package.folder
- password = None
-
- if re.match(self.PATTERN_SUPPORTED_MAIN, url):
- # Processing main page
- html = self.load(url)
- links = re.findall(self.PATTERN_DL_LINK_PAGE, html)
- for link in links:
- linklist.append("http://sexuria.com/v1/" + link)
-
- elif re.match(self.PATTERN_SUPPORTED_REDIRECT, url):
- # Processing direct redirect link (out.php), redirecting to main page
- id = re.search(self.PATTERN_SUPPORTED_REDIRECT, url).group('ID')
- if id:
- linklist.append("http://sexuria.com/v1/Pornos_Kostenlos_liebe_%s.html" % id)
-
- elif re.match(self.PATTERN_SUPPORTED_CRYPT, url):
- # Extract info from main file
- id = re.search(self.PATTERN_SUPPORTED_CRYPT, url).group('ID')
- html = self.load("http://sexuria.com/v1/Pornos_Kostenlos_info_%s.html" % id, decode=True)
-
- title = re.search(self.PATTERN_TITLE, html).group('TITLE').strip()
- if title:
- name = folder = title
- self.logDebug("Package info found, name [%s] and folder [%s]" % (name, folder))
-
- pwd = re.search(self.PATTERN_PASSWORD, html).group('PWD')
- if pwd:
- password = pwd.strip()
- self.logDebug("Password info [%s] found" % password)
-
- # Process link (dl_link)
- html = self.load(url)
- links = re.findall(self.PATTERN_REDIRECT_LINKS, html)
- if len(links) == 0:
- self.LogError("Broken for link %s" % link)
- else:
- for link in links:
- link = link.replace("http://sexuria.com/", "http://www.sexuria.com/")
- finallink = self.load(link, just_header=True)['location']
- if not finallink or "sexuria.com/" in finallink:
- self.LogError("Broken for link %s" % link)
- else:
- linklist.append(finallink)
-
- # Debug log
- self.logDebug("%d supported links" % len(linklist))
- for i, link in enumerate(linklist):
- self.logDebug("Supported link %d, %s" % (i + 1, link))
-
- return name, linklist, folder, password
diff --git a/pyload/plugins/crypter/ShareLinksBiz.py b/pyload/plugins/crypter/ShareLinksBiz.py
deleted file mode 100644
index 0a8eeacbd..000000000
--- a/pyload/plugins/crypter/ShareLinksBiz.py
+++ /dev/null
@@ -1,286 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import base64
-import binascii
-import re
-
-from Crypto.Cipher import AES
-from pyload.plugins.Crypter import Crypter
-
-
-class ShareLinksBiz(Crypter):
- __name = "ShareLinksBiz"
- __type = "crypter"
- __version = "1.14"
-
- __pattern = r'http://(?:www\.)?(share-links|s2l)\.biz/(?P<ID>_?\w+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Share-Links.biz decrypter plugin"""
- __license = "GPLv3"
- __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es")]
-
-
- def setup(self):
- self.baseUrl = None
- self.fileId = None
- self.package = None
- self.captcha = False
-
-
- def decrypt(self, pyfile):
- # Init
- self.initFile(pyfile)
-
- # Request package
- url = self.baseUrl + '/' + self.fileId
- self.html = self.load(url, decode=True)
-
- # Unblock server (load all images)
- self.unblockServer()
-
- # Check for protection
- if self.isPasswordProtected():
- self.unlockPasswordProtection()
- self.handleErrors()
-
- if self.isCaptchaProtected():
- self.captcha = True
- self.unlockCaptchaProtection()
- self.handleErrors()
-
- # Extract package links
- package_links = []
- package_links.extend(self.handleWebLinks())
- package_links.extend(self.handleContainers())
- package_links.extend(self.handleCNL2())
- package_links = set(package_links)
-
- # Get package info
- package_name, package_folder = self.getPackageInfo()
-
- # Pack
- self.packages = [(package_name, package_links, package_folder)]
-
-
- def initFile(self, pyfile):
- url = pyfile.url
- if 's2l.biz' in url:
- url = self.load(url, just_header=True)['location']
- self.baseUrl = "http://www.%s.biz" % re.match(self.__pattern, url).group(1)
- self.fileId = re.match(self.__pattern, url).group('ID')
- self.package = pyfile.package()
-
-
- def isOnline(self):
- if "No usable content was found" in self.html:
- self.logDebug("File not found")
- return False
- return True
-
-
- def isPasswordProtected(self):
- if re.search(r'''<form.*?id="passwordForm".*?>''', self.html):
- self.logDebug("Links are protected")
- return True
- return False
-
-
- def isCaptchaProtected(self):
- if '<map id="captchamap"' in self.html:
- self.logDebug("Links are captcha protected")
- return True
- return False
-
-
- def unblockServer(self):
- imgs = re.findall(r"(/template/images/.*?\.gif)", self.html)
- for img in imgs:
- self.load(self.baseUrl + img)
-
-
- def unlockPasswordProtection(self):
- password = self.getPassword()
- self.logDebug("Submitting password [%s] for protected links" % password)
- post = {"password": password, 'login': 'Submit form'}
- url = self.baseUrl + '/' + self.fileId
- self.html = self.load(url, post=post, decode=True)
-
-
- def unlockCaptchaProtection(self):
- # Get captcha map
- captchaMap = self._getCaptchaMap()
- self.logDebug("Captcha map with [%d] positions" % len(captchaMap.keys()))
-
- # Request user for captcha coords
- m = re.search(r'<img src="/captcha.gif\?d=(.*?)&amp;PHPSESSID=(.*?)&amp;legend=1"', self.html)
- captchaUrl = self.baseUrl + '/captcha.gif?d=%s&PHPSESSID=%s' % (m.group(1), m.group(2))
- self.logDebug("Waiting user for correct position")
- coords = self.decryptCaptcha(captchaUrl, forceUser=True, imgtype="gif", result_type='positional')
- self.logDebug("Captcha resolved, coords [%s]" % str(coords))
-
- # Resolve captcha
- href = self._resolveCoords(coords, captchaMap)
- if href is None:
- self.invalidCaptcha()
- self.retry(wait_time=5)
- url = self.baseUrl + href
- self.html = self.load(url, decode=True)
-
-
- def _getCaptchaMap(self):
- mapp = {}
- for m in re.finditer(r'<area shape="rect" coords="(.*?)" href="(.*?)"', self.html):
- rect = eval('(' + m.group(1) + ')')
- href = m.group(2)
- mapp[rect] = href
- return mapp
-
-
- def _resolveCoords(self, coords, captchaMap):
- x, y = coords
- for rect, href in captchaMap.iteritems():
- x1, y1, x2, y2 = rect
- if (x >= x1 and x <= x2) and (y >= y1 and y <= y2):
- return href
-
-
- def handleErrors(self):
- if "The inserted password was wrong" in self.html:
- self.logDebug("Incorrect password, please set right password on 'Edit package' form and retry")
- self.fail(_("Incorrect password, please set right password on 'Edit package' form and retry"))
-
- if self.captcha:
- if "Your choice was wrong" in self.html:
- self.invalidCaptcha()
- self.retry(wait_time=5)
- else:
- self.correctCaptcha()
-
-
- def getPackageInfo(self):
- name = folder = None
-
- # Extract from web package header
- title_re = r'<h2><img.*?/>(.*)</h2>'
- m = re.search(title_re, self.html, re.S)
- if m is not None:
- title = m.group(1).strip()
- if 'unnamed' not in title:
- name = folder = title
- self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder))
-
- # Fallback to defaults
- if not name or not folder:
- name = self.package.name
- folder = self.package.folder
- self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
-
- # Return package info
- return name, folder
-
-
- def handleWebLinks(self):
- package_links = []
- self.logDebug("Handling Web links")
-
- #@TODO: Gather paginated web links
- pattern = r'javascript:_get\(\'(.*?)\', \d+, \'\'\)'
- ids = re.findall(pattern, self.html)
- self.logDebug("Decrypting %d Web links" % len(ids))
- for i, ID in enumerate(ids):
- try:
- self.logDebug("Decrypting Web link %d, [%s]" % (i + 1, ID))
-
- dwLink = self.baseUrl + "/get/lnk/" + ID
- res = self.load(dwLink)
-
- code = re.search(r'frm/(\d+)', res).group(1)
- fwLink = self.baseUrl + "/get/frm/" + code
- res = self.load(fwLink)
-
- jscode = re.search(r'<script language="javascript">\s*eval\((.*)\)\s*</script>', res, re.S).group(1)
- jscode = self.js.eval("f = %s" % jscode)
- jslauncher = "window=''; parent={frames:{Main:{location:{href:''}}},location:''}; %s; parent.frames.Main.location.href"
-
- dlLink = self.js.eval(jslauncher % jscode)
-
- self.logDebug("JsEngine returns value [%s] for redirection link" % dlLink)
-
- package_links.append(dlLink)
- except Exception, detail:
- self.logDebug("Error decrypting Web link [%s], %s" % (ID, detail))
- return package_links
-
-
- def handleContainers(self):
- package_links = []
- self.logDebug("Handling Container links")
-
- pattern = r'javascript:_get\(\'(.*?)\', 0, \'(rsdf|ccf|dlc)\'\)'
- containersLinks = re.findall(pattern, self.html)
- self.logDebug("Decrypting %d Container links" % len(containersLinks))
- for containerLink in containersLinks:
- link = "%s/get/%s/%s" % (self.baseUrl, containerLink[1], containerLink[0])
- package_links.append(link)
- return package_links
-
-
- def handleCNL2(self):
- package_links = []
- self.logDebug("Handling CNL2 links")
-
- if '/lib/cnl2/ClicknLoad.swf' in self.html:
- try:
- (crypted, jk) = self._getCipherParams()
- package_links.extend(self._getLinks(crypted, jk))
- except Exception:
- self.fail(_("Unable to decrypt CNL2 links"))
- return package_links
-
-
- def _getCipherParams(self):
- # Request CNL2
- code = re.search(r'ClicknLoad.swf\?code=(.*?)"', self.html).group(1)
- url = "%s/get/cnl2/%s" % (self.baseUrl, code)
- res = self.load(url)
- params = res.split(";;")
-
- # Get jk
- strlist = list(base64.standard_b64decode(params[1]))
- strlist.reverse()
- jk = ''.join(strlist)
-
- # Get crypted
- strlist = list(base64.standard_b64decode(params[2]))
- strlist.reverse()
- crypted = ''.join(strlist)
-
- # Log and return
- return crypted, jk
-
-
- def _getLinks(self, crypted, jk):
- # Get key
- jreturn = self.js.eval("%s f()" % jk)
- self.logDebug("JsEngine returns value [%s]" % jreturn)
- key = binascii.unhexlify(jreturn)
-
- # Decode crypted
- crypted = base64.standard_b64decode(crypted)
-
- # Decrypt
- Key = key
- IV = key
- obj = AES.new(Key, AES.MODE_CBC, IV)
- text = obj.decrypt(crypted)
-
- # Extract links
- text = text.replace("\x00", "").replace("\r", "")
- links = text.split("\n")
- links = filter(lambda x: x != "", links)
-
- # Log and return
- self.logDebug("Block has %d links" % len(links))
- return links
diff --git a/pyload/plugins/crypter/SharingmatrixCom.py b/pyload/plugins/crypter/SharingmatrixCom.py
deleted file mode 100644
index 72b268173..000000000
--- a/pyload/plugins/crypter/SharingmatrixCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class SharingmatrixCom(DeadCrypter):
- __name = "SharingmatrixCom"
- __type = "crypter"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?sharingmatrix\.com/folder/\w+'
-
- __description = """Sharingmatrix.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(SharingmatrixCom)
diff --git a/pyload/plugins/crypter/SpeedLoadOrg.py b/pyload/plugins/crypter/SpeedLoadOrg.py
deleted file mode 100644
index 5c88931b4..000000000
--- a/pyload/plugins/crypter/SpeedLoadOrg.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class SpeedLoadOrg(DeadCrypter):
- __name = "SpeedLoadOrg"
- __type = "crypter"
- __version = "0.30"
-
- __pattern = r'http://(?:www\.)?speedload\.org/(\d+~f$|folder/\d+/)'
- __config = []
-
- __description = """Speedload decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(SpeedLoadOrg)
diff --git a/pyload/plugins/crypter/StealthTo.py b/pyload/plugins/crypter/StealthTo.py
deleted file mode 100644
index 1fdf1fd3b..000000000
--- a/pyload/plugins/crypter/StealthTo.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class StealthTo(DeadCrypter):
- __name = "StealthTo"
- __type = "crypter"
- __version = "0.20"
-
- __pattern = r'http://(?:www\.)?stealth\.to/folder/.+'
- __config = []
-
- __description = """Stealth.to decrypter plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org")]
-
-
-getInfo = create_getInfo(StealthTo)
diff --git a/pyload/plugins/crypter/TnyCz.py b/pyload/plugins/crypter/TnyCz.py
deleted file mode 100644
index 435601acc..000000000
--- a/pyload/plugins/crypter/TnyCz.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-import re
-
-
-class TnyCz(SimpleCrypter):
- __name = "TnyCz"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?tny\.cz/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Tny.cz decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'<title>(?P<N>.+) - .+</title>'
-
-
- def getLinks(self):
- m = re.search(r'<a id=\'save_paste\' href="(.+save\.php\?hash=.+)">', self.html)
- return re.findall(".+", self.load(m.group(1), decode=True)) if m else None
diff --git a/pyload/plugins/crypter/TrailerzoneInfo.py b/pyload/plugins/crypter/TrailerzoneInfo.py
deleted file mode 100644
index 67d494301..000000000
--- a/pyload/plugins/crypter/TrailerzoneInfo.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class TrailerzoneInfo(DeadCrypter):
- __name = "TrailerzoneInfo"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?trailerzone\.info/.*?'
- __config = []
-
- __description = """TrailerZone.info decrypter plugin"""
- __license = "GPLv3"
- __authors = [("godofdream", "soilfiction@gmail.com")]
-
-
-getInfo = create_getInfo(TrailerzoneInfo)
diff --git a/pyload/plugins/crypter/TurbobitNet.py b/pyload/plugins/crypter/TurbobitNet.py
deleted file mode 100644
index 065876d3c..000000000
--- a/pyload/plugins/crypter/TurbobitNet.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-from pyload.utils import json_loads
-
-
-class TurbobitNet(SimpleCrypter):
- __name = "TurbobitNet"
- __type = "crypter"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?turbobit\.net/download/folder/(?P<ID>\w+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Turbobit.net folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'src=\'/js/lib/grid/icon/folder.png\'> <span>(?P<N>.+?)</span>'
-
-
- def _getLinks(self, id, page=1):
- gridFile = self.load("http://turbobit.net/downloadfolder/gridFile",
- get={"rootId": id, "rows": 200, "page": page}, decode=True)
- grid = json_loads(gridFile)
-
- if grid['rows']:
- for i in grid['rows']:
- yield i['id']
- for id in self._getLinks(id, page + 1):
- yield id
- else:
- return
-
-
- def getLinks(self):
- id = re.match(self.__pattern, self.pyfile.url).group("ID")
- fixurl = lambda id: "http://turbobit.net/%s.html" % id
- return map(fixurl, self._getLinks(id))
diff --git a/pyload/plugins/crypter/TusfilesNet.py b/pyload/plugins/crypter/TusfilesNet.py
deleted file mode 100644
index fbd5de8d3..000000000
--- a/pyload/plugins/crypter/TusfilesNet.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import math
-import re
-from urlparse import urljoin
-
-from pyload.plugins.internal.XFSCrypter import XFSCrypter
-
-
-class TusfilesNet(XFSCrypter):
- __name = "TusfilesNet"
- __type = "crypter"
- __version = "0.07"
-
- __pattern = r'https?://(?:www\.)?tusfiles\.net/go/(?P<ID>\w+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Tusfiles.net folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- HOSTER_DOMAIN = "tusfiles.net"
-
- PAGES_PATTERN = r'>\((\d+) \w+\)<'
-
- URL_REPLACEMENTS = [(__pattern + ".*", r'https://www.tusfiles.net/go/\g<ID>/')]
-
-
- def loadPage(self, page_n):
- return self.load(urljoin(self.pyfile.url, str(page_n)), decode=True)
-
-
- def handleMultiPages(self):
- pages = re.search(self.PAGES_PATTERN, self.html)
- if pages:
- pages = int(math.ceil(int(pages.group(1)) / 25.0))
- else:
- return
-
- for p in xrange(2, pages + 1):
- self.html = self.loadPage(p)
- self.links += self.getLinks()
diff --git a/pyload/plugins/crypter/UlozTo.py b/pyload/plugins/crypter/UlozTo.py
deleted file mode 100644
index 1d8575c18..000000000
--- a/pyload/plugins/crypter/UlozTo.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-from pyload.plugins.Crypter import Crypter
-
-
-class UlozTo(Crypter):
- __name = "UlozTo"
- __type = "crypter"
- __version = "0.20"
-
- __pattern = r'http://(?:www\.)?(uloz\.to|ulozto\.(cz|sk|net)|bagruj\.cz|zachowajto\.pl)/(m|soubory)/.*'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Uloz.to folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- FOLDER_PATTERN = r'<ul class="profile_files">(.*?)</ul>'
- LINK_PATTERN = r'<br /><a href="/([^"]+)">[^<]+</a>'
- NEXT_PAGE_PATTERN = r'<a class="next " href="/([^"]+)">&nbsp;</a>'
-
-
- def decrypt(self, pyfile):
- html = self.load(pyfile.url)
-
- new_links = []
- for i in xrange(1, 100):
- self.logInfo(_("Fetching links from page %i") % i)
- m = re.search(self.FOLDER_PATTERN, html, re.S)
- if m is None:
- self.error(_("FOLDER_PATTERN not found"))
-
- new_links.extend(re.findall(self.LINK_PATTERN, m.group(1)))
- m = re.search(self.NEXT_PAGE_PATTERN, html)
- if m:
- html = self.load("http://ulozto.net/" + m.group(1))
- else:
- break
- else:
- self.logInfo(_("Limit of 99 pages reached, aborting"))
-
- if new_links:
- self.urls = ["http://ulozto.net/%s" % s for s in new_links]
diff --git a/pyload/plugins/crypter/UploadableCh.py b/pyload/plugins/crypter/UploadableCh.py
deleted file mode 100644
index cff7f2c73..000000000
--- a/pyload/plugins/crypter/UploadableCh.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class UploadableCh(SimpleCrypter):
- __name = "UploadableCh"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?uploadable\.ch/list/\w+'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Uploadable.ch folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("guidobelix", "guidobelix@hotmail.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- LINK_PATTERN = r'"(.+?)" class="icon_zipfile">'
- NAME_PATTERN = r'<div class="folder"><span>&nbsp;</span>(?P<N>.+?)</div>'
- OFFLINE_PATTERN = r'We are sorry... The URL you entered cannot be found on the server.'
- TEMP_OFFLINE_PATTERN = r'<div class="icon_err">'
diff --git a/pyload/plugins/crypter/UploadedTo.py b/pyload/plugins/crypter/UploadedTo.py
deleted file mode 100644
index bce34f743..000000000
--- a/pyload/plugins/crypter/UploadedTo.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class UploadedTo(SimpleCrypter):
- __name = "UploadedTo"
- __type = "crypter"
- __version = "0.42"
-
- __pattern = r'http://(?:www\.)?(uploaded|ul)\.(to|net)/(f|folder|list)/(?P<id>\w+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """UploadedTo decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- PLAIN_PATTERN = r'<small class="date"><a href="(?P<plain>[\w/]+)" onclick='
- NAME_PATTERN = r'<title>(?P<N>.+?)<'
-
-
- def getLinks(self):
- m = re.search(self.PLAIN_PATTERN, self.html)
- if m is None:
- self.error(_("PLAIN_PATTERN not found"))
-
- plain_link = urljoin("http://uploaded.net/", m.group('plain'))
- return self.load(plain_link).split('\n')[:-1]
diff --git a/pyload/plugins/crypter/WiiReloadedOrg.py b/pyload/plugins/crypter/WiiReloadedOrg.py
deleted file mode 100644
index 333b2d316..000000000
--- a/pyload/plugins/crypter/WiiReloadedOrg.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class WiiReloadedOrg(DeadCrypter):
- __name = "WiiReloadedOrg"
- __type = "crypter"
- __version = "0.11"
-
- __pattern = r'http://(?:www\.)?wii-reloaded\.org/protect/get\.php\?i=.+'
- __config = []
-
- __description = """Wii-Reloaded.org decrypter plugin"""
- __license = "GPLv3"
- __authors = [("hzpz", "")]
-
-
-getInfo = create_getInfo(WiiReloadedOrg)
diff --git a/pyload/plugins/crypter/WuploadCom.py b/pyload/plugins/crypter/WuploadCom.py
deleted file mode 100644
index f8b695fee..000000000
--- a/pyload/plugins/crypter/WuploadCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadCrypter import DeadCrypter, create_getInfo
-
-
-class WuploadCom(DeadCrypter):
- __name = "WuploadCom"
- __type = "crypter"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?wupload\.com/folder/\w+'
-
- __description = """Wupload.com folder decrypter plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(WuploadCom)
diff --git a/pyload/plugins/crypter/XFileSharingPro.py b/pyload/plugins/crypter/XFileSharingPro.py
deleted file mode 100644
index 616bdb193..000000000
--- a/pyload/plugins/crypter/XFileSharingPro.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.XFSCrypter import XFSCrypter
-
-
-class XFileSharingPro(XFSCrypter):
- __name = "XFileSharingPro"
- __type = "crypter"
- __version = "0.03"
-
- __pattern = r'^unmatchable$'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """XFileSharingPro dummy folder decrypter plugin for hook"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- def _log(self, type, args):
- msg = " | ".join([str(a).strip() for a in args if a])
- logger = getattr(self.log, type)
- logger("%s: %s: %s" % (self.__name, self.HOSTER_NAME, msg or _("%s MARK" % type.upper())))
-
-
- def init(self):
- super(XFileSharingPro, self).init()
-
- self.__pattern = self.core.pluginManager.crypterPlugins[self.__name]['pattern']
-
- self.HOSTER_DOMAIN = re.match(self.__pattern, self.pyfile.url).group(1).lower()
- self.HOSTER_NAME = "".join([str.capitalize() for str in self.HOSTER_DOMAIN.split('.')])
-
- account = self.core.accountManager.getAccountPlugin(self.HOSTER_NAME)
-
- if account and account.canUse():
- self.account = account
- elif self.account:
- self.account.HOSTER_DOMAIN = self.HOSTER_DOMAIN
- else:
- return
-
- self.user, data = self.account.selectAccount()
- self.req = self.account.getAccountRequest(self.user)
- self.premium = self.account.isPremium(self.user)
diff --git a/pyload/plugins/crypter/XupPl.py b/pyload/plugins/crypter/XupPl.py
deleted file mode 100644
index 0a01a6c36..000000000
--- a/pyload/plugins/crypter/XupPl.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Crypter import Crypter
-
-
-class XupPl(Crypter):
- __name = "XupPl"
- __type = "crypter"
- __version = "0.10"
-
- __pattern = r'https?://(?:[^/]*\.)?xup\.pl/.*'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Xup.pl decrypter plugin"""
- __license = "GPLv3"
- __authors = [("z00nx", "z00nx0@gmail.com")]
-
-
- def decrypt(self, pyfile):
- header = self.load(pyfile.url, just_header=True)
- if 'location' in header:
- self.urls = [header['location']]
- else:
- self.fail(_("Unable to find link"))
diff --git a/pyload/plugins/crypter/YoutubeBatch.py b/pyload/plugins/crypter/YoutubeBatch.py
deleted file mode 100644
index c7758982a..000000000
--- a/pyload/plugins/crypter/YoutubeBatch.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-
-from pyload.utils import json_loads
-from pyload.plugins.Crypter import Crypter
-from pyload.utils import safe_join
-
-
-class YoutubeBatch(Crypter):
- __name = "YoutubeBatch"
- __type = "crypter"
- __version = "1.01"
-
- __pattern = r'https?://(?:www\.|m\.)?youtube\.com/(?P<TYPE>user|playlist|view_play_list)(/|.*?[?&](?:list|p)=)(?P<ID>[\w-]+)'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True),
- ("likes", "bool", "Grab user (channel) liked videos", False),
- ("favorites", "bool", "Grab user (channel) favorite videos", False),
- ("uploads", "bool", "Grab channel unplaylisted videos", True)]
-
- __description = """Youtube.com channel & playlist decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- API_KEY = "AIzaSyCKnWLNlkX-L4oD1aEzqqhRw1zczeD6_k0"
-
-
- def api_response(self, ref, req):
- req.update({"key": self.API_KEY})
- url = urljoin("https://www.googleapis.com/youtube/v3/", ref)
- page = self.load(url, get=req)
- return json_loads(page)
-
-
- def getChannel(self, user):
- channels = self.api_response("channels", {"part": "id,snippet,contentDetails", "forUsername": user, "maxResults": "50"})
- if channels['items']:
- channel = channels['items'][0]
- return {"id": channel['id'],
- "title": channel['snippet']['title'],
- "relatedPlaylists": channel['contentDetails']['relatedPlaylists'],
- "user": user} # One lone channel for user?
-
-
- def getPlaylist(self, p_id):
- playlists = self.api_response("playlists", {"part": "snippet", "id": p_id})
- if playlists['items']:
- playlist = playlists['items'][0]
- return {"id": p_id,
- "title": playlist['snippet']['title'],
- "channelId": playlist['snippet']['channelId'],
- "channelTitle": playlist['snippet']['channelTitle']}
-
-
- def _getPlaylists(self, id, token=None):
- req = {"part": "id", "maxResults": "50", "channelId": id}
- if token:
- req.update({"pageToken": token})
-
- playlists = self.api_response("playlists", req)
-
- for playlist in playlists['items']:
- yield playlist['id']
-
- if "nextPageToken" in playlists:
- for item in self._getPlaylists(id, playlists['nextPageToken']):
- yield item
-
-
- def getPlaylists(self, ch_id):
- return map(self.getPlaylist, self._getPlaylists(ch_id))
-
-
- def _getVideosId(self, id, token=None):
- req = {"part": "contentDetails", "maxResults": "50", "playlistId": id}
- if token:
- req.update({"pageToken": token})
-
- playlist = self.api_response("playlistItems", req)
-
- for item in playlist['items']:
- yield item['contentDetails']['videoId']
-
- if "nextPageToken" in playlist:
- for item in self._getVideosId(id, playlist['nextPageToken']):
- yield item
-
-
- def getVideosId(self, p_id):
- return list(self._getVideosId(p_id))
-
-
- def decrypt(self, pyfile):
- m = re.match(self.__pattern, pyfile.url)
- m_id = m.group("ID")
- m_type = m.group("TYPE")
-
- if m_type == "user":
- self.logDebug("Url recognized as Channel")
- user = m_id
- channel = self.getChannel(user)
-
- if channel:
- playlists = self.getPlaylists(channel['id'])
- self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), channel['title']))
-
- relatedplaylist = {p_name: self.getPlaylist(p_id) for p_name, p_id in channel['relatedPlaylists'].iteritems()}
- self.logDebug("Channel's related playlists found = %s" % relatedplaylist.keys())
-
- relatedplaylist['uploads']['title'] = "Unplaylisted videos"
- relatedplaylist['uploads']['checkDups'] = True #: checkDups flag
-
- for p_name, p_data in relatedplaylist.iteritems():
- if self.getConfig(p_name):
- p_data['title'] += " of " + user
- playlists.append(p_data)
- else:
- playlists = []
- else:
- self.logDebug("Url recognized as Playlist")
- playlists = [self.getPlaylist(m_id)]
-
- if not playlists:
- self.fail(_("No playlist available"))
-
- addedvideos = []
- urlize = lambda x: "https://www.youtube.com/watch?v=" + x
- for p in playlists:
- p_name = p['title']
- p_videos = self.getVideosId(p['id'])
- p_folder = safe_join(self.config['general']['download_folder'], p['channelTitle'], p_name)
- self.logDebug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))
-
- if not p_videos:
- continue
- elif "checkDups" in p:
- p_urls = [urlize(v_id) for v_id in p_videos if v_id not in addedvideos]
- self.logDebug("%s video\s available on playlist \"%s\" after duplicates cleanup" % (len(p_urls), p_name))
- else:
- p_urls = map(urlize, p_videos)
-
- self.packages.append((p_name, p_urls, p_folder)) #: folder is NOT recognized by pyload 0.4.9!
-
- addedvideos.extend(p_videos)
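The recursive generators in YoutubeBatch (_getPlaylists, _getVideosId) are plain cursor pagination against the YouTube Data API v3: fetch a page, yield its items, and follow nextPageToken until it disappears. An iterative sketch of the same idea (assuming the requests library and a valid API key; the key hard-coded above may no longer be usable):

import requests

PLAYLIST_ITEMS_URL = "https://www.googleapis.com/youtube/v3/playlistItems"


def iter_video_ids(playlist_id, api_key):
    params = {"part": "contentDetails", "maxResults": 50,
              "playlistId": playlist_id, "key": api_key}
    while True:
        data = requests.get(PLAYLIST_ITEMS_URL, params=params).json()
        for item in data.get("items", []):
            yield item["contentDetails"]["videoId"]
        token = data.get("nextPageToken")
        if not token:
            break
        params["pageToken"] = token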
diff --git a/pyload/plugins/hook/AlldebridCom.py b/pyload/plugins/hook/AlldebridCom.py
deleted file mode 100644
index a5312345b..000000000
--- a/pyload/plugins/hook/AlldebridCom.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class AlldebridCom(MultiHoster):
- __name = "AlldebridCom"
- __type = "hook"
- __version = "0.13"
-
- __config = [("https", "bool", "Enable HTTPS", False),
- ("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to stanard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
-
- __description = """Alldebrid.com hook plugin"""
- __license = "GPLv3"
- __authors = [("Andy Voigt", "spamsales@online.de")]
-
-
- def getHoster(self):
- https = "https" if self.getConfig("https") else "http"
- page = getURL(https + "://www.alldebrid.com/api.php", get={'action': "get_host"}).replace("\"", "").strip()
-
- return [x.strip() for x in page.split(",") if x.strip()]
diff --git a/pyload/plugins/hook/BypassCaptcha.py b/pyload/plugins/hook/BypassCaptcha.py
deleted file mode 100644
index 9c6d662e0..000000000
--- a/pyload/plugins/hook/BypassCaptcha.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pycurl import FORM_FILE, LOW_SPEED_TIME
-
-from pyload.network.HTTPRequest import BadHeader
-from pyload.network.RequestFactory import getURL, getRequest
-from pyload.plugins.Addon import Addon
-
-
-class BypassCaptchaException(Exception):
-
- def __init__(self, err):
- self.err = err
-
-
- def getCode(self):
- return self.err
-
-
- def __str__(self):
- return "<BypassCaptchaException %s>" % self.err
-
-
- def __repr__(self):
- return "<BypassCaptchaException %s>" % self.err
-
-
-class BypassCaptcha(Addon):
- __name = "BypassCaptcha"
- __type = "hook"
- __version = "0.05"
-
- __config = [("force", "bool", "Force BC even if client is connected", False),
- ("passkey", "password", "Passkey", "")]
-
- __description = """Send captchas to BypassCaptcha.com"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("Godofdream", "soilfcition@gmail.com"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- PYLOAD_KEY = "4f771155b640970d5607f919a615bdefc67e7d32"
-
- SUBMIT_URL = "http://bypasscaptcha.com/upload.php"
- RESPOND_URL = "http://bypasscaptcha.com/check_value.php"
- GETCREDITS_URL = "http://bypasscaptcha.com/ex_left.php"
-
-
- def getCredits(self):
- res = getURL(self.GETCREDITS_URL, post={"key": self.getConfig("passkey")})
-
- data = dict(x.split(' ', 1) for x in res.splitlines())
- return int(data['Left'])
-
-
- def submit(self, captcha, captchaType="file", match=None):
- req = getRequest()
-
- #raise timeout threshold
- req.c.setopt(LOW_SPEED_TIME, 80)
-
- try:
- res = req.load(self.SUBMIT_URL,
- post={'vendor_key': self.PYLOAD_KEY,
- 'key': self.getConfig("passkey"),
- 'gen_task_id': "1",
- 'file': (FORM_FILE, captcha)},
- multipart=True)
- finally:
- req.close()
-
- data = dict(x.split(' ', 1) for x in res.splitlines())
- if not data or "Value" not in data:
- raise BypassCaptchaException(res)
-
- result = data['Value']
- ticket = data['TaskId']
- self.logDebug("Result %s : %s" % (ticket, result))
-
- return ticket, result
-
-
- def respond(self, ticket, success):
- try:
- res = getURL(self.RESPOND_URL, post={"task_id": ticket, "key": self.getConfig("passkey"),
- "cv": 1 if success else 0})
- except BadHeader, e:
- self.logError(_("Could not send response"), e)
-
-
- def captchaTask(self, task):
- if "service" in task.data:
- return False
-
- if not task.isTextual():
- return False
-
- if not self.getConfig("passkey"):
- return False
-
- if self.core.isClientConnected() and not self.getConfig("force"):
- return False
-
- if self.getCredits() > 0:
- task.handler.append(self)
- task.data['service'] = self.__name
- task.setWaiting(100)
- self.processCaptcha(task)
- else:
- self.logInfo(_("Your %s account has not enough credits") % self.__name)
-
-
- def captchaCorrect(self, task):
- if task.data['service'] == self.__name and "ticket" in task.data:
- self.respond(task.data['ticket'], True)
-
-
- def captchaInvalid(self, task):
- if task.data['service'] == self.__name and "ticket" in task.data:
- self.respond(task.data['ticket'], False)
-
-
- def processCaptcha(self, task):
- c = task.captchaFile
- try:
- ticket, result = self.submit(c)
- except BypassCaptchaException, e:
- task.error = e.getCode()
- return
-
- task.data['ticket'] = ticket
- task.setResult(result)
diff --git a/pyload/plugins/hook/Captcha9kw.py b/pyload/plugins/hook/Captcha9kw.py
deleted file mode 100644
index 2fc098473..000000000
--- a/pyload/plugins/hook/Captcha9kw.py
+++ /dev/null
@@ -1,253 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import re
-
-from base64 import b64encode
-from time import sleep
-
-from pyload.network.HTTPRequest import BadHeader
-from pyload.network.RequestFactory import getURL
-
-from pyload.plugins.Addon import Addon
-
-
-class Captcha9kw(Addon):
- __name = "Captcha9kw"
- __type = "hook"
- __version = "0.26"
-
- __config = [("ssl" , "bool" , "Use HTTPS" , True ),
- ("force" , "bool" , "Force captcha resolving even if client is connected" , True ),
- ("confirm" , "bool" , "Confirm Captcha (cost +6 credits)" , False ),
- ("captchaperhour", "int" , "Captcha per hour" , "9999" ),
- ("captchapermin" , "int" , "Captcha per minute" , "9999" ),
- ("prio" , "int" , "Priority (max 10)(cost +0 -> +10 credits)" , "0" ),
- ("queue" , "int" , "Max. Queue (max 999)" , "50" ),
- ("hoster_options", "string" , "Hoster options (format: pluginname:prio=1:selfsolfe=1:confirm=1:timeout=900|...)", "ShareonlineBiz:prio=0:timeout=999 | UploadedTo:prio=0:timeout=999"),
- ("selfsolve" , "bool" , "Selfsolve (manually solve your captcha in your 9kw client if active)" , "0" ),
- ("passkey" , "password", "API key" , "" ),
- ("timeout" , "int" , "Timeout in seconds (min 60, max 3999)" , "900" )]
-
- __description = """Send captchas to 9kw.eu"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- API_URL = "http://www.9kw.eu/index.cgi"
-
-
- def activate(self):
- if self.getConfig("ssl"):
- self.API_URL = self.API_URL.replace("http://", "https://")
-
-
- def getCredits(self):
- res = getURL(self.API_URL,
- get={'apikey': self.getConfig("passkey"),
- 'pyload': "1",
- 'source': "pyload",
- 'action': "usercaptchaguthaben"})
-
- if res.isdigit():
- self.logInfo(_("%s credits left") % res)
- credits = self.info['credits'] = int(res)
- return credits
- else:
- self.logError(res)
- return 0
-
-
- def _processCaptcha(self, task):
- try:
- with open(task.captchaFile, 'rb') as f:
- data = f.read()
-
- except IOError, e:
- self.logError(e)
- return
-
- data = b64encode(data)
- mouse = 1 if task.isPositional() else 0
- pluginname = re.search(r'_([^_]*)_\d+.\w+', task.captchaFile).group(1)
-
- option = {'min' : 2,
- 'max' : 50,
- 'phrase' : 0,
- 'numeric' : 0,
- 'case_sensitive': 0,
- 'math' : 0,
- 'prio' : min(max(self.getConfig("prio"), 0), 10),
- 'confirm' : self.getConfig("confirm"),
- 'timeout' : min(max(self.getConfig("timeout"), 300), 3999),
- 'selfsolve' : self.getConfig("selfsolve"),
- 'cph' : self.getConfig("captchaperhour"),
- 'cpm' : self.getConfig("captchapermin")}
-
- for opt in str(self.getConfig("hoster_options")).split('|'):
-
- details = map(str.strip, opt.split(':'))
-
- if not details or details[0].lower() != pluginname.lower():
- continue
-
- for d in details:
- hosteroption = d.split("=")
-
- if len(hosteroption) < 2 or not hosteroption[1].isdigit():
- continue
-
- o = hosteroption[0].lower()
- if o in option:
- option[o] = hosteroption[1]
-
- break
-
- post_data = {'apikey' : self.getConfig("passkey"),
- 'prio' : option['prio'],
- 'confirm' : option['confirm'],
- 'maxtimeout' : option['timeout'],
- 'selfsolve' : option['selfsolve'],
- 'captchaperhour': option['cph'],
- 'captchapermin' : option['cpm'],
- 'case-sensitive': option['case_sensitive'],
- 'min_len' : option['min'],
- 'max_len' : option['max'],
- 'phrase' : option['phrase'],
- 'numeric' : option['numeric'],
- 'math' : option['math'],
- 'oldsource' : pluginname,
- 'pyload' : "1",
- 'source' : "pyload",
- 'base64' : "1",
- 'mouse' : mouse,
- 'file-upload-01': data,
- 'action' : "usercaptchaupload"}
-
- for _i in xrange(5):
- try:
- res = getURL(self.API_URL, post=post_data)
- except BadHeader, e:
- sleep(3)
- else:
- if res and res.isdigit():
- break
- else:
- self.logError(_("Bad upload: %s") % res)
- return
-
- self.logDebug(_("NewCaptchaID ticket: %s") % res, task.captchaFile)
-
- task.data["ticket"] = res
-
- for _i in xrange(int(self.getConfig("timeout") / 5)):
- result = getURL(self.API_URL,
- get={'apikey': self.getConfig("passkey"),
- 'id' : res,
- 'pyload': "1",
- 'info' : "1",
- 'source': "pyload",
- 'action': "usercaptchacorrectdata"})
-
- if not result or result == "NO DATA":
- sleep(5)
- else:
- break
- else:
- self.logDebug("Could not send request: %s" % res)
- result = None
-
- self.logInfo(_("Captcha result for ticket %s: %s") % (res, result))
-
- task.setResult(result)
-
-
- def captchaTask(self, task):
- if not task.isTextual() and not task.isPositional():
- return
-
- if not self.getConfig("passkey"):
- return
-
- if self.core.isClientConnected() and not self.getConfig("force"):
- return
-
- credits = self.getCredits()
-
- if not credits:
- self.logError(_("Your captcha 9kw.eu account has not enough credits"))
- return
-
- queue = min(self.getConfig("queue"), 999)
- timeout = min(max(self.getConfig("timeout"), 300), 3999)
- pluginname = re.search(r'_([^_]*)_\d+.\w+', task.captchaFile).group(1)
-
- for _i in xrange(5):
- servercheck = getURL("http://www.9kw.eu/grafik/servercheck.txt")
- if int(re.search(r'queue=(\d+)', servercheck).group(1)) < queue:
- break
-
- sleep(10)
- else:
- self.fail(_("Too many captchas in queue"))
-
- for opt in str(self.getConfig("hoster_options")).split('|'):
- details = map(str.strip, opt.split(':'))
-
- if not details or details[0].lower() != pluginname.lower():
- continue
-
- for d in details:
- hosteroption = d.split("=")
-
- if (len(hosteroption) > 1
- and hosteroption[0].lower() == 'timeout'
- and hosteroption[1].isdigit()):
- timeout = int(hosteroption[1])
-
- break
-
- task.handler.append(self)
-
- task.setWaiting(timeout)
-
- self._processCaptcha(task)
-
-
- def _captchaResponse(self, task, correct):
- type = "correct" if correct else "refund"
-
- if 'ticket' not in task.data:
- self.logDebug("No CaptchaID for %s request (task: %s)" % (type, task))
- return
-
- passkey = self.getConfig("passkey")
-
- for _i in xrange(3):
- res = getURL(self.API_URL,
- get={'action' : "usercaptchacorrectback",
- 'apikey' : passkey,
- 'api_key': passkey,
- 'correct': "1" if correct else "2",
- 'pyload' : "1",
- 'source' : "pyload",
- 'id' : task.data["ticket"]})
-
- self.logDebug("Request %s: %s" % (type, res))
-
- if res == "OK":
- break
-
- sleep(5)
- else:
- self.logDebug("Could not send %s request: %s" % (type, res))
-
-
- def captchaCorrect(self, task):
- self._captchaResponse(task, True)
-
-
- def captchaInvalid(self, task):
- self._captchaResponse(task, False)
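Stripped of the per-hoster option juggling, Captcha9kw._processCaptcha() is a submit-then-poll client for the 9kw.eu HTTP API: upload the image base64-encoded, remember the returned ticket id, then poll usercaptchacorrectdata until an answer arrives. A bare-bones sketch of that round trip (assuming the requests library and a valid API key, and using only parameters that appear in the plugin above):

import base64
import time

import requests

API_URL = "https://www.9kw.eu/index.cgi"


def solve_with_9kw(image_path, api_key, timeout=300):
    with open(image_path, "rb") as f:
        payload = base64.b64encode(f.read())
    # 1. upload the captcha; on success the response body is the ticket id
    ticket = requests.post(API_URL, data={
        "apikey": api_key, "action": "usercaptchaupload", "base64": "1",
        "maxtimeout": timeout, "source": "pyload",
        "file-upload-01": payload}).text.strip()
    # 2. poll until a worker has answered or the timeout is spent
    for _ in range(timeout // 5):
        answer = requests.get(API_URL, params={
            "apikey": api_key, "action": "usercaptchacorrectdata",
            "id": ticket, "source": "pyload"}).text.strip()
        if answer and answer != "NO DATA":
            return ticket, answer
        time.sleep(5)
    return ticket, None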
diff --git a/pyload/plugins/hook/CaptchaBrotherhood.py b/pyload/plugins/hook/CaptchaBrotherhood.py
deleted file mode 100644
index 01c5588e3..000000000
--- a/pyload/plugins/hook/CaptchaBrotherhood.py
+++ /dev/null
@@ -1,166 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import StringIO
-import pycurl
-
-try:
- from PIL import Image
-except ImportError:
- import Image
-
-from time import sleep
-from urllib import urlencode
-
-from pyload.network.RequestFactory import getURL, getRequest
-from pyload.plugins.Addon import Addon
-
-
-class CaptchaBrotherhoodException(Exception):
-
- def __init__(self, err):
- self.err = err
-
-
- def getCode(self):
- return self.err
-
-
- def __str__(self):
- return "<CaptchaBrotherhoodException %s>" % self.err
-
-
- def __repr__(self):
- return "<CaptchaBrotherhoodException %s>" % self.err
-
-
-class CaptchaBrotherhood(Addon):
- __name = "CaptchaBrotherhood"
- __type = "hook"
- __version = "0.06"
-
- __config = [("username", "str", "Username", ""),
- ("force", "bool", "Force CT even if client is connected", False),
- ("passkey", "password", "Password", "")]
-
- __description = """Send captchas to CaptchaBrotherhood.com"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- API_URL = "http://www.captchabrotherhood.com/"
-
-
- def getCredits(self):
- res = getURL(self.API_URL + "askCredits.aspx",
- get={"username": self.getConfig("username"), "password": self.getConfig("passkey")})
- if not res.startswith("OK"):
- raise CaptchaBrotherhoodException(res)
- else:
- credits = int(res[3:])
- self.logInfo(_("%d credits left") % credits)
- self.info['credits'] = credits
- return credits
-
-
- def submit(self, captcha, captchaType="file", match=None):
- try:
- img = Image.open(captcha)
- output = StringIO.StringIO()
- self.logDebug("CAPTCHA IMAGE", img, img.format, img.mode)
- if img.format in ("GIF", "JPEG"):
- img.save(output, img.format)
- else:
- if img.mode != "RGB":
- img = img.convert("RGB")
- img.save(output, "JPEG")
- data = output.getvalue()
- output.close()
- except Exception, e:
- raise CaptchaBrotherhoodException("Reading or converting captcha image failed: %s" % e)
-
- req = getRequest()
-
- url = "%ssendNewCaptcha.aspx?%s" % (self.API_URL,
- urlencode({"username": self.getConfig("username"),
- "password": self.getConfig("passkey"),
- "captchaSource": "pyLoad",
- "timeout": "80"}))
-
- req.c.setopt(pycurl.URL, url)
- req.c.setopt(pycurl.POST, 1)
- req.c.setopt(pycurl.POSTFIELDS, data)
- req.c.setopt(pycurl.HTTPHEADER, ["Content-Type: text/html"])
-
- try:
- req.c.perform()
- res = req.getResponse()
- except Exception, e:
- raise CaptchaBrotherhoodException("Submit captcha image failed")
-
- req.close()
-
- if not res.startswith("OK"):
- raise CaptchaBrotherhoodException(res[1])
-
- ticket = res[3:]
-
- for _i in xrange(15):
- sleep(5)
- res = self.get_api("askCaptchaResult", ticket)
- if res.startswith("OK-answered"):
- return ticket, res[12:]
-
- raise CaptchaBrotherhoodException("No solution received in time")
-
-
- def get_api(self, api, ticket):
- res = getURL("%s%s.aspx" % (self.API_URL, api),
- get={"username": self.getConfig("username"),
- "password": self.getConfig("passkey"),
- "captchaID": ticket})
- if not res.startswith("OK"):
- raise CaptchaBrotherhoodException("Unknown response: %s" % res)
-
- return res
-
-
- def captchaTask(self, task):
- if "service" in task.data:
- return False
-
- if not task.isTextual():
- return False
-
- if not self.getConfig("username") or not self.getConfig("passkey"):
- return False
-
- if self.core.isClientConnected() and not self.getConfig("force"):
- return False
-
- if self.getCredits() > 10:
- task.handler.append(self)
- task.data['service'] = self.__name
- task.setWaiting(100)
- self.processCaptcha(task)
- else:
- self.logInfo(_("Your CaptchaBrotherhood Account has not enough credits"))
-
-
- def captchaInvalid(self, task):
- if task.data['service'] == self.__name and "ticket" in task.data:
- res = self.get_api("complainCaptcha", task.data['ticket'])
-
-
- def processCaptcha(self, task):
- c = task.captchaFile
- try:
- ticket, result = self.submit(c)
- except CaptchaBrotherhoodException, e:
- task.error = e.getCode()
- return
-
- task.data['ticket'] = ticket
- task.setResult(result)
diff --git a/pyload/plugins/hook/DeathByCaptcha.py b/pyload/plugins/hook/DeathByCaptcha.py
deleted file mode 100644
index 9ab88127a..000000000
--- a/pyload/plugins/hook/DeathByCaptcha.py
+++ /dev/null
@@ -1,213 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import re
-
-from base64 import b64encode
-from pycurl import FORM_FILE, HTTPHEADER
-from time import sleep
-
-from pyload.utils import json_loads
-from pyload.network.HTTPRequest import BadHeader
-from pyload.network.RequestFactory import getRequest
-from pyload.plugins.Addon import Addon
-
-
-class DeathByCaptchaException(Exception):
- DBC_ERRORS = {'not-logged-in': 'Access denied, check your credentials',
- 'invalid-credentials': 'Access denied, check your credentials',
- 'banned': 'Access denied, account is suspended',
- 'insufficient-funds': 'Insufficient account balance to decrypt CAPTCHA',
- 'invalid-captcha': 'CAPTCHA is not a valid image',
- 'service-overload': 'CAPTCHA was rejected due to service overload, try again later',
- 'invalid-request': 'Invalid request',
- 'timed-out': 'No CAPTCHA solution received in time'}
-
-
- def __init__(self, err):
- self.err = err
-
-
- def getCode(self):
- return self.err
-
-
- def getDesc(self):
- if self.err in self.DBC_ERRORS.keys():
- return self.DBC_ERRORS[self.err]
- else:
- return self.err
-
-
- def __str__(self):
- return "<DeathByCaptchaException %s>" % self.err
-
-
- def __repr__(self):
- return "<DeathByCaptchaException %s>" % self.err
-
-
-class DeathByCaptcha(Addon):
- __name = "DeathByCaptcha"
- __type = "hook"
- __version = "0.04"
-
- __config = [("username", "str", "Username", ""),
- ("passkey", "password", "Password", ""),
- ("force", "bool", "Force DBC even if client is connected", False)]
-
- __description = """Send captchas to DeathByCaptcha.com"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- API_URL = "http://api.dbcapi.me/api/"
-
-
- def call_api(self, api="captcha", post=False, multipart=False):
- req = getRequest()
- req.c.setopt(HTTPHEADER, ["Accept: application/json", "User-Agent: pyLoad %s" % self.core.version])
-
- if post:
- if not isinstance(post, dict):
- post = {}
- post.update({"username": self.getConfig("username"),
- "password": self.getConfig("passkey")})
-
- res = None
- try:
- json = req.load("%s%s" % (self.API_URL, api),
- post=post,
- multipart=multipart)
- self.logDebug(json)
- res = json_loads(json)
-
- if "error" in res:
- raise DeathByCaptchaException(res['error'])
- elif "status" not in res:
- raise DeathByCaptchaException(str(res))
-
- except BadHeader, e:
- if 403 == e.code:
- raise DeathByCaptchaException('not-logged-in')
- elif 413 == e.code:
- raise DeathByCaptchaException('invalid-captcha')
- elif 503 == e.code:
- raise DeathByCaptchaException('service-overload')
- elif e.code in (400, 405):
- raise DeathByCaptchaException('invalid-request')
- else:
- raise
-
- finally:
- req.close()
-
- return res
-
-
- def getCredits(self):
- res = self.call_api("user", True)
-
- if 'is_banned' in res and res['is_banned']:
- raise DeathByCaptchaException('banned')
- elif 'balance' in res and 'rate' in res:
- self.info.update(res)
- else:
- raise DeathByCaptchaException(res)
-
-
- def getStatus(self):
- res = self.call_api("status", False)
-
- if 'is_service_overloaded' in res and res['is_service_overloaded']:
- raise DeathByCaptchaException('service-overload')
-
-
- def submit(self, captcha, captchaType="file", match=None):
- #workaround multipart-post bug in HTTPRequest.py
- if re.match("^\w*$", self.getConfig("passkey")):
- multipart = True
- data = (FORM_FILE, captcha)
- else:
- multipart = False
- with open(captcha, 'rb') as f:
- data = f.read()
- data = "base64:" + b64encode(data)
-
- res = self.call_api("captcha", {"captchafile": data}, multipart)
-
- if "captcha" not in res:
- raise DeathByCaptchaException(res)
- ticket = res['captcha']
-
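- # poll the ticket every 5 seconds, up to 24 times (~2 minutes), for a solution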
- for _i in xrange(24):
- sleep(5)
- res = self.call_api("captcha/%d" % ticket, False)
- if res['text'] and res['is_correct']:
- break
- else:
- raise DeathByCaptchaException('timed-out')
-
- result = res['text']
- self.logDebug("Result %s : %s" % (ticket, result))
-
- return ticket, result
-
-
- def captchaTask(self, task):
- if "service" in task.data:
- return False
-
- if not task.isTextual():
- return False
-
- if not self.getConfig("username") or not self.getConfig("passkey"):
- return False
-
- if self.core.isClientConnected() and not self.getConfig("force"):
- return False
-
- try:
- self.getStatus()
- self.getCredits()
- except DeathByCaptchaException, e:
- self.logError(e.getDesc())
- return False
-
- balance, rate = self.info['balance'], self.info['rate']
- self.logInfo(_("Account balance"),
- _("US$%.3f (%d captchas left at %.2f cents each)") % (balance / 100,
- balance // rate, rate))
-
- if balance > rate:
- task.handler.append(self)
- task.data['service'] = self.__name
- task.setWaiting(180)
- self.processCaptcha(task)
-
-
- def captchaInvalid(self, task):
- if task.data['service'] == self.__name and "ticket" in task.data:
- try:
- res = self.call_api("captcha/%d/report" % task.data['ticket'], True)
-
- except DeathByCaptchaException, e:
- self.logError(e.getDesc())
-
- except Exception, e:
- self.logError(e)
-
-
- def processCaptcha(self, task):
- c = task.captchaFile
- try:
- ticket, result = self.submit(c)
- except DeathByCaptchaException, e:
- task.error = e.getCode()
- self.logError(e.getDesc())
- return
-
- task.data['ticket'] = ticket
- task.setResult(result)
diff --git a/pyload/plugins/hook/DebridItaliaCom.py b/pyload/plugins/hook/DebridItaliaCom.py
deleted file mode 100644
index ee00fc375..000000000
--- a/pyload/plugins/hook/DebridItaliaCom.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class DebridItaliaCom(MultiHoster):
- __name = "DebridItaliaCom"
- __type = "hook"
- __version = "0.08"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to standard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
-
- __description = """Debriditalia.com hook plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- def getHoster(self):
- html = getURL("http://www.debriditalia.com/status.php")
- return re.findall(r'title="(.+?)"> \1</td><td><img src="/images/(?:attivo|testing)', html)
diff --git a/pyload/plugins/hook/EasybytezCom.py b/pyload/plugins/hook/EasybytezCom.py
deleted file mode 100644
index e24c82a9f..000000000
--- a/pyload/plugins/hook/EasybytezCom.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class EasybytezCom(MultiHoster):
- __name = "EasybytezCom"
- __type = "hook"
- __version = "0.03"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", "")]
-
- __description = """EasyBytez.com hook plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def getHoster(self):
- self.account = self.core.accountManager.getAccountPlugin(self.__name)
- user = self.account.selectAccount()[0]
-
- try:
- req = self.account.getAccountRequest(user)
- page = req.load("http://www.easybytez.com")
-
- hosters = re.search(r'</textarea>\s*Supported sites:(.*)', page).group(1).split(',')
-
- except Exception, e:
- self.logWarning(_("Unable to load supported hoster list, using last known"))
- self.logDebug(e)
-
- hosters = ["bitshare.com", "crocko.com", "ddlstorage.com", "depositfiles.com", "extabit.com", "hotfile.com",
- "mediafire.com", "netload.in", "rapidgator.net", "rapidshare.com", "uploading.com", "uload.to",
- "uploaded.to"]
- finally:
- return hosters
diff --git a/pyload/plugins/hook/ExpertDecoders.py b/pyload/plugins/hook/ExpertDecoders.py
deleted file mode 100644
index 746dcf246..000000000
--- a/pyload/plugins/hook/ExpertDecoders.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-from base64 import b64encode
-from pycurl import LOW_SPEED_TIME
-from uuid import uuid4
-
-from pyload.network.HTTPRequest import BadHeader
-from pyload.network.RequestFactory import getURL, getRequest
-from pyload.plugins.Addon import Addon
-
-
-class ExpertDecoders(Addon):
- __name = "ExpertDecoders"
- __type = "hook"
- __version = "0.02"
-
- __config = [("force", "bool", "Force CT even if client is connected", False),
- ("passkey", "password", "Access key", "")]
-
- __description = """Send captchas to expertdecoders.com"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- API_URL = "http://www.fasttypers.org/imagepost.ashx"
-
-
- def getCredits(self):
- res = getURL(self.API_URL, post={"key": self.getConfig("passkey"), "action": "balance"})
-
- if res.isdigit():
- self.logInfo(_("%s credits left") % res)
- self.info['credits'] = credits = int(res)
- return credits
- else:
- self.logError(res)
- return 0
-
-
- def processCaptcha(self, task):
- task.data['ticket'] = ticket = uuid4()
- result = None
-
- with open(task.captchaFile, 'rb') as f:
- data = f.read()
- data = b64encode(data)
-
- req = getRequest()
- #raise timeout threshold
- req.c.setopt(LOW_SPEED_TIME, 80)
-
- try:
- result = req.load(self.API_URL, post={"action": "upload", "key": self.getConfig("passkey"),
- "file": data, "gen_task_id": ticket})
- finally:
- req.close()
-
- self.logDebug("Result %s : %s" % (ticket, result))
- task.setResult(result)
-
-
- def captchaTask(self, task):
- if not task.isTextual():
- return False
-
- if not self.getConfig("passkey"):
- return False
-
- if self.core.isClientConnected() and not self.getConfig("force"):
- return False
-
- if self.getCredits() > 0:
- task.handler.append(self)
- task.setWaiting(100)
- self.processCaptcha(task)
- else:
- self.logInfo(_("Your ExpertDecoders Account has not enough credits"))
-
-
- def captchaInvalid(self, task):
- if "ticket" in task.data:
-
- try:
- res = getURL(self.API_URL,
- post={'action': "refund", 'key': self.getConfig("passkey"), 'gen_task_id': task.data['ticket']})
- self.logInfo(_("Request refund", res)
-
- except BadHeader, e:
- self.logError(_("Could not send refund request"), e)
diff --git a/pyload/plugins/hook/FastixRu.py b/pyload/plugins/hook/FastixRu.py
deleted file mode 100644
index bd3994243..000000000
--- a/pyload/plugins/hook/FastixRu.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.utils import json_loads
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class FastixRu(MultiHoster):
- __name = "FastixRu"
- __type = "hook"
- __version = "0.02"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("unloadFailing", "bool", "Revert to standard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
-
- __description = """Fastix.ru hook plugin"""
- __license = "GPLv3"
- __authors = [("Massimo Rosamilia", "max@spiritix.eu")]
-
-
- def getHoster(self):
- page = getURL("http://fastix.ru/api_v2",
- get={'apikey': "5182964c3f8f9a7f0b00000a_kelmFB4n1IrnCDYuIFn2y",
- 'sub' : "allowed_sources"})
- host_list = json_loads(page)
- host_list = host_list['allow']
- return host_list
diff --git a/pyload/plugins/hook/FreeWayMe.py b/pyload/plugins/hook/FreeWayMe.py
deleted file mode 100644
index 27297f77a..000000000
--- a/pyload/plugins/hook/FreeWayMe.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class FreeWayMe(MultiHoster):
- __name = "FreeWayMe"
- __type = "hook"
- __version = "0.11"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported):", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to stanard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
-
- __description = """FreeWay.me hook plugin"""
- __license = "GPLv3"
- __authors = [("Nicolas Giese", "james@free-way.me")]
-
-
- def getHoster(self):
- hostis = getURL("https://www.free-way.me/ajax/jd.php", get={'id': 3}).replace("\"", "").strip()
- self.logDebug("Hosters", hostis)
- return [x.strip() for x in hostis.split(",") if x.strip()]
diff --git a/pyload/plugins/hook/ImageTyperz.py b/pyload/plugins/hook/ImageTyperz.py
deleted file mode 100644
index ea914ed23..000000000
--- a/pyload/plugins/hook/ImageTyperz.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import re
-
-from base64 import b64encode
-from pycurl import FORM_FILE, LOW_SPEED_TIME
-
-from pyload.network.RequestFactory import getURL, getRequest
-from pyload.plugins.Addon import Addon
-
-
-class ImageTyperzException(Exception):
-
- def __init__(self, err):
- self.err = err
-
-
- def getCode(self):
- return self.err
-
-
- def __str__(self):
- return "<ImageTyperzException %s>" % self.err
-
-
- def __repr__(self):
- return "<ImageTyperzException %s>" % self.err
-
-
-class ImageTyperz(Addon):
- __name = "ImageTyperz"
- __type = "hook"
- __version = "0.05"
-
- __config = [("username", "str", "Username", ""),
- ("passkey", "password", "Password", ""),
- ("force", "bool", "Force IT even if client is connected", False)]
-
- __description = """Send captchas to ImageTyperz.com"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- SUBMIT_URL = "http://captchatypers.com/Forms/UploadFileAndGetTextNEW.ashx"
- RESPOND_URL = "http://captchatypers.com/Forms/SetBadImage.ashx"
- GETCREDITS_URL = "http://captchatypers.com/Forms/RequestBalance.ashx"
-
-
- def getCredits(self):
- res = getURL(self.GETCREDITS_URL,
- post={'action': "REQUESTBALANCE",
- 'username': self.getConfig("username"),
- 'password': self.getConfig("passkey")})
-
- if res.startswith('ERROR'):
- raise ImageTyperzException(res)
-
- try:
- balance = float(res)
- except Exception:
- raise ImageTyperzException("Invalid response")
-
- self.logInfo(_("Account balance: $%s left") % res)
- return balance
-
-
- def submit(self, captcha, captchaType="file", match=None):
- req = getRequest()
- #raise timeout threshold
- req.c.setopt(LOW_SPEED_TIME, 80)
-
- try:
- #workaround multipart-post bug in HTTPRequest.py
- if re.match("^\w*$", self.getConfig("passkey")):
- multipart = True
- data = (FORM_FILE, captcha)
- else:
- multipart = False
- with open(captcha, 'rb') as f:
- data = f.read()
- data = b64encode(data)
-
- res = req.load(self.SUBMIT_URL,
- post={'action': "UPLOADCAPTCHA",
- 'username': self.getConfig("username"),
- 'password': self.getConfig("passkey"), "file": data},
- multipart=multipart)
- finally:
- req.close()
-
- if res.startswith("ERROR"):
- raise ImageTyperzException(res)
- else:
- data = res.split('|')
- if len(data) == 2:
- ticket, result = data
- else:
- raise ImageTyperzException("Unknown response: %s" % res)
-
- return ticket, result
-
-
- def captchaTask(self, task):
- if "service" in task.data:
- return False
-
- if not task.isTextual():
- return False
-
- if not self.getConfig("username") or not self.getConfig("passkey"):
- return False
-
- if self.core.isClientConnected() and not self.getConfig("force"):
- return False
-
- if self.getCredits() > 0:
- task.handler.append(self)
- task.data['service'] = self.__name
- task.setWaiting(100)
- self.processCaptcha(task)
- else:
- self.logInfo(_("Your %s account has not enough credits") % self.__name)
-
-
- def captchaInvalid(self, task):
- if task.data['service'] == self.__name and "ticket" in task.data:
- res = getURL(self.RESPOND_URL,
- post={'action': "SETBADIMAGE",
- 'username': self.getConfig("username"),
- 'password': self.getConfig("passkey"),
- 'imageid': task.data['ticket']})
-
- if res == "SUCCESS":
- self.logInfo(_("Bad captcha solution received, requested refund"))
- else:
- self.logError(_("Bad captcha solution received, refund request failed"), res)
-
-
- def processCaptcha(self, task):
- c = task.captchaFile
- try:
- ticket, result = self.submit(c)
- except ImageTyperzException, e:
- task.error = e.getCode()
- return
-
- task.data['ticket'] = ticket
- task.setResult(result)
diff --git a/pyload/plugins/hook/LinkdecrypterCom.py b/pyload/plugins/hook/LinkdecrypterCom.py
deleted file mode 100644
index ad3ace2a6..000000000
--- a/pyload/plugins/hook/LinkdecrypterCom.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Addon import Addon
-from pyload.utils import remove_chars
-
-
-class LinkdecrypterCom(Addon):
- __name = "LinkdecrypterCom"
- __type = "hook"
- __version = "0.21"
-
- __description = """Linkdecrypter.com hook plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def activate(self):
- try:
- self.loadPatterns()
- except Exception, e:
- self.logError(e)
-
-
- def loadPatterns(self):
- html = getURL("http://linkdecrypter.com/")
-
- m = re.search(r'<title>', html)
- if m is None:
- self.logError(_("Linkdecrypter site is down"))
- return
-
- m = re.search(r'<b>Supported\(\d+\)</b>: <i>([^+<]*)', html)
- if m is None:
- self.logError(_("Crypter list not found"))
- return
-
- builtin = [name.lower() for name in self.core.pluginManager.crypterPlugins.keys()]
- builtin.append("downloadserienjunkiesorg")
-
- crypter_pattern = re.compile("(\w[\w.-]+)")
- online = []
- for crypter in m.group(1).split(', '):
- m = re.match(crypter_pattern, crypter)
- if m and remove_chars(m.group(1), "-.") not in builtin:
- online.append(m.group(1).replace(".", "\\."))
-
- if not online:
- self.logError(_("Crypter list is empty"))
- return
-
- regexp = r'https?://([^.]+\.)*?(%s)/.*' % '|'.join(online)
-
- dict = self.core.pluginManager.crypterPlugins[self.__name]
- dict['pattern'] = regexp
- dict['re'] = re.compile(regexp)
-
- self.logDebug("Loaded pattern: %s" % regexp)
diff --git a/pyload/plugins/hook/LinksnappyCom.py b/pyload/plugins/hook/LinksnappyCom.py
deleted file mode 100644
index d34188987..000000000
--- a/pyload/plugins/hook/LinksnappyCom.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.utils import json_loads
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class LinksnappyCom(MultiHoster):
- __name = "LinksnappyCom"
- __type = "hook"
- __version = "0.01"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to standard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
-
- __description = """Linksnappy.com hook plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- def getHoster(self):
- json_data = getURL("http://gen.linksnappy.com/lseAPI.php", get={'act': "FILEHOSTS"})
- json_data = json_loads(json_data)
-
- return json_data['return'].keys()
diff --git a/pyload/plugins/hook/MegaDebridEu.py b/pyload/plugins/hook/MegaDebridEu.py
deleted file mode 100644
index 4fff41a51..000000000
--- a/pyload/plugins/hook/MegaDebridEu.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.utils import json_loads
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class MegaDebridEu(MultiHoster):
- __name = "MegaDebridEu"
- __type = "hook"
- __version = "0.02"
-
- __config = [("unloadFailing", "bool", "Revert to standard download if download fails", False)]
-
- __description = """mega-debrid.eu hook plugin"""
- __license = "GPLv3"
- __authors = [("D.Ducatel", "dducatel@je-geek.fr")]
-
-
- def getHoster(self):
- response = getURL("http://www.mega-debrid.eu/api.php", get={'action': "getHosters"})
- json_data = json_loads(response)
-
- if json_data['response_code'] == "ok":
- host_list = [element[0] for element in json_data['hosters']]
- else:
- self.logError(_("Unable to retrieve hoster list"))
- host_list = list()
-
- return host_list
diff --git a/pyload/plugins/hook/MultishareCz.py b/pyload/plugins/hook/MultishareCz.py
deleted file mode 100644
index 13906734f..000000000
--- a/pyload/plugins/hook/MultishareCz.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class MultishareCz(MultiHoster):
- __name = "MultishareCz"
- __type = "hook"
- __version = "0.04"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", "uloz.to")]
-
- __description = """MultiShare.cz hook plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_PATTERN = r'<img class="logo-shareserveru"[^>]*?alt="([^"]+)"></td>\s*<td class="stav">[^>]*?alt="OK"'
-
-
- def getHoster(self):
- page = getURL("http://www.multishare.cz/monitoring/")
- return re.findall(self.HOSTER_PATTERN, page)
diff --git a/pyload/plugins/hook/MyfastfileCom.py b/pyload/plugins/hook/MyfastfileCom.py
deleted file mode 100644
index bf07992f5..000000000
--- a/pyload/plugins/hook/MyfastfileCom.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-from pyload.utils import json_loads
-
-
-class MyfastfileCom(MultiHoster):
- __name = "MyfastfileCom"
- __type = "hook"
- __version = "0.02"
-
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to standard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
-
- __description = """Myfastfile.com hook plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
-
- def getHoster(self):
- json_data = getURL("http://myfastfile.com/api.php", get={'hosts': ""}, decode=True)
- self.logDebug("JSON data", json_data)
- json_data = json_loads(json_data)
-
- return json_data['hosts']
diff --git a/pyload/plugins/hook/OverLoadMe.py b/pyload/plugins/hook/OverLoadMe.py
deleted file mode 100644
index caa9ef14c..000000000
--- a/pyload/plugins/hook/OverLoadMe.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class OverLoadMe(MultiHoster):
- __name = "OverLoadMe"
- __type = "hook"
- __version = "0.01"
-
- __config = [("https", "bool", "Enable HTTPS", True),
- ("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported):", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to standard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 12)]
-
- __description = """Over-Load.me hook plugin"""
- __license = "GPLv3"
- __authors = [("marley", "marley@over-load.me")]
-
-
- def getHoster(self):
- https = "https" if self.getConfig("https") else "http"
- page = getURL(https + "://api.over-load.me/hoster.php",
- get={'auth': "0001-cb1f24dadb3aa487bda5afd3b76298935329be7700cd7-5329be77-00cf-1ca0135f"}).replace("\"", "").strip()
- self.logDebug("Hosterlist", page)
-
- return [x.strip() for x in page.split(",") if x.strip()]
diff --git a/pyload/plugins/hook/PremiumTo.py b/pyload/plugins/hook/PremiumTo.py
deleted file mode 100644
index 6a4e4a644..000000000
--- a/pyload/plugins/hook/PremiumTo.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class PremiumTo(MultiHoster):
- __name = "PremiumTo"
- __type = "hook"
- __version = "0.04"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for downloads from supported hosters:", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", "")]
-
- __description = """Premium.to hook plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
-
- def getHoster(self):
- page = getURL("http://premium.to/api/hosters.php",
- get={'username': self.account.username, 'password': self.account.password})
- return [x.strip() for x in page.replace("\"", "").split(";")]
-
-
- def activate(self):
- self.account = self.core.accountManager.getAccountPlugin("PremiumTo")
-
- user = self.account.selectAccount()[0]
-
- if not user:
- self.logError(_("Please add your premium.to account first and restart pyLoad"))
- return
-
- return MultiHoster.activate(self)
diff --git a/pyload/plugins/hook/PremiumizeMe.py b/pyload/plugins/hook/PremiumizeMe.py
deleted file mode 100644
index de887999f..000000000
--- a/pyload/plugins/hook/PremiumizeMe.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.utils import json_loads
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class PremiumizeMe(MultiHoster):
- __name = "PremiumizeMe"
- __type = "hook"
- __version = "0.12"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported):", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to stanard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
-
- __description = """Premiumize.me hook plugin"""
- __license = "GPLv3"
- __authors = [("Florian Franzen", "FlorianFranzen@gmail.com")]
-
-
- def getHoster(self):
- # If no accounts are available there will be no hosters available
- if not self.account or not self.account.canUse():
- return []
-
- # Get account data
- (user, data) = self.account.selectAccount()
-
- # Get supported hosters list from premiumize.me using the
- # json API v1 (see https://secure.premiumize.me/?show=api)
- answer = getURL("https://api.premiumize.me/pm-api/v1.php"
- get={'method': "hosterlist", 'params[login]': user, 'params[pass]': data['password']})
- data = json_loads(answer)
-
- # If the account is not valid there are no hosters available
- if data['status'] != 200:
- return []
-
- # Extract hosters from json file
- return data['result']['hosterlist']
-
-
- def activate(self):
- # Get account plugin and check if there is a valid account available
- self.account = self.core.accountManager.getAccountPlugin("PremiumizeMe")
- if not self.account.canUse():
- self.account = None
- self.logError(_("Please add a valid premiumize.me account first and restart pyLoad"))
- return
-
- # Run the overridden MultiHoster.activate which actually enables the multihoster hook
- return MultiHoster.activate(self)
diff --git a/pyload/plugins/hook/RPNetBiz.py b/pyload/plugins/hook/RPNetBiz.py
deleted file mode 100644
index 88e091d37..000000000
--- a/pyload/plugins/hook/RPNetBiz.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.utils import json_loads
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class RPNetBiz(MultiHoster):
- __name = "RPNetBiz"
- __type = "hook"
- __version = "0.10"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported):", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to stanard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
-
- __description = """RPNet.biz hook plugin"""
- __license = "GPLv3"
- __authors = [("Dman", "dmanugm@gmail.com")]
-
-
- def getHoster(self):
- # No hosts supported if no account
- if not self.account or not self.account.canUse():
- return []
-
- # Get account data
- (user, data) = self.account.selectAccount()
-
- res = getURL("https://premium.rpnet.biz/client_api.php",
- get={'username': user, 'password': data['password'], 'action': "showHosterList"})
- hoster_list = json_loads(res)
-
- # If the account is not valid there are no hosters available
- if 'error' in hoster_list:
- return []
-
- # Extract hosters from json file
- return hoster_list['hosters']
-
-
- def activate(self):
- # Get account plugin and check if there is a valid account available
- self.account = self.core.accountManager.getAccountPlugin("RPNetBiz")
- if not self.account.canUse():
- self.account = None
- self.logError(_("Please enter your %s account or deactivate this plugin") % "rpnet")
- return
-
- # Run the overridden MultiHoster.activate which actually enables the multihoster hook
- return MultiHoster.activate(self)
diff --git a/pyload/plugins/hook/RealdebridCom.py b/pyload/plugins/hook/RealdebridCom.py
deleted file mode 100644
index 01f7c4fad..000000000
--- a/pyload/plugins/hook/RealdebridCom.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class RealdebridCom(MultiHoster):
- __name = "RealdebridCom"
- __type = "hook"
- __version = "0.43"
-
- __config = [("https", "bool", "Enable HTTPS", False),
- ("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported):", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to stanard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
-
- __description = """Real-Debrid.com hook plugin"""
- __license = "GPLv3"
- __authors = [("Devirex Hazzard", "naibaf_11@yahoo.de")]
-
-
- def getHoster(self):
- https = "https" if self.getConfig("https") else "http"
- page = getURL(https + "://real-debrid.com/api/hosters.php").replace("\"", "").strip()
-
- return [x.strip() for x in page.split(",") if x.strip()]
diff --git a/pyload/plugins/hook/RehostTo.py b/pyload/plugins/hook/RehostTo.py
deleted file mode 100644
index 3da17ea8f..000000000
--- a/pyload/plugins/hook/RehostTo.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class RehostTo(MultiHoster):
- __name = "RehostTo"
- __type = "hook"
- __version = "0.43"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to stanard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24)]
-
- __description = """Rehost.to hook plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org")]
-
-
- def getHoster(self):
- page = getURL("http://rehost.to/api.php",
- get={'cmd': "get_supported_och_dl", 'long_ses': self.long_ses})
- return [x.strip() for x in page.replace("\"", "").split(",")]
-
-
- def activate(self):
- self.account = self.core.accountManager.getAccountPlugin("RehostTo")
-
- user = self.account.selectAccount()[0]
-
- if not user:
- self.logError(_("Please add your rehost.to account first and restart pyLoad"))
- return
-
- data = self.account.getAccountInfo(user)
- self.ses = data['ses']
- self.long_ses = data['long_ses']
-
- return MultiHoster.activate(self)
diff --git a/pyload/plugins/hook/SimplyPremiumCom.py b/pyload/plugins/hook/SimplyPremiumCom.py
deleted file mode 100644
index b4e173521..000000000
--- a/pyload/plugins/hook/SimplyPremiumCom.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.utils import json_loads
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class SimplyPremiumCom(MultiHoster):
- __name = "SimplyPremiumCom"
- __type = "hook"
- __version = "0.02"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to standard download if download fails", "False"),
- ("interval", "int", "Reload interval in hours (0 to disable)", "24")]
-
- __description = """Simply-Premium.com hook plugin"""
- __license = "GPLv3"
- __authors = [("EvolutionClip", "evolutionclip@live.de")]
-
-
- def getHoster(self):
- json_data = getURL("http://www.simply-premium.com/api/hosts.php", get={'format': "json", 'online': 1})
- json_data = json_loads(json_data)
-
- host_list = [element['regex'] for element in json_data['result']]
-
- return host_list
diff --git a/pyload/plugins/hook/SimplydebridCom.py b/pyload/plugins/hook/SimplydebridCom.py
deleted file mode 100644
index a6e00aae0..000000000
--- a/pyload/plugins/hook/SimplydebridCom.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class SimplydebridCom(MultiHoster):
- __name = "SimplydebridCom"
- __type = "hook"
- __version = "0.01"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", "")]
-
- __description = """Simply-Debrid.com hook plugin"""
- __license = "GPLv3"
- __authors = [("Kagenoshin", "kagenoshin@gmx.ch")]
-
-
- def getHoster(self):
- page = getURL("http://simply-debrid.com/api.php", get={'list': 1})
- return [x.strip() for x in page.rstrip(';').replace("\"", "").split(";")]
diff --git a/pyload/plugins/hook/UnrestrictLi.py b/pyload/plugins/hook/UnrestrictLi.py
deleted file mode 100644
index d48e8587e..000000000
--- a/pyload/plugins/hook/UnrestrictLi.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.utils import json_loads
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class UnrestrictLi(MultiHoster):
- __name = "UnrestrictLi"
- __type = "hook"
- __version = "0.02"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", ""),
- ("unloadFailing", "bool", "Revert to standard download if download fails", False),
- ("interval", "int", "Reload interval in hours (0 to disable)", 24),
- ("history", "bool", "Delete History", False)]
-
- __description = """Unrestrict.li hook plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- def getHoster(self):
- json_data = getURL("http://unrestrict.li/api/jdownloader/hosts.php", get={'format': "json"})
- json_data = json_loads(json_data)
-
- host_list = [element['host'] for element in json_data['result']]
-
- return host_list
diff --git a/pyload/plugins/hook/XFileSharingPro.py b/pyload/plugins/hook/XFileSharingPro.py
deleted file mode 100644
index 28cc5f62d..000000000
--- a/pyload/plugins/hook/XFileSharingPro.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Addon import Addon
-
-
-class XFileSharingPro(Addon):
- __name = "XFileSharingPro"
- __type = "hook"
- __version = "0.26"
-
- __config = [("activated" , "bool", "Activated" , True ),
- ("use_hoster_list" , "bool", "Load listed hosters only" , True ),
- ("use_crypter_list", "bool", "Load listed crypters only" , False),
- ("use_builtin_list", "bool", "Load built-in plugin list" , True ),
- ("hoster_list" , "str" , "Hoster list (comma separated)" , "" ),
- ("crypter_list" , "str" , "Crypter list (comma separated)", "" )]
-
- __description = """Load XFileSharingPro based hosters and crypter which don't need a own plugin to run"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- # event_list = ["pluginConfigChanged"]
- regexp = {'hoster' : (r'https?://(?:www\.)?([\w.^_]+(?:\.[a-zA-Z]{2,})(?:\:\d+)?)/(?:embed-)?\w{12}(?:\W|$)',
- r'https?://(?:[^/]+\.)?(%s)/(?:embed-)?\w+'),
- 'crypter': (r'https?://(?:www\.)?([\w.^_]+(?:\.[a-zA-Z]{2,})(?:\:\d+)?)/(?:user|folder)s?/\w+',
- r'https?://(?:[^/]+\.)?(%s)/(?:user|folder)s?/\w+')}
-
- HOSTER_LIST = [#WORKING HOSTERS:
- "eyesfile.ca", "file4safe.com", "fileband.com", "filedwon.com", "filevice.com", "hostingbulk.com",
- "linestorage.com", "ravishare.com", "sharesix.com", "thefile.me", "verzend.be", "xvidstage.com",
- #NOT TESTED:
- "101shared.com", "4upfiles.com", "filemaze.ws", "filenuke.com", "linkzhost.com", "mightyupload.com",
- "rockdizfile.com", "sharebeast.com", "sharerepo.com", "shareswift.com", "uploadbaz.com", "uploadc.com",
- "vidbull.com", "zalaa.com", "zomgupload.com",
- #NOT WORKING:
- "amonshare.com", "banicrazy.info", "boosterking.com", "host4desi.com", "laoupload.com", "rd-fs.com"]
- CRYPTER_LIST = []
-
-
- # def pluginConfigChanged(self.__name, plugin, name, value):
- # self.loadPattern()
-
-
- def activate(self):
- self.loadPattern()
-
-
- def loadPattern(self):
- use_builtin_list = self.getConfig('use_builtin_list')
-
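- # build the URL pattern for each plugin type from the configured and built-in host lists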
- for type in ("hoster", "crypter"):
- every_plugin = not self.getConfig("use_%s_list" % type)
-
- if every_plugin:
- self.logInfo(_("Handling any %s I can!") % type)
- pattern = self.regexp[type][0]
- else:
- s = self.getConfig('%s_list' % type).replace('\\', '').replace('|', ',').replace(';', ',').lower()
- plugin_list = set([x.strip() for x in s.split(',')])
-
- if use_builtin_list:
- plugin_list |= set([x.lower() for x in getattr(self, "%s_LIST" % type.upper())])
-
- plugin_list -= set(('', u''))
-
- if not plugin_list:
- self.logInfo(_("No %s to handle") % type)
- self._unload(type)
- return
-
- match_list = '|'.join(sorted(plugin_list))
-
- len_match_list = len(plugin_list)
- self.logInfo(_("Handling %d %s%s: %s") % (len_match_list, type, "" if len_match_list is 1 else "s", match_list.replace('|', ', ')))
-
- pattern = self.regexp[type][1] % match_list.replace('.', '\.')
-
- dict = self.core.pluginManager.plugins[type]["XFileSharingPro"]
- dict['pattern'] = pattern
- dict['re'] = re.compile(pattern)
-
- self.logDebug("Loaded %s pattern: %s" % (type, pattern))
-
-
- def _unload(self, type):
- dict = self.core.pluginManager.plugins[type]["XFileSharingPro"]
- dict['pattern'] = r'^unmatchable$'
- dict['re'] = re.compile(dict['pattern'])
-
-
- def deactivate(self):
- for type in ("hoster", "crypter"):
- self._unload(type)
diff --git a/pyload/plugins/hook/ZeveraCom.py b/pyload/plugins/hook/ZeveraCom.py
deleted file mode 100644
index 6ff99b142..000000000
--- a/pyload/plugins/hook/ZeveraCom.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.MultiHoster import MultiHoster
-
-
-class ZeveraCom(MultiHoster):
- __name = "ZeveraCom"
- __type = "hook"
- __version = "0.02"
-
- __config = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
- ("hosterList", "str", "Hoster list (comma separated)", "")]
-
- __description = """Real-Debrid.com hook plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def getHoster(self):
- page = getURL("http://www.zevera.com/jDownloader.ashx", get={'cmd': "gethosters"})
- return [x.strip() for x in page.replace("\"", "").split(",")]
diff --git a/pyload/plugins/hoster/AlldebridCom.py b/pyload/plugins/hoster/AlldebridCom.py
deleted file mode 100644
index c2894f86b..000000000
--- a/pyload/plugins/hoster/AlldebridCom.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from random import randrange
-from urllib import unquote
-
-from pyload.utils import json_loads
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import parseFileSize
-
-
-class AlldebridCom(Hoster):
- __name = "AlldebridCom"
- __type = "hoster"
- __version = "0.34"
-
- __pattern = r'https?://(?:[^/]*\.)?alldebrid\..*'
-
- __description = """Alldebrid.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Andy Voigt", "spamsales@online.de")]
-
-
- def getFilename(self, url):
- try:
- name = unquote(url.rsplit("/", 1)[1])
- except IndexError:
- name = "Unknown_Filename..."
- if name.endswith("..."): # incomplete filename, append random stuff
- name += "%s.tmp" % randrange(100, 999)
- return name
-
-
- def setup(self):
- self.chunkLimit = 16
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- if re.match(self.__pattern, pyfile.url):
- new_url = pyfile.url
- elif not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "AllDebrid")
- self.fail(_("No AllDebrid account provided"))
- else:
- self.logDebug("Old URL: %s" % pyfile.url)
- password = (self.getPassword().splitlines() or [""])[0]
-
- data = json_loads(self.load("http://www.alldebrid.com/service.php",
- get={'link': pyfile.url, 'json': "true", 'pw': password}))
-
- self.logDebug("Json data", data)
-
- if data['error']:
- if data['error'] == "This link isn't available on the hoster website.":
- self.offline()
- else:
- self.logWarning(data['error'])
- self.tempOffline()
- else:
- if pyfile.name and not pyfile.name.endswith('.tmp'):
- pyfile.name = data['filename']
- pyfile.size = parseFileSize(data['filesize'])
- new_url = data['link']
-
- if self.getConfig("https"):
- new_url = new_url.replace("http://", "https://")
- else:
- new_url = new_url.replace("https://", "http://")
-
- if new_url != pyfile.url:
- self.logDebug("New URL: %s" % new_url)
-
- if pyfile.name.startswith("http") or pyfile.name.startswith("Unknown"):
- #only use when name wasn't already set
- pyfile.name = self.getFilename(new_url)
-
- self.download(new_url, disposition=True)
-
- check = self.checkDownload({'error': "<title>An error occured while processing your request</title>",
- 'empty': re.compile(r"^$")})
-
- if check == "error":
- self.retry(wait_time=60, reason=_("An error occurred while generating link"))
- elif check == "empty":
- self.retry(wait_time=60, reason=_("Downloaded File was empty"))
diff --git a/pyload/plugins/hoster/BayfilesCom.py b/pyload/plugins/hoster/BayfilesCom.py
deleted file mode 100644
index d51119284..000000000
--- a/pyload/plugins/hoster/BayfilesCom.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import time
-
-from pyload.utils import json_loads
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class BayfilesCom(SimpleHoster):
- __name = "BayfilesCom"
- __type = "hoster"
- __version = "0.08"
-
- __pattern = r'https?://(?:www\.)?bayfiles\.(com|net)/file/(?P<ID>\w+/\w+/[^/]+)'
-
- __description = """Bayfiles.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- INFO_PATTERN = r'<p title="(?P<N>[^"]+)">[^<]*<strong>(?P<S>[\d .,]+)(?P<U>[\w^_]+)</strong></p>'
- OFFLINE_PATTERN = r'(<p>The requested file could not be found.</p>|<title>404 Not Found</title>)'
-
- WAIT_PATTERN = r'>Your IP [\d.]* has recently downloaded a file\. Upgrade to premium or wait (\d+) minutes\.<'
- VARS_PATTERN = r'var vfid = (\d+);\s*var delay = (\d+);'
- FREE_LINK_PATTERN = r'javascript:window\.location\.href = \'(.+?)\';'
- PREMIUM_LINK_PATTERN = r'(?:<a class="highlighted-btn" href="|(?=http://s\d+\.baycdn\.com/dl/))(.*?)"'
-
-
- def handleFree(self):
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- self.retry(wait_time=int(m.group(1)) * 60)
-
- # Get download token
- m = re.search(self.VARS_PATTERN, self.html)
- if m is None:
- self.error(_("VARS_PATTERN not found"))
- vfid, delay = m.groups()
-
- res = json_loads(self.load('http://bayfiles.com/ajax_download',
- get={"_": time() * 1000,
- "action": "startTimer",
- "vfid": vfid}, decode=True))
-
- if not "token" in res or not res['token']:
- self.fail(_("No token"))
-
- self.wait(int(delay))
-
- self.html = self.load('http://bayfiles.com/ajax_download', get={
- "token": res['token'],
- "action": "getLink",
- "vfid": vfid})
-
- # Get final link and download
- m = re.search(self.FREE_LINK_PATTERN, self.html)
- if m is None:
- self.error(_("Free link"))
- self.startDownload(m.group(1))
-
-
- def handlePremium(self):
- m = re.search(self.PREMIUM_LINK_PATTERN, self.html)
- if m is None:
- self.error(_("Premium link"))
- self.startDownload(m.group(1))
-
-
- def startDownload(self, url):
- self.logDebug("%s URL: %s" % ("Premium" if self.premium else "Free", url))
- self.download(url)
- # check download
- check = self.checkDownload({
- "waitforfreeslots": re.compile(r"<title>BayFiles</title>"),
- "notfound": re.compile(r"<title>404 Not Found</title>")
- })
- if check == "waitforfreeslots":
- self.retry(30, 5 * 60, "Wait for free slot")
- elif check == "notfound":
- self.retry(30, 5 * 60, "404 Not found")
-
-
-getInfo = create_getInfo(BayfilesCom)
diff --git a/pyload/plugins/hoster/BezvadataCz.py b/pyload/plugins/hoster/BezvadataCz.py
deleted file mode 100644
index 6d34b5067..000000000
--- a/pyload/plugins/hoster/BezvadataCz.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class BezvadataCz(SimpleHoster):
- __name = "BezvadataCz"
- __type = "hoster"
- __version = "0.25"
-
- __pattern = r'http://(?:www\.)?bezvadata\.cz/stahnout/.*'
-
- __description = """BezvaData.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<p><b>Soubor: (?P<N>[^<]+)</b></p>'
- SIZE_PATTERN = r'<li><strong>Velikost:</strong> (?P<S>[^<]+)</li>'
- OFFLINE_PATTERN = r'<title>BezvaData \| Soubor nenalezen</title>'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
-
-
- def handleFree(self):
- #download button
- m = re.search(r'<a class="stahnoutSoubor".*?href="(.*?)"', self.html)
- if m is None:
- self.error(_("Page 1 URL not found"))
- url = "http://bezvadata.cz%s" % m.group(1)
-
- #captcha form
- self.html = self.load(url)
- self.checkErrors()
- for _i in xrange(5):
- action, inputs = self.parseHtmlForm('frm-stahnoutFreeForm')
- if not inputs:
- self.error(_("FreeForm"))
-
- m = re.search(r'<img src="data:image/png;base64,(.*?)"', self.html)
- if m is None:
- self.error(_("Wrong captcha image"))
-
- #captcha image is contained in html page as base64encoded data but decryptCaptcha() expects image url
- self.load, proper_load = self.loadcaptcha, self.load
- try:
- inputs['captcha'] = self.decryptCaptcha(m.group(1), imgtype='png')
- finally:
- self.load = proper_load
-
- if '<img src="data:image/png;base64' in self.html:
- self.invalidCaptcha()
- else:
- self.correctCaptcha()
- break
- else:
- self.fail(_("No valid captcha code entered"))
-
- #download url
- self.html = self.load("http://bezvadata.cz%s" % action, post=inputs)
- self.checkErrors()
- m = re.search(r'<a class="stahnoutSoubor2" href="(.*?)">', self.html)
- if m is None:
- self.error(_("Page 2 URL not found"))
- url = "http://bezvadata.cz%s" % m.group(1)
- self.logDebug("DL URL %s" % url)
-
- #countdown
- m = re.search(r'id="countdown">(\d\d):(\d\d)<', self.html)
- wait_time = (int(m.group(1)) * 60 + int(m.group(2))) if m else 120
- self.wait(wait_time, False)
-
- self.download(url)
-
-
- def checkErrors(self):
- if 'images/button-download-disable.png' in self.html:
- self.longWait(5 * 60, 24) #: parallel dl limit
- elif '<div class="infobox' in self.html:
- self.tempOffline()
-
- self.info.pop('error', None)
-
-
- def loadcaptcha(self, data, *args, **kwargs):
- return data.decode("base64")
-
-
-getInfo = create_getInfo(BezvadataCz)
diff --git a/pyload/plugins/hoster/BillionuploadsCom.py b/pyload/plugins/hoster/BillionuploadsCom.py
deleted file mode 100644
index 87d6f1bce..000000000
--- a/pyload/plugins/hoster/BillionuploadsCom.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class BillionuploadsCom(XFSHoster):
- __name = "BillionuploadsCom"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'http://(?:www\.)?billionuploads\.com/\w{12}'
-
- __description = """Billionuploads.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "billionuploads.com"
-
- NAME_PATTERN = r'<td class="dofir" title="(?P<N>.+?)"'
- SIZE_PATTERN = r'<td class="dofir">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
-
-getInfo = create_getInfo(BillionuploadsCom)
diff --git a/pyload/plugins/hoster/BitshareCom.py b/pyload/plugins/hoster/BitshareCom.py
deleted file mode 100644
index cd0028850..000000000
--- a/pyload/plugins/hoster/BitshareCom.py
+++ /dev/null
@@ -1,157 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import re
-
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class BitshareCom(SimpleHoster):
- __name = "BitshareCom"
- __type = "hoster"
- __version = "0.51"
-
- __pattern = r'http://(?:www\.)?bitshare\.com/(files/(?P<id1>\w+)(/(?P<name>.*?)\.html)?|\?f=(?P<id2>\w+))'
-
- __description = """Bitshare.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Paul King", ""),
- ("fragonib", "fragonib[AT]yahoo[DOT]es")]
-
-
- INFO_PATTERN = r'Downloading (?P<N>.+) - (?P<S>[\d.,]+) (?P<U>[\w^_]+)</h1>'
- OFFLINE_PATTERN = r'(>We are sorry, but the requested file was not found in our database|>Error - File not available<|The file was deleted either by the uploader, inactivity or due to copyright claim)'
-
- COOKIES = [("bitshare.com", "language_selection", "EN")]
-
- AJAXID_PATTERN = r'var ajaxdl = "(.*?)";'
- TRAFFIC_USED_UP = r'Your Traffic is used up for today. Upgrade to premium to continue!'
-
-
- def setup(self):
- self.multiDL = self.premium
- self.chunkLimit = 1
-
-
- def process(self, pyfile):
- if self.premium:
- self.account.relogin(self.user)
-
- self.pyfile = pyfile
-
- # File id
- m = re.match(self.__pattern, pyfile.url)
- self.file_id = max(m.group('id1'), m.group('id2'))
- self.logDebug("File id is [%s]" % self.file_id)
-
- # Load main page
- self.html = self.load(pyfile.url, ref=False, decode=True)
-
- # Check offline
- if re.search(self.OFFLINE_PATTERN, self.html):
- self.offline()
-
- # Check Traffic used up
- if re.search(self.TRAFFIC_USED_UP, self.html):
- self.logInfo(_("Your Traffic is used up for today"))
- self.wait(30 * 60, True)
- self.retry()
-
- # File name
- m = re.match(self.__pattern, pyfile.url)
- name1 = m.group('name') if m else None
- m = re.search(self.INFO_PATTERN, self.html)
- name2 = m.group('N') if m else None
- pyfile.name = max(name1, name2)
-
- # Ajax file id
- self.ajaxid = re.search(self.AJAXID_PATTERN, self.html).group(1)
- self.logDebug("File ajax id is [%s]" % self.ajaxid)
-
- # This may either download our file or forward us to an error page
- url = self.getDownloadUrl()
- self.download(url)
-
- check = self.checkDownload({"404": ">404 Not Found<", "error": ">Error occured<"})
- if check == "404":
- self.retry(3, 60, 'Error 404')
- elif check == "error":
- self.retry(5, 5 * 60, "Bitshare host: Error occurred")
-
-
- def getDownloadUrl(self):
- # Return location if direct download is active
- if self.premium:
- header = self.load(self.pyfile.url, cookies=True, just_header=True)
- if 'location' in header:
- return header['location']
-
- # Get download info
- self.logDebug("Getting download info")
- res = self.load("http://bitshare.com/files-ajax/" + self.file_id + "/request.html",
- post={"request": "generateID", "ajaxid": self.ajaxid})
- self.handleErrors(res, ':')
- parts = res.split(":")
- filetype = parts[0]
- wait = int(parts[1])
- captcha = int(parts[2])
- self.logDebug("Download info [type: '%s', waiting: %d, captcha: %d]" % (filetype, wait, captcha))
-
- # Waiting
- if wait > 0:
- self.logDebug("Waiting %d seconds." % wait)
- if wait < 120:
- self.wait(wait, False)
- else:
- self.wait(wait - 55, True)
- self.retry()
-
- # Resolve captcha
- if captcha == 1:
- self.logDebug("File is captcha protected")
- recaptcha = ReCaptcha(self)
-
- # Try up to 3 times
- for i in xrange(3):
- challenge, response = recaptcha.challenge()
- res = self.load("http://bitshare.com/files-ajax/" + self.file_id + "/request.html",
- post={"request" : "validateCaptcha",
- "ajaxid" : self.ajaxid,
- "recaptcha_challenge_field": challenge,
- "recaptcha_response_field" : response})
- if self.handleCaptchaErrors(res):
- break
-
- # Get download URL
- self.logDebug("Getting download url")
- res = self.load("http://bitshare.com/files-ajax/" + self.file_id + "/request.html",
- post={"request": "getDownloadURL", "ajaxid": self.ajaxid})
- self.handleErrors(res, '#')
- url = res.split("#")[-1]
-
- return url
-
-
- def handleErrors(self, res, separator):
- self.logDebug("Checking response [%s]" % res)
- if "ERROR:Session timed out" in res:
- self.retry()
- elif "ERROR" in res:
- msg = res.split(separator)[-1]
- self.fail(msg)
-
-
- def handleCaptchaErrors(self, res):
- self.logDebug("Result of captcha resolving [%s]" % res)
- if "SUCCESS" in res:
- self.correctCaptcha()
- return True
- elif "ERROR:SESSION ERROR" in res:
- self.retry()
-
- self.invalidCaptcha()
-
-
-getInfo = create_getInfo(BitshareCom)
diff --git a/pyload/plugins/hoster/BoltsharingCom.py b/pyload/plugins/hoster/BoltsharingCom.py
deleted file mode 100644
index 0eea35b8c..000000000
--- a/pyload/plugins/hoster/BoltsharingCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class BoltsharingCom(DeadHoster):
- __name = "BoltsharingCom"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?boltsharing\.com/\w{12}'
-
- __description = """Boltsharing.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(BoltsharingCom)
diff --git a/pyload/plugins/hoster/CatShareNet.py b/pyload/plugins/hoster/CatShareNet.py
deleted file mode 100644
index 3e1520d4b..000000000
--- a/pyload/plugins/hoster/CatShareNet.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class CatShareNet(SimpleHoster):
- __name = "CatShareNet"
- __type = "hoster"
- __version = "0.08"
-
- __pattern = r'http://(?:www\.)?catshare\.net/\w{16}'
-
- __description = """CatShare.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("z00nx", "z00nx0@gmail.com"),
- ("prOq", ""),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- TEXT_ENCODING = True
-
- INFO_PATTERN = r'<title>(?P<N>.+) \((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)<'
- OFFLINE_PATTERN = ur'Podany plik został usunięty\s*</div>'
-
-    IP_BLOCKED_PATTERN = ur'>Nasz serwis wykrył że Twój adres IP nie pochodzi z Polski.<'
- SECONDS_PATTERN = 'var\scount\s=\s(\d+);'
- LINK_PATTERN = r'<form action="(.+?)" method="GET">'
-
-
- def setup(self):
- self.multiDL = self.premium
- self.resumeDownload = True
-
-
- def getFileInfo(self):
- m = re.search(self.IP_BLOCKED_PATTERN, self.html)
- if m:
- self.fail(_("Only connections from Polish IP address are allowed"))
- return super(CatShareNet, self).getFileInfo()
-
-
- def handleFree(self):
- m = re.search(self.SECONDS_PATTERN, self.html)
- if m:
- wait_time = int(m.group(1))
- self.wait(wait_time, True)
-
- recaptcha = ReCaptcha(self)
-
- challenge, response = recaptcha.challenge()
- self.html = self.load(self.pyfile.url,
- post={'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field' : response})
-
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.invalidCaptcha()
- self.retry(reason=_("Wrong captcha entered"))
-
- dl_link = m.group(1)
- self.download(dl_link, disposition=True)
-
-
-getInfo = create_getInfo(CatShareNet)
diff --git a/pyload/plugins/hoster/CloudzerNet.py b/pyload/plugins/hoster/CloudzerNet.py
deleted file mode 100644
index 1056df232..000000000
--- a/pyload/plugins/hoster/CloudzerNet.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class CloudzerNet(DeadHoster):
- __name = "CloudzerNet"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'https?://(?:www\.)?(cloudzer\.net/file/|clz\.to/(file/)?)\w+'
-
- __description = """Cloudzer.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("gs", "I-_-I-_-I@web.de"),
- ("z00nx", "z00nx0@gmail.com"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(CloudzerNet)
diff --git a/pyload/plugins/hoster/CramitIn.py b/pyload/plugins/hoster/CramitIn.py
deleted file mode 100644
index dafe2e5ed..000000000
--- a/pyload/plugins/hoster/CramitIn.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class CramitIn(XFSHoster):
- __name = "CramitIn"
- __type = "hoster"
- __version = "0.07"
-
- __pattern = r'http://(?:www\.)?cramit\.in/\w{12}'
-
- __description = """Cramit.in hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "cramit.in"
-
- INFO_PATTERN = r'<span class=t2>\s*(?P<N>.*?)</span>.*?<small>\s*\((?P<S>.*?)\)'
- LINK_PATTERN = r'href="(http://cramit\.in/file_download/.*?)"'
-
-
-getInfo = create_getInfo(CramitIn)
diff --git a/pyload/plugins/hoster/CrockoCom.py b/pyload/plugins/hoster/CrockoCom.py
deleted file mode 100644
index dcce3973f..000000000
--- a/pyload/plugins/hoster/CrockoCom.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class CrockoCom(SimpleHoster):
- __name = "CrockoCom"
- __type = "hoster"
- __version = "0.17"
-
- __pattern = r'http://(?:www\.)?(crocko|easy-share)\.com/\w+'
-
- __description = """Crocko hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<span class="fz24">Download:\s*<strong>(?P<N>.*)'
- SIZE_PATTERN = r'<span class="tip1"><span class="inner">(?P<S>[^<]+)</span></span>'
- OFFLINE_PATTERN = r'<h1>Sorry,<br />the page you\'re looking for <br />isn\'t here.</h1>|File not found'
-
- CAPTCHA_PATTERN = re.compile(r"u='(/file_contents/captcha/\w+)';\s*w='(\d+)';")
-
- FORM_PATTERN = r'<form method="post" action="([^"]+)">(.*?)</form>'
- FORM_INPUT_PATTERN = r'<input[^>]* name="?([^" ]+)"? value="?([^" ]+)"?[^>]*>'
-
- NAME_REPLACEMENTS = [(r'<[^>]*>', '')]
-
-
- def handleFree(self):
- if "You need Premium membership to download this file." in self.html:
- self.fail(_("You need Premium membership to download this file"))
-
- for _i in xrange(5):
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m:
- url, wait_time = 'http://crocko.com' + m.group(1), int(m.group(2))
- self.wait(wait_time)
- self.html = self.load(url)
- else:
- break
-
- m = re.search(self.FORM_PATTERN, self.html, re.S)
- if m is None:
- self.error(_("FORM_PATTERN not found"))
-
- action, form = m.groups()
- inputs = dict(re.findall(self.FORM_INPUT_PATTERN, form))
- recaptcha = ReCaptcha(self)
-
- for _i in xrange(5):
- inputs['recaptcha_challenge_field'], inputs['recaptcha_response_field'] = recaptcha.challenge()
- self.download(action, post=inputs)
-
- check = self.checkDownload({
- "captcha_err": recaptcha.KEY_AJAX_PATTERN
- })
-
- if check == "captcha_err":
- self.invalidCaptcha()
- else:
- break
- else:
- self.fail(_("No valid captcha solution received"))
-
-
-getInfo = create_getInfo(CrockoCom)
diff --git a/pyload/plugins/hoster/CyberlockerCh.py b/pyload/plugins/hoster/CyberlockerCh.py
deleted file mode 100644
index f4c7aaa57..000000000
--- a/pyload/plugins/hoster/CyberlockerCh.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class CyberlockerCh(DeadHoster):
- __name = "CyberlockerCh"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?cyberlocker\.ch/\w+'
-
- __description = """Cyberlocker.ch hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(CyberlockerCh)
diff --git a/pyload/plugins/hoster/CzshareCom.py b/pyload/plugins/hoster/CzshareCom.py
deleted file mode 100644
index 5821da1fd..000000000
--- a/pyload/plugins/hoster/CzshareCom.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://czshare.com/5278880/random.bin
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-from pyload.utils import parseFileSize
-
-
-class CzshareCom(SimpleHoster):
- __name = "CzshareCom"
- __type = "hoster"
- __version = "0.95"
-
- __pattern = r'http://(?:www\.)?(czshare|sdilej)\.(com|cz)/(\d+/|download\.php\?).*'
-
- __description = """CZshare.com hoster plugin, now Sdilej.cz"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<div class="tab" id="parameters">\s*<p>\s*Cel. n.zev: <a href=[^>]*>(?P<N>[^<]+)</a>'
- SIZE_PATTERN = r'<div class="tab" id="category">(?:\s*<p>[^\n]*</p>)*\s*Velikost:\s*(?P<S>[\d .,]+)(?P<U>[\w^_]+)\s*</div>'
- OFFLINE_PATTERN = r'<div class="header clearfix">\s*<h2 class="red">'
-
- SIZE_REPLACEMENTS = [(' ', '')]
- URL_REPLACEMENTS = [(r'http://[^/]*/download.php\?.*?id=(\w+).*', r'http://sdilej.cz/\1/x/')]
-
- FORCE_CHECK_TRAFFIC = True
-
- FREE_URL_PATTERN = r'<a href="([^"]+)" class="page-download">[^>]*alt="([^"]+)" /></a>'
- FREE_FORM_PATTERN = r'<form action="download\.php" method="post">\s*<img src="captcha\.php" id="captcha" />(.*?)</form>'
- PREMIUM_FORM_PATTERN = r'<form action="/profi_down\.php" method="post">(.*?)</form>'
- FORM_INPUT_PATTERN = r'<input[^>]* name="([^"]+)" value="([^"]+)"[^>]*/>'
- MULTIDL_PATTERN = r'<p><font color=\'red\'>Z[^<]*PROFI.</font></p>'
- USER_CREDIT_PATTERN = r'<div class="credit">\s*kredit: <strong>([\d .,]+)(\w+)</strong>\s*</div><!-- .credit -->'
-
-
- def checkTrafficLeft(self):
- # check if user logged in
- m = re.search(self.USER_CREDIT_PATTERN, self.html)
- if m is None:
- self.account.relogin(self.user)
- self.html = self.load(self.pyfile.url, cookies=True, decode=True)
- m = re.search(self.USER_CREDIT_PATTERN, self.html)
- if m is None:
- return False
-
- # check user credit
- try:
- credit = parseFileSize(m.group(1).replace(' ', ''), m.group(2))
- self.logInfo(_("Premium download for %i KiB of Credit") % (self.pyfile.size / 1024))
- self.logInfo(_("User %s has %i KiB left") % (self.user, credit / 1024))
- if credit < self.pyfile.size:
- self.logInfo(_("Not enough credit to download file: %s") % self.pyfile.name)
- return False
- except Exception, e:
- # let's continue and see what happens...
- self.logError(e)
-
- return True
-
-
- def handlePremium(self):
- # parse download link
- try:
- form = re.search(self.PREMIUM_FORM_PATTERN, self.html, re.S).group(1)
- inputs = dict(re.findall(self.FORM_INPUT_PATTERN, form))
- except Exception, e:
- self.logError(e)
- self.resetAccount()
-
- # download the file, destination is determined by pyLoad
- self.download("http://sdilej.cz/profi_down.php", post=inputs, disposition=True)
- self.checkDownloadedFile()
-
-
- def handleFree(self):
- # get free url
- m = re.search(self.FREE_URL_PATTERN, self.html)
- if m is None:
- self.error(_("FREE_URL_PATTERN not found"))
- parsed_url = "http://sdilej.cz" + m.group(1)
- self.logDebug("PARSED_URL:" + parsed_url)
-
- # get download ticket and parse html
- self.html = self.load(parsed_url, cookies=True, decode=True)
- if re.search(self.MULTIDL_PATTERN, self.html):
- self.longWait(5 * 60, 12)
-
- try:
- form = re.search(self.FREE_FORM_PATTERN, self.html, re.S).group(1)
- inputs = dict(re.findall(self.FORM_INPUT_PATTERN, form))
- self.pyfile.size = int(inputs['size'])
- except Exception, e:
- self.logError(e)
- self.error(_("Form"))
-
- # get and decrypt captcha
- captcha_url = 'http://sdilej.cz/captcha.php'
- for _i in xrange(5):
- inputs['captchastring2'] = self.decryptCaptcha(captcha_url)
- self.html = self.load(parsed_url, cookies=True, post=inputs, decode=True)
- if u"<li>ZadanÃœ ověřovací kód nesouhlasí!</li>" in self.html:
- self.invalidCaptcha()
- elif re.search(self.MULTIDL_PATTERN, self.html):
- self.longWait(5 * 60, 12)
- else:
- self.correctCaptcha()
- break
- else:
- self.fail(_("No valid captcha code entered"))
-
- m = re.search("countdown_number = (\d+);", self.html)
- self.setWait(int(m.group(1)) if m else 50)
-
- # download the file, destination is determined by pyLoad
- self.logDebug("WAIT URL", self.req.lastEffectiveURL)
- m = re.search("free_wait.php\?server=(.*?)&(.*)", self.req.lastEffectiveURL)
- if m is None:
- self.error(_("Download URL not found"))
-
- url = "http://%s/download.php?%s" % (m.group(1), m.group(2))
-
- self.wait()
- self.download(url)
- self.checkDownloadedFile()
-
-
- def checkDownloadedFile(self):
- # check download
- check = self.checkDownload({
- "temp_offline": re.compile(r"^Soubor je do.*asn.* nedostupn.*$"),
- "credit": re.compile(r"^Nem.*te dostate.*n.* kredit.$"),
- "multi_dl": re.compile(self.MULTIDL_PATTERN),
- "captcha_err": "<li>ZadanÃœ ověřovací kód nesouhlasí!</li>"
- })
-
- if check == "temp_offline":
- self.fail(_("File not available - try later"))
- if check == "credit":
- self.resetAccount()
- elif check == "multi_dl":
- self.longWait(5 * 60, 12)
- elif check == "captcha_err":
- self.invalidCaptcha()
- self.retry()
-
-
-getInfo = create_getInfo(CzshareCom)
diff --git a/pyload/plugins/hoster/DailymotionCom.py b/pyload/plugins/hoster/DailymotionCom.py
deleted file mode 100644
index abc282295..000000000
--- a/pyload/plugins/hoster/DailymotionCom.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.datatype.PyFile import statusMap
-from pyload.utils import json_loads
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Hoster import Hoster
-
-
-def getInfo(urls):
- result = []
- regex = re.compile(DailymotionCom.__pattern)
- apiurl = "https://api.dailymotion.com/video/%s"
- request = {"fields": "access_error,status,title"}
-
- for url in urls:
- id = regex.match(url).group("ID")
- page = getURL(apiurl % id, get=request)
- info = json_loads(page)
-
- name = info['title'] + ".mp4" if "title" in info else url
-
- if "error" in info or info['access_error']:
- status = "offline"
- else:
- status = info['status']
- if status in ("ready", "published"):
- status = "online"
- elif status in ("waiting", "processing"):
- status = "temp. offline"
- else:
- status = "offline"
-
- result.append((name, 0, statusMap[status], url))
-
- return result
-
-
-class DailymotionCom(Hoster):
- __name = "DailymotionCom"
- __type = "hoster"
- __version = "0.20"
-
- __pattern = r'https?://(?:www\.)?dailymotion\.com/.*video/(?P<ID>[\w^_]+)'
- __config = [("quality", "Lowest;LD 144p;LD 240p;SD 384p;HQ 480p;HD 720p;HD 1080p;Highest", "Quality", "Highest")]
-
- __description = """Dailymotion.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
-
-
- def getStreams(self):
- streams = []
-
- for result in re.finditer(r"\"(?P<URL>http:\\/\\/www.dailymotion.com\\/cdn\\/H264-(?P<QF>.*?)\\.*?)\"",
- self.html):
- url = result.group("URL")
- qf = result.group("QF")
-
- link = url.replace("\\", "")
- quality = tuple(int(x) for x in qf.split("x"))
-
- streams.append((quality, link))
-
- return sorted(streams, key=lambda x: x[0][::-1])
-
-
- def getQuality(self):
- q = self.getConfig("quality")
-
- if q == "Lowest":
- quality = 0
- elif q == "Highest":
- quality = -1
- else:
- quality = int(q.rsplit(" ")[1][:-1])
-
- return quality
-
-
- def getLink(self, streams, quality):
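-        # Pick the largest stream whose height does not exceed the requested quality;
-        # a quality of 0 ("Lowest") or -1 ("Highest") indexes the stream list directly.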
- if quality > 0:
- for x, s in reversed([item for item in enumerate(streams)]):
- qf = s[0][1]
- if qf <= quality:
- idx = x
- break
- else:
- idx = 0
- else:
- idx = quality
-
- s = streams[idx]
-
- self.logInfo(_("Download video quality %sx%s") % s[0])
-
- return s[1]
-
-
- def checkInfo(self, pyfile):
- pyfile.name, pyfile.size, pyfile.status, pyfile.url = getInfo([pyfile.url])[0]
-
- if pyfile.status == 1:
- self.offline()
-
- elif pyfile.status == 6:
- self.tempOffline()
-
-
- def process(self, pyfile):
- self.checkInfo(pyfile)
-
- id = re.match(self.__pattern, pyfile.url).group("ID")
- self.html = self.load("http://www.dailymotion.com/embed/video/" + id, decode=True)
-
- streams = self.getStreams()
- quality = self.getQuality()
-
- self.download(self.getLink(streams, quality))
diff --git a/pyload/plugins/hoster/DataHu.py b/pyload/plugins/hoster/DataHu.py
deleted file mode 100644
index 6f798c5dc..000000000
--- a/pyload/plugins/hoster/DataHu.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://data.hu/get/6381232/random.bin
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class DataHu(SimpleHoster):
- __name = "DataHu"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?data\.hu/get/\w+'
-
- __description = """Data.hu hoster plugin"""
- __license = "GPLv3"
- __authors = [("crash", ""),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- INFO_PATTERN = ur'<title>(?P<N>.*) \((?P<S>[^)]+)\) let\xf6lt\xe9se</title>'
- OFFLINE_PATTERN = ur'Az adott f\xe1jl nem l\xe9tezik'
- LINK_PATTERN = r'<div class="download_box_button"><a href="([^"]+)">'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = self.premium
-
-
- def handleFree(self):
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("LINK_PATTERN not found"))
-
- self.download(m.group(1), disposition=True)
-
-
-getInfo = create_getInfo(DataHu)
diff --git a/pyload/plugins/hoster/DataportCz.py b/pyload/plugins/hoster/DataportCz.py
deleted file mode 100644
index fe01a1bda..000000000
--- a/pyload/plugins/hoster/DataportCz.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class DataportCz(SimpleHoster):
- __name = "DataportCz"
- __type = "hoster"
- __version = "0.40"
-
- __pattern = r'http://(?:www\.)?dataport\.cz/file/(.*)'
-
- __description = """Dataport.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<span itemprop="name">(?P<N>[^<]+)</span>'
- SIZE_PATTERN = r'<td class="fil">Velikost</td>\s*<td>(?P<S>[^<]+)</td>'
- OFFLINE_PATTERN = r'<h2>Soubor nebyl nalezen</h2>'
-
- CAPTCHA_PATTERN = r'<section id="captcha_bg">\s*<img src="(.*?)"'
-    FREE_SLOTS_PATTERN = ur'Počet volných slotů: <span class="darkblue">(\d+)</span><br />'
-
-
- def handleFree(self):
- captchas = {"1": "jkeG", "2": "hMJQ", "3": "vmEK", "4": "ePQM", "5": "blBd"}
-
- for _i in xrange(60):
- action, inputs = self.parseHtmlForm('free_download_form')
- self.logDebug(action, inputs)
- if not action or not inputs:
- self.error(_("free_download_form"))
-
- if "captchaId" in inputs and inputs['captchaId'] in captchas:
- inputs['captchaCode'] = captchas[inputs['captchaId']]
- else:
- self.error(_("captcha"))
-
- self.html = self.download("http://www.dataport.cz%s" % action, post=inputs)
-
- check = self.checkDownload({"captcha": 'alert("\u0160patn\u011b opsan\u00fd k\u00f3d z obr\u00e1zu");',
- "slot": 'alert("Je n\u00e1m l\u00edto, ale moment\u00e1ln\u011b nejsou'})
- if check == "captcha":
- self.error(_("invalid captcha"))
- elif check == "slot":
- self.logDebug("No free slots - wait 60s and retry")
- self.wait(60, False)
- self.html = self.load(self.pyfile.url, decode=True)
- continue
- else:
- break
-
-
-getInfo = create_getInfo(DataportCz)
diff --git a/pyload/plugins/hoster/DateiTo.py b/pyload/plugins/hoster/DateiTo.py
deleted file mode 100644
index 4ab4ce6a2..000000000
--- a/pyload/plugins/hoster/DateiTo.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class DateiTo(SimpleHoster):
- __name = "DateiTo"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?datei\.to/datei/(?P<ID>\w+)\.html'
-
- __description = """Datei.to hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'Dateiname:</td>\s*<td colspan="2"><strong>(?P<N>.*?)</'
- SIZE_PATTERN = r'Dateigr&ouml;&szlig;e:</td>\s*<td colspan="2">(?P<S>.*?)</'
-    OFFLINE_PATTERN = r'>Datei wurde nicht gefunden<|>Bitte wähle deine Datei aus... <'
-
- WAIT_PATTERN = r'countdown\({seconds: (\d+)'
-    MULTIDL_PATTERN = r'>Du lädst bereits eine Datei herunter<'
-
- DATA_PATTERN = r'url: "(.*?)", data: "(.*?)",'
-
-
- def handleFree(self):
- url = 'http://datei.to/ajax/download.php'
- data = {'P': 'I', 'ID': self.info['pattern']['ID']}
- recaptcha = ReCaptcha(self)
-
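-        # The download is prepared in several ajax steps (P: 'I' ... 'IV'); each
-        # response provides the url and post data for the next request (DATA_PATTERN).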
- for _i in xrange(10):
- self.logDebug("URL", url, "POST", data)
- self.html = self.load(url, post=data)
- self.checkErrors()
-
- if url.endswith('download.php') and 'P' in data:
- if data['P'] == 'I':
- self.doWait()
-
- elif data['P'] == 'IV':
- break
-
- m = re.search(self.DATA_PATTERN, self.html)
- if m is None:
- self.error(_("data"))
- url = 'http://datei.to/' + m.group(1)
- data = dict(x.split('=') for x in m.group(2).split('&'))
-
- if url.endswith('recaptcha.php'):
- data['recaptcha_challenge_field'], data['recaptcha_response_field'] = recaptcha.challenge()
- else:
- self.fail(_("Too bad..."))
-
- self.download(self.html)
-
-
- def checkErrors(self):
- m = re.search(self.MULTIDL_PATTERN, self.html)
- if m:
- m = re.search(self.WAIT_PATTERN, self.html)
- wait_time = int(m.group(1)) if m else 30
-
- errmsg = self.info['error'] = _("Parallel downloads")
- self.retry(wait_time=wait_time, reason=errmsg)
-
- self.info.pop('error', None)
-
-
- def doWait(self):
- m = re.search(self.WAIT_PATTERN, self.html)
- wait_time = int(m.group(1)) if m else 30
-
- self.load('http://datei.to/ajax/download.php', post={'P': 'Ads'})
- self.wait(wait_time, False)
-
-
-getInfo = create_getInfo(DateiTo)
diff --git a/pyload/plugins/hoster/DdlstorageCom.py b/pyload/plugins/hoster/DdlstorageCom.py
deleted file mode 100644
index e2f4c87dc..000000000
--- a/pyload/plugins/hoster/DdlstorageCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class DdlstorageCom(DeadHoster):
- __name = "DdlstorageCom"
- __type = "hoster"
- __version = "1.02"
-
- __pattern = r'https?://(?:www\.)?ddlstorage\.com/\w+'
-
- __description = """DDLStorage.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(DdlstorageCom)
diff --git a/pyload/plugins/hoster/DebridItaliaCom.py b/pyload/plugins/hoster/DebridItaliaCom.py
deleted file mode 100644
index 34379ce69..000000000
--- a/pyload/plugins/hoster/DebridItaliaCom.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-from pyload.plugins.internal.SimpleHoster import replace_patterns
-
-
-class DebridItaliaCom(Hoster):
- __name = "DebridItaliaCom"
- __type = "hoster"
- __version = "0.07"
-
- __pattern = r'http://s\d+\.debriditalia\.com/dl/\d+'
-
- __description = """Debriditalia.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
-    URL_REPLACEMENTS = [(r'(/dl/\d+)$', r'\1/')]
-
-
- def setup(self):
- self.chunkLimit = -1
- self.resumeDownload = True
-
-
- def process(self, pyfile):
-        pyfile.url = replace_patterns(pyfile.url, self.URL_REPLACEMENTS)
-
- if re.match(self.__pattern, pyfile.url):
- link = pyfile.url
-
- elif not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "DebridItalia")
- self.fail(_("No DebridItalia account provided"))
-
- else:
- html = self.load("http://www.debriditalia.com/api.php", get={'generate': "", 'link': pyfile.url})
-
- if "ERROR" in html:
-                self.fail(re.search(r'ERROR:(.*)', html).group(1).strip())
-
- link = html.strip()
-
- self.download(link, disposition=True)
-
- check = self.checkDownload({'empty': re.compile(r'^$')})
-
- if check == "empty":
- self.retry(5, 2 * 60, "Empty file downloaded")
diff --git a/pyload/plugins/hoster/DepositfilesCom.py b/pyload/plugins/hoster/DepositfilesCom.py
deleted file mode 100644
index 39a0a8745..000000000
--- a/pyload/plugins/hoster/DepositfilesCom.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urllib import unquote
-
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class DepositfilesCom(SimpleHoster):
- __name = "DepositfilesCom"
- __type = "hoster"
- __version = "0.51"
-
- __pattern = r'https?://(?:www\.)?(depositfiles\.com|dfiles\.(eu|ru))(/\w{1,3})?/files/(?P<ID>\w+)'
-
- __description = """Depositfiles.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org"),
- ("zoidberg", "zoidberg@mujmail.cz"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'<script type="text/javascript">eval\( unescape\(\'(?P<N>.*?)\''
- SIZE_PATTERN = r': <b>(?P<S>[\d.,]+)&nbsp;(?P<U>[\w^_]+)</b>'
- OFFLINE_PATTERN = r'<span class="html_download_api-not_exists"></span>'
-
- NAME_REPLACEMENTS = [(r'\%u([0-9A-Fa-f]{4})', lambda m: unichr(int(m.group(1), 16))),
- (r'.*<b title="(?P<N>[^"]+).*', "\g<N>")]
- URL_REPLACEMENTS = [(__pattern + ".*", "https://dfiles.eu/files/\g<ID>")]
-
- COOKIES = [("dfiles.eu", "lang_current", "en")]
-
- FREE_LINK_PATTERN = r'<form id="downloader_file_form" action="(http://.+?\.(dfiles\.eu|depositfiles\.com)/.+?)" method="post"'
- PREMIUM_LINK_PATTERN = r'class="repeat"><a href="(.+?)"'
- PREMIUM_MIRROR_PATTERN = r'class="repeat_mirror"><a href="(.+?)"'
-
-
- def handleFree(self):
- self.html = self.load(self.pyfile.url, post={"gateway_result": "1"}, cookies=True)
-
- if re.search(r'File is checked, please try again in a minute.', self.html) is not None:
- self.logInfo(_("The file is being checked. Waiting 1 minute"))
- self.retry(wait_time=60)
-
- wait = re.search(r'html_download_api-limit_interval\">(\d+)</span>', self.html)
- if wait:
- wait_time = int(wait.group(1))
- self.logInfo(_("Traffic used up. Waiting %d seconds") % wait_time)
- self.wait(wait_time, True)
- self.retry()
-
- wait = re.search(r'>Try in (\d+) minutes or use GOLD account', self.html)
- if wait:
- wait_time = int(wait.group(1))
- self.logInfo(_("All free slots occupied. Waiting %d minutes") % wait_time)
- self.setWait(wait_time * 60, False)
-
- wait = re.search(r'Please wait (\d+) sec', self.html)
- if wait:
- self.setWait(int(wait.group(1)))
-
- m = re.search(r"var fid = '(\w+)';", self.html)
- if m is None:
- self.retry(wait_time=5)
- params = {'fid': m.group(1)}
- self.logDebug("FID: %s" % params['fid'])
-
- self.wait()
- recaptcha = ReCaptcha(self)
- captcha_key = recaptcha.detect_key()
- if captcha_key is None:
- self.error(_("ReCaptcha key not found"))
-
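-        # Up to 5 captcha attempts: reload get_file.php, solving the recaptcha again
-        # while the page keeps asking for one, until FREE_LINK_PATTERN yields a link.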
- for _i in xrange(5):
- self.html = self.load("https://dfiles.eu/get_file.php", get=params)
-
- if '<input type=button value="Continue" onclick="check_recaptcha' in self.html:
- if 'response' in params:
- self.invalidCaptcha()
- params['challenge'], params['response'] = recaptcha.challenge(captcha_key)
- self.logDebug(params)
- continue
-
- m = re.search(self.FREE_LINK_PATTERN, self.html)
- if m:
- if 'response' in params:
- self.correctCaptcha()
- link = unquote(m.group(1))
- self.logDebug("LINK: %s" % link)
- break
- else:
- self.error(_("Download link"))
- else:
- self.fail(_("No valid captcha response received"))
-
- try:
- self.download(link, disposition=True)
- except Exception:
- self.retry(wait_time=60)
-
-
- def handlePremium(self):
- if '<span class="html_download_api-gold_traffic_limit">' in self.html:
- self.logWarning(_("Download limit reached"))
- self.retry(25, 60 * 60, "Download limit reached")
- elif 'onClick="show_gold_offer' in self.html:
- self.account.relogin(self.user)
- self.retry()
- else:
- link = re.search(self.PREMIUM_LINK_PATTERN, self.html)
- mirror = re.search(self.PREMIUM_MIRROR_PATTERN, self.html)
- if link:
- dlink = link.group(1)
- elif mirror:
- dlink = mirror.group(1)
- else:
- self.error(_("No direct download link or mirror found"))
- self.download(dlink, disposition=True)
-
-
-getInfo = create_getInfo(DepositfilesCom)
diff --git a/pyload/plugins/hoster/DevhostSt.py b/pyload/plugins/hoster/DevhostSt.py
deleted file mode 100644
index 5be211809..000000000
--- a/pyload/plugins/hoster/DevhostSt.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://d-h.st/mM8
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class DevhostSt(SimpleHoster):
- __name = "DevhostSt"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?d-h\.st/(?!users/)\w{3}'
-
- __description = """d-h.st hoster plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
-
-
- NAME_PATTERN = r'>Filename:</span> <div title="(?P<N>.+?)"'
- SIZE_PATTERN = r'>Size:</span> (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- OFFLINE_PATTERN = r'>File Not Found<'
- LINK_PATTERN = r'id="downloadfile" href="(.+?)"'
-
-
- def setup(self):
- self.multiDL = True
- self.chunkLimit = 1
-
-
- def handleFree(self):
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("Download link not found"))
-
- dl_url = m.group(1)
- self.download(dl_url, disposition=True)
-
- check = self.checkDownload({'html': re.compile("html")})
- if check == "html":
- self.error(_("Downloaded file is an html page"))
-
-
-getInfo = create_getInfo(DevhostSt)
diff --git a/pyload/plugins/hoster/DlFreeFr.py b/pyload/plugins/hoster/DlFreeFr.py
deleted file mode 100644
index dc5adeec9..000000000
--- a/pyload/plugins/hoster/DlFreeFr.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import pycurl
-import re
-
-from pyload.network.Browser import Browser
-from pyload.network.CookieJar import CookieJar
-from pyload.plugins.internal.captcha import AdYouLike
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, replace_patterns
-from pyload.utils import json_loads
-
-
-class CustomBrowser(Browser):
-
- def __init__(self, bucket=None, options={}):
- Browser.__init__(self, bucket, options)
-
-
- def load(self, *args, **kwargs):
- post = kwargs.get("post")
-
- if post is None and len(args) > 2:
- post = args[2]
-
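-        # POST requests must not follow redirects, so DlFreeFr.handleFree() can read
-        # the 302 Location and Set-Cookie headers itself (see getLastHeaders()).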
- if post:
- self.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
- self.http.c.setopt(pycurl.POST, 1)
- self.http.c.setopt(pycurl.CUSTOMREQUEST, "POST")
- else:
- self.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
- self.http.c.setopt(pycurl.POST, 0)
- self.http.c.setopt(pycurl.CUSTOMREQUEST, "GET")
-
- return Browser.load(self, *args, **kwargs)
-
-
-class DlFreeFr(SimpleHoster):
- __name = "DlFreeFr"
- __type = "hoster"
- __version = "0.26"
-
- __pattern = r'http://(?:www\.)?dl\.free\.fr/(\w+|getfile\.pl\?file=/\w+)'
-
- __description = """Dl.free.fr hoster plugin"""
- __license = "GPLv3"
- __authors = [("the-razer", "daniel_ AT gmx DOT net"),
- ("zoidberg", "zoidberg@mujmail.cz"),
- ("Toilal", "toilal.dev@gmail.com")]
-
-
- NAME_PATTERN = r'Fichier:</td>\s*<td[^>]*>(?P<N>[^>]*)</td>'
- SIZE_PATTERN = r'Taille:</td>\s*<td[^>]*>(?P<S>[\d.,]+\w)o'
- OFFLINE_PATTERN = r'Erreur 404 - Document non trouv|Fichier inexistant|Le fichier demand&eacute; n\'a pas &eacute;t&eacute; trouv&eacute;'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
- self.limitDL = 5
- self.chunkLimit = 1
-
-
- def init(self):
- factory = self.core.requestFactory
- self.req = CustomBrowser(factory.bucket, factory.getOptions())
-
-
- def process(self, pyfile):
- pyfile.url = replace_patterns(pyfile.url, self.URL_REPLACEMENTS)
- valid_url = pyfile.url
- headers = self.load(valid_url, just_header=True)
-
- if headers.get('code') == 302:
- valid_url = headers.get('location')
- headers = self.load(valid_url, just_header=True)
-
- if headers.get('code') == 200:
- content_type = headers.get('content-type')
- if content_type and content_type.startswith("text/html"):
-                # Indirect access to the requested file via a web page that provides it (captcha)
- self.html = self.load(valid_url)
- self.handleFree()
- else:
- # Direct access to requested file for users using free.fr as Internet Service Provider.
- self.download(valid_url, disposition=True)
- elif headers.get('code') == 404:
- self.offline()
- else:
- self.fail(_("Invalid return code: ") + str(headers.get('code')))
-
-
- def handleFree(self):
- action, inputs = self.parseHtmlForm('action="getfile.pl"')
-
- adyoulike = AdYouLike(self)
- inputs.update(adyoulike.challenge())
-
- self.load("http://dl.free.fr/getfile.pl", post=inputs)
- headers = self.getLastHeaders()
- if headers.get("code") == 302 and "set-cookie" in headers and "location" in headers:
- m = re.search("(.*?)=(.*?); path=(.*?); domain=(.*?)", headers.get("set-cookie"))
-            cj = CookieJar(self.__name)
- if m:
- cj.setCookie(m.group(4), m.group(1), m.group(2), m.group(3))
- else:
- self.fail(_("Cookie error"))
- location = headers.get("location")
- self.req.setCookieJar(cj)
- self.download(location, disposition=True)
- else:
- self.fail(_("Invalid response"))
-
-
- def getLastHeaders(self):
-        # Parse the raw response header lines into a dict; repeated keys become lists
- header = {"code": self.req.code}
- for line in self.req.http.header.splitlines():
- line = line.strip()
- if not line or ":" not in line:
- continue
-
- key, none, value = line.partition(":")
- key = key.lower().strip()
- value = value.strip()
-
- if key in header:
- if type(header[key]) == list:
- header[key].append(value)
- else:
- header[key] = [header[key], value]
- else:
- header[key] = value
- return header
-
-
-getInfo = create_getInfo(DlFreeFr)
diff --git a/pyload/plugins/hoster/DodanePl.py b/pyload/plugins/hoster/DodanePl.py
deleted file mode 100644
index b9c903f6d..000000000
--- a/pyload/plugins/hoster/DodanePl.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class DodanePl(DeadHoster):
- __name = "DodanePl"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?dodane\.pl/file/\d+'
-
- __description = """Dodane.pl hoster plugin"""
- __license = "GPLv3"
- __authors = [("z00nx", "z00nx0@gmail.com")]
-
-
-getInfo = create_getInfo(DodanePl)
diff --git a/pyload/plugins/hoster/DuploadOrg.py b/pyload/plugins/hoster/DuploadOrg.py
deleted file mode 100644
index dcd74a02c..000000000
--- a/pyload/plugins/hoster/DuploadOrg.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class DuploadOrg(DeadHoster):
- __name = "DuploadOrg"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?dupload\.org/\w{12}'
-
- __description = """Dupload.grg hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(DuploadOrg)
diff --git a/pyload/plugins/hoster/EasybytezCom.py b/pyload/plugins/hoster/EasybytezCom.py
deleted file mode 100644
index f1083a624..000000000
--- a/pyload/plugins/hoster/EasybytezCom.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class EasybytezCom(XFSHoster):
- __name = "EasybytezCom"
- __type = "hoster"
- __version = "0.23"
-
- __pattern = r'http://(?:www\.)?easybytez\.com/\w{12}'
-
- __description = """Easybytez.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- HOSTER_DOMAIN = "easybytez.com"
-
- OFFLINE_PATTERN = r'>File not available'
-
- LINK_PATTERN = r'(http://(\w+\.(easybytez|easyload|ezbytez|zingload)\.(com|to)|\d+\.\d+\.\d+\.\d+)/files/\d+/\w+/.+?)["\'<]'
-
-
-getInfo = create_getInfo(EasybytezCom)
diff --git a/pyload/plugins/hoster/EdiskCz.py b/pyload/plugins/hoster/EdiskCz.py
deleted file mode 100644
index 17c903c1c..000000000
--- a/pyload/plugins/hoster/EdiskCz.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class EdiskCz(SimpleHoster):
- __name = "EdiskCz"
- __type = "hoster"
- __version = "0.22"
-
- __pattern = r'http://(?:www\.)?edisk\.(cz|sk|eu)/(stahni|sk/stahni|en/download)/.*'
-
- __description = """Edisk.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- INFO_PATTERN = r'<span class="fl" title="(?P<N>[^"]+)">\s*.*?\((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)</h1></span>'
- OFFLINE_PATTERN = r'<h3>This file does not exist due to one of the following:</h3><ul><li>'
-
- ACTION_PATTERN = r'/en/download/(\d+/.*\.html)'
- LINK_PATTERN = r'http://.*edisk\.cz.*\.html'
-
-
- def setup(self):
- self.multiDL = False
-
-
- def process(self, pyfile):
- url = re.sub("/(stahni|sk/stahni)/", "/en/download/", pyfile.url)
-
- self.logDebug("URL:" + url)
-
- m = re.search(self.ACTION_PATTERN, url)
- if m is None:
- self.error(_("ACTION_PATTERN not found"))
- action = m.group(1)
-
- self.html = self.load(url, decode=True)
- self.getFileInfo()
-
- self.html = self.load(re.sub("/en/download/", "/en/download-slow/", url))
-
- url = self.load(re.sub("/en/download/", "/x-download/", url), post={
- "action": action
- })
-
- if not re.match(self.LINK_PATTERN, url):
- self.fail(_("Unexpected server response"))
-
- self.download(url)
-
-
-getInfo = create_getInfo(EdiskCz)
diff --git a/pyload/plugins/hoster/EgoFilesCom.py b/pyload/plugins/hoster/EgoFilesCom.py
deleted file mode 100644
index 204273299..000000000
--- a/pyload/plugins/hoster/EgoFilesCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class EgoFilesCom(DeadHoster):
- __name = "EgoFilesCom"
- __type = "hoster"
- __version = "0.16"
-
- __pattern = r'https?://(?:www\.)?egofiles\.com/\w+'
-
- __description = """Egofiles.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(EgoFilesCom)
diff --git a/pyload/plugins/hoster/EnteruploadCom.py b/pyload/plugins/hoster/EnteruploadCom.py
deleted file mode 100644
index b013d511a..000000000
--- a/pyload/plugins/hoster/EnteruploadCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class EnteruploadCom(DeadHoster):
- __name = "EnteruploadCom"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?enterupload\.com/\w+'
-
- __description = """EnterUpload.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(EnteruploadCom)
diff --git a/pyload/plugins/hoster/EpicShareNet.py b/pyload/plugins/hoster/EpicShareNet.py
deleted file mode 100644
index a0f8fa8f8..000000000
--- a/pyload/plugins/hoster/EpicShareNet.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class EpicShareNet(DeadHoster):
- __name = "EpicShareNet"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'https?://(?:www\.)?epicshare\.net/\w{12}'
-
- __description = """EpicShare.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
-
-
-getInfo = create_getInfo(EpicShareNet)
diff --git a/pyload/plugins/hoster/EuroshareEu.py b/pyload/plugins/hoster/EuroshareEu.py
deleted file mode 100644
index 815aaec26..000000000
--- a/pyload/plugins/hoster/EuroshareEu.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class EuroshareEu(SimpleHoster):
- __name = "EuroshareEu"
- __type = "hoster"
- __version = "0.26"
-
- __pattern = r'http://(?:www\.)?euroshare\.(eu|sk|cz|hu|pl)/file/.*'
-
- __description = """Euroshare.eu hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- INFO_PATTERN = r'<span style="float: left;"><strong>(?P<N>.+?)</strong> \((?P<S>.+?)\)</span>'
- OFFLINE_PATTERN = ur'<h2>S.bor sa nena.iel</h2>|Poşadovaná stránka neexistuje!'
-
- FREE_URL_PATTERN = r'<a href="(/file/\d+/[^/]*/download/)"><div class="downloadButton"'
- ERR_PARDL_PATTERN = r'<h2>Prebieha s.ahovanie</h2>|<p>Naraz je z jednej IP adresy mo.n. s.ahova. iba jeden s.bor'
- ERR_NOT_LOGGED_IN_PATTERN = r'href="/customer-zone/login/"'
-
- URL_REPLACEMENTS = [(r"(http://[^/]*\.)(sk|cz|hu|pl)/", r"\1eu/")]
-
-
- def setup(self):
- self.multiDL = self.resumeDownload = self.premium
- self.req.setOption("timeout", 120)
-
-
- def handlePremium(self):
- if self.ERR_NOT_LOGGED_IN_PATTERN in self.html:
- self.account.relogin(self.user)
- self.retry(reason=_("User not logged in"))
-
- self.download(self.pyfile.url.rstrip('/') + "/download/")
-
- check = self.checkDownload({"login": re.compile(self.ERR_NOT_LOGGED_IN_PATTERN),
- "json": re.compile(r'\{"status":"error".*?"message":"(.*?)"')})
- if check == "login" or (check == "json" and self.lastCheck.group(1) == "Access token expired"):
- self.account.relogin(self.user)
- self.retry(reason=_("Access token expired"))
- elif check == "json":
- self.fail(self.lastCheck.group(1))
-
-
- def handleFree(self):
- if re.search(self.ERR_PARDL_PATTERN, self.html) is not None:
- self.longWait(5 * 60, 12)
-
- m = re.search(self.FREE_URL_PATTERN, self.html)
- if m is None:
- self.error(_("FREE_URL_PATTERN not found"))
- parsed_url = "http://euroshare.eu%s" % m.group(1)
- self.logDebug("URL", parsed_url)
- self.download(parsed_url, disposition=True)
-
- check = self.checkDownload({"multi_dl": re.compile(self.ERR_PARDL_PATTERN)})
- if check == "multi_dl":
- self.longWait(5 * 60, 12)
-
-
-getInfo = create_getInfo(EuroshareEu)
diff --git a/pyload/plugins/hoster/ExtabitCom.py b/pyload/plugins/hoster/ExtabitCom.py
deleted file mode 100644
index 104b3d580..000000000
--- a/pyload/plugins/hoster/ExtabitCom.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.utils import json_loads
-
-from pyload.plugins.hoster.UnrestrictLi import secondsToMidnight
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class ExtabitCom(SimpleHoster):
- __name = "ExtabitCom"
- __type = "hoster"
- __version = "0.62"
-
- __pattern = r'http://(?:www\.)?extabit\.com/(file|go|fid)/(?P<ID>\w+)'
-
- __description = """Extabit.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<th>File:</th>\s*<td class="col-fileinfo">\s*<div title="(?P<N>[^"]+)">'
- SIZE_PATTERN = r'<th>Size:</th>\s*<td class="col-fileinfo">(?P<S>[^<]+)</td>'
- OFFLINE_PATTERN = r'>File not found<'
- TEMP_OFFLINE_PATTERN = r'>(File is temporary unavailable|No download mirror)<'
-
- LINK_PATTERN = r'[\'"](http://guest\d+\.extabit\.com/\w+/.*?)[\'"]'
-
-
- def handleFree(self):
- if r">Only premium users can download this file" in self.html:
- self.fail(_("Only premium users can download this file"))
-
- m = re.search(r"Next free download from your ip will be available in <b>(\d+)\s*minutes", self.html)
- if m:
- self.wait(int(m.group(1)) * 60, True)
- elif "The daily downloads limit from your IP is exceeded" in self.html:
- self.logWarning(_("You have reached your daily downloads limit for today"))
- self.wait(secondsToMidnight(gmt=2), True)
-
- self.logDebug("URL: " + self.req.http.lastEffectiveURL)
- m = re.match(self.__pattern, self.req.http.lastEffectiveURL)
-        fileID = m.group('ID') if m else self.info['pattern']['ID']
-
- m = re.search(r'recaptcha/api/challenge\?k=(\w+)', self.html)
- if m:
- recaptcha = ReCaptcha(self)
- captcha_key = m.group(1)
-
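-            # Up to 5 recaptcha attempts; a successful JSON reply contains "ok" and
-            # an "href" path pointing to the download page.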
- for _i in xrange(5):
- get_data = {"type": "recaptcha"}
- get_data['challenge'], get_data['capture'] = recaptcha.challenge(captcha_key)
- res = json_loads(self.load("http://extabit.com/file/%s/" % fileID, get=get_data))
- if "ok" in res:
- self.correctCaptcha()
- break
- else:
- self.invalidCaptcha()
- else:
- self.fail(_("Invalid captcha"))
- else:
- self.error(_("Captcha"))
-
- if not "href" in res:
- self.error(_("Bad JSON response"))
-
- self.html = self.load("http://extabit.com/file/%s%s" % (fileID, res['href']))
-
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("LINK_PATTERN not found"))
-
- url = m.group(1)
- self.download(url)
-
-
-getInfo = create_getInfo(ExtabitCom)
diff --git a/pyload/plugins/hoster/FastixRu.py b/pyload/plugins/hoster/FastixRu.py
deleted file mode 100644
index 47bfa4035..000000000
--- a/pyload/plugins/hoster/FastixRu.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from random import randrange
-from urllib import unquote
-
-from pyload.utils import json_loads
-from pyload.plugins.Hoster import Hoster
-
-
-class FastixRu(Hoster):
- __name = "FastixRu"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'http://(?:www\.)?fastix\.(ru|it)/file/(?P<ID>\w{24})'
-
- __description = """Fastix hoster plugin"""
- __license = "GPLv3"
- __authors = [("Massimo Rosamilia", "max@spiritix.eu")]
-
-
- def getFilename(self, url):
- try:
- name = unquote(url.rsplit("/", 1)[1])
- except IndexError:
- name = "Unknown_Filename..."
- if name.endswith("..."): # incomplete filename, append random stuff
- name += "%s.tmp" % randrange(100, 999)
- return name
-
-
- def setup(self):
- self.chunkLimit = 3
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- if re.match(self.__pattern, pyfile.url):
- new_url = pyfile.url
- elif not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "Fastix")
- self.fail(_("No Fastix account provided"))
- else:
- self.logDebug("Old URL: %s" % pyfile.url)
- api_key = self.account.getAccountData(self.user)
- api_key = api_key['api']
-
- page = self.load("http://fastix.ru/api_v2/",
- get={'apikey': api_key, 'sub': "getdirectlink", 'link': pyfile.url})
- data = json_loads(page)
-
- self.logDebug("Json data", data)
-
- if "error\":true" in page:
- self.offline()
- else:
- new_url = data['downloadlink']
-
- if new_url != pyfile.url:
- self.logDebug("New URL: %s" % new_url)
-
- if pyfile.name.startswith("http") or pyfile.name.startswith("Unknown"):
-                # Only use when the name wasn't already set
- pyfile.name = self.getFilename(new_url)
-
- self.download(new_url, disposition=True)
-
- check = self.checkDownload({"error": "<title>An error occurred while processing your request</title>",
- "empty": re.compile(r"^$")})
-
- if check == "error":
- self.retry(wait_time=60, reason=_("An error occurred while generating link"))
- elif check == "empty":
- self.retry(wait_time=60, reason=_("Downloaded File was empty"))
diff --git a/pyload/plugins/hoster/FastshareCz.py b/pyload/plugins/hoster/FastshareCz.py
deleted file mode 100644
index 3714e1bc2..000000000
--- a/pyload/plugins/hoster/FastshareCz.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class FastshareCz(SimpleHoster):
- __name = "FastshareCz"
- __type = "hoster"
- __version = "0.25"
-
- __pattern = r'http://(?:www\.)?fastshare\.cz/\d+/.+'
-
- __description = """FastShare.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
- URL_REPLACEMENTS = [("#.*", "")]
-
- COOKIES = [("fastshare.cz", "lang", "en")]
-
- INFO_PATTERN = r'<h1 class="dwp">(?P<N>[^<]+)</h1>\s*<div class="fileinfo">\s*Size\s*: (?P<S>\d+) (?P<U>[\w^_]+),'
- OFFLINE_PATTERN = r'>(The file has been deleted|Requested page not found)'
-
- LINK_FREE_PATTERN = r'action=(/free/.*?)>\s*<img src="([^"]*)"><br'
- LINK_PREMIUM_PATTERN = r'(http://data\d+\.fastshare\.cz/download\.php\?id=\d+&)'
-
- SLOT_ERROR = "> 100% of FREE slots are full"
- CREDIT_ERROR = " credit for "
-
-
- def checkErrors(self):
- if self.SLOT_ERROR in self.html:
- errmsg = self.info['error'] = _("No free slots")
- self.retry(12, 60, errmsg)
-
- if self.CREDIT_ERROR in self.html:
- errmsg = self.info['error'] = _("Not enough traffic left")
- self.logWarning(errmsg)
- self.resetAccount()
-
- self.info.pop('error', None)
-
-
- def handleFree(self):
-        m = re.search(self.LINK_FREE_PATTERN, self.html)
- if m:
- action, captcha_src = m.groups()
- else:
- self.error(_("FREE_URL_PATTERN not found"))
-
- baseurl = "http://www.fastshare.cz"
- captcha = self.decryptCaptcha(urljoin(baseurl, captcha_src))
- self.download(urljoin(baseurl, action), post={'code': captcha, 'btn.x': 77, 'btn.y': 18})
-
-
- def checkFile(self):
- check = self.checkDownload({
-            'parallel_dl' : re.compile(r"<title>FastShare.cz</title>|<script>alert\('Pres FREE muzete stahovat jen jeden soubor najednou.'\)"),
- 'wrong_captcha': re.compile(r'Download for FREE'),
- 'credit' : re.compile(self.CREDIT_ERROR)
- })
-
- if check == "paralell_dl":
- self.retry(6, 10 * 60, _("Paralell download"))
-
- elif check == "wrong_captcha":
- self.retry(max_tries=5, reason=_("Wrong captcha"))
-
- elif check == "credit":
- self.resetAccount()
-
-
-getInfo = create_getInfo(FastshareCz)
diff --git a/pyload/plugins/hoster/FileApeCom.py b/pyload/plugins/hoster/FileApeCom.py
deleted file mode 100644
index 36a5a5cc0..000000000
--- a/pyload/plugins/hoster/FileApeCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class FileApeCom(DeadHoster):
- __name = "FileApeCom"
- __type = "hoster"
- __version = "0.12"
-
- __pattern = r'http://(?:www\.)?fileape\.com/(index\.php\?act=download\&id=|dl/)\w+'
-
- __description = """FileApe.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("espes", "")]
-
-
-getInfo = create_getInfo(FileApeCom)
diff --git a/pyload/plugins/hoster/FileParadoxIn.py b/pyload/plugins/hoster/FileParadoxIn.py
deleted file mode 100644
index eee67fa80..000000000
--- a/pyload/plugins/hoster/FileParadoxIn.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class FileParadoxIn(XFSHoster):
- __name = "FileParadoxIn"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'https?://(?:www\.)?fileparadox\.in/\w{12}'
-
- __description = """FileParadox.in hoster plugin"""
- __license = "GPLv3"
- __authors = [("RazorWing", "muppetuk1@hotmail.com")]
-
-
- HOSTER_DOMAIN = "fileparadox.in"
-
- SIZE_PATTERN = r'</font>\s*\(\s*(?P<S>[^)]+)\s*\)</font>'
-
-
-getInfo = create_getInfo(FileParadoxIn)
diff --git a/pyload/plugins/hoster/FileSharkPl.py b/pyload/plugins/hoster/FileSharkPl.py
deleted file mode 100644
index 673da41d0..000000000
--- a/pyload/plugins/hoster/FileSharkPl.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class FileSharkPl(SimpleHoster):
- __name = "FileSharkPl"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'http://(?:www\.)?fileshark\.pl/pobierz/\d{6}/\w{5}'
-
- __description = """FileShark.pl hoster plugin"""
- __license = "GPLv3"
- __authors = [("prOq", ""),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'<h2 class="name-file">(?P<N>.+)</h2>'
- SIZE_PATTERN = r'<p class="size-file">(.*?)<strong>(?P<S>\d+\.?\d*)\s(?P<U>\w+)</strong></p>'
-
- OFFLINE_PATTERN = '(P|p)lik zosta. (usuni.ty|przeniesiony)'
-
- LINK_FREE_PATTERN = r'<a href="(.*?)" class="btn-upload-free">'
- LINK_PREMIUM_PATTERN = r'<a href="(.*?)" class="btn-upload-premium">'
-
- WAIT_PATTERN = r'var timeToDownload = (\d+);'
- ERROR_PATTERN = r'<p class="lead text-center alert alert-warning">(.*?)</p>'
- IP_ERROR_PATTERN = r'Strona jest dost.pna wy..cznie dla u.ytkownik.w znajduj.cych si. na terenie Polski'
- SLOT_ERROR_PATTERN = r'Osi.gni.to maksymaln. liczb. .ci.ganych jednocze.nie plik.w\.'
-
- CAPTCHA_PATTERN = '<img src="data:image/jpeg;base64,(.*?)" title="captcha"'
- TOKEN_PATTERN = r'name="form\[_token\]" value="(.*?)" />'
-
-
- def setup(self):
- self.resumeDownload = True
- if self.premium:
- self.multiDL = True
- self.limitDL = 20
- else:
- self.multiDL = False
-
-
- def checkErrors(self):
- # check if file is now available for download (-> file name can be found in html body)
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- errmsg = self.info['error'] = _("Another download already run")
- self.retry(15, int(m.group(1)), errmsg)
-
-        m = re.search(self.ERROR_PATTERN, self.html)
- if m:
- alert = m.group(1)
-
- if re.match(self.IP_ERROR_PATTERN, alert):
- self.fail(_("Only connections from Polish IP are allowed"))
-
- elif re.match(self.SLOT_ERROR_PATTERN, alert):
- errmsg = self.info['error'] = _("No free download slots available")
- self.logWarning(errmsg)
- self.retry(10, 30 * 60, _("Still no free download slots available"))
-
- else:
- self.info['error'] = alert
- self.retry(10, 10 * 60, _("Try again later"))
-
- self.info.pop('error', None)
-
-
-    #@NOTE: the handlePremium method has never been tested
-    def handlePremium(self):
-        super(FileSharkPl, self).handlePremium()
- if self.link:
- self.link = urljoin("http://fileshark.pl/", self.link)
-
-
- def handleFree(self):
- m = re.search(self.LINK_FREE_PATTERN, self.html)
- if m is None:
- self.error(_("Download url not found"))
-
- link = urljoin("http://fileshark.pl", m.group(1))
-
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- seconds = int(m.group(1))
- self.logDebug("Wait %s seconds" % seconds)
- self.wait(seconds)
-
- action, inputs = self.parseHtmlForm('action=""')
-
- m = re.search(self.TOKEN_PATTERN, self.html)
- if m is None:
- self.retry(reason=_("Captcha form not found"))
-
- inputs['form[_token]'] = m.group(1)
-
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m is None:
- self.retry(reason=_("Captcha image not found"))
-
- tmp_load = self.load
- self.load = self._decode64 #: work-around: injects decode64 inside decryptCaptcha
-
- inputs['form[captcha]'] = self.decryptCaptcha(m.group(1), imgtype='jpeg')
- inputs['form[start]'] = ""
-
- self.load = tmp_load
-
- self.download(link, post=inputs, cookies=True, disposition=True)
-
-
- def checkFile(self):
- check = self.checkDownload({'wrong_captcha': re.compile(r'<label for="form_captcha" generated="true" class="error">(.*?)</label>'),
-                                    'wait_pattern' : re.compile(self.WAIT_PATTERN),
- 'DL-found' : re.compile('<a href="(.*)">')})
-
- if check == "DL-found":
- self.correctCaptcha()
-
- elif check == "wrong_captcha":
- self.invalidCaptcha()
- self.retry(10, 1, _("Wrong captcha solution"))
-
- elif check == "wait_pattern":
- self.retry()
-
-
- def _decode64(self, data, *args, **kwargs):
- return data.decode("base64")
-
-
-getInfo = create_getInfo(FileSharkPl)
diff --git a/pyload/plugins/hoster/FileStoreTo.py b/pyload/plugins/hoster/FileStoreTo.py
deleted file mode 100644
index e1e23f378..000000000
--- a/pyload/plugins/hoster/FileStoreTo.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class FileStoreTo(SimpleHoster):
- __name = "FileStoreTo"
- __type = "hoster"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?filestore\.to/\?d=(?P<ID>\w+)'
-
- __description = """FileStore.to hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- INFO_PATTERN = r'File: <span[^>]*>(?P<N>.+)</span><br />Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
- OFFLINE_PATTERN = r'>Download-Datei wurde nicht gefunden<'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
-
-
- def handleFree(self):
- self.wait(10)
- ldc = re.search(r'wert="(\w+)"', self.html).group(1)
- link = self.load("http://filestore.to/ajax/download.php", get={"LDC": ldc})
- self.download(link)
-
-
-getInfo = create_getInfo(FileStoreTo)
diff --git a/pyload/plugins/hoster/FilebeerInfo.py b/pyload/plugins/hoster/FilebeerInfo.py
deleted file mode 100644
index 66d6626be..000000000
--- a/pyload/plugins/hoster/FilebeerInfo.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class FilebeerInfo(DeadHoster):
- __name = "FilebeerInfo"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?filebeer\.info/(?!\d*~f)(?P<ID>\w+).*'
-
- __description = """Filebeer.info plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(FilebeerInfo)
diff --git a/pyload/plugins/hoster/FilecloudIo.py b/pyload/plugins/hoster/FilecloudIo.py
deleted file mode 100644
index 751542caf..000000000
--- a/pyload/plugins/hoster/FilecloudIo.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.utils import json_loads
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class FilecloudIo(SimpleHoster):
- __name = "FilecloudIo"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?(?:filecloud\.io|ifile\.it|mihd\.net)/(?P<ID>\w+).*'
-
- __description = """Filecloud.io hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- SIZE_PATTERN = r'{var __ab1 = (?P<S>\d+);}'
- NAME_PATTERN = r'id="aliasSpan">(?P<N>.*?)&nbsp;&nbsp;<'
- OFFLINE_PATTERN = r'l10n\.(FILES__DOESNT_EXIST|REMOVED)'
- TEMP_OFFLINE_PATTERN = r'l10n\.FILES__WARNING'
-
- UKEY_PATTERN = r'\'ukey\'\s*:\'(\w+)'
- AB1_PATTERN = r'if\( __ab1 == \'(\w+)\' \)'
- ERROR_MSG_PATTERN = r'var __error_msg\s*=\s*l10n\.(.*?);'
- RECAPTCHA_PATTERN = r'var __recaptcha_public\s*=\s*\'(.+?)\';'
-
- LINK_PATTERN = r'"(http://s\d+\.filecloud\.io/%s/\d+/.*?)"'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
- self.chunkLimit = 1
-
-
- def handleFree(self):
- data = {"ukey": self.info['pattern']['ID']}
-
- m = re.search(self.AB1_PATTERN, self.html)
- if m is None:
- self.error(_("__AB1"))
- data['__ab1'] = m.group(1)
-
- recaptcha = ReCaptcha(self)
-
- m = re.search(self.RECAPTCHA_PATTERN, self.html)
- captcha_key = m.group(1) if m else recaptcha.detect_key()
-
- if captcha_key is None:
- self.error(_("ReCaptcha key not found"))
-
- if not self.account:
- self.fail(_("User not logged in"))
- elif not self.account.logged_in:
- challenge, response = recaptcha.challenge(captcha_key)
- self.account.form_data = {"recaptcha_challenge_field": challenge,
- "recaptcha_response_field" : response}
- self.account.relogin(self.user)
- self.retry(2)
-
- json_url = "http://filecloud.io/download-request.json"
- res = self.load(json_url, post=data)
- self.logDebug(res)
- res = json_loads(res)
-
- if "error" in res and res['error']:
- self.fail(res)
-
- self.logDebug(res)
- if res['captcha']:
- data['ctype'] = "recaptcha"
-
- for _i in xrange(5):
- data['recaptcha_challenge'], data['recaptcha_response'] = recaptcha.challenge(captcha_key)
-
- json_url = "http://filecloud.io/download-request.json"
- res = self.load(json_url, post=data)
- self.logDebug(res)
- res = json_loads(res)
-
- if "retry" in res and res['retry']:
- self.invalidCaptcha()
- else:
- self.correctCaptcha()
- break
- else:
- self.fail(_("Incorrect captcha"))
-
- if res['dl']:
- self.html = self.load('http://filecloud.io/download.html')
-
- m = re.search(self.LINK_PATTERN % self.info['pattern']['ID'], self.html)
- if m is None:
- self.error(_("LINK_PATTERN not found"))
-
- if "size" in self.info and self.info['size']:
- self.check_data = {"size": int(self.info['size'])}
-
- download_url = m.group(1)
- self.download(download_url)
- else:
- self.fail(_("Unexpected server response"))
-
-
- def handlePremium(self):
- akey = self.account.getAccountData(self.user)['akey']
- ukey = self.info['pattern']['ID']
- self.logDebug("Akey: %s | Ukey: %s" % (akey, ukey))
- rep = self.load("http://api.filecloud.io/api-fetch_download_url.api",
- post={"akey": akey, "ukey": ukey})
- self.logDebug("FetchDownloadUrl: " + rep)
- rep = json_loads(rep)
- if rep['status'] == 'ok':
- self.download(rep['download_url'], disposition=True)
- else:
- self.fail(rep['message'])
-
-
-getInfo = create_getInfo(FilecloudIo)
diff --git a/pyload/plugins/hoster/FilefactoryCom.py b/pyload/plugins/hoster/FilefactoryCom.py
deleted file mode 100644
index a161e1e40..000000000
--- a/pyload/plugins/hoster/FilefactoryCom.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, parseFileInfo
-
-
-def getInfo(urls):
- for url in urls:
- h = getURL(url, just_header=True)
- m = re.search(r'Location: (.+)\r\n', h)
- if m and not re.match(FilefactoryCom.__pattern, m.group(1)): #: It's a direct link! Skipping
- yield (url, 0, 3, url)
- else: #: It's a standard html page
- yield parseFileInfo(FilefactoryCom, url, getURL(url))
-
-
-class FilefactoryCom(SimpleHoster):
- __name = "FilefactoryCom"
- __type = "hoster"
- __version = "0.52"
-
- __pattern = r'https?://(?:www\.)?filefactory\.com/(file|trafficshare/\w+)/\w+'
-
- __description = """Filefactory.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- INFO_PATTERN = r'<div id="file_name"[^>]*>\s*<h2>(?P<N>[^<]+)</h2>\s*<div id="file_info">\s*(?P<S>[\d.,]+) (?P<U>[\w^_]+) uploaded'
- OFFLINE_PATTERN = r'<h2>File Removed</h2>|This file is no longer available'
-
- LINK_PATTERN = r'"([^"]+filefactory\.com/get.+?)"'
-
- WAIT_PATTERN = r'<div id="countdown_clock" data-delay="(\d+)">'
- PREMIUM_ONLY_PATTERN = r'>Premium Account Required'
-
- COOKIES = [("filefactory.com", "locale", "en_US.utf8")]
-
-
- def handleFree(self):
- if "Currently only Premium Members can download files larger than" in self.html:
- self.fail(_("File too large for free download"))
- elif "All free download slots on this server are currently in use" in self.html:
- self.retry(50, 15 * 60, _("All free slots are busy"))
-
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("Free download link not found"))
-
- dl_link = m.group(1)
-
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- self.wait(int(m.group(1)))
-
- self.download(dl_link, disposition=True)
-
- check = self.checkDownload({'multiple': "You are currently downloading too many files at once.",
- 'error': '<div id="errorMessage">'})
-
- if check == "multiple":
- self.logDebug("Parallel downloads detected; waiting 15 minutes")
- self.retry(wait_time=15 * 60, reason=_("Parallel downloads"))
- elif check == "error":
- self.error(_("Unknown error"))
-
-
- def handlePremium(self):
- header = self.load(self.pyfile.url, just_header=True)
-
- if 'location' in header:
- url = header['location'].strip()
- if not url.startswith("http://"):
- url = urljoin("http://www.filefactory.com", url)
- elif 'content-disposition' in header:
- url = self.pyfile.url
- else:
- html = self.load(self.pyfile.url)
- m = re.search(self.LINK_PATTERN, html)
- if m:
- url = m.group(1)
- else:
- self.error(_("Premium download link not found"))
-
- self.download(url, disposition=True)
diff --git a/pyload/plugins/hoster/FilejungleCom.py b/pyload/plugins/hoster/FilejungleCom.py
deleted file mode 100644
index e582277b6..000000000
--- a/pyload/plugins/hoster/FilejungleCom.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.hoster.FileserveCom import FileserveCom, checkFile
-from pyload.plugins.Plugin import chunks
-
-
-class FilejungleCom(FileserveCom):
- __name = "FilejungleCom"
- __type = "hoster"
- __version = "0.51"
-
- __pattern = r'http://(?:www\.)?filejungle\.com/f/(?P<id>[^/]+).*'
-
- __description = """Filejungle.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- URLS = ["http://www.filejungle.com/f/", "http://www.filejungle.com/check_links.php",
- "http://www.filejungle.com/checkReCaptcha.php"]
- LINKCHECK_TR = r'<li>\s*(<div class="col1">.*?)</li>'
- LINKCHECK_TD = r'<div class="(?:col )?col\d">(?:<[^>]*>|&nbsp;)*([^<]*)'
-
- LONG_WAIT_PATTERN = r'<h1>Please wait for (\d+) (\w+)\s*to download the next file\.</h1>'
-
-
-def getInfo(urls):
- for chunk in chunks(urls, 100):
- yield checkFile(FilejungleCom, chunk)
diff --git a/pyload/plugins/hoster/FileomCom.py b/pyload/plugins/hoster/FileomCom.py
deleted file mode 100644
index 458efec42..000000000
--- a/pyload/plugins/hoster/FileomCom.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://fileom.com/gycaytyzdw3g/random.bin.html
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class FileomCom(XFSHoster):
- __name = "FileomCom"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'https?://(?:www\.)?fileom\.com/\w{12}'
-
- __description = """Fileom.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "fileom.com"
-
- NAME_PATTERN = r'Filename: <span>(?P<N>.+?)<'
- SIZE_PATTERN = r'File Size: <span class="size">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- LINK_PATTERN = r'var url2 = \'(.+?)\';'
-
-
- def setup(self):
- self.multiDL = True
- self.chunkLimit = 1
- self.resumeDownload = self.premium
-
-
-getInfo = create_getInfo(FileomCom)
diff --git a/pyload/plugins/hoster/FilepostCom.py b/pyload/plugins/hoster/FilepostCom.py
deleted file mode 100644
index 4db034a8b..000000000
--- a/pyload/plugins/hoster/FilepostCom.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import time
-
-from pyload.utils import json_loads
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class FilepostCom(SimpleHoster):
- __name = "FilepostCom"
- __type = "hoster"
- __version = "0.30"
-
- __pattern = r'https?://(?:www\.)?(?:filepost\.com/files|fp\.io)/(?P<ID>[^/]+)'
-
- __description = """Filepost.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- INFO_PATTERN = r'<input type="text" id="url" value=\'<a href[^>]*>(?P<N>[^>]+?) - (?P<S>[\d.,]+) (?P<U>[\w^_]+)</a>\' class="inp_text"/>'
- OFFLINE_PATTERN = r'class="error_msg_title"> Invalid or Deleted File. </div>|<div class="file_info file_info_deleted">'
-
- PREMIUM_ONLY_PATTERN = r'members only. Please upgrade to premium|a premium membership is required to download this file'
- RECAPTCHA_PATTERN = r'Captcha.init\({\s*key:\s*\'(.+?)\''
- FLP_TOKEN_PATTERN = r'set_store_options\({token: \'(.+?)\''
-
-
- def handleFree(self):
- m = re.search(self.FLP_TOKEN_PATTERN, self.html)
- if m is None:
- self.error(_("Token"))
- flp_token = m.group(1)
-
- m = re.search(self.RECAPTCHA_PATTERN, self.html)
- if m is None:
- self.error(_("Captcha key"))
- captcha_key = m.group(1)
-
- # Get wait time
- get_dict = {'SID': self.req.cj.getCookie('SID'), 'JsHttpRequest': str(int(time() * 10000)) + '-xml'}
- post_dict = {'action': 'set_download', 'token': flp_token, 'code': self.info['pattern']['ID']}
- wait_time = int(self.getJsonResponse(get_dict, post_dict, 'wait_time'))
-
- if wait_time > 0:
- self.wait(wait_time)
-
- post_dict = {"token": flp_token, "code": self.info['pattern']['ID'], "file_pass": ''}
-
- if 'var is_pass_exists = true;' in self.html:
- # Solve password
- for file_pass in self.getPassword().splitlines():
- get_dict['JsHttpRequest'] = str(int(time() * 10000)) + '-xml'
- post_dict['file_pass'] = file_pass
- self.logInfo(_("Password protected link, trying ") + file_pass)
-
- download_url = self.getJsonResponse(get_dict, post_dict, 'link')
- if download_url:
- break
-
- else:
- self.fail(_("No or incorrect password"))
-
- else:
- # Solve recaptcha
- recaptcha = ReCaptcha(self)
-
- for i in xrange(5):
- get_dict['JsHttpRequest'] = str(int(time() * 10000)) + '-xml'
- if i:
- post_dict['recaptcha_challenge_field'], post_dict['recaptcha_response_field'] = recaptcha.challenge(
- captcha_key)
- self.logDebug(u"RECAPTCHA: %s : %s : %s" % (
- captcha_key, post_dict['recaptcha_challenge_field'], post_dict['recaptcha_response_field']))
-
- download_url = self.getJsonResponse(get_dict, post_dict, 'link')
- if download_url:
- if i:
- self.correctCaptcha()
- break
- elif i:
- self.invalidCaptcha()
-
- else:
- self.fail(_("Invalid captcha"))
-
- # Download
- self.download(download_url)
-
-
- def getJsonResponse(self, get_dict, post_dict, field):
- res = json_loads(self.load('https://filepost.com/files/get/', get=get_dict, post=post_dict))
-
- self.logDebug(res)
-
- if not 'js' in res:
- self.error(_("JSON %s 1") % field)
-
- # js_answer was replaced with res['js'] because js_answer is never set.
- # The JSON-HTTP specs are unclear here, but the previous author
- # accessed res['js']['error'] as well as js_answer['error'];
- # see the two lines commented out with "# ~?".
- if 'error' in res['js']:
-
- if res['js']['error'] == 'download_delay':
- self.retry(wait_time=res['js']['params']['next_download'])
- # ~? self.retry(wait_time=js_answer['params']['next_download'])
-
- elif ('Wrong file password' in res['js']['error']
- or 'You entered a wrong CAPTCHA code' in res['js']['error']
- or 'CAPTCHA Code nicht korrekt' in res['js']['error']):
- return None
-
- elif 'CAPTCHA' in res['js']['error']:
- self.logDebug("Error response is unknown, but mentions CAPTCHA")
- return None
-
- else:
- self.fail(res['js']['error'])
-
- if not 'answer' in res['js'] or not field in res['js']['answer']:
- self.error(_("JSON %s 2") % field)
-
- return res['js']['answer'][field]
-
-
-getInfo = create_getInfo(FilepostCom)
diff --git a/pyload/plugins/hoster/FilepupNet.py b/pyload/plugins/hoster/FilepupNet.py
deleted file mode 100644
index 27b3acb44..000000000
--- a/pyload/plugins/hoster/FilepupNet.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://www.filepup.net/files/k5w4ZVoF1410184283.html
-# http://www.filepup.net/files/R4GBq9XH1410186553.html
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class FilepupNet(SimpleHoster):
- __name = "FilepupNet"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?filepup\.net/files/\w+'
-
- __description = """Filepup.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'>(?P<N>.+?)</h1>'
- SIZE_PATTERN = r'class="fa fa-archive"></i> \((?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- OFFLINE_PATTERN = r'>This file has been deleted'
-
- LINK_PATTERN = r'(http://www\.filepup\.net/get/.+?)\''
-
-
- def setup(self):
- self.multiDL = False
- self.chunkLimit = 1
-
-
- def handleFree(self):
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("Download link not found"))
-
- dl_link = m.group(1)
- self.download(dl_link, post={'task': "download"})
-
- check = self.checkDownload({'html': re.compile("html")})
- if check == "html":
- self.error(_("Downloaded file is an html page"))
-
-
-getInfo = create_getInfo(FilepupNet)
diff --git a/pyload/plugins/hoster/FilerNet.py b/pyload/plugins/hoster/FilerNet.py
deleted file mode 100644
index 16008a232..000000000
--- a/pyload/plugins/hoster/FilerNet.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://filer.net/get/ivgf5ztw53et3ogd
-# http://filer.net/get/hgo14gzcng3scbvv
-
-import re
-
-from urlparse import urljoin
-
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class FilerNet(SimpleHoster):
- __name = "FilerNet"
- __type = "hoster"
- __version = "0.10"
-
- __pattern = r'https?://(?:www\.)?filer\.net/get/\w+'
-
- __description = """Filer.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- INFO_PATTERN = r'<h1 class="page-header">Free Download (?P<N>\S+) <small>(?P<S>[\w.]+) (?P<U>[\w^_]+)</small></h1>'
- OFFLINE_PATTERN = r'Nicht gefunden'
-
- LINK_FREE_PATTERN = LINK_PREMIUM_PATTERN = r'href="([^"]+)">Get download</a>'
-
-
- def checkErrors(self):
- # Wait between downloads
- m = re.search(r'musst du <span id="time">(\d+)</span> Sekunden warten', self.html)
- if m:
- errmsg = self.info['error'] = _("Wait between free downloads")
- self.retry(wait_time=int(m.group(1)), reason=errmsg)
-
- self.info.pop('error', None)
-
-
- def handleFree(self):
- inputs = self.parseHtmlForm(input_names={'token': re.compile(r'.+')})[1]
- if 'token' not in inputs:
- self.error(_("Unable to detect token"))
-
- self.html = self.load(self.pyfile.url, post={'token': inputs['token']}, decode=True)
-
- inputs = self.parseHtmlForm(input_names={'hash': re.compile(r'.+')})[1]
- if 'hash' not in inputs:
- self.error(_("Unable to detect hash"))
-
- recaptcha = ReCaptcha(self)
-
- for _i in xrange(5):
- challenge, response = recaptcha.challenge()
-
- header = self.load(self.pyfile.url,
- post={'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field' : response,
- 'hash' : inputs['hash']})
-
- if 'location' in header and header['location']:
- self.correctCaptcha()
- self.link = header['location']
- return
- else:
- self.invalidCaptcha()
-
-
- def downloadLink(self, link):
- if not link:
- return
-
- self.download(urljoin("http://filer.net/", link), disposition=True)
-
-
-getInfo = create_getInfo(FilerNet)
diff --git a/pyload/plugins/hoster/FilerioCom.py b/pyload/plugins/hoster/FilerioCom.py
deleted file mode 100644
index b865ff3f3..000000000
--- a/pyload/plugins/hoster/FilerioCom.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class FilerioCom(XFSHoster):
- __name = "FilerioCom"
- __type = "hoster"
- __version = "0.07"
-
- __pattern = r'http://(?:www\.)?(filerio\.(in|com)|filekeen\.com)/\w{12}'
-
- __description = """FileRio.in hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "filerio.in"
-
- URL_REPLACEMENTS = [(r'filekeen\.com', "filerio.in")]
-
- OFFLINE_PATTERN = r'>&quot;File Not Found|File has been removed'
-
-
-getInfo = create_getInfo(FilerioCom)
diff --git a/pyload/plugins/hoster/FilesMailRu.py b/pyload/plugins/hoster/FilesMailRu.py
deleted file mode 100644
index b2147a993..000000000
--- a/pyload/plugins/hoster/FilesMailRu.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Hoster import Hoster
-from pyload.plugins.Plugin import chunks
-
-
-def getInfo(urls):
- result = []
- for chunk in chunks(urls, 10):
- for url in chunk:
- html = getURL(url)
- if r'<div class="errorMessage mb10">' in html:
- result.append((url, 0, 1, url))
- elif r'Page cannot be displayed' in html:
- result.append((url, 0, 1, url))
- else:
- try:
- url_pattern = '<a href="(.+?)" onclick="return Act\(this\, \'dlink\'\, event\)">(.+?)</a>'
- file_name = re.search(url_pattern, html).group(0).split(', event)">')[1].split('</a>')[0]
- result.append((file_name, 0, 2, url))
- except Exception:
- pass
-
- # status 1=OFFLINE, 2=OK, 3=UNKNOWN
- # result.append((#name,#size,#status,#url))
- yield result
-
-
-class FilesMailRu(Hoster):
- __name = "FilesMailRu"
- __type = "hoster"
- __version = "0.31"
-
- __pattern = r'http://(?:www\.)?files\.mail\.ru/.*'
-
- __description = """Files.mail.ru hoster plugin"""
- __license = "GPLv3"
- __authors = [("oZiRiz", "ich@oziriz.de")]
-
-
- def setup(self):
- if not self.account:
- self.multiDL = False
-
-
- def process(self, pyfile):
- self.html = self.load(pyfile.url)
- self.url_pattern = '<a href="(.+?)" onclick="return Act\(this\, \'dlink\'\, event\)">(.+?)</a>'
-
- # mark the file as "offline" if the error pattern is found in the HTML page
- if r'<div class="errorMessage mb10">' in self.html:
- self.offline()
-
- elif r'Page cannot be displayed' in self.html:
- self.offline()
-
- # the filename that will be shown in the list (e.g. test.part1.rar)
- pyfile.name = self.getFileName()
-
- # prepare and download
- if not self.account:
- self.prepare()
- self.download(self.getFileUrl())
- self.myPostProcess()
- else:
- self.download(self.getFileUrl())
- self.myPostProcess()
-
-
- def prepare(self):
- """You have to wait some seconds. Otherwise you will get a 40Byte HTML Page instead of the file you expected"""
- self.setWait(10)
- self.wait()
- return True
-
-
- def getFileUrl(self):
- """gives you the URL to the file. Extracted from the Files.mail.ru HTML-page stored in self.html"""
- return re.search(self.url_pattern, self.html).group(0).split('<a href="')[1].split('" onclick="return Act')[0]
-
-
- def getFileName(self):
- """gives you the Name for each file. Also extracted from the HTML-Page"""
- return re.search(self.url_pattern, self.html).group(0).split(', event)">')[1].split('</a>')[0]
-
-
- def myPostProcess(self):
- # Search the downloaded file for HTML code. Sometimes the redirect
- # does not work (possibly a curl problem), only a small HTML file is
- # saved and the download is marked as "finished"; in that case the
- # download is restarted. This is only a drawback for the rare user
- # who actually wants to download an HTML file.
- #
- # The maximum upload size allowed on files.mail.ru is currently 100 MB,
- # so every download is checked, because some downloads contain the HTML
- # text followed by 60 MB of zeros in a xyzfile.part1.rar file
- # (loading 100 MB into RAM is not an option).
- check = self.checkDownload({"html": "<meta name="}, read_size=50000)
- if check == "html":
- self.logInfo(_("There was HTML code in the downloaded file (%s)... redirect error? The download will be restarted.")
- % self.pyfile.name)
- self.retry()
diff --git a/pyload/plugins/hoster/FileserveCom.py b/pyload/plugins/hoster/FileserveCom.py
deleted file mode 100644
index bc2e963b8..000000000
--- a/pyload/plugins/hoster/FileserveCom.py
+++ /dev/null
@@ -1,217 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.utils import json_loads
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Hoster import Hoster
-from pyload.plugins.Plugin import chunks
-from pyload.plugins.hoster.UnrestrictLi import secondsToMidnight
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.utils import parseFileSize
-
-
-def checkFile(plugin, urls):
- html = getURL(plugin.URLS[1], post={"urls": "\n".join(urls)}, decode=True)
-
- file_info = []
- for li in re.finditer(plugin.LINKCHECK_TR, html, re.S):
- try:
- cols = re.findall(plugin.LINKCHECK_TD, li.group(1))
- if cols:
- file_info.append((
- cols[1] if cols[1] != '--' else cols[0],
- parseFileSize(cols[2]) if cols[2] != '--' else 0,
- 2 if cols[3].startswith('Available') else 1,
- cols[0]))
- except Exception, e:
- continue
-
- return file_info
-
-
-class FileserveCom(Hoster):
- __name = "FileserveCom"
- __type = "hoster"
- __version = "0.52"
-
- __pattern = r'http://(?:www\.)?fileserve\.com/file/(?P<id>[^/]+).*'
-
- __description = """Fileserve.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de"),
- ("mkaay", "mkaay@mkaay.de"),
- ("Paul King", ""),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- URLS = ["http://www.fileserve.com/file/", "http://www.fileserve.com/link-checker.php",
- "http://www.fileserve.com/checkReCaptcha.php"]
- LINKCHECK_TR = r'<tr>\s*(<td>http://www\.fileserve\.com/file/.*?)</tr>'
- LINKCHECK_TD = r'<td>(?:<[^>]*>|&nbsp;)*([^<]*)'
-
- CAPTCHA_KEY_PATTERN = r'var reCAPTCHA_publickey=\'(?P<key>.+?)\''
- LONG_WAIT_PATTERN = r'<li class="title">You need to wait (\d+) (\w+) to start another download\.</li>'
- LINK_EXPIRED_PATTERN = r'Your download link has expired'
- DAILY_LIMIT_PATTERN = r'Your daily download limit has been reached'
- NOT_LOGGED_IN_PATTERN = r'<form (name="loginDialogBoxForm"|id="login_form")|<li><a href="/login\.php">Login</a></li>'
-
-
- def setup(self):
- self.resumeDownload = self.multiDL = self.premium
- self.file_id = re.match(self.__pattern, self.pyfile.url).group('id')
- self.url = "%s%s" % (self.URLS[0], self.file_id)
-
- self.logDebug("File ID: %s URL: %s" % (self.file_id, self.url))
-
-
- def process(self, pyfile):
- pyfile.name, pyfile.size, status, self.url = checkFile(self, [self.url])[0]
- if status != 2:
- self.offline()
- self.logDebug("File Name: %s Size: %d" % (pyfile.name, pyfile.size))
-
- if self.premium:
- self.handlePremium()
- else:
- self.handleFree()
-
-
- def handleFree(self):
- self.html = self.load(self.url)
- action = self.load(self.url, post={"checkDownload": "check"}, decode=True)
- action = json_loads(action)
- self.logDebug(action)
-
- if "fail" in action:
- if action['fail'] == "timeLimit":
- self.html = self.load(self.url, post={"checkDownload": "showError", "errorType": "timeLimit"},
- decode=True)
-
- self.doLongWait(re.search(self.LONG_WAIT_PATTERN, self.html))
-
- elif action['fail'] == "parallelDownload":
- self.logWarning(_("Parallel download error, now waiting 60s"))
- self.retry(wait_time=60, reason=_("parallelDownload"))
-
- else:
- self.fail(_("Download check returned: %s") % action['fail'])
-
- elif "success" in action:
- if action['success'] == "showCaptcha":
- self.doCaptcha()
- self.doTimmer()
- elif action['success'] == "showTimmer":
- self.doTimmer()
-
- else:
- self.error(_("Unknown server response"))
-
- # show download link
- res = self.load(self.url, post={"downloadLink": "show"}, decode=True)
- self.logDebug("Show downloadLink response: %s" % res)
- if "fail" in res:
- self.error(_("Couldn't retrieve download url"))
-
- # this may either download our file or forward us to an error page
- self.download(self.url, post={"download": "normal"})
- self.logDebug(self.req.http.lastEffectiveURL)
-
- check = self.checkDownload({"expired": self.LINK_EXPIRED_PATTERN,
- "wait": re.compile(self.LONG_WAIT_PATTERN),
- "limit": self.DAILY_LIMIT_PATTERN})
-
- if check == "expired":
- self.logDebug("Download link was expired")
- self.retry()
- elif check == "wait":
- self.doLongWait(self.lastCheck)
- elif check == "limit":
- self.logWarning(_("Download limited reached for today"))
- self.setWait(secondsToMidnight(gmt=2), True)
- self.wait()
- self.retry()
-
- self.thread.m.reconnecting.wait(3) # Ease issue with later downloads appearing to be in parallel
-
-
- def doTimmer(self):
- res = self.load(self.url, post={"downloadLink": "wait"}, decode=True)
- self.logDebug("Wait response: %s" % res[:80])
-
- if "fail" in res:
- self.fail(_("Failed getting wait time"))
-
- if self.__name == "FilejungleCom":
- m = re.search(r'"waitTime":(\d+)', res)
- if m is None:
- self.fail(_("Cannot get wait time"))
- wait_time = int(m.group(1))
- else:
- wait_time = int(res) + 3
-
- self.setWait(wait_time)
- self.wait()
-
-
- def doCaptcha(self):
- captcha_key = re.search(self.CAPTCHA_KEY_PATTERN, self.html).group("key")
- recaptcha = ReCaptcha(self)
-
- for _i in xrange(5):
- challenge, response = recaptcha.challenge(captcha_key)
- res = json_loads(self.load(self.URLS[2],
- post={'recaptcha_challenge_field' : challenge,
- 'recaptcha_response_field' : response,
- 'recaptcha_shortencode_field': self.file_id}))
- if not res['success']:
- self.invalidCaptcha()
- else:
- self.correctCaptcha()
- break
- else:
- self.fail(_("Invalid captcha"))
-
-
- def doLongWait(self, m):
- wait_time = (int(m.group(1)) * {'seconds': 1, 'minutes': 60, 'hours': 3600}[m.group(2)]) if m else 12 * 60
- self.setWait(wait_time, True)
- self.wait()
- self.retry()
-
-
- def handlePremium(self):
- premium_url = None
- if self.__name == "FileserveCom":
- #try api download
- res = self.load("http://app.fileserve.com/api/download/premium/",
- post={"username": self.user,
- "password": self.account.getAccountData(self.user)['password'],
- "shorten": self.file_id},
- decode=True)
- if res:
- res = json_loads(res)
- if res['error_code'] == "302":
- premium_url = res['next']
- elif res['error_code'] in ["305", "500"]:
- self.tempOffline()
- elif res['error_code'] in ["403", "605"]:
- self.resetAccount()
- elif res['error_code'] in ["606", "607", "608"]:
- self.offline()
- else:
- self.logError(res['error_code'], res['error_message'])
-
- self.download(premium_url or self.pyfile.url)
-
- if not premium_url:
- check = self.checkDownload({"login": re.compile(self.NOT_LOGGED_IN_PATTERN)})
-
- if check == "login":
- self.account.relogin(self.user)
- self.retry(reason=_("Not logged in"))
-
-
-def getInfo(urls):
- for chunk in chunks(urls, 100):
- yield checkFile(FileserveCom, chunk)
diff --git a/pyload/plugins/hoster/FileshareInUa.py b/pyload/plugins/hoster/FileshareInUa.py
deleted file mode 100644
index ef528b69b..000000000
--- a/pyload/plugins/hoster/FileshareInUa.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class FileshareInUa(DeadHoster):
- __name = "FileshareInUa"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'https?://(?:www\.)?fileshare\.in\.ua/\w{7}'
-
- __description = """Fileshare.in.ua hoster plugin"""
- __license = "GPLv3"
- __authors = [("fwannmacher", "felipe@warhammerproject.com")]
-
-
-getInfo = create_getInfo(FileshareInUa)
diff --git a/pyload/plugins/hoster/FilesonicCom.py b/pyload/plugins/hoster/FilesonicCom.py
deleted file mode 100644
index a37408523..000000000
--- a/pyload/plugins/hoster/FilesonicCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class FilesonicCom(DeadHoster):
- __name = "FilesonicCom"
- __type = "hoster"
- __version = "0.35"
-
- __pattern = r'http://(?:www\.)?filesonic\.com/file/\w+'
-
- __description = """Filesonic.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de"),
- ("paulking", "")]
-
-
-getInfo = create_getInfo(FilesonicCom)
diff --git a/pyload/plugins/hoster/FilezyNet.py b/pyload/plugins/hoster/FilezyNet.py
deleted file mode 100644
index 3e4efa372..000000000
--- a/pyload/plugins/hoster/FilezyNet.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class FilezyNet(DeadHoster):
- __name = "FilezyNet"
- __type = "hoster"
- __version = "0.20"
-
- __pattern = r'http://(?:www\.)?filezy\.net/\w{12}'
-
- __description = """Filezy.net hoster plugin"""
- __license = "GPLv3"
- __authors = []
-
-
-getInfo = create_getInfo(FilezyNet)
diff --git a/pyload/plugins/hoster/FiredriveCom.py b/pyload/plugins/hoster/FiredriveCom.py
deleted file mode 100644
index 449c24874..000000000
--- a/pyload/plugins/hoster/FiredriveCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class FiredriveCom(DeadHoster):
- __name = "FiredriveCom"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'https?://(?:www\.)?(firedrive|putlocker)\.com/(mobile/)?(file|embed)/(?P<ID>\w+)'
-
- __description = """Firedrive.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
-getInfo = create_getInfo(FiredriveCom)
diff --git a/pyload/plugins/hoster/FlyFilesNet.py b/pyload/plugins/hoster/FlyFilesNet.py
deleted file mode 100644
index 3853cc309..000000000
--- a/pyload/plugins/hoster/FlyFilesNet.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
- from urllib import unquote_plus
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.SimpleHoster import SimpleHoster
-
-
-class FlyFilesNet(SimpleHoster):
- __name = "FlyFilesNet"
- __type = "hoster"
- __version = "0.10"
-
- __pattern = r'http://(?:www\.)?flyfiles\.net/.*'
-
- __description = """FlyFiles.net hoster plugin"""
- __license = "GPLv3"
- __authors = []
-
- SESSION_PATTERN = r'flyfiles\.net/(.*)/.*'
- NAME_PATTERN = r'flyfiles\.net/.*/(.*)'
-
-
- def process(self, pyfile):
- name = re.search(self.NAME_PATTERN, pyfile.url).group(1)
- pyfile.name = unquote_plus(name)
-
- session = re.search(self.SESSION_PATTERN, pyfile.url).group(1)
-
- url = "http://flyfiles.net"
-
- # get download URL
- parsed_url = getURL(url, post={"getDownLink": session}, cookies=True)
- self.logDebug("Parsed URL: %s" % parsed_url)
-
- if parsed_url == '#downlink|' or parsed_url == "#downlink|#":
- self.logWarning(_("Could not get the download URL. Please wait 10 minutes"))
- self.wait(10 * 60, True)
- self.retry()
-
- download_url = parsed_url.replace('#downlink|', '')
-
- self.download(download_url)
diff --git a/pyload/plugins/hoster/FourSharedCom.py b/pyload/plugins/hoster/FourSharedCom.py
deleted file mode 100644
index c1066454a..000000000
--- a/pyload/plugins/hoster/FourSharedCom.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class FourSharedCom(SimpleHoster):
- __name = "FourSharedCom"
- __type = "hoster"
- __version = "0.30"
-
- __pattern = r'https?://(?:www\.)?4shared(\-china)?\.com/(account/)?(download|get|file|document|photo|video|audio|mp3|office|rar|zip|archive|music)/.+?/.*'
-
- __description = """4Shared.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<meta name="title" content="(?P<N>.+?)"'
- SIZE_PATTERN = r'<span title="Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)">'
- OFFLINE_PATTERN = r'The file link that you requested is not valid\.|This file was deleted.'
-
- NAME_REPLACEMENTS = [(r"&#(\d+).", lambda m: unichr(int(m.group(1))))]
- SIZE_REPLACEMENTS = [(",", "")]
-
- DOWNLOAD_URL_PATTERN = r'name="d3link" value="(.*?)"'
- DOWNLOAD_BUTTON_PATTERN = r'id="btnLink" href="(.*?)"'
- FID_PATTERN = r'name="d3fid" value="(.*?)"'
-
-
- def handleFree(self):
- if not self.account:
- self.fail(_("User not logged in"))
-
- m = re.search(self.DOWNLOAD_BUTTON_PATTERN, self.html)
- if m:
- link = m.group(1)
- else:
- link = re.sub(r'/(download|get|file|document|photo|video|audio)/', r'/get/', self.pyfile.url)
-
- self.html = self.load(link)
-
- m = re.search(self.DOWNLOAD_URL_PATTERN, self.html)
- if m is None:
- self.error(_("Download link"))
- link = m.group(1)
-
- try:
- m = re.search(self.FID_PATTERN, self.html)
- res = self.load('http://www.4shared.com/web/d2/getFreeDownloadLimitInfo?fileId=%s' % m.group(1))
- self.logDebug(res)
- except Exception:
- pass
-
- self.wait(20)
- self.download(link)
-
-
-getInfo = create_getInfo(FourSharedCom)
diff --git a/pyload/plugins/hoster/FreakshareCom.py b/pyload/plugins/hoster/FreakshareCom.py
deleted file mode 100644
index 4741f1938..000000000
--- a/pyload/plugins/hoster/FreakshareCom.py
+++ /dev/null
@@ -1,176 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-from pyload.plugins.hoster.UnrestrictLi import secondsToMidnight
-from pyload.plugins.internal.captcha import ReCaptcha
-
-
-class FreakshareCom(Hoster):
- __name = "FreakshareCom"
- __type = "hoster"
- __version = "0.39"
-
- __pattern = r'http://(?:www\.)?freakshare\.(net|com)/files/\S*?/'
-
- __description = """Freakshare.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("sitacuisses", "sitacuisses@yahoo.de"),
- ("spoob", "spoob@pyload.org"),
- ("mkaay", "mkaay@mkaay.de"),
- ("Toilal", "toilal.dev@gmail.com")]
-
-
- def setup(self):
- self.multiDL = False
- self.req_opts = []
-
-
- def process(self, pyfile):
- self.pyfile = pyfile
-
- pyfile.url = pyfile.url.replace("freakshare.net/", "freakshare.com/")
-
- if self.account:
- self.html = self.load(pyfile.url, cookies=False)
- pyfile.name = self.get_file_name()
- self.download(pyfile.url)
-
- else:
- self.prepare()
- self.get_file_url()
-
- self.download(pyfile.url, post=self.req_opts)
-
- check = self.checkDownload({"bad": "bad try",
- "paralell": "> Sorry, you cant download more then 1 files at time. <",
- "empty": "Warning: Unknown: Filename cannot be empty",
- "wrong_captcha": "Wrong Captcha!",
- "downloadserver": "No Downloadserver. Please try again later!"})
-
- if check == "bad":
- self.fail(_("Bad Try"))
- elif check == "paralell":
- self.setWait(300, True)
- self.wait()
- self.retry()
- elif check == "empty":
- self.fail(_("File not downloadable"))
- elif check == "wrong_captcha":
- self.invalidCaptcha()
- self.retry()
- elif check == "downloadserver":
- self.retry(5, 15 * 60, _("No Download server"))
-
-
- def prepare(self):
- pyfile = self.pyfile
-
- self.download_html()
-
- if not self.file_exists():
- self.offline()
-
- self.setWait(self.get_waiting_time())
-
- pyfile.name = self.get_file_name()
- pyfile.size = self.get_file_size()
-
- self.wait()
-
- return True
-
-
- def download_html(self):
- self.load("http://freakshare.com/index.php", {"language": "EN"}) # Set english language in server session
- self.html = self.load(self.pyfile.url)
-
-
- def get_file_url(self):
- """ returns the absolute downloadable filepath
- """
- if not self.html:
- self.download_html()
- if not self.wantReconnect:
- self.req_opts = self.get_download_options() # get the Post options for the Request
- #file_url = self.pyfile.url
- #return file_url
- else:
- self.offline()
-
-
- def get_file_name(self):
- if not self.html:
- self.download_html()
- if not self.wantReconnect:
- file_name = re.search(r"<h1\sclass=\"box_heading\"\sstyle=\"text-align:center;\">([^ ]+)", self.html)
- if file_name is not None:
- file_name = file_name.group(1)
- else:
- file_name = self.pyfile.url
- return file_name
- else:
- return self.pyfile.url
-
-
- def get_file_size(self):
- size = 0
- if not self.html:
- self.download_html()
- if not self.wantReconnect:
- file_size_check = re.search(
- r"<h1\sclass=\"box_heading\"\sstyle=\"text-align:center;\">[^ ]+ - ([^ ]+) (\w\w)yte", self.html)
- if file_size_check is not None:
- units = float(file_size_check.group(1).replace(",", ""))
- pow = {'KB': 1, 'MB': 2, 'GB': 3}[file_size_check.group(2)]
- size = int(units * 1024 ** pow)
-
- return size
-
-
- def get_waiting_time(self):
- if not self.html:
- self.download_html()
-
- if "Your Traffic is used up for today" in self.html:
- self.wantReconnect = True
- return secondsToMidnight(gmt=2)
-
- timestring = re.search('\s*var\s(?:downloadWait|time)\s=\s(\d*)[\d.]*;', self.html)
- if timestring:
- return int(timestring.group(1))
- else:
- return 60
-
-
- def file_exists(self):
- """ returns True or False
- """
- if not self.html:
- self.download_html()
- if re.search(r"This file does not exist!", self.html) is not None:
- return False
- else:
- return True
-
-
- def get_download_options(self):
- re_envelope = re.search(r".*?value=\"Free\sDownload\".*?\n*?(.*?<.*?>\n*)*?\n*\s*?</form>",
- self.html).group(0) # get the whole request
- to_sort = re.findall(r"<input\stype=\"hidden\"\svalue=\"(.*?)\"\sname=\"(.*?)\"\s\/>", re_envelope)
- request_options = dict((n, v) for (v, n) in to_sort)
-
- herewego = self.load(self.pyfile.url, None, request_options) # the actual download-Page
-
- to_sort = re.findall(r"<input\stype=\".*?\"\svalue=\"(\S*?)\".*?name=\"(\S*?)\"\s.*?\/>", herewego)
- request_options = dict((n, v) for (v, n) in to_sort)
-
- challenge = re.search(r"http://api\.recaptcha\.net/challenge\?k=(\w+)", herewego)
-
- if challenge:
- re_captcha = ReCaptcha(self)
- (request_options['recaptcha_challenge_field'],
- request_options['recaptcha_response_field']) = re_captcha.challenge(challenge.group(1))
-
- return request_options
diff --git a/pyload/plugins/hoster/FreeWayMe.py b/pyload/plugins/hoster/FreeWayMe.py
deleted file mode 100644
index fe44de8ee..000000000
--- a/pyload/plugins/hoster/FreeWayMe.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Hoster import Hoster
-
-
-class FreeWayMe(Hoster):
- __name = "FreeWayMe"
- __type = "hoster"
- __version = "0.11"
-
- __pattern = r'https://(?:www\.)?free-way\.me/.*'
-
- __description = """FreeWayMe hoster plugin"""
- __license = "GPLv3"
- __authors = [("Nicolas Giese", "james@free-way.me")]
-
-
- def setup(self):
- self.resumeDownload = False
- self.multiDL = self.premium
- self.chunkLimit = 1
-
-
- def process(self, pyfile):
- if not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "FreeWayMe")
- self.fail(_("No FreeWay account provided"))
-
- self.logDebug("Old URL: %s" % pyfile.url)
-
- (user, data) = self.account.selectAccount()
-
- self.download(
- "https://www.free-way.me/load.php",
- get={"multiget": 7, "url": pyfile.url, "user": user, "pw": self.account.getpw(user), "json": ""},
- disposition=True)
diff --git a/pyload/plugins/hoster/FreevideoCz.py b/pyload/plugins/hoster/FreevideoCz.py
deleted file mode 100644
index 3fc4a45bd..000000000
--- a/pyload/plugins/hoster/FreevideoCz.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class FreevideoCz(DeadHoster):
- __name = "FreevideoCz"
- __type = "hoster"
- __version = "0.30"
-
- __pattern = r'http://(?:www\.)?freevideo\.cz/vase-videa/.+'
-
- __description = """Freevideo.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(FreevideoCz)
\ No newline at end of file
diff --git a/pyload/plugins/hoster/FshareVn.py b/pyload/plugins/hoster/FshareVn.py
deleted file mode 100644
index 3879c1607..000000000
--- a/pyload/plugins/hoster/FshareVn.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import strptime, mktime, gmtime
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, parseFileInfo
-
-
-def getInfo(urls):
- for url in urls:
- html = getURL("http://www.fshare.vn/check_link.php",
- post={'action': "check_link", 'arrlinks': url},
- decode=True)
-
- yield parseFileInfo(FshareVn, url, html)
-
-
-def doubleDecode(m):
- return m.group(1).decode('raw_unicode_escape')
-
-
-class FshareVn(SimpleHoster):
- __name = "FshareVn"
- __type = "hoster"
- __version = "0.17"
-
- __pattern = r'http://(?:www\.)?fshare\.vn/file/.*'
-
- __description = """FshareVn hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- INFO_PATTERN = r'<p>(?P<N>[^<]+)<\\/p>[\\trn\s]*<p>(?P<S>[\d.,]+)\s*(?P<U>[\w^_]+)<\\/p>'
- OFFLINE_PATTERN = r'<div class=\\"f_left file_w\\"|<\\/p>\\t\\t\\t\\t\\r\\n\\t\\t<p><\\/p>\\t\\t\\r\\n\\t\\t<p>0 KB<\\/p>'
-
- NAME_REPLACEMENTS = [("(.*)", doubleDecode)]
-
- LINK_PATTERN = r'action="(http://download.*?)[#"]'
- WAIT_PATTERN = ur'Lượt tải xuống kế tiếp là:\s*(.*?)\s*<'
-
-
- def process(self, pyfile):
- self.html = self.load('http://www.fshare.vn/check_link.php', post={
- "action": "check_link",
- "arrlinks": pyfile.url
- }, decode=True)
- self.getFileInfo()
-
- if self.premium:
- self.handlePremium()
- else:
- self.handleFree()
- self.checkDownloadedFile()
-
-
- def handleFree(self):
- self.html = self.load(self.pyfile.url, decode=True)
-
- self.checkErrors()
-
- action, inputs = self.parseHtmlForm('frm_download')
- self.url = self.pyfile.url + action
-
- if not inputs:
- self.error(_("No FORM"))
- elif 'link_file_pwd_dl' in inputs:
- for password in self.getPassword().splitlines():
- self.logInfo(_("Password protected link, trying ") + password)
- inputs['link_file_pwd_dl'] = password
- self.html = self.load(self.url, post=inputs, decode=True)
- if not 'name="link_file_pwd_dl"' in self.html:
- break
- else:
- self.fail(_("No or incorrect password"))
- else:
- self.html = self.load(self.url, post=inputs, decode=True)
-
- self.checkErrors()
-
- m = re.search(r'var count = (\d+)', self.html)
- self.setWait(int(m.group(1)) if m else 30)
-
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("LINK_PATTERN not found"))
- self.url = m.group(1)
- self.logDebug("FREE DL URL: %s" % self.url)
-
- self.wait()
- self.download(self.url)
-
-
- def handlePremium(self):
- self.download(self.pyfile.url)
-
-
- def checkErrors(self):
- if '/error.php?' in self.req.lastEffectiveURL or u"Liên kết bạn chọn không tồn" in self.html:
- self.offline()
-
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- self.logInfo(_("Wait until %s ICT") % m.group(1))
- wait_until = mktime(strptime(m.group(1), "%d/%m/%Y %H:%M"))
- self.wait(wait_until - mktime(gmtime()) - 7 * 60 * 60, True)
- self.retry()
- elif '<ul class="message-error">' in self.html:
- msg = "Unknown error occured or wait time not parsed"
- self.logError(msg)
- self.retry(30, 2 * 60, msg)
-
- self.info.pop('error', None)
-
-
- def checkDownloadedFile(self):
- # check download
- check = self.checkDownload({
- "not_found": "<head><title>404 Not Found</title></head>"
- })
-
- if check == "not_found":
- self.fail(_("File not m on server"))
diff --git a/pyload/plugins/hoster/Ftp.py b/pyload/plugins/hoster/Ftp.py
deleted file mode 100644
index 8256c7464..000000000
--- a/pyload/plugins/hoster/Ftp.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import pycurl
-import re
-
-from urllib import quote, unquote
-from urlparse import urlparse
-
-from pyload.plugins.Hoster import Hoster
-
-
-class Ftp(Hoster):
- __name = "Ftp"
- __type = "hoster"
- __version = "0.43"
-
- __pattern = r'(?:ftps?|sftp)://([\w.-]+(:[\w.-]+)?@)?[\w.-]+(:\d+)?/.+'
-
- __description = """Download from ftp directory"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.com"),
- ("mkaay", "mkaay@mkaay.de"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def setup(self):
- self.chunkLimit = -1
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- parsed_url = urlparse(pyfile.url)
- netloc = parsed_url.netloc
-
- pyfile.name = parsed_url.path.rpartition('/')[2]
- try:
- pyfile.name = unquote(str(pyfile.name)).decode('utf8')
- except Exception:
- pass
-
- if not "@" in netloc:
- servers = [x['login'] for x in self.account.getAllAccounts()] if self.account else []
-
- if netloc in servers:
- self.logDebug("Logging on to %s" % netloc)
- self.req.addAuth(self.account.accounts[netloc]['password'])
- else:
- for pwd in self.getPassword().splitlines():
- if ":" in pwd:
- self.req.addAuth(pwd.strip())
- break
-
- self.req.http.c.setopt(pycurl.NOBODY, 1)
-
- try:
- res = self.load(pyfile.url)
- except pycurl.error, e:
- self.fail(_("Error %d: %s") % e.args)
-
- self.req.http.c.setopt(pycurl.NOBODY, 0)
- self.logDebug(self.req.http.header)
-
- m = re.search(r"Content-Length:\s*(\d+)", res)
- if m:
- pyfile.size = int(m.group(1))
- self.download(pyfile.url)
- else:
- #Naive ftp directory listing
- if re.search(r'^25\d.*?"', self.req.http.header, re.M):
- pyfile.url = pyfile.url.rstrip('/')
- pkgname = "/".join(pyfile.package().name, urlparse(pyfile.url).path.rpartition('/')[2])
- pyfile.url += '/'
- self.req.http.c.setopt(48, 1) # CURLOPT_DIRLISTONLY
- res = self.load(pyfile.url, decode=False)
- links = [pyfile.url + quote(x) for x in res.splitlines()]
- self.logDebug("LINKS", links)
- self.core.api.addPackage(pkgname, links)
- else:
- self.fail(_("Unexpected server response"))
diff --git a/pyload/plugins/hoster/GamefrontCom.py b/pyload/plugins/hoster/GamefrontCom.py
deleted file mode 100644
index 354a6c974..000000000
--- a/pyload/plugins/hoster/GamefrontCom.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import parseFileSize
-
-
-class GamefrontCom(Hoster):
- __name = "GamefrontCom"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'http://(?:www\.)?gamefront\.com/files/\w+'
-
- __description = """Gamefront.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("fwannmacher", "felipe@warhammerproject.com")]
-
-
- PATTERN_FILENAME = r'<title>(.*?) \| Game Front'
- PATTERN_FILESIZE = r'<dt>File Size:</dt>[\n\s]*<dd>(.*?)</dd>'
- PATTERN_OFFLINE = r'This file doesn\'t exist, or has been removed.'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
- self.chunkLimit = -1
-
-
- def process(self, pyfile):
- self.pyfile = pyfile
- self.html = self.load(pyfile.url, decode=True)
-
- if not self._checkOnline():
- self.offline()
-
- pyfile.name = self._getName()
-
- link = self._getLink()
-
- if not link.startswith('http://'):
- link = "http://www.gamefront.com/" + link
-
- self.download(link)
-
-
- def _checkOnline(self):
- if re.search(self.PATTERN_OFFLINE, self.html):
- return False
- else:
- return True
-
-
- def _getName(self):
- name = re.search(self.PATTERN_FILENAME, self.html)
- if name is None:
- self.fail(_("Plugin broken")
-
- return name.group(1)
-
-
- def _getLink(self):
- self.html2 = self.load("http://www.gamefront.com/" + re.search("(files/service/thankyou\\?id=\w+)",
- self.html).group(1))
- return re.search("<a href=\"(http://media\d+\.gamefront.com/.*)\">click here</a>", self.html2).group(1).replace("&amp;", "&")
-
-
-def getInfo(urls):
- result = []
-
- for url in urls:
- html = getURL(url)
-
- if re.search(GamefrontCom.PATTERN_OFFLINE, html):
- result.append((url, 0, 1, url))
- else:
- name = re.search(GamefrontCom.PATTERN_FILENAME, html)
- if name is None:
- result.append((url, 0, 1, url))
- else:
- name = name.group(1)
- size = re.search(GamefrontCom.PATTERN_FILESIZE, html)
- size = parseFileSize(size.group(1))
-
- result.append((name, size, 3, url))
-
- yield result
diff --git a/pyload/plugins/hoster/GigapetaCom.py b/pyload/plugins/hoster/GigapetaCom.py
deleted file mode 100644
index 06d3cc6cf..000000000
--- a/pyload/plugins/hoster/GigapetaCom.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from random import randint
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class GigapetaCom(SimpleHoster):
- __name = "GigapetaCom"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?gigapeta\.com/dl/\w+'
-
- __description = """GigaPeta.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<img src=".*" alt="file" />-->\s*(?P<N>.*?)\s*</td>'
- SIZE_PATTERN = r'<th>\s*Size\s*</th>\s*<td>\s*(?P<S>.*?)\s*</td>'
- OFFLINE_PATTERN = r'<div id="page_error">'
-
- COOKIES = [("gigapeta.com", "lang", "us")]
-
-
- def handleFree(self):
- captcha_key = str(randint(1, 100000000))
- captcha_url = "http://gigapeta.com/img/captcha.gif?x=%s" % captcha_key
-
- for _i in xrange(5):
- self.checkErrors()
-
- captcha = self.decryptCaptcha(captcha_url)
- self.html = self.load(self.pyfile.url,
- post={'captcha_key': captcha_key,
- 'captcha' : captcha,
- 'download' : "Download"},
- follow_location=False)
-
- m = re.search(r'Location\s*:\s*(.+)', self.req.http.header, re.I)
- if m:
- download_url = m.group(1)
- break
- elif "Entered figures don&#96;t coincide with the picture" in self.html:
- self.invalidCaptcha()
- else:
- self.fail(_("No valid captcha code entered"))
-
- self.download(download_url)
-
-
- def checkErrors(self):
- if "All threads for IP" in self.html:
- self.logDebug("Your IP is already downloading a file")
- self.wait(5 * 60, True)
- self.retry()
-
- self.info.pop('error', None)
-
-
-getInfo = create_getInfo(GigapetaCom)
diff --git a/pyload/plugins/hoster/GooIm.py b/pyload/plugins/hoster/GooIm.py
deleted file mode 100644
index db214c6fa..000000000
--- a/pyload/plugins/hoster/GooIm.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# https://goo.im/devs/liquidsmooth/3.x/codina/Nightly/LS-KK-v3.2-2014-08-01-codina.zip
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class GooIm(SimpleHoster):
- __name = "GooIm"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'https?://(?:www\.)?goo\.im/.+'
-
- __description = """Goo.im hoster plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
-
-
- NAME_PATTERN = r'You will be redirected to .*(?P<N>[^/ ]+) in'
- OFFLINE_PATTERN = r'The file you requested was not found'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
-
-
- def handleFree(self):
- url = self.pyfile.url
- self.html = self.load(url, cookies=True)
- self.wait(10)
- self.download(url, cookies=True)
-
-
-getInfo = create_getInfo(GooIm)
diff --git a/pyload/plugins/hoster/HellshareCz.py b/pyload/plugins/hoster/HellshareCz.py
deleted file mode 100644
index 0601aabbd..000000000
--- a/pyload/plugins/hoster/HellshareCz.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class HellshareCz(SimpleHoster):
- __name = "HellshareCz"
- __type = "hoster"
- __version = "0.83"
-
- __pattern = r'(http://(?:www\.)?hellshare\.(?:cz|com|sk|hu|pl)/[^?]*/\d+).*'
-
- __description = """Hellshare.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<h1 id="filename"[^>]*>(?P<N>[^<]+)</h1>'
- SIZE_PATTERN = r'<strong id="FileSize_master">(?P<S>[\d.,]+)&nbsp;(?P<U>[\w^_]+)</strong>'
- OFFLINE_PATTERN = r'<h1>File not found.</h1>'
- SHOW_WINDOW_PATTERN = r'<a href="([^?]+/(\d+)/\?do=(fileDownloadButton|relatedFileDownloadButton-\2)-showDownloadWindow)"'
-
-
- def setup(self):
- self.resumeDownload = self.multiDL = True if self.account else False
- self.chunkLimit = 1
-
-
- def process(self, pyfile):
- if not self.account:
- self.fail(_("User not logged in"))
- pyfile.url = re.match(self.__pattern, pyfile.url).group(1)
- self.html = self.load(pyfile.url, decode=True)
- self.getFileInfo()
- if not self.checkTrafficLeft():
- self.fail(_("Not enough traffic left for user ") + self.user)
-
- m = re.search(self.SHOW_WINDOW_PATTERN, self.html)
- if m is None:
- self.error(_("SHOW_WINDOW_PATTERN not found"))
-
- self.url = "http://www.hellshare.com" + m.group(1)
- self.download(self.url)
-
-
-getInfo = create_getInfo(HellshareCz)
diff --git a/pyload/plugins/hoster/HellspyCz.py b/pyload/plugins/hoster/HellspyCz.py
deleted file mode 100644
index 178970fc4..000000000
--- a/pyload/plugins/hoster/HellspyCz.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class HellspyCz(DeadHoster):
- __name = "HellspyCz"
- __type = "hoster"
- __version = "0.28"
-
- __pattern = r'http://(?:www\.)?(?:hellspy\.(?:cz|com|sk|hu|pl)|sciagaj\.pl)(/\S+/\d+)/?.*'
-
- __description = """HellSpy.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(HellspyCz)
diff --git a/pyload/plugins/hoster/HotfileCom.py b/pyload/plugins/hoster/HotfileCom.py
deleted file mode 100644
index f7351bc6c..000000000
--- a/pyload/plugins/hoster/HotfileCom.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class HotfileCom(DeadHoster):
- __name = "HotfileCom"
- __type = "hoster"
- __version = "0.37"
-
- __pattern = r'https?://(?:www\.)?hotfile\.com/dl/\d+/\w+'
-
- __description = """Hotfile.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("sitacuisses", "sitacuisses@yhoo.de"),
- ("spoob", "spoob@pyload.org"),
- ("mkaay", "mkaay@mkaay.de"),
- ("JoKoT3", "jokot3@gmail.com")]
-
-
-getInfo = create_getInfo(HotfileCom)
diff --git a/pyload/plugins/hoster/HugefilesNet.py b/pyload/plugins/hoster/HugefilesNet.py
deleted file mode 100644
index 1833d8114..000000000
--- a/pyload/plugins/hoster/HugefilesNet.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class HugefilesNet(XFSHoster):
- __name = "HugefilesNet"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?hugefiles\.net/\w{12}'
-
- __description = """Hugefiles.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- HOSTER_DOMAIN = "hugefiles.net"
-
- SIZE_PATTERN = r'File Size:</span>\s*<span[^>]*>(?P<S>[^<]+)</span></div>'
-
- FORM_INPUTS_MAP = {'ctype': re.compile(r'\d+')}
-
-
-getInfo = create_getInfo(HugefilesNet)
diff --git a/pyload/plugins/hoster/HundredEightyUploadCom.py b/pyload/plugins/hoster/HundredEightyUploadCom.py
deleted file mode 100644
index 54cf46bdd..000000000
--- a/pyload/plugins/hoster/HundredEightyUploadCom.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://180upload.com/js9qdm6kjnrs
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class HundredEightyUploadCom(XFSHoster):
- __name = "HundredEightyUploadCom"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'http://(?:www\.)?180upload\.com/\w{12}'
-
- __description = """180upload.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- HOSTER_DOMAIN = "180upload.com"
-
- NAME_PATTERN = r'Filename:</b></td><td nowrap>(?P<N>.+)</td></tr>-->'
- SIZE_PATTERN = r'Size:</b></td><td>(?P<S>[\d.,]+) (?P<U>[\w^_]+)\s*<small>'
-
-
-getInfo = create_getInfo(HundredEightyUploadCom)
diff --git a/pyload/plugins/hoster/IFileWs.py b/pyload/plugins/hoster/IFileWs.py
deleted file mode 100644
index 223460793..000000000
--- a/pyload/plugins/hoster/IFileWs.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class IFileWs(DeadHoster):
- __name = "IFileWs"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?ifile\.ws/\w{12}'
-
- __description = """Ifile.ws hoster plugin"""
- __license = "GPLv3"
- __authors = [("z00nx", "z00nx0@gmail.com")]
-
-
-getInfo = create_getInfo(IFileWs)
diff --git a/pyload/plugins/hoster/IcyFilesCom.py b/pyload/plugins/hoster/IcyFilesCom.py
deleted file mode 100644
index 57fcd13e3..000000000
--- a/pyload/plugins/hoster/IcyFilesCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class IcyFilesCom(DeadHoster):
- __name = "IcyFilesCom"
- __type = "hoster"
- __version = "0.06"
-
- __pattern = r'http://(?:www\.)?icyfiles\.com/(.*)'
-
- __description = """IcyFiles.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("godofdream", "soilfiction@gmail.com")]
-
-
-getInfo = create_getInfo(IcyFilesCom)
diff --git a/pyload/plugins/hoster/IfileIt.py b/pyload/plugins/hoster/IfileIt.py
deleted file mode 100644
index 309acdec3..000000000
--- a/pyload/plugins/hoster/IfileIt.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.utils import json_loads
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class IfileIt(SimpleHoster):
- __name = "IfileIt"
- __type = "hoster"
- __version = "0.28"
-
- __pattern = r'^unmatchable$'
-
- __description = """Ifile.it"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- LINK_PATTERN = r'</span> If it doesn\'t, <a target="_blank" href="([^"]+)">'
- RECAPTCHA_PATTERN = r'var __recaptcha_public\s*=\s*\'(.+?)\''
- INFO_PATTERN = r'<span style="cursor: default;[^>]*>\s*(?P<N>.*?)\s*&nbsp;\s*<strong>\s*(?P<S>[\d.,]+)\s*(?P<U>[\w^_]+)\s*</strong>\s*</span>'
- OFFLINE_PATTERN = r'<span style="cursor: default;[^>]*>\s*&nbsp;\s*<strong>\s*</strong>\s*</span>'
- TEMP_OFFLINE_PATTERN = r'<span class="msg_red">Downloading of this file is temporarily disabled</span>'
-
-
- def handleFree(self):
- ukey = re.match(self.__pattern, self.pyfile.url).group(1)
- json_url = 'http://ifile.it/new_download-request.json'
- post_data = {"ukey": ukey, "ab": "0"}
- res = json_loads(self.load(json_url, post=post_data))
-
- self.logDebug(res)
-
- if res['status'] == 3:
- self.offline()
-
- if res['captcha']:
- captcha_key = re.search(self.RECAPTCHA_PATTERN, self.html).group(1)
-
- recaptcha = ReCaptcha(self)
- post_data['ctype'] = "recaptcha"
-
- for _i in xrange(5):
- challenge, response = recaptcha.challenge(captcha_key)
- post_data.update({'recaptcha_challenge': challenge,
- 'recaptcha_response' : response})
- res = json_loads(self.load(json_url, post=post_data))
- self.logDebug(res)
-
- if res['retry']:
- self.invalidCaptcha()
- else:
- self.correctCaptcha()
- break
- else:
- self.fail(_("Incorrect captcha"))
-
- if not "ticket_url" in res:
- self.error(_("No download URL"))
-
- self.download(res['ticket_url'])
-
-
-getInfo = create_getInfo(IfileIt)
diff --git a/pyload/plugins/hoster/IfolderRu.py b/pyload/plugins/hoster/IfolderRu.py
deleted file mode 100644
index 49f04dfcb..000000000
--- a/pyload/plugins/hoster/IfolderRu.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class IfolderRu(SimpleHoster):
- __name = "IfolderRu"
- __type = "hoster"
- __version = "0.38"
-
- __pattern = r'http://(?:www\.)?(?:ifolder\.ru|rusfolder\.(?:com|net|ru))/(?:files/)?(?P<ID>\d+).*'
-
- __description = """Ifolder.ru hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- SIZE_REPLACEMENTS = [(u'Кб', 'KB'), (u'Мб', 'MB'), (u'Гб', 'GB')]
- NAME_PATTERN = ur'(?:<div><span>)?Название:(?:</span>)? <b>(?P<N>[^<]+)</b><(?:/div|br)>'
- SIZE_PATTERN = ur'(?:<div><span>)?Размер:(?:</span>)? <b>(?P<S>[^<]+)</b><(?:/div|br)>'
- OFFLINE_PATTERN = ur'<p>Файл номер <b>[^<]*</b> (не найден|удален) !!!</p>'
-
- SESSION_ID_PATTERN = r'<a href=(http://ints\.(?:rusfolder\.com|ifolder\.ru)/ints/sponsor/\?bi=\d*&session=([^&]+)&u=[^>]+)>'
- INTS_SESSION_PATTERN = r'\(\'ints_session\'\);\s*if\(tag\)\{tag\.value = "([^"]+)";\}'
- HIDDEN_INPUT_PATTERN = r'var v = .*?name=\'(.+?)\' value=\'1\''
- LINK_PATTERN = r'<a id="download_file_href" href="([^"]+)"'
- WRONG_CAPTCHA_PATTERN = ur'<font color=Red>неверный код,<br>введите еще раз</font><br>'
-
-
- def setup(self):
- self.resumeDownload = self.multiDL = True if self.account else False
- self.chunkLimit = 1
-
-
- def process(self, pyfile):
- file_id = re.match(self.__pattern, pyfile.url).group('ID')
- self.html = self.load("http://rusfolder.com/%s" % file_id, cookies=True, decode=True)
- self.getFileInfo()
-
- url = re.search(r"location\.href = '(http://ints\..*?=)'", self.html).group(1)
- self.html = self.load(url, cookies=True, decode=True)
-
- url, session_id = re.search(self.SESSION_ID_PATTERN, self.html).groups()
- self.html = self.load(url, cookies=True, decode=True)
-
- url = "http://ints.rusfolder.com/ints/frame/?session=%s" % session_id
- self.html = self.load(url, cookies=True)
-
- self.wait(31, False)
-
- captcha_url = "http://ints.rusfolder.com/random/images/?session=%s" % session_id
- for _i in xrange(5):
- self.html = self.load(url, cookies=True)
- action, inputs = self.parseHtmlForm('ID="Form1"')
- inputs['ints_session'] = re.search(self.INTS_SESSION_PATTERN, self.html).group(1)
- inputs[re.search(self.HIDDEN_INPUT_PATTERN, self.html).group(1)] = '1'
- inputs['confirmed_number'] = self.decryptCaptcha(captcha_url, cookies=True)
- inputs['action'] = '1'
- self.logDebug(inputs)
-
- self.html = self.load(url, decode=True, cookies=True, post=inputs)
- if self.WRONG_CAPTCHA_PATTERN in self.html:
- self.invalidCaptcha()
- else:
- break
- else:
- self.fail(_("Invalid captcha"))
-
- download_url = re.search(self.LINK_PATTERN, self.html).group(1)
- self.correctCaptcha()
- self.download(download_url)
-
-
-getInfo = create_getInfo(IfolderRu)
diff --git a/pyload/plugins/hoster/JumbofilesCom.py b/pyload/plugins/hoster/JumbofilesCom.py
deleted file mode 100644
index 90190f0ff..000000000
--- a/pyload/plugins/hoster/JumbofilesCom.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class JumbofilesCom(SimpleHoster):
- __name = "JumbofilesCom"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?jumbofiles\.com/(\w{12}).*'
-
- __description = """JumboFiles.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("godofdream", "soilfiction@gmail.com")]
-
-
- INFO_PATTERN = r'<TR><TD>(?P<N>[^<]+?)\s*<small>\((?P<S>[\d.,]+)\s*(?P<U>[\w^_]+)'
- OFFLINE_PATTERN = r'Not Found or Deleted / Disabled due to inactivity or DMCA'
- LINK_PATTERN = r'<meta http-equiv="refresh" content="10;url=(.+)">'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
-
-
- def handleFree(self):
- ukey = re.match(self.__pattern, self.pyfile.url).group(1)
- post_data = {"id": ukey, "op": "download3", "rand": ""}
- html = self.load(self.pyfile.url, post=post_data, decode=True)
- url = re.search(self.LINK_PATTERN, html).group(1)
- self.download(url)
-
-
-getInfo = create_getInfo(JumbofilesCom)
diff --git a/pyload/plugins/hoster/JunocloudMe.py b/pyload/plugins/hoster/JunocloudMe.py
deleted file mode 100644
index 6aaf81844..000000000
--- a/pyload/plugins/hoster/JunocloudMe.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class JunocloudMe(XFSHoster):
- __name = "JunocloudMe"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'http://(?:\w+\.)?junocloud\.me/\w{12}'
-
- __description = """Junocloud.me hoster plugin"""
- __license = "GPLv3"
- __authors = [("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "junocloud.me"
-
- URL_REPLACEMENTS = [(r'//(www\.)?junocloud', "//dl3.junocloud")]
-
- SIZE_PATTERN = r'<p class="request_filesize">Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)</p>'
-
- OFFLINE_PATTERN = r'>No such file with this filename<'
- TEMP_OFFLINE_PATTERN = r'The page may have been renamed, removed or be temporarily unavailable.<'
-
-
-getInfo = create_getInfo(JunocloudMe)
diff --git a/pyload/plugins/hoster/Keep2shareCc.py b/pyload/plugins/hoster/Keep2shareCc.py
deleted file mode 100644
index 31867d2d4..000000000
--- a/pyload/plugins/hoster/Keep2shareCc.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin, urlparse
-
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import _isDirectLink, SimpleHoster, create_getInfo
-
-
-class Keep2shareCc(SimpleHoster):
- __name = "Keep2shareCc"
- __type = "hoster"
- __version = "0.17"
-
- __pattern = r'https?://(?:www\.)?(keep2share|k2s|keep2s)\.cc/file/(?P<ID>\w+)'
-
- __description = """Keep2share.cc hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- URL_REPLACEMENTS = [(__pattern + ".*", "http://k2s.cc/file/\g<ID>")]
-
- NAME_PATTERN = r'File: <span>(?P<N>.+)</span>'
- SIZE_PATTERN = r'Size: (?P<S>[^<]+)</div>'
-
- OFFLINE_PATTERN = r'File not found or deleted|Sorry, this file is blocked or deleted|Error 404'
- TEMP_OFFLINE_PATTERN = r'Downloading blocked due to'
-
- LINK_FREE_PATTERN = LINK_PREMIUM_PATTERN = r'"([^"]+url.html?file=.+?)"|window\.location\.href = \'(.+?)\';'
-
- CAPTCHA_PATTERN = r'src="(/file/captcha\.html.+?)"'
-
- WAIT_PATTERN = r'Please wait ([\d:]+) to download this file'
- TEMP_ERROR_PATTERN = r'>\s*(Download count files exceed|Traffic limit exceed|Free account does not allow to download more than one file at the same time)'
- ERROR_PATTERN = r'>\s*(Free user can\'t download large files|You no can access to this file|This download available only for premium users|This is private file)'
-
-
- def checkErrors(self):
- m = re.search(self.TEMP_ERROR_PATTERN, self.html)
- if m:
- self.info['error'] = m.group(1)
- self.wantReconnect = True
- self.retry(wait_time=30 * 60, reason=m.group(0))
-
- m = re.search(self.ERROR_PATTERN, self.html)
- if m:
- errmsg = self.info['error'] = m.group(1)
- self.error(errmsg)
-
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- self.logDebug("Hoster told us to wait for %s" % m.group(1))
-
- # string-to-time conversion courtesy of https://stackoverflow.com/questions/10663720
- ftr = [3600, 60, 1]
- wait_time = sum([a * b for a, b in zip(ftr, map(int, m.group(1).split(':')))])
-
- self.wantReconnect = True
- self.retry(wait_time=wait_time, reason="Please wait to download this file")
-
- self.info.pop('error', None)
-
-
- def handleFree(self):
- self.fid = re.search(r'<input type="hidden" name="slow_id" value="([^"]+)">', self.html).group(1)
- self.html = self.load(self.pyfile.url, post={'yt0': '', 'slow_id': self.fid})
-
- self.checkErrors()
-
- m = re.search(self.LINK_FREE_PATTERN, self.html)
-
- if m is None:
- self.handleCaptcha()
-
- self.wait(30)
-
- self.html = self.load(self.pyfile.url, post={'uniqueId': self.fid, 'free': 1})
-
- self.checkErrors()
-
- m = re.search(self.LINK_FREE_PATTERN, self.html)
- if m is None:
- self.error(_("LINK_FREE_PATTERN not found"))
-
- self.link = m.group(1)
-
-
- def handleCaptcha(self):
- recaptcha = ReCaptcha(self)
-
- for _i in xrange(5):
- post_data = {'free' : 1,
- 'freeDownloadRequest': 1,
- 'uniqueId' : self.fid,
- 'yt0' : ''}
-
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m:
- captcha_url = urljoin(self.base, m.group(1))
- post_data['CaptchaForm[code]'] = self.decryptCaptcha(captcha_url)
- else:
- challenge, response = recaptcha.challenge()
- post_data.update({'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field' : response})
-
- self.html = self.load(self.pyfile.url, post=post_data)
-
- if 'recaptcha' not in self.html:
- self.correctCaptcha()
- break
- else:
- self.invalidCaptcha()
- else:
- self.fail(_("All captcha attempts failed"))
-
-
- def downloadLink(self, link):
- if not link:
- return
-
- p = urlparse(self.pyfile.url)
- base = "%s://%s" % (p.scheme, p.netloc)
- link = _isDirectLink(self, link, self.premium)
-
- if link:
- self.download(urljoin(base, link), disposition=True)
-
-
-getInfo = create_getInfo(Keep2shareCc)
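For reference, the WAIT_PATTERN branch in Keep2shareCc.checkErrors above turns an "HH:MM:SS" wait string into seconds by zipping it against the factor list [3600, 60, 1]. A minimal standalone sketch of the same arithmetic (hms_to_seconds is a hypothetical helper name, not part of the plugin):

def hms_to_seconds(clock):
    # "01:30:05" -> 1*3600 + 30*60 + 5 = 5405
    return sum(factor * int(part)
               for factor, part in zip([3600, 60, 1], clock.split(":")))

assert hms_to_seconds("01:30:05") == 5405

Note that zip pairs factors from the left, so a shorter "MM:SS" match would be weighted as hours and minutes; the plugin appears to assume three fields are always captured.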
diff --git a/pyload/plugins/hoster/KickloadCom.py b/pyload/plugins/hoster/KickloadCom.py
deleted file mode 100644
index 70c0bb1ce..000000000
--- a/pyload/plugins/hoster/KickloadCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class KickloadCom(DeadHoster):
- __name = "KickloadCom"
- __type = "hoster"
- __version = "0.21"
-
- __pattern = r'http://(?:www\.)?kickload\.com/get/.+'
-
- __description = """Kickload.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
-getInfo = create_getInfo(KickloadCom)
diff --git a/pyload/plugins/hoster/KingfilesNet.py b/pyload/plugins/hoster/KingfilesNet.py
deleted file mode 100644
index b8002741f..000000000
--- a/pyload/plugins/hoster/KingfilesNet.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.captcha import SolveMedia
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class KingfilesNet(SimpleHoster):
- __name = "KingfilesNet"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?kingfiles\.net/(?P<ID>\w{12})'
-
- __description = """Kingfiles.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'name="fname" value="(?P<N>.+?)">'
- SIZE_PATTERN = r'>Size: .+?">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- OFFLINE_PATTERN = r'>(File Not Found</b><br><br>|File Not Found</h2>)'
-
- RAND_ID_PATTERN = r'type=\"hidden\" name=\"rand\" value=\"(.+)\">'
-
- LINK_PATTERN = r'var download_url = \'(.+)\';'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
-
-
- def handleFree(self):
- # Click the free user button
- post_data = {'op' : "download1",
- 'usr_login' : "",
- 'id' : self.info['pattern']['ID'],
- 'fname' : self.pyfile.name,
- 'referer' : "",
- 'method_free': "+"}
-
- self.html = self.load(self.pyfile.url, post=post_data, cookies=True, decode=True)
-
- solvemedia = SolveMedia(self)
- challenge, response = solvemedia.challenge()
-
- # Make the download link appear and load the file
- m = re.search(self.RAND_ID_PATTERN, self.html)
- if m is None:
- self.error(_("Random key not found"))
-
- rand = m.group(1)
- self.logDebug("rand = ", rand)
-
- post_data = {'op' : "download2",
- 'id' : self.info['pattern']['ID'],
- 'rand' : rand,
- 'referer' : self.pyfile.url,
- 'method_free' : "+",
- 'method_premium' : "",
- 'adcopy_response' : response,
- 'adcopy_challenge': challenge,
- 'down_direct' : "1"}
-
- self.html = self.load(self.pyfile.url, post=post_data, cookies=True, decode=True)
-
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("Download url not found"))
-
- self.download(m.group(1), cookies=True, disposition=True)
-
- check = self.checkDownload({'html': re.compile("<html>")})
- if check == "html":
- self.error(_("Downloaded file is an html page"))
-
-
-getInfo = create_getInfo(KingfilesNet)
diff --git a/pyload/plugins/hoster/LemUploadsCom.py b/pyload/plugins/hoster/LemUploadsCom.py
deleted file mode 100644
index c2fb63fd8..000000000
--- a/pyload/plugins/hoster/LemUploadsCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class LemUploadsCom(DeadHoster):
- __name = "LemUploadsCom"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'https?://(?:www\.)?lemuploads\.com/\w{12}'
-
- __description = """LemUploads.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
-
-
-getInfo = create_getInfo(LemUploadsCom)
diff --git a/pyload/plugins/hoster/LetitbitNet.py b/pyload/plugins/hoster/LetitbitNet.py
deleted file mode 100644
index 8d79fd92c..000000000
--- a/pyload/plugins/hoster/LetitbitNet.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# API Documentation:
-# http://api.letitbit.net/reg/static/api.pdf
-#
-# Test links:
-# http://letitbit.net/download/07874.0b5709a7d3beee2408bb1f2eefce/random.bin.html
-
-import re
-
-from urllib import urlencode, urlopen
-from urlparse import urljoin
-
-from pyload.utils import json_loads, json_dumps
-from pyload.plugins.hoster.UnrestrictLi import secondsToMidnight
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster
-
-
-def api_download_info(url):
- json_data = ["yw7XQy2v9", ["download/info", {"link": url}]]
- post_data = urlencode({'r': json_dumps(json_data)})
- api_rep = urlopen("http://api.letitbit.net/json", data=post_data).read()
- return json_loads(api_rep)
-
-
-def getInfo(urls):
- for url in urls:
- api_rep = api_download_info(url)
- if api_rep['status'] == 'OK':
- info = api_rep['data'][0]
- yield (info['name'], info['size'], 2, url)
- else:
- yield (url, 0, 1, url)
-
-
-class LetitbitNet(SimpleHoster):
- __name = "LetitbitNet"
- __type = "hoster"
- __version = "0.26"
-
- __pattern = r'https?://(?:www\.)?(letitbit|shareflare)\.net/download/.*'
-
- __description = """Letitbit.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("z00nx", "z00nx0@gmail.com")]
-
-
- URL_REPLACEMENTS = [(r"(?<=http://)([^/]+)", "letitbit.net")]
-
- SECONDS_PATTERN = r'seconds\s*=\s*(\d+);'
- CAPTCHA_CONTROL_FIELD = r'recaptcha_control_field\s=\s\'(?P<value>.+?)\''
-
-
- def setup(self):
- self.resumeDownload = True
-
-
- def getFileInfo(self):
- api_rep = api_download_info(self.pyfile.url)
- if api_rep['status'] == 'OK':
- self.api_data = api_rep['data'][0]
- self.pyfile.name = self.api_data['name']
- self.pyfile.size = self.api_data['size']
- else:
- self.offline()
-
-
- def handleFree(self):
- action, inputs = self.parseHtmlForm('id="ifree_form"')
- if not action:
- self.error(_("ifree_form"))
-
- self.pyfile.size = float(inputs['sssize'])
- self.logDebug(action, inputs)
- inputs['desc'] = ""
-
- self.html = self.load(urljoin("http://letitbit.net/", action), post=inputs, cookies=True)
-
- m = re.search(self.SECONDS_PATTERN, self.html)
- seconds = int(m.group(1)) if m else 60
- self.logDebug("Seconds found", seconds)
- m = re.search(self.CAPTCHA_CONTROL_FIELD, self.html)
- recaptcha_control_field = m.group(1)
- self.logDebug("ReCaptcha control field found", recaptcha_control_field)
- self.wait(seconds)
-
- res = self.load("http://letitbit.net/ajax/download3.php", post=" ", cookies=True)
- if res != '1':
- self.error(_("Unknown response - ajax_check_url"))
- self.logDebug(res)
-
- recaptcha = ReCaptcha(self)
- challenge, response = recaptcha.challenge()
-
- post_data = {"recaptcha_challenge_field": challenge,
- "recaptcha_response_field": response,
- "recaptcha_control_field": recaptcha_control_field}
- self.logDebug("Post data to send", post_data)
- res = self.load("http://letitbit.net/ajax/check_recaptcha.php", post=post_data, cookies=True)
- self.logDebug(res)
- if not res:
- self.invalidCaptcha()
- if res == "error_free_download_blocked":
- self.logWarning(_("Daily limit reached"))
- self.wait(secondsToMidnight(gmt=2), True)
- if res == "error_wrong_captcha":
- self.invalidCaptcha()
- self.retry()
- elif res.startswith('['):
- urls = json_loads(res)
- elif res.startswith('http://'):
- urls = [res]
- else:
- self.error(_("Unknown response - captcha check"))
-
- self.correctCaptcha()
-
- for download_url in urls:
- try:
- self.download(download_url)
- break
- except Exception, e:
- self.logError(e)
- else:
- self.fail(_("Download did not finish correctly"))
-
-
- def handlePremium(self):
- api_key = self.user
- premium_key = self.account.getAccountData(self.user)['password']
-
- json_data = [api_key, ["download/direct_links", {"pass": premium_key, "link": self.pyfile.url}]]
- api_rep = self.load('http://api.letitbit.net/json', post={'r': json_dumps(json_data)})
- self.logDebug("API Data: " + api_rep)
- api_rep = json_loads(api_rep)
-
- if api_rep['status'] == 'FAIL':
- self.fail(api_rep['data'])
-
- self.download(api_rep['data'][0][0], disposition=True)
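As a quick illustration of the request format used by api_download_info and handlePremium above: the Letitbit JSON API takes a single form field r whose value is a JSON-encoded [api_key, [method, params]] pair. A minimal sketch using only the Python 2 standard library (the shared key is the one hard-coded in the plugin; the endpoint behaviour beyond that is an assumption):

import json
from urllib import urlencode, urlopen  # Python 2, as in the plugin

def letitbit_call(method, params, api_key="yw7XQy2v9"):
    # One call = [api_key, [method, params]], JSON-encoded into the "r" form field
    payload = urlencode({'r': json.dumps([api_key, [method, params]])})
    return json.loads(urlopen("http://api.letitbit.net/json", payload).read())

# e.g. letitbit_call("download/info", {"link": some_url})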
diff --git a/pyload/plugins/hoster/LinksnappyCom.py b/pyload/plugins/hoster/LinksnappyCom.py
deleted file mode 100644
index f45eba428..000000000
--- a/pyload/plugins/hoster/LinksnappyCom.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urlsplit
-
-from pyload.utils import json_loads, json_dumps
-from pyload.plugins.Hoster import Hoster
-
-
-class LinksnappyCom(Hoster):
- __name = "LinksnappyCom"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'https?://(?:[^/]*\.)?linksnappy\.com'
-
- __description = """Linksnappy.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- SINGLE_CHUNK_HOSTERS = ('easybytez.com')
-
-
- def setup(self):
- self.chunkLimit = -1
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- if re.match(self.__pattern, pyfile.url):
- new_url = pyfile.url
- elif not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "Linksnappy.com")
- self.fail(_("No Linksnappy.com account provided"))
- else:
- self.logDebug("Old URL: %s" % pyfile.url)
- host = self._get_host(pyfile.url)
- json_params = json_dumps({'link': pyfile.url,
- 'type': host,
- 'username': self.user,
- 'password': self.account.getAccountData(self.user)['password']})
- r = self.load('http://gen.linksnappy.com/genAPI.php',
- post={'genLinks': json_params})
- self.logDebug("JSON data: " + r)
-
- j = json_loads(r)['links'][0]
-
- if j['error']:
- msg = _("Error converting the link")
- self.logError(msg, j['error'])
- self.fail(msg)
-
- pyfile.name = j['filename']
- new_url = j['generated']
-
- if host in self.SINGLE_CHUNK_HOSTERS:
- self.chunkLimit = 1
- else:
- self.setup()
-
- if new_url != pyfile.url:
- self.logDebug("New URL: " + new_url)
-
- self.download(new_url, disposition=True)
-
- check = self.checkDownload({"html302": "<title>302 Found</title>"})
- if check == "html302":
- self.retry(wait_time=5, reason=_("Linksnappy returns only HTML data"))
-
-
- @staticmethod
- def _get_host(url):
- host = urlsplit(url).netloc
- return re.search(r'[\w-]+\.\w+$', host).group(0)
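For context on the process method above: the Linksnappy generator endpoint receives a genLinks form field whose value is a JSON object carrying the original link, the hoster type, and the account credentials, and answers with a links array. A hedged sketch of just the request construction (field names are taken from the plugin; build_genlinks_payload is a hypothetical helper):

import json
import re
from urlparse import urlsplit  # Python 2, as in the plugin

def build_genlinks_payload(url, username, password):
    # Reduce "http://www.easybytez.com/abc" to "easybytez.com", as _get_host does
    host = re.search(r'[\w-]+\.\w+$', urlsplit(url).netloc).group(0)
    return {'genLinks': json.dumps({'link': url,
                                    'type': host,
                                    'username': username,
                                    'password': password})}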
diff --git a/pyload/plugins/hoster/LoadTo.py b/pyload/plugins/hoster/LoadTo.py
deleted file mode 100644
index e5e19d3c6..000000000
--- a/pyload/plugins/hoster/LoadTo.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://www.load.to/JWydcofUY6/random.bin
-# http://www.load.to/oeSmrfkXE/random100.bin
-
-import re
-
-from pyload.plugins.internal.captcha import SolveMedia
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class LoadTo(SimpleHoster):
- __name = "LoadTo"
- __type = "hoster"
- __version = "0.18"
-
- __pattern = r'http://(?:www\.)?load\.to/\w+'
-
- __description = """Load.to hoster plugin"""
- __license = "GPLv3"
- __authors = [("halfman", "Pulpan3@gmail.com"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- NAME_PATTERN = r'<h1>(?P<N>.+)</h1>'
- SIZE_PATTERN = r'Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
- OFFLINE_PATTERN = r'>Can\'t find file'
-
- LINK_PATTERN = r'<form method="post" action="(.+?)"'
- WAIT_PATTERN = r'type="submit" value="Download \((\d+)\)"'
-
- URL_REPLACEMENTS = [(r'(\w)$', r'\1/')]
-
-
- def setup(self):
- self.multiDL = True
- self.chunkLimit = 1
-
-
- def handleFree(self):
- # Search for Download URL
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("LINK_PATTERN not found"))
-
- download_url = m.group(1)
-
- # Set Timer - may be obsolete
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- self.wait(int(m.group(1)))
-
- # Load.to is using solvemedia captchas since ~july 2014:
- solvemedia = SolveMedia(self)
- captcha_key = solvemedia.detect_key()
-
- if captcha_key is None:
- self.download(download_url)
- else:
- challenge, response = solvemedia.challenge(captcha_key)
-
- self.download(download_url, post={"adcopy_challenge": challenge, "adcopy_response": response})
-
- check = self.checkDownload({'404': re.compile("\A<h1>404 Not Found</h1>"), 'html': re.compile("html")})
-
- if check == "404":
- self.invalidCaptcha()
- self.retry()
- elif check == "html":
- self.logWarning(_("Downloaded file is an html page, will retry"))
- self.retry()
-
-
-getInfo = create_getInfo(LoadTo)
diff --git a/pyload/plugins/hoster/LomafileCom.py b/pyload/plugins/hoster/LomafileCom.py
deleted file mode 100644
index 5c16711c0..000000000
--- a/pyload/plugins/hoster/LomafileCom.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class LomafileCom(XFSHoster):
- __name = "LomafileCom"
- __type = "hoster"
- __version = "0.51"
-
- __pattern = r'http://lomafile\.com/\w{12}'
-
- __description = """Lomafile.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("nath_schwarz", "nathan.notwhite@gmail.com"),
- ("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "lomafile.com"
-
- NAME_PATTERN = r'<a href="http://lomafile\.com/w{12}/(?P<N>.+?)">'
- SIZE_PATTERN = r'Size:</b></td><td>(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- OFFLINE_PATTERN = r'>(No such file|Software error:<)'
- TEMP_OFFLINE_PATTERN = r'The page may have been renamed, removed or be temporarily unavailable.<'
-
- CAPTCHA_PATTERN = r'(http://lomafile\.com/captchas/[^"\']+)'
-
-
-getInfo = create_getInfo(LomafileCom)
diff --git a/pyload/plugins/hoster/LuckyShareNet.py b/pyload/plugins/hoster/LuckyShareNet.py
deleted file mode 100644
index 156e66f21..000000000
--- a/pyload/plugins/hoster/LuckyShareNet.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from bottle import json_loads
-
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class LuckyShareNet(SimpleHoster):
- __name = "LuckyShareNet"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'https?://(?:www\.)?luckyshare\.net/(?P<ID>\d{10,})'
-
- __description = """LuckyShare.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- INFO_PATTERN = r'<h1 class=\'file_name\'>(?P<N>\S+)</h1>\s*<span class=\'file_size\'>Filesize: (?P<S>[\d.,]+)(?P<U>[\w^_]+)</span>'
- OFFLINE_PATTERN = r'There is no such file available'
-
-
- def parseJson(self, rep):
- if 'AJAX Error' in rep:
- html = self.load(self.pyfile.url, decode=True)
- m = re.search(r"waitingtime = (\d+);", html)
- if m:
- seconds = int(m.group(1))
- self.logDebug("You have to wait %d seconds between free downloads" % seconds)
- self.retry(wait_time=seconds)
- else:
- self.error(_("Unable to detect wait time between free downloads"))
- elif 'Hash expired' in rep:
- self.retry(reason=_("Hash expired"))
- return json_loads(rep)
-
-
- # TODO: There should be a filesize limit for free downloads
- # TODO: Some files could not be downloaded in free mode
- def handleFree(self):
- rep = self.load(r"http://luckyshare.net/download/request/type/time/file/" + self.info['pattern']['ID'], decode=True)
- self.logDebug("JSON: " + rep)
- json = self.parseJson(rep)
-
- self.wait(int(json['time']))
-
- recaptcha = ReCaptcha(self)
-
- for _i in xrange(5):
- challenge, response = recaptcha.challenge()
- rep = self.load(r"http://luckyshare.net/download/verify/challenge/%s/response/%s/hash/%s" %
- (challenge, response, json['hash']), decode=True)
- self.logDebug("JSON: " + rep)
- if 'link' in rep:
- json.update(self.parseJson(rep))
- self.correctCaptcha()
- break
- elif 'Verification failed' in rep:
- self.invalidCaptcha()
- else:
- self.error(_("Unable to get downlaod link"))
-
- if not json['link']:
- self.fail(_("No Download url retrieved/all captcha attempts failed"))
-
- self.download(json['link'])
-
-
-getInfo = create_getInfo(LuckyShareNet)
diff --git a/pyload/plugins/hoster/MediafireCom.py b/pyload/plugins/hoster/MediafireCom.py
deleted file mode 100644
index 2f75c61dd..000000000
--- a/pyload/plugins/hoster/MediafireCom.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.captcha import SolveMedia
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, parseFileInfo
-from pyload.network.RequestFactory import getURL
-
-
-def replace_eval(js_expr):
- return js_expr.replace(r'eval("', '').replace(r"\'", r"'").replace(r'\"', r'"')
-
-
-def checkHTMLHeader(url):
- try:
- for _i in xrange(3):
- header = getURL(url, just_header=True)
- for line in header.splitlines():
- line = line.lower()
- if 'location' in line:
- url = line.split(':', 1)[1].strip()
- if 'error.php?errno=320' in url:
- return url, 1
- if not url.startswith('http://'):
- url = 'http://www.mediafire.com' + url
- break
- elif 'content-disposition' in line:
- return url, 2
- else:
- break
- except Exception:
- return url, 3
-
- return url, 0
-
-
-def getInfo(urls):
- for url in urls:
- location, status = checkHTMLHeader(url)
-
- if status:
- file_info = (url, 0, status, url)
- else:
- file_info = parseFileInfo(MediafireCom, url, getURL(url, decode=True))
-
- yield file_info
-
-
-class MediafireCom(SimpleHoster):
- __name = "MediafireCom"
- __type = "hoster"
- __version = "0.80"
-
- __pattern = r'http://(?:www\.)?mediafire\.com/(file/|(view/?|download\.php)?\?)(\w{11}|\w{15})($|/)'
-
- __description = """Mediafire.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- LINK_PATTERN = r'<div class="download_link"[^>]*(?:z-index:(?P<zindex>\d+))?[^>]*>\s*<a href="(?P<href>http://[^"]+)"'
- JS_KEY_PATTERN = r'DoShow\(\'mfpromo1\'\);[^{]*{((\w+)=\'\';.*?)eval\(\2\);'
- JS_ZMODULO_PATTERN = r'\(\'z-index\'\)\) \% (\d+)\)\);'
- PAGE1_ACTION_PATTERN = r'<link rel="canonical" href="([^"]+)"/>'
- PASSWORD_PATTERN = r'<form name="form_password"'
-
- NAME_PATTERN = r'<META NAME="description" CONTENT="(?P<N>[^"]+)"/>'
- INFO_PATTERN = r'oFileSharePopup\.ald\(\'(?P<ID>[^\']*)\',\'(?P<N>[^\']*)\',\'(?P<S>[^\']*)\',\'\',\'(?P<sha256>[^\']*)\'\)'
- OFFLINE_PATTERN = r'class="error_msg_title"> Invalid or Deleted File. </div>'
-
-
- def setup(self):
- self.multiDL = False
-
-
- def process(self, pyfile):
- pyfile.url = re.sub(r'/view/?\?', '/?', pyfile.url)
-
- self.url, result = checkHTMLHeader(pyfile.url)
- self.logDebug("Location (%d): %s" % (result, self.url))
-
- if result == 0:
- self.html = self.load(self.url, decode=True)
- self.checkCaptcha()
- self.multiDL = True
- self.check_data = self.getFileInfo()
-
- if self.account:
- self.handlePremium()
- else:
- self.handleFree()
- elif result == 1:
- self.offline()
- else:
- self.multiDL = True
- self.download(self.url, disposition=True)
-
-
- def handleFree(self):
- passwords = self.getPassword().splitlines()
- while self.PASSWORD_PATTERN in self.html:
- if len(passwords):
- password = passwords.pop(0)
- self.logInfo(_("Password protected link, trying ") + password)
- self.html = self.load(self.url, post={"downloadp": password})
- else:
- self.fail(_("No or incorrect password"))
-
- m = re.search(r'kNO = r"(http://.*?)";', self.html)
- if m is None:
- self.error(_("No download URL"))
-
- download_url = m.group(1)
- self.download(download_url)
-
-
- def checkCaptcha(self):
- solvemedia = SolveMedia(self)
- challenge, response = solvemedia.challenge()
- self.html = self.load(self.url,
- post={'adcopy_challenge': challenge,
- 'adcopy_response' : response},
- decode=True)
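For readability, the integer results of checkHTMLHeader above can be named as follows; the constants below are illustrative only and not part of the plugin:

# Illustrative names for checkHTMLHeader() results
STATUS_OK_HTML    = 0  # normal landing page -> parse it and run handleFree/handlePremium
STATUS_OFFLINE    = 1  # redirect to error.php?errno=320 -> offline()
STATUS_DIRECT     = 2  # content-disposition header seen -> download directly
STATUS_CHECK_FAIL = 3  # header probe raised -> process() also falls back to a direct download

def describe(status):
    return {0: "parse page", 1: "offline", 2: "direct download", 3: "probe failed"}[status]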
diff --git a/pyload/plugins/hoster/MegaCoNz.py b/pyload/plugins/hoster/MegaCoNz.py
deleted file mode 100644
index d3af1b72e..000000000
--- a/pyload/plugins/hoster/MegaCoNz.py
+++ /dev/null
@@ -1,171 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import random
-import re
-
-from array import array
-from base64 import standard_b64decode
-from os import remove
-
-from Crypto.Cipher import AES
-from Crypto.Util import Counter
-from pycurl import SSL_CIPHER_LIST
-
-from pyload.utils import json_loads, json_dumps
-from pyload.plugins.Hoster import Hoster
-
-############################ General errors ###################################
-# EINTERNAL (-1): An internal error has occurred. Please submit a bug report, detailing the exact circumstances in which this error occurred
-# EARGS (-2): You have passed invalid arguments to this command
-# EAGAIN (-3): (always at the request level) A temporary congestion or server malfunction prevented your request from being processed. No data was altered. Retry. Retries must be spaced with exponential backoff
-# ERATELIMIT (-4): You have exceeded your command weight per time quota. Please wait a few seconds, then try again (this should never happen in sane real-life applications)
-#
-############################ Upload errors ####################################
-# EFAILED (-5): The upload failed. Please restart it from scratch
-# ETOOMANY (-6): Too many concurrent IP addresses are accessing this upload target URL
-# ERANGE (-7): The upload file packet is out of range or not starting and ending on a chunk boundary
-# EEXPIRED (-8): The upload target URL you are trying to access has expired. Please request a fresh one
-#
-############################ Stream/System errors #############################
-# ENOENT (-9): Object (typically, node or user) not found
-# ECIRCULAR (-10): Circular linkage attempted
-# EACCESS (-11): Access violation (e.g., trying to write to a read-only share)
-# EEXIST (-12): Trying to create an object that already exists
-# EINCOMPLETE (-13): Trying to access an incomplete resource
-# EKEY (-14): A decryption operation failed (never returned by the API)
-# ESID (-15): Invalid or expired user session, please relogin
-# EBLOCKED (-16): User blocked
-# EOVERQUOTA (-17): Request over quota
-# ETEMPUNAVAIL (-18): Resource temporarily not available, please try again later
-# ETOOMANYCONNECTIONS (-19): Too many connections on this resource
-# EWRITE (-20): Write failed
-# EREAD (-21): Read failed
-# EAPPKEY (-22): Invalid application key; request not processed
-
-
-class MegaCoNz(Hoster):
- __name = "MegaCoNz"
- __type = "hoster"
- __version = "0.16"
-
- __pattern = r'https?://(\w+\.)?mega\.co\.nz/#!([\w!-]+)'
-
- __description = """Mega.co.nz hoster plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "ranan@pyload.org")]
-
- API_URL = "https://g.api.mega.co.nz/cs"
- FILE_SUFFIX = ".crypted"
-
-
- def b64_decode(self, data):
- data = data.replace("-", "+").replace("_", "/")
- return standard_b64decode(data + '=' * (-len(data) % 4))
-
-
- def getCipherKey(self, key):
- """ Construct the cipher key from the given data """
- a = array("I", key)
- key_array = array("I", [a[0] ^ a[4], a[1] ^ a[5], a[2] ^ a[6], a[3] ^ a[7]])
- return key_array
-
-
- def callApi(self, **kwargs):
- """ Dispatch a call to the api, see https://mega.co.nz/#developers """
- # generate a session id, no idea where to obtain elsewhere
- uid = random.randint(10 << 9, 10 ** 10)
-
- res = self.load(self.API_URL, get={'id': uid}, post=json_dumps([kwargs]))
- self.logDebug("Api Response: " + res)
- return json_loads(res)
-
-
- def decryptAttr(self, data, key):
- cbc = AES.new(self.getCipherKey(key), AES.MODE_CBC, "\0" * 16)
- attr = cbc.decrypt(self.b64_decode(data))
- self.logDebug("Decrypted Attr: " + attr)
- if not attr.startswith("MEGA"):
- self.fail(_("Decryption failed"))
-
- # Data is padded, 0-bytes must be stripped
- return json_loads(re.search(r'{.+?}', attr).group(0))
-
-
- def decryptFile(self, key):
- """ Decrypts the file at lastDownload` """
-
- # upper 64 bit of counter start
- n = key[16:24]
-
- # convert counter to long and shift bytes
- ctr = Counter.new(128, initial_value=long(n.encode("hex"), 16) << 64)
- cipher = AES.new(self.getCipherKey(key), AES.MODE_CTR, counter=ctr)
-
- self.pyfile.setStatus("decrypting")
-
- file_crypted = self.lastDownload
- file_decrypted = file_crypted.rsplit(self.FILE_SUFFIX)[0]
-
- try:
- f = open(file_crypted, "rb")
- df = open(file_decrypted, "wb")
- except IOError, e:
- self.fail(str(e))
-
- # TODO: calculate CBC-MAC for checksum
-
- size = 2 ** 15 # buffer size, 32k
- while True:
- buf = f.read(size)
- if not buf:
- break
-
- df.write(cipher.decrypt(buf))
-
- f.close()
- df.close()
- remove(file_crypted)
-
- self.lastDownload = file_decrypted
-
-
- def process(self, pyfile):
- key = None
-
- # match is guaranteed because plugin was chosen to handle url
- node = re.match(self.__pattern, pyfile.url).group(2)
- if "!" in node:
- node, key = node.split("!")
-
- self.logDebug("File id: %s | Key: %s" % (node, key))
-
- if not key:
- self.fail(_("No file key provided in the URL"))
-
- # g is for requesting a download url
- # this is similar to the calls in the mega js app, documentation is very bad
- dl = self.callApi(a="g", g=1, p=node, ssl=1)[0]
-
- if "e" in dl:
- e = dl['e']
- # ETEMPUNAVAIL (-18): Resource temporarily not available, please try again later
- if e == -18:
- self.retry()
- else:
- self.fail(_("Error code:") + e)
-
- # TODO: map other error codes, e.g
- # EACCESS (-11): Access violation (e.g., trying to write to a read-only share)
-
- key = self.b64_decode(key)
- attr = self.decryptAttr(dl['at'], key)
-
- pyfile.name = attr['n'] + self.FILE_SUFFIX
-
- self.req.http.c.setopt(SSL_CIPHER_LIST, "RC4-MD5:DEFAULT")
-
- self.download(dl['g'])
- self.decryptFile(key)
-
- # Everything is finished and final name can be set
- pyfile.name = attr['n']
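As a worked illustration of the key handling in MegaCoNz above: the 256-bit node key is folded into a 128-bit AES key by XOR-ing its two halves (getCipherKey), and the CTR mode counter starts from key bytes 16..24 shifted into the upper half of a 128-bit value (decryptFile). A minimal sketch of those two steps, assuming key is the 32-byte string produced by b64_decode; helper names are hypothetical:

from array import array
from Crypto.Util import Counter  # pycrypto, as used by the plugin

def fold_key(key):
    # XOR the two 128-bit halves of the 256-bit node key -> AES-128 key words
    a = array("I", key)
    return array("I", [a[0] ^ a[4], a[1] ^ a[5], a[2] ^ a[6], a[3] ^ a[7]])

def make_counter(key):
    # Bytes 16..24 hold the upper 64 bits of the CTR start value
    n = key[16:24]
    return Counter.new(128, initial_value=long(n.encode("hex"), 16) << 64)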
diff --git a/pyload/plugins/hoster/MegaDebridEu.py b/pyload/plugins/hoster/MegaDebridEu.py
deleted file mode 100644
index 5eac2a41d..000000000
--- a/pyload/plugins/hoster/MegaDebridEu.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urllib import unquote_plus
-
-from pyload.utils import json_loads
-from pyload.plugins.Hoster import Hoster
-
-
-class MegaDebridEu(Hoster):
- __name = "MegaDebridEu"
- __type = "hoster"
- __version = "0.40"
-
- __pattern = r'^https?://(?:w{3}\d+\.mega-debrid\.eu|\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/download/file/[^/]+/.+$'
-
- __description = """mega-debrid.eu hoster plugin"""
- __license = "GPLv3"
- __authors = [("D.Ducatel", "dducatel@je-geek.fr")]
-
-
- API_URL = "https://www.mega-debrid.eu/api.php"
-
-
- def getFilename(self, url):
- try:
- return unquote_plus(url.rsplit("/", 1)[1])
- except IndexError:
- return ""
-
-
- def process(self, pyfile):
- if re.match(self.__pattern, pyfile.url):
- new_url = pyfile.url
- elif not self.account:
- self.exitOnFail("Please enter your %s account or deactivate this plugin" % "Mega-debrid.eu")
- else:
- if not self.connectToApi():
- self.exitOnFail("Unable to connect to Mega-debrid.eu")
-
- self.logDebug("Old URL: %s" % pyfile.url)
- new_url = self.debridLink(pyfile.url)
- self.logDebug("New URL: " + new_url)
-
- filename = self.getFilename(new_url)
- if filename != "":
- pyfile.name = filename
- self.download(new_url, disposition=True)
-
-
- def connectToApi(self):
- """
- Connect to the mega-debrid.eu API
- Return True on success
- """
- user, data = self.account.selectAccount()
- jsonResponse = self.load(self.API_URL,
- get={'action': 'connectUser', 'login': user, 'password': data['password']})
- res = json_loads(jsonResponse)
-
- if res['response_code'] == "ok":
- self.token = res['token']
- return True
- else:
- return False
-
-
- def debridLink(self, linkToDebrid):
- """
- Debrid (unrestrict) a link
- Return the debrided link on success; abort via exitOnFail otherwise
- """
- jsonResponse = self.load(self.API_URL, get={'action': 'getLink', 'token': self.token},
- post={"link": linkToDebrid})
- res = json_loads(jsonResponse)
-
- if res['response_code'] == "ok":
- debridedLink = res['debridLink'][1:-1]
- return debridedLink
- else:
- self.exitOnFail("Unable to debrid %s" % linkToDebrid)
-
-
- def exitOnFail(self, msg):
- """
- Exit the plugin on failure
- and log the reason for it
- """
- if self.getConfig("unloadFailing"):
- self.logError(_(msg))
- self.resetAccount()
- else:
- self.fail(_(msg))
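To make the two-step flow in connectToApi/debridLink above easier to follow: the action=connectUser call yields a token, which is then passed to action=getLink together with the original link. A hedged sketch of that flow with the Python 2 standard library (the parameter names and the quote-stripping of debridLink come from the plugin; everything else, including the helper names, is an assumption):

import json
from urllib import urlencode, urlopen  # Python 2

API_URL = "https://www.mega-debrid.eu/api.php"

def connect(login, password):
    # Step 1: action=connectUser -> {'response_code': 'ok', 'token': ...}
    qs = urlencode({'action': 'connectUser', 'login': login, 'password': password})
    res = json.loads(urlopen(API_URL + "?" + qs).read())
    return res['token'] if res.get('response_code') == "ok" else None

def debrid(token, link):
    # Step 2: action=getLink with the link POSTed -> {'response_code': 'ok', 'debridLink': '"..."'}
    qs = urlencode({'action': 'getLink', 'token': token})
    res = json.loads(urlopen(API_URL + "?" + qs, urlencode({'link': link})).read())
    return res['debridLink'][1:-1] if res.get('response_code') == "ok" else None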
diff --git a/pyload/plugins/hoster/MegaFilesSe.py b/pyload/plugins/hoster/MegaFilesSe.py
deleted file mode 100644
index d9a2b31b9..000000000
--- a/pyload/plugins/hoster/MegaFilesSe.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class MegaFilesSe(DeadHoster):
- __name = "MegaFilesSe"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?megafiles\.se/\w{12}'
-
- __description = """MegaFiles.se hoster plugin"""
- __license = "GPLv3"
- __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
-
-
-getInfo = create_getInfo(MegaFilesSe)
diff --git a/pyload/plugins/hoster/MegaRapidCz.py b/pyload/plugins/hoster/MegaRapidCz.py
deleted file mode 100644
index ed0798f78..000000000
--- a/pyload/plugins/hoster/MegaRapidCz.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pycurl import HTTPHEADER
-
-from pyload.network.RequestFactory import getRequest
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, parseFileInfo
-
-
-def getInfo(urls):
- h = getRequest()
- h.c.setopt(HTTPHEADER,
- ["Accept: text/html",
- "User-Agent: Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0"])
-
- for url in urls:
- html = h.load(url, decode=True)
- yield parseFileInfo(MegaRapidCz, url, html)
-
-
-class MegaRapidCz(SimpleHoster):
- __name = "MegaRapidCz"
- __type = "hoster"
- __version = "0.54"
-
- __pattern = r'http://(?:www\.)?(share|mega)rapid\.cz/soubor/\d+/.+'
-
- __description = """MegaRapid.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("MikyWoW", "mikywow@seznam.cz"),
- ("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'<h1[^>]*><span[^>]*>(?:<a[^>]*>)?(?P<N>[^<]+)'
- SIZE_PATTERN = r'<td class="i">Velikost:</td>\s*<td class="h"><strong>\s*(?P<S>[\d.,]+) (?P<U>[\w^_]+)</strong></td>'
- OFFLINE_PATTERN = ur'Nastala chyba 404|Soubor byl smazán'
-
- FORCE_CHECK_TRAFFIC = True
-
- LINK_PATTERN = r'<a href="([^"]+)" title="Stahnout">([^<]+)</a>'
- ERR_LOGIN_PATTERN = ur'<div class="error_div"><strong>Stahování je přístupné pouze přihlášeným uživatelům'
- ERR_CREDIT_PATTERN = ur'<div class="error_div"><strong>Stahování zdarma je možné jen přes náš'
-
-
- def setup(self):
- self.chunkLimit = 1
-
-
- def handlePremium(self):
- try:
- self.html = self.load(self.pyfile.url, decode=True)
- except BadHeader, e:
- self.account.relogin(self.user)
- self.retry(wait_time=60, reason=str(e))
-
- m = re.search(self.LINK_PATTERN, self.html)
- if m:
- link = m.group(1)
- self.logDebug("Premium link: %s" % link)
- self.download(link, disposition=True)
- else:
- if re.search(self.ERR_LOGIN_PATTERN, self.html):
- self.relogin(self.user)
- self.retry(wait_time=60, reason=_("User login failed"))
- elif re.search(self.ERR_CREDIT_PATTERN, self.html):
- self.fail(_("Not enough credit left"))
- else:
- self.fail(_("Download link not found"))
diff --git a/pyload/plugins/hoster/MegacrypterCom.py b/pyload/plugins/hoster/MegacrypterCom.py
deleted file mode 100644
index 0397437ec..000000000
--- a/pyload/plugins/hoster/MegacrypterCom.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.utils import json_loads, json_dumps
-
-from pyload.plugins.hoster.MegaCoNz import MegaCoNz
-
-
-class MegacrypterCom(MegaCoNz):
- __name = "MegacrypterCom"
- __type = "hoster"
- __version = "0.21"
-
- __pattern = r'(https?://\w{0,10}\.?megacrypter\.com/[\w!-]+)'
-
- __description = """Megacrypter.com decrypter plugin"""
- __license = "GPLv3"
- __authors = [("GonzaloSR", "gonzalo@gonzalosr.com")]
-
-
- API_URL = "http://megacrypter.com/api"
- FILE_SUFFIX = ".crypted"
-
-
- def callApi(self, **kwargs):
- """ Dispatch a call to the api, see megacrypter.com/api_doc """
- self.logDebug("JSON request: " + json_dumps(kwargs))
- res = self.load(self.API_URL, post=json_dumps(kwargs))
- self.logDebug("API Response: " + res)
- return json_loads(res)
-
-
- def process(self, pyfile):
- # match is guaranteed because plugin was chosen to handle url
- node = re.match(self.__pattern, pyfile.url).group(1)
-
- # get Mega.co.nz link info
- info = self.callApi(link=node, m="info")
-
- # get crypted file URL
- dl = self.callApi(link=node, m="dl")
-
- # TODO: map error codes, implement password protection
- # if info['pass'] is True:
- # crypted_file_key, md5_file_key = info['key'].split("#")
-
- key = self.b64_decode(info['key'])
-
- pyfile.name = info['name'] + self.FILE_SUFFIX
-
- self.download(dl['url'])
- self.decryptFile(key)
-
- # Everything is finished and final name can be set
- pyfile.name = info['name']
diff --git a/pyload/plugins/hoster/MegareleaseOrg.py b/pyload/plugins/hoster/MegareleaseOrg.py
deleted file mode 100644
index c38396ef9..000000000
--- a/pyload/plugins/hoster/MegareleaseOrg.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class MegareleaseOrg(DeadHoster):
- __name = "MegareleaseOrg"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'https?://(?:www\.)?megarelease\.org/\w{12}'
-
- __description = """Megarelease.org hoster plugin"""
- __license = "GPLv3"
- __authors = [("derek3x", "derek3x@vmail.me"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(MegareleaseOrg)
diff --git a/pyload/plugins/hoster/MegasharesCom.py b/pyload/plugins/hoster/MegasharesCom.py
deleted file mode 100644
index e04f8f8dc..000000000
--- a/pyload/plugins/hoster/MegasharesCom.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import time
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class MegasharesCom(SimpleHoster):
- __name = "MegasharesCom"
- __type = "hoster"
- __version = "0.27"
-
- __pattern = r'http://(?:www\.)?(d\d{2}\.)?megashares\.com/((index\.php)?\?d\d{2}=|dl/)\w+'
-
- __description = """Megashares.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'<h1 class="black xxl"[^>]*title="(?P<N>[^"]+)">'
- SIZE_PATTERN = r'<strong><span class="black">Filesize:</span></strong> (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
- OFFLINE_PATTERN = r'<dd class="red">(Invalid Link Request|Link has been deleted|Invalid link)'
-
- LINK_PATTERN = r'<div id="show_download_button_%d"[^>]*>\s*<a href="([^"]+)">'
-
- PASSPORT_LEFT_PATTERN = r'Your Download Passport is: <[^>]*>(\w+).*?You have.*?<[^>]*>.*?([\d.]+) (\w+)'
- PASSPORT_RENEW_PATTERN = r'(\d+):<strong>(\d+)</strong>:<strong>(\d+)</strong>'
- REACTIVATE_NUM_PATTERN = r'<input[^>]*id="random_num" value="(\d+)" />'
- REACTIVATE_PASSPORT_PATTERN = r'<input[^>]*id="passport_num" value="(\w+)" />'
- REQUEST_URI_PATTERN = r'var request_uri = "([^"]+)";'
- NO_SLOTS_PATTERN = r'<dd class="red">All download slots for this link are currently filled'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = self.premium
-
-
- def handlePremium(self):
- self.handleDownload(True)
-
-
- def handleFree(self):
- if self.NO_SLOTS_PATTERN in self.html:
- self.retry(wait_time=5 * 60)
-
- m = re.search(self.REACTIVATE_PASSPORT_PATTERN, self.html)
- if m:
- passport_num = m.group(1)
- request_uri = re.search(self.REQUEST_URI_PATTERN, self.html).group(1)
-
- for _i in xrange(5):
- random_num = re.search(self.REACTIVATE_NUM_PATTERN, self.html).group(1)
-
- verifyinput = self.decryptCaptcha("http://d01.megashares.com/index.php",
- get={'secgfx': "gfx", 'random_num': random_num})
-
- self.logInfo(_("Reactivating passport %s: %s %s") % (passport_num, random_num, verifyinput))
-
- res = self.load("http://d01.megashares.com%s" % request_uri,
- get={'rs' : "check_passport_renewal",
- 'rsargs[]': verifyinput,
- 'rsargs[]': random_num,
- 'rsargs[]': passport_num,
- 'rsargs[]': "replace_sec_pprenewal",
- 'rsrnd[]' : str(int(time() * 1000))})
-
- if 'Thank you for reactivating your passport.' in res:
- self.correctCaptcha()
- self.retry()
- else:
- self.invalidCaptcha()
- else:
- self.fail(_("Failed to reactivate passport"))
-
- m = re.search(self.PASSPORT_RENEW_PATTERN, self.html)
- if m:
- hours, minutes, seconds = [int(x) for x in m.groups()]
- renew = hours * 3600 + minutes * 60 + seconds
- self.logDebug("Waiting %d seconds for a new passport" % renew)
- self.retry(wait_time=renew, reason=_("Passport renewal"))
-
- # Check traffic left on passport
- m = re.search(self.PASSPORT_LEFT_PATTERN, self.html, re.M | re.S)
- if m is None:
- self.fail(_("Passport not found"))
-
- self.logInfo(_("Download passport: %s") % m.group(1))
- data_left = float(m.group(2)) * 1024 ** {'B': 0, 'KB': 1, 'MB': 2, 'GB': 3}[m.group(3)]
- self.logInfo(_("Data left: %s %s (%d MB needed)") % (m.group(2), m.group(3), self.pyfile.size / 1048576))
-
- if not data_left:
- self.retry(wait_time=600, reason=_("Passport renewal"))
-
- self.handleDownload(False)
-
-
- def handleDownload(self, premium=False):
- # Find download link;
- m = re.search(self.LINK_PATTERN % (1 if premium else 2), self.html)
- msg = _('%s download URL' % ('Premium' if premium else 'Free'))
- if m is None:
- self.error(msg)
-
- download_url = m.group(1)
- self.logDebug("%s: %s" % (msg, download_url))
- self.download(download_url)
-
-
-getInfo = create_getInfo(MegasharesCom)
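For reference, the passport check in handleFree above converts the "data left" figure to bytes with a unit-exponent lookup before comparing it against the file size. A tiny worked example of that conversion (to_bytes is a hypothetical helper, not part of the plugin):

def to_bytes(amount, unit):
    # "1.5 GB" -> 1.5 * 1024**3 = 1610612736.0 bytes
    return float(amount) * 1024 ** {'B': 0, 'KB': 1, 'MB': 2, 'GB': 3}[unit]

assert to_bytes("1.5", "GB") == 1.5 * 1024 ** 3
assert to_bytes("512", "MB") == 512 * 1024 ** 2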
diff --git a/pyload/plugins/hoster/MegauploadCom.py b/pyload/plugins/hoster/MegauploadCom.py
deleted file mode 100644
index 16c5b80b0..000000000
--- a/pyload/plugins/hoster/MegauploadCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class MegauploadCom(DeadHoster):
- __name = "MegauploadCom"
- __type = "hoster"
- __version = "0.31"
-
- __pattern = r'http://(?:www\.)?megaupload\.com/\?.*&?(d|v)=\w+'
-
- __description = """Megaupload.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org")]
-
-
-getInfo = create_getInfo(MegauploadCom)
diff --git a/pyload/plugins/hoster/MegavideoCom.py b/pyload/plugins/hoster/MegavideoCom.py
deleted file mode 100644
index ef4adab13..000000000
--- a/pyload/plugins/hoster/MegavideoCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class MegavideoCom(DeadHoster):
- __name = "MegavideoCom"
- __type = "hoster"
- __version = "0.21"
-
- __pattern = r'http://(?:www\.)?megavideo\.com/\?.*&?(d|v)=\w+'
-
- __description = """Megavideo.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de"),
- ("mkaay", "mkaay@mkaay.de")]
-
-
-getInfo = create_getInfo(MegavideoCom)
diff --git a/pyload/plugins/hoster/MovReelCom.py b/pyload/plugins/hoster/MovReelCom.py
deleted file mode 100644
index a3989b965..000000000
--- a/pyload/plugins/hoster/MovReelCom.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class MovReelCom(XFSHoster):
- __name = "MovReelCom"
- __type = "hoster"
- __version = "1.24"
-
- __pattern = r'http://(?:www\.)?movreel\.com/\w{12}'
-
- __description = """MovReel.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("JorisV83", "jorisv83-pyload@yahoo.com")]
-
-
- HOSTER_DOMAIN = "movreel.com"
-
- NAME_PATTERN = r'Filename: <b>(?P<N>.+?)<'
- SIZE_PATTERN = r'Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- LINK_PATTERN = r'<a href="([^"]+)">Download Link'
-
-
-getInfo = create_getInfo(MovReelCom)
diff --git a/pyload/plugins/hoster/MultishareCz.py b/pyload/plugins/hoster/MultishareCz.py
deleted file mode 100644
index 0a3f78cea..000000000
--- a/pyload/plugins/hoster/MultishareCz.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from random import random
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class MultishareCz(SimpleHoster):
- __name = "MultishareCz"
- __type = "hoster"
- __version = "0.35"
-
- __pattern = r'http://(?:www\.)?multishare\.cz/stahnout/(?P<ID>\d+).*'
-
- __description = """MultiShare.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- SIZE_REPLACEMENTS = [('&nbsp;', '')]
-
- MULTI_HOSTER = True
-
- INFO_PATTERN = ur'(?:<li>Název|Soubor): <strong>(?P<N>[^<]+)</strong><(?:/li><li|br)>Velikost: <strong>(?P<S>[^<]+)</strong>'
- OFFLINE_PATTERN = ur'<h1>Stáhnout soubor</h1><p><strong>Požadovaný soubor neexistuje.</strong></p>'
-
-
- def process(self, pyfile):
- msurl = re.match(self.__pattern, pyfile.url)
- if msurl:
- self.fileID = msurl.group('ID')
- self.html = self.load(pyfile.url, decode=True)
- self.getFileInfo()
-
- if self.premium:
- self.handlePremium()
- else:
- self.handleFree()
- else:
- self.handleOverriden()
-
-
- def handleFree(self):
- self.download("http://www.multishare.cz/html/download_free.php?ID=%s" % self.fileID)
-
-
- def handlePremium(self):
- if not self.checkCredit():
- self.logWarning(_("Not enough credit left to download file"))
- self.resetAccount()
-
- self.download("http://www.multishare.cz/html/download_premium.php?ID=%s" % self.fileID)
-
-
- def handleOverriden(self):
- if not self.premium:
- self.fail(_("Only premium users can download from other hosters"))
-
- self.html = self.load('http://www.multishare.cz/html/mms_ajax.php', post={"link": self.pyfile.url}, decode=True)
- self.getFileInfo()
-
- if not self.checkCredit():
- self.fail(_("Not enough credit left to download file"))
-
- url = "http://dl%d.mms.multishare.cz/html/mms_process.php" % round(random() * 10000 * random())
- params = {"u_ID": self.acc_info['u_ID'], "u_hash": self.acc_info['u_hash'], "link": self.pyfile.url}
- self.logDebug(url, params)
- self.download(url, get=params)
-
-
- def checkCredit(self):
- self.acc_info = self.account.getAccountInfo(self.user, True)
- self.logInfo(_("User %s has %i MB left") % (self.user, self.acc_info['trafficleft'] / 1024))
-
- return self.pyfile.size / 1024 <= self.acc_info['trafficleft']
-
-
-getInfo = create_getInfo(MultishareCz)
diff --git a/pyload/plugins/hoster/MyfastfileCom.py b/pyload/plugins/hoster/MyfastfileCom.py
deleted file mode 100644
index ebb9b19a4..000000000
--- a/pyload/plugins/hoster/MyfastfileCom.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import json_loads
-
-
-class MyfastfileCom(Hoster):
- __name = "MyfastfileCom"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'http://(?:www\.)?\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/dl/'
-
- __description = """Myfastfile.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
-
- def setup(self):
- self.chunkLimit = -1
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- if re.match(self.__pattern, pyfile.url):
- new_url = pyfile.url
- elif not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "Myfastfile.com")
- self.fail(_("No Myfastfile.com account provided"))
- else:
- self.logDebug("Original URL: %s" % pyfile.url)
- page = self.load('http://myfastfile.com/api.php',
- get={'user': self.user, 'pass': self.account.getAccountData(self.user)['password'],
- 'link': pyfile.url})
- self.logDebug("JSON data: " + page)
- page = json_loads(page)
- if page['status'] != 'ok':
- self.fail(_("Unable to unrestrict link"))
- new_url = page['link']
-
- if new_url != pyfile.url:
- self.logDebug("Unrestricted URL: " + new_url)
-
- self.download(new_url, disposition=True)
diff --git a/pyload/plugins/hoster/MyvideoDe.py b/pyload/plugins/hoster/MyvideoDe.py
deleted file mode 100644
index 5a8747dce..000000000
--- a/pyload/plugins/hoster/MyvideoDe.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import html_unescape
-
-
-class MyvideoDe(Hoster):
- __name = "MyvideoDe"
- __type = "hoster"
- __version = "0.90"
-
- __pattern = r'http://(?:www\.)?myvideo\.de/watch/'
-
- __description = """Myvideo.de hoster plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org")]
-
-
- def process(self, pyfile):
- self.pyfile = pyfile
- self.download_html()
- pyfile.name = self.get_file_name()
- self.download(self.get_file_url())
-
-
- def download_html(self):
- self.html = self.load(self.pyfile.url)
-
-
- def get_file_url(self):
- videoId = re.search(r"addVariable\('_videoid','(.*)'\);p.addParam\('quality'", self.html).group(1)
- videoServer = re.search("rel='image_src' href='(.*)thumbs/.*' />", self.html).group(1)
- file_url = videoServer + videoId + ".flv"
- return file_url
-
-
- def get_file_name(self):
- file_name_pattern = r"<h1 class='globalHd'>(.*)</h1>"
- return html_unescape(re.search(file_name_pattern, self.html).group(1).replace("/", "") + '.flv')
-
-
- def file_exists(self):
- self.download_html()
- self.load(str(self.pyfile.url), cookies=False, just_header=True)
- if self.req.lastEffectiveURL == "http://www.myvideo.de/":
- return False
- return True
diff --git a/pyload/plugins/hoster/NahrajCz.py b/pyload/plugins/hoster/NahrajCz.py
deleted file mode 100644
index df77d04ef..000000000
--- a/pyload/plugins/hoster/NahrajCz.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class NahrajCz(DeadHoster):
- __name = "NahrajCz"
- __type = "hoster"
- __version = "0.21"
-
- __pattern = r'http://(?:www\.)?nahraj\.cz/content/download/.+'
-
- __description = """Nahraj.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(NahrajCz)
diff --git a/pyload/plugins/hoster/NarodRu.py b/pyload/plugins/hoster/NarodRu.py
deleted file mode 100644
index 9ef0a82c6..000000000
--- a/pyload/plugins/hoster/NarodRu.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from random import random
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class NarodRu(SimpleHoster):
- __name = "NarodRu"
- __type = "hoster"
- __version = "0.11"
-
- __pattern = r'http://(?:www\.)?narod(\.yandex)?\.ru/(disk|start/\d+\.\w+-narod\.yandex\.ru)/(?P<ID>\d+)/.+'
-
- __description = """Narod.ru hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<dt class="name">(?:<[^<]*>)*(?P<N>[^<]+)</dt>'
- SIZE_PATTERN = r'<dd class="size">(?P<S>\d[^<]*)</dd>'
- OFFLINE_PATTERN = r'<title>404</title>|Файл удален с сервиса|Закончился срок хранения файла\.'
-
- SIZE_REPLACEMENTS = [(u'КБ', 'KB'), (u'МБ', 'MB'), (u'ГБ', 'GB')]
- URL_REPLACEMENTS = [("narod.yandex.ru/", "narod.ru/"),
- (r"/start/\d+\.\w+-narod\.yandex\.ru/(\d{6,15})/\w+/(\w+)", r"/disk/\1/\2")]
-
- CAPTCHA_PATTERN = r'<number url="(.*?)">(\w+)</number>'
- LINK_PATTERN = r'<a class="h-link" rel="yandex_bar" href="(.+?)">'
-
-
- def handleFree(self):
- for _i in xrange(5):
- self.html = self.load('http://narod.ru/disk/getcapchaxml/?rnd=%d' % int(random() * 777))
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m is None:
- self.error(_("Captcha"))
- post_data = {"action": "sendcapcha"}
- captcha_url, post_data['key'] = m.groups()
- post_data['rep'] = self.decryptCaptcha(captcha_url)
-
- self.html = self.load(self.pyfile.url, post=post_data, decode=True)
- m = re.search(self.LINK_PATTERN, self.html)
- if m:
- url = 'http://narod.ru' + m.group(1)
- self.correctCaptcha()
- break
- elif u'<b class="error-msg"><strong>ОшОблОсь?</strong>' in self.html:
- self.invalidCaptcha()
- else:
- self.error(_("Download link"))
- else:
- self.fail(_("No valid captcha code entered"))
-
- self.download(url)
-
-
-getInfo = create_getInfo(NarodRu)
diff --git a/pyload/plugins/hoster/NetloadIn.py b/pyload/plugins/hoster/NetloadIn.py
deleted file mode 100644
index 9437e1c11..000000000
--- a/pyload/plugins/hoster/NetloadIn.py
+++ /dev/null
@@ -1,294 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-from time import sleep, time
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Hoster import Hoster
-from pyload.plugins.Plugin import chunks
-from pyload.plugins.captcha import ReCaptcha
-
-
-def getInfo(urls):
- ## returns a list of tuples (name, size (in bytes), status (see FileDatabase), url)
-
- apiurl = "http://api.netload.in/info.php"
- id_regex = re.compile(NetloadIn.__pattern)
- urls_per_query = 80
-
- for chunk in chunks(urls, urls_per_query):
- ids = ""
- for url in chunk:
- match = id_regex.search(url)
- if match:
- ids = ids + match.group(1) + ";"
-
- api = getURL(apiurl,
- get={'auth' : "Zf9SnQh9WiReEsb18akjvQGqT0I830e8",
- 'bz' : 1,
- 'md5' : 1,
- 'file_id': ids},
- decode=True)
-
- if api is None or len(api) < 10:
- self.logDebug("Prefetch failed")
- return
-
- if api.find("unknown_auth") >= 0:
- self.logDebug("Outdated auth code")
- return
-
- result = []
-
- for i, r in enumerate(api.splitlines()):
- try:
- tmp = r.split(";")
-
- try:
- size = int(tmp[2])
- except Exception:
- size = 0
-
- result.append((tmp[1], size, 2 if tmp[3] == "online" else 1, chunk[i] ))
-
- except Exception:
- self.logDebug("Error while processing response: %s" % r)
-
- yield result
-
-
-class NetloadIn(Hoster):
- __name = "NetloadIn"
- __type = "hoster"
- __version = "0.47"
-
- __pattern = r'https?://(?:[^/]*\.)?netload\.in/(?:datei(.*?)(?:\.htm|/)|index\.php?id=10&file_id=)'
-
- __description = """Netload.in hoster plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org"),
- ("RaNaN", "ranan@pyload.org"),
- ("Gregy", "gregy@gregy.cz")]
-
-
- def setup(self):
- self.multiDL = self.resumeDownload = self.premium
-
-
- def process(self, pyfile):
- self.url = pyfile.url
-
- self.prepare()
-
- pyfile.setStatus("downloading")
-
- self.proceed(self.url)
-
-
- def prepare(self):
- self.download_api_data()
-
- if self.api_data and self.api_data['filename']:
- self.pyfile.name = self.api_data['filename']
-
- if self.premium:
- self.logDebug("Use Premium Account")
-
- settings = self.load("http://www.netload.in/index.php", get={'id': 2, 'lang': "en"})
-
- if '<option value="2" selected="selected">Direkter Download' in settings:
- self.logDebug("Using direct download")
- return True
- else:
- self.logDebug("Direct downloads not enabled. Parsing html for a download URL")
-
- if self.download_html():
- return True
- else:
- self.fail(_("Failed"))
- return False
-
-
- def download_api_data(self, n=0):
- url = self.url
- id_regex = re.compile(self.__pattern)
- match = id_regex.search(url)
-
- if match:
- #normalize url
- self.url = 'http://www.netload.in/datei%s.htm' % match.group(1)
- self.logDebug("URL: %s" % self.url)
- else:
- self.api_data = False
- return
-
- apiurl = "http://api.netload.in/info.php"
- html = self.load(apiurl, cookies=False,
- get={"file_id": match.group(1), "auth": "Zf9SnQh9WiReEsb18akjvQGqT0I830e8", "bz": "1",
- "md5": "1"}, decode=True).strip()
- if not html and n <= 3:
- sleep(0.2)
- self.download_api_data(n + 1)
- return
-
- self.logDebug("APIDATA: " + html)
-
- self.api_data = {}
-
- if html and ";" in html and html not in ("unknown file_data", "unknown_server_data", "No input file specified."):
- lines = html.split(";")
- self.api_data['exists'] = True
- self.api_data['fileid'] = lines[0]
- self.api_data['filename'] = lines[1]
- self.api_data['size'] = lines[2]
- self.api_data['status'] = lines[3]
-
- if self.api_data['status'] == "online":
- self.api_data['checksum'] = lines[4].strip()
- else:
- self.api_data = False # check manually since api data is useless sometimes
-
- if lines[0] == lines[1] and lines[2] == "0": # useless api data
- self.api_data = False
- else:
- self.api_data = False
-
-
- def final_wait(self, page):
- wait_time = self.get_wait_time(page)
-
- self.setWait(wait_time)
-
- self.logDebug("Final wait %d seconds" % wait_time)
-
- self.wait()
-
- self.url = self.get_file_url(page)
-
-
- def check_free_wait(self, page):
- if ">An access request has been made from IP address <" in page:
- self.wantReconnect = True
- self.setWait(self.get_wait_time(page) or 30)
- self.wait()
- return True
- else:
- return False
-
-
- def download_html(self):
- page = self.load(self.url, decode=True)
-
- if "/share/templates/download_hddcrash.tpl" in page:
- self.logError(_("Netload HDD Crash"))
- self.fail(_("File temporarily not available"))
-
- if not self.api_data:
- self.logDebug("API Data may be useless, get details from html page")
-
- if "* The file was deleted" in page:
- self.offline()
-
- name = re.search(r'class="dl_first_filename">([^<]+)', page, re.M)
- # use the filename found in the page only if it is not truncated
- if name:
- name = name.group(1).strip()
- if not name.endswith(".."):
- self.pyfile.name = name
-
- captchawaited = False
-
- for i in xrange(5):
- if not page:
- page = self.load(self.url)
- t = time() + 30
-
- if "/share/templates/download_hddcrash.tpl" in page:
- self.logError(_("Netload HDD Crash"))
- self.fail(_("File temporarily not available"))
-
- self.logDebug("Try number %d " % i)
-
- if ">Your download is being prepared.<" in page:
- self.logDebug("We will prepare your download")
- self.final_wait(page)
- return True
-
- self.logDebug("Trying to find captcha")
-
- try:
- url_captcha_html = re.search(r'(index.php\?id=10&amp;.*&amp;captcha=1)', page).group(1).replace("amp;", "")
-
- except Exception, e:
- self.logDebug("Exception during Captcha regex: %s" % e.message)
- page = None
-
- else:
- url_captcha_html = urljoin("http://netload.in/", url_captcha_html)
- break
-
- self.html = self.load(url_captcha_html)
-
- recaptcha = ReCaptcha(self)
-
- for _i in xrange(5):
- challenge, response = recaptcha.challenge()
-
- response_page = self.load("http://www.netload.in/index.php?id=10",
- post={'captcha_check' : '1',
- 'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field' : response,
- 'file_id' : self.api_data['fileid'],
- 'Download_Next' : ''})
- if "Orange_Link" in response_page:
- break
-
- if self.check_free_wait(response_page):
- self.logDebug("Had to wait for next free slot, trying again")
- return self.download_html()
-
- else:
- download_url = self.get_file_url(response_page)
- self.logDebug("Download URL after get_file: " + download_url)
- if not download_url.startswith("http://"):
- self.error("download url: %s" % download_url)
- self.wait()
-
- self.url = download_url
- return True
-
-
- def get_file_url(self, page):
- try:
- file_url_pattern = r'<a class="Orange_Link" href="(http://.+)".?>Or click here'
- attempt = re.search(file_url_pattern, page)
- if attempt is not None:
- return attempt.group(1)
- else:
- self.logDebug("Backup try for final link")
- file_url_pattern = r'<a href="(.+)" class="Orange_Link">Click here'
- attempt = re.search(file_url_pattern, page)
- return "http://netload.in/" + attempt.group(1)
-
- except Exception, e:
- self.logDebug("Getting final link failed", e.message)
- return None
-
-
- def get_wait_time(self, page):
- return int(re.search(r"countdown\((.+),'change\(\)'\)", page).group(1)) / 100
-
-
- def proceed(self, url):
- self.download(url, disposition=True)
-
- check = self.checkDownload({'empty' : re.compile(r'^$'),
- 'offline': re.compile("The file was deleted")})
- if check == "empty":
- self.logInfo(_("Downloaded File was empty"))
- self.retry()
-
- elif check == "offline":
- self.offline()
diff --git a/pyload/plugins/hoster/NosuploadCom.py b/pyload/plugins/hoster/NosuploadCom.py
deleted file mode 100644
index 60b2b5b4e..000000000
--- a/pyload/plugins/hoster/NosuploadCom.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class NosuploadCom(XFSHoster):
- __name = "NosuploadCom"
- __type = "hoster"
- __version = "0.31"
-
- __pattern = r'http://(?:www\.)?nosupload\.com/\?d=\w{12}'
-
- __description = """Nosupload.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("igel", "igelkun@myopera.com")]
-
-
- HOSTER_DOMAIN = "nosupload.com"
-
- SIZE_PATTERN = r'<p><strong>Size:</strong> (?P<S>[\d.,]+) (?P<U>[\w^_]+)</p>'
- LINK_PATTERN = r'<a class="select" href="(http://.+?)">Download</a>'
- WAIT_PATTERN = r'Please wait.*?>(\d+)</span>'
-
-
- def getDownloadLink(self):
- # stage1: press the "Free Download" button
- data = self.getPostParameters()
- self.html = self.load(self.pyfile.url, post=data, ref=True, decode=True)
-
- # stage2: wait some time and press the "Download File" button
- data = self.getPostParameters()
- wait_time = re.search(self.WAIT_PATTERN, self.html, re.M | re.S).group(1)
- self.logDebug("Hoster told us to wait %s seconds" % wait_time)
- self.wait(wait_time)
- self.html = self.load(self.pyfile.url, post=data, ref=True, decode=True)
-
- # stage3: get the download link
- return re.search(self.LINK_PATTERN, self.html, re.S).group(1)
-
-
-getInfo = create_getInfo(NosuploadCom)
diff --git a/pyload/plugins/hoster/NovafileCom.py b/pyload/plugins/hoster/NovafileCom.py
deleted file mode 100644
index b51ffce01..000000000
--- a/pyload/plugins/hoster/NovafileCom.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://novafile.com/vfun4z6o2cit
-# http://novafile.com/s6zrr5wemuz4
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class NovafileCom(XFSHoster):
- __name = "NovafileCom"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?novafile\.com/\w{12}'
-
- __description = """Novafile.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- HOSTER_DOMAIN = "novafile.com"
-
- SIZE_PATTERN = r'<div class="size">(?P<S>.+?)</div>'
- ERROR_PATTERN = r'class="alert[^"]*alert-separate"[^>]*>\s*(?:<p>)?(.*?)\s*</'
- LINK_PATTERN = r'<a href="(http://s\d+\.novafile\.com/.*?)" class="btn btn-green">Download File</a>'
- WAIT_PATTERN = r'<p>Please wait <span id="count"[^>]*>(\d+)</span> seconds</p>'
-
-
-getInfo = create_getInfo(NovafileCom)
diff --git a/pyload/plugins/hoster/NowDownloadSx.py b/pyload/plugins/hoster/NowDownloadSx.py
deleted file mode 100644
index 225b74f32..000000000
--- a/pyload/plugins/hoster/NowDownloadSx.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-from pyload.utils import fixup
-
-
-class NowDownloadSx(SimpleHoster):
- __name = "NowDownloadSx"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?nowdownload\.(at|ch|co|eu|sx)/(dl/|download\.php\?id=)\w+'
-
- __description = """NowDownload.sx hoster plugin"""
- __license = "GPLv3"
- __authors = [("godofdream", "soilfiction@gmail.com"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- INFO_PATTERN = r'Downloading</span> <br> (?P<N>.*) (?P<S>[\d.,]+) (?P<U>[\w^_]+) </h4>'
- OFFLINE_PATTERN = r'>This file does not exist'
-
- TOKEN_PATTERN = r'"(/api/token\.php\?token=\w+)"'
- CONTINUE_PATTERN = r'"(/dl2/\w+/\w+)"'
- WAIT_PATTERN = r'\.countdown\(\{until: \+(\d+),'
- LINK_PATTERN = r'(http://s\d+\.coolcdn\.info/nowdownload/.+?)["\']'
-
- NAME_REPLACEMENTS = [("&#?\w+;", fixup), (r'<[^>]*>', '')]
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
- self.chunkLimit = -1
-
-
- def handleFree(self):
- tokenlink = re.search(self.TOKEN_PATTERN, self.html)
- continuelink = re.search(self.CONTINUE_PATTERN, self.html)
- if tokenlink is None or continuelink is None:
- self.error()
-
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- wait = int(m.group(1))
- else:
- wait = 60
-
- baseurl = "http://www.nowdownload.at"
- self.html = self.load(baseurl + str(tokenlink.group(1)))
- self.wait(wait)
-
- self.html = self.load(baseurl + str(continuelink.group(1)))
-
- url = re.search(self.LINK_PATTERN, self.html)
- if url is None:
- self.error(_("Download link not found"))
-
- self.download(str(url.group(1)))
-
-
-getInfo = create_getInfo(NowDownloadSx)
diff --git a/pyload/plugins/hoster/NowVideoSx.py b/pyload/plugins/hoster/NowVideoSx.py
deleted file mode 100644
index 83a97e7ef..000000000
--- a/pyload/plugins/hoster/NowVideoSx.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class NowVideoSx(SimpleHoster):
- __name = "NowVideoSx"
- __type = "hoster"
- __version = "0.07"
-
- __pattern = r'http://(?:www\.)?nowvideo\.(at|ch|co|eu|sx)/(video|mobile/#/videos)/(?P<ID>\w+)'
-
- __description = """NowVideo.sx hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- URL_REPLACEMENTS = [(__pattern + ".*", r'http://www.nowvideo.at/video/\g<ID>')]
-
- NAME_PATTERN = r'<h4>(?P<N>.+?)<'
- OFFLINE_PATTERN = r'>This file no longer exists'
-
- LINK_FREE_PATTERN = r'<source src="(.+?)"'
- LINK_PREMIUM_PATTERN = r'<div id="content_player" >\s*<a href="(.+?)"'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
-
-
- def handleFree(self):
- self.html = self.load("http://www.nowvideo.at/mobile/video.php", get={'id': self.info['pattern']['ID']})
-
- m = re.search(self.LINK_FREE_PATTERN, self.html)
- if m is None:
- self.error(_("Free download link not found"))
-
- self.download(m.group(1))
-
-
-getInfo = create_getInfo(NowVideoSx)
diff --git a/pyload/plugins/hoster/OboomCom.py b/pyload/plugins/hoster/OboomCom.py
deleted file mode 100644
index aa22ba41a..000000000
--- a/pyload/plugins/hoster/OboomCom.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# https://www.oboom.com/B7CYZIEB/10Mio.dat
-
-import re
-
-from pyload.utils import json_loads
-from pyload.plugins.Hoster import Hoster
-from pyload.plugins.internal.captcha import ReCaptcha
-
-
-class OboomCom(Hoster):
- __name = "OboomCom"
- __type = "hoster"
- __version = "0.30"
-
- __pattern = r'https?://(?:www\.)?oboom\.com/(#(id=|/)?)?(?P<ID>\w{8})'
-
- __description = """oboom.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("stanley", "stanley.foerster@gmail.com")]
-
-
- RECAPTCHA_KEY = "6LdqpO0SAAAAAJGHXo63HyalP7H4qlRs_vff0kJX"
-
-
- def setup(self):
- self.chunkLimit = 1
- self.multiDL = self.resumeDownload = self.premium
-
-
- def process(self, pyfile):
- self.pyfile.url.replace(".com/#id=", ".com/#")
- self.pyfile.url.replace(".com/#/", ".com/#")
- self.getFileId(self.pyfile.url)
- self.getSessionToken()
- self.getFileInfo(self.sessionToken, self.fileId)
- self.pyfile.name = self.fileName
- self.pyfile.size = self.fileSize
- if not self.premium:
- self.solveCaptcha()
- self.getDownloadTicket()
- self.download("https://%s/1.0/dlh" % self.downloadDomain, get={"ticket": self.downloadTicket, "http_errors": 0})
-
-
- def loadUrl(self, url, get=None):
- if get is None:
- get = dict()
- return json_loads(self.load(url, get, decode=True))
-
-
- def getFileId(self, url):
- self.fileId = re.match(OboomCom.__pattern, url).group('ID')
-
-
- def getSessionToken(self):
- if self.premium:
- accountInfo = self.account.getAccountInfo(self.user, True)
- if "session" in accountInfo:
- self.sessionToken = accountInfo['session']
- else:
- self.fail(_("Could not retrieve premium session"))
- else:
- apiUrl = "https://www.oboom.com/1.0/guestsession"
- result = self.loadUrl(apiUrl)
- if result[0] == 200:
- self.sessionToken = result[1]
- else:
- self.fail(_("Could not retrieve token for guest session. Error code: %s") % result[0])
-
-
- def solveCaptcha(self):
- recaptcha = ReCaptcha(self)
-
- for _i in xrange(5):
- challenge, response = recaptcha.challenge(self.RECAPTCHA_KEY)
- apiUrl = "https://www.oboom.com/1.0/download/ticket"
- params = {"recaptcha_challenge_field": challenge,
- "recaptcha_response_field": response,
- "download_id": self.fileId,
- "token": self.sessionToken}
- result = self.loadUrl(apiUrl, params)
-
- if result[0] == 200:
- self.downloadToken = result[1]
- self.downloadAuth = result[2]
- self.correctCaptcha()
- self.setWait(30)
- self.wait()
- break
-
- elif result[0] == 400:
- if result[1] == "incorrect-captcha-sol":
- self.invalidCaptcha()
- elif result[1] == "captcha-timeout":
- self.invalidCaptcha()
- elif result[1] == "forbidden":
- self.retry(5, 15 * 60, _("Service unavailable"))
-
- elif result[0] == 403:
- if result[1] == -1: # another download is running
- self.setWait(15 * 60)
- else:
- self.setWait(result[1], True)
- self.wait()
- self.retry(5)
- else:
- self.invalidCaptcha()
- self.fail(_("Received invalid captcha 5 times"))
-
-
- def getFileInfo(self, token, fileId):
- apiUrl = "https://api.oboom.com/1.0/info"
- params = {"token": token, "items": fileId, "http_errors": 0}
-
- result = self.loadUrl(apiUrl, params)
- if result[0] == 200:
- item = result[1][0]
- if item['state'] == "online":
- self.fileSize = item['size']
- self.fileName = item['name']
- else:
- self.offline()
- else:
- self.fail(_("Could not retrieve file info. Error code %s: %s") % (result[0], result[1]))
-
-
- def getDownloadTicket(self):
- apiUrl = "https://api.oboom.com/1/dl"
- params = {"item": self.fileId, "http_errors": 0}
- if self.premium:
- params['token'] = self.sessionToken
- else:
- params['token'] = self.downloadToken
- params['auth'] = self.downloadAuth
-
- result = self.loadUrl(apiUrl, params)
- if result[0] == 200:
- self.downloadDomain = result[1]
- self.downloadTicket = result[2]
- elif result[0] == 421:
- self.retry(wait_time=result[2] + 60, reason=_("Connection limit exceeded"))
- else:
- self.fail(_("Could not retrieve download ticket. Error code: %s") % result[0])
diff --git a/pyload/plugins/hoster/OneFichierCom.py b/pyload/plugins/hoster/OneFichierCom.py
deleted file mode 100644
index 9c645618c..000000000
--- a/pyload/plugins/hoster/OneFichierCom.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class OneFichierCom(SimpleHoster):
- __name = "OneFichierCom"
- __type = "hoster"
- __version = "0.74"
-
- __pattern = r'https?://(?:www\.)?(?:(?P<ID1>\w+)\.)?(?P<HOST>1fichier\.com|alterupload\.com|cjoint\.net|d(es)?fichiers\.com|dl4free\.com|megadl\.fr|mesfichiers\.org|piecejointe\.net|pjointe\.com|tenvoi\.com)(?:/\?(?P<ID2>\w+))?'
-
- __description = """1fichier.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es"),
- ("the-razer", "daniel_ AT gmx DOT net"),
- ("zoidberg", "zoidberg@mujmail.cz"),
- ("imclem", ""),
- ("stickell", "l.stickell@yahoo.it"),
- ("Elrick69", "elrick69[AT]rocketmail[DOT]com"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'>FileName :</td>\s*<td.*>(?P<N>.+?)<'
- SIZE_PATTERN = r'>Size :</td>\s*<td.*>(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- OFFLINE_PATTERN = r'File not found !\s*<'
-
- COOKIES = [("1fichier.com", "LG", "en")]
-
- WAIT_PATTERN = r'>You must wait (\d+)'
-
-
- def setup(self):
- self.multiDL = self.premium
- self.resumeDownload = True
-
-
- def handle(self, reconnect):
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- wait_time = int(m.group(1)) * 60
-
- self.wait(wait_time, reconnect)
- self.retry(reason="You have to wait been each free download")
-
- id = self.info['pattern']['ID1'] or self.info['pattern']['ID2']
- url, inputs = self.parseHtmlForm('action="https://1fichier.com/\?%s' % id)
-
- if not url:
- self.fail(_("Download link not found"))
-
- if "pass" in inputs:
- inputs['pass'] = self.getPassword()
-
- inputs['submit'] = "Download"
-
- self.download(url, post=inputs)
-
-
- def handleFree(self):
- return self.handle(True)
-
-
- def handlePremium(self):
- return self.handle(False)
-
-
-getInfo = create_getInfo(OneFichierCom)
diff --git a/pyload/plugins/hoster/OronCom.py b/pyload/plugins/hoster/OronCom.py
deleted file mode 100644
index 1b546ff54..000000000
--- a/pyload/plugins/hoster/OronCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class OronCom(DeadHoster):
- __name = "OronCom"
- __type = "hoster"
- __version = "0.14"
-
- __pattern = r'https?://(?:www\.)?oron\.com/\w{12}'
-
- __description = """Oron.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("chrox", "chrox@pyload.org"),
- ("DHMH", "DHMH@pyload.org")]
-
-
-getInfo = create_getInfo(OronCom)
diff --git a/pyload/plugins/hoster/OverLoadMe.py b/pyload/plugins/hoster/OverLoadMe.py
deleted file mode 100644
index 68f254a2e..000000000
--- a/pyload/plugins/hoster/OverLoadMe.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from random import randrange
-from urllib import unquote
-
-from pyload.utils import json_loads
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import parseFileSize
-
-
-class OverLoadMe(Hoster):
- __name = "OverLoadMe"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'https?://.*overload\.me.*'
-
- __description = """Over-Load.me hoster plugin"""
- __license = "GPLv3"
- __authors = [("marley", "marley@over-load.me")]
-
-
- def getFilename(self, url):
- try:
- name = unquote(url.rsplit("/", 1)[1])
- except IndexError:
- name = "Unknown_Filename..."
- if name.endswith("..."): #: incomplete filename, append random stuff
- name += "%s.tmp" % randrange(100, 999)
- return name
-
-
- def setup(self):
- self.chunkLimit = 5
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- if re.match(self.__pattern, pyfile.url):
- new_url = pyfile.url
- elif not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "Over-Load")
- self.fail(_("No Over-Load account provided"))
- else:
- self.logDebug("Old URL: %s" % pyfile.url)
- data = self.account.getAccountData(self.user)
-
- page = self.load("https://api.over-load.me/getdownload.php",
- get={"auth": data['password'], "link": pyfile.url})
- data = json_loads(page)
-
- self.logDebug("Returned Data: %s" % data)
-
- if data['error'] == 1:
- self.logWarning(data['msg'])
- self.tempOffline()
- else:
- if pyfile.name is not None and pyfile.name.endswith('.tmp') and data['filename']:
- pyfile.name = data['filename']
- pyfile.size = parseFileSize(data['filesize'])
- new_url = data['downloadlink']
-
- if self.getConfig("https"):
- new_url = new_url.replace("http://", "https://")
- else:
- new_url = new_url.replace("https://", "http://")
-
- if new_url != pyfile.url:
- self.logDebug("New URL: %s" % new_url)
-
- if pyfile.name.startswith("http") or pyfile.name.startswith("Unknown") or pyfile.name.endswith('..'):
- # only use when name wasn't already set
- pyfile.name = self.getFilename(new_url)
-
- self.download(new_url, disposition=True)
-
- check = self.checkDownload(
- {"error": "<title>An error occured while processing your request</title>"})
-
- if check == "error":
- # usually this download can safely be retried
- self.retry(wait_time=60, reason=_("An error occurred while generating link."))
diff --git a/pyload/plugins/hoster/PandaplaNet.py b/pyload/plugins/hoster/PandaplaNet.py
deleted file mode 100644
index d5dcf7ab3..000000000
--- a/pyload/plugins/hoster/PandaplaNet.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class PandaplaNet(DeadHoster):
- __name = "PandaplaNet"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?pandapla\.net/\w{12}'
-
- __description = """Pandapla.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
-
-
-getInfo = create_getInfo(PandaplaNet)
diff --git a/pyload/plugins/hoster/PornhostCom.py b/pyload/plugins/hoster/PornhostCom.py
deleted file mode 100644
index ce4de1844..000000000
--- a/pyload/plugins/hoster/PornhostCom.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-
-
-class PornhostCom(Hoster):
- __name = "PornhostCom"
- __type = "hoster"
- __version = "0.20"
-
- __pattern = r'http://(?:www\.)?pornhost\.com/(\d+/\d+\.html|\d+)'
-
- __description = """Pornhost.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de")]
-
-
- def process(self, pyfile):
- self.download_html()
- if not self.file_exists():
- self.offline()
-
- pyfile.name = self.get_file_name()
- self.download(self.get_file_url())
-
-
- # Old interface
- def download_html(self):
- url = self.pyfile.url
- self.html = self.load(url)
-
-
- def get_file_url(self):
- """ returns the absolute downloadable filepath
- """
- if not self.html:
- self.download_html()
-
- url = re.search(r'download this file</label>.*?<a href="(.*?)"', self.html)
- if url is None:
- url = re.search(r'"(http://dl\d+\.pornhost\.com/files/.*?/.*?/.*?/.*?/.*?/.*?\..*?)"', self.html)
- if url is None:
- url = re.search(r'width: 894px; height: 675px">.*?<img src="(.*?)"', self.html)
- if url is None:
- url = re.search(r'"http://file\d+\.pornhost\.com/\d+/.*?"',
- self.html) # TODO: fix this one since it doesn't match
-
- return url.group(1).strip()
-
-
- def get_file_name(self):
- if not self.html:
- self.download_html()
-
- name = re.search(r'<title>pornhost\.com - free file hosting with a twist - gallery(.*?)</title>', self.html)
- if name is None:
- name = re.search(r'id="url" value="http://www\.pornhost\.com/(.*?)/"', self.html)
- if name is None:
- name = re.search(r'<title>pornhost\.com - free file hosting with a twist -(.*?)</title>', self.html)
- if name is None:
- name = re.search(r'"http://file\d+\.pornhost\.com/.*?/(.*?)"', self.html)
-
- name = name.group(1).strip() + ".flv"
-
- return name
-
-
- def file_exists(self):
- """ returns True or False
- """
- if not self.html:
- self.download_html()
-
- if (re.search(r'gallery not found', self.html) is not None or
- re.search(r'You will be redirected to', self.html) is not None):
- return False
- else:
- return True
diff --git a/pyload/plugins/hoster/PornhubCom.py b/pyload/plugins/hoster/PornhubCom.py
deleted file mode 100644
index 74c3895db..000000000
--- a/pyload/plugins/hoster/PornhubCom.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-
-
-class PornhubCom(Hoster):
- __name = "PornhubCom"
- __type = "hoster"
- __version = "0.50"
-
- __pattern = r'http://(?:www\.)?pornhub\.com/view_video\.php\?viewkey=\w+'
-
- __description = """Pornhub.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de")]
-
-
- def process(self, pyfile):
- self.download_html()
- if not self.file_exists():
- self.offline()
-
- pyfile.name = self.get_file_name()
- self.download(self.get_file_url())
-
-
- def download_html(self):
- url = self.pyfile.url
- self.html = self.load(url)
-
-
- def get_file_url(self):
- """ returns the absolute downloadable filepath
- """
- if not self.html:
- self.download_html()
-
- url = "http://www.pornhub.com//gateway.php"
- video_id = self.pyfile.url.split('=')[-1]
- # thanks to jD team for this one v
- post_data = "\x00\x03\x00\x00\x00\x01\x00\x0c\x70\x6c\x61\x79\x65\x72\x43\x6f\x6e\x66\x69\x67\x00\x02\x2f\x31\x00\x00\x00\x44\x0a\x00\x00\x00\x03\x02\x00"
- post_data += chr(len(video_id))
- post_data += video_id
- post_data += "\x02\x00\x02\x2d\x31\x02\x00\x20"
- post_data += "add299463d4410c6d1b1c418868225f7"
-
- content = self.load(url, post=str(post_data))
-
- new_content = ""
- for x in content:
- if ord(x) < 32 or ord(x) > 176:
- new_content += '#'
- else:
- new_content += x
-
- content = new_content
-
- return re.search(r'flv_url.*(http.*?)##post_roll', content).group(1)
-
-
- def get_file_name(self):
- if not self.html:
- self.download_html()
-
- m = re.search(r'<title[^>]+>([^<]+) - ', self.html)
- if m:
- name = m.group(1)
- else:
- matches = re.findall('<h1>(.*?)</h1>', self.html)
- if len(matches) > 1:
- name = matches[1]
- else:
- name = matches[0]
-
- return name + '.flv'
-
-
- def file_exists(self):
- """ returns True or False
- """
- if not self.html:
- self.download_html()
-
- if re.search(r'This video is no longer in our database or is in conversion', self.html) is not None:
- return False
- else:
- return True
diff --git a/pyload/plugins/hoster/PotloadCom.py b/pyload/plugins/hoster/PotloadCom.py
deleted file mode 100644
index 2d2f9c71c..000000000
--- a/pyload/plugins/hoster/PotloadCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class PotloadCom(DeadHoster):
- __name = "PotloadCom"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?potload\.com/\w{12}'
-
- __description = """Potload.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(PotloadCom)
diff --git a/pyload/plugins/hoster/PremiumTo.py b/pyload/plugins/hoster/PremiumTo.py
deleted file mode 100644
index 64d6513d0..000000000
--- a/pyload/plugins/hoster/PremiumTo.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-from os import remove
-from os.path import exists
-from urllib import quote
-
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import fs_encode
-
-
-class PremiumTo(Hoster):
- __name = "PremiumTo"
- __type = "hoster"
- __version = "0.11"
-
- __pattern = r'https?://(?:www\.)?premium\.to/.+'
-
- __description = """Premium.to hoster plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- def setup(self):
- self.resumeDownload = True
- self.chunkLimit = 1
-
-
- def process(self, pyfile):
- if not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "premium.to")
- self.fail(_("No premium.to account provided"))
-
- self.logDebug("Old URL: %s" % pyfile.url)
-
- tra = self.getTraffic()
-
- #raise timeout to 2min
- self.req.setOption("timeout", 120)
-
- self.download("http://premium.to/api/getfile.php",
- get={'username': self.account.username,
- 'password': self.account.password,
- 'link' : quote(pyfile.url, "")},
- disposition=True)
-
- check = self.checkDownload({"nopremium": "No premium account available"})
-
- if check == "nopremium":
- self.retry(60, 5 * 60, "No premium account available")
-
- err = ''
- if self.req.http.code == '420':
- # Custom error code send - fail
- lastDownload = fs_encode(self.lastDownload)
-
- if exists(lastDownload):
- with open(lastDownload, "rb") as f:
- err = f.read(256).strip()
- remove(lastDownload)
- else:
- err = _('File does not exist')
-
- trb = self.getTraffic()
- self.logInfo(_("Filesize: %d, Traffic used %d, traffic left %d") % (pyfile.size, tra - trb, trb))
-
- if err:
- self.fail(err)
-
-
- def getTraffic(self):
- try:
- api_r = self.load("http://premium.to/api/straffic.php",
- get={'username': self.account.username, 'password': self.account.password})
- traffic = sum(map(int, api_r.split(';')))
- except Exception:
- traffic = 0
- return traffic
diff --git a/pyload/plugins/hoster/PremiumizeMe.py b/pyload/plugins/hoster/PremiumizeMe.py
deleted file mode 100644
index af9ae98d9..000000000
--- a/pyload/plugins/hoster/PremiumizeMe.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.utils import json_loads
-from pyload.plugins.Hoster import Hoster
-
-
-class PremiumizeMe(Hoster):
- __name = "PremiumizeMe"
- __type = "hoster"
- __version = "0.12"
-
- __pattern = r'^unmatchable$' #: Since we want to allow the user to specify the list of hoster to use we let MultiHoster.activate
-
- __description = """Premiumize.me hoster plugin"""
- __license = "GPLv3"
- __authors = [("Florian Franzen", "FlorianFranzen@gmail.com")]
-
-
- def process(self, pyfile):
- # Check account
- if not self.account or not self.account.canUse():
- self.logError(_("Please enter your %s account or deactivate this plugin") % "premiumize.me")
- self.fail(_("No valid premiumize.me account provided"))
-
- # In some cases hosters do not supply us with a filename at download, so we
- # are going to set a fallback filename (e.g. for freakshare or xfileshare)
- pyfile.name = pyfile.name.split('/').pop() # Remove everything before the last slash
-
- # Correction for automatic assigned filename: Removing html at end if needed
- suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]
- temp = pyfile.name.split('.')
- if temp.pop() in suffix_to_remove:
- pyfile.name = ".".join(temp)
-
- # Get account data
- (user, data) = self.account.selectAccount()
-
- # Get rewritten link using the premiumize.me api v1 (see https://secure.premiumize.me/?show=api)
- data = json_loads(self.load("https://api.premiumize.me/pm-api/v1.php",
- get={'method' : "directdownloadlink",
- 'params[login]': user,
- 'params[pass]' : data['password'],
- 'params[link]' : pyfile.url}))
-
- # Check status and decide what to do
- status = data['status']
- if status == 200:
- self.download(data['result']['location'], disposition=True)
- elif status == 400:
- self.fail(_("Invalid link"))
- elif status == 404:
- self.offline()
- elif status >= 500:
- self.tempOffline()
- else:
- self.fail(data['statusmessage'])
diff --git a/pyload/plugins/hoster/PromptfileCom.py b/pyload/plugins/hoster/PromptfileCom.py
deleted file mode 100644
index e408116ba..000000000
--- a/pyload/plugins/hoster/PromptfileCom.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class PromptfileCom(SimpleHoster):
- __name = "PromptfileCom"
- __type = "hoster"
- __version = "0.12"
-
- __pattern = r'https?://(?:www\.)?promptfile\.com/'
-
- __description = """Promptfile.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("igel", "igelkun@myopera.com")]
-
-
- INFO_PATTERN = r'<span style="[^"]*" title="[^"]*">(?P<N>.*?) \((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)</span>'
- OFFLINE_PATTERN = r'<span style="[^"]*" title="File Not Found">File Not Found</span>'
-
- CHASH_PATTERN = r'<input type="hidden" name="chash" value="([^"]*)" />'
- LINK_PATTERN = r'<a href=\"(.+)\" target=\"_blank\" class=\"view_dl_link\">Download File</a>'
-
-
- def handleFree(self):
- # STAGE 1: get link to continue
- m = re.search(self.CHASH_PATTERN, self.html)
- if m is None:
- self.error(_("CHASH_PATTERN not found"))
- chash = m.group(1)
- self.logDebug("Read chash %s" % chash)
- # continue to stage2
- self.html = self.load(self.pyfile.url, decode=True, post={'chash': chash})
-
- # STAGE 2: get the direct link
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("LINK_PATTERN not found"))
-
- self.download(m.group(1), disposition=True)
-
-
-getInfo = create_getInfo(PromptfileCom)
diff --git a/pyload/plugins/hoster/PrzeklejPl.py b/pyload/plugins/hoster/PrzeklejPl.py
deleted file mode 100644
index fa37caf3a..000000000
--- a/pyload/plugins/hoster/PrzeklejPl.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class PrzeklejPl(DeadHoster):
- __name = "PrzeklejPl"
- __type = "hoster"
- __version = "0.11"
-
- __pattern = r'http://(?:www\.)?przeklej\.pl/plik/.+'
-
- __description = """Przeklej.pl hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(PrzeklejPl)
diff --git a/pyload/plugins/hoster/QuickshareCz.py b/pyload/plugins/hoster/QuickshareCz.py
deleted file mode 100644
index ba0c30947..000000000
--- a/pyload/plugins/hoster/QuickshareCz.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class QuickshareCz(SimpleHoster):
- __name = "QuickshareCz"
- __type = "hoster"
- __version = "0.55"
-
- __pattern = r'http://(?:[^/]*\.)?quickshare\.cz/stahnout-soubor/.*'
-
- __description = """Quickshare.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<th width="145px">Název:</th>\s*<td style="word-wrap:break-word;">(?P<N>[^<]+)</td>'
- SIZE_PATTERN = r'<th>Velikost:</th>\s*<td>(?P<S>[\d.,]+) (?P<U>[\w^_]+)</td>'
- OFFLINE_PATTERN = r'<script type="text/javascript">location\.href=\'/chyba\';</script>'
-
-
- def process(self, pyfile):
- self.html = self.load(pyfile.url, decode=True)
- self.getFileInfo()
-
- # parse js variables
- self.jsvars = dict((x, y.strip("'")) for x, y in re.findall(r"var (\w+) = ([\d.]+|'[^']*')", self.html))
- self.logDebug(self.jsvars)
- pyfile.name = self.jsvars['ID3']
-
- # determine download type - free or premium
- if self.premium:
- if 'UU_prihlasen' in self.jsvars:
- if self.jsvars['UU_prihlasen'] == '0':
- self.logWarning(_("User not logged in"))
- self.relogin(self.user)
- self.retry()
- elif float(self.jsvars['UU_kredit']) < float(self.jsvars['kredit_odecet']):
- self.logWarning(_("Not enough credit left"))
- self.premium = False
-
- if self.premium:
- self.handlePremium()
- else:
- self.handleFree()
-
- check = self.checkDownload({"err": re.compile(r"\AChyba!")}, max_size=100)
- if check == "err":
- self.fail(_("File not m or plugin defect"))
-
-
- def handleFree(self):
- # get download url
- download_url = '%s/download.php' % self.jsvars['server']
- data = dict((x, self.jsvars[x]) for x in self.jsvars if x in ("ID1", "ID2", "ID3", "ID4"))
- self.logDebug("FREE URL1:" + download_url, data)
-
- self.load(download_url, post=data, follow_location=False)
- self.header = self.req.http.header
-
- m = re.search(r'Location\s*:\s*(.+)', self.header, re.I)
- if m is None:
- self.fail(_("File not found"))
- download_url = m.group(1)
- self.logDebug("FREE URL2:" + download_url)
-
- # check errors
- m = re.search(r'/chyba/(\d+)', download_url)
- if m:
- if m.group(1) == '1':
- self.retry(60, 2 * 60, "This IP is already downloading")
- elif m.group(1) == '2':
- self.retry(60, 60, "No free slots available")
- else:
- self.fail(_("Error %d") % m.group(1))
-
- # download file
- self.download(download_url)
-
-
- def handlePremium(self):
- download_url = '%s/download_premium.php' % self.jsvars['server']
- data = dict((x, self.jsvars[x]) for x in self.jsvars if x in ("ID1", "ID2", "ID4", "ID5"))
- self.download(download_url, get=data)
-
-
-getInfo = create_getInfo(QuickshareCz)
diff --git a/pyload/plugins/hoster/RPNetBiz.py b/pyload/plugins/hoster/RPNetBiz.py
deleted file mode 100644
index 85e31c1dd..000000000
--- a/pyload/plugins/hoster/RPNetBiz.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import json_loads
-
-
-class RPNetBiz(Hoster):
- __name = "RPNetBiz"
- __type = "hoster"
- __version = "0.10"
-
- __description = """RPNet.biz hoster plugin"""
- __license = "GPLv3"
-
- __pattern = r'https?://.*rpnet\.biz'
- __authors = [("Dman", "dmanugm@gmail.com")]
-
-
- def setup(self):
- self.chunkLimit = -1
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- if re.match(self.__pattern, pyfile.url):
- link_status = {'generated': pyfile.url}
- elif not self.account:
- # Check account
- self.logError(_("Please enter your %s account or deactivate this plugin") % "rpnet")
- self.fail(_("No rpnet account provided"))
- else:
- (user, data) = self.account.selectAccount()
-
- self.logDebug("Original URL: %s" % pyfile.url)
- # Get the download link
- res = self.load("https://premium.rpnet.biz/client_api.php",
- get={"username": user,
- "password": data['password'],
- "action": "generate",
- "links": pyfile.url})
-
- self.logDebug("JSON data: %s" % res)
- link_status = json_loads(res)['links'][0] # get the first link... since we only queried one
-
- # Check if we only have an id as a HDD link
- if 'id' in link_status:
- self.logDebug("Need to wait at least 30 seconds before requery")
- self.setWait(30) # wait for 30 seconds
- self.wait()
- # Let's query the server again asking for the status of the link;
- # we need to keep doing this until we reach 100
- max_tries = 30
- my_try = 0
- while (my_try <= max_tries):
- self.logDebug("Try: %d ; Max Tries: %d" % (my_try, max_tries))
- res = self.load("https://premium.rpnet.biz/client_api.php",
- get={"username": user,
- "password": data['password'],
- "action": "downloadInformation",
- "id": link_status['id']})
- self.logDebug("JSON data hdd query: %s" % res)
- download_status = json_loads(res)['download']
-
- if download_status['status'] == '100':
- link_status['generated'] = download_status['rpnet_link']
- self.logDebug("Successfully downloaded to rpnet HDD: %s" % link_status['generated'])
- break
- else:
- self.logDebug("At %s%% for the file download" % download_status['status'])
-
- self.setWait(30)
- self.wait()
- my_try += 1
-
- if my_try > max_tries: # We went over the limit!
- self.fail(_("Waited for about 15 minutes for download to finish but failed"))
-
- if 'generated' in link_status:
- self.download(link_status['generated'], disposition=True)
- elif 'error' in link_status:
- self.fail(link_status['error'])
- else:
- self.fail(_("Something went wrong, not supposed to enter here"))
diff --git a/pyload/plugins/hoster/RapidfileshareNet.py b/pyload/plugins/hoster/RapidfileshareNet.py
deleted file mode 100644
index 328628e38..000000000
--- a/pyload/plugins/hoster/RapidfileshareNet.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class RapidfileshareNet(XFSHoster):
- __name = "RapidfileshareNet"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?rapidfileshare\.net/\w{12}'
-
- __description = """Rapidfileshare.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "rapidfileshare.net"
-
- NAME_PATTERN = r'<input type="hidden" name="fname" value="(?P<N>.+?)">'
- SIZE_PATTERN = r'>http://www.rapidfileshare.net/\w+?</font> \((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)</font>'
-
- OFFLINE_PATTERN = r'>No such file with this filename'
- TEMP_OFFLINE_PATTERN = r'The page may have been renamed, removed or be temporarily unavailable.<'
-
-
- def handlePremium(self):
- self.fail(_("Premium download not implemented"))
-
-
-getInfo = create_getInfo(RapidfileshareNet)
diff --git a/pyload/plugins/hoster/RapidgatorNet.py b/pyload/plugins/hoster/RapidgatorNet.py
deleted file mode 100644
index dd6c7cbb6..000000000
--- a/pyload/plugins/hoster/RapidgatorNet.py
+++ /dev/null
@@ -1,199 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pycurl import HTTPHEADER
-
-from pyload.utils import json_loads
-from pyload.network.HTTPRequest import BadHeader
-from pyload.plugins.hoster.UnrestrictLi import secondsToMidnight
-from pyload.plugins.internal.captcha import AdsCaptcha, ReCaptcha, SolveMedia
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class RapidgatorNet(SimpleHoster):
- __name = "RapidgatorNet"
- __type = "hoster"
- __version = "0.26"
-
- __pattern = r'http://(?:www\.)?(rapidgator\.net|rg\.to)/file/\w+'
-
- __description = """Rapidgator.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("chrox", ""),
- ("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- API_URL = "http://rapidgator.net/api/file"
-
- COOKIES = [("rapidgator.net", "lang", "en")]
-
- NAME_PATTERN = r'<title>Download file (?P<N>.*)</title>'
- SIZE_PATTERN = r'File size:\s*<strong>(?P<S>[\d.,]+) (?P<U>[\w^_]+)</strong>'
- OFFLINE_PATTERN = r'>(File not found|Error 404)'
-
- JSVARS_PATTERN = r'\s+var\s*(startTimerUrl|getDownloadUrl|captchaUrl|fid|secs)\s*=\s*\'?(.*?)\'?;'
- PREMIUM_ONLY_ERROR_PATTERN = r'You can download files up to|This file can be downloaded by premium only<'
- DOWNLOAD_LIMIT_ERROR_PATTERN = r'You have reached your (daily|hourly) downloads limit'
- WAIT_PATTERN = r'(?:Delay between downloads must be not less than|Try again in)\s*(\d+)\s*(hour|min)'
- LINK_PATTERN = r'return \'(http://\w+.rapidgator.net/.*)\';'
-
- RECAPTCHA_PATTERN = r'"http://api\.recaptcha\.net/challenge\?k=(.*?)"'
- ADSCAPTCHA_PATTERN = r'(http://api\.adscaptcha\.com/Get\.aspx[^"\']*)'
- SOLVEMEDIA_PATTERN = r'http://api\.solvemedia\.com/papi/challenge\.script\?k=(.*?)"'
-
-
- def setup(self):
- if self.account:
- self.sid = self.account.getAccountData(self.user).get('SID', None)
- else:
- self.sid = None
-
- if self.sid:
- self.premium = True
-
- self.resumeDownload = self.multiDL = self.premium
- self.chunkLimit = 1
-
-
- def api_response(self, cmd):
- try:
- json = self.load('%s/%s' % (self.API_URL, cmd),
- get={'sid': self.sid,
- 'url': self.pyfile.url}, decode=True)
- self.logDebug("API:%s" % cmd, json, "SID: %s" % self.sid)
- json = json_loads(json)
- status = json['response_status']
- msg = json['response_details']
-
- except BadHeader, e:
- self.logError("API: %s" % cmd, e, "SID: %s" % self.sid)
- status = e.code
- msg = e
-
- if status == 200:
- return json['response']
-
- elif status == 423:
- self.account.empty(self.user)
- self.retry()
-
- else:
- self.account.relogin(self.user)
- self.retry(wait_time=60)
-
-
- def handlePremium(self):
- #self.logDebug("ACCOUNT_DATA", self.account.getAccountData(self.user))
- self.api_data = self.api_response('info')
- self.api_data['md5'] = self.api_data['hash']
- self.pyfile.name = self.api_data['filename']
- self.pyfile.size = self.api_data['size']
- url = self.api_response('download')['url']
- self.download(url)
-
-
- def handleFree(self):
- self.checkFree()
-
- jsvars = dict(re.findall(self.JSVARS_PATTERN, self.html))
- self.logDebug(jsvars)
-
- self.req.http.lastURL = self.pyfile.url
- self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
-
- url = "http://rapidgator.net%s?fid=%s" % (
- jsvars.get('startTimerUrl', '/download/AjaxStartTimer'), jsvars['fid'])
- jsvars.update(self.getJsonResponse(url))
-
- self.wait(int(jsvars.get('secs', 45)), False)
-
- url = "http://rapidgator.net%s?sid=%s" % (
- jsvars.get('getDownloadUrl', '/download/AjaxGetDownload'), jsvars['sid'])
- jsvars.update(self.getJsonResponse(url))
-
- self.req.http.lastURL = self.pyfile.url
- self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With:"])
-
- url = "http://rapidgator.net%s" % jsvars.get('captchaUrl', '/download/captcha')
- self.html = self.load(url)
-
- for _i in xrange(5):
- m = re.search(self.LINK_PATTERN, self.html)
- if m:
- link = m.group(1)
- self.logDebug(link)
- self.download(link, disposition=True)
- break
- else:
- captcha, captcha_key = self.getCaptcha()
- challenge, response = captcha.challenge(captcha_key)
-
- self.html = self.load(url, post={'DownloadCaptchaForm[captcha]': "",
- 'adcopy_challenge' : challenge,
- 'adcopy_response' : response})
-
- if "The verification code is incorrect" in self.html:
- self.invalidCaptcha()
- else:
- self.correctCaptcha()
- else:
- self.error(_("Download link"))
-
-
- def getCaptcha(self):
- m = re.search(self.ADSCAPTCHA_PATTERN, self.html)
- if m:
- captcha_key = m.group(1)
- captcha = AdsCaptcha(self)
- else:
- m = re.search(self.RECAPTCHA_PATTERN, self.html)
- if m:
- captcha_key = m.group(1)
- captcha = ReCaptcha(self)
- else:
- m = re.search(self.SOLVEMEDIA_PATTERN, self.html)
- if m:
- captcha_key = m.group(1)
- captcha = SolveMedia(self)
- else:
- self.error(_("Captcha"))
-
- return captcha, captcha_key
-
-
- def checkFree(self):
- m = re.search(self.PREMIUM_ONLY_ERROR_PATTERN, self.html)
- if m:
- self.fail(_("Premium account needed for download"))
- else:
- m = re.search(self.WAIT_PATTERN, self.html)
-
- if m:
- wait_time = int(m.group(1)) * {"hour": 60, "min": 1}[m.group(2)]
- else:
- m = re.search(self.DOWNLOAD_LIMIT_ERROR_PATTERN, self.html)
- if m is None:
- return
- elif m.group(1) == "daily":
- self.logWarning(_("You have reached your daily downloads limit for today"))
- wait_time = secondsToMidnight(gmt=2)
- else:
- wait_time = 1 * 60 * 60
-
- self.logDebug("Waiting %d minutes" % wait_time / 60)
- self.wait(wait_time, True)
- self.retry()
-
-
- def getJsonResponse(self, url):
- res = self.load(url, decode=True)
- if not res.startswith('{'):
- self.retry()
- self.logDebug(url, res)
- return json_loads(res)
-
-
-getInfo = create_getInfo(RapidgatorNet)
diff --git a/pyload/plugins/hoster/RapiduNet.py b/pyload/plugins/hoster/RapiduNet.py
deleted file mode 100644
index cd763bde7..000000000
--- a/pyload/plugins/hoster/RapiduNet.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pycurl import HTTPHEADER
-from time import time, altzone
-
-from pyload.utils import json_loads
-from pyload.plugins.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class RapiduNet(SimpleHoster):
- __name = "RapiduNet"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'https?://(?:www\.)?rapidu\.net/(?P<ID>\d{10})'
-
- __description = """Rapidu.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("prOq", "")]
-
-
- COOKIES = [("rapidu.net", "rapidu_lang", "en")]
-
- FILE_INFO_PATTERN = r'<h1 title="(?P<N>.*)">.*</h1>\s*<small>(?P<S>\d+(\.\d+)?)\s(?P<U>\w+)</small>'
- OFFLINE_PATTERN = r'404 - File not found'
-
- ERROR_PATTERN = r'<div class="error">'
-
- RECAPTCHA_KEY = r'6Ld12ewSAAAAAHoE6WVP_pSfCdJcBQScVweQh8Io'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
- self.limitDL = 0 if self.premium else 2
-
-
- def handleFree(self):
- self.req.http.lastURL = self.pyfile.url
- self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
-
- jsvars = self.getJsonResponse("https://rapidu.net/ajax.php?a=getLoadTimeToDownload", {'_go': None})
-
- if str(jsvars['timeToDownload']) == "stop":
- t = (24 * 60 * 60) - (int(time()) % (24 * 60 * 60)) + altzone
-
- self.logInfo("You've reach your daily download transfer")
-
- self.retry(10, 10 if t < 1 else None, "Try again tomorrow") #@NOTE: check t in case the clock is not synchronised
-
- else:
- self.wait(int(jsvars['timeToDownload']) - int(time()))
-
- recaptcha = ReCaptcha(self)
-
- for _i in xrange(10):
- challenge, response = recaptcha.challenge(self.RECAPTCHA_KEY)
-
- jsvars = self.getJsonResponse("https://rapidu.net/ajax.php?a=getCheckCaptcha",
- {'_go' : None,
- 'captcha1': challenge,
- 'captcha2': response,
- 'fileId' : self.info['ID']})
- if jsvars['message'] == 'success':
- self.download(jsvars['url'])
- break
-
-
- def getJsonResponse(self, url, post_data):
- res = self.load(url, post=post_data, decode=True)
- if not res.startswith('{'):
- self.retry()
-
- self.logDebug(url, res)
-
- return json_loads(res)
-
-
-getInfo = create_getInfo(RapiduNet)
diff --git a/pyload/plugins/hoster/RarefileNet.py b/pyload/plugins/hoster/RarefileNet.py
deleted file mode 100644
index 599327be9..000000000
--- a/pyload/plugins/hoster/RarefileNet.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class RarefileNet(XFSHoster):
- __name = "RarefileNet"
- __type = "hoster"
- __version = "0.08"
-
- __pattern = r'http://(?:www\.)?rarefile\.net/\w{12}'
-
- __description = """Rarefile.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "rarefile.net"
-
- NAME_PATTERN = r'<font color="red">(?P<N>.+?)<'
- SIZE_PATTERN = r'>Size : (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- LINK_PATTERN = r'<a href="(?P<link>[^"]+)">(?P=link)</a>'
-
-
-getInfo = create_getInfo(RarefileNet)
diff --git a/pyload/plugins/hoster/RealdebridCom.py b/pyload/plugins/hoster/RealdebridCom.py
deleted file mode 100644
index 711bea60b..000000000
--- a/pyload/plugins/hoster/RealdebridCom.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from random import randrange
-from urllib import quote, unquote
-from time import time
-
-from pyload.utils import json_loads
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import parseFileSize
-
-
-class RealdebridCom(Hoster):
- __name = "RealdebridCom"
- __type = "hoster"
- __version = "0.53"
-
- __pattern = r'https?://(?:[^/]*\.)?real-debrid\..*'
-
- __description = """Real-Debrid.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Devirex Hazzard", "naibaf_11@yahoo.de")]
-
-
- def getFilename(self, url):
- try:
- name = unquote(url.rsplit("/", 1)[1])
- except IndexError:
- name = "Unknown_Filename..."
- if not name or name.endswith(".."): #: incomplete filename, append random stuff
- name += "%s.tmp" % randrange(100, 999)
- return name
-
-
- def setup(self):
- self.chunkLimit = 3
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- if re.match(self.__pattern, pyfile.url):
- new_url = pyfile.url
- elif not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "Real-debrid")
- self.fail(_("No Real-debrid account provided"))
- else:
- self.logDebug("Old URL: %s" % pyfile.url)
- password = self.getPassword().splitlines()
- if not password:
- password = ""
- else:
- password = password[0]
-
- data = json_loads(self.load("https://real-debrid.com/ajax/unrestrict.php",
- get={'lang' : "en",
- 'link' : quote(pyfile.url, ""),
- 'password': password,
- 'time' : int(time() * 1000)}))
-
- self.logDebug("Returned Data: %s" % data)
-
- if data['error'] != 0:
- if data['message'] == "Your file is unavailable on the hoster.":
- self.offline()
- else:
- self.logWarning(data['message'])
- self.tempOffline()
- else:
- if pyfile.name is not None and pyfile.name.endswith('.tmp') and data['file_name']:
- pyfile.name = data['file_name']
- pyfile.size = parseFileSize(data['file_size'])
- new_url = data['generated_links'][0][-1]
-
- if self.getConfig("https"):
- new_url = new_url.replace("http://", "https://")
- else:
- new_url = new_url.replace("https://", "http://")
-
- if new_url != pyfile.url:
- self.logDebug("New URL: %s" % new_url)
-
- if pyfile.name.startswith("http") or pyfile.name.startswith("Unknown") or pyfile.name.endswith('..'):
- # only use when the name wasn't already set
- pyfile.name = self.getFilename(new_url)
-
- self.download(new_url, disposition=True)
-
- check = self.checkDownload(
- {"error": "<title>An error occured while processing your request</title>"})
-
- if check == "error":
- # usually this download can safely be retried
- self.retry(wait_time=60, reason=_("An error occurred while generating link"))
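As a side note, the filename fallback in RealdebridCom.getFilename() above is self-contained and can be exercised on its own; a small Python 2 sketch mirroring that logic (the URLs below are made up for illustration):

    from random import randrange
    from urllib import unquote

    def guess_filename(url):
        try:
            name = unquote(url.rsplit("/", 1)[1])
        except IndexError:
            name = "Unknown_Filename..."
        if not name or name.endswith(".."):  # incomplete filename, append random suffix
            name += "%s.tmp" % randrange(100, 999)
        return name

    # guess_filename("https://real-debrid.com/d/ABCDEF/video.mkv")  -> "video.mkv"
    # guess_filename("https://real-debrid.com/d/ABCDEF/")           -> e.g. "427.tmp"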
diff --git a/pyload/plugins/hoster/RedtubeCom.py b/pyload/plugins/hoster/RedtubeCom.py
deleted file mode 100644
index ef6db08ca..000000000
--- a/pyload/plugins/hoster/RedtubeCom.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import html_unescape
-
-
-class RedtubeCom(Hoster):
- __name = "RedtubeCom"
- __type = "hoster"
- __version = "0.20"
-
- __pattern = r'http://(?:www\.)?redtube\.com/\d+'
-
- __description = """Redtube.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de")]
-
-
- def process(self, pyfile):
- self.download_html()
- if not self.file_exists():
- self.offline()
-
- pyfile.name = self.get_file_name()
- self.download(self.get_file_url())
-
-
- def download_html(self):
- url = self.pyfile.url
- self.html = self.load(url)
-
-
- def get_file_url(self):
- """ returns the absolute downloadable filepath
- """
- if not self.html:
- self.download_html()
-
- file_url = html_unescape(re.search(r'hashlink=(http.*?)"', self.html).group(1))
-
- return file_url
-
-
- def get_file_name(self):
- if not self.html:
- self.download_html()
-
- return re.search('<title>(.*?)- RedTube - Free Porn Videos</title>', self.html).group(1).strip() + ".flv"
-
-
- def file_exists(self):
- """ returns True or False
- """
- if not self.html:
- self.download_html()
-
- if re.search(r'This video has been removed.', self.html) is not None:
- return False
- else:
- return True
diff --git a/pyload/plugins/hoster/RehostTo.py b/pyload/plugins/hoster/RehostTo.py
deleted file mode 100644
index 8fd63238c..000000000
--- a/pyload/plugins/hoster/RehostTo.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from urllib import quote, unquote
-
-from pyload.plugins.Hoster import Hoster
-
-
-class RehostTo(Hoster):
- __name = "RehostTo"
- __type = "hoster"
- __version = "0.13"
-
- __pattern = r'https?://.*rehost\.to\..*'
-
- __description = """Rehost.to hoster plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org")]
-
-
- def getFilename(self, url):
- return unquote(url.rsplit("/", 1)[1])
-
-
- def setup(self):
- self.chunkLimit = 1
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- if not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "rehost.to")
- self.fail(_("No rehost.to account provided"))
-
- data = self.account.getAccountInfo(self.user)
- long_ses = data['long_ses']
-
- self.logDebug("Rehost.to: Old URL: %s" % pyfile.url)
-
- #raise timeout to 2min
- self.req.setOption("timeout", 120)
-
- self.download("http://rehost.to/process_download.php",
- get={'user': "cookie", 'pass': long_ses, 'dl': quote(pyfile.url, "")},
- disposition=True)
diff --git a/pyload/plugins/hoster/RemixshareCom.py b/pyload/plugins/hoster/RemixshareCom.py
deleted file mode 100644
index d6b623620..000000000
--- a/pyload/plugins/hoster/RemixshareCom.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://remixshare.com/download/p946u
-#
-# Note:
-# The remixshare.com website is very slow, so if your download
-# does not start because of pycurl timeouts, adjust the timeouts
-# in /usr/share/pyload/pyload/network/HTTPRequest.py
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class RemixshareCom(SimpleHoster):
- __name = "RemixshareCom"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'https?://remixshare\.com/(download|dl)/\w+'
-
- __description = """Remixshare.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- INFO_PATTERN = r'title=\'.+?\'>(?P<N>.+?)</span><span class=\'light2\'>&nbsp;\((?P<S>\d+)&nbsp;(?P<U>[\w^_]+)\)<'
- OFFLINE_PATTERN = r'<h1>Ooops!<'
-
- LINK_PATTERN = r'(http://remixshare\.com/downloadfinal/.+?)"'
- TOKEN_PATTERN = r'var acc = (\d+)'
- WAIT_PATTERN = r'var XYZ = r"(\d+)"'
-
-
- def setup(self):
- self.multiDL = True
- self.chunkLimit = 1
-
-
- def handleFree(self):
- b = re.search(self.LINK_PATTERN, self.html)
- if not b:
- self.error(_("Cannot parse download url"))
- c = re.search(self.TOKEN_PATTERN, self.html)
- if not c:
- self.error(_("Cannot parse file token"))
- dl_url = b.group(1) + c.group(1)
-
- #Check if we have to wait
- seconds = re.search(self.WAIT_PATTERN, self.html)
- if seconds:
- self.logDebug("Wait " + seconds.group(1))
- self.wait(int(seconds.group(1)))
-
- # Finally start downloading...
- self.download(dl_url, disposition=True)
-
-
-getInfo = create_getInfo(RemixshareCom)
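The note at the top of RemixshareCom.py above recommends raising the pycurl timeouts for this very slow hoster. A hedged sketch of the standard pycurl options involved (the exact option names and defaults used in pyload/network/HTTPRequest.py may differ):

    import pycurl

    c = pycurl.Curl()
    c.setopt(pycurl.CONNECTTIMEOUT, 120)   # allow up to 2 minutes to establish the connection
    c.setopt(pycurl.LOW_SPEED_LIMIT, 5)    # abort only if the transfer rate stays below 5 B/s ...
    c.setopt(pycurl.LOW_SPEED_TIME, 120)   # ... for 2 minutes straight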
diff --git a/pyload/plugins/hoster/RgHostNet.py b/pyload/plugins/hoster/RgHostNet.py
deleted file mode 100644
index 0101802e0..000000000
--- a/pyload/plugins/hoster/RgHostNet.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class RgHostNet(SimpleHoster):
- __name = "RgHostNet"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?rghost\.net/\d+(?:r=\d+)?'
-
- __description = """RgHost.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("z00nx", "z00nx0@gmail.com")]
-
-
- INFO_PATTERN = r'<h1>\s+(<a[^>]+>)?(?P<N>[^<]+)(</a>)?\s+<small[^>]+>\s+\((?P<S>[^)]+)\)\s+</small>\s+</h1>'
- OFFLINE_PATTERN = r'File is deleted|this page is not found'
-
- LINK_FREE_PATTERN = r'<a\s+href="([^"]+)"\s+class="btn\s+large\s+download"[^>]+>Download</a>'
-
-
-getInfo = create_getInfo(RgHostNet)
diff --git a/pyload/plugins/hoster/RyushareCom.py b/pyload/plugins/hoster/RyushareCom.py
deleted file mode 100644
index f1258dd8a..000000000
--- a/pyload/plugins/hoster/RyushareCom.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://ryushare.com/cl0jy8ric2js/random.bin
-
-import re
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-from pyload.plugins.internal.captcha import SolveMedia
-
-
-class RyushareCom(XFSHoster):
- __name = "RyushareCom"
- __type = "hoster"
- __version = "0.20"
-
- __pattern = r'http://(?:www\.)?ryushare\.com/\w+'
-
- __description = """Ryushare.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it"),
- ("quareevo", "quareevo@arcor.de")]
-
-
- HOSTER_DOMAIN = "ryushare.com"
-
- SIZE_PATTERN = r'You have requested <font color="red">[^<]+</font> \((?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- WAIT_PATTERN = r'You have to wait ((?P<hour>\d+) hour[s]?, )?((?P<min>\d+) minute[s], )?(?P<sec>\d+) second[s]'
- LINK_PATTERN = r'<a href="([^"]+)">Click here to download<'
-
-
- def getDownloadLink(self):
- retry = False
- self.html = self.load(self.pyfile.url)
- action, inputs = self.parseHtmlForm(input_names={"op": re.compile("^download")})
- if "method_premium" in inputs:
- del inputs['method_premium']
-
- self.html = self.load(self.pyfile.url, post=inputs)
- action, inputs = self.parseHtmlForm('F1')
-
- self.setWait(65)
- # Wait 1 hour
- if "You have reached the download-limit" in self.html:
- self.setWait(1 * 60 * 60, True)
- retry = True
-
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- wait = m.groupdict(0)
- waittime = int(wait['hour']) * 60 * 60 + int(wait['min']) * 60 + int(wait['sec'])
- self.setWait(waittime, True)
- retry = True
-
- self.wait()
- if retry:
- self.retry()
-
- for _i in xrange(5):
- solvemedia = SolveMedia(self)
- challenge, response = solvemedia.challenge()
-
- inputs['adcopy_challenge'] = challenge
- inputs['adcopy_response'] = response
-
- self.html = self.load(self.pyfile.url, post=inputs)
- if "WRONG CAPTCHA" in self.html:
- self.invalidCaptcha()
- else:
- self.correctCaptcha()
- break
- else:
- self.fail(_("You have entered 5 invalid captcha codes"))
-
- if "Click here to download" in self.html:
- return re.search(r'<a href="([^"]+)">Click here to download</a>', self.html).group(1)
-
-
-getInfo = create_getInfo(RyushareCom)
diff --git a/pyload/plugins/hoster/SafesharingEu.py b/pyload/plugins/hoster/SafesharingEu.py
deleted file mode 100644
index 86922a0f5..000000000
--- a/pyload/plugins/hoster/SafesharingEu.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class SafesharingEu(XFSHoster):
- __name = "SafesharingEu"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'https?://(?:www\.)?safesharing\.eu/\w{12}'
-
- __description = """Safesharing.eu hoster plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
-
-
- HOSTER_DOMAIN = "safesharing.eu"
-
- WAIT_PATTERN = r'You have to wait (\d+) minutes'
-
- ERROR_PATTERN = r'(?:<div class="alert alert-danger">)(.+?)(?:</div>)'
-
-
-getInfo = create_getInfo(SafesharingEu)
diff --git a/pyload/plugins/hoster/SecureUploadEu.py b/pyload/plugins/hoster/SecureUploadEu.py
deleted file mode 100644
index fbd530679..000000000
--- a/pyload/plugins/hoster/SecureUploadEu.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class SecureUploadEu(XFSHoster):
- __name = "SecureUploadEu"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'https?://(?:www\.)?secureupload\.eu/\w{12}'
-
- __description = """SecureUpload.eu hoster plugin"""
- __license = "GPLv3"
- __authors = [("z00nx", "z00nx0@gmail.com")]
-
-
- HOSTER_DOMAIN = "secureupload.eu"
-
- INFO_PATTERN = r'<h3>Downloading (?P<N>[^<]+) \((?P<S>[^<]+)\)</h3>'
-
-
-getInfo = create_getInfo(SecureUploadEu)
diff --git a/pyload/plugins/hoster/SendmywayCom.py b/pyload/plugins/hoster/SendmywayCom.py
deleted file mode 100644
index bd4c2d5b5..000000000
--- a/pyload/plugins/hoster/SendmywayCom.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class SendmywayCom(XFSHoster):
- __name = "SendmywayCom"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'http://(?:www\.)?sendmyway\.com/\w{12}'
-
- __description = """SendMyWay hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "sendmyway.com"
-
- NAME_PATTERN = r'<p class="file-name" ><.*?>\s*(?P<N>.+)'
- SIZE_PATTERN = r'<small>\((?P<S>\d+) bytes\)</small>'
-
-
-getInfo = create_getInfo(SendmywayCom)
diff --git a/pyload/plugins/hoster/SendspaceCom.py b/pyload/plugins/hoster/SendspaceCom.py
deleted file mode 100644
index 199f6f7a7..000000000
--- a/pyload/plugins/hoster/SendspaceCom.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class SendspaceCom(SimpleHoster):
- __name = "SendspaceCom"
- __type = "hoster"
- __version = "0.14"
-
- __pattern = r'http://(?:www\.)?sendspace\.com/file/.*'
-
- __description = """Sendspace.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<h2 class="bgray">\s*<(?:b|strong)>(?P<N>[^<]+)</'
- SIZE_PATTERN = r'<div class="file_description reverse margin_center">\s*<b>File Size:</b>\s*(?P<S>[\d.,]+)(?P<U>[\w^_]+)\s*</div>'
- OFFLINE_PATTERN = r'<div class="msg error" style="cursor: default">Sorry, the file you requested is not available.</div>'
-
- LINK_PATTERN = r'<a id="download_button" href="([^"]+)"'
- CAPTCHA_PATTERN = r'<td><img src="(/captchas/captcha\.php\?captcha=([^"]+))"></td>'
- USER_CAPTCHA_PATTERN = r'<td><img src="(/captchas/captcha\.php\?user=([^"]+))"></td>'
-
-
- def handleFree(self):
- params = {}
- for _i in xrange(3):
- m = re.search(self.LINK_PATTERN, self.html)
- if m:
- if 'captcha_hash' in params:
- self.correctCaptcha()
- download_url = m.group(1)
- break
-
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m:
- if 'captcha_hash' in params:
- self.invalidCaptcha()
- captcha_url1 = "http://www.sendspace.com/" + m.group(1)
- m = re.search(self.USER_CAPTCHA_PATTERN, self.html)
- captcha_url2 = "http://www.sendspace.com/" + m.group(1)
- params = {'captcha_hash': m.group(2),
- 'captcha_submit': 'Verify',
- 'captcha_answer': self.decryptCaptcha(captcha_url1) + " " + self.decryptCaptcha(captcha_url2)}
- else:
- params = {'download': "Regular Download"}
-
- self.logDebug(params)
- self.html = self.load(self.pyfile.url, post=params)
- else:
- self.fail(_("Download link not found"))
-
- self.download(download_url)
-
-
-getInfo = create_getInfo(SendspaceCom)
diff --git a/pyload/plugins/hoster/Share4webCom.py b/pyload/plugins/hoster/Share4webCom.py
deleted file mode 100644
index 3389001e1..000000000
--- a/pyload/plugins/hoster/Share4webCom.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.hoster.UnibytesCom import UnibytesCom
-from pyload.plugins.internal.SimpleHoster import create_getInfo
-
-
-class Share4webCom(UnibytesCom):
- __name = "Share4webCom"
- __type = "hoster"
- __version = "0.11"
-
- __pattern = r'https?://(?:www\.)?share4web\.com/get/\w+'
-
- __description = """Share4web.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "share4web.com"
-
-
-getInfo = create_getInfo(Share4webCom)
diff --git a/pyload/plugins/hoster/Share76Com.py b/pyload/plugins/hoster/Share76Com.py
deleted file mode 100644
index 7801aa7d6..000000000
--- a/pyload/plugins/hoster/Share76Com.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class Share76Com(DeadHoster):
- __name = "Share76Com"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'http://(?:www\.)?share76\.com/\w{12}'
-
- __description = """Share76.com hoster plugin"""
- __license = "GPLv3"
- __authors = []
-
-
-getInfo = create_getInfo(Share76Com)
diff --git a/pyload/plugins/hoster/ShareFilesCo.py b/pyload/plugins/hoster/ShareFilesCo.py
deleted file mode 100644
index 764c0f235..000000000
--- a/pyload/plugins/hoster/ShareFilesCo.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class ShareFilesCo(DeadHoster):
- __name = "ShareFilesCo"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?sharefiles\.co/\w{12}'
-
- __description = """Sharefiles.co hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(ShareFilesCo)
diff --git a/pyload/plugins/hoster/SharebeesCom.py b/pyload/plugins/hoster/SharebeesCom.py
deleted file mode 100644
index 9d1e2d516..000000000
--- a/pyload/plugins/hoster/SharebeesCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class SharebeesCom(DeadHoster):
- __name = "SharebeesCom"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?sharebees\.com/\w{12}'
-
- __description = """ShareBees hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(SharebeesCom)
diff --git a/pyload/plugins/hoster/ShareonlineBiz.py b/pyload/plugins/hoster/ShareonlineBiz.py
deleted file mode 100644
index 685d31a2b..000000000
--- a/pyload/plugins/hoster/ShareonlineBiz.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import time
-from urllib import unquote
-from urlparse import urlparse
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class ShareonlineBiz(SimpleHoster):
- __name = "ShareonlineBiz"
- __type = "hoster"
- __version = "0.44"
-
- __pattern = r'https?://(?:www\.)?(share-online\.biz|egoshare\.com)/(download\.php\?id=|dl/)(?P<ID>\w+)'
-
- __description = """Shareonline.biz hoster plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org"),
- ("mkaay", "mkaay@mkaay.de"),
- ("zoidberg", "zoidberg@mujmail.cz"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- URL_REPLACEMENTS = [(__pattern + ".*", "http://www.share-online.biz/dl/\g<ID>")]
-
- RECAPTCHA_KEY = "6LdatrsSAAAAAHZrB70txiV5p-8Iv8BtVxlTtjKX"
-
- ERROR_INFO_PATTERN = r'<p class="b">Information:</p>\s*<div>\s*<strong>(.*?)</strong>'
-
-
- @classmethod
- def getInfo(cls, url="", html=""):
- info = {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3 if url else 1, 'url': url}
-
- if url:
- info['pattern'] = re.match(cls.__pattern, url).groupdict()
-
- field = getURL("http://api.share-online.biz/linkcheck.php",
- get={'md5': "1"},
- post={'links': info['pattern']['ID']},
- decode=True).split(";")
-
- if field[1] == "OK":
- info['fileid'] = field[0]
- info['status'] = 2
- info['name'] = field[2]
- info['size'] = field[3] #: in bytes
- info['md5'] = field[4].strip().lower().replace("\n\n", "") #: md5
-
- elif field[1] in ("DELETED", "NOT FOUND"):
- info['status'] = 1
-
- return info
-
-
- def setup(self):
- self.resumeDownload = self.premium
- self.multiDL = False
-
-
- def handleCaptcha(self):
- recaptcha = ReCaptcha(self)
-
- for _i in xrange(5):
- challenge, response = recaptcha.challenge(self.RECAPTCHA_KEY)
-
- m = re.search(r'var wait=(\d+);', self.html)
- self.setWait(int(m.group(1)) if m else 30)
-
- res = self.load("%s/free/captcha/%d" % (self.pyfile.url, int(time() * 1000)),
- post={'dl_free' : "1",
- 'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field' : response})
- if not res == '0':
- self.correctCaptcha()
- return res
- else:
- self.invalidCaptcha()
- else:
- self.invalidCaptcha()
- self.fail(_("No valid captcha solution received"))
-
-
- def handleFree(self):
- self.html = self.load(self.pyfile.url, cookies=True) #: refer, stuff
-
- self.wait(3)
-
- self.html = self.load("%s/free/" % self.pyfile.url, post={"dl_free": "1", "choice": "free"}, decode=True)
-
- self.checkErrors()
-
- res = self.handleCaptcha()
-
- download_url = res.decode("base64")
-
- if not download_url.startswith("http://"):
- self.error(_("Wrong download url"))
-
- self.wait()
-
- self.download(download_url)
-
-
- def checkFile(self):
- # check download
- check = self.checkDownload({
- 'empty' : re.compile(r"^$"),
- 'cookie': re.compile(r'<div id="dl_failure"'),
- 'fail' : re.compile(r"<title>Share-Online")
- })
-
- if check == "empty":
- self.fail(_("Empty file"))
-
- elif check == "cookie":
- self.invalidCaptcha()
- self.retry(5, 60, _("Cookie failure"))
-
- elif check == "fail":
- self.invalidCaptcha()
- self.retry(5, 5 * 60, _("Download failed"))
-
-
- def handlePremium(self): #: should work better by loading the (account) api internally
- self.account.getAccountInfo(self.user, True)
-
- html = self.load("http://api.share-online.biz/account.php",
- {"username": self.user, "password": self.account.accounts[self.user]['password'],
- "act": "download", "lid": self.info['fileid']})
-
- self.api_data = dlinfo = {}
-
- for line in html.splitlines():
- key, value = line.split(": ")
- dlinfo[key.lower()] = value
-
- self.logDebug(dlinfo)
-
- if not dlinfo['status'] == "online":
- self.offline()
- else:
- self.pyfile.name = dlinfo['name']
- self.pyfile.size = int(dlinfo['size'])
-
- dlLink = dlinfo['url']
-
- if dlLink == "server_under_maintenance":
- self.tempOffline()
- else:
- self.multiDL = True
- self.download(dlLink)
-
-
- def checkErrors(self):
- m = re.search(r"/failure/(.*?)/1", self.req.lastEffectiveURL)
- if m is None:
- self.info.pop('error', None)
- return
-
- errmsg = m.group(1).lower()
-
- try:
- self.logError(errmsg, re.search(self.ERROR_INFO_PATTERN, self.html).group(1))
- except Exception:
- self.logError("Unknown error occurred", errmsg)
-
- if errmsg == "invalid":
- self.fail(_("File not available"))
-
- elif errmsg in ("freelimit", "size", "proxy"):
- self.fail(_("Premium account needed"))
-
- elif errmsg in ("expired", "server"):
- self.retry(wait_time=600, reason=errmsg)
-
- elif 'slot' in errmsg:
- self.wantReconnect = True
- self.retry(24, 3600, errmsg)
-
- else:
- self.wantReconnect = True
- self.retry(wait_time=60, reason=errmsg)
-
-
-getInfo = create_getInfo(ShareonlineBiz)
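For context, the getInfo() classmethod above queries the Share-Online link-check endpoint and splits a semicolon-separated reply. A hypothetical standalone version (the field layout id;status;name;size;md5 is inferred from that code, not from official API documentation):

    import urllib
    import urllib2

    def check_link(file_id):
        post = urllib.urlencode({'links': file_id})
        res = urllib2.urlopen("http://api.share-online.biz/linkcheck.php?md5=1", post)
        field = res.read().split(";")
        if field[1] != "OK":
            return None                     # deleted / not found
        return {'fileid': field[0],
                'name'  : field[2],
                'size'  : int(field[3]),    # size in bytes
                'md5'   : field[4].strip().lower()}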
diff --git a/pyload/plugins/hoster/ShareplaceCom.py b/pyload/plugins/hoster/ShareplaceCom.py
deleted file mode 100644
index 4c4dbdc3b..000000000
--- a/pyload/plugins/hoster/ShareplaceCom.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urllib import unquote
-
-from pyload.plugins.Hoster import Hoster
-
-
-class ShareplaceCom(Hoster):
- __name = "ShareplaceCom"
- __type = "hoster"
- __version = "0.11"
-
- __pattern = r'(http://)?(?:www\.)?shareplace\.(com|org)/\?\w+'
-
- __description = """Shareplace.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("ACCakut", "")]
-
-
- def process(self, pyfile):
- self.pyfile = pyfile
- self.prepare()
- self.download(self.get_file_url())
-
-
- def prepare(self):
- if not self.file_exists():
- self.offline()
-
- self.pyfile.name = self.get_file_name()
-
- wait_time = self.get_waiting_time()
- self.setWait(wait_time)
- self.wait()
-
-
- def get_waiting_time(self):
- if not self.html:
- self.download_html()
-
- #var zzipitime = 15;
- m = re.search(r'var zzipitime = (\d+);', self.html)
- if m:
- sec = int(m.group(1))
- else:
- sec = 0
-
- return sec
-
-
- def download_html(self):
- url = re.sub("shareplace.com\/\?", "shareplace.com//index1.php/?a=", self.pyfile.url)
- self.html = self.load(url, decode=True)
-
-
- def get_file_url(self):
- """ returns the absolute downloadable filepath
- """
- url = re.search(r"var beer = '(.*?)';", self.html)
- if url:
- url = url.group(1)
- url = unquote(
- url.replace("http://http:/", "").replace("vvvvvvvvv", "").replace("lllllllll", "").replace(
- "teletubbies", ""))
- self.logDebug("URL: %s" % url)
- return url
- else:
- self.error(_("Absolute filepath not found"))
-
-
- def get_file_name(self):
- if not self.html:
- self.download_html()
-
- return re.search("<title>\s*(.*?)\s*</title>", self.html).group(1)
-
-
- def file_exists(self):
- """ returns True or False
- """
- if not self.html:
- self.download_html()
-
- if re.search(r"HTTP Status 404", self.html) is not None:
- return False
- else:
- return True
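To make the string juggling in ShareplaceCom.get_file_url() above easier to follow, here is the same de-obfuscation chain applied to a made-up "beer" value (the real pages may pad the link differently):

    from urllib import unquote

    beer = "http://http:/vvvvvvvvvhttp%3A%2F%2Fdl.shareplace.com%2Ffileteletubbies"
    url = unquote(beer.replace("http://http:/", "")
                      .replace("vvvvvvvvv", "")
                      .replace("lllllllll", "")
                      .replace("teletubbies", ""))
    # -> "http://dl.shareplace.com/file"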
diff --git a/pyload/plugins/hoster/SharingmatrixCom.py b/pyload/plugins/hoster/SharingmatrixCom.py
deleted file mode 100644
index 9642542c8..000000000
--- a/pyload/plugins/hoster/SharingmatrixCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class SharingmatrixCom(DeadHoster):
- __name = "SharingmatrixCom"
- __type = "hoster"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?sharingmatrix\.com/file/\w+'
-
- __description = """Sharingmatrix.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de"),
- ("paulking", "")]
-
-
-getInfo = create_getInfo(SharingmatrixCom)
diff --git a/pyload/plugins/hoster/ShragleCom.py b/pyload/plugins/hoster/ShragleCom.py
deleted file mode 100644
index 19c0a596d..000000000
--- a/pyload/plugins/hoster/ShragleCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class ShragleCom(DeadHoster):
- __name = "ShragleCom"
- __type = "hoster"
- __version = "0.22"
-
- __pattern = r'http://(?:www\.)?(cloudnator|shragle)\.com/files/(?P<ID>.*?)/'
-
- __description = """Cloudnator.com (Shragle.com) hoster plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(ShragleCom)
diff --git a/pyload/plugins/hoster/SimplyPremiumCom.py b/pyload/plugins/hoster/SimplyPremiumCom.py
deleted file mode 100644
index e3871dfb3..000000000
--- a/pyload/plugins/hoster/SimplyPremiumCom.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from datetime import datetime, timedelta
-
-from pyload.plugins.Hoster import Hoster
-from pyload.plugins.hoster.UnrestrictLi import secondsToMidnight
-
-
-class SimplyPremiumCom(Hoster):
- __name = "SimplyPremiumCom"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'https?://.*(simply-premium)\.com'
-
- __description = """Simply-Premium.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("EvolutionClip", "evolutionclip@live.de")]
-
-
- def setup(self):
- self.chunkLimit = 16
- self.resumeDownload = False
-
-
- def process(self, pyfile):
- if re.match(self.__pattern, pyfile.url):
- new_url = pyfile.url
- elif not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "Simply-Premium.com")
- self.fail(_("No Simply-Premium.com account provided"))
- else:
- self.logDebug("Old URL: %s" % pyfile.url)
- for i in xrange(5):
- page = self.load("http://www.simply-premium.com/premium.php", get={'info': "", 'link': pyfile.url})
- self.logDebug("JSON data: " + page)
- if page != '':
- break
- else:
- self.logInfo(_("Unable to get API data, waiting 1 minute and retry"))
- self.retry(5, 60, "Unable to get API data")
-
- if '<valid>0</valid>' in page or (
- "You are not allowed to download from this host" in page and self.premium):
- self.account.relogin(self.user)
- self.retry()
- elif "NOTFOUND" in page:
- self.offline()
- elif "downloadlimit" in page:
- self.logWarning(_("Reached maximum connections"))
- self.retry(5, 60, "Reached maximum connections")
- elif "trafficlimit" in page:
- self.logWarning(_("Reached daily limit for this host"))
- self.retry(wait_time=secondsToMidnight(gmt=2), reason="Daily limit for this host reached")
- elif "hostererror" in page:
- self.logWarning(_("Hoster temporarily unavailable, waiting 1 minute and retry"))
- self.retry(5, 60, "Hoster is temporarily unavailable")
- #page = json_loads(page)
- #new_url = page.keys()[0]
- #self.api_data = page[new_url]
-
- try:
- self.pyfile.name = re.search(r'<name>([^<]+)</name>', page).group(1)
- except AttributeError:
- self.pyfile.name = ""
-
- try:
- self.pyfile.size = re.search(r'<size>(\d+)</size>', page).group(1)
- except AttributeError:
- self.pyfile.size = 0
-
- try:
- new_url = re.search(r'<download>([^<]+)</download>', page).group(1)
- except AttributeError:
- new_url = 'http://www.simply-premium.com/premium.php?link=' + pyfile.url
-
- if new_url != pyfile.url:
- self.logDebug("New URL: " + new_url)
-
- self.download(new_url, disposition=True)
diff --git a/pyload/plugins/hoster/SimplydebridCom.py b/pyload/plugins/hoster/SimplydebridCom.py
deleted file mode 100644
index 4022a7764..000000000
--- a/pyload/plugins/hoster/SimplydebridCom.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-
-
-class SimplydebridCom(Hoster):
- __name = "SimplydebridCom"
- __type = "hoster"
- __version = "0.10"
-
- __pattern = r'http://(?:www\.)?\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/sd\.php/*'
-
- __description = """Simply-debrid.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Kagenoshin", "kagenoshin@gmx.ch")]
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
- self.chunkLimit = 1
-
-
- def process(self, pyfile):
- if not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "simply-debrid.com")
- self.fail(_("No simply-debrid.com account provided"))
-
- self.logDebug("Old URL: %s" % pyfile.url)
-
- #fix the links for simply-debrid.com!
- new_url = pyfile.url
- new_url = new_url.replace("clz.to", "cloudzer.net/file")
- new_url = new_url.replace("http://share-online", "http://www.share-online")
- new_url = new_url.replace("ul.to", "uploaded.net/file")
- new_url = new_url.replace("uploaded.com", "uploaded.net")
- new_url = new_url.replace("filerio.com", "filerio.in")
- new_url = new_url.replace("lumfile.com", "lumfile.se")
- if 'fileparadox' in new_url:
- new_url = new_url.replace("http://", "https://")
-
- if re.match(self.__pattern, new_url):
- new_url = new_url
-
- self.logDebug("New URL: %s" % new_url)
-
- if not re.match(self.__pattern, new_url):
- page = self.load("http://simply-debrid.com/api.php", get={'dl': new_url}) # +'&u='+self.user+'&p='+self.account.getAccountData(self.user)['password'])
- if 'tiger Link' in page or 'Invalid Link' in page or ('API' in page and 'ERROR' in page):
- self.fail(_("Unable to unrestrict link"))
- new_url = page
-
- self.setWait(5)
- self.wait()
- self.logDebug("Unrestricted URL: " + new_url)
-
- self.download(new_url, disposition=True)
-
- check = self.checkDownload({"bad1": "No address associated with hostname", "bad2": "<html"})
-
- if check == "bad1" or check == "bad2":
- self.retry(24, 3 * 60, "Bad file downloaded")
diff --git a/pyload/plugins/hoster/SockshareCom.py b/pyload/plugins/hoster/SockshareCom.py
deleted file mode 100644
index 33b3f2e36..000000000
--- a/pyload/plugins/hoster/SockshareCom.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class SockshareCom(DeadHoster):
- __name = "SockshareCom"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?sockshare\.com/(mobile/)?(file|embed)/(?P<ID>\w+)'
-
- __description = """Sockshare.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de"),
- ("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
-getInfo = create_getInfo(SockshareCom)
diff --git a/pyload/plugins/hoster/SoundcloudCom.py b/pyload/plugins/hoster/SoundcloudCom.py
deleted file mode 100644
index 0934a2d76..000000000
--- a/pyload/plugins/hoster/SoundcloudCom.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import pycurl
-import re
-
-from pyload.plugins.Hoster import Hoster
-
-
-class SoundcloudCom(Hoster):
- __name = "SoundcloudCom"
- __type = "hoster"
- __version = "0.10"
-
- __pattern = r'https?://(?:www\.)?soundcloud\.com/(?P<UID>.*?)/(?P<SID>.*)'
-
- __description = """SoundCloud.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Peekayy", "peekayy.dev@gmail.com")]
-
-
- def process(self, pyfile):
- # default UserAgent of HTTPRequest fails for this hoster so we use this one
- self.req.http.c.setopt(pycurl.USERAGENT, 'Mozilla/5.0')
- page = self.load(pyfile.url)
- m = re.search(r'<div class="haudio.*?large.*?" data-sc-track="(?P<ID>\d*)"', page)
- songId = clientId = ""
- if m:
- songId = m.group("ID")
- if len(songId) <= 0:
- self.logError(_("Could not find song id"))
- self.offline()
- else:
- m = re.search(r'"clientID":"(?P<CID>.*?)"', page)
- if m:
- clientId = m.group("CID")
-
- if len(clientId) <= 0:
- clientId = "b45b1aa10f1ac2941910a7f0d10f8e28"
-
- m = re.search(r'<em itemprop="name">\s(?P<TITLE>.*?)\s</em>', page)
- if m:
- pyfile.name = m.group("TITLE") + ".mp3"
- else:
- pyfile.name = re.match(self.__pattern, pyfile.url).group("SID") + ".mp3"
-
- # url to retrieve the actual song url
- page = self.load("https://api.sndcdn.com/i1/tracks/%s/streams" % songId, get={"client_id": clientId})
- # getting streams
- # for now we choose the first stream found in all cases
- # it could be improved if relevant for this hoster
- streams = [
- (result.group("QUALITY"), result.group("URL"))
- for result in re.finditer(r'"(?P<QUALITY>.*?)":"(?P<URL>.*?)"', page)
- ]
- self.logDebug("Found Streams", streams)
- self.logDebug("Downloading", streams[0][0], streams[0][1])
- self.download(streams[0][1])
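The stream extraction above relies on a loose key/value regex over the JSON returned by the streams endpoint. A tiny illustration on a made-up response (the real key names and URLs are not guaranteed to look like this):

    import re

    page = '{"stream_url":"https://cdn.example/a.mp3","preview_url":"https://cdn.example/b.mp3"}'
    streams = [(m.group("QUALITY"), m.group("URL"))
               for m in re.finditer(r'"(?P<QUALITY>.*?)":"(?P<URL>.*?)"', page)]
    # -> [('stream_url', 'https://cdn.example/a.mp3'), ('preview_url', 'https://cdn.example/b.mp3')]
    # the plugin simply downloads streams[0][1]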
diff --git a/pyload/plugins/hoster/SpeedLoadOrg.py b/pyload/plugins/hoster/SpeedLoadOrg.py
deleted file mode 100644
index 068348ab8..000000000
--- a/pyload/plugins/hoster/SpeedLoadOrg.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class SpeedLoadOrg(DeadHoster):
- __name = "SpeedLoadOrg"
- __type = "hoster"
- __version = "1.02"
-
- __pattern = r'http://(?:www\.)?speedload\.org/(?P<ID>\w+)'
-
- __description = """Speedload.org hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
-getInfo = create_getInfo(SpeedLoadOrg)
diff --git a/pyload/plugins/hoster/SpeedfileCz.py b/pyload/plugins/hoster/SpeedfileCz.py
deleted file mode 100644
index fe57ce96d..000000000
--- a/pyload/plugins/hoster/SpeedfileCz.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class SpeedfileCz(DeadHoster):
- __name = "SpeedFileCz"
- __type = "hoster"
- __version = "0.32"
-
- __pattern = r'http://(?:www\.)?speedfile\.cz/.*'
-
- __description = """Speedfile.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(SpeedfileCz)
diff --git a/pyload/plugins/hoster/SpeedyshareCom.py b/pyload/plugins/hoster/SpeedyshareCom.py
deleted file mode 100644
index 0c1d2848a..000000000
--- a/pyload/plugins/hoster/SpeedyshareCom.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://speedy.sh/ep2qY/Zapp-Brannigan.jpg
-
-import re
-
-from urlparse import urljoin
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class SpeedyshareCom(SimpleHoster):
- __name = "SpeedyshareCom"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'https?://(?:www\.)?(speedyshare\.com|speedy\.sh)/\w+'
-
- __description = """Speedyshare.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de")]
-
-
- NAME_PATTERN = r'class=downloadfilename>(?P<N>.*)</span></td>'
- SIZE_PATTERN = r'class=sizetagtext>(?P<S>.*) (?P<U>[kKmM]?[iI]?[bB]?)</div>'
-
- OFFLINE_PATTERN = r'class=downloadfilenamenotfound>.*</span>'
-
- LINK_PATTERN = r'<a href=\'(.*)\'><img src=/gf/slowdownload\.png alt=\'Slow Download\' border=0'
-
-
- def setup(self):
- self.multiDL = False
- self.chunkLimit = 1
-
-
- def handleFree(self):
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("Download link not found"))
-
- dl_link = urljoin("http://www.speedyshare.com", m.group(1))
- self.download(dl_link, disposition=True)
-
- check = self.checkDownload({'html': re.compile("html")})
- if check == "html":
- self.error(_("Downloaded file is an html page"))
-
-
-getInfo = create_getInfo(SpeedyshareCom)
diff --git a/pyload/plugins/hoster/StorageTo.py b/pyload/plugins/hoster/StorageTo.py
deleted file mode 100644
index adf467055..000000000
--- a/pyload/plugins/hoster/StorageTo.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class StorageTo(DeadHoster):
- __name = "StorageTo"
- __type = "hoster"
- __version = "0.01"
-
- __pattern = r'http://(?:www\.)?storage\.to/get/.+'
-
- __description = """Storage.to hoster plugin"""
- __license = "GPLv3"
- __authors = [("mkaay", "mkaay@mkaay.de")]
-
-
-getInfo = create_getInfo(StorageTo)
diff --git a/pyload/plugins/hoster/StreamCz.py b/pyload/plugins/hoster/StreamCz.py
deleted file mode 100644
index ca42c8669..000000000
--- a/pyload/plugins/hoster/StreamCz.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Hoster import Hoster
-
-
-def getInfo(urls):
- result = []
-
- for url in urls:
-
- html = getURL(url)
- if re.search(StreamCz.OFFLINE_PATTERN, html):
- # File offline
- result.append((url, 0, 1, url))
- else:
- result.append((url, 0, 2, url))
- yield result
-
-
-class StreamCz(Hoster):
- __name = "StreamCz"
- __type = "hoster"
- __version = "0.20"
-
- __pattern = r'https?://(?:www\.)?stream\.cz/[^/]+/\d+.*'
-
- __description = """Stream.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<link rel="video_src" href="http://www\.stream\.cz/\w+/(\d+)-([^"]+)" />'
- OFFLINE_PATTERN = r'<h1 class="commonTitle">Str.nku nebylo mo.n. nal.zt \(404\)</h1>'
-
- CDN_PATTERN = r'<param name="flashvars" value="[^"]*&id=(?P<ID>\d+)(?:&cdnLQ=(?P<cdnLQ>\d*))?(?:&cdnHQ=(?P<cdnHQ>\d*))?(?:&cdnHD=(?P<cdnHD>\d*))?&'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
-
-
- def process(self, pyfile):
- self.html = self.load(pyfile.url, decode=True)
-
- if re.search(self.OFFLINE_PATTERN, self.html):
- self.offline()
-
- m = re.search(self.CDN_PATTERN, self.html)
- if m is None:
- self.error(_("CDN_PATTERN not found"))
- cdn = m.groupdict()
- self.logDebug(cdn)
- for cdnkey in ("cdnHD", "cdnHQ", "cdnLQ"):
- if cdnkey in cdn and cdn[cdnkey] > '':
- cdnid = cdn[cdnkey]
- break
- else:
- self.fail(_("Stream URL not found"))
-
- m = re.search(self.NAME_PATTERN, self.html)
- if m is None:
- self.error(_("NAME_PATTERN not found"))
- pyfile.name = "%s-%s.%s.mp4" % (m.group(2), m.group(1), cdnkey[-2:])
-
- download_url = "http://cdn-dispatcher.stream.cz/?id=" + cdnid
- self.logInfo(_("STREAM: %s") % cdnkey[-2:], download_url)
- self.download(download_url)
diff --git a/pyload/plugins/hoster/StreamcloudEu.py b/pyload/plugins/hoster/StreamcloudEu.py
deleted file mode 100644
index 0a6262387..000000000
--- a/pyload/plugins/hoster/StreamcloudEu.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class StreamcloudEu(XFSHoster):
- __name = "StreamcloudEu"
- __type = "hoster"
- __version = "0.09"
-
- __pattern = r'http://(?:www\.)?streamcloud\.eu/\w{12}'
-
- __description = """Streamcloud.eu hoster plugin"""
- __license = "GPLv3"
- __authors = [("seoester", "seoester@googlemail.com")]
-
-
- HOSTER_DOMAIN = "streamcloud.eu"
-
- LINK_PATTERN = r'file: "(http://(stor|cdn)\d+\.streamcloud\.eu:?\d*/.*/video\.(mp4|flv))",'
-
-
- def setup(self):
- self.multiDL = True
- self.chunkLimit = 1
- self.resumeDownload = self.premium
-
-
-getInfo = create_getInfo(StreamcloudEu)
diff --git a/pyload/plugins/hoster/TurbobitNet.py b/pyload/plugins/hoster/TurbobitNet.py
deleted file mode 100644
index 4145316ec..000000000
--- a/pyload/plugins/hoster/TurbobitNet.py
+++ /dev/null
@@ -1,173 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import random
-import re
-import time
-
-from Crypto.Cipher import ARC4
-from binascii import hexlify, unhexlify
-from pycurl import HTTPHEADER
-from urllib import quote
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, timestamp
-
-
-class TurbobitNet(SimpleHoster):
- __name = "TurbobitNet"
- __type = "hoster"
- __version = "0.16"
-
- __pattern = r'http://(?:www\.)?turbobit\.net/(?:download/free/)?(?P<ID>\w+)'
-
- __description = """Turbobit.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("prOq", "")]
-
-
- URL_REPLACEMENTS = [(__pattern + ".*", "http://turbobit.net/\g<ID>.html")]
-
- COOKIES = [("turbobit.net", "user_lang", "en")]
-
- NAME_PATTERN = r'id="file-title">(?P<N>.+?)<'
- SIZE_PATTERN = r'class="file-size">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
- OFFLINE_PATTERN = r'<h2>File Not Found</h2>|html\(\'File (?:was )?not found'
-
- LINK_PATTERN = r'(?P<url>/download/redirect/[^"\']+)'
- LIMIT_WAIT_PATTERN = r'<div id=\'timeout\'>(\d+)<'
-
- CAPTCHA_PATTERN = r'<img alt="Captcha" src="(.+?)"'
-
-
- def handleFree(self):
- self.url = "http://turbobit.net/download/free/%s" % self.info['pattern']['ID']
- self.html = self.load(self.url, ref=True, decode=True)
-
- rtUpdate = self.getRtUpdate()
-
- self.solveCaptcha()
- self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
- self.url = self.getDownloadUrl(rtUpdate)
-
- self.wait()
- self.html = self.load(self.url)
- self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With:"])
- self.downloadFile()
-
-
- def solveCaptcha(self):
- for _i in xrange(5):
- m = re.search(self.LIMIT_WAIT_PATTERN, self.html)
- if m:
- wait_time = int(m.group(1))
- self.wait(wait_time, wait_time > 60)
- self.retry()
-
- action, inputs = self.parseHtmlForm("action='#'")
- if not inputs:
- self.error(_("Captcha form not found"))
- self.logDebug(inputs)
-
- if inputs['captcha_type'] == 'recaptcha':
- recaptcha = ReCaptcha(self)
- inputs['recaptcha_challenge_field'], inputs['recaptcha_response_field'] = recaptcha.challenge()
- else:
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m is None:
- self.error(_("captcha"))
- captcha_url = m.group(1)
- inputs['captcha_response'] = self.decryptCaptcha(captcha_url)
-
- self.logDebug(inputs)
- self.html = self.load(self.url, post=inputs)
-
- if '<div class="captcha-error">Incorrect, try again!<' in self.html:
- self.invalidCaptcha()
- else:
- self.correctCaptcha()
- break
- else:
- self.fail(_("Invalid captcha"))
-
-
- def getRtUpdate(self):
- rtUpdate = self.getStorage("rtUpdate")
- if not rtUpdate:
- if self.getStorage("version") != self.__version \
- or int(self.getStorage("timestamp", 0)) + 86400000 < timestamp():
- # that's right, we are even using jdownloader updates
- rtUpdate = getURL("http://update0.jdownloader.org/pluginstuff/tbupdate.js")
- rtUpdate = self.decrypt(rtUpdate.splitlines()[1])
- # but we still need to fix the syntax to work with other engines than rhino
- rtUpdate = re.sub(r'for each\(var (\w+) in(\[[^\]]+\])\)\{',
- r'zza=\2;for(var zzi=0;zzi<zza.length;zzi++){\1=zza[zzi];', rtUpdate)
- rtUpdate = re.sub(r"for\((\w+)=", r"for(var \1=", rtUpdate)
-
- self.setStorage("rtUpdate", rtUpdate)
- self.setStorage("timestamp", timestamp())
- self.setStorage("version", self.__version)
- else:
- self.logError(_("Unable to download, wait for update..."))
- self.tempOffline()
-
- return rtUpdate
-
-
- def getDownloadUrl(self, rtUpdate):
- self.req.http.lastURL = self.url
-
- m = re.search("(/\w+/timeout\.js\?\w+=)([^\"\'<>]+)", self.html)
- if m:
- url = "http://turbobit.net%s%s" % m.groups()
- else:
- url = "http://turbobit.net/files/timeout.js?ver=%s" % "".join(random.choice('0123456789ABCDEF') for _i in xrange(32))
-
- fun = self.load(url)
-
- self.setWait(65, False)
-
- for b in [1, 3]:
- self.jscode = "var id = \'%s\';var b = %d;var inn = \'%s\';%sout" % (
- self.info['pattern']['ID'], b, quote(fun), rtUpdate)
-
- try:
- out = self.js.eval(self.jscode)
- self.logDebug("URL", self.js.engine, out)
- if out.startswith('/download/'):
- return "http://turbobit.net%s" % out.strip()
- except Exception, e:
- self.logError(e)
- else:
- if self.retries >= 2:
- # retry with updated js
- self.delStorage("rtUpdate")
- self.retry()
-
-
- def decrypt(self, data):
- cipher = ARC4.new(hexlify('E\x15\xa1\x9e\xa3M\xa0\xc6\xa0\x84\xb6H\x83\xa8o\xa0'))
- return unhexlify(cipher.encrypt(unhexlify(data)))
-
-
- def getLocalTimeString(self):
- lt = time.localtime()
- tz = time.altzone if lt.tm_isdst else time.timezone
- return "%s GMT%+03d%02d" % (time.strftime("%a %b %d %Y %H:%M:%S", lt), -tz // 3600, tz % 3600)
-
-
- def handlePremium(self):
- self.logDebug("Premium download as user %s" % self.user)
- self.downloadFile()
-
-
- def downloadFile(self):
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("Download link not found"))
- self.url = "http://turbobit.net" + m.group('url')
- self.download(self.url)
-
-
-getInfo = create_getInfo(TurbobitNet)
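The getRtUpdate() comments above mention rewriting Rhino-only JavaScript ("for each" loops) so other JS engines can evaluate it. A worked example of those two re.sub() calls on a made-up snippet:

    import re

    js = "for each(var q in[1,2,3]){sum+=q;} for(i=0;i<3;i++){}"
    js = re.sub(r'for each\(var (\w+) in(\[[^\]]+\])\)\{',
                r'zza=\2;for(var zzi=0;zzi<zza.length;zzi++){\1=zza[zzi];', js)
    js = re.sub(r"for\((\w+)=", r"for(var \1=", js)
    print js
    # zza=[1,2,3];for(var zzi=0;zzi<zza.length;zzi++){q=zza[zzi];sum+=q;} for(var i=0;i<3;i++){}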
diff --git a/pyload/plugins/hoster/TurbouploadCom.py b/pyload/plugins/hoster/TurbouploadCom.py
deleted file mode 100644
index 23f66da8c..000000000
--- a/pyload/plugins/hoster/TurbouploadCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class TurbouploadCom(DeadHoster):
- __name = "TurbouploadCom"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'http://(?:www\.)?turboupload\.com/(\w+).*'
-
- __description = """Turboupload.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(TurbouploadCom)
diff --git a/pyload/plugins/hoster/TusfilesNet.py b/pyload/plugins/hoster/TusfilesNet.py
deleted file mode 100644
index f63d3ed8e..000000000
--- a/pyload/plugins/hoster/TusfilesNet.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class TusfilesNet(XFSHoster):
- __name = "TusfilesNet"
- __type = "hoster"
- __version = "0.07"
-
- __pattern = r'https?://(?:www\.)?tusfiles\.net/\w{12}'
-
- __description = """Tusfiles.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com"),
- ("guidobelix", "guidobelix@hotmail.it")]
-
-
- HOSTER_DOMAIN = "tusfiles.net"
-
- INFO_PATTERN = r'\](?P<N>.+) - (?P<S>[\d.,]+) (?P<U>[\w^_]+)\['
- OFFLINE_PATTERN = r'>File Not Found|<Title>TusFiles - Fast Sharing Files!'
-
-
- def setup(self):
- self.multiDL = False
- self.chunkLimit = -1
- self.resumeDownload = True
-
-
- def handlePremium(self):
- return self.handleFree()
-
-
-getInfo = create_getInfo(TusfilesNet)
diff --git a/pyload/plugins/hoster/TwoSharedCom.py b/pyload/plugins/hoster/TwoSharedCom.py
deleted file mode 100644
index d73df4d0d..000000000
--- a/pyload/plugins/hoster/TwoSharedCom.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class TwoSharedCom(SimpleHoster):
- __name = "TwoSharedCom"
- __type = "hoster"
- __version = "0.12"
-
- __pattern = r'http://(?:www\.)?2shared\.com/(account/)?(download|get|file|document|photo|video|audio)/.*'
-
- __description = """2Shared.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<h1>(?P<N>.*)</h1>'
- SIZE_PATTERN = r'<span class="dtitle">File size:</span>\s*(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
- OFFLINE_PATTERN = r'The file link that you requested is not valid\.|This file was deleted\.'
-
- LINK_PATTERN = r'window.location =\'(.+?)\';'
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
-
-
- def handleFree(self):
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("Download link"))
-
- link = m.group(1)
- self.download(link)
-
-
-getInfo = create_getInfo(TwoSharedCom)
diff --git a/pyload/plugins/hoster/UlozTo.py b/pyload/plugins/hoster/UlozTo.py
deleted file mode 100644
index ca832a3a9..000000000
--- a/pyload/plugins/hoster/UlozTo.py
+++ /dev/null
@@ -1,164 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-import time
-
-from pyload.utils import json_loads
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-def convertDecimalPrefix(m):
- # decimal prefixes used in filesize and traffic
- return ("%%.%df" % {'k': 3, 'M': 6, 'G': 9}[m.group(2)] % float(m.group(1))).replace('.', '')
-
-
-class UlozTo(SimpleHoster):
- __name = "UlozTo"
- __type = "hoster"
- __version = "1.00"
-
- __pattern = r'http://(?:www\.)?(uloz\.to|ulozto\.(cz|sk|net)|bagruj\.cz|zachowajto\.pl)/(?:live/)?(?P<id>\w+/[^/?]*)'
-
- __description = """Uloz.to hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- INFO_PATTERN = r'<p>File <strong>(?P<N>[^<]+)</strong> is password protected</p>'
- NAME_PATTERN = r'<title>(?P<N>[^<]+) \| Uloz\.to</title>'
- SIZE_PATTERN = r'<span id="fileSize">.*?(?P<S>[\d.,]+\s[kMG]?B)</span>'
- OFFLINE_PATTERN = r'<title>404 - Page not found</title>|<h1 class="h1">File (has been deleted|was banned)</h1>'
-
- SIZE_REPLACEMENTS = [('([\d.]+)\s([kMG])B', convertDecimalPrefix)]
- URL_REPLACEMENTS = [(r"(?<=http://)([^/]+)", "www.ulozto.net")]
-
- ADULT_PATTERN = r'<form action="(?P<link>[^\"]*)" method="post" id="frm-askAgeForm">'
- PASSWD_PATTERN = r'<div class="passwordProtectedFile">'
- VIPLINK_PATTERN = r'<a href="[^"]*\?disclaimer=1" class="linkVip">'
- FREE_URL_PATTERN = r'<div class="freeDownloadForm"><form action="([^"]+)"'
- PREMIUM_URL_PATTERN = r'<div class="downloadForm"><form action="([^"]+)"'
- TOKEN_PATTERN = r'<input type="hidden" name="_token_" id="[^\"]*" value="(?P<token>.+?)"'
-
-
- def setup(self):
- self.multiDL = self.premium
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- pyfile.url = re.sub(r"(?<=http://)([^/]+)", "www.ulozto.net", pyfile.url)
- self.html = self.load(pyfile.url, decode=True, cookies=True)
-
- if re.search(self.ADULT_PATTERN, self.html):
- self.logInfo(_("Adult content confirmation needed"))
-
- m = re.search(self.TOKEN_PATTERN, self.html)
- if m is None:
- self.error(_("TOKEN_PATTERN not found"))
- token = m.group(1)
-
- self.html = self.load(pyfile.url, get={"do": "askAgeForm-submit"},
- post={"agree": "Confirm", "_token_": token}, cookies=True)
-
- passwords = self.getPassword().splitlines()
- while self.PASSWD_PATTERN in self.html:
- if passwords:
- password = passwords.pop(0)
- self.logInfo(_("Password protected link, trying ") + password)
- self.html = self.load(pyfile.url, get={"do": "passwordProtectedForm-submit"},
- post={"password": password, "password_send": 'Send'}, cookies=True)
- else:
- self.fail(_("No or incorrect password"))
-
- if re.search(self.VIPLINK_PATTERN, self.html):
- self.html = self.load(pyfile.url, get={"disclaimer": "1"})
-
- self.getFileInfo()
-
- if self.premium and self.checkTrafficLeft():
- self.handlePremium()
- else:
- self.handleFree()
-
- self.doCheckDownload()
-
-
- def handleFree(self):
- action, inputs = self.parseHtmlForm('id="frm-downloadDialog-freeDownloadForm"')
- if not action or not inputs:
- self.error(_("Free download form not found"))
-
- self.logDebug("inputs.keys = " + str(inputs.keys()))
- # get and decrypt captcha
- if all(key in inputs for key in ("captcha_value", "captcha_id", "captcha_key")):
- # Old version - last seen 9.12.2013
- self.logDebug('Using "old" version')
-
- captcha_value = self.decryptCaptcha("http://img.uloz.to/captcha/%s.png" % inputs['captcha_id'])
- self.logDebug("CAPTCHA ID: " + inputs['captcha_id'] + ", CAPTCHA VALUE: " + captcha_value)
-
- inputs.update({'captcha_id': inputs['captcha_id'], 'captcha_key': inputs['captcha_key'], 'captcha_value': captcha_value})
-
- elif all(key in inputs for key in ("captcha_value", "timestamp", "salt", "hash")):
- # New version - better to get new parameters (like captcha reload) because of image url - since 6.12.2013
- self.logDebug('Using "new" version')
-
- xapca = self.load("http://www.ulozto.net/reloadXapca.php", get={"rnd": str(int(time.time()))})
- self.logDebug("xapca = " + str(xapca))
-
- data = json_loads(xapca)
- captcha_value = self.decryptCaptcha(str(data['image']))
- self.logDebug("CAPTCHA HASH: " + data['hash'], "CAPTCHA SALT: " + str(data['salt']), "CAPTCHA VALUE: " + captcha_value)
-
- inputs.update({'timestamp': data['timestamp'], 'salt': data['salt'], 'hash': data['hash'], 'captcha_value': captcha_value})
- else:
- self.error(_("CAPTCHA form changed"))
-
- self.multiDL = True
- self.download("http://www.ulozto.net" + action, post=inputs, cookies=True, disposition=True)
-
-
- def handlePremium(self):
- self.download(self.pyfile.url + "?do=directDownload", disposition=True)
- #parsed_url = self.findDownloadURL(premium=True)
- #self.download(parsed_url, post={"download": "Download"})
-
-
- def findDownloadURL(self, premium=False):
- msg = _("%s link" % ("Premium" if premium else "Free"))
- m = re.search(self.PREMIUM_URL_PATTERN if premium else self.FREE_URL_PATTERN, self.html)
- if m is None:
- self.error(msg)
- parsed_url = "http://www.ulozto.net" + m.group(1)
- self.logDebug("%s: %s" % (msg, parsed_url))
- return parsed_url
-
-
- def doCheckDownload(self):
- check = self.checkDownload({
- "wrong_captcha": re.compile(r'<ul class="error">\s*<li>Error rewriting the text.</li>'),
- "offline": re.compile(self.OFFLINE_PATTERN),
- "passwd": self.PASSWD_PATTERN,
- "server_error": 'src="http://img.ulozto.cz/error403/vykricnik.jpg"', # parallel dl, server overload etc.
- "not_found": "<title>Ulož.to</title>"
- })
-
- if check == "wrong_captcha":
- #self.delStorage("captcha_id")
- #self.delStorage("captcha_text")
- self.invalidCaptcha()
- self.retry(reason=_("Wrong captcha code"))
- elif check == "offline":
- self.offline()
- elif check == "passwd":
- self.fail(_("Wrong password"))
- elif check == "server_error":
- self.logError(_("Server error, try downloading later"))
- self.multiDL = False
- self.wait(1 * 60 * 60, True)
- self.retry()
- elif check == "not_found":
- self.fail(_("Server error - file not downloadable"))
-
-
-getInfo = create_getInfo(UlozTo)
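A quick worked example of convertDecimalPrefix() above, which SIZE_REPLACEMENTS uses to turn a decimal size prefix into a plain byte count before SimpleHoster parses it:

    import re

    SIZE_RE = re.compile(r'([\d.]+)\s([kMG])B')

    def convertDecimalPrefix(m):
        return ("%%.%df" % {'k': 3, 'M': 6, 'G': 9}[m.group(2)] % float(m.group(1))).replace('.', '')

    print SIZE_RE.sub(convertDecimalPrefix, "1.5 MB")   # -> 1500000
    print SIZE_RE.sub(convertDecimalPrefix, "820 kB")   # -> 820000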
diff --git a/pyload/plugins/hoster/UloziskoSk.py b/pyload/plugins/hoster/UloziskoSk.py
deleted file mode 100644
index 1271053c0..000000000
--- a/pyload/plugins/hoster/UloziskoSk.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class UloziskoSk(SimpleHoster):
- __name = "UloziskoSk"
- __type = "hoster"
- __version = "0.24"
-
- __pattern = r'http://(?:www\.)?ulozisko\.sk/.*'
-
- __description = """Ulozisko.sk hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<div class="down1">(?P<N>[^<]+)</div>'
- SIZE_PATTERN = ur'Veľkosť súboru: <strong>(?P<S>[\d.,]+) (?P<U>[\w^_]+)</strong><br />'
- OFFLINE_PATTERN = ur'<span class = "red">Zadaný súbor neexistuje z jedného z nasledujúcich dôvodov:</span>'
-
- LINK_PATTERN = r'<form name = "formular" action = "([^"]+)" method = "post">'
- ID_PATTERN = r'<input type = "hidden" name = "id" value = "([^"]+)" />'
- CAPTCHA_PATTERN = r'<img src="(/obrazky/obrazky\.php\?fid=[^"]+)" alt="" />'
- IMG_PATTERN = ur'<strong>PRE ZVÄČŠENIE KLIKNITE NA OBRÁZOK</strong><br /><a href = "([^"]+)">'
-
-
- def process(self, pyfile):
- self.html = self.load(pyfile.url, decode=True)
- self.getFileInfo()
-
- m = re.search(self.IMG_PATTERN, self.html)
- if m:
- url = "http://ulozisko.sk" + m.group(1)
- self.download(url)
- else:
- self.handleFree()
-
-
- def handleFree(self):
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("LINK_PATTERN not found"))
- parsed_url = 'http://www.ulozisko.sk' + m.group(1)
-
- m = re.search(self.ID_PATTERN, self.html)
- if m is None:
- self.error(_("ID_PATTERN not found"))
- id = m.group(1)
-
- self.logDebug("URL:" + parsed_url + ' ID:' + id)
-
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m is None:
- self.error(_("CAPTCHA_PATTERN not found"))
- captcha_url = 'http://www.ulozisko.sk' + m.group(1)
-
- captcha = self.decryptCaptcha(captcha_url, cookies=True)
-
- self.logDebug("CAPTCHA_URL:" + captcha_url + ' CAPTCHA:' + captcha)
-
- self.download(parsed_url, post={
- "antispam": captcha,
- "id": id,
- "name": self.pyfile.name,
- "but": "++++STIAHNI+S%DABOR++++"
- })
-
-
-getInfo = create_getInfo(UloziskoSk)
diff --git a/pyload/plugins/hoster/UnibytesCom.py b/pyload/plugins/hoster/UnibytesCom.py
deleted file mode 100644
index d0d65840d..000000000
--- a/pyload/plugins/hoster/UnibytesCom.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class UnibytesCom(SimpleHoster):
- __name = "UnibytesCom"
- __type = "hoster"
- __version = "0.11"
-
- __pattern = r'https?://(?:www\.)?unibytes\.com/[\w .-]{11}B'
-
- __description = """UniBytes.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- HOSTER_DOMAIN = "unibytes.com"
-
- INFO_PATTERN = r'<span[^>]*?id="fileName"[^>]*>(?P<N>[^>]+)</span>\s*\((?P<S>\d.*?)\)'
-
- WAIT_PATTERN = r'Wait for <span id="slowRest">(\d+)</span> sec'
- LINK_PATTERN = r'<a href="([^"]+)">Download</a>'
-
-
- def handleFree(self):
- domain = "http://www.%s/" % self.HOSTER_DOMAIN
- action, post_data = self.parseHtmlForm('id="startForm"')
-
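-        # step through the multi-step download form (timer -> captcha -> last) until a redirect or the final link appears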
- for _i in xrange(8):
- self.logDebug(action, post_data)
- self.html = self.load(urljoin(domain, action), post=post_data, follow_location=False)
-
- m = re.search(r'location:\s*(\S+)', self.req.http.header, re.I)
- if m:
- url = m.group(1)
- break
-
- if '>Somebody else is already downloading using your IP-address<' in self.html:
- self.wait(10 * 60, True)
- self.retry()
-
- if post_data['step'] == 'last':
- m = re.search(self.LINK_PATTERN, self.html)
- if m:
- url = m.group(1)
- self.correctCaptcha()
- break
- else:
- self.invalidCaptcha()
-
- last_step = post_data['step']
- action, post_data = self.parseHtmlForm('id="stepForm"')
-
- if last_step == 'timer':
- m = re.search(self.WAIT_PATTERN, self.html)
- self.wait(int(m.group(1)) if m else 60, False)
- elif last_step in ("captcha", "last"):
- post_data['captcha'] = self.decryptCaptcha(urljoin(domain, "/captcha.jpg"))
- else:
- self.fail(_("No valid captcha code entered"))
-
- self.download(url)
-
-
-getInfo = create_getInfo(UnibytesCom)
diff --git a/pyload/plugins/hoster/UnrestrictLi.py b/pyload/plugins/hoster/UnrestrictLi.py
deleted file mode 100644
index 2ac39eeb5..000000000
--- a/pyload/plugins/hoster/UnrestrictLi.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from datetime import datetime, timedelta
-
-from pyload.utils import json_loads
-from pyload.plugins.Hoster import Hoster
-
-
-def secondsToMidnight(gmt=0):
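-    """Return the seconds left until 00:10 after midnight in the given GMT offset (used to wait out daily limits)."""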
- now = datetime.utcnow() + timedelta(hours=gmt)
-    if now.hour == 0 and now.minute < 10:
- midnight = now
- else:
- midnight = now + timedelta(days=1)
- midnight = midnight.replace(hour=0, minute=10, second=0, microsecond=0)
- return int((midnight - now).total_seconds())
-
-
-class UnrestrictLi(Hoster):
- __name = "UnrestrictLi"
- __type = "hoster"
- __version = "0.12"
-
- __pattern = r'https?://(?:[^/]*\.)?(unrestrict|unr)\.li'
-
- __description = """Unrestrict.li hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- def setup(self):
- self.chunkLimit = 16
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- if re.match(self.__pattern, pyfile.url):
- new_url = pyfile.url
- elif not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "Unrestrict.li")
- self.fail(_("No Unrestrict.li account provided"))
- else:
- self.logDebug("Old URL: %s" % pyfile.url)
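-            # ask the unrestrict.li API up to 5 times to generate a direct link for the original URL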
- for _i in xrange(5):
- page = self.load('https://unrestrict.li/unrestrict.php',
- post={'link': pyfile.url, 'domain': 'long'})
- self.logDebug("JSON data: " + page)
- if page != '':
- break
- else:
- self.logInfo(_("Unable to get API data, waiting 1 minute and retry"))
- self.retry(5, 60, "Unable to get API data")
-
- if 'Expired session' in page or ("You are not allowed to "
- "download from this host" in page and self.premium):
- self.account.relogin(self.user)
- self.retry()
- elif "File offline" in page:
- self.offline()
- elif "You are not allowed to download from this host" in page:
- self.fail(_("You are not allowed to download from this host"))
- elif "You have reached your daily limit for this host" in page:
- self.logWarning(_("Reached daily limit for this host"))
- self.retry(5, secondsToMidnight(gmt=2), "Daily limit for this host reached")
- elif "ERROR_HOSTER_TEMPORARILY_UNAVAILABLE" in page:
- self.logInfo(_("Hoster temporarily unavailable, waiting 1 minute and retry"))
- self.retry(5, 60, "Hoster is temporarily unavailable")
- page = json_loads(page)
- new_url = page.keys()[0]
- self.api_data = page[new_url]
-
- if new_url != pyfile.url:
- self.logDebug("New URL: " + new_url)
-
- if hasattr(self, 'api_data'):
- self.setNameSize()
-
- self.download(new_url, disposition=True)
-
- if self.getConfig("history"):
- self.load("https://unrestrict.li/history/", get={'delete': "all"})
- self.logInfo(_("Download history deleted"))
-
-
- def setNameSize(self):
- if 'name' in self.api_data:
- self.pyfile.name = self.api_data['name']
- if 'size' in self.api_data:
- self.pyfile.size = self.api_data['size']
diff --git a/pyload/plugins/hoster/UpleaCom.py b/pyload/plugins/hoster/UpleaCom.py
deleted file mode 100644
index 2ec420962..000000000
--- a/pyload/plugins/hoster/UpleaCom.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urljoin
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class UpleaCom(XFSHoster):
- __name = "UpleaCom"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'https?://(?:www\.)?uplea\.com/dl/\w{15}'
-
- __description = """Uplea.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Redleon", "")]
-
-
- HOSTER_DOMAIN = "uplea.com"
-
- NAME_PATTERN = r'class="agmd size18">(?P<N>.+?)<'
-    SIZE_PATTERN = r'size14">(?P<S>[\d.,]+) (?P<U>[\w^_]+)</span>'
-
- OFFLINE_PATTERN = r'>You followed an invalid or expired link'
-
-    LINK_PATTERN = r'"(https?://\w+\.uplea\.com/anonym/.*?)"'
- WAIT_PATTERN = r'timeText:([\d.]+),'
- STEP_PATTERN = r'<a href="(/step/.+)">'
-
-
- def setup(self):
- self.multiDL = False
- self.chunkLimit = 1
- self.resumeDownload = True
-
-
- def handleFree(self):
- m = re.search(self.STEP_PATTERN, self.html)
- if m is None:
- self.error("STEP_PATTERN not found")
-
- self.html = self.load(urljoin("http://uplea.com/", m.group(1)))
-
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- self.wait(int(m.group(1)), True)
- self.retry()
-
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error("LINK_PATTERN not found")
-
- self.wait(15)
- self.download(m.group(1), disposition=True)
-
-
-getInfo = create_getInfo(UpleaCom)
diff --git a/pyload/plugins/hoster/UploadStationCom.py b/pyload/plugins/hoster/UploadStationCom.py
deleted file mode 100644
index f29bcef50..000000000
--- a/pyload/plugins/hoster/UploadStationCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class UploadStationCom(DeadHoster):
- __name = "UploadStationCom"
- __type = "hoster"
- __version = "0.52"
-
- __pattern = r'http://(?:www\.)?uploadstation\.com/file/(?P<id>\w+)'
-
- __description = """UploadStation.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("fragonib", "fragonib[AT]yahoo[DOT]es"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(UploadStationCom)
diff --git a/pyload/plugins/hoster/UploadableCh.py b/pyload/plugins/hoster/UploadableCh.py
deleted file mode 100644
index 7c85ef486..000000000
--- a/pyload/plugins/hoster/UploadableCh.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import sleep
-
-from pyload.plugins.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class UploadableCh(SimpleHoster):
- __name = "UploadableCh"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?uploadable\.ch/file/(?P<ID>\w+)'
-
- __description = """Uploadable.ch hoster plugin"""
- __license = "GPLv3"
- __authors = [("zapp-brannigan", "fuerst.reinje@web.de"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- FILE_INFO_PATTERN = r'div id=\"file_name\" title=.*>(?P<N>.+)<span class=\"filename_normal\">\((?P<S>[\d.]+) (?P<U>\w+)\)</span><'
-
- OFFLINE_PATTERN = r'>(File not available|This file is no longer available)'
- TEMP_OFFLINE_PATTERN = r'<div class="icon_err">'
-
- WAIT_PATTERN = r'data-time="(\d+)" data-format'
-
- FILE_URL_REPLACEMENTS = [(__pattern + ".*", r'http://www.uploadable.ch/file/\g<ID>')]
-
-
- def setup(self):
- self.multiDL = False
- self.chunkLimit = 1
-
-
- def handleFree(self):
- # Click the "free user" button and wait
- a = self.load(self.pyfile.url, cookies=True, post={'downloadLink': "wait"}, decode=True)
- self.logDebug(a)
-
- m = re.search(self.WAIT_PATTERN, a)
- if m is not None:
- self.wait(int(m.group(1))) #: Expected output: {"waitTime":30}
- else:
- self.error("WAIT_PATTERN")
-
-        # Make the recaptcha appear and show it in the pyLoad interface
- b = self.load(self.pyfile.url, cookies=True, post={'checkDownload': "check"}, decode=True)
- self.logDebug(b) #: Expected output: {"success":"showCaptcha"}
-
- recaptcha = ReCaptcha(self)
-
- challenge, response = recaptcha.challenge(self.RECAPTCHA_KEY)
-
- # Submit the captcha solution
- self.load("http://www.uploadable.ch/checkReCaptcha.php",
- cookies=True,
- post={'recaptcha_challenge_field' : challenge,
- 'recaptcha_response_field' : response,
- 'recaptcha_shortencode_field': self.info['ID']},
- decode=True)
-
- self.wait(3)
-
- # Get ready for downloading
- self.load(self.pyfile.url, cookies=True, post={'downloadLink': "show"}, decode=True)
-
- self.wait(3)
-
- # Download the file
- self.download(self.pyfile.url, cookies=True, post={'download': "normal"}, disposition=True)
-
-
- def checkFile(self):
- check = self.checkDownload({'wait_or_reconnect': re.compile("Please wait for"),
- 'is_html' : re.compile("<head>")})
-
- if check == "wait_or_reconnect":
- self.logInfo("Downloadlimit reached, please wait or reconnect")
- self.wait(60 * 60, True)
- self.retry()
-
- elif check == "is_html":
- self.error("Downloaded file is an html file")
-
-
-getInfo = create_getInfo(UploadableCh)
diff --git a/pyload/plugins/hoster/UploadboxCom.py b/pyload/plugins/hoster/UploadboxCom.py
deleted file mode 100644
index b8cbf3963..000000000
--- a/pyload/plugins/hoster/UploadboxCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class UploadboxCom(DeadHoster):
-    __name = "UploadboxCom"
- __type = "hoster"
- __version = "0.05"
-
- __pattern = r'http://(?:www\.)?uploadbox\.com/files/.+'
-
- __description = """UploadBox.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(UploadboxCom)
diff --git a/pyload/plugins/hoster/UploadedTo.py b/pyload/plugins/hoster/UploadedTo.py
deleted file mode 100644
index cfb92b0dd..000000000
--- a/pyload/plugins/hoster/UploadedTo.py
+++ /dev/null
@@ -1,245 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://ul.to/044yug9o
-# http://ul.to/gzfhd0xs
-
-import re
-
-from time import sleep
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Hoster import Hoster
-from pyload.plugins.Plugin import chunks
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.utils import html_unescape, parseFileSize
-
-
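-# API key for uploaded.net's filemultiple endpoint, stored base64-encoded and decoded at import time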
-key = "bGhGMkllZXByd2VEZnU5Y2NXbHhYVlZ5cEE1bkEzRUw=".decode('base64')
-
-
-def getID(url):
- """ returns id from file url"""
- m = re.match(UploadedTo.__pattern, url)
- return m.group('ID')
-
-
-def getAPIData(urls):
- post = {"apikey": key}
-
- idMap = {}
-
- for i, url in enumerate(urls):
- id = getID(url)
- post['id_%s' % i] = id
- idMap[id] = url
-
- for _i in xrange(5):
- api = unicode(getURL("http://uploaded.net/api/filemultiple", post=post, decode=False), 'iso-8859-1')
- if api != "can't find request":
- break
- else:
- sleep(3)
-
- result = {}
-
- if api:
- for line in api.splitlines():
- data = line.split(",", 4)
- if data[1] in idMap:
- result[data[1]] = (data[0], data[2], data[4], data[3], idMap[data[1]])
-
- return result
-
-
-def parseFileInfo(self, url='', html=''):
- if not html and hasattr(self, "html"):
- html = self.html
-
- name = url
- size = 0
- fileid = None
-
- if re.search(self.OFFLINE_PATTERN, html):
- # File offline
- status = 1
- else:
- m = re.search(self.INFO_PATTERN, html)
- if m:
- name, fileid = html_unescape(m.group('N')), m.group('ID')
- size = parseFileSize(m.group('S'))
- status = 2
- else:
- status = 3
-
- return name, size, status, fileid
-
-
-def getInfo(urls):
- for chunk in chunks(urls, 80):
- result = []
-
- api = getAPIData(chunk)
-
- for data in api.itervalues():
- if data[0] == "online":
- result.append((html_unescape(data[2]), data[1], 2, data[4]))
-
- elif data[0] == "offline":
- result.append((data[4], 0, 1, data[4]))
-
- yield result
-
-
-class UploadedTo(Hoster):
- __name = "UploadedTo"
- __type = "hoster"
- __version = "0.75"
-
- __pattern = r'https?://(?:www\.)?(uploaded\.(to|net)|ul\.to)(/file/|/?\?id=|.*?&id=|/)(?P<ID>\w+)'
-
- __description = """Uploaded.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org"),
- ("mkaay", "mkaay@mkaay.de"),
- ("zoidberg", "zoidberg@mujmail.cz"),
- ("netpok", "netpok@gmail.com"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- INFO_PATTERN = r'<a href="file/(?P<ID>\w+)" id="filename">(?P<N>[^<]+)</a> &nbsp;\s*<small[^>]*>(?P<S>[^<]+)</small>'
- OFFLINE_PATTERN = r'<small class="cL">Error: 404</small>'
- DL_LIMIT_PATTERN = r'You have reached the max. number of possible free downloads for this hour'
-
-
- def setup(self):
- self.multiDL = self.resumeDownload = self.premium
- self.chunkLimit = 1 # critical problems with more chunks
-
- self.fileID = getID(self.pyfile.url)
- self.pyfile.url = "http://uploaded.net/file/%s" % self.fileID
-
-
- def process(self, pyfile):
- self.load("http://uploaded.net/language/en", just_header=True)
-
- api = getAPIData([pyfile.url])
-
- # TODO: fallback to parse from site, because api sometimes delivers wrong status codes
-
- if not api:
- self.logWarning(_("No response for API call"))
-
- self.html = unicode(self.load(pyfile.url, decode=False), 'iso-8859-1')
- name, size, status, self.fileID = parseFileInfo(self)
- self.logDebug(name, size, status, self.fileID)
- if status == 1:
- self.offline()
- elif status == 2:
- pyfile.name, pyfile.size = name, size
- else:
- self.error(_("file info"))
-
- elif api == 'Access denied':
- self.fail(_("API key invalid"))
-
- else:
- if self.fileID not in api:
- self.offline()
-
- self.data = api[self.fileID]
- if self.data[0] != "online":
- self.offline()
-
- pyfile.name = html_unescape(self.data[2])
-
- # pyfile.name = self.get_file_name()
-
- if self.premium:
- self.handlePremium()
- else:
- self.handleFree()
-
-
- def handlePremium(self):
- info = self.account.getAccountInfo(self.user, True)
- self.logDebug("%(name)s: Use Premium Account (%(left)sGB left)" % {"name": self.__name,
- "left": info['trafficleft'] / 1024 / 1024})
- if int(self.data[1]) / 1024 > info['trafficleft']:
- self.logInfo(_("Not enough traffic left"))
- self.account.empty(self.user)
- self.resetAccount()
- self.fail(_("Traffic exceeded"))
-
- header = self.load("http://uploaded.net/file/%s" % self.fileID, just_header=True)
- if 'location' in header:
- #Direct download
- self.logDebug("Direct download link detected")
- self.download(header['location'])
- else:
- #Indirect download
- self.html = self.load("http://uploaded.net/file/%s" % self.fileID)
- m = re.search(r'<div class="tfree".*\s*<form method="post" action="(.*?)"', self.html)
- if m is None:
-                self.fail(_("Download URL not found. Try to enable direct downloads"))
- url = m.group(1)
- self.download(url, post={})
-
-
- def handleFree(self):
- self.html = self.load(self.pyfile.url, decode=True)
-
- if 'var free_enabled = false;' in self.html:
- self.logError(_("Free-download capacities exhausted"))
- self.retry(24, 5 * 60)
-
- m = re.search(r"Current waiting period: <span>(\d+)</span> seconds", self.html)
- if m is None:
- self.fail(_("File not downloadable for free users"))
- self.setWait(int(m.group(1)))
-
- self.html = self.load("http://uploaded.net/js/download.js", decode=True)
-
- url = "http://uploaded.net/io/ticket/captcha/%s" % self.fileID
- downloadURL = ""
-
- recaptcha = ReCaptcha(self)
-
- for _i in xrange(5):
- challenge, response = recaptcha.challenge()
- options = {"recaptcha_challenge_field": challenge, "recaptcha_response_field": response}
- self.wait()
-
- result = self.load(url, post=options)
- self.logDebug("Result: %s" % result)
-
- if "limit-size" in result:
- self.fail(_("File too big for free download"))
- elif "limit-slot" in result: # Temporary restriction so just wait a bit
- self.setWait(30 * 60, True)
- self.wait()
- self.retry()
- elif "limit-parallel" in result:
- self.fail(_("Cannot download in parallel"))
- elif "limit-dl" in result or self.DL_LIMIT_PATTERN in result: # limit-dl
- self.setWait(3 * 60 * 60, True)
- self.wait()
- self.retry()
- elif '"err":"captcha"' in result:
- self.invalidCaptcha()
- elif "type:'download'" in result:
- self.correctCaptcha()
- downloadURL = re.search("url:'([^']+)", result).group(1)
- break
- else:
- self.error(_("Unknown error: %s") % result)
-
- if not downloadURL:
- self.fail(_("No Download url retrieved/all captcha attempts failed"))
-
- self.download(downloadURL, disposition=True)
- check = self.checkDownload({"limit-dl": self.DL_LIMIT_PATTERN})
- if check == "limit-dl":
- self.setWait(3 * 60 * 60, True)
- self.wait()
- self.retry()
diff --git a/pyload/plugins/hoster/UploadhereCom.py b/pyload/plugins/hoster/UploadhereCom.py
deleted file mode 100644
index 798b3d817..000000000
--- a/pyload/plugins/hoster/UploadhereCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class UploadhereCom(DeadHoster):
- __name = "UploadhereCom"
- __type = "hoster"
- __version = "0.12"
-
- __pattern = r'http://(?:www\.)?uploadhere\.com/\w{10}'
-
- __description = """Uploadhere.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(UploadhereCom)
diff --git a/pyload/plugins/hoster/UploadheroCom.py b/pyload/plugins/hoster/UploadheroCom.py
deleted file mode 100644
index 74229a3a3..000000000
--- a/pyload/plugins/hoster/UploadheroCom.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://uploadhero.co/dl/wQBRAVSM
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class UploadheroCom(SimpleHoster):
- __name = "UploadheroCom"
- __type = "hoster"
- __version = "0.16"
-
- __pattern = r'http://(?:www\.)?uploadhero\.com?/dl/\w+'
-
- __description = """UploadHero.co plugin"""
- __license = "GPLv3"
- __authors = [("mcmyst", "mcmyst@hotmail.fr"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'<div class="nom_de_fichier">(?P<N>.*?)</div>'
- SIZE_PATTERN = r'Taille du fichier : </span><strong>(?P<S>.*?)</strong>'
- OFFLINE_PATTERN = r'<p class="titre_dl_2">|<div class="raison"><strong>Le lien du fichier ci-dessus n\'existe plus.'
-
- COOKIES = [("uploadhero.co", "lang", "en")]
-
- IP_BLOCKED_PATTERN = r'href="(/lightbox_block_download\.php\?min=.*?)"'
- IP_WAIT_PATTERN = r'<span id="minutes">(\d+)</span>.*\s*<span id="seconds">(\d+)</span>'
-
- CAPTCHA_PATTERN = r'"(/captchadl\.php\?\w+)"'
- FREE_URL_PATTERN = r'var magicomfg = \'<a href="(http://[^<>"]*?)"|"(http://storage\d+\.uploadhero\.co/\?d=\w+/[^<>"/]+)"'
- PREMIUM_URL_PATTERN = r'<a href="([^"]+)" id="downloadnow"'
-
-
- def handleFree(self):
- self.checkErrors()
-
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m is None:
- self.error(_("CAPTCHA_PATTERN not found"))
- captcha_url = "http://uploadhero.co" + m.group(1)
-
- for _i in xrange(5):
- captcha = self.decryptCaptcha(captcha_url)
- self.html = self.load(self.pyfile.url, get={"code": captcha})
- m = re.search(self.FREE_URL_PATTERN, self.html)
- if m:
- self.correctCaptcha()
- download_url = m.group(1) or m.group(2)
- break
- else:
- self.invalidCaptcha()
- else:
- self.fail(_("No valid captcha code entered"))
-
- self.download(download_url)
-
-
- def handlePremium(self):
- self.logDebug("%s: Use Premium Account" % self.__name)
- link = re.search(self.PREMIUM_URL_PATTERN, self.html).group(1)
- self.download(link)
-
-
- def checkErrors(self):
- m = re.search(self.IP_BLOCKED_PATTERN, self.html)
- if m:
- self.html = self.load("http://uploadhero.co%s" % m.group(1))
-
- m = re.search(self.IP_WAIT_PATTERN, self.html)
- wait_time = (int(m.group(1)) * 60 + int(m.group(2))) if m else 5 * 60
- self.wait(wait_time, True)
- self.retry()
-
- self.info.pop('error', None)
-
-
-getInfo = create_getInfo(UploadheroCom)
diff --git a/pyload/plugins/hoster/UploadingCom.py b/pyload/plugins/hoster/UploadingCom.py
deleted file mode 100644
index a6a3b2774..000000000
--- a/pyload/plugins/hoster/UploadingCom.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pycurl import HTTPHEADER
-
-from pyload.utils import json_loads
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, timestamp
-
-
-class UploadingCom(SimpleHoster):
- __name = "UploadingCom"
- __type = "hoster"
- __version = "0.39"
-
- __pattern = r'http://(?:www\.)?uploading\.com/files/(?:get/)?(?P<ID>\w+)'
-
- __description = """Uploading.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de"),
- ("mkaay", "mkaay@mkaay.de"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'id="file_title">(?P<N>.+)</'
- SIZE_PATTERN = r'size tip_container">(?P<S>[\d.,]+) (?P<U>[\w^_]+)<'
- OFFLINE_PATTERN = r'(Page|file) not found'
-
- COOKIES = [("uploading.com", "lang", "1"),
- (".uploading.com", "language", "1"),
- (".uploading.com", "setlang", "en"),
- (".uploading.com", "_lang", "en")]
-
-
- def process(self, pyfile):
-        if "/get/" not in pyfile.url:
- pyfile.url = pyfile.url.replace("/files", "/files/get")
-
- self.html = self.load(pyfile.url, decode=True)
- self.getFileInfo()
-
- if self.premium:
- self.handlePremium()
- else:
- self.handleFree()
-
-
- def handlePremium(self):
- postData = {'action': 'get_link',
- 'code': self.info['pattern']['ID'],
- 'pass': 'undefined'}
-
- self.html = self.load('http://uploading.com/files/get/?JsHttpRequest=%d-xml' % timestamp(), post=postData)
-        url = re.search(r'"link"\s*:\s*"(.*?)"', self.html)
-        if url is None:
-            raise Exception("Plugin defect")
-
-        url = url.group(1).replace("\\/", "/")
-        self.download(url)
-
-
- def handleFree(self):
- m = re.search('<h2>((Daily )?Download Limit)</h2>', self.html)
- if m:
- self.pyfile.error = m.group(1)
- self.logWarning(self.pyfile.error)
- self.retry(6, (6 * 60 if m.group(2) else 15) * 60, self.pyfile.error)
-
- ajax_url = "http://uploading.com/files/get/?ajax"
- self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
- self.req.http.lastURL = self.pyfile.url
-
- res = json_loads(self.load(ajax_url, post={'action': 'second_page', 'code': self.info['pattern']['ID']}))
-
- if 'answer' in res and 'wait_time' in res['answer']:
- wait_time = int(res['answer']['wait_time'])
- self.logInfo(_("Waiting %d seconds") % wait_time)
- self.wait(wait_time)
- else:
- self.error(_("No AJAX/WAIT"))
-
- res = json_loads(self.load(ajax_url, post={'action': 'get_link', 'code': self.info['pattern']['ID'], 'pass': 'false'}))
-
- if 'answer' in res and 'link' in res['answer']:
- url = res['answer']['link']
- else:
- self.error(_("No AJAX/URL"))
-
- self.html = self.load(url)
- m = re.search(r'<form id="file_form" action="(.*?)"', self.html)
- if m:
- url = m.group(1)
- else:
- self.error(_("No URL"))
-
- self.download(url)
-
- check = self.checkDownload({"html": re.compile("\A<!DOCTYPE html PUBLIC")})
- if check == "html":
- self.logWarning(_("Redirected to a HTML page, wait 10 minutes and retry"))
- self.wait(10 * 60, True)
-
-
-getInfo = create_getInfo(UploadingCom)
diff --git a/pyload/plugins/hoster/UploadkingCom.py b/pyload/plugins/hoster/UploadkingCom.py
deleted file mode 100644
index 3db1e15c4..000000000
--- a/pyload/plugins/hoster/UploadkingCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class UploadkingCom(DeadHoster):
- __name = "UploadkingCom"
- __type = "hoster"
- __version = "0.14"
-
- __pattern = r'http://(?:www\.)?uploadking\.com/\w{10}'
-
- __description = """UploadKing.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
-getInfo = create_getInfo(UploadkingCom)
diff --git a/pyload/plugins/hoster/UpstoreNet.py b/pyload/plugins/hoster/UpstoreNet.py
deleted file mode 100644
index 2594efe21..000000000
--- a/pyload/plugins/hoster/UpstoreNet.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.captcha import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class UpstoreNet(SimpleHoster):
- __name = "UpstoreNet"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'https?://(?:www\.)?upstore\.net/'
-
- __description = """Upstore.Net File Download Hoster"""
- __license = "GPLv3"
- __authors = [("igel", "igelkun@myopera.com")]
-
-
- INFO_PATTERN = r'<div class="comment">.*?</div>\s*\n<h2 style="margin:0">(?P<N>.*?)</h2>\s*\n<div class="comment">\s*\n\s*(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
- OFFLINE_PATTERN = r'<span class="error">File not found</span>'
-
- WAIT_PATTERN = r'var sec = (\d+)'
- CHASH_PATTERN = r'<input type="hidden" name="hash" value="([^"]*)">'
- LINK_PATTERN = r'<a href="(https?://.*?)" target="_blank"><b>'
-
-
- def handleFree(self):
- # STAGE 1: get link to continue
- m = re.search(self.CHASH_PATTERN, self.html)
- if m is None:
- self.error(_("CHASH_PATTERN not found"))
- chash = m.group(1)
- self.logDebug("Read hash " + chash)
- # continue to stage2
- post_data = {'hash': chash, 'free': 'Slow download'}
- self.html = self.load(self.pyfile.url, post=post_data, decode=True)
-
-        # STAGE 2: solve the captcha and wait
-        # first get the info we need: recaptcha key and wait time
- recaptcha = ReCaptcha(self)
-
- # try the captcha 5 times
- for i in xrange(5):
- m = re.search(self.WAIT_PATTERN, self.html)
- if m is None:
- self.error(_("Wait pattern not found"))
- wait_time = int(m.group(1))
-
- # then, do the waiting
- self.wait(wait_time)
-
- # then, handle the captcha
- challenge, response = recaptcha.challenge()
- post_data.update({'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field' : response})
-
- self.html = self.load(self.pyfile.url, post=post_data, decode=True)
-
- # STAGE 3: get direct link
- m = re.search(self.LINK_PATTERN, self.html, re.S)
- if m:
- break
-
- if m is None:
- self.error(_("Download link not found"))
-
- direct = m.group(1)
- self.download(direct, disposition=True)
-
-
-getInfo = create_getInfo(UpstoreNet)
diff --git a/pyload/plugins/hoster/UptoboxCom.py b/pyload/plugins/hoster/UptoboxCom.py
deleted file mode 100644
index a8ecbaf68..000000000
--- a/pyload/plugins/hoster/UptoboxCom.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class UptoboxCom(XFSHoster):
- __name = "UptoboxCom"
- __type = "hoster"
- __version = "0.16"
-
- __pattern = r'https?://(?:www\.)?uptobox\.com/\w{12}'
-
- __description = """Uptobox.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = "uptobox.com"
-
- INFO_PATTERN = r'"para_title">(?P<N>.+) \((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)'
- OFFLINE_PATTERN = r'>(File not found|Access Denied|404 Not Found)'
-
- LINK_PATTERN = r'"(https?://\w+\.uptobox\.com/d/.*?)"'
-
- ERROR_PATTERN = r'>(You have to wait.+till next download.)<' #@TODO: Check XFSHoster ERROR_PATTERN
-
-
- def setup(self):
- self.multiDL = True
- self.chunkLimit = 1
- self.resumeDownload = True
-
-
-getInfo = create_getInfo(UptoboxCom)
diff --git a/pyload/plugins/hoster/VeehdCom.py b/pyload/plugins/hoster/VeehdCom.py
deleted file mode 100644
index 0eb6ba64a..000000000
--- a/pyload/plugins/hoster/VeehdCom.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-
-
-class VeehdCom(Hoster):
- __name = "VeehdCom"
- __type = "hoster"
- __version = "0.23"
-
- __pattern = r'http://veehd\.com/video/\d+_\S+'
- __config = [("filename_spaces", "bool", "Allow spaces in filename", False),
- ("replacement_char", "str", "Filename replacement character", "_")]
-
- __description = """Veehd.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("cat", "cat@pyload")]
-
-
- def setup(self):
- self.multiDL = True
- self.req.canContinue = True
-
-
- def process(self, pyfile):
- self.download_html()
- if not self.file_exists():
- self.offline()
-
- pyfile.name = self.get_file_name()
- self.download(self.get_file_url())
-
-
- def download_html(self):
- url = self.pyfile.url
- self.logDebug("Requesting page: %s" % url)
- self.html = self.load(url)
-
-
- def file_exists(self):
- if not self.html:
- self.download_html()
-
- if '<title>Veehd</title>' in self.html:
- return False
- return True
-
-
- def get_file_name(self):
- if not self.html:
- self.download_html()
-
- m = re.search(r'<title[^>]*>([^<]+) on Veehd</title>', self.html)
- if m is None:
- self.error(_("Video title not found"))
-
- name = m.group(1)
-
- # replace unwanted characters in filename
- if self.getConfig('filename_spaces'):
- pattern = '[^\w ]+'
- else:
- pattern = '[^\w.]+'
-
- return re.sub(pattern, self.getConfig('replacement_char'), name) + '.avi'
-
-
- def get_file_url(self):
- """ returns the absolute downloadable filepath
- """
- if not self.html:
- self.download_html()
-
- m = re.search(r'<embed type="video/divx" src="(http://([^/]*\.)?veehd\.com/dl/[^"]+)"',
- self.html)
- if m is None:
- self.error(_("Embedded video url not found"))
-
- return m.group(1)
diff --git a/pyload/plugins/hoster/VeohCom.py b/pyload/plugins/hoster/VeohCom.py
deleted file mode 100644
index 25f109b2b..000000000
--- a/pyload/plugins/hoster/VeohCom.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class VeohCom(SimpleHoster):
- __name = "VeohCom"
- __type = "hoster"
- __version = "0.21"
-
- __pattern = r'http://(?:www\.)?veoh\.com/(tv/)?(watch|videos)/(?P<ID>v\w+)'
- __config = [("quality", "Low;High;Auto", "Quality", "Auto")]
-
- __description = """Veoh.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'<meta name="title" content="(?P<N>.*?)"'
- OFFLINE_PATTERN = r'>Sorry, we couldn\'t find the video you were looking for'
-
- URL_REPLACEMENTS = [(__pattern + ".*", r'http://www.veoh.com/watch/\g<ID>')]
-
- COOKIES = [("veoh.com", "lassieLocale", "en")]
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
- self.chunkLimit = -1
-
-
- def handleFree(self):
- quality = self.getConfig("quality")
- if quality == "Auto":
-            quality = ("High", "Low")
-        else:
-            quality = (quality,)  # single-element tuple so the loop below iterates one quality, not its characters
- for q in quality:
- pattern = r'"fullPreviewHash%sPath":"(.+?)"' % q
- m = re.search(pattern, self.html)
- if m:
- self.pyfile.name += ".mp4"
- link = m.group(1).replace("\\", "")
- self.download(link)
- return
- else:
- self.logInfo(_("No %s quality video found") % q.upper())
- else:
- self.fail(_("No video found!"))
-
-
-getInfo = create_getInfo(VeohCom)
diff --git a/pyload/plugins/hoster/VidPlayNet.py b/pyload/plugins/hoster/VidPlayNet.py
deleted file mode 100644
index 829e61f07..000000000
--- a/pyload/plugins/hoster/VidPlayNet.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Test links:
-# BigBuckBunny_320x180.mp4 - 61.7 Mb - http://vidplay.net/38lkev0h3jv0
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class VidPlayNet(XFSHoster):
- __name = "VidPlayNet"
- __type = "hoster"
- __version = "0.04"
-
- __pattern = r'https?://(?:www\.)?vidplay\.net/\w{12}'
-
- __description = """VidPlay.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]
-
-
- HOSTER_DOMAIN = "vidplay.net"
-
- NAME_PATTERN = r'<b>Password:</b></div>\s*<h[1-6]>(?P<N>[^<]+)</h[1-6]>'
-
-
-getInfo = create_getInfo(VidPlayNet)
diff --git a/pyload/plugins/hoster/VimeoCom.py b/pyload/plugins/hoster/VimeoCom.py
deleted file mode 100644
index 9a5f65ceb..000000000
--- a/pyload/plugins/hoster/VimeoCom.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class VimeoCom(SimpleHoster):
- __name = "VimeoCom"
- __type = "hoster"
- __version = "0.03"
-
- __pattern = r'https?://(?:www\.)?(player\.)?vimeo\.com/(video/)?(?P<ID>\d+)'
- __config = [("quality", "Lowest;Mobile;SD;HD;Highest", "Quality", "Highest"),
- ("original", "bool", "Try to download the original file first", True)]
-
- __description = """Vimeo.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'<title>(?P<N>.+) on Vimeo<'
- OFFLINE_PATTERN = r'class="exception_header"'
- TEMP_OFFLINE_PATTERN = r'Please try again in a few minutes.<'
-
- URL_REPLACEMENTS = [(__pattern + ".*", r'https://www.vimeo.com/\g<ID>')]
-
- COOKIES = [("vimeo.com", "language", "en")]
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
- self.chunkLimit = -1
-
-
- def handleFree(self):
- password = self.getPassword()
-
- if self.js and 'class="btn iconify_down_b"' in self.html:
- html = self.js.eval(self.load(self.pyfile.url, get={'action': "download", 'password': password}, decode=True))
- pattern = r'href="(?P<URL>http://vimeo\.com.+?)".*?\>(?P<QL>.+?) '
- else:
- id = re.match(self.__pattern, self.pyfile.url).group("ID")
- html = self.load("https://player.vimeo.com/video/" + id, get={'password': password})
- pattern = r'"(?P<QL>\w+)":{"profile".*?"(?P<URL>http://pdl\.vimeocdn\.com.+?)"'
-
- link = dict((l.group('QL').lower(), l.group('URL')) for l in re.finditer(pattern, html))
-
- if self.getConfig("original"):
- if "original" in link:
-                self.download(link["original"])
- return
- else:
- self.logInfo(_("Original file not downloadable"))
-
- quality = self.getConfig("quality")
- if quality == "Highest":
- qlevel = ("hd", "sd", "mobile")
- elif quality == "Lowest":
- qlevel = ("mobile", "sd", "hd")
- else:
-            qlevel = (quality.lower(),)  # wrap in a tuple so the loop below gets one quality, not characters
-
- for q in qlevel:
- if q in link:
- self.download(link[q])
- return
- else:
- self.logInfo(_("No %s quality video found") % q.upper())
- else:
- self.fail(_("No video found!"))
-
-
-getInfo = create_getInfo(VimeoCom)
diff --git a/pyload/plugins/hoster/Vipleech4uCom.py b/pyload/plugins/hoster/Vipleech4uCom.py
deleted file mode 100644
index 471552170..000000000
--- a/pyload/plugins/hoster/Vipleech4uCom.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class Vipleech4uCom(DeadHoster):
- __name = "Vipleech4uCom"
- __type = "hoster"
- __version = "0.20"
-
- __pattern = r'http://(?:www\.)?vipleech4u\.com/manager\.php'
-
- __description = """Vipleech4u.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Kagenoshin", "kagenoshin@gmx.ch")]
-
-
-getInfo = create_getInfo(Vipleech4uCom)
diff --git a/pyload/plugins/hoster/WarserverCz.py b/pyload/plugins/hoster/WarserverCz.py
deleted file mode 100644
index c0e042620..000000000
--- a/pyload/plugins/hoster/WarserverCz.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class WarserverCz(DeadHoster):
- __name = "WarserverCz"
- __type = "hoster"
- __version = "0.13"
-
- __pattern = r'http://(?:www\.)?warserver\.cz/stahnout/\d+'
-
- __description = """Warserver.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
-getInfo = create_getInfo(WarserverCz)
diff --git a/pyload/plugins/hoster/WebshareCz.py b/pyload/plugins/hoster/WebshareCz.py
deleted file mode 100644
index 4b26de627..000000000
--- a/pyload/plugins/hoster/WebshareCz.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.SimpleHoster import SimpleHoster
-
-
-def getInfo(urls):
- for url in urls:
- fid = re.search(WebshareCz.__pattern, url).group('ID')
- api_data = getURL("https://webshare.cz/api/file_info/", post={'ident': fid})
-
- if 'File not found' in api_data:
- file_info = (url, 0, 1, url)
- else:
- name = re.search('<name>(.+)</name>', api_data).group(1)
- size = re.search('<size>(.+)</size>', api_data).group(1)
- file_info = (name, size, 2, url)
-
- yield file_info
-
-
-class WebshareCz(SimpleHoster):
- __name = "WebshareCz"
- __type = "hoster"
- __version = "0.14"
-
- __pattern = r'https?://(?:www\.)?webshare\.cz/(?:#/)?file/(?P<ID>\w+)'
-
- __description = """WebShare.cz hoster plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- def handleFree(self):
- api_data = self.load('https://webshare.cz/api/file_link/', post={'ident': self.fid})
-
- self.logDebug("API data: " + api_data)
-
- m = re.search('<link>(.+)</link>', api_data)
- if m is None:
- self.error(_("Unable to detect direct link"))
-
- self.download(m.group(1), disposition=True)
-
-
- def getFileInfo(self):
- self.logDebug("URL: %s" % self.pyfile.url)
-
- self.fid = re.match(self.__pattern, self.pyfile.url).group('ID')
-
- self.load(self.pyfile.url)
- api_data = self.load('https://webshare.cz/api/file_info/', post={'ident': self.fid})
-
- if 'File not found' in api_data:
- self.offline()
- else:
- self.pyfile.name = re.search('<name>(.+)</name>', api_data).group(1)
- self.pyfile.size = re.search('<size>(.+)</size>', api_data).group(1)
-
- self.logDebug("FILE NAME: %s FILE SIZE: %s" % (self.pyfile.name, self.pyfile.size))
diff --git a/pyload/plugins/hoster/WrzucTo.py b/pyload/plugins/hoster/WrzucTo.py
deleted file mode 100644
index 1f9a19a5a..000000000
--- a/pyload/plugins/hoster/WrzucTo.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pycurl import HTTPHEADER
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class WrzucTo(SimpleHoster):
- __name = "WrzucTo"
- __type = "hoster"
- __version = "0.02"
-
- __pattern = r'http://(?:www\.)?wrzuc\.to/(\w+(\.wt|\.html)|(\w+/?linki/\w+))'
-
- __description = """Wrzuc.to hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'id="file_info">\s*<strong>(?P<N>.*?)</strong>'
- SIZE_PATTERN = r'class="info">\s*<tr>\s*<td>(?P<S>.*?)</td>'
-
- COOKIES = [("wrzuc.to", "language", "en")]
-
-
- def setup(self):
- self.multiDL = True
-
-
- def handleFree(self):
- data = dict(re.findall(r'(md5|file): "(.*?)"', self.html))
- if len(data) != 2:
- self.error(_("No file ID"))
-
- self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
- self.req.http.lastURL = self.pyfile.url
- self.load("http://www.wrzuc.to/ajax/server/prepair", post={"md5": data['md5']})
-
- self.req.http.lastURL = self.pyfile.url
- self.html = self.load("http://www.wrzuc.to/ajax/server/download_link", post={"file": data['file']})
-
- data.update(re.findall(r'"(download_link|server_id)":"(.*?)"', self.html))
- if len(data) != 4:
- self.error(_("No download URL"))
-
- download_url = "http://%s.wrzuc.to/pobierz/%s" % (data['server_id'], data['download_link'])
- self.download(download_url)
-
-
-getInfo = create_getInfo(WrzucTo)
diff --git a/pyload/plugins/hoster/WuploadCom.py b/pyload/plugins/hoster/WuploadCom.py
deleted file mode 100644
index 67f8d4bfb..000000000
--- a/pyload/plugins/hoster/WuploadCom.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class WuploadCom(DeadHoster):
- __name = "WuploadCom"
- __type = "hoster"
- __version = "0.23"
-
- __pattern = r'http://(?:www\.)?wupload\..*?/file/((\w+/)?\d+)(/.*)?'
-
- __description = """Wupload.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de"),
- ("Paul King", "")]
-
-
-getInfo = create_getInfo(WuploadCom)
diff --git a/pyload/plugins/hoster/X7To.py b/pyload/plugins/hoster/X7To.py
deleted file mode 100644
index 471fc1695..000000000
--- a/pyload/plugins/hoster/X7To.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class X7To(DeadHoster):
- __name = "X7To"
- __type = "hoster"
- __version = "0.41"
-
- __pattern = r'http://(?:www\.)?x7\.to/'
-
- __description = """X7.to hoster plugin"""
- __license = "GPLv3"
- __authors = [("ernieb", "ernieb")]
-
-
-getInfo = create_getInfo(X7To)
diff --git a/pyload/plugins/hoster/XFileSharingPro.py b/pyload/plugins/hoster/XFileSharingPro.py
deleted file mode 100644
index 7f352e781..000000000
--- a/pyload/plugins/hoster/XFileSharingPro.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
-
-
-class XFileSharingPro(XFSHoster):
- __name = "XFileSharingPro"
- __type = "hoster"
- __version = "0.43"
-
- __pattern = r'^unmatchable$'
-
- __description = """XFileSharingPro dummy hoster plugin for hook"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- URL_REPLACEMENTS = [("/embed-", "/")]
-
-
- def _log(self, type, args):
- msg = " | ".join([str(a).strip() for a in args if a])
- logger = getattr(self.log, type)
- logger("%s: %s: %s" % (self.__name, self.HOSTER_NAME, msg or _("%s MARK" % type.upper())))
-
-
- def init(self):
- super(XFileSharingPro, self).init()
-
- self.__pattern = self.core.pluginManager.hosterPlugins[self.__name]['pattern']
-
- self.HOSTER_DOMAIN = re.match(self.__pattern, self.pyfile.url).group(1).lower()
-        self.HOSTER_NAME = "".join(part.capitalize() for part in self.HOSTER_DOMAIN.split('.'))
-
- account = self.core.accountManager.getAccountPlugin(self.HOSTER_NAME)
-
- if account and account.canUse():
- self.account = account
- elif self.account:
- self.account.HOSTER_DOMAIN = self.HOSTER_DOMAIN
- else:
- return
-
- self.user, data = self.account.selectAccount()
- self.req = self.account.getAccountRequest(self.user)
- self.premium = self.account.isPremium(self.user)
-
-
- def setup(self):
- self.chunkLimit = 1
- self.resumeDownload = self.premium
- self.multiDL = True
-
-
-getInfo = create_getInfo(XFileSharingPro)
diff --git a/pyload/plugins/hoster/XHamsterCom.py b/pyload/plugins/hoster/XHamsterCom.py
deleted file mode 100644
index 8b713a33b..000000000
--- a/pyload/plugins/hoster/XHamsterCom.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urllib import unquote
-
-from pyload.utils import json_loads
-from pyload.plugins.Hoster import Hoster
-
-
-def clean_json(json_expr):
- json_expr = re.sub('[\n\r]', '', json_expr)
- json_expr = re.sub(' +', '', json_expr)
- json_expr = re.sub('\'', '"', json_expr)
-
- return json_expr
-
-
-class XHamsterCom(Hoster):
- __name = "XHamsterCom"
- __type = "hoster"
- __version = "0.12"
-
- __pattern = r'http://(?:www\.)?xhamster\.com/movies/.+'
- __config = [("type", ".mp4;.flv", "Preferred type", ".mp4")]
-
- __description = """XHamster.com hoster plugin"""
- __license = "GPLv3"
- __authors = []
-
-
- def process(self, pyfile):
- self.pyfile = pyfile
-
- if not self.file_exists():
- self.offline()
-
- if self.getConfig("type"):
- self.desired_fmt = self.getConfig("type")
-
- pyfile.name = self.get_file_name() + self.desired_fmt
- self.download(self.get_file_url())
-
-
- def download_html(self):
- url = self.pyfile.url
- self.html = self.load(url)
-
-
- def get_file_url(self):
- """ returns the absolute downloadable filepath
- """
- if not self.html:
- self.download_html()
-
- flashvar_pattern = re.compile('flashvars = ({.*?});', re.S)
- json_flashvar = flashvar_pattern.search(self.html)
-
- if not json_flashvar:
- self.error(_("flashvar not found"))
-
- j = clean_json(json_flashvar.group(1))
- flashvars = json_loads(j)
-
- if flashvars['srv']:
- srv_url = flashvars['srv'] + '/'
- else:
- self.error(_("srv_url not found"))
-
- if flashvars['url_mode']:
- url_mode = flashvars['url_mode']
-        else:
- self.error(_("url_mode not found"))
-
- if self.desired_fmt == ".mp4":
- file_url = re.search(r"<a href=\"" + srv_url + "(.+?)\"", self.html)
- if file_url is None:
- self.error(_("file_url not found"))
- file_url = file_url.group(1)
- long_url = srv_url + file_url
- self.logDebug("long_url = " + long_url)
- else:
- if flashvars['file']:
- file_url = unquote(flashvars['file'])
- else:
- self.error(_("file_url not found"))
-
- if url_mode == '3':
- long_url = file_url
- self.logDebug("long_url = " + long_url)
- else:
- long_url = srv_url + "key=" + file_url
- self.logDebug("long_url = " + long_url)
-
- return long_url
-
-
- def get_file_name(self):
- if not self.html:
- self.download_html()
-
- pattern = r'<title>(.*?) - xHamster\.com</title>'
- name = re.search(pattern, self.html)
- if name is None:
- pattern = r'<h1 >(.*)</h1>'
- name = re.search(pattern, self.html)
- if name is None:
- pattern = r'http://[www.]+xhamster\.com/movies/.*/(.*?)\.html?'
-                name = re.match(pattern, self.pyfile.url)
- if name is None:
- pattern = r'<div id="element_str_id" style="display:none;">(.*)</div>'
- name = re.search(pattern, self.html)
- if name is None:
- return "Unknown"
-
- return name.group(1)
-
-
- def file_exists(self):
- """ returns True or False
- """
- if not self.html:
- self.download_html()
- if re.search(r"(.*Video not found.*)", self.html) is not None:
- return False
- else:
- return True
diff --git a/pyload/plugins/hoster/XVideosCom.py b/pyload/plugins/hoster/XVideosCom.py
deleted file mode 100644
index 90aac5979..000000000
--- a/pyload/plugins/hoster/XVideosCom.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urllib import unquote
-
-from pyload.plugins.Hoster import Hoster
-
-
-class XVideosCom(Hoster):
- __name = "XVideos.com"
- __type = "hoster"
- __version = "0.10"
-
- __pattern = r'http://(?:www\.)?xvideos\.com/video(\d+)/.*'
-
- __description = """XVideos.com hoster plugin"""
- __license = "GPLv3"
- __authors = []
-
-
- def process(self, pyfile):
- site = self.load(pyfile.url)
- pyfile.name = "%s (%s).flv" % (
- re.search(r"<h2>([^<]+)<span", site).group(1),
- re.match(self.__pattern, pyfile.url).group(1),
- )
- self.download(unquote(re.search(r"flv_url=([^&]+)&", site).group(1)))
diff --git a/pyload/plugins/hoster/Xdcc.py b/pyload/plugins/hoster/Xdcc.py
deleted file mode 100644
index b94c08df2..000000000
--- a/pyload/plugins/hoster/Xdcc.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-import socket
-import struct
-import sys
-import time
-
-from os import makedirs
-from os.path import exists, join
-from select import select
-
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import safe_join
-
-
-class Xdcc(Hoster):
- __name = "Xdcc"
- __type = "hoster"
- __version = "0.32"
-
- __config = [("nick", "str", "Nickname", "pyload"),
- ("ident", "str", "Ident", "pyloadident"),
- ("realname", "str", "Realname", "pyloadreal")]
-
- __description = """Download from IRC XDCC bot"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.com")]
-
-
- def setup(self):
- self.debug = 0 # 0,1,2
- self.timeout = 30
- self.multiDL = False
-
-
- def process(self, pyfile):
- # change request type
- self.req = pyfile.m.core.requestFactory.getRequest(self.__name, type="XDCC")
-
- self.pyfile = pyfile
- for _i in xrange(0, 3):
- try:
- nmn = self.doDownload(pyfile.url)
- self.logDebug("Download of %s finished." % nmn)
- return
- except socket.error, e:
- if hasattr(e, "errno"):
- errno = e.errno
- else:
- errno = e.args[0]
-
- if errno == 10054:
- self.logDebug("Server blocked our ip, retry in 5 min")
- self.setWait(300)
- self.wait()
- continue
-
- self.fail(_("Failed due to socket errors. Code: %d") % errno)
-
- self.fail(_("Server blocked our ip, retry again later manually"))
-
-
- def doDownload(self, url):
- self.pyfile.setStatus("waiting") # real link
-
- m = re.match(r'xdcc://(.*?)/#?(.*?)/(.*?)/#?(\d+)/?', url)
- server = m.group(1)
- chan = m.group(2)
- bot = m.group(3)
- pack = m.group(4)
- nick = self.getConfig('nick')
- ident = self.getConfig('ident')
- real = self.getConfig('realname')
-
- temp = server.split(':')
- ln = len(temp)
- if ln == 2:
- host, port = temp
- elif ln == 1:
- host, port = temp[0], 6667
- else:
- self.fail(_("Invalid hostname for IRC Server: %s") % server)
-
- #######################
- # CONNECT TO IRC AND IDLE FOR REAL LINK
- dl_time = time.time()
-
- sock = socket.socket()
- sock.connect((host, int(port)))
- if nick == "pyload":
- nick = "pyload-%d" % (time.time() % 1000) # last 3 digits
- sock.send("NICK %s\r\n" % nick)
- sock.send("USER %s %s bla :%s\r\n" % (ident, host, real))
- time.sleep(3)
- sock.send("JOIN #%s\r\n" % chan)
- sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
-
- # IRC recv loop
- readbuffer = ""
- done = False
- retry = None
- m = None
- while True:
-
- # done is set if we got our real link
- if done:
- break
-
- if retry:
- if time.time() > retry:
- retry = None
- dl_time = time.time()
- sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
-
- else:
- if (dl_time + self.timeout) < time.time(): # todo: add in config
- sock.send("QUIT :byebye\r\n")
- sock.close()
- self.fail(_("XDCC Bot did not answer"))
-
- fdset = select([sock], [], [], 0)
- if sock not in fdset[0]:
- continue
-
- readbuffer += sock.recv(1024)
- temp = readbuffer.split("\n")
- readbuffer = temp.pop()
-
- for line in temp:
-                if self.debug == 2:
- print "*> " + unicode(line, errors='ignore')
- line = line.rstrip()
- first = line.split()
-
- if first[0] == "PING":
- sock.send("PONG %s\r\n" % first[1])
-
- if first[0] == "ERROR":
- self.fail(_("IRC-Error: %s") % line)
-
- msg = line.split(None, 3)
- if len(msg) != 4:
- continue
-
- msg = {
- "origin": msg[0][1:],
- "action": msg[1],
- "target": msg[2],
- "text": msg[3][1:]
- }
-
- if nick == msg['target'][0:len(nick)] and "PRIVMSG" == msg['action']:
- if msg['text'] == "\x01VERSION\x01":
- self.logDebug("Sending CTCP VERSION")
- sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
- elif msg['text'] == "\x01TIME\x01":
- self.logDebug("Sending CTCP TIME")
- sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
- elif msg['text'] == "\x01LAG\x01":
- pass # don't know how to answer
-
- if not (bot == msg['origin'][0:len(bot)]
- and nick == msg['target'][0:len(nick)]
- and msg['action'] in ("PRIVMSG", "NOTICE")):
- continue
-
-                if self.debug == 1:
- print "%s: %s" % (msg['origin'], msg['text'])
-
- if "You already requested that pack" in msg['text']:
- retry = time.time() + 300
-
- if "you must be on a known channel to request a pack" in msg['text']:
- self.fail(_("Wrong channel"))
-
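-                # CTCP DCC SEND: "<filename> <ip as 32-bit integer> <port> [<filesize>]"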
- m = re.match('\x01DCC SEND (.*?) (\d+) (\d+)(?: (\d+))?\x01', msg['text'])
- if m:
- done = True
-
- # get connection data
-                    ip = socket.inet_ntoa(struct.pack('!I', int(m.group(2))))  # fixed 4-byte network-order pack (native 'L' can be 8 bytes)
- port = int(m.group(3))
- packname = m.group(1)
-
- if len(m.groups()) > 3:
- self.req.filesize = int(m.group(4))
-
- self.pyfile.name = packname
-
- download_folder = self.config['general']['download_folder']
- filename = safe_join(download_folder, packname)
-
- self.logInfo(_("Downloading %s from %s:%d") % (packname, ip, port))
-
- self.pyfile.setStatus("downloading")
- newname = self.req.download(ip, port, filename, sock, self.pyfile.setProgress)
- if newname and newname != filename:
- self.logInfo(_("%(name)s saved as %(newname)s") % {"name": self.pyfile.name, "newname": newname})
- filename = newname
-
- # kill IRC socket
- # sock.send("QUIT :byebye\r\n")
- sock.close()
-
- self.lastDownload = filename
- return self.lastDownload
diff --git a/pyload/plugins/hoster/YibaishiwuCom.py b/pyload/plugins/hoster/YibaishiwuCom.py
deleted file mode 100644
index d1b6bf793..000000000
--- a/pyload/plugins/hoster/YibaishiwuCom.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.utils import json_loads
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class YibaishiwuCom(SimpleHoster):
- __name = "YibaishiwuCom"
- __type = "hoster"
- __version = "0.13"
-
- __pattern = r'http://(?:www\.)?(?:u\.)?115\.com/file/(?P<ID>\w+)'
-
- __description = """115.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- NAME_PATTERN = r'file_name: \'(?P<N>.+?)\''
- SIZE_PATTERN = r'file_size: \'(?P<S>.+?)\''
-    OFFLINE_PATTERN = ur'<h3><i style="color:red;">哎呀提取码不存在不妨搜搜看吧</i></h3>'
-
- LINK_PATTERN = r'(/\?ct=(pickcode|download)[^"\']+)'
-
-
- def handleFree(self):
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("LINK_PATTERN not found"))
- url = m.group(1)
- self.logDebug(('FREEUSER' if m.group(2) == 'download' else 'GUEST') + ' URL', url)
-
- res = json_loads(self.load("http://115.com" + url, decode=False))
- if "urls" in res:
- mirrors = res['urls']
- elif "data" in res:
- mirrors = res['data']
- else:
-            mirrors = []  # nothing to try; the loop's else-branch fails with "No working link found"
-
- for mr in mirrors:
- try:
- url = mr['url'].replace("\\", "")
- self.logDebug("Trying URL: " + url)
- self.download(url)
- break
- except Exception:
- continue
- else:
- self.fail(_("No working link found"))
-
-
-getInfo = create_getInfo(YibaishiwuCom)
diff --git a/pyload/plugins/hoster/YoupornCom.py b/pyload/plugins/hoster/YoupornCom.py
deleted file mode 100644
index c9a2a0f12..000000000
--- a/pyload/plugins/hoster/YoupornCom.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Hoster import Hoster
-
-
-class YoupornCom(Hoster):
- __name = "YoupornCom"
- __type = "hoster"
- __version = "0.20"
-
- __pattern = r'http://(?:www\.)?youporn\.com/watch/.+'
-
- __description = """Youporn.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("willnix", "willnix@pyload.org")]
-
-
- def process(self, pyfile):
- self.pyfile = pyfile
-
- if not self.file_exists():
- self.offline()
-
- pyfile.name = self.get_file_name()
- self.download(self.get_file_url())
-
-
- def download_html(self):
- url = self.pyfile.url
- self.html = self.load(url, post={"user_choice": "Enter"}, cookies=False)
-
-
- def get_file_url(self):
- """ returns the absolute downloadable filepath
- """
- if not self.html:
- self.download_html()
-
- return re.search(r'(http://download\.youporn\.com/download/\d+\?save=1)">', self.html).group(1)
-
-
- def get_file_name(self):
- if not self.html:
- self.download_html()
-
- file_name_pattern = r'<title>(.+) - '
- return re.search(file_name_pattern, self.html).group(1).replace("&amp;", "&").replace("/", "") + '.flv'
-
-
- def file_exists(self):
- """ returns True or False
- """
- if not self.html:
- self.download_html()
- if re.search(r"(.*invalid video_id.*)", self.html) is not None:
- return False
- else:
- return True
diff --git a/pyload/plugins/hoster/YourfilesTo.py b/pyload/plugins/hoster/YourfilesTo.py
deleted file mode 100644
index 0a1e98cb5..000000000
--- a/pyload/plugins/hoster/YourfilesTo.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urllib import unquote
-
-from pyload.plugins.Hoster import Hoster
-
-
-class YourfilesTo(Hoster):
- __name = "YourfilesTo"
- __type = "hoster"
- __version = "0.21"
-
- __pattern = r'(http://)?(?:www\.)?yourfiles\.(to|biz)/\?d=\w+'
-
-    __description = """Yourfiles.to hoster plugin"""
- __license = "GPLv3"
- __authors = [("jeix", "jeix@hasnomail.de"),
- ("skydancer", "skydancer@hasnomail.de")]
-
-
- def process(self, pyfile):
- self.pyfile = pyfile
- self.prepare()
- self.download(self.get_file_url())
-
-
- def prepare(self):
- if not self.file_exists():
- self.offline()
-
- self.pyfile.name = self.get_file_name()
-
- wait_time = self.get_waiting_time()
- self.setWait(wait_time)
- self.wait()
-
-
- def get_waiting_time(self):
- if not self.html:
- self.download_html()
-
- #var zzipitime = 15;
- m = re.search(r'var zzipitime = (\d+);', self.html)
- if m:
- sec = int(m.group(1))
- else:
- sec = 0
-
- return sec
-
-
- def download_html(self):
- url = self.pyfile.url
- self.html = self.load(url)
-
-
- def get_file_url(self):
- """ returns the absolute downloadable filepath
- """
- url = re.search(r"var bla = '(.*?)';", self.html)
- if url:
- url = url.group(1)
- url = unquote(url.replace("http://http:/http://", "http://").replace("dumdidum", ""))
- return url
- else:
- self.error(_("Absolute filepath not found"))
-
-
- def get_file_name(self):
- if not self.html:
- self.download_html()
-
- return re.search("<title>(.*)</title>", self.html).group(1)
-
-
- def file_exists(self):
- """ returns True or False
- """
- if not self.html:
- self.download_html()
-
- if re.search(r"HTTP Status 404", self.html) is not None:
- return False
- else:
- return True
diff --git a/pyload/plugins/hoster/YoutubeCom.py b/pyload/plugins/hoster/YoutubeCom.py
deleted file mode 100644
index 612c37eb4..000000000
--- a/pyload/plugins/hoster/YoutubeCom.py
+++ /dev/null
@@ -1,185 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import os
-import re
-import subprocess
-
-from urllib import unquote
-
-from pyload.plugins.Hoster import Hoster
-from pyload.plugins.internal.SimpleHoster import replace_patterns
-from pyload.utils import html_unescape
-
-
-def which(program):
- """Works exactly like the unix command which
-
- Courtesy of http://stackoverflow.com/a/377028/675646"""
-
- def is_exe(fpath):
- return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
-
- fpath, fname = os.path.split(program)
-
- if fpath:
- if is_exe(program):
- return program
- else:
- for path in os.environ['PATH'].split(os.pathsep):
- path = path.strip('"')
- exe_file = os.path.join(path, program)
- if is_exe(exe_file):
- return exe_file
-
- return None
-
-
-class YoutubeCom(Hoster):
- __name = "YoutubeCom"
- __type = "hoster"
- __version = "0.40"
-
- __pattern = r'https?://(?:[^/]*\.)?(?:youtube\.com|youtu\.be)/watch.*?[?&]v=.*'
- __config = [("quality", "sd;hd;fullhd;240p;360p;480p;720p;1080p;3072p", "Quality Setting", "hd"),
- ("fmt", "int", "FMT/ITAG Number (5-102, 0 for auto)", 0),
- (".mp4", "bool", "Allow .mp4", True),
- (".flv", "bool", "Allow .flv", True),
- (".webm", "bool", "Allow .webm", False),
- (".3gp", "bool", "Allow .3gp", False),
- ("3d", "bool", "Prefer 3D", False)]
-
- __description = """Youtube.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("spoob", "spoob@pyload.org"),
- ("zoidberg", "zoidberg@mujmail.cz")]
-
-
- URL_REPLACEMENTS = [(r'youtu\.be/', 'youtube.com/')]
-
- # Invalid characters that must be removed from the file name
- invalidChars = u'\u2605:?><"|\\'
-
- # name, width, height, quality ranking, 3D
- formats = {5 : (".flv" , 400 , 240 , 1 , False),
- 6 : (".flv" , 640 , 400 , 4 , False),
- 17 : (".3gp" , 176 , 144 , 0 , False),
- 18 : (".mp4" , 480 , 360 , 2 , False),
- 22 : (".mp4" , 1280, 720 , 8 , False),
- 43 : (".webm", 640 , 360 , 3 , False),
- 34 : (".flv" , 640 , 360 , 4 , False),
- 35 : (".flv" , 854 , 480 , 6 , False),
- 36 : (".3gp" , 400 , 240 , 1 , False),
- 37 : (".mp4" , 1920, 1080, 9 , False),
- 38 : (".mp4" , 4096, 3072, 10, False),
- 44 : (".webm", 854 , 480 , 5 , False),
- 45 : (".webm", 1280, 720 , 7 , False),
- 46 : (".webm", 1920, 1080, 9 , False),
- 82 : (".mp4" , 640 , 360 , 3 , True ),
- 83 : (".mp4" , 400 , 240 , 1 , True ),
- 84 : (".mp4" , 1280, 720 , 8 , True ),
- 85 : (".mp4" , 1920, 1080, 9 , True ),
- 100: (".webm", 640 , 360 , 3 , True ),
- 101: (".webm", 640 , 360 , 4 , True ),
- 102: (".webm", 1280, 720 , 8 , True )}
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
-
-
- def process(self, pyfile):
- pyfile.url = replace_patterns(pyfile.url, self.URL_REPLACEMENTS)
- html = self.load(pyfile.url, decode=True)
-
- if re.search(r'<div id="player-unavailable" class="\s*player-width player-height\s*">', html):
- self.offline()
-
- if "We have been receiving a large volume of requests from your network." in html:
- self.tempOffline()
-
- #get config
- use3d = self.getConfig("3d")
- if use3d:
- quality = {"sd": 82, "hd": 84, "fullhd": 85, "240p": 83, "360p": 82,
- "480p": 82, "720p": 84, "1080p": 85, "3072p": 85}
- else:
- quality = {"sd": 18, "hd": 22, "fullhd": 37, "240p": 5, "360p": 18,
- "480p": 35, "720p": 22, "1080p": 37, "3072p": 38}
- desired_fmt = self.getConfig("fmt")
- if desired_fmt and desired_fmt not in self.formats:
- self.logWarning(_("FMT %d unknown, using default") % desired_fmt)
- desired_fmt = 0
- if not desired_fmt:
- desired_fmt = quality.get(self.getConfig("quality"), 18)
-
- #parse available streams
- streams = re.search(r'"url_encoded_fmt_stream_map": "(.*?)",', html).group(1)
- streams = [x.split('\u0026') for x in streams.split(',')]
- streams = [dict((y.split('=', 1)) for y in x) for x in streams]
- streams = [(int(x['itag']), unquote(x['url'])) for x in streams]
- #self.logDebug("Found links: %s" % streams)
- self.logDebug("AVAILABLE STREAMS: %s" % [x[0] for x in streams])
-
- #build dictionary of supported itags (3D/2D)
- allowed = lambda x: self.getConfig(self.formats[x][0])
- streams = [x for x in streams if x[0] in self.formats and allowed(x[0])]
- if not streams:
- self.fail(_("No available stream meets your preferences"))
- fmt_dict = dict([x for x in streams if self.formats[x[0]][4] == use3d] or streams)
-
- self.logDebug("DESIRED STREAM: ITAG:%d (%s) %sfound, %sallowed" %
- (desired_fmt, "%s %dx%d Q:%d 3D:%s" % self.formats[desired_fmt],
- "" if desired_fmt in fmt_dict else "NOT ", "" if allowed(desired_fmt) else "NOT "))
-
- #return fmt nearest to quality index
- if desired_fmt in fmt_dict and allowed(desired_fmt):
- fmt = desired_fmt
- else:
- sel = lambda x: self.formats[x][3] # select quality index
- comp = lambda x, y: abs(sel(x) - sel(y))
-
- self.logDebug("Choosing nearest fmt: %s" % [(x, allowed(x), comp(x, desired_fmt)) for x in fmt_dict.keys()])
- fmt = reduce(lambda x, y: x if comp(x, desired_fmt) <= comp(y, desired_fmt) and
- sel(x) > sel(y) else y, fmt_dict.keys())
-
- self.logDebug("Chosen fmt: %s" % fmt)
- url = fmt_dict[fmt]
- self.logDebug("URL: %s" % url)
-
- #set file name
- file_suffix = self.formats[fmt][0] if fmt in self.formats else ".flv"
- file_name_pattern = '<meta name="title" content="(.+?)">'
- name = re.search(file_name_pattern, html).group(1).replace("/", "")
-
- # Cleaning invalid characters from the file name
- name = name.encode('ascii', 'replace')
- for c in self.invalidChars:
- name = name.replace(c, '_')
-
- pyfile.name = html_unescape(name)
-
- time = re.search(r"t=((\d+)m)?(\d+)s", pyfile.url)
- ffmpeg = which("ffmpeg")
- if ffmpeg and time:
- m, s = time.groups()[1:]
- if m is None:
- m = "0"
-
- pyfile.name += " (starting at %s:%s)" % (m, s)
- pyfile.name += file_suffix
-
- filename = self.download(url)
-
- if ffmpeg and time:
- inputfile = filename + "_"
- os.rename(filename, inputfile)
-
- subprocess.call([
- ffmpeg,
- "-ss", "00:%s:%s" % (m, s),
- "-i", inputfile,
- "-vcodec", "copy",
- "-acodec", "copy",
- filename])
- os.remove(inputfile)
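
A minimal standalone sketch of the "nearest fmt" idea used in the reduce expression above: among the allowed itags, pick the one whose quality ranking lies closest to the desired one, preferring the higher-ranked stream on ties. The ranking subset below is copied from the formats table; the tie-break is a simplification of the intent, not the exact reduce logic.

quality_rank = {18: 2, 22: 8, 35: 6, 37: 9, 43: 3}  #: itag -> quality ranking (subset of the formats table above)

def nearest_fmt(desired, available):
    # distance in quality ranking; iterate best-ranked first so ties go to the higher quality
    dist = lambda itag: abs(quality_rank[itag] - quality_rank[desired])
    return min(sorted(available, key=lambda x: -quality_rank[x]), key=dist)

# nearest_fmt(37, [18, 22, 43]) -> 22 (the available itag ranked closest to 1080p)
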
diff --git a/pyload/plugins/hoster/ZDF.py b/pyload/plugins/hoster/ZDF.py
deleted file mode 100644
index d5526d42a..000000000
--- a/pyload/plugins/hoster/ZDF.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from xml.etree.ElementTree import fromstring
-
-from pyload.plugins.Hoster import Hoster
-
-
-# Based on zdfm by Roland Beermann (http://github.com/enkore/zdfm/)
-class ZDF(Hoster):
- __name = "ZDF Mediathek"
- __type = "hoster"
- __version = "0.80"
-
- __pattern = r'http://(?:www\.)?zdf\.de/ZDFmediathek/\D*(\d+)\D*'
-
- __description = """ZDF.de hoster plugin"""
- __license = "GPLv3"
- __authors = []
-
- XML_API = "http://www.zdf.de/ZDFmediathek/xmlservice/web/beitragsDetails?id=%i"
-
-
- @staticmethod
- def video_key(video):
- return (
- int(video.findtext("videoBitrate", "0")),
- any(f.text == "progressive" for f in video.iter("facet")),
- )
-
-
- @staticmethod
- def video_valid(video):
- return video.findtext("url").startswith("http") and video.findtext("url").endswith(".mp4") and \
- video.findtext("facets/facet").startswith("progressive")
-
-
- @staticmethod
- def get_id(url):
- return int(re.search(r"\D*(\d{4,})\D*", url).group(1))
-
-
- def process(self, pyfile):
- xml = fromstring(self.load(self.XML_API % self.get_id(pyfile.url)))
-
- status = xml.findtext("./status/statuscode")
- if status != "ok":
- self.fail(_("Error retrieving manifest"))
-
- video = xml.find("video")
- title = video.findtext("information/title")
-
- pyfile.name = title
-
- target_url = sorted((v for v in video.iter("formitaet") if self.video_valid(v)),
- key=self.video_key)[-1].findtext("url")
-
- self.download(target_url)
diff --git a/pyload/plugins/hoster/ZShareNet.py b/pyload/plugins/hoster/ZShareNet.py
deleted file mode 100644
index a2265bfcc..000000000
--- a/pyload/plugins/hoster/ZShareNet.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-
-
-class ZShareNet(DeadHoster):
- __name = "ZShareNet"
- __type = "hoster"
- __version = "0.21"
-
- __pattern = r'https?://(?:ww[2w]\.)?zshares?\.net/.+'
-
- __description = """ZShare.net hoster plugin"""
- __license = "GPLv3"
- __authors = [("espes", ""),
- ("Cptn Sandwich", "")]
-
-
-getInfo = create_getInfo(ZShareNet)
diff --git a/pyload/plugins/hoster/ZeveraCom.py b/pyload/plugins/hoster/ZeveraCom.py
deleted file mode 100644
index 9fcffadbc..000000000
--- a/pyload/plugins/hoster/ZeveraCom.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.Hoster import Hoster
-
-
-class ZeveraCom(Hoster):
- __name = "ZeveraCom"
- __type = "hoster"
- __version = "0.21"
-
- __pattern = r'http://(?:www\.)?zevera\.com/.*'
-
- __description = """Zevera.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = True
- self.chunkLimit = 1
-
-
- def process(self, pyfile):
- if not self.account:
- self.logError(_("Please enter your %s account or deactivate this plugin") % "zevera.com")
- self.fail(_("No zevera.com account provided"))
-
- self.logDebug("Old URL: %s" % pyfile.url)
-
- if self.account.getAPIData(self.req, cmd="checklink", olink=pyfile.url) != "Alive":
- self.fail(_("Offline or not downloadable - contact Zevera support"))
-
- header = self.account.getAPIData(self.req, just_header=True, cmd="generatedownloaddirect", olink=pyfile.url)
- if not "location" in header:
- self.fail(_("Unable to initialize download"))
-
- self.download(header['location'], disposition=True)
-
- check = self.checkDownload({"error": 'action="ErrorDownload.aspx'})
- if check == "error":
- self.fail(_("Error response received - contact Zevera support"))
diff --git a/pyload/plugins/hoster/ZippyshareCom.py b/pyload/plugins/hoster/ZippyshareCom.py
deleted file mode 100644
index b6022e448..000000000
--- a/pyload/plugins/hoster/ZippyshareCom.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from os.path import join
-from urlparse import urljoin
-
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-
-
-class ZippyshareCom(SimpleHoster):
- __name = "ZippyshareCom"
- __type = "hoster"
- __version = "0.62"
-
- __pattern = r'(?P<HOST>http://www\d{0,2}\.zippyshare\.com)/v(?:/|iew\.jsp.*key=)(?P<KEY>\d+)'
-
- __description = """Zippyshare.com hoster plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- NAME_PATTERN = r'("\d{6,}/"[ ]*\+.+?"/|<title>Zippyshare.com - )(?P<N>.+?)("|</title>)'
- SIZE_PATTERN = r'>Size:.+?">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
-
- OFFLINE_PATTERN = r'>File does not exist on this server<'
-
- COOKIES = [("zippyshare.com", "ziplocale", "en")]
-
-
- def setup(self):
- self.multiDL = True
- self.chunkLimit = -1
- self.resumeDownload = True
-
-
- def handleFree(self):
- url = self.get_link()
- self.download(url)
-
-
- def get_checksum(self):
- try:
- m = re.search(r'\+[ ]*\((\d+)[ ]*\%[ ]*(\d+)[ ]*\+[ ]*(\d+)[ ]*\%[ ]*(\d+)\)[ ]*\+', self.html)
- if m:
- a1, a2, c1, c2 = map(int, m.groups())
- else:
- a1, a2 = map(int, re.search(r'\(\'downloadB\'\).omg = (\d+)%(\d+)', self.html).groups())
- c1, c2 = map(int, re.search(r'\(\'downloadB\'\).omg\) \* \((\d+)%(\d+)', self.html).groups())
-
- b = (a1 % a2) * (c1 % c2)
- except Exception:
- self.error(_("Unable to calculate checksum"))
- else:
- return b + 18
-
-
- def get_link(self):
- checksum = self.get_checksum()
- p_url = join("d", self.info['pattern']['KEY'], str(checksum), self.pyfile.name)
- dl_link = urljoin(self.info['pattern']['HOST'], p_url)
- return dl_link
-
-
-getInfo = create_getInfo(ZippyshareCom)
diff --git a/pyload/plugins/internal/BasePlugin.py b/pyload/plugins/internal/BasePlugin.py
deleted file mode 100644
index 7a29391b6..000000000
--- a/pyload/plugins/internal/BasePlugin.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urllib import unquote
-from urlparse import urljoin, urlparse
-
-from pyload.network.HTTPRequest import BadHeader
-from pyload.plugins.internal.SimpleHoster import create_getInfo
-from pyload.plugins.Hoster import Hoster
-
-
-class BasePlugin(Hoster):
- __name = "BasePlugin"
- __type = "hoster"
- __version = "0.25"
-
- __pattern = r'^unmatchable$'
-
- __description = """Base plugin when any other didnt fit"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- @classmethod
- def getInfo(cls, url="", html=""): #@TODO: Move to hoster class in 0.4.10
- return {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3 if url else 1, 'url': unquote(url) or ""}
-
-
- def setup(self):
- self.chunkLimit = -1
- self.resumeDownload = True
-
-
- def process(self, pyfile):
- """main function"""
-
- pyfile.name = self.getInfo(pyfile.url)['name']
-
- if not pyfile.url.startswith("http"):
- self.fail(_("No plugin matched"))
-
- for _i in xrange(5):
- try:
- self.downloadFile(pyfile)
-
- except BadHeader, e:
- if e.code == 404:
- self.offline()
-
- elif e.code in (401, 403):
- self.logDebug("Auth required", "Received HTTP status code: %d" % e.code)
-
- account = self.core.accountManager.getAccountPlugin('Http')
- servers = [x['login'] for x in account.getAllAccounts()]
- server = urlparse(pyfile.url).netloc
-
- if server in servers:
- self.logDebug("Logging on to %s" % server)
- self.req.addAuth(account.accounts[server]['password'])
- else:
- for pwd in self.getPassword().splitlines():
- if ":" in pwd:
- self.req.addAuth(pwd.strip())
- break
- else:
- self.fail(_("Authorization required"))
- else:
- self.fail(e)
- else:
- break
- else:
- self.fail(_("No file downloaded")) #@TODO: Move to hoster class in 0.4.10
-
- if self.checkDownload({'empty': re.compile(r"^$")}) == "empty": #@TODO: Move to hoster in 0.4.10
- self.fail(_("Empty file"))
-
-
- def downloadFile(self, pyfile):
- url = pyfile.url
-
- for i in xrange(1, 7): #@TODO: retrieve the pycurl.MAXREDIRS value set by req
- header = self.load(url, ref=True, cookies=True, just_header=True, decode=True)
-
- if 'location' not in header or not header['location']:
- if 'code' in header and header['code'] not in (200, 201, 203, 206):
- self.logDebug("Received HTTP status code: %d" % header['code'])
- self.fail(_("File not found"))
- else:
- break
-
- location = header['location']
-
- self.logDebug("Redirect #%d to: %s" % (i, location))
-
- if urlparse(location).scheme:
- url = location
- else:
- p = urlparse(url)
- base = "%s://%s" % (p.scheme, p.netloc)
- url = urljoin(base, location)
- else:
- self.fail(_("Too many redirects"))
-
- self.download(unquote(url), disposition=True)
diff --git a/pyload/plugins/internal/DeadCrypter.py b/pyload/plugins/internal/DeadCrypter.py
deleted file mode 100644
index b1e963290..000000000
--- a/pyload/plugins/internal/DeadCrypter.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from urllib import unquote
-from urlparse import urlparse
-
-from pyload.plugins.Crypter import Crypter as _Crypter
-from pyload.plugins.internal.SimpleCrypter import create_getInfo
-
-
-class DeadCrypter(_Crypter):
- __name = "DeadCrypter"
- __type = "crypter"
- __version = "0.04"
-
- __pattern = r'^unmatchable$'
-
- __description = """Crypter is no longer available"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it")]
-
-
- @classmethod
- def getInfo(cls, url="", html=""):
- return {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 1, 'url': url}
-
-
- def setup(self):
- self.pyfile.error = "Crypter is no longer available"
- self.offline() #@TODO: self.offline("Crypter is no longer available")
-
-
-getInfo = create_getInfo(DeadCrypter)
diff --git a/pyload/plugins/internal/DeadHoster.py b/pyload/plugins/internal/DeadHoster.py
deleted file mode 100644
index b7f930fc5..000000000
--- a/pyload/plugins/internal/DeadHoster.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from urllib import unquote
-from urlparse import urlparse
-
-from pyload.plugins.Hoster import Hoster as _Hoster
-from pyload.plugins.internal.SimpleHoster import create_getInfo
-
-
-class DeadHoster(_Hoster):
- __name = "DeadHoster"
- __type = "hoster"
- __version = "0.14"
-
- __pattern = r'^unmatchable$'
-
- __description = """Hoster is no longer available"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz")]
-
-
- @classmethod
- def getInfo(cls, url="", html=""):
- return {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 1, 'url': url}
-
-
- def setup(self):
- self.pyfile.error = "Hoster is no longer available"
- self.offline() #@TODO: self.offline("Hoster is no longer available")
-
-
-getInfo = create_getInfo(DeadHoster)
diff --git a/pyload/plugins/internal/MultiHoster.py b/pyload/plugins/internal/MultiHoster.py
deleted file mode 100644
index 1b8ecfb03..000000000
--- a/pyload/plugins/internal/MultiHoster.py
+++ /dev/null
@@ -1,202 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.plugins.Addon import Addon
-from pyload.utils import remove_chars
-
-
-class MultiHoster(Addon):
- __name = "MultiHoster"
- __type = "addon"
- __version = "0.20"
-
- __description = """Base multi-hoster plugin"""
- __license = "GPLv3"
- __authors = [("pyLoad Team", "admin@pyload.org")]
-
-
- HOSTER_REPLACEMENTS = [("1fichier.com", "onefichier.com"), ("2shared.com", "twoshared.com"),
- ("4shared.com", "fourshared.com"), ("cloudnator.com", "shragle.com"),
- ("easy-share.com", "crocko.com"), ("freakshare.net", "freakshare.com"),
- ("hellshare.com", "hellshare.cz"), ("ifile.it", "filecloud.io"),
- ("putlocker.com", "firedrive.com"), ("share-rapid.cz", "multishare.cz"),
- ("sharerapid.cz", "multishare.cz"), ("ul.to", "uploaded.to"),
- ("uploaded.net", "uploaded.to")]
- HOSTER_EXCLUDED = []
-
-
- def setup(self):
- self.interval = 12 * 60 * 60 #: reload hosters every 12h
- self.hosters = []
- self.supported = []
- self.new_supported = []
-
-
- def getConfig(self, option, default=''):
- """getConfig with default value - subclass may not implements all config options"""
- try:
- # Fixed loop due to getConf deprecation in 0.4.10
- return super(MultiHoster, self).getConfig(option)
- except KeyError:
- return default
-
-
- def getHosterCached(self):
- if not self.hosters:
- try:
- hosterSet = self.toHosterSet(self.getHoster()) - set(self.HOSTER_EXCLUDED)
- except Exception, e:
- self.logError(e)
- return []
-
- try:
- configMode = self.getConfig('hosterListMode', 'all')
- if configMode in ("listed", "unlisted"):
- configSet = self.toHosterSet(self.getConfig('hosterList', '').replace('|', ',').replace(';', ',').split(','))
-
- if configMode == "listed":
- hosterSet &= configSet
- else:
- hosterSet -= configSet
-
- except Exception, e:
- self.logError(e)
-
- self.hosters = list(hosterSet)
-
- return self.hosters
-
-
- def toHosterSet(self, hosters):
- hosters = set((str(x).strip().lower() for x in hosters))
-
- for rep in self.HOSTER_REPLACEMENTS:
- if rep[0] in hosters:
- hosters.remove(rep[0])
- hosters.add(rep[1])
-
- hosters.discard('')
- return hosters
-
-
- def getHoster(self):
- """Load list of supported hoster
-
- :return: List of domain names
- """
- raise NotImplementedError
-
-
- def activate(self):
- if self.cb:
- self.core.scheduler.removeJob(self.cb)
-
- self.setConfig("activated", True) #: config not in sync after plugin reload
-
- cfg_interval = self.getConfig("interval", None) #: reload interval in hours
- if cfg_interval is not None:
- self.interval = cfg_interval * 60 * 60
-
- if self.interval:
- self._periodical()
- else:
- self.periodical()
-
-
- def periodical(self):
- """reload hoster list periodically"""
- self.logInfo(_("Reloading supported hoster list"))
-
- old_supported = self.supported
- self.supported = []
- self.new_supported = []
- self.hosters = []
-
- self.overridePlugins()
-
- old_supported = [hoster for hoster in old_supported if hoster not in self.supported]
- if old_supported:
- self.logDebug("UNLOAD", ", ".join(old_supported))
- for hoster in old_supported:
- self.unloadHoster(hoster)
-
-
- def overridePlugins(self):
- pluginMap = dict((name.lower(), name) for name in self.core.pluginManager.hosterPlugins.keys())
- accountList = [name.lower() for name, data in self.core.accountManager.accounts.iteritems() if data]
- excludedList = []
-
- for hoster in self.getHosterCached():
- name = remove_chars(hoster.lower(), "-.")
-
- if name in accountList:
- excludedList.append(hoster)
- else:
- if name in pluginMap:
- self.supported.append(pluginMap[name])
- else:
- self.new_supported.append(hoster)
-
- if not self.supported and not self.new_supported:
- self.logError(_("No Hoster loaded"))
- return
-
- module = self.core.pluginManager.getPlugin(self.__type, self.__name)
- klass = getattr(module, self.__name)
-
- # inject this plugin as the handler for the supported hosters
- self.logDebug("Overwritten Hosters", ", ".join(sorted(self.supported)))
- for hoster in self.supported:
- dict = self.core.pluginManager.hosterPlugins[hoster]
- dict['new_module'] = module
- dict['new_name'] = self.__name
-
- if excludedList:
- self.logInfo(_("The following hosters were not overwritten - account exists"), ", ".join(sorted(excludedList)))
-
- if self.new_supported:
- self.logDebug("New Hosters", ", ".join(sorted(self.new_supported)))
-
- # create new regexp
- regexp = r'.*(%s).*' % "|".join([x.replace(".", "\.") for x in self.new_supported])
- if hasattr(klass, "__pattern") and isinstance(klass.__pattern, basestring) and '://' in klass.__pattern:
- regexp = r'%s|%s' % (klass.__pattern, regexp)
-
- self.logDebug("Regexp", regexp)
-
- dict = self.core.pluginManager.hosterPlugins[self.__name]
- dict['pattern'] = regexp
- dict['re'] = re.compile(regexp)
-
-
- def unloadHoster(self, hoster):
- dict = self.core.pluginManager.hosterPlugins[hoster]
- if "module" in dict:
- del dict['module']
-
- if "new_module" in dict:
- del dict['new_module']
- del dict['new_name']
-
-
- def deactivate(self):
- """Remove override for all hosters. Scheduler job is removed by AddonManager"""
- for hoster in self.supported:
- self.unloadHoster(hoster)
-
- # reset pattern
- klass = getattr(self.core.pluginManager.getPlugin(self.__type, self.__name), self.__name)
- dict = self.core.pluginManager.hosterPlugins[self.__name]
- dict['pattern'] = getattr(klass, "__pattern", r'^unmatchable$')
- dict['re'] = re.compile(dict['pattern'])
-
-
- def downloadFailed(self, pyfile):
- """remove plugin override if download fails but not if file is offline/temp.offline"""
- if pyfile.hasStatus("failed") and self.getConfig("unloadFailing", True):
- hdict = self.core.pluginManager.hosterPlugins[pyfile.pluginname]
- if "new_name" in hdict and hdict['new_name'] == self.__name:
- self.logDebug("Unload MultiHoster", pyfile.pluginname, hdict)
- self.unloadHoster(pyfile.pluginname)
- pyfile.setStatus("queued")
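
A hook built on the MultiHoster base above only has to implement getHoster() and return the supported domain names; caching, the hosterListMode/hosterList filtering and the plugin override are handled by the base class. A minimal hypothetical sketch follows — the service name and API URL are invented, and the import uses the pre-rename pyload.plugins path shown in this diff:

from pyload.network.RequestFactory import getURL
from pyload.plugins.internal.MultiHoster import MultiHoster


class ExampleDebridCom(MultiHoster):
    __name    = "ExampleDebridCom"
    __type    = "addon"
    __version = "0.01"

    __config = [("activated"     , "bool"               , "Activated"                     , False),
                ("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all" ),
                ("hosterList"    , "str"                , "Hoster list (comma separated)" , ""    )]

    __description = """Example-debrid.com hook plugin (illustrative sketch)"""
    __license     = "GPLv3"
    __authors     = []


    def getHoster(self):
        #: the endpoint is an assumption; it only has to yield a list of domain names
        html = getURL("http://example-debrid.com/api/hosters.txt")
        return [x.strip() for x in html.replace("\n", ",").split(",") if x.strip()]
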
diff --git a/pyload/plugins/internal/SimpleCrypter.py b/pyload/plugins/internal/SimpleCrypter.py
deleted file mode 100644
index e9de95073..000000000
--- a/pyload/plugins/internal/SimpleCrypter.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from urlparse import urlparse
-
-from pyload.plugins.Crypter import Crypter
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, replace_patterns, set_cookies
-from pyload.utils import fixup
-
-
-class SimpleCrypter(Crypter, SimpleHoster):
- __name = "SimpleCrypter"
- __type = "crypter"
- __version = "0.32"
-
- __pattern = r'^unmatchable$'
- __config = [("use_subfolder", "bool", "Save package to subfolder", True), #: Overrides core.config['general']['folder_per_package']
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
-
- __description = """Simple decrypter plugin"""
- __license = "GPLv3"
- __authors = [("stickell", "l.stickell@yahoo.it"),
- ("zoidberg", "zoidberg@mujmail.cz"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- """
- The following patterns should be defined by each crypter:
-
- LINK_PATTERN: group(1) must be a download link or a regex to catch more links
- example: LINK_PATTERN = r'<div class="link"><a href="(.+?)"'
-
- NAME_PATTERN: (optional) folder name or webpage title
- example: NAME_PATTERN = r'<title>Files of: (?P<N>[^<]+) folder</title>'
-
- OFFLINE_PATTERN: (optional) Checks if the file is no longer available online
- example: OFFLINE_PATTERN = r'File (deleted|not found)'
-
- TEMP_OFFLINE_PATTERN: (optional) Checks if the file is temporarily offline
- example: TEMP_OFFLINE_PATTERN = r'Server maintainance'
-
-
- You can override the getLinks method if you need a more sophisticated way to extract the links.
-
-
- If the links are split across multiple pages you can define the PAGES_PATTERN regex:
-
- PAGES_PATTERN: (optional) group(1) should be the number of overall pages containing the links
- example: PAGES_PATTERN = r'Pages: (\d+)'
-
- and its loadPage method:
-
-
- def loadPage(self, page_n):
- return the html of the page number page_n
- """
-
- LINK_PATTERN = None
-
- NAME_REPLACEMENTS = [("&#?\w+;", fixup)]
- URL_REPLACEMENTS = []
-
- TEXT_ENCODING = False #: Set to True or encoding name if encoding in http header is not correct
- COOKIES = True #: or False or list of tuples [(domain, name, value)]
-
- LOGIN_ACCOUNT = False
- LOGIN_PREMIUM = False
-
-
- def prepare(self):
- self.info = {}
- self.links = []
-
- if self.LOGIN_ACCOUNT and not self.account:
- self.fail(_("Required account not found"))
-
- if self.LOGIN_PREMIUM and not self.premium:
- self.fail(_("Required premium account not found"))
-
- self.req.setOption("timeout", 120)
-
- if isinstance(self.COOKIES, list):
- set_cookies(self.req.cj, self.COOKIES)
-
- self.pyfile.url = replace_patterns(self.pyfile.url, self.URL_REPLACEMENTS)
-
-
- def decrypt(self, pyfile):
- self.prepare()
-
- self.preload()
-
- if self.html is None:
- self.fail(_("No html retrieved"))
-
- self.checkInfo()
-
- self.links = self.getLinks()
-
- if hasattr(self, 'PAGES_PATTERN') and hasattr(self, 'loadPage'):
- self.handleMultiPages()
-
- self.logDebug("Package has %d links" % len(self.links))
-
- if self.links:
- self.packages = [(self.info['name'], self.links, self.info['folder'])]
-
-
- def checkStatus(self):
- status = self.info['status']
-
- if status is 1:
- self.offline()
-
- elif status is 6:
- self.tempOffline()
-
-
- def checkNameSize(self):
- name = self.info['name']
- url = self.info['url']
-
- if name and name != url:
- self.pyfile.name = name
- else:
- self.pyfile.name = name = self.info['name'] = urlparse(name).path.split('/')[-1]
-
- folder = self.info['folder'] = name
-
- self.logDebug("File name: %s" % name,
- "File folder: %s" % folder)
-
-
- def getLinks(self):
- """
- Returns the links extracted from self.html
- You should override this only if it's impossible to extract links using only the LINK_PATTERN.
- """
- return re.findall(self.LINK_PATTERN, self.html)
-
-
- def handleMultiPages(self):
- try:
- m = re.search(self.PAGES_PATTERN, self.html)
- pages = int(m.group(1))
- except Exception:
- pages = 1
-
- for p in xrange(2, pages + 1):
- self.html = self.loadPage(p)
- self.links += self.getLinks()
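
As the docstring above describes, a concrete decrypter normally just declares the patterns and, when the links span several pages, a loadPage() hook. A minimal hypothetical sketch — host, URLs and regexes are invented, the import uses the pre-rename pyload.plugins path shown in this diff, and load() is assumed to take the usual get= keyword:

from pyload.plugins.internal.SimpleCrypter import SimpleCrypter


class ExampleFolder(SimpleCrypter):
    __name    = "ExampleFolder"
    __type    = "crypter"
    __version = "0.01"

    __pattern = r'http://(?:www\.)?example\.com/folder/\w+'

    __description = """Example.com folder decrypter plugin (illustrative sketch)"""
    __license     = "GPLv3"
    __authors     = []


    #: group(1) must be a download link
    LINK_PATTERN    = r'<a class="file" href="(.+?)"'
    NAME_PATTERN    = r'<title>Folder: (?P<N>.+?)</title>'
    OFFLINE_PATTERN = r'Folder (deleted|not found)'

    #: optional: links spread over several pages
    PAGES_PATTERN = r'Page \d+ of (\d+)'


    def loadPage(self, page_n):
        #: return the html of page number page_n
        return self.load(self.pyfile.url, get={'page': page_n})
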
diff --git a/pyload/plugins/internal/SimpleHoster.py b/pyload/plugins/internal/SimpleHoster.py
deleted file mode 100644
index 0c1c2ae5a..000000000
--- a/pyload/plugins/internal/SimpleHoster.py
+++ /dev/null
@@ -1,530 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import time
-from urllib import unquote
-from urlparse import urljoin, urlparse
-
-from pyload.datatype.PyFile import statusMap as _statusMap
-from pyload.network.CookieJar import CookieJar
-from pyload.network.HTTPRequest import BadHeader
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Hoster import Hoster
-from pyload.utils import fixup, formatSize, parseFileSize
-
-
-#@TODO: Adapt and move to PyFile in 0.4.10
-statusMap = dict((v, k) for k, v in _statusMap.iteritems())
-
-
-def replace_patterns(string, ruleslist):
- for r in ruleslist:
- rf, rt = r
- string = re.sub(rf, rt, string)
- return string
-
-
-def set_cookies(cj, cookies):
- for cookie in cookies:
- if isinstance(cookie, tuple) and len(cookie) == 3:
- domain, name, value = cookie
- cj.setCookie(domain, name, value)
-
-
-def parseHtmlTagAttrValue(attr_name, tag):
- m = re.search(r"%s\s*=\s*([\"']?)((?<=\")[^\"]+|(?<=')[^']+|[^>\s\"'][^>\s]*)\1" % attr_name, tag, re.I)
- return m.group(2) if m else None
-
-
-def parseHtmlForm(attr_str, html, input_names={}):
- for form in re.finditer(r"(?P<TAG><form[^>]*%s[^>]*>)(?P<CONTENT>.*?)</?(form|body|html)[^>]*>" % attr_str,
- html, re.S | re.I):
- inputs = {}
- action = parseHtmlTagAttrValue("action", form.group('TAG'))
-
- for inputtag in re.finditer(r'(<(input|textarea)[^>]*>)([^<]*(?=</\2)|)', form.group('CONTENT'), re.S | re.I):
- name = parseHtmlTagAttrValue("name", inputtag.group(1))
- if name:
- value = parseHtmlTagAttrValue("value", inputtag.group(1))
- if not value:
- inputs[name] = inputtag.group(3) or ''
- else:
- inputs[name] = value
-
- if input_names:
- # check input attributes
- for key, val in input_names.iteritems():
- if key in inputs:
- if isinstance(val, basestring) and inputs[key] == val:
- continue
- elif isinstance(val, tuple) and inputs[key] in val:
- continue
- elif hasattr(val, "search") and re.match(val, inputs[key]):
- continue
- break #: attribute value does not match
- else:
- break #: attribute name does not match
- else:
- return action, inputs #: passed attribute check
- else:
- # no attribute check
- return action, inputs
-
- return {}, None #: no matching form found
-
-
-#: Deprecated
-def parseFileInfo(plugin, url="", html=""):
- info = plugin.getInfo(url, html)
- return info['name'], info['size'], info['status'], info['url']
-
-
-#@TODO: Remove in 0.4.10
-#@NOTE: Every plugin must have own parseInfos classmethod to work with 0.4.10
-def create_getInfo(plugin):
- return lambda urls: [(info['name'], info['size'], info['status'], info['url']) for info in plugin.parseInfos(urls)]
-
-
-def timestamp():
- return int(time() * 1000)
-
-
-#@TODO: Move to hoster class in 0.4.10
-def _isDirectLink(self, url, resumable=True):
- header = self.load(url, ref=True, just_header=True, decode=True)
-
- if not 'location' in header or not header['location']:
- return ""
-
- location = header['location']
-
- resumable = False #@NOTE: Testing...
-
- if resumable: #: sometimes http code may be wrong...
- if 'location' in self.load(location, ref=True, cookies=True, just_header=True, decode=True):
- return ""
- else:
- if not 'code' in header or header['code'] != 302:
- return ""
-
- if urlparse(location).scheme:
- link = location
- else:
- p = urlparse(url)
- base = "%s://%s" % (p.scheme, p.netloc)
- link = urljoin(base, location)
-
- return link
-
-
-class SimpleHoster(Hoster):
- __name = "SimpleHoster"
- __type = "hoster"
- __version = "0.72"
-
- __pattern = r'^unmatchable$'
-
- __description = """Simple hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- """
- The following info patterns should be defined by each hoster:
-
- INFO_PATTERN: (optional) Name and Size of the file
- example: INFO_PATTERN = r'(?P<N>file_name) (?P<S>file_size) (?P<U>size_unit)'
- or
- NAME_PATTERN: (optional) Name that will be set for the file
- example: NAME_PATTERN = r'(?P<N>file_name)'
- SIZE_PATTERN: (optional) Size that will be checked for the file
- example: SIZE_PATTERN = r'(?P<S>file_size) (?P<U>size_unit)'
-
- HASHSUM_PATTERN: (optional) Hash code and type of the file
- example: HASHSUM_PATTERN = r'(?P<H>hash_code) (?P<T>MD5)'
-
- OFFLINE_PATTERN: (optional) Check if the file is no longer available online
- example: OFFLINE_PATTERN = r'File (deleted|not found)'
-
- TEMP_OFFLINE_PATTERN: (optional) Check if the file is temporarily offline
- example: TEMP_OFFLINE_PATTERN = r'Server (maintenance|maintainance)'
-
-
- Error handling patterns are all optional:
-
- WAIT_PATTERN: (optional) Detect waiting time
- example: WAIT_PATTERN = r''
-
- PREMIUM_ONLY_PATTERN: (optional) Check if the file can be downloaded only with a premium account
- example: PREMIUM_ONLY_PATTERN = r'Premium account required'
-
- ERROR_PATTERN: (optional) Detect any error preventing download
- example: ERROR_PATTERN = r''
-
-
- Instead of overriding the handleFree and handlePremium methods, you can define the following patterns for direct download:
-
- LINK_FREE_PATTERN: (optional) group(1) should be the direct link for free download
- example: LINK_FREE_PATTERN = r'<div class="link"><a href="(.+?)"'
-
- LINK_PREMIUM_PATTERN: (optional) group(1) should be the direct link for premium download
- example: LINK_PREMIUM_PATTERN = r'<div class="link"><a href="(.+?)"'
- """
-
- NAME_REPLACEMENTS = [("&#?\w+;", fixup)]
- SIZE_REPLACEMENTS = []
- URL_REPLACEMENTS = []
-
- TEXT_ENCODING = False #: Set to True or encoding name if encoding value in http header is not correct
- COOKIES = True #: or False or list of tuples [(domain, name, value)]
- FORCE_CHECK_TRAFFIC = False #: Set to True to force checking traffic left for premium account
- CHECK_DIRECT_LINK = None #: Set to True to check for direct link, set to None to do it only if self.account is True
- MULTI_HOSTER = False #: Set to True to leech other hosters' links (according to its multihoster hook, if available)
-
-
- @classmethod
- def parseInfos(cls, urls):
- for url in urls:
- url = replace_patterns(url, cls.URL_REPLACEMENTS)
- yield cls.getInfo(url)
-
-
- @classmethod
- def getInfo(cls, url="", html=""):
- info = {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3, 'url': url}
-
- if not html:
- try:
- if not url:
- info['error'] = "missing url"
- info['status'] = 1
- raise
-
- try:
- html = getURL(url, cookies=cls.COOKIES, decode=not cls.TEXT_ENCODING)
-
- if isinstance(cls.TEXT_ENCODING, basestring):
- html = unicode(html, cls.TEXT_ENCODING)
-
- except BadHeader, e:
- info['error'] = "%d: %s" % (e.code, e.content)
-
- if e.code == 404:
- info['status'] = 1
- raise
-
- if e.code == 503:
- info['status'] = 6
- raise
- except Exception:
- return info
-
- online = False
-
- if hasattr(cls, "OFFLINE_PATTERN") and re.search(cls.OFFLINE_PATTERN, html):
- info['status'] = 1
-
- elif hasattr(cls, "TEMP_OFFLINE_PATTERN") and re.search(cls.TEMP_OFFLINE_PATTERN, html):
- info['status'] = 6
-
- else:
- try:
- info['pattern'] = re.match(cls.__pattern, url).groupdict() #: pattern groups will be saved here, please save api stuff to info['api']
- except Exception:
- info['pattern'] = {} #: ensure the key exists so the pattern checks below cannot raise KeyError
-
- for pattern in ("INFO_PATTERN", "NAME_PATTERN", "SIZE_PATTERN", "HASHSUM_PATTERN"):
- try:
- attr = getattr(cls, pattern)
- dict = re.search(attr, html).groupdict()
-
- if all(True for k in dict if k not in info['pattern']):
- info['pattern'].update(dict)
-
- except AttributeError:
- continue
-
- else:
- online = True
-
- if online:
- info['status'] = 2
-
- if 'N' in info['pattern']:
- info['name'] = replace_patterns(unquote(info['pattern']['N'].strip()), cls.NAME_REPLACEMENTS)
-
- if 'S' in info['pattern']:
- size = replace_patterns(info['pattern']['S'] + info['pattern']['U'] if 'U' in info['pattern'] else info['pattern']['S'],
- cls.SIZE_REPLACEMENTS)
- info['size'] = parseFileSize(size)
-
- elif isinstance(info['size'], basestring):
- unit = info['units'] if 'units' in info else None
- info['size'] = parseFileSize(info['size'], unit)
-
- if 'H' in info['pattern']:
- hashtype = info['pattern']['T'] if 'T' in info['pattern'] else "hash"
- info[hashtype] = info['pattern']['H']
-
- return info
-
-
- def setup(self):
- self.resumeDownload = self.multiDL = self.premium
-
-
- def prepare(self):
- self.info = {}
- self.link = "" #@TODO: Move to hoster class in 0.4.10
- self.directDL = False #@TODO: Move to hoster class in 0.4.10
- self.multihost = False #@TODO: Move to hoster class in 0.4.10
-
- self.req.setOption("timeout", 120)
-
- if isinstance(self.COOKIES, list):
- set_cookies(self.req.cj, self.COOKIES)
-
- if (self.MULTI_HOSTER
- and (self.__pattern != self.core.pluginManager.hosterPlugins[self.__name]['pattern']
- or re.match(self.__pattern, self.pyfile.url) is None)):
-
- self.logInfo("Multi hoster detected")
-
- if self.account:
- self.multihost = True
- return
- else:
- self.fail(_("Only registered or premium users can use url leech feature"))
-
- if self.CHECK_DIRECT_LINK is None:
- self.directDL = bool(self.account)
-
- self.pyfile.url = replace_patterns(self.pyfile.url, self.URL_REPLACEMENTS)
-
-
- def preload(self):
- self.html = self.load(self.pyfile.url, cookies=bool(self.COOKIES), decode=not self.TEXT_ENCODING)
-
- if isinstance(self.TEXT_ENCODING, basestring):
- self.html = unicode(self.html, self.TEXT_ENCODING)
-
-
- def process(self, pyfile):
- self.prepare()
-
- if self.multihost:
- self.logDebug("Looking for leeched download link...")
- self.handleMulti()
-
- elif self.directDL:
- self.logDebug("Looking for direct download link...")
- self.handleDirect()
-
- if not self.link:
- self.preload()
-
- if self.html is None:
- self.fail(_("No html retrieved"))
-
- self.checkErrors()
-
- premium_only = 'error' in self.info and self.info['error'] == "premium-only"
-
- self._updateInfo(self.getInfo(pyfile.url, self.html))
-
- self.checkNameSize()
-
- #: Usually premium-only pages don't show any file information
- if not premium_only:
- self.checkStatus()
-
- if self.premium and (not self.FORCE_CHECK_TRAFFIC or self.checkTrafficLeft()):
- self.logDebug("Handled as premium download")
- self.handlePremium()
-
- elif premium_only:
- self.fail(_("Link require a premium account to be handled"))
-
- else:
- self.logDebug("Handled as free download")
- self.handleFree()
-
- self.downloadLink(self.link)
- self.checkFile()
-
-
- def downloadLink(self, link):
- if not link:
- return
-
- self.download(link, disposition=True)
-
-
- def checkFile(self):
- if self.checkDownload({'empty': re.compile(r"^$")}) == "empty": #@TODO: Move to hoster in 0.4.10
- self.fail(_("Empty file"))
-
-
- def checkErrors(self):
- if hasattr(self, 'ERROR_PATTERN'):
- m = re.search(self.ERROR_PATTERN, self.html)
- if m:
- errmsg = self.info['error'] = m.group(1)
- self.error(errmsg)
-
- if hasattr(self, 'PREMIUM_ONLY_PATTERN'):
- m = re.search(self.PREMIUM_ONLY_PATTERN, self.html)
- if m:
- self.info['error'] = "premium-only"
- return
-
- if hasattr(self, 'WAIT_PATTERN'):
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- wait_time = sum([int(v) * {"hr": 3600, "hour": 3600, "min": 60, "sec": 1}[u.lower()] for v, u in
- re.findall(r'(\d+)\s*(hr|hour|min|sec)', m.group(0), re.I)])
- self.wait(wait_time, False)
- return
-
- self.info.pop('error', None)
-
-
- def checkStatus(self):
- status = self.info['status']
-
- if status is 1:
- self.offline()
-
- elif status is 6:
- self.tempOffline()
-
- elif status is not 2:
- self.logInfo(_("File status: %s") % statusMap[status],
- _("File info: %s") % self.info)
- self.error(_("No file info retrieved"))
-
-
- def checkNameSize(self):
- name = self.info['name']
- size = self.info['size']
- url = self.info['url']
-
- if name and name != url:
- self.pyfile.name = name
- else:
- self.pyfile.name = name = self.info['name'] = urlparse(name).path.split('/')[-1]
-
- if size > 0:
- self.pyfile.size = size
- else:
- size = "Unknown"
-
- self.logDebug("File name: %s" % name,
- "File size: %s" % size)
-
-
- def checkInfo(self):
- self.checkErrors()
-
- self._updateInfo(self.getInfo(self.pyfile.url, self.html or ""))
-
- self.checkNameSize()
- self.checkStatus()
-
-
- #: Deprecated
- def getFileInfo(self):
- self.info = {}
- self.checkInfo()
- return self.info
-
-
- def _updateInfo(self, info):
- self.logDebug(_("File info (before update): %s") % self.info)
- self.info.update(info)
- self.logDebug(_("File info (after update): %s") % self.info)
-
-
- def handleDirect(self):
- link = _isDirectLink(self, self.pyfile.url, self.resumeDownload)
-
- if link:
- self.logInfo(_("Direct download link detected"))
-
- self.link = link
-
- self._updateInfo(self.getInfo(self.pyfile.url))
- self.checkNameSize()
- else:
- self.logDebug(_("Direct download link not found"))
-
-
- def handleMulti(self): #: Multi-hoster handler
- pass
-
-
- def handleFree(self):
- if not hasattr(self, 'LINK_FREE_PATTERN'):
- self.fail(_("Free download not implemented"))
-
- try:
- m = re.search(self.LINK_FREE_PATTERN, self.html)
- if m is None:
- self.error(_("Free download link not found"))
-
- self.link = m.group(1)
-
- except Exception, e:
- self.fail(e)
-
-
- def handlePremium(self):
- if not hasattr(self, 'LINK_PREMIUM_PATTERN'):
- self.fail(_("Premium download not implemented"))
-
- try:
- m = re.search(self.LINK_PREMIUM_PATTERN, self.html)
- if m is None:
- self.error(_("Premium download link not found"))
-
- self.link = m.group(1)
-
- except Exception, e:
- self.fail(e)
-
-
- def longWait(self, wait_time=None, max_tries=3):
- if wait_time and isinstance(wait_time, (int, long, float)):
- time_str = "%dh %dm" % divmod(wait_time / 60, 60)
- else:
- wait_time = 900
- time_str = _("(unknown time)")
- max_tries = 100
-
- self.logInfo(_("Download limit reached, reconnect or wait %s") % time_str)
-
- self.setWait(wait_time, True)
- self.wait()
- self.retry(max_tries=max_tries, reason=_("Download limit reached"))
-
-
- def parseHtmlForm(self, attr_str="", input_names={}):
- return parseHtmlForm(attr_str, self.html, input_names)
-
-
- def checkTrafficLeft(self):
- traffic = self.account.getAccountInfo(self.user, True)['trafficleft']
-
- if traffic is None:
- return False
- elif traffic == -1:
- return True
- else:
- self.logInfo(_("Filesize: %s, Traffic left for user %s: %s") % (formatSize(size), self.user, formatSize(traffic)))
- return self.pyfile.size <= traffic
-
-
- def error(self, reason="", type="parse"):
- return super(SimpleHoster, self).error(reason, type)
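
The docstring above is the whole contract for most hosters: a typical subclass only declares patterns and lets SimpleHoster drive info parsing, waiting and the free download. A minimal hypothetical sketch — host and regexes are invented, the import uses the pre-rename pyload.plugins path shown in this diff:

from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo


class ExampleHostCom(SimpleHoster):
    __name    = "ExampleHostCom"
    __type    = "hoster"
    __version = "0.01"

    __pattern = r'http://(?:www\.)?example-host\.com/file/\w+'

    __description = """Example-host.com hoster plugin (illustrative sketch)"""
    __license     = "GPLv3"
    __authors     = []


    NAME_PATTERN = r'<h1 class="filename">(?P<N>.+?)</h1>'
    SIZE_PATTERN = r'Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)'

    OFFLINE_PATTERN   = r'>File not found<'
    WAIT_PATTERN      = r'Please wait (\d+) sec'
    LINK_FREE_PATTERN = r'<a id="download" href="(.+?)"'


getInfo = create_getInfo(ExampleHostCom)
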
diff --git a/pyload/plugins/internal/UnRar.py b/pyload/plugins/internal/UnRar.py
deleted file mode 100644
index 97785fa97..000000000
--- a/pyload/plugins/internal/UnRar.py
+++ /dev/null
@@ -1,221 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import os
-import re
-
-from glob import glob
-from os.path import basename, join
-from string import digits
-from subprocess import Popen, PIPE
-
-from pyload.plugins.internal.AbstractExtractor import AbtractExtractor, WrongPassword, ArchiveError, CRCError
-from pyload.utils import safe_join, decode
-
-
-def renice(pid, value):
- if os.name != "nt" and value:
- try:
- Popen(["renice", str(value), str(pid)], stdout=PIPE, stderr=PIPE, bufsize=-1)
- except Exception:
- print "Renice failed"
-
-
-class UnRar(AbtractExtractor):
- __name = "UnRar"
- __version = "0.19"
-
- __description = """Rar extractor plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org")]
-
-
- CMD = "unrar"
-
- # there are some more uncovered rar formats
- re_version = re.compile(r'UNRAR ([\w .]+?) freeware')
- re_splitfile = re.compile(r'(.*)\.part(\d+)\.rar$', re.I)
- re_partfiles = re.compile(r'.*\.(rar|r\d+)', re.I)
- re_filelist = re.compile(r'(.+)\s+(\d+)\s+(\d+)\s+')
- re_filelist5 = re.compile(r'(.+)\s+(\d+)\s+\d\d-\d\d-\d\d\s+\d\d:\d\d\s+(.+)')
- re_wrongpwd = re.compile(r'(Corrupt file or wrong password|password incorrect)', re.I)
-
-
- @staticmethod
- def checkDeps():
- if os.name == "nt":
- UnRar.CMD = join(pypath, "UnRAR.exe")
- p = Popen([UnRar.CMD], stdout=PIPE, stderr=PIPE)
- p.communicate()
- else:
- try:
- p = Popen([UnRar.CMD], stdout=PIPE, stderr=PIPE)
- p.communicate()
- except OSError:
-
- # fallback to rar
- UnRar.CMD = "rar"
- p = Popen([UnRar.CMD], stdout=PIPE, stderr=PIPE)
- p.communicate()
-
- return True
-
-
- @staticmethod
- def getTargets(files_ids):
- result = []
-
- for file, id in files_ids:
- if not file.endswith(".rar"):
- continue
-
- match = UnRar.re_splitfile.findall(file)
- if match:
- # only add first parts
- if int(match[0][1]) == 1:
- result.append((file, id))
- else:
- result.append((file, id))
-
- return result
-
-
- def init(self):
- self.passwordProtected = False
- self.headerProtected = False #: list files will not work without password
- self.smallestFile = None #: small file to test passwords
- self.password = "" #: save the correct password
-
-
- def checkArchive(self):
- p = self.call_unrar("l", "-v", self.file)
- out, err = p.communicate()
- if self.re_wrongpwd.search(err):
- self.passwordProtected = True
- self.headerProtected = True
- return True
-
- # output only used to check if passworded files are present
- if self.re_version.search(out):
- for attr, size, name in self.re_filelist5.findall(out):
- if attr.startswith("*"):
- self.passwordProtected = True
- return True
- else:
- for name, size, packed in self.re_filelist.findall(out):
- if name.startswith("*"):
- self.passwordProtected = True
- return True
-
- self.listContent()
- if not self.files:
- raise ArchiveError("Empty Archive")
-
- return False
-
-
- def checkPassword(self, password):
- # at this point we can only verify header protected files
- if self.headerProtected:
- p = self.call_unrar("l", "-v", self.file, password=password)
- out, err = p.communicate()
- if self.re_wrongpwd.search(err):
- return False
-
- return True
-
-
- def extract(self, progress, password=None):
- command = "x" if self.fullpath else "e"
-
- p = self.call_unrar(command, self.file, self.out, password=password)
- renice(p.pid, self.renice)
-
- progress(0)
- progressstring = ""
- while True:
- c = p.stdout.read(1)
- # quit loop on eof
- if not c:
- break
- # reading a percentage sign -> set progress and restart
- if c == '%':
- progress(int(progressstring))
- progressstring = ""
- # not reading a digit -> therefore restart
- elif c not in digits:
- progressstring = ""
- # add digit to progressstring
- else:
- progressstring = progressstring + c
- progress(100)
-
- # retrieve stderr
- err = p.stderr.read()
-
- if "CRC failed" in err and not password and not self.passwordProtected:
- raise CRCError
- elif "CRC failed" in err:
- raise WrongPassword
- if err.strip(): #: raise error if anything is on stderr
- raise ArchiveError(err.strip())
- if p.returncode:
- raise ArchiveError("Process terminated")
-
- if not self.files:
- self.password = password
- self.listContent()
-
-
- def getDeleteFiles(self):
- if ".part" in basename(self.file):
- return glob(re.sub(r"(?i)(?<=\.part)([01]+)", "*", self.file))
- # get files which matches .r* and filter unsuited files out
- parts = glob(re.sub(r"(?i)(?<=\.r)ar$", "*", self.file))
- return filter(lambda x: self.re_partfiles.match(x), parts)
-
-
- def listContent(self):
- command = "vb" if self.fullpath else "lb"
- p = self.call_unrar(command, "-v", self.file, password=self.password)
- out, err = p.communicate()
-
- if "Cannot open" in err:
- raise ArchiveError("Cannot open file")
-
- if err.strip(): #: only log error at this point
- self.m.logError(err.strip())
-
- result = set()
-
- for f in decode(out).splitlines():
- f = f.strip()
- result.add(safe_join(self.out, f))
-
- self.files = result
-
-
- def call_unrar(self, command, *xargs, **kwargs):
- args = []
- # overwrite flag
- args.append("-o+") if self.overwrite else args.append("-o-")
-
- if self.excludefiles:
- for word in self.excludefiles.split(';'):
- args.append("-x%s" % word)
-
- # assume yes on all queries
- args.append("-y")
-
- # set a password
- if "password" in kwargs and kwargs['password']:
- args.append("-p%s" % kwargs['password'])
- else:
- args.append("-p-")
-
- # NOTE: return codes are not reliable, some kind of threading, cleanup whatever issue
- call = [self.CMD, command] + args + list(xargs)
- self.m.logDebug(" ".join(call))
-
- p = Popen(call, stdout=PIPE, stderr=PIPE)
-
- return p
diff --git a/pyload/plugins/internal/UnZip.py b/pyload/plugins/internal/UnZip.py
deleted file mode 100644
index ea8bcc283..000000000
--- a/pyload/plugins/internal/UnZip.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import sys
-import zipfile
-
-from pyload.plugins.internal.AbstractExtractor import AbtractExtractor
-
-
-class UnZip(AbtractExtractor):
- __name = "UnZip"
- __version = "0.10"
-
- __description = """Zip extractor plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org")]
-
-
- @staticmethod
- def checkDeps():
- return sys.version_info[:2] >= (2, 6)
-
-
- @staticmethod
- def getTargets(files_ids):
- result = []
-
- for file, id in files_ids:
- if file.endswith(".zip"):
- result.append((file, id))
-
- return result
-
-
- def extract(self, progress, password=None):
- z = zipfile.ZipFile(self.file)
- self.files = z.namelist()
- z.extractall(self.out)
-
-
- def getDeleteFiles(self):
- return [self.file]
diff --git a/pyload/plugins/internal/UpdateManager.py b/pyload/plugins/internal/UpdateManager.py
deleted file mode 100644
index dd5d29d0b..000000000
--- a/pyload/plugins/internal/UpdateManager.py
+++ /dev/null
@@ -1,300 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-import sys
-
-from operator import itemgetter
-from os import path, remove, stat
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.Addon import Expose, Addon, threaded
-from pyload.utils import safe_join
-
-
-class UpdateManager(Addon):
- __name = "UpdateManager"
- __type = "addon"
- __version = "0.40"
-
- __config = [("activated" , "bool" , "Activated" , True ),
- ("mode" , "pyLoad + plugins;plugins only", "Check updates for" , "pyLoad + plugins"),
- ("interval" , "int" , "Check interval in hours" , 8 ),
- ("reloadplugins", "bool" , "Monitor plugins for code changes (debug mode only)", True ),
- ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , True )]
-
- __description = """Check for updates"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- # event_list = ["pluginConfigChanged"]
-
- SERVER_URL = "http://updatemanager.pyload.org"
- MIN_INTERVAL = 6 * 60 * 60 #: 6h minimum check interval (value is in seconds)
-
-
- def pluginConfigChanged(self, plugin, name, value):
- if name == "interval":
- interval = value * 60 * 60
- if self.MIN_INTERVAL <= interval != self.interval:
- self.core.scheduler.removeJob(self.cb)
- self.interval = interval
- self.initPeriodical()
- else:
- self.logDebug("Invalid interval value, kept current")
-
- elif name == "reloadplugins":
- if self.cb2:
- self.core.scheduler.removeJob(self.cb2)
- if value is True and self.core.debug:
- self.periodical2()
-
-
- def activate(self):
- self.pluginConfigChanged(self.__name, "interval", self.getConfig("interval"))
- x = lambda: self.pluginConfigChanged(self.__name, "reloadplugins", self.getConfig("reloadplugins"))
- self.core.scheduler.addJob(10, x, threaded=False)
-
-
- def deactivate(self):
- self.pluginConfigChanged(self.__name, "reloadplugins", False)
-
-
- def setup(self):
- self.cb2 = None
- self.interval = self.MIN_INTERVAL
- self.updating = False
- self.info = {'pyload': False, 'version': None, 'plugins': False}
- self.mtimes = {} #: store modification time for each plugin
-
-
- def periodical2(self):
- if not self.updating:
- self.autoreloadPlugins()
-
- self.cb2 = self.core.scheduler.addJob(4, self.periodical2, threaded=False)
-
-
- @Expose
- def autoreloadPlugins(self):
- """ reload and reindex all modified plugins """
- modules = filter(
- lambda m: m and (m.__name__.startswith("pyload.plugins.") or
- m.__name__.startswith("userplugins.")) and
- m.__name__.count(".") >= 2, sys.modules.itervalues()
- )
-
- reloads = []
-
- for m in modules:
- root, type, name = m.__name__.rsplit(".", 2)
- id = (type, name)
- if type in self.core.pluginManager.plugins:
- f = m.__file__.replace(".pyc", ".py")
- if not path.isfile(f):
- continue
-
- mtime = stat(f).st_mtime
-
- if id not in self.mtimes:
- self.mtimes[id] = mtime
- elif self.mtimes[id] < mtime:
- reloads.append(id)
- self.mtimes[id] = mtime
-
- return True if self.core.pluginManager.reloadPlugins(reloads) else False
-
-
- def periodical(self):
- if self.info['pyload'] or self.getConfig("nodebugupdate") and self.core.debug:
- return
-
- self.updateThread()
-
-
- def server_request(self):
- try:
- return getURL(self.SERVER_URL, get={'v': self.core.api.getServerVersion()}).splitlines()
- except Exception:
- self.logWarning(_("Unable to contact server to get updates"))
-
-
- @threaded
- def updateThread(self):
- self.updating = True
-
- status = self.update(onlyplugin=self.getConfig("mode") == "plugins only")
-
- if status == 2:
- self.core.api.restart()
- else:
- self.updating = False
-
-
- @Expose
- def updatePlugins(self):
- """ simple wrapper for calling plugin update quickly """
- return self.update(onlyplugin=True)
-
-
- @Expose
- def update(self, onlyplugin=False):
- """ check for updates """
- data = self.server_request()
-
- if not data:
- exitcode = 0
-
- elif data[0] == "None":
- self.logInfo(_("No new pyLoad version available"))
- updates = data[1:]
- exitcode = self._updatePlugins(updates)
-
- elif onlyplugin:
- exitcode = 0
-
- else:
- newversion = data[0]
- self.logInfo(_("*** New pyLoad Version %s available ***") % newversion)
- self.logInfo(_("*** Get it here: https://github.com/pyload/pyload/releases ***"))
- exitcode = 3
- self.info['pyload'] = True
- self.info['version'] = newversion
-
- return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required; 3 = No plugins updated, new pyLoad version available
-
-
- def _updatePlugins(self, updates):
- """ check for plugin updates """
-
- if self.info['plugins']:
- return False #: plugins were already updated
-
- exitcode = 0
- updated = []
-
- vre = re.compile(r'__version.*=.*("|\')([\d.]+)')
- url = updates[0]
- schema = updates[1].split('|')
-
- if "BLACKLIST" in updates:
- blacklist = updates[updates.index('BLACKLIST') + 1:]
- updates = updates[2:updates.index('BLACKLIST')]
- else:
- blacklist = None
- updates = updates[2:]
-
- upgradable = sorted(map(lambda x: dict(zip(schema, x.split('|'))), updates),
- key=itemgetter("type", "name"))
-
- for plugin in upgradable:
- filename = plugin['name']
- type = plugin['type']
- version = plugin['version']
-
- if filename.endswith(".pyc"):
- name = filename[:filename.find("_")]
- else:
- name = filename.replace(".py", "")
-
- plugins = getattr(self.core.pluginManager, "%sPlugins" % type)
-
- oldver = float(plugins[name]['version']) if name in plugins else None
- newver = float(version)
-
- if not oldver:
- msg = "New plugin: [%(type)s] %(name)s (v%(newver).2f)"
- elif newver > oldver:
- msg = "New version of plugin: [%(type)s] %(name)s (v%(oldver).2f -> v%(newver).2f)"
- else:
- continue
-
- self.logInfo(_(msg) % {'type' : type,
- 'name' : name,
- 'oldver': oldver,
- 'newver': newver})
- try:
- content = getURL(url % plugin)
- m = vre.search(content)
-
- if m and m.group(2) == version:
- f = open(safe_join("userplugins", type, filename), "wb")
- f.write(content)
- f.close()
- updated.append((type, name))
- else:
- raise Exception, _("Version mismatch")
-
- except Exception, e:
- self.logError(_("Error updating plugin %s") % filename, e)
-
- if blacklist:
- blacklisted = map(lambda x: (x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]), blacklist)
-
- # Always protect internal plugins from being removed
- blacklisted = [(t, n) for t, n in blacklisted if t != "internal"]
-
- blacklisted = sorted(blacklisted)
- removed = self.removePlugins(blacklisted)
- for t, n in removed:
- self.logInfo(_("Removed blacklisted plugin [%(type)s] %(name)s") % {
- 'type': t,
- 'name': n,
- })
-
- if updated:
- reloaded = self.core.pluginManager.reloadPlugins(updated)
- if reloaded:
- self.logInfo(_("Plugins updated and reloaded"))
- exitcode = 1
- else:
- self.logInfo(_("*** Plugins have been updated, but need a pyLoad restart to be reloaded ***"))
- self.info['plugins'] = True
- exitcode = 2
- else:
- self.logInfo(_("No plugin updates available"))
-
- return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required
-
-
- @Expose
- def removePlugins(self, type_plugins):
- """ delete plugins from disk """
-
- if not type_plugins:
- return
-
- self.logDebug("Requested deletion of plugins: %s" % type_plugins)
-
- removed = []
-
- for type, name in type_plugins:
- err = False
- file = name + ".py"
-
- for root in ("userplugins", path.join(pypath, "pyload", "plugins")):
-
- filename = safe_join(root, type, file)
- try:
- remove(filename)
- except Exception, e:
- self.logDebug("Error deleting: %s" % path.basename(filename), e)
- err = True
-
- filename += "c"
- if path.isfile(filename):
- try:
- if type == "addon":
- self.manager.deactivateAddon(name)
- remove(filename)
- except Exception, e:
- self.logDebug("Error deleting: %s" % path.basename(filename), e)
- err = True
-
- if not err:
- id = (type, name)
- removed.append(id)
-
- return removed #: return a list of the plugins successfully removed
diff --git a/pyload/plugins/internal/XFSAccount.py b/pyload/plugins/internal/XFSAccount.py
deleted file mode 100644
index 937ed1831..000000000
--- a/pyload/plugins/internal/XFSAccount.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import gmtime, mktime, strptime
-from urlparse import urljoin
-
-from pyload.plugins.Account import Account
-from pyload.plugins.internal.SimpleHoster import parseHtmlForm, set_cookies
-
-
-class XFSAccount(Account):
- __name = "XFSAccount"
- __type = "account"
- __version = "0.32"
-
- __description = """XFileSharing account plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = None
- HOSTER_URL = None
-
- COOKIES = [(HOSTER_DOMAIN, "lang", "english")]
-
- PREMIUM_PATTERN = r'\(Premium only\)'
-
- VALID_UNTIL_PATTERN = r'Premium.[Aa]ccount expire:.*?(\d{1,2} [\w^_]+ \d{4})'
-
- TRAFFIC_LEFT_PATTERN = r'Traffic available today:.*?<b>\s*(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
- TRAFFIC_LEFT_UNIT = "MB" #: used only if no group <U> was found
-
- LEECH_TRAFFIC_PATTERN = r'Leech Traffic left:<b>.*?(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
- LEECH_TRAFFIC_UNIT = "MB" #: used only if no group <U> was found
-
- LOGIN_FAIL_PATTERN = r'>\s*(Incorrect Login or Password|Error<)'
-
-
- def init(self):
- # if not self.HOSTER_DOMAIN:
- # self.fail(_("Missing HOSTER_DOMAIN"))
-
- if not self.HOSTER_URL:
- self.HOSTER_URL = "http://www.%s/" % self.HOSTER_DOMAIN
-
-
- def loadAccountInfo(self, user, req):
- validuntil = None
- trafficleft = None
- leechtraffic = None
- premium = None
-
- html = req.load(self.HOSTER_URL, get={'op': "my_account"}, decode=True)
-
- premium = True if re.search(self.PREMIUM_PATTERN, html) else False
-
- m = re.search(self.VALID_UNTIL_PATTERN, html)
- if m:
- expiredate = m.group(1).strip()
- self.logDebug("Expire date: " + expiredate)
-
- try:
- validuntil = mktime(strptime(expiredate, "%d %B %Y"))
-
- except Exception, e:
- self.logError(e)
-
- else:
- self.logDebug("Valid until: %s" % validuntil)
-
- if validuntil > mktime(gmtime()):
- premium = True
- trafficleft = -1
- else:
- premium = False
- validuntil = None #: registered account type (not premium)
- else:
- self.logDebug("VALID_UNTIL_PATTERN not found")
-
- m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- if m:
- try:
- traffic = m.groupdict()
- size = traffic['S']
-
- if "nlimited" in size:
- trafficleft = -1
- if validuntil is None:
- validuntil = -1
- else:
- if traffic['U']:
- unit = traffic['U']
- elif isinstance(self.TRAFFIC_LEFT_UNIT, basestring):
- unit = self.TRAFFIC_LEFT_UNIT
- else:
- unit = ""
-
- trafficleft = self.parseTraffic(size + unit)
-
- except Exception, e:
- self.logError(e)
- else:
- self.logDebug("TRAFFIC_LEFT_PATTERN not found")
-
- leech = [m.groupdict() for m in re.finditer(self.LEECH_TRAFFIC_PATTERN, html)]
- if leech:
- leechtraffic = 0
- try:
- for traffic in leech:
- size = traffic['S']
-
- if "nlimited" in size:
- leechtraffic = -1
- if validuntil is None:
- validuntil = -1
- break
- else:
- if traffic['U']:
- unit = traffic['U']
- elif isinstance(self.LEECH_TRAFFIC_UNIT, basestring):
- unit = self.LEECH_TRAFFIC_UNIT
- else:
- unit = ""
-
- leechtraffic += self.parseTraffic(size + unit)
-
- except Exception, e:
- self.logError(e)
- else:
- self.logDebug("LEECH_TRAFFIC_PATTERN not found")
-
- return {'validuntil': validuntil, 'trafficleft': trafficleft, 'leechtraffic': leechtraffic, 'premium': premium}
-
-
- def login(self, user, data, req):
- if isinstance(self.COOKIES, list):
- set_cookies(req.cj, self.COOKIES)
-
- url = urljoin(self.HOSTER_URL, "login.html")
- html = req.load(url, decode=True)
-
- action, inputs = parseHtmlForm('name="FL"', html)
- if not inputs:
- inputs = {'op': "login",
- 'redirect': self.HOSTER_URL}
-
- inputs.update({'login': user,
- 'password': data['password']})
-
- html = req.load(self.HOSTER_URL, post=inputs, decode=True)
-
- if re.search(self.LOGIN_FAIL_PATTERN, html):
- self.wrongPassword()
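The traffic patterns above capture a size figure (group S) and an optional unit (group U), which are concatenated and handed to parseTraffic(). The helper below is not pyLoad's parseTraffic, only a rough stand-in to show the kind of string it receives and the fallback to TRAFFIC_LEFT_UNIT:

    import re

    UNITS = {"B": 1, "KB": 1 << 10, "MB": 1 << 20, "GB": 1 << 30, "TB": 1 << 40}

    def parse_traffic(value, default_unit="MB"):
        """Rough stand-in for parseTraffic(): '1,5 GB' -> bytes."""
        m = re.match(r'([\d.,]+)\s*([A-Za-z]*)', value)
        size = float(m.group(1).replace(',', '.'))
        unit = (m.group(2) or default_unit).upper()
        return int(size * UNITS.get(unit, UNITS[default_unit]))

    parse_traffic("1,5 GB")  # -> 1610612736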
diff --git a/pyload/plugins/internal/XFSCrypter.py b/pyload/plugins/internal/XFSCrypter.py
deleted file mode 100644
index 280f87654..000000000
--- a/pyload/plugins/internal/XFSCrypter.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
-
-
-class XFSCrypter(SimpleCrypter):
- __name = "XFSCrypter"
- __type = "crypter"
- __version = "0.04"
-
- __pattern = r'^unmatchable$'
-
- __description = """XFileSharing decrypter plugin"""
- __license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = None
- HOSTER_NAME = None
-
- URL_REPLACEMENTS = [(r'&?per_page=\d+', ""), (r'[?/&]+$', ""), (r'(.+/[^?]+)$', r'\1?'), (r'$', r'&per_page=10000')]
-
- COOKIES = [(HOSTER_DOMAIN, "lang", "english")]
-
- LINK_PATTERN = r'<(?:td|TD).*?>\s*<a href="(.+?)".*?>.+?(?:</a>)?\s*</(?:td|TD)>'
- NAME_PATTERN = r'<[tT]itle>.*?\: (?P<N>.+) folder</[tT]itle>'
-
- OFFLINE_PATTERN = r'>\s*\w+ (Not Found|file (was|has been) removed)'
- TEMP_OFFLINE_PATTERN = r'>\s*\w+ server (is in )?(maintenance|maintainance)'
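URL_REPLACEMENTS above rewrites a folder URL so the listing is requested with per_page=10000, i.e. everything on one page. Presumably SimpleCrypter applies each (pattern, replacement) pair with re.sub in order; the sketch and the sample URL below are illustrative only, not the actual SimpleCrypter code.

    import re

    URL_REPLACEMENTS = [(r'&?per_page=\d+', ""), (r'[?/&]+$', ""),
                        (r'(.+/[^?]+)$', r'\1?'), (r'$', r'&per_page=10000')]

    def apply_replacements(url, rules=URL_REPLACEMENTS):
        for pattern, repl in rules:
            url = re.sub(pattern, repl, url)
        return url

    apply_replacements("http://host.example/folder/abc123?per_page=25")
    # -> 'http://host.example/folder/abc123?&per_page=10000'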
diff --git a/pyload/plugins/internal/XFSHoster.py b/pyload/plugins/internal/XFSHoster.py
deleted file mode 100644
index 8479be03f..000000000
--- a/pyload/plugins/internal/XFSHoster.py
+++ /dev/null
@@ -1,339 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from random import random
-from time import sleep
-
-from pyload.plugins.hoster.UnrestrictLi import secondsToMidnight
-from pyload.plugins.internal.captcha import ReCaptcha, SolveMedia
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-from pyload.utils import html_unescape
-
-
-class XFSHoster(SimpleHoster):
- __name = "XFSHoster"
- __type = "hoster"
- __version = "0.27"
-
- __pattern = r'^unmatchable$'
-
- __description = """XFileSharing hoster plugin"""
- __license = "GPLv3"
- __authors = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- HOSTER_DOMAIN = None
- HOSTER_NAME = None
-
- TEXT_ENCODING = False
- COOKIES = [(HOSTER_DOMAIN, "lang", "english")]
- CHECK_DIRECT_LINK = None
- MULTI_HOSTER = True #@NOTE: Should default to False to be safe, but I'm lazy...
-
- NAME_PATTERN = r'(>Filename:</b></td><td nowrap>|name="fname" value="|<span class="name">)(?P<N>.+?)(\s*<|")'
- SIZE_PATTERN = r'(>Size:</b></td><td>|>File:.*>|<span class="size">)(?P<S>[\d.,]+)\s*(?P<U>[\w^_]+)'
-
- OFFLINE_PATTERN = r'>\s*\w+ (Not Found|file (was|has been) removed)'
- TEMP_OFFLINE_PATTERN = r'>\s*\w+ server (is in )?(maintenance|maintainance)'
-
- WAIT_PATTERN = r'<span id="countdown_str">.*?>(\d+)</span>|id="countdown" value=".*?(\d+).*?"'
- PREMIUM_ONLY_PATTERN = r'>This file is available for Premium Users only'
- ERROR_PATTERN = r'(?:class=["\']err["\'].*?>|<[Cc]enter><b>|>Error</td>|>\(ERROR:)(?:\s*<.+?>\s*)*(.+?)(?:["\']|<|\))'
-
- LEECH_LINK_PATTERN = r'<h2>Download Link</h2>\s*<textarea[^>]*>([^<]+)'
- LINK_PATTERN = None #: final download url pattern
-
- CAPTCHA_PATTERN = r'(https?://[^"\']+?/captchas?/[^"\']+)'
- CAPTCHA_BLOCK_PATTERN = r'>Enter code.*?<div.*?>(.+?)</div>'
- RECAPTCHA_PATTERN = None
- SOLVEMEDIA_PATTERN = None
-
- FORM_PATTERN = None
- FORM_INPUTS_MAP = None #: dict passed as input_names to parseHtmlForm
-
-
- def setup(self):
- self.chunkLimit = 1
- self.resumeDownload = self.multiDL = self.premium
-
-
- def prepare(self):
- """ Initialize important variables """
- if not self.HOSTER_DOMAIN:
- self.fail(_("Missing HOSTER_DOMAIN"))
-
- if not self.HOSTER_NAME:
- self.HOSTER_NAME = "".join(part.capitalize() for part in self.HOSTER_DOMAIN.split('.'))
-
- if not self.LINK_PATTERN:
- pattern = r'(https?://(www\.)?([^/]*?%s|\d+\.\d+\.\d+\.\d+)(\:\d+)?(/d/|(/files)?/\d+/\w+/).+?)["\'<]'
- self.LINK_PATTERN = pattern % self.HOSTER_DOMAIN.replace('.', '\.')
-
- self.captcha = None
- self.errmsg = None
- self.passwords = self.getPassword().splitlines()
-
- super(XFSHoster, self).prepare()
-
- if self.CHECK_DIRECT_LINK is None:
- self.directDL = bool(self.premium)
-
-
- def handleFree(self):
- link = self.getDownloadLink()
-
- if link:
- if self.captcha:
- self.correctCaptcha()
-
- self.download(link, ref=True, cookies=True, disposition=True)
-
- elif self.errmsg:
- if 'captcha' in self.errmsg:
- self.fail(_("No valid captcha code entered"))
- else:
- self.fail(self.errmsg)
-
- else:
- self.fail(_("Download link not found"))
-
-
- def handlePremium(self):
- return self.handleFree()
-
-
- def getDownloadLink(self):
- for i in xrange(1, 6):
- self.logDebug("Getting download link: #%d" % i)
-
- self.checkErrors()
-
- m = re.search(self.LINK_PATTERN, self.html, re.S)
- if m:
- break
-
- data = self.getPostParameters()
-
- self.html = self.load(self.pyfile.url, post=data, ref=True, decode=True, follow_location=False)
-
- m = re.search(r'Location\s*:\s*(.+)', self.req.http.header, re.I)
- if m and "op=" not in m.group(1):
- break
-
- m = re.search(self.LINK_PATTERN, self.html, re.S)
- if m:
- break
- else:
- self.logError(data['op'] if 'op' in data else _("UNKNOWN"))
- return ""
-
- self.errmsg = None
-
- return m.group(1)
-
-
- def handleMulti(self):
- #only tested with easybytez.com
- self.html = self.load("http://www.%s/" % self.HOSTER_DOMAIN)
-
- action, inputs = self.parseHtmlForm()
-
- upload_id = "%012d" % int(random() * 10 ** 12)
- action += upload_id + "&js_on=1&utype=prem&upload_type=url"
-
- inputs['tos'] = '1'
- inputs['url_mass'] = self.pyfile.url
- inputs['up1oad_type'] = 'url'
-
- self.logDebug(action, inputs)
-
- self.req.setOption("timeout", 600) #: wait for file to upload to easybytez.com
-
- self.html = self.load(action, post=inputs)
-
- self.checkErrors()
-
- action, inputs = self.parseHtmlForm('F1')
- if not inputs:
- if self.errmsg:
- self.retry(reason=self.errmsg)
- else:
- self.error(_("TEXTAREA F1 not found"))
-
- self.logDebug(inputs)
-
- stmsg = inputs['st']
-
- if stmsg == 'OK':
- self.html = self.load(action, post=inputs)
-
- elif 'Can not leech file' in stmsg:
- self.retry(20, 3 * 60, _("Can not leech file"))
-
- elif 'today' in stmsg:
- self.retry(wait_time=secondsToMidnight(gmt=2), reason=_("You've used all Leech traffic today"))
-
- else:
- self.fail(stmsg)
-
- #get easybytez.com link for uploaded file
- m = re.search(self.LEECH_LINK_PATTERN, self.html)
- if m is None:
- self.error(_("LEECH_LINK_PATTERN not found"))
-
- header = self.load(m.group(1), just_header=True, decode=True)
-
- if 'location' in header: #: Direct download link
- self.link = header['location']
- else:
- self.fail(_("Download link not found"))
-
-
- def checkErrors(self):
- m = re.search(self.PREMIUM_ONLY_PATTERN, self.html)
- if m:
- self.info['error'] = "premium-only"
- return
-
- m = re.search(self.ERROR_PATTERN, self.html)
-
- if m is None:
- self.errmsg = None
- else:
- self.errmsg = m.group(1).strip()
-
- self.logWarning(re.sub(r"<.*?>", " ", self.errmsg))
-
- if 'wait' in self.errmsg:
- wait_time = sum([int(v) * {"hr": 3600, "hour": 3600, "min": 60, "sec": 1}[u.lower()] for v, u in
- re.findall(r'(\d+)\s*(hr|hour|min|sec)', self.errmsg, re.I)])
- self.wait(wait_time, True)
-
- elif 'country' in self.errmsg:
- self.fail(_("Downloads are disabled for your country"))
-
- elif 'captcha' in self.errmsg:
- self.invalidCaptcha()
-
- elif 'premium' in self.errmsg and 'require' in self.errmsg:
- self.fail(_("File can be downloaded by premium users only"))
-
- elif 'limit' in self.errmsg:
- if 'days' in self.errmsg:
- delay = secondsToMidnight(gmt=2)
- retries = 3
- else:
- delay = 1 * 60 * 60
- retries = 24
-
- self.wantReconnect = True
- self.retry(retries, delay, _("Download limit exceeded"))
-
- elif 'countdown' in self.errmsg or 'Expired' in self.errmsg:
- self.retry(reason=_("Link expired"))
-
- elif 'maintenance' in self.errmsg or 'maintainance' in self.errmsg:
- self.tempOffline()
-
- elif 'download files up to' in self.errmsg:
- self.fail(_("File too large for free download"))
-
- else:
- self.wantReconnect = True
- self.retry(wait_time=60, reason=self.errmsg)
-
- if self.errmsg:
- self.info['error'] = self.errmsg
- else:
- self.info.pop('error', None)
-
-
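The 'wait' branch above converts phrases such as "1 hour, 23 min" into seconds by summing value/unit pairs. A self-contained version of that arithmetic (the error message is invented):

    import re

    UNIT_SECONDS = {"hr": 3600, "hour": 3600, "min": 60, "sec": 1}

    def parse_wait(errmsg):
        return sum(int(v) * UNIT_SECONDS[u.lower()]
                   for v, u in re.findall(r'(\d+)\s*(hr|hour|min|sec)', errmsg, re.I))

    parse_wait("You have to wait 1 hour, 23 min, 10 sec till next download")
    # -> 3600 + 23 * 60 + 10 == 4990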
- def getPostParameters(self):
- if self.FORM_PATTERN or self.FORM_INPUTS_MAP:
- action, inputs = self.parseHtmlForm(self.FORM_PATTERN or "", self.FORM_INPUTS_MAP or {})
- else:
- action, inputs = self.parseHtmlForm(input_names={'op': re.compile(r'^download')})
-
- if not inputs:
- action, inputs = self.parseHtmlForm('F1')
- if not inputs:
- if self.errmsg:
- self.retry(reason=self.errmsg)
- else:
- self.error(_("TEXTAREA F1 not found"))
-
- self.logDebug(inputs)
-
- if 'op' in inputs:
- if "password" in inputs:
- if self.passwords:
- inputs['password'] = self.passwords.pop(0)
- else:
- self.fail(_("Missing password"))
-
- if not self.premium:
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- wait_time = int(m.group(1))
- self.setWait(wait_time, False)
-
- self.captcha = self.handleCaptcha(inputs)
-
- self.wait()
- else:
- inputs['referer'] = self.pyfile.url
-
- if self.premium:
- inputs['method_premium'] = "Premium Download"
- inputs.pop('method_free', None)
- else:
- inputs['method_free'] = "Free Download"
- inputs.pop('method_premium', None)
-
- return inputs
-
-
- def handleCaptcha(self, inputs):
- m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m:
- captcha_url = m.group(1)
- inputs['code'] = self.decryptCaptcha(captcha_url)
- return 1
-
- m = re.search(self.CAPTCHA_BLOCK_PATTERN, self.html, re.S)
- if m:
- captcha_div = m.group(1)
- numerals = re.findall(r'<span.*?padding-left\s*:\s*(\d+).*?>(\d)</span>', html_unescape(captcha_div))
- self.logDebug(captcha_div)
- inputs['code'] = "".join([a[1] for a in sorted(numerals, key=lambda num: int(num[0]))])
- self.logDebug("Captcha code: %s" % inputs['code'], numerals)
- return 2
-
- recaptcha = ReCaptcha(self)
- try:
- captcha_key = re.search(self.RECAPTCHA_PATTERN, self.html).group(1)
- except Exception:
- captcha_key = recaptcha.detect_key()
- else:
- self.logDebug("ReCaptcha key: %s" % captcha_key)
-
- if captcha_key:
- inputs['recaptcha_challenge_field'], inputs['recaptcha_response_field'] = recaptcha.challenge(captcha_key)
- return 3
-
- solvemedia = SolveMedia(self)
- try:
- captcha_key = re.search(self.SOLVEMEDIA_PATTERN, self.html).group(1)
- except Exception:
- captcha_key = solvemedia.detect_key()
- else:
- self.logDebug("SolveMedia key: %s" % captcha_key)
-
- if captcha_key:
- inputs['adcopy_challenge'], inputs['adcopy_response'] = solvemedia.challenge(captcha_key)
- return 4
-
- return 0
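Branch 2 of handleCaptcha() above handles captchas whose digits are positioned with CSS padding-left: sorting by that offset restores the left-to-right order. A standalone rerun of the same regex and sort on an invented snippet:

    import re

    captcha_div = ('<span style="padding-left:45px">7</span>'
                   '<span style="padding-left:15px">3</span>'
                   '<span style="padding-left:30px">9</span>'
                   '<span style="padding-left:0px">1</span>')

    numerals = re.findall(r'<span.*?padding-left\s*:\s*(\d+).*?>(\d)</span>', captcha_div)
    code = "".join(digit for _, digit in sorted(numerals, key=lambda n: int(n[0])))
    # code == "1397"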
diff --git a/pyload/plugins/ocr/GigasizeCom.py b/pyload/plugins/ocr/GigasizeCom.py
deleted file mode 100644
index 36d57d38f..000000000
--- a/pyload/plugins/ocr/GigasizeCom.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.OCR import OCR
-
-
-class GigasizeCom(OCR):
- __name = "GigasizeCom"
- __type = "ocr"
- __version = "0.10"
-
- __description = """Gigasize.com ocr plugin"""
- __license = "GPLv3"
- __authors = [("pyLoad Team", "admin@pyload.org")]
-
-
- def __init__(self):
- OCR.__init__(self)
-
-
- def get_captcha(self, image):
- self.load_image(image)
- self.threshold(2.8)
- self.run_tesser(True, False, False, True)
- return self.result_captcha
diff --git a/pyload/plugins/ocr/LinksaveIn.py b/pyload/plugins/ocr/LinksaveIn.py
deleted file mode 100644
index e2ec1b7de..000000000
--- a/pyload/plugins/ocr/LinksaveIn.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# -*- coding: utf-8 -*-
-
-try:
- from PIL import Image
-except ImportError:
- import Image
-
-from glob import glob
-from os import sep
-from os.path import abspath, dirname
-
-from pyload.plugins.OCR import OCR
-
-
-class LinksaveIn(OCR):
- __name = "LinksaveIn"
- __type = "ocr"
- __version = "0.10"
-
- __description = """Linksave.in ocr plugin"""
- __license = "GPLv3"
- __authors = [("pyLoad Team", "admin@pyload.org")]
-
-
- def __init__(self):
- OCR.__init__(self)
- self.data_dir = dirname(abspath(__file__)) + sep + "LinksaveIn" + sep
-
-
- def load_image(self, image):
- im = Image.open(image)
- frame_nr = 0
-
- lut = im.resize((256, 1))
- lut.putdata(range(256))
- lut = list(lut.convert("RGB").getdata())
-
- new = Image.new("RGB", im.size)
- npix = new.load()
- while True:
- try:
- im.seek(frame_nr)
- except EOFError:
- break
- frame = im.copy()
- pix = frame.load()
- for x in xrange(frame.size[0]):
- for y in xrange(frame.size[1]):
- if lut[pix[x, y]] != (0,0,0):
- npix[x, y] = lut[pix[x, y]]
- frame_nr += 1
- new.save(self.data_dir+"unblacked.png")
- self.image = new.copy()
- self.pixels = self.image.load()
- self.result_captcha = ''
-
-
- def get_bg(self):
- stat = {}
- cstat = {}
- img = self.image.convert("P")
- for bgpath in glob(self.data_dir+"bg/*.gif"):
- stat[bgpath] = 0
- bg = Image.open(bgpath)
-
- bglut = bg.resize((256, 1))
- bglut.putdata(range(256))
- bglut = list(bglut.convert("RGB").getdata())
-
- lut = img.resize((256, 1))
- lut.putdata(range(256))
- lut = list(lut.convert("RGB").getdata())
-
- bgpix = bg.load()
- pix = img.load()
- for x in xrange(bg.size[0]):
- for y in xrange(bg.size[1]):
- rgb_bg = bglut[bgpix[x, y]]
- rgb_c = lut[pix[x, y]]
- try:
- cstat[rgb_c] += 1
- except Exception:
- cstat[rgb_c] = 1
- if rgb_bg == rgb_c:
- stat[bgpath] += 1
- max_p = 0
- bg = ""
- for bgpath, value in stat.iteritems():
- if max_p < value:
- bg = bgpath
- max_p = value
- return bg
-
-
- def substract_bg(self, bgpath):
- bg = Image.open(bgpath)
- img = self.image.convert("P")
-
- bglut = bg.resize((256, 1))
- bglut.putdata(range(256))
- bglut = list(bglut.convert("RGB").getdata())
-
- lut = img.resize((256, 1))
- lut.putdata(range(256))
- lut = list(lut.convert("RGB").getdata())
-
- bgpix = bg.load()
- pix = img.load()
- orgpix = self.image.load()
- for x in xrange(bg.size[0]):
- for y in xrange(bg.size[1]):
- rgb_bg = bglut[bgpix[x, y]]
- rgb_c = lut[pix[x, y]]
- if rgb_c == rgb_bg:
- orgpix[x, y] = (255,255,255)
-
-
- def eval_black_white(self):
- new = Image.new("RGB", (140, 75))
- pix = new.load()
- orgpix = self.image.load()
- thresh = 4
- for x in xrange(new.size[0]):
- for y in xrange(new.size[1]):
- rgb = orgpix[x, y]
- r, g, b = rgb
- pix[x, y] = (255,255,255)
- if r > max(b, g)+thresh:
- pix[x, y] = (0,0,0)
- if g < min(r, b):
- pix[x, y] = (0,0,0)
- if g > max(r, b)+thresh:
- pix[x, y] = (0,0,0)
- if b > max(r, g)+thresh:
- pix[x, y] = (0,0,0)
- self.image = new
- self.pixels = self.image.load()
-
-
- def get_captcha(self, image):
- self.load_image(image)
- bg = self.get_bg()
- self.substract_bg(bg)
- self.eval_black_white()
- self.to_greyscale()
- self.image.save(self.data_dir+"cleaned_pass1.png")
- self.clean(4)
- self.clean(4)
- self.image.save(self.data_dir+"cleaned_pass2.png")
- letters = self.split_captcha_letters()
- final = ""
- for n, letter in enumerate(letters):
- self.image = letter
- self.image.save(self.data_dir+"letter%d.png" % n)
- self.run_tesser(True, True, False, False)
- final += self.result_captcha
-
- return final
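The resize((256, 1)) / putdata(range(256)) pattern repeated in load_image(), get_bg() and substract_bg() above builds a palette-index-to-RGB lookup table for a paletted GIF frame. Isolated as a small helper; the file name in the usage comment is a placeholder.

    from PIL import Image

    def palette_lut(img):
        """Map palette index -> (r, g, b) for a 'P' mode image, as done above."""
        lut = img.resize((256, 1))      # inherits img's palette
        lut.putdata(range(256))         # pixel i now stores palette index i
        return list(lut.convert("RGB").getdata())

    # img = Image.open("captcha.gif").convert("P")
    # lut = palette_lut(img)            # lut[index] gives the real colour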
diff --git a/pyload/plugins/ocr/NetloadIn.py b/pyload/plugins/ocr/NetloadIn.py
deleted file mode 100644
index 1ba710316..000000000
--- a/pyload/plugins/ocr/NetloadIn.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.OCR import OCR
-
-
-class NetloadIn(OCR):
- __name = "NetloadIn"
- __type = "ocr"
- __version = "0.10"
-
- __description = """Netload.in ocr plugin"""
- __license = "GPLv3"
- __authors = [("pyLoad Team", "admin@pyload.org")]
-
-
- def __init__(self):
- OCR.__init__(self)
-
-
- def get_captcha(self, image):
- self.load_image(image)
- self.to_greyscale()
- self.clean(3)
- self.clean(3)
- self.run_tesser(True, True, False, False)
-
- self.result_captcha = self.result_captcha.replace(" ", "")[:4] # cut to 4 numbers
-
- return self.result_captcha
diff --git a/pyload/plugins/ocr/ShareonlineBiz.py b/pyload/plugins/ocr/ShareonlineBiz.py
deleted file mode 100644
index 1ce359d38..000000000
--- a/pyload/plugins/ocr/ShareonlineBiz.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.OCR import OCR
-
-
-class ShareonlineBiz(OCR):
- __name = "ShareonlineBiz"
- __type = "ocr"
- __version = "0.10"
-
- __description = """Shareonline.biz ocr plugin"""
- __license = "GPLv3"
- __authors = [("RaNaN", "RaNaN@pyload.org")]
-
-
- def __init__(self):
- OCR.__init__(self)
-
-
- def get_captcha(self, image):
- self.load_image(image)
- self.to_greyscale()
- self.image = self.image.resize((160, 50))
- self.pixels = self.image.load()
- self.threshold(1.85)
- #self.eval_black_white(240)
- #self.derotate_by_average()
-
- letters = self.split_captcha_letters()
-
- final = ""
- for letter in letters:
- self.image = letter
- self.run_tesser(True, True, False, False)
- final += self.result_captcha
-
- return final
-
- #tesseract at 60%
diff --git a/pyload/remote/ClickAndLoadBackend.py b/pyload/remote/ClickAndLoadBackend.py
index 8c7906997..a73ea7f24 100644
--- a/pyload/remote/ClickAndLoadBackend.py
+++ b/pyload/remote/ClickAndLoadBackend.py
@@ -13,7 +13,7 @@ try:
except Exception:
pass
-from pyload.manager.RemoteManager import BackendBase
+from pyload.manager.Remote import BackendBase
core = None
js = None
diff --git a/pyload/remote/SocketBackend.py b/pyload/remote/SocketBackend.py
index c85e59f42..a1c885347 100644
--- a/pyload/remote/SocketBackend.py
+++ b/pyload/remote/SocketBackend.py
@@ -2,7 +2,7 @@
import SocketServer
-from pyload.manager.RemoteManager import BackendBase
+from pyload.manager.Remote import BackendBase
class RequestHandler(SocketServer.BaseRequestHandler):
diff --git a/pyload/remote/ThriftBackend.py b/pyload/remote/ThriftBackend.py
index 2b46e25c0..16917cfc9 100644
--- a/pyload/remote/ThriftBackend.py
+++ b/pyload/remote/ThriftBackend.py
@@ -3,7 +3,7 @@
from os.path import exists
-from pyload.manager.RemoteManager import BackendBase
+from pyload.manager.Remote import BackendBase
from pyload.remote.thriftbackend.Processor import Processor
from pyload.remote.thriftbackend.Protocol import ProtocolFactory
diff --git a/pyload/remote/socketbackend/create_ttypes.py b/pyload/remote/socketbackend/create_ttypes.py
index cf357bd53..5bfbcafa0 100644
--- a/pyload/remote/socketbackend/create_ttypes.py
+++ b/pyload/remote/socketbackend/create_ttypes.py
@@ -1,11 +1,16 @@
# -*- coding: utf-8 -*-
import inspect
+import os
+import platform
import sys
-from os.path import abspath, dirname, join
-sys.path.append(join(pypath, "pyload", "lib"))
-sys.path.append(join(pypath, "pyload", "remote"))
+
+if "64" in platform.machine():
+ sys.path.append(os.path.join(pypath, "lib64"))
+sys.path.append(os.path.join(pypath, "lib"))
+
+sys.path.append(os.path.join(pypath, "pyload", "remote"))
from pyload.remote.thriftbackend.thriftgen.pyload import ttypes
from pyload.remote.thriftbackend.thriftgen.pyload.Pyload import Iface
@@ -30,7 +35,7 @@ def main():
enums.append(klass)
- f = open(join(pypath, "pyload", "api", "types.py"), "wb")
+ f = open(os.path.join(pypath, "pyload", "api", "types.py"), "wb")
f.write(
"""# -*- coding: utf-8 -*-
diff --git a/pyload/remote/thriftbackend/ThriftClient.py b/pyload/remote/thriftbackend/ThriftClient.py
index d5159320e..7c2a1cb01 100644
--- a/pyload/remote/thriftbackend/ThriftClient.py
+++ b/pyload/remote/thriftbackend/ThriftClient.py
@@ -2,13 +2,17 @@
import sys
from socket import error
-from os.path import dirname, abspath, join
from traceback import print_exc
try:
import thrift
+
except ImportError:
- sys.path.append(abspath(join(dirname(abspath(__file__)), "..", "..", "lib")))
+ import platform
+
+ if "64" in platform.machine():
+ sys.path.append(os.path.join(pypath, "lib64"))
+ sys.path.append(os.path.join(pypath, "lib"))
from thrift.transport import TTransport
#from thrift.transport.TZlibTransport import TZlibTransport
diff --git a/pyload/remote/thriftbackend/ThriftTest.py b/pyload/remote/thriftbackend/ThriftTest.py
index 05c6ba40e..c9c0d3cf3 100644
--- a/pyload/remote/thriftbackend/ThriftTest.py
+++ b/pyload/remote/thriftbackend/ThriftTest.py
@@ -1,10 +1,13 @@
# -*- coding: utf-8 -*-
+import os
+import platform
import sys
-from os.path import join, abspath, dirname
-path = join((abspath(dirname(__file__))), "..", "..", "lib")
-sys.path.append(path)
+if "64" in platform.machine():
+ sys.path.append(os.path.join(pypath, "lib64"))
+sys.path.append(os.path.join(pypath, "lib"))
+
from pyload.remote.thriftbackend.thriftgen.pyload import Pyload
from pyload.remote.thriftbackend.thriftgen.pyload.ttypes import *
diff --git a/pyload/webui/app/pyload.py b/pyload/webui/app/pyload.py
index ccd2c30f0..2d1a8eb86 100644
--- a/pyload/webui/app/pyload.py
+++ b/pyload/webui/app/pyload.py
@@ -46,12 +46,12 @@ def pre_processor():
plugins = True
- return {"user": user,
- 'status': status,
+ return {"user" : user,
+ 'status' : status,
'captcha': captcha,
- 'perms': perms,
- 'url': request.url,
- 'update': update,
+ 'perms' : perms,
+ 'url' : request.url,
+ 'update' : update,
'plugins': plugins}